@marmooo/midy 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -2
- package/esm/midy-GM1.d.ts +86 -10
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1190 -101
- package/esm/midy-GM2.d.ts +103 -10
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1402 -162
- package/esm/midy-GMLite.d.ts +84 -9
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1183 -98
- package/esm/midy.d.ts +77 -15
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1416 -175
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +86 -10
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1190 -101
- package/script/midy-GM2.d.ts +103 -10
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1402 -162
- package/script/midy-GMLite.d.ts +84 -9
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1183 -98
- package/script/midy.d.ts +77 -15
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1416 -175
package/script/midy-GM1.js
CHANGED
@@ -4,6 +4,55 @@ exports.MidyGM1 = void 0;
 const midi_file_1 = require("midi-file");
 const soundfont_parser_1 = require("@marmooo/soundfont-parser");
 const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+// No caching. Envelope processing is done in real time on every note.
+// Uses Web Audio API nodes directly, so LFO and pitch envelope are
+// fully supported. Higher CPU usage.
+// "ads"
+// Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+// OfflineAudioContext and caches the result. The sustain tail is
+// aligned to the loop boundary as a fixed buffer. Release is
+// handled by fading volumeNode gain to 0 at note-off.
+// LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+// vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+// Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+// into an OfflineAudioContext. The cache key includes the note
+// duration in ticks (tempo-independent) and the volRelease parameter,
+// so notes with the same duration and release shape share a buffer.
+// LFO effects are applied in real time after playback starts,
+// same as "ads" mode. Higher cache hit rate than "note" mode
+// because LFO variations do not produce separate cache entries.
+// "note"
+// Renders the full noteOn-to-noteOff duration per note in an
+// OfflineAudioContext. All events during the note (volume,
+// expression, pitch bend, LFO, CC#1) are baked into the buffer,
+// so no real-time processing is needed during playback. Greatly
+// reduces CPU load for songs with many simultaneous notes.
+// MIDI file playback only; does not respond to real-time CC changes.
+// "audio"
+// Renders the entire MIDI file into a single AudioBuffer offline.
+// Call render() to complete rendering before calling start().
+// Playback simply streams an AudioBufferSourceNode, so CPU usage
+// is near zero. Seek and tempo changes are handled in real time.
+// A "rendering" event is dispatched when rendering starts, and a
+// "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "ads";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+    _f64Array[0] = value;
+    return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
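
The cache mode is chosen per instance through the new constructor options bag. A minimal usage sketch (the import path is an assumption, not shown in this diff; the package also ships script/ CommonJS builds):

    import { MidyGM1 } from "@marmooo/midy";

    const audioContext = new AudioContext();
    // "ads" is the default (DEFAULT_CACHE_MODE above); "audio" additionally
    // pre-renders the whole file, so render() must finish before start().
    const midy = new MidyGM1(audioContext, { cacheMode: "adsr" });
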
@@ -51,6 +100,24 @@ class Note {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "timelineIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "renderedBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "fullCacheVoiceId", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         Object.defineProperty(this, "filterEnvelopeNode", {
             enumerable: true,
             configurable: true,
@@ -95,6 +162,100 @@ class Note {
         });
     }
 }
+class Channel {
+    constructor(audioNodes, settings) {
+        Object.defineProperty(this, "isDrum", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "programNumber", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "scheduleIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "detune", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "dataMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "dataLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "rpnMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 127
+        });
+        Object.defineProperty(this, "rpnLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 127
+        });
+        Object.defineProperty(this, "modulationDepthRange", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        }); // cent
+        Object.defineProperty(this, "fineTuning", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        }); // cent
+        Object.defineProperty(this, "coarseTuning", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        }); // cent
+        Object.defineProperty(this, "scheduledNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "sustainNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "currentBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.assign(this, audioNodes);
+        Object.assign(this, settings);
+        this.state = new ControllerState();
+    }
+    resetSettings(settings) {
+        Object.assign(this, settings);
+    }
+}
 // normalized to 0-1 for use with the SF2 modulator model
 const defaultControllerState = {
     noteOnVelocity: { type: 2, defaultValue: 0 },
@@ -166,13 +327,73 @@ const pitchEnvelopeKeys = [
     "playbackRate",
 ];
 const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+class RenderedBuffer {
+    constructor(buffer, meta = {}) {
+        Object.defineProperty(this, "buffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isLoop", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isFull", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "adsDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopStart", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "noteDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "releaseDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.buffer = buffer;
+        this.isLoop = meta.isLoop ?? false;
+        this.isFull = meta.isFull ?? false;
+        this.adsDuration = meta.adsDuration;
+        this.loopStart = meta.loopStart;
+        this.loopDuration = meta.loopDuration;
+        this.noteDuration = meta.noteDuration;
+        this.releaseDuration = meta.releaseDuration;
+    }
+}
 function cbToRatio(cb) {
     return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 class MidyGM1 extends EventTarget {
-    constructor(audioContext) {
+    constructor(audioContext, options = {}) {
         super();
         // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
         // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -274,12 +495,6 @@ class MidyGM1 extends EventTarget {
             writable: true,
             value: "wasm-audio-decoders"
         });
-        Object.defineProperty(this, "decoderQueue", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: Promise.resolve()
-        });
         Object.defineProperty(this, "isPlaying", {
             enumerable: true,
             configurable: true,
@@ -314,9 +529,7 @@ class MidyGM1 extends EventTarget {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: new Set([
-                "noteOff",
-            ])
+            value: new Set(["noteOff"])
         });
         Object.defineProperty(this, "tempo", {
             enumerable: true,
@@ -360,7 +573,53 @@ class MidyGM1 extends EventTarget {
             writable: true,
             value: new Array(128)
         });
+        // "adsr" mode
+        Object.defineProperty(this, "adsrVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "note" mode
+        Object.defineProperty(this, "noteOnDurations", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "noteOnEvents", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "fullVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "audio" mode
+        Object.defineProperty(this, "renderedAudioBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "isRendering", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "audioModeBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         this.audioContext = audioContext;
+        this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
         this.masterVolume = new GainNode(audioContext);
         this.scheduler = new GainNode(audioContext, { gain: 0 });
         this.schedulerBuffer = new AudioBuffer({
@@ -430,9 +689,157 @@ class MidyGM1 extends EventTarget {
         this.instruments = midiData.instruments;
         this.timeline = midiData.timeline;
         this.totalTime = this.calcTotalTime();
+        if (this.cacheMode === "audio") {
+            await this.render();
+        }
+    }
+    buildNoteOnDurations() {
+        const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+        noteOnDurations.clear();
+        noteOnEvents.clear();
+        const inverseTempo = 1 / this.tempo;
+        const sustainPedal = new Uint8Array(numChannels);
+        const activeNotes = new Map();
+        const pendingOff = new Map();
+        const finalizeEntry = (entry, endTime, endTicks) => {
+            const duration = Math.max(0, endTime - entry.startTime);
+            const durationTicks = (endTicks == null || endTicks === Infinity)
+                ? Infinity
+                : Math.max(0, endTicks - entry.startTicks);
+            noteOnDurations.set(entry.idx, duration);
+            noteOnEvents.set(entry.idx, {
+                duration,
+                durationTicks,
+                startTime: entry.startTime,
+                events: entry.events,
+            });
+        };
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const t = event.startTime * inverseTempo;
+            switch (event.type) {
+                case "noteOn": {
+                    const key = event.noteNumber * numChannels + event.channel;
+                    if (!activeNotes.has(key))
+                        activeNotes.set(key, []);
+                    activeNotes.get(key).push({
+                        idx: i,
+                        startTime: t,
+                        startTicks: event.ticks,
+                        events: [],
+                    });
+                    const pendingStack = pendingOff.get(key);
+                    if (pendingStack && pendingStack.length > 0)
+                        pendingStack.shift();
+                    break;
+                }
+                case "noteOff": {
+                    const ch = event.channel;
+                    const key = event.noteNumber * numChannels + ch;
+                    if (sustainPedal[ch]) {
+                        if (!pendingOff.has(key))
+                            pendingOff.set(key, []);
+                        pendingOff.get(key).push({ t, ticks: event.ticks });
+                    }
+                    else {
+                        const stack = activeNotes.get(key);
+                        if (stack && stack.length > 0) {
+                            finalizeEntry(stack.shift(), t, event.ticks);
+                            if (stack.length === 0)
+                                activeNotes.delete(key);
+                        }
+                    }
+                    break;
+                }
+                case "controller": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                    switch (event.controllerType) {
+                        case 64: { // Sustain Pedal
+                            const on = event.value >= 64;
+                            sustainPedal[ch] = on ? 1 : 0;
+                            if (!on) {
+                                for (const [key, offItems] of pendingOff) {
+                                    if (key % numChannels !== ch)
+                                        continue;
+                                    const activeStack = activeNotes.get(key);
+                                    for (const { t: offTime, ticks: offTicks } of offItems) {
+                                        if (activeStack && activeStack.length > 0) {
+                                            finalizeEntry(activeStack.shift(), offTime, offTicks);
+                                            if (activeStack.length === 0)
+                                                activeNotes.delete(key);
+                                        }
+                                    }
+                                    pendingOff.delete(key);
+                                }
+                            }
+                            break;
+                        }
+                        case 121: // Reset All Controllers
+                            sustainPedal[ch] = 0;
+                            break;
+                        case 120: // All Sound Off
+                        case 123: { // All Notes Off
+                            for (const [key, stack] of activeNotes) {
+                                if (key % numChannels !== ch)
+                                    continue;
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                                activeNotes.delete(key);
+                            }
+                            for (const key of pendingOff.keys()) {
+                                if (key % numChannels === ch)
+                                    pendingOff.delete(key);
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "sysEx":
+                    if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+                        // GM1 System On
+                        if (event.data[3] === 1) {
+                            sustainPedal.fill(0);
+                            pendingOff.clear();
+                            for (const [, stack] of activeNotes) {
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                            }
+                            activeNotes.clear();
+                        }
+                    }
+                    else {
+                        for (const [, entries] of activeNotes) {
+                            for (const entry of entries)
+                                entry.events.push(event);
+                        }
+                    }
+                    break;
+                case "pitchBend":
+                case "programChange": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                }
+            }
+        }
+        for (const [, stack] of activeNotes) {
+            for (const entry of stack)
+                finalizeEntry(entry, totalTime, Infinity);
+        }
     }
     cacheVoiceIds() {
-        const { channels, timeline, voiceCounter } = this;
+        const { channels, timeline, voiceCounter, cacheMode } = this;
         for (let i = 0; i < timeline.length; i++) {
             const event = timeline[i];
             switch (event.type) {
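
buildNoteOnDurations() keys active notes by a single integer so one Map lookup covers the (noteNumber, channel) pair. A small sketch of that encoding (helper names are hypothetical):

    const numChannels = 16;
    const key = (noteNumber, channel) => noteNumber * numChannels + channel;
    const channelOf = (k) => k % numChannels; // the "key % numChannels !== ch" filter above
    const noteOf = (k) => Math.floor(k / numChannels);
    // key(60, 9) === 969; channelOf(969) === 9; noteOf(969) === 60
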
@@ -450,6 +857,9 @@ class MidyGM1 extends EventTarget {
             voiceCounter.delete(audioBufferId);
         }
         this.GM1SystemOn();
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+        }
     }
     getVoiceId(channel, noteNumber, velocity) {
         const programNumber = channel.programNumber;
@@ -468,7 +878,8 @@ class MidyGM1 extends EventTarget {
         const soundFont = this.soundFonts[soundFontIndex];
         const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
         const { instrument, sampleID } = voice.generators;
-        return soundFontIndex * (2 **
+        return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+            (sampleID << 8);
     }
     createChannelAudioNodes(audioContext) {
         const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
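
The widened voice ID packs three fields into one Number; a sketch of the layout implied by the new return expression (field widths are assumptions based on typical SF2 limits):

    // bits 31+    soundFontIndex  (* 2 ** 31; multiplication because JS << is 32-bit)
    // bits 24-30  instrument      (* 2 ** 24)
    // bits 8-23   sampleID        (<< 8)
    // bits 0-7    left free; getAdsCachedBuffer() later adds (noteNumber << 1) + 1 there
    const voiceId = (soundFontIndex, instrument, sampleID) =>
        soundFontIndex * (2 ** 31) + instrument * (2 ** 24) + (sampleID << 8);
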
@@ -478,25 +889,11 @@ class MidyGM1 extends EventTarget {
         gainL.connect(merger, 0, 0);
         gainR.connect(merger, 0, 1);
         merger.connect(this.masterVolume);
-        return {
-            gainL,
-            gainR,
-            merger,
-        };
+        return { gainL, gainR, merger };
     }
     createChannels(audioContext) {
-        const
-
-            currentBufferSource: null,
-            isDrum: false,
-            state: new ControllerState(),
-            ...this.constructor.channelSettings,
-            ...this.createChannelAudioNodes(audioContext),
-            scheduledNotes: [],
-            sustainNotes: [],
-        };
-        });
-        return channels;
+        const settings = this.constructor.channelSettings;
+        return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
     }
     decodeOggVorbis(sample) {
         const task = decoderQueue.then(async () => {
@@ -550,13 +947,25 @@ class MidyGM1 extends EventTarget {
             return audioBuffer;
         }
     }
-    createBufferSource(voiceParams,
+    createBufferSource(voiceParams, renderedOrRaw) {
+        const isRendered = renderedOrRaw instanceof RenderedBuffer;
+        const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
         const bufferSource = new AudioBufferSourceNode(this.audioContext);
         bufferSource.buffer = audioBuffer;
-
+        const isLoop = isRendered
+            ? renderedOrRaw.isLoop
+            : voiceParams.sampleModes % 2 !== 0;
+        bufferSource.loop = isLoop;
         if (bufferSource.loop) {
-
-
+            if (isRendered && renderedOrRaw.adsDuration != null) {
+                bufferSource.loopStart = renderedOrRaw.loopStart;
+                bufferSource.loopEnd = renderedOrRaw.loopStart +
+                    renderedOrRaw.loopDuration;
+            }
+            else {
+                bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+                bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+            }
         }
         return bufferSource;
     }
@@ -573,13 +982,15 @@ class MidyGM1 extends EventTarget {
                 break;
             const startTime = t + schedulingOffset;
             switch (event.type) {
-                case "noteOn":
-                    this.
+                case "noteOn": {
+                    const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+                    note.timelineIndex = queueIndex;
+                    this.setupNote(event.channel, note, startTime);
                     break;
-
+                }
+                case "noteOff":
                     this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
                     break;
-                }
                 case "controller":
                     this.setControlChange(event.channel, event.controllerType, event.value, startTime);
                     break;
@@ -608,9 +1019,9 @@ class MidyGM1 extends EventTarget {
     }
     resetAllStates() {
        this.exclusiveClassNotes.fill(undefined);
-        this.drumExclusiveClassNotes.fill(undefined);
         this.voiceCache.clear();
         this.realtimeVoiceCache.clear();
+        this.adsrVoiceCache.clear();
         const channels = this.channels;
         for (let ch = 0; ch < channels.length; ch++) {
             channels[ch].scheduledNotes = [];
@@ -640,11 +1051,95 @@ class MidyGM1 extends EventTarget {
             }
         }
     }
+    async playAudioBuffer() {
+        const audioContext = this.audioContext;
+        const paused = this.isPaused;
+        this.isPlaying = true;
+        this.isPaused = false;
+        this.startTime = audioContext.currentTime;
+        if (paused) {
+            this.dispatchEvent(new Event("resumed"));
+        }
+        else {
+            this.dispatchEvent(new Event("started"));
+        }
+        let exitReason;
+        outer: while (true) {
+            const buffer = this.renderedAudioBuffer;
+            const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+            bufferSource.playbackRate.value = this.tempo;
+            bufferSource.connect(this.masterVolume);
+            const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+            bufferSource.start(audioContext.currentTime, offset);
+            this.audioModeBufferSource = bufferSource;
+            let naturalEnded = false;
+            bufferSource.onended = () => {
+                naturalEnded = true;
+            };
+            while (true) {
+                const now = audioContext.currentTime;
+                await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+                if (naturalEnded || this.currentTime() >= this.totalTime) {
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    if (this.loop) {
+                        this.resumeTime = 0;
+                        this.startTime = audioContext.currentTime;
+                        this.dispatchEvent(new Event("looped"));
+                        continue outer;
+                    }
+                    await audioContext.suspend();
+                    exitReason = "ended";
+                    break outer;
+                }
+                if (this.isPausing) {
+                    this.resumeTime = this.currentTime();
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isPausing = false;
+                    exitReason = "paused";
+                    break outer;
+                }
+                else if (this.isStopping) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isStopping = false;
+                    exitReason = "stopped";
+                    break outer;
+                }
+                else if (this.isSeeking) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    this.startTime = audioContext.currentTime;
+                    this.isSeeking = false;
+                    this.dispatchEvent(new Event("seeked"));
+                    continue outer;
+                }
+            }
+        }
+        this.isPlaying = false;
+        if (exitReason === "paused") {
+            this.isPaused = true;
+            this.dispatchEvent(new Event("paused"));
+        }
+        else if (exitReason !== undefined) {
+            this.isPaused = false;
+            this.dispatchEvent(new Event(exitReason));
+        }
+    }
     async playNotes() {
         const audioContext = this.audioContext;
         if (audioContext.state === "suspended") {
             await audioContext.resume();
         }
+        if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+            return await this.playAudioBuffer();
+        }
         const paused = this.isPaused;
         this.isPlaying = true;
         this.isPaused = false;
@@ -811,6 +1306,136 @@ class MidyGM1 extends EventTarget {
         this.notePromises = [];
         return stopPromise;
     }
+    async render() {
+        if (this.isRendering)
+            return;
+        if (this.timeline.length === 0)
+            return;
+        if (this.voiceCounter.size === 0)
+            this.cacheVoiceIds();
+        this.isRendering = true;
+        this.renderedAudioBuffer = null;
+        this.dispatchEvent(new Event("rendering"));
+        const sampleRate = this.audioContext.sampleRate;
+        const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+        const renderProgramNumber = new Uint8Array(this.numChannels);
+        const renderIsDrum = new Uint8Array(this.numChannels);
+        renderIsDrum[9] = 1;
+        const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+            const state = new Float32Array(256);
+            for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                state[type] = defaultValue;
+            }
+            return state;
+        });
+        const tasks = [];
+        const timeline = this.timeline;
+        const inverseTempo = 1 / this.tempo;
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const ch = event.channel;
+            switch (event.type) {
+                case "noteOn": {
+                    const noteEvent = this.noteOnEvents.get(i);
+                    const noteDuration = noteEvent?.duration ??
+                        this.noteOnDurations.get(i) ??
+                        0;
+                    if (noteDuration <= 0)
+                        continue;
+                    const { noteNumber, velocity } = event;
+                    const isDrum = renderIsDrum[ch] === 1;
+                    const programNumber = renderProgramNumber[ch];
+                    const bankTable = this.soundFontTable[programNumber];
+                    if (!bankTable)
+                        continue;
+                    let bank = isDrum ? 128 : 0;
+                    if (bankTable[bank] === undefined) {
+                        if (isDrum)
+                            continue;
+                        bank = 0;
+                    }
+                    const soundFontIndex = bankTable[bank];
+                    if (soundFontIndex === undefined)
+                        continue;
+                    const soundFont = this.soundFonts[soundFontIndex];
+                    const fakeChannel = {
+                        state: { array: renderControllerStates[ch].slice() },
+                        programNumber,
+                        isDrum,
+                        modulationDepthRange: 50,
+                        detune: 0,
+                    };
+                    const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+                    const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+                    if (!voice)
+                        continue;
+                    const voiceParams = voice.getAllParams(controllerState);
+                    const t = event.startTime * inverseTempo + this.startDelay;
+                    const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+                    const promise = (async () => {
+                        try {
+                            return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+                        }
+                        catch (err) {
+                            console.warn("render: note render failed", err);
+                            return null;
+                        }
+                    })();
+                    tasks.push({ t, promise, fakeChannel });
+                    break;
+                }
+                case "controller": {
+                    const { controllerType, value } = event;
+                    const stateIndex = 128 + controllerType;
+                    if (stateIndex < 256) {
+                        renderControllerStates[ch][stateIndex] = value / 127;
+                    }
+                    break;
+                }
+                case "pitchBend":
+                    renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+                    break;
+                case "programChange":
+                    renderProgramNumber[ch] = event.programNumber;
+                    break;
+                case "sysEx": {
+                    const data = event.data;
+                    if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+                        if (data[3] === 1) { // GM1 System On
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                        }
+                    }
+                    break;
+                }
+            }
+        }
+        const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+        for (let i = 0; i < tasks.length; i++) {
+            const { t, promise } = tasks[i];
+            const noteBuffer = await promise;
+            if (!noteBuffer)
+                continue;
+            const audioBuffer = noteBuffer instanceof RenderedBuffer
+                ? noteBuffer.buffer
+                : noteBuffer;
+            const bufferSource = new AudioBufferSourceNode(offlineContext, {
+                buffer: audioBuffer,
+            });
+            bufferSource.connect(offlineContext.destination);
+            bufferSource.start(t);
+        }
+        this.renderedAudioBuffer = await offlineContext.startRendering();
+        this.isRendering = false;
+        this.dispatchEvent(new Event("rendered"));
+        return this.renderedAudioBuffer;
+    }
     async start() {
         if (this.isPlaying || this.isPaused)
             return;
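
In "audio" mode the loader above already awaits render(), and render() returns early while isRendering is set. A usage sketch built on the events this diff introduces (the loader name and argument are assumptions):

    midy.addEventListener("rendering", () => console.log("offline render started"));
    midy.addEventListener("rendered", () => console.log("AudioBuffer ready"));
    await midy.loadMIDI(midiData); // in "audio" mode this awaits render() internally
    await midy.start();            // playback just streams one AudioBufferSourceNode
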
@@ -847,11 +1472,22 @@ class MidyGM1 extends EventTarget {
         }
     }
     tempoChange(tempo) {
+        const cacheMode = this.cacheMode;
         const timeScale = this.tempo / tempo;
         this.resumeTime = this.resumeTime * timeScale;
         this.tempo = tempo;
         this.totalTime = this.calcTotalTime();
         this.seekTo(this.currentTime() * timeScale);
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+            this.fullVoiceCache.clear();
+            this.adsrVoiceCache.clear();
+        }
+        if (cacheMode === "audio") {
+            if (this.audioModeBufferSource) {
+                this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+            }
+        }
     }
     calcTotalTime() {
         const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -872,6 +1508,9 @@ class MidyGM1 extends EventTarget {
         if (!this.isPlaying)
             return this.resumeTime;
         const now = this.audioContext.currentTime;
+        if (this.cacheMode === "audio") {
+            return this.resumeTime + (now - this.startTime) * this.tempo;
+        }
         return now + this.resumeTime - this.startTime;
     }
     async processScheduledNotes(channel, callback) {
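
In "audio" mode the buffer's playbackRate equals tempo, so the playback position advances tempo-times faster than the wall clock; a worked check of the new branch:

    const currentTimeAudio = (resumeTime, now, startTime, tempo) =>
        resumeTime + (now - startTime) * tempo;
    currentTimeAudio(10, 24, 20, 1.5); // 10 + 4 * 1.5 = 16 seconds into the piece
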
@@ -922,6 +1561,8 @@ class MidyGM1 extends EventTarget {
     }
     updateChannelDetune(channel, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             this.setDetune(channel, note, scheduleTime);
         });
     }
@@ -929,6 +1570,8 @@ class MidyGM1 extends EventTarget {
         return channel.detune + note.voiceParams.detune;
     }
     setVolumeEnvelope(note, scheduleTime) {
+        if (!note.volumeEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
         const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -946,9 +1589,6 @@ class MidyGM1 extends EventTarget {
     }
     setDetune(channel, note, scheduleTime) {
         const detune = this.calcNoteDetune(channel, note);
-        note.bufferSource.detune
-            .cancelScheduledValues(scheduleTime)
-            .setValueAtTime(detune, scheduleTime);
         const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
         note.bufferSource.detune
             .cancelAndHoldAtTime(scheduleTime)
@@ -980,6 +1620,8 @@ class MidyGM1 extends EventTarget {
         return Math.max(minFrequency, Math.min(frequency, maxFrequency));
     }
     setFilterEnvelope(note, scheduleTime) {
+        if (!note.filterEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
         const baseCent = voiceParams.initialFilterFc;
@@ -1020,40 +1662,348 @@ class MidyGM1 extends EventTarget {
         this.setModLfoToVolume(note, scheduleTime);
         note.modLfo.start(note.startTime + voiceParams.delayModLFO);
         note.modLfo.connect(note.modLfoToFilterFc);
-
+        if (note.filterEnvelopeNode) {
+            note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+        }
         note.modLfo.connect(note.modLfoToPitch);
         note.modLfoToPitch.connect(note.bufferSource.detune);
         note.modLfo.connect(note.modLfoToVolume);
-        note.
+        const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+        note.modLfoToVolume.connect(volumeTarget.gain);
+    }
+    async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+        const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+        const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+        const volHold = volAttack + voiceParams.volHold;
+        const decayDuration = voiceParams.volDecay;
+        const adsDuration = volHold + decayDuration * decayCurve * 5;
+        const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+        const loopDuration = isLoop
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const loopCount = isLoop && adsDuration > loopStartTime
+            ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+            : 0;
+        const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+        const renderDuration = isLoop
+            ? alignedLoopStart + loopDuration
+            : audioBuffer.duration;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = voiceParams.playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = loopStartTime;
+            bufferSource.loopEnd = loopStartTime + loopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(offlineNote, 0);
+        this.setFilterEnvelope(offlineNote, 0);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (voiceParams.sample.type === "compressed") {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop,
+            adsDuration,
+            loopStart: alignedLoopStart,
+            loopDuration,
+        });
+    }
+    async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+        const isLoop = voiceParams.sampleModes % 2 !== 0;
+        const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+        const volHold = volAttack + voiceParams.volHold;
+        const decayDuration = voiceParams.volDecay;
+        const adsDuration = volHold + decayDuration * decayCurve * 5;
+        const releaseDuration = voiceParams.volRelease;
+        const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+        const loopDuration = isLoop
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const noteLoopCount = isLoop && noteDuration > loopStartTime
+            ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+            : 0;
+        const alignedNoteEnd = isLoop
+            ? loopStartTime + noteLoopCount * loopDuration
+            : noteDuration;
+        const noteOffTime = alignedNoteEnd;
+        const totalDuration = noteOffTime + releaseDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = voiceParams.playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = loopStartTime;
+            bufferSource.loopEnd = loopStartTime + loopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(offlineNote, 0);
+        this.setFilterEnvelope(offlineNote, 0);
+        const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+        const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+        const volDelayTime = voiceParams.volDelay;
+        const volAttackTime = volDelayTime + voiceParams.volAttack;
+        const volHoldTime = volAttackTime + voiceParams.volHold;
+        let gainAtNoteOff;
+        if (noteOffTime <= volDelayTime) {
+            gainAtNoteOff = 0;
+        }
+        else if (noteOffTime <= volAttackTime) {
+            gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+                (noteOffTime - volDelayTime) / voiceParams.volAttack;
+        }
+        else if (noteOffTime <= volHoldTime) {
+            gainAtNoteOff = attackVolume;
+        }
+        else {
+            const decayElapsed = noteOffTime - volHoldTime;
+            gainAtNoteOff = sustainVolume +
+                (attackVolume - sustainVolume) *
+                    Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+        }
+        volumeEnvelopeNode.gain
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(gainAtNoteOff, noteOffTime)
+            .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+        filterEnvelopeNode.frequency
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(initialFreq, noteOffTime)
+            .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (isLoop) {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: false,
+            adsDuration,
+            noteDuration: noteOffTime,
+            releaseDuration,
+        });
+    }
+    async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+        const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+        const ch = note.channel ?? 0;
+        const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+        const totalDuration = noteDuration + releaseEndDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const offlinePlayer = new this.constructor(offlineContext, {
+            cacheMode: "none",
+        });
+        offlineContext.suspend = () => Promise.resolve();
+        offlineContext.resume = () => Promise.resolve();
+        offlinePlayer.soundFonts = this.soundFonts;
+        offlinePlayer.soundFontTable = this.soundFontTable;
+        const dstChannel = offlinePlayer.channels[ch];
+        dstChannel.state.array.set(channel.state.array);
+        dstChannel.isDrum = channel.isDrum;
+        dstChannel.programNumber = channel.programNumber;
+        dstChannel.modulationDepthRange = channel.modulationDepthRange;
+        dstChannel.detune = this.calcChannelDetune(dstChannel);
+        await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+        for (const event of noteEvents) {
+            const t = event.startTime / this.tempo - noteStartTime;
+            if (t < 0 || t > noteDuration)
+                continue;
+            switch (event.type) {
+                case "controller":
+                    offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+                    break;
+                case "pitchBend":
+                    offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+                    break;
+                case "sysEx":
+                    offlinePlayer.handleSysEx(event.data, t);
+            }
+        }
+        offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: true,
+            noteDuration: noteDuration,
+            releaseDuration: releaseEndDuration,
+        });
     }
-    async getAudioBuffer(channel,
+    async getAudioBuffer(channel, note, realtime) {
+        const cacheMode = this.cacheMode;
+        const { noteNumber, velocity } = note;
         const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+        if (!realtime) {
+            if (cacheMode === "note") {
+                return await this.getFullCachedBuffer(note, audioBufferId);
+            }
+            else if (cacheMode === "adsr") {
+                return await this.getAdsrCachedBuffer(note, audioBufferId);
+            }
+        }
+        if (cacheMode === "none") {
+            return await this.createAudioBuffer(note.voiceParams);
+        }
+        // fallback to ADS cache:
+        // - "ads" (realtime or not)
+        // - "adsr" + realtime
+        // - "note" + realtime
+        return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+    }
+    async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+        const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+        const voiceParams = note.voiceParams;
         if (realtime) {
-            const
-            if (
-                return
-            const
-            this.
-
+            const cached = this.realtimeVoiceCache.get(cacheKey);
+            if (cached)
+                return cached;
+            const rawBuffer = await this.createAudioBuffer(voiceParams);
+            const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+            this.realtimeVoiceCache.set(cacheKey, rendered);
+            return rendered;
         }
         else {
-            const cache = this.voiceCache.get(
+            const cache = this.voiceCache.get(cacheKey);
             if (cache) {
                 cache.counter += 1;
                 if (cache.maxCount <= cache.counter) {
-                    this.voiceCache.delete(
+                    this.voiceCache.delete(cacheKey);
                 }
                 return cache.audioBuffer;
             }
             else {
-                const maxCount = this.voiceCounter.get(
-                const
-                const
-
-
+                const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+                const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                this.voiceCache.set(cacheKey, cache);
+                return rendered;
             }
         }
     }
+    async getAdsrCachedBuffer(note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+        const safeTicks = noteDurationTicks === Infinity
+            ? 0xffffffffn
+            : BigInt(noteDurationTicks);
+        const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+        const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+        const cacheKey = (BigInt(audioBufferId) << 160n) |
+            (playbackRateBits << 96n) |
+            (safeTicks << 64n) |
+            volReleaseBits;
+        let durationMap = this.adsrVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.adsrVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            return buf;
+        }
+        const noteDuration = noteEvent?.duration ?? 0;
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        return await renderPromise;
+    }
+    async getFullCachedBuffer(note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDuration = noteEvent?.duration ?? 0;
+        const cacheKey = timelineIndex;
+        let durationMap = this.fullVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.fullVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            note.fullCacheVoiceId = audioBufferId;
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            note.fullCacheVoiceId = audioBufferId;
+            return buf;
+        }
+        const renderPromise = (async () => {
+            try {
+                const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        const rendered = await renderPromise;
+        note.fullCacheVoiceId = audioBufferId;
+        return rendered;
+    }
     async setNoteAudioNode(channel, note, realtime) {
         const audioContext = this.audioContext;
         const now = audioContext.currentTime;
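
getAdsrCachedBuffer() above concatenates disjoint bit fields into one BigInt key, so equal keys mean identical render inputs (assuming tick counts fit in 32 bits, as the 0xffffffffn sentinel for open-ended notes suggests):

    // bits 160+    audioBufferId
    // bits 96-159  f64 bit pattern of playbackRate
    // bits 64-95   note duration in ticks (0xffffffffn for open-ended notes)
    // bits 0-63    f64 bit pattern of volRelease
    const cacheKey = (BigInt(audioBufferId) << 160n) |
        (f64ToBigInt(voiceParams.playbackRate) << 96n) |
        (safeTicks << 64n) |
        f64ToBigInt(voiceParams.volRelease);
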
@@ -1062,25 +2012,47 @@ class MidyGM1 extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(voiceParams, audioBuffer);
-        note.
-        note.
-
-
-
-
-
-
-
-
-        this.
+        note.volumeNode = new GainNode(audioContext);
+        note.volumeNode.gain.setValueAtTime(1, now);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            this.setVolumeEnvelope(note, now);
+            this.setFilterEnvelope(note, now);
+            this.setPitchEnvelope(note, now);
+            this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
         }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
             note.bufferSource.start(startTime);
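setNoteAudioNode now wires one of three per-note graphs: in "none" mode the source feeds a live lowpass filter envelope and volume envelope before the per-note gain; in "note" mode (isFullCached) every envelope is already baked into the rendered buffer, so the source connects straight to the gain; "ads"/"adsr" mode also connects directly but keeps detune and modulation live. A condensed sketch of the branching, mirroring the diff above (illustrative helper, not the package API):

```js
// Sketch: the per-note graph chosen by cache mode.
function wireNote(ctx, note, cacheMode) {
  if (cacheMode === "none") {
    // Live envelopes: source -> lowpass filter env -> volume env -> note gain.
    note.filterEnvelopeNode = new BiquadFilterNode(ctx, { type: "lowpass" });
    note.volumeEnvelopeNode = new GainNode(ctx);
    note.bufferSource.connect(note.filterEnvelopeNode);
    note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
    note.volumeEnvelopeNode.connect(note.volumeNode);
  } else {
    // "note" mode: everything is baked into the buffer. "ads"/"adsr":
    // envelopes are baked in, but detune and the mod LFO stay live on
    // the source. Either way the source feeds the per-note gain directly.
    note.filterEnvelopeNode = null;
    note.volumeEnvelopeNode = null;
    note.bufferSource.connect(note.volumeNode);
  }
}
```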
@@ -1103,23 +2075,33 @@ class MidyGM1 extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
         const channel = this.channels[channelNumber];
-        const
-
-
-
-
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
+        }
+        else {
+            volumeNode.connect(channel.gainL);
+            volumeNode.connect(channel.gainR);
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
     }
     async noteOn(channelNumber, noteNumber, velocity, startTime) {
-        const
-
-
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
+    }
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-
-        scheduledNotes.
+        note.channel = channelNumber;
+        const channel = this.channels[channelNumber];
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
         if (!bankTable)
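noteOn is split into a synchronous createNote and an async setupNote: the note claims its index in channel.scheduledNotes before any await, so slots stay ordered even when voice lookup and buffer rendering resolve out of order, and setupNote treats an omitted startTime as a real-time trigger. A toy model of why the synchronous reservation matters (hypothetical names, not the package API):

```js
// Sketch: reserve the slot synchronously, finish setup asynchronously.
const scheduledNotes = [];
function createNote(noteNumber) {
  const note = { noteNumber, index: scheduledNotes.length };
  scheduledNotes.push(note); // claimed before any await
  return note;
}
async function setupNote(note) {
  await new Promise((r) => setTimeout(r, Math.random() * 10));
  return note; // note.index is unaffected by completion order
}
await Promise.all([60, 64, 67].map((n) => setupNote(createNote(n))));
// scheduledNotes[0].noteNumber === 60, whichever setup finished first.
```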
@@ -1134,33 +2116,134 @@ class MidyGM1 extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        return note;
     }
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
+        note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
             note.modLfoToPitch.disconnect();
             note.modLfo.stop();
         }
     }
+    releaseFullCache(note) {
+        if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+            return;
+        const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+        if (!durationMap)
+            return;
+        const entry = durationMap.get(note.timelineIndex);
+        if (entry instanceof RenderedBuffer) {
+            durationMap.delete(note.timelineIndex);
+            if (durationMap.size === 0) {
+                this.fullVoiceCache.delete(note.fullCacheVoiceId);
+            }
+        }
+    }
     releaseNote(channel, note, endTime) {
         endTime ??= this.audioContext.currentTime;
+        if (note.renderedBuffer?.isFull) {
+            const rb = note.renderedBuffer;
+            const naturalEndTime = note.startTime + rb.buffer.duration;
+            const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+            const isEarlyCut = endTime < noteOffTime;
+            if (isEarlyCut) {
+                const volDuration = note.voiceParams.volRelease;
+                const volRelease = endTime + volDuration;
+                note.volumeNode.gain
+                    .cancelScheduledValues(endTime)
+                    .setValueAtTime(1, endTime)
+                    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        note.bufferSource.loop = false;
+                        note.bufferSource.stop(volRelease);
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, volRelease);
+                });
+            }
+            else {
+                const now = this.audioContext.currentTime;
+                if (naturalEndTime <= now) {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    return Promise.resolve();
+                }
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, naturalEndTime);
+                });
+            }
+        }
         const volDuration = note.voiceParams.volRelease;
         const volRelease = endTime + volDuration;
-        note.
-            .
-
-
-            .
-
+        if (note.volumeEnvelopeNode) { // "none" mode
+            note.filterEnvelopeNode.frequency
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+            note.volumeEnvelopeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        else { // "ads" / "adsr" mode
+            const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                !note.renderedBuffer.isFull;
+            if (isAdsr) {
+                const rb = note.renderedBuffer;
+                const naturalEndTime = note.startTime + rb.buffer.duration;
+                const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                const isEarlyCut = endTime < noteOffTime;
+                if (isEarlyCut) {
+                    const volRelease = endTime + volDuration;
+                    note.volumeNode.gain
+                        .cancelScheduledValues(endTime)
+                        .setValueAtTime(1, endTime)
+                        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop(volRelease);
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, volRelease);
+                    });
+                }
+                else {
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop();
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, naturalEndTime);
+                    });
+                }
+            }
+            note.volumeNode.gain
+                .cancelScheduledValues(endTime)
+                .setValueAtTime(1, endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
         return new Promise((resolve) => {
             this.scheduleTask(() => {
                 const bufferSource = note.bufferSource;
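For fully cached notes, releaseNote distinguishes an early cut (note-off arrives before the baked noteDuration) from a natural end: an early cut fades volumeNode.gain to zero with setTargetAtTime and stops the source after volRelease, while a natural end just waits for the buffer to finish; both paths evict the one-shot cache entry via releaseFullCache. A self-contained sketch of the fade-and-stop step (releaseCurve is a constant defined elsewhere in this file; the 0.3 default below is a placeholder, not the package's value):

```js
// Sketch: exponential note-off fade followed by a deferred stop.
// setTargetAtTime decays toward 0 with the given time constant, so the
// source can be stopped once the tail is inaudible (illustrative values).
function fadeOutAndStop(volumeGain, bufferSource, endTime, volRelease, releaseCurve = 0.3) {
  volumeGain
    .cancelScheduledValues(endTime)
    .setValueAtTime(1, endTime)
    .setTargetAtTime(0, endTime, volRelease * releaseCurve);
  bufferSource.stop(endTime + volRelease); // the tail has decayed by then
}
```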
@@ -1344,7 +2427,7 @@ class MidyGM1 extends EventTarget {
         },
         chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
         reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
-        delayModLFO: (
+        delayModLFO: (channel, note, scheduleTime) => {
             if (0 < channel.state.modulationDepth) {
                 this.setDelayModLFO(note, scheduleTime);
             }
@@ -1370,6 +2453,8 @@ class MidyGM1 extends EventTarget {
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
@@ -1433,6 +2518,8 @@ class MidyGM1 extends EventTarget {
         const depth = channel.state.modulationDepthMSB *
             channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (note.modLfoToPitch) {
                 note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
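Both applyVoiceParams and the modulation-depth handler now bail out for fully rendered notes, since a "note"-mode buffer already contains every envelope and LFO response and has no live nodes left to retune. The guard pattern in isolation (illustrative helper):

```js
// Sketch: real-time controller updates only touch notes with live graphs.
function forEachLiveNote(notes, update) {
  for (const note of notes) {
    if (note?.renderedBuffer?.isFull) continue; // fully baked, nothing to modulate
    update(note);
  }
}
```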
@@ -1495,11 +2582,15 @@ class MidyGM1 extends EventTarget {
         const channel = this.channels[channelNumber];
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
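The sustain pedal state is now stored normalized (value / 127), so the MIDI on-threshold of 64 corresponds to 0.5 (64 / 127 ≈ 0.504, 63 / 127 ≈ 0.496), and currently scheduled notes are captured into sustainNotes only on the off-to-on edge, preventing duplicates while the pedal is held. In isolation (illustrative names):

```js
// Sketch: edge-triggered sustain capture over a normalized pedal value.
function setSustainPedal(state, value, scheduledNotes, sustainNotes) {
  const prev = state.sustainPedal;
  state.sustainPedal = value / 127;      // 64 -> ~0.504 (on), 63 -> ~0.496 (off)
  if (64 <= value && prev < 0.5) {
    for (const note of scheduledNotes) {
      if (note) sustainNotes.push(note); // capture once, on the off->on edge
    }
  }
}
```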
@@ -1629,9 +2720,7 @@ class MidyGM1 extends EventTarget {
                 state[key] = defaultValue;
             }
         }
-
-        channel[key] = this.constructor.channelSettings[key];
-    }
+        channel.resetSettings(this.constructor.channelSettings);
         this.mode = "GM1";
     }
     // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/rp15.pdf
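The GM1 reset path replaces the inlined per-key assignment loop with a single channel.resetSettings(...) call against the class-level defaults table. A plausible shape for such a helper (the actual implementation lives in code outside this hunk and is not shown here):

```js
// Sketch: restoring channel-level settings from a static defaults table.
class Channel {
  resetSettings(defaults) {
    for (const key of Object.keys(defaults)) {
      this[key] = defaults[key]; // same effect as the removed per-key loop
    }
  }
}
```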
|