@marmooo/midy 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -2
- package/esm/midy-GM1.d.ts +86 -10
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1190 -101
- package/esm/midy-GM2.d.ts +103 -10
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1402 -162
- package/esm/midy-GMLite.d.ts +84 -9
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1183 -98
- package/esm/midy.d.ts +77 -15
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1416 -175
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +86 -10
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1190 -101
- package/script/midy-GM2.d.ts +103 -10
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1402 -162
- package/script/midy-GMLite.d.ts +84 -9
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1183 -98
- package/script/midy.d.ts +77 -15
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1416 -175
package/script/midy.js
CHANGED
@@ -4,6 +4,55 @@ exports.Midy = void 0;
 const midi_file_1 = require("midi-file");
 const soundfont_parser_1 = require("@marmooo/soundfont-parser");
 const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+//   No caching. Envelope processing is done in real time on every note.
+//   Uses Web Audio API nodes directly, so LFO and pitch envelope are
+//   fully supported. Higher CPU usage.
+// "ads"
+//   Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+//   OfflineAudioContext and caches the result. The sustain tail is
+//   aligned to the loop boundary as a fixed buffer. Release is
+//   handled by fading volumeNode gain to 0 at note-off.
+//   LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+//   vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+//   Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+//   into an OfflineAudioContext. The cache key includes the note
+//   duration in ticks (tempo-independent) and the volRelease parameter,
+//   so notes with the same duration and release shape share a buffer.
+//   LFO effects are applied in real time after playback starts,
+//   same as "ads" mode. Higher cache hit rate than "note" mode
+//   because LFO variations do not produce separate cache entries.
+// "note"
+//   Renders the full noteOn-to-noteOff duration per note in an
+//   OfflineAudioContext. All events during the note (volume,
+//   expression, pitch bend, LFO, CC#1) are baked into the buffer,
+//   so no real-time processing is needed during playback. Greatly
+//   reduces CPU load for songs with many simultaneous notes.
+//   MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+//   Renders the entire MIDI file into a single AudioBuffer offline.
+//   Call render() to complete rendering before calling start().
+//   Playback simply streams an AudioBufferSourceNode, so CPU usage
+//   is near zero. Seek and tempo changes are handled in real time.
+//   A "rendering" event is dispatched when rendering starts, and a
+//   "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "ads";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+    _f64Array[0] = value;
+    return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
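A minimal usage sketch of the new cacheMode option, assuming only the names visible in this diff (the Midy constructor options, render()/start(), and the "rendered" event); the MIDI/SoundFont loading calls are omitted because their names do not appear in this hunk:

    const { Midy } = require("@marmooo/midy");
    const audioContext = new AudioContext();
    const midy = new Midy(audioContext, { cacheMode: "audio" });
    // ...load a SoundFont and a MIDI file here (loader API not shown above)...
    // In "audio" mode the whole file is pre-rendered offline; start() then
    // just streams the resulting AudioBuffer.
    midy.addEventListener("rendered", () => midy.start());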
@@ -51,6 +100,24 @@ class Note {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "timelineIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "renderedBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "fullCacheVoiceId", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         Object.defineProperty(this, "filterEnvelopeNode", {
             enumerable: true,
             configurable: true,
@@ -137,6 +204,173 @@ class Note {
         });
     }
 }
+class Channel {
+    constructor(audioNodes, settings) {
+        Object.defineProperty(this, "isDrum", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "programNumber", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "scheduleIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "detune", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "bankMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 121
+        });
+        Object.defineProperty(this, "bankLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "dataMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "dataLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "rpnMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 127
+        });
+        Object.defineProperty(this, "rpnLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 127
+        });
+        Object.defineProperty(this, "mono", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        }); // CC#124, CC#125
+        Object.defineProperty(this, "modulationDepthRange", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        }); // cent
+        Object.defineProperty(this, "fineTuning", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        }); // cent
+        Object.defineProperty(this, "coarseTuning", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        }); // cent
+        Object.defineProperty(this, "scheduledNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "sustainNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "sostenutoNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "controlTable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Int8Array(defaultControlValues)
+        });
+        Object.defineProperty(this, "scaleOctaveTuningTable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Float32Array(12)
+        }); // [-100, 100] cent
+        Object.defineProperty(this, "channelPressureTable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Int8Array(defaultPressureValues)
+        });
+        Object.defineProperty(this, "polyphonicKeyPressureTable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Int8Array(defaultPressureValues)
+        });
+        Object.defineProperty(this, "keyBasedTable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Int8Array(128 * 128).fill(-1)
+        });
+        Object.defineProperty(this, "keyBasedGainLs", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Array(128)
+        });
+        Object.defineProperty(this, "keyBasedGainRs", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Array(128)
+        });
+        Object.defineProperty(this, "currentBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.assign(this, audioNodes);
+        Object.assign(this, settings);
+        this.state = new ControllerState();
+    }
+    resetSettings(settings) {
+        Object.assign(this, settings);
+    }
+    resetTable() {
+        this.controlTable.set(defaultControlValues);
+        this.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
+        this.channelPressureTable.set(defaultPressureValues);
+        this.polyphonicKeyPressureTable.set(defaultPressureValues);
+        this.keyBasedTable.fill(-1);
+    }
+}
 const drumExclusiveClassesByKit = new Array(57);
 const drumExclusiveClassCount = 10;
 const standardSet = new Uint8Array(128);
@@ -286,13 +520,73 @@ const defaultControlValues = new Int8Array([
     ...[-1, -1, -1, -1, -1, -1],
     ...defaultPressureValues,
 ]);
+class RenderedBuffer {
+    constructor(buffer, meta = {}) {
+        Object.defineProperty(this, "buffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isLoop", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isFull", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "adsDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopStart", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "noteDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "releaseDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.buffer = buffer;
+        this.isLoop = meta.isLoop ?? false;
+        this.isFull = meta.isFull ?? false;
+        this.adsDuration = meta.adsDuration;
+        this.loopStart = meta.loopStart;
+        this.loopDuration = meta.loopDuration;
+        this.noteDuration = meta.noteDuration;
+        this.releaseDuration = meta.releaseDuration;
+    }
+}
 function cbToRatio(cb) {
     return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 class Midy extends EventTarget {
-    constructor(audioContext) {
+    constructor(audioContext, options = {}) {
         super();
         // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
         // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -474,9 +768,7 @@ class Midy extends EventTarget {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: new Set([
-                "noteOff",
-            ])
+            value: new Set(["noteOff"])
         });
         Object.defineProperty(this, "tempo", {
             enumerable: true,
@@ -532,6 +824,51 @@ class Midy extends EventTarget {
             writable: true,
             value: new Array(this.numChannels * drumExclusiveClassCount)
         });
+        // "adsr" mode
+        Object.defineProperty(this, "adsrVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "note" mode
+        Object.defineProperty(this, "noteOnDurations", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "noteOnEvents", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "fullVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "audio" mode
+        Object.defineProperty(this, "renderedAudioBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "isRendering", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "audioModeBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         Object.defineProperty(this, "mpeEnabled", {
             enumerable: true,
             configurable: true,
@@ -559,10 +896,8 @@ class Midy extends EventTarget {
                 noteToChannel: new Map(),
             }
         });
-        this.decoder = new ogg_vorbis_1.OggVorbisDecoderWebWorker();
-        this.decoderReady = this.decoder.ready;
-        this.decoderQueue = Promise.resolve();
         this.audioContext = audioContext;
+        this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
         this.masterVolume = new GainNode(audioContext);
         this.scheduler = new GainNode(audioContext, { gain: 0 });
         this.schedulerBuffer = new AudioBuffer({
@@ -638,9 +973,178 @@ class Midy extends EventTarget {
         this.instruments = midiData.instruments;
         this.timeline = midiData.timeline;
         this.totalTime = this.calcTotalTime();
+        if (this.cacheMode === "audio") {
+            await this.render();
+        }
+    }
+    buildNoteOnDurations() {
+        const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+        noteOnDurations.clear();
+        noteOnEvents.clear();
+        const inverseTempo = 1 / this.tempo;
+        const sustainPedal = new Uint8Array(numChannels);
+        const sostenutoPedal = new Uint8Array(numChannels);
+        const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+        const activeNotes = new Map();
+        const pendingOff = new Map();
+        const finalizeEntry = (entry, endTime, endTicks) => {
+            const duration = Math.max(0, endTime - entry.startTime);
+            const durationTicks = (endTicks == null || endTicks === Infinity)
+                ? Infinity
+                : Math.max(0, endTicks - entry.startTicks);
+            noteOnDurations.set(entry.idx, duration);
+            noteOnEvents.set(entry.idx, {
+                duration,
+                durationTicks,
+                startTime: entry.startTime,
+                events: entry.events,
+            });
+        };
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const t = event.startTime * inverseTempo;
+            switch (event.type) {
+                case "noteOn": {
+                    const key = event.noteNumber * numChannels + event.channel;
+                    if (!activeNotes.has(key))
+                        activeNotes.set(key, []);
+                    activeNotes.get(key).push({
+                        idx: i,
+                        startTime: t,
+                        startTicks: event.ticks,
+                        events: [],
+                    });
+                    const pendingStack = pendingOff.get(key);
+                    if (pendingStack && pendingStack.length > 0)
+                        pendingStack.shift();
+                    break;
+                }
+                case "noteOff": {
+                    const ch = event.channel;
+                    const key = event.noteNumber * numChannels + ch;
+                    const isSostenuto = sostenutoKeys[ch].has(key);
+                    if (sustainPedal[ch] || isSostenuto) {
+                        if (!pendingOff.has(key))
+                            pendingOff.set(key, []);
+                        pendingOff.get(key).push({ t, ticks: event.ticks });
+                    }
+                    else {
+                        const stack = activeNotes.get(key);
+                        if (stack && stack.length > 0) {
+                            finalizeEntry(stack.shift(), t, event.ticks);
+                            if (stack.length === 0)
+                                activeNotes.delete(key);
+                        }
+                    }
+                    break;
+                }
+                case "controller": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                    switch (event.controllerType) {
+                        case 64: { // Sustain Pedal
+                            const on = event.value >= 64;
+                            sustainPedal[ch] = on ? 1 : 0;
+                            if (!on) {
+                                for (const [key, offItems] of pendingOff) {
+                                    if (key % numChannels !== ch)
+                                        continue;
+                                    const activeStack = activeNotes.get(key);
+                                    for (const { t: offTime, ticks: offTicks } of offItems) {
+                                        if (activeStack && activeStack.length > 0) {
+                                            finalizeEntry(activeStack.shift(), offTime, offTicks);
+                                            if (activeStack.length === 0)
+                                                activeNotes.delete(key);
+                                        }
+                                    }
+                                    pendingOff.delete(key);
+                                }
+                            }
+                            break;
+                        }
+                        case 66: { // Sostenuto Pedal
+                            const on = event.value >= 64;
+                            if (on && !sostenutoPedal[ch]) {
+                                for (const [key] of activeNotes) {
+                                    if (key % numChannels === ch)
+                                        sostenutoKeys[ch].add(key);
+                                }
+                            }
+                            else if (!on) {
+                                sostenutoKeys[ch].clear();
+                            }
+                            sostenutoPedal[ch] = on ? 1 : 0;
+                            break;
+                        }
+                        case 121: // Reset All Controllers
+                            sustainPedal[ch] = 0;
+                            sostenutoPedal[ch] = 0;
+                            sostenutoKeys[ch].clear();
+                            break;
+                        case 120: // All Sound Off
+                        case 123: { // All Notes Off
+                            for (const [key, stack] of activeNotes) {
+                                if (key % numChannels !== ch)
+                                    continue;
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                                activeNotes.delete(key);
+                            }
+                            for (const key of pendingOff.keys()) {
+                                if (key % numChannels === ch)
+                                    pendingOff.delete(key);
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "sysEx":
+                    if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+                        // GM1 System On / GM2 System On
+                        if (event.data[3] === 1 || event.data[3] === 3) {
+                            sustainPedal.fill(0);
+                            pendingOff.clear();
+                            for (const [, stack] of activeNotes) {
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                            }
+                            activeNotes.clear();
+                        }
+                    }
+                    else {
+                        for (const [, entries] of activeNotes) {
+                            for (const entry of entries)
+                                entry.events.push(event);
+                        }
+                    }
+                    break;
+                case "pitchBend":
+                case "programChange":
+                case "channelAftertouch":
+                case "noteAftertouch": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                }
+            }
+        }
+        for (const [, stack] of activeNotes) {
+            for (const entry of stack)
+                finalizeEntry(entry, totalTime, Infinity);
+        }
     }
     cacheVoiceIds() {
-        const { channels, timeline, voiceCounter } = this;
+        const { channels, timeline, voiceCounter, cacheMode } = this;
         for (let i = 0; i < timeline.length; i++) {
             const event = timeline[i];
             switch (event.type) {
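buildNoteOnDurations keys its maps by packing note number and channel into a single integer, which is why the channel filters above test key % numChannels. A quick check of that packing, using the formula from the "noteOn" case:

    const numChannels = 16;
    const key = 60 * numChannels + 9;                     // note 60 on channel 9
    console.assert(key % numChannels === 9);              // channel filter used above
    console.assert(Math.floor(key / numChannels) === 60); // note number recovered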
@@ -666,6 +1170,9 @@ class Midy extends EventTarget {
             voiceCounter.delete(audioBufferId);
         }
         this.GM2SystemOn();
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+        }
     }
     getVoiceId(channel, noteNumber, velocity) {
         const programNumber = channel.programNumber;
@@ -684,7 +1191,8 @@ class Midy extends EventTarget {
         const soundFont = this.soundFonts[soundFontIndex];
         const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
         const { instrument, sampleID } = voice.generators;
-        return soundFontIndex * (2 **
+        return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+            (sampleID << 8);
     }
     createChannelAudioNodes(audioContext) {
         const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -694,40 +1202,11 @@ class Midy extends EventTarget {
         gainL.connect(merger, 0, 0);
         gainR.connect(merger, 0, 1);
         merger.connect(this.masterVolume);
-        return {
-            gainL,
-            gainR,
-            merger,
-        };
-    }
-    resetChannelTable(channel) {
-        channel.controlTable.set(defaultControlValues);
-        channel.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
-        channel.channelPressureTable.set(defaultPressureValues);
-        channel.polyphonicKeyPressureTable.set(defaultPressureValues);
-        channel.keyBasedTable.fill(-1);
+        return { gainL, gainR, merger };
     }
     createChannels(audioContext) {
-        const
-
-        currentBufferSource: null,
-        isDrum: false,
-        state: new ControllerState(),
-        ...this.constructor.channelSettings,
-        ...this.createChannelAudioNodes(audioContext),
-        scheduledNotes: [],
-        sustainNotes: [],
-        sostenutoNotes: [],
-        controlTable: new Int8Array(defaultControlValues),
-        scaleOctaveTuningTable: new Float32Array(12), // [-100, 100] cent
-        channelPressureTable: new Int8Array(defaultPressureValues),
-        polyphonicKeyPressureTable: new Int8Array(defaultPressureValues),
-        keyBasedTable: new Int8Array(128 * 128).fill(-1),
-        keyBasedGainLs: new Array(128),
-        keyBasedGainRs: new Array(128),
-        };
-        });
-        return channels;
+        const settings = this.constructor.channelSettings;
+        return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
     }
     decodeOggVorbis(sample) {
         const task = decoderQueue.then(async () => {
@@ -786,15 +1265,26 @@ class Midy extends EventTarget {
         return ((programNumber === 48 && noteNumber === 88) ||
             (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
     }
-    createBufferSource(channel, noteNumber, voiceParams,
+    createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+        const isRendered = renderedOrRaw instanceof RenderedBuffer;
+        const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
         const bufferSource = new AudioBufferSourceNode(this.audioContext);
         bufferSource.buffer = audioBuffer;
-
+        const isDrumLoop = channel.isDrum
             ? this.isLoopDrum(channel, noteNumber)
-            :
+            : voiceParams.sampleModes % 2 !== 0;
+        const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+        bufferSource.loop = isLoop;
         if (bufferSource.loop) {
-
-
+            if (isRendered && renderedOrRaw.adsDuration != null) {
+                bufferSource.loopStart = renderedOrRaw.loopStart;
+                bufferSource.loopEnd = renderedOrRaw.loopStart +
+                    renderedOrRaw.loopDuration;
+            }
+            else {
+                bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+                bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+            }
         }
         return bufferSource;
     }
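createBufferSource now accepts either a raw AudioBuffer or a RenderedBuffer, and only the source of the loop points differs. A sketch of the arithmetic with illustrative numbers (the frame offsets are made up, not taken from any SoundFont):

    const sampleRate = 44100;
    const loopStartSec = 22050 / sampleRate; // raw branch: frames / sampleRate = 0.5 s
    const loopEndSec = 44100 / sampleRate;   // 1.0 s
    // Rendered branch: RenderedBuffer.loopStart/loopDuration are already in
    // seconds, pre-aligned by createAdsRenderedBuffer, and are used verbatim.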
@@ -811,15 +1301,14 @@ class Midy extends EventTarget {
                 break;
             const startTime = t + schedulingOffset;
             switch (event.type) {
-                case "noteOn":
-                    this.
-
-
-                    this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
+                case "noteOn": {
+                    const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+                    note.timelineIndex = queueIndex;
+                    this.setupNote(event.channel, note, startTime);
                     break;
                 }
-                case "
-                    this.
+                case "noteOff":
+                    this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
                     break;
                 case "controller":
                     this.setControlChange(event.channel, event.controllerType, event.value, startTime);
@@ -827,14 +1316,17 @@ class Midy extends EventTarget {
                 case "programChange":
                     this.setProgramChange(event.channel, event.programNumber, startTime);
                     break;
-                case "channelAftertouch":
-                    this.setChannelPressure(event.channel, event.amount, startTime);
-                    break;
                 case "pitchBend":
                     this.setPitchBend(event.channel, event.value + 8192, startTime);
                     break;
                 case "sysEx":
                     this.handleSysEx(event.data, startTime);
+                    break;
+                case "channelAftertouch":
+                    this.setChannelPressure(event.channel, event.amount, startTime);
+                    break;
+                case "noteAftertouch":
+                    this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
             }
             queueIndex++;
         }
@@ -855,6 +1347,7 @@ class Midy extends EventTarget {
         this.drumExclusiveClassNotes.fill(undefined);
         this.voiceCache.clear();
         this.realtimeVoiceCache.clear();
+        this.adsrVoiceCache.clear();
         const channels = this.channels;
         for (let ch = 0; ch < channels.length; ch++) {
             channels[ch].scheduledNotes = [];
@@ -881,14 +1374,104 @@ class Midy extends EventTarget {
                     break;
                 case "sysEx":
                     this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+                    break;
+                case "channelAftertouch":
+                    this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
+                    break;
+                case "noteAftertouch":
+                    this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, now - resumeTime + event.startTime * inverseTempo);
             }
         }
     }
+    async playAudioBuffer() {
+        const audioContext = this.audioContext;
+        const paused = this.isPaused;
+        this.isPlaying = true;
+        this.isPaused = false;
+        this.startTime = audioContext.currentTime;
+        if (paused) {
+            this.dispatchEvent(new Event("resumed"));
+        }
+        else {
+            this.dispatchEvent(new Event("started"));
+        }
+        let exitReason;
+        outer: while (true) {
+            const buffer = this.renderedAudioBuffer;
+            const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+            bufferSource.playbackRate.value = this.tempo;
+            bufferSource.connect(this.masterVolume);
+            const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+            bufferSource.start(audioContext.currentTime, offset);
+            this.audioModeBufferSource = bufferSource;
+            let naturalEnded = false;
+            bufferSource.onended = () => {
+                naturalEnded = true;
+            };
+            while (true) {
+                const now = audioContext.currentTime;
+                await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+                if (naturalEnded || this.currentTime() >= this.totalTime) {
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    if (this.loop) {
+                        this.resumeTime = 0;
+                        this.startTime = audioContext.currentTime;
+                        this.dispatchEvent(new Event("looped"));
+                        continue outer;
+                    }
+                    await audioContext.suspend();
+                    exitReason = "ended";
+                    break outer;
+                }
+                if (this.isPausing) {
+                    this.resumeTime = this.currentTime();
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isPausing = false;
+                    exitReason = "paused";
+                    break outer;
+                }
+                else if (this.isStopping) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isStopping = false;
+                    exitReason = "stopped";
+                    break outer;
+                }
+                else if (this.isSeeking) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    this.startTime = audioContext.currentTime;
+                    this.isSeeking = false;
+                    this.dispatchEvent(new Event("seeked"));
+                    continue outer;
+                }
+            }
+        }
+        this.isPlaying = false;
+        if (exitReason === "paused") {
+            this.isPaused = true;
+            this.dispatchEvent(new Event("paused"));
+        }
+        else if (exitReason !== undefined) {
+            this.isPaused = false;
+            this.dispatchEvent(new Event(exitReason));
+        }
+    }
     async playNotes() {
         const audioContext = this.audioContext;
         if (audioContext.state === "suspended") {
             await audioContext.resume();
         }
+        if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+            return await this.playAudioBuffer();
+        }
         const paused = this.isPaused;
         this.isPlaying = true;
         this.isPaused = false;
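From the caller's side, "audio"-mode playback is driven entirely by the events dispatched in playAudioBuffer above; a hedged sketch (event names are taken from this hunk, currentTime() from a later one):

    midy.addEventListener("looped", () => console.log("looped back to 0"));
    midy.addEventListener("paused", () => console.log("paused at", midy.currentTime()));
    midy.addEventListener("ended", () => console.log("playback finished"));
    await midy.start(); // reaches playAudioBuffer() through playNotes()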
@@ -1028,12 +1611,12 @@ class Midy extends EventTarget {
         if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
             switch (data[3]) {
                 case 1:
-                    this.GM1SystemOn(
+                    this.GM1SystemOn();
                     break;
                 case 2: // GM System Off
                     break;
                 case 3:
-                    this.GM2SystemOn(
+                    this.GM2SystemOn();
                     break;
                 default:
                     console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1100,6 +1683,193 @@ class Midy extends EventTarget {
         this.notePromises = [];
         return stopPromise;
     }
+    async render() {
+        if (this.isRendering)
+            return;
+        if (this.timeline.length === 0)
+            return;
+        if (this.voiceCounter.size === 0)
+            this.cacheVoiceIds();
+        this.isRendering = true;
+        this.renderedAudioBuffer = null;
+        this.dispatchEvent(new Event("rendering"));
+        const sampleRate = this.audioContext.sampleRate;
+        const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+        const renderBankMSB = new Uint8Array(this.numChannels);
+        const renderBankLSB = new Uint8Array(this.numChannels);
+        const renderProgramNumber = new Uint8Array(this.numChannels);
+        const renderIsDrum = new Uint8Array(this.numChannels);
+        const renderNoteAftertouch = new Uint8Array(this.numChannels * 128);
+        renderBankMSB.fill(121);
+        renderIsDrum[9] = 1;
+        renderBankMSB[9] = 120;
+        const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+            const state = new Float32Array(256);
+            for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                state[type] = defaultValue;
+            }
+            return state;
+        });
+        const tasks = [];
+        const timeline = this.timeline;
+        const inverseTempo = 1 / this.tempo;
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const ch = event.channel;
+            switch (event.type) {
+                case "noteOn": {
+                    const noteEvent = this.noteOnEvents.get(i);
+                    const noteDuration = noteEvent?.duration ??
+                        this.noteOnDurations.get(i) ??
+                        0;
+                    if (noteDuration <= 0)
+                        continue;
+                    const { noteNumber, velocity } = event;
+                    const isDrum = renderIsDrum[ch] === 1;
+                    const programNumber = renderProgramNumber[ch];
+                    const bankTable = this.soundFontTable[programNumber];
+                    if (!bankTable)
+                        continue;
+                    let bank = isDrum ? 128 : renderBankLSB[ch];
+                    if (bankTable[bank] === undefined) {
+                        if (isDrum)
+                            continue;
+                        bank = 0;
+                    }
+                    const soundFontIndex = bankTable[bank];
+                    if (soundFontIndex === undefined)
+                        continue;
+                    const soundFont = this.soundFonts[soundFontIndex];
+                    const pressure = renderNoteAftertouch[ch * 128 + noteNumber];
+                    const fakeChannel = {
+                        state: { array: renderControllerStates[ch].slice() },
+                        programNumber,
+                        isDrum,
+                        modulationDepthRange: 50,
+                        detune: 0,
+                    };
+                    const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity, pressure);
+                    const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+                    if (!voice)
+                        continue;
+                    const voiceParams = voice.getAllParams(controllerState);
+                    const t = event.startTime * inverseTempo + this.startDelay;
+                    const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+                    const promise = (async () => {
+                        try {
+                            return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+                        }
+                        catch (err) {
+                            console.warn("render: note render failed", err);
+                            return null;
+                        }
+                    })();
+                    tasks.push({ t, promise, fakeChannel });
+                    break;
+                }
+                case "controller": {
+                    const { controllerType, value } = event;
+                    switch (controllerType) {
+                        case 0: // bankMSB
+                            renderBankMSB[ch] = value;
+                            if (this.mode === "GM2") {
+                                if (value === 120) {
+                                    renderIsDrum[ch] = 1;
+                                }
+                                else if (value === 121) {
+                                    renderIsDrum[ch] = 0;
+                                }
+                            }
+                            break;
+                        case 32: // bankLSB
+                            renderBankLSB[ch] = value;
+                            break;
+                        default: {
+                            const stateIndex = 128 + controllerType;
+                            if (stateIndex < 256) {
+                                renderControllerStates[ch][stateIndex] = value / 127;
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "pitchBend":
+                    renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+                    break;
+                case "programChange":
+                    renderProgramNumber[ch] = event.programNumber;
+                    if (this.mode === "GM2") {
+                        if (renderBankMSB[ch] === 120) {
+                            renderIsDrum[ch] = 1;
+                        }
+                        else if (renderBankMSB[ch] === 121) {
+                            renderIsDrum[ch] = 0;
+                        }
+                    }
+                    break;
+                case "sysEx": {
+                    const data = event.data;
+                    if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+                        if (data[3] === 1) { // GM1 System On
+                            renderBankMSB.fill(0);
+                            renderBankLSB.fill(0);
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            renderBankMSB[9] = 1;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                            renderNoteAftertouch.fill(0);
+                        }
+                        else if (data[3] === 3) { // GM2 System On
+                            renderBankMSB.fill(121);
+                            renderBankLSB.fill(0);
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            renderBankMSB[9] = 120;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                            renderNoteAftertouch.fill(0);
+                        }
+                    }
+                    break;
+                }
+                case "channelAftertouch":
+                    renderControllerStates[ch][13] = event.amount / 127;
+                    break;
+                case "noteAftertouch":
+                    renderNoteAftertouch[ch * 128 + event.noteNumber] = event.amount;
+                    break;
+            }
+        }
+        const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+        for (let i = 0; i < tasks.length; i++) {
+            const { t, promise } = tasks[i];
+            const noteBuffer = await promise;
+            if (!noteBuffer)
+                continue;
+            const audioBuffer = noteBuffer instanceof RenderedBuffer
+                ? noteBuffer.buffer
+                : noteBuffer;
+            const bufferSource = new AudioBufferSourceNode(offlineContext, {
+                buffer: audioBuffer,
+            });
+            bufferSource.connect(offlineContext.destination);
+            bufferSource.start(t);
+        }
+        this.renderedAudioBuffer = await offlineContext.startRendering();
+        this.isRendering = false;
+        this.dispatchEvent(new Event("rendered"));
+        return this.renderedAudioBuffer;
+    }
     async start() {
         if (this.isPlaying || this.isPaused)
             return;
@@ -1136,11 +1906,22 @@ class Midy extends EventTarget {
         }
     }
     tempoChange(tempo) {
+        const cacheMode = this.cacheMode;
         const timeScale = this.tempo / tempo;
         this.resumeTime = this.resumeTime * timeScale;
         this.tempo = tempo;
         this.totalTime = this.calcTotalTime();
         this.seekTo(this.currentTime() * timeScale);
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+            this.fullVoiceCache.clear();
+            this.adsrVoiceCache.clear();
+        }
+        if (cacheMode === "audio") {
+            if (this.audioModeBufferSource) {
+                this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+            }
+        }
     }
     calcTotalTime() {
         const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1161,6 +1942,9 @@ class Midy extends EventTarget {
         if (!this.isPlaying)
             return this.resumeTime;
         const now = this.audioContext.currentTime;
+        if (this.cacheMode === "audio") {
+            return this.resumeTime + (now - this.startTime) * this.tempo;
+        }
         return now + this.resumeTime - this.startTime;
     }
     async processScheduledNotes(channel, callback) {
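Because the "audio"-mode buffer plays at playbackRate = tempo (see tempoChange above), elapsed context time has to be scaled by tempo when converted to a song position. A worked example of the formula in the currentTime() hunk above:

    const resumeTime = 10; // song position when playback (re)started, in seconds
    const elapsed = 2;     // audioContext seconds since startTime
    const tempo = 1.5;     // also the buffer's playbackRate
    const position = resumeTime + elapsed * tempo; // 13 s of song time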
@@ -1374,6 +2158,8 @@ class Midy extends EventTarget {
     }
     updateChannelDetune(channel, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (this.isPortamento(channel, note)) {
                 this.setPortamentoDetune(channel, note, scheduleTime);
             }
@@ -1465,6 +2251,8 @@ class Midy extends EventTarget {
             .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
     }
     setVolumeEnvelope(channel, note, scheduleTime) {
+        if (!note.volumeEnvelopeNode)
+            return;
         const { voiceParams, startTime, noteNumber } = note;
         const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
             (1 + this.getChannelAmplitudeControl(channel));
@@ -1510,9 +2298,6 @@ class Midy extends EventTarget {
     }
     setDetune(channel, note, scheduleTime) {
         const detune = this.calcNoteDetune(channel, note);
-        note.bufferSource.detune
-            .cancelScheduledValues(scheduleTime)
-            .setValueAtTime(detune, scheduleTime);
         const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
         note.bufferSource.detune
             .cancelAndHoldAtTime(scheduleTime)
@@ -1575,6 +2360,8 @@ class Midy extends EventTarget {
             .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
     }
     setFilterEnvelope(channel, note, scheduleTime) {
+        if (!note.filterEnvelopeNode)
+            return;
         const { voiceParams, startTime, noteNumber } = note;
         const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
         const baseCent = voiceParams.initialFilterFc +
@@ -1620,11 +2407,14 @@ class Midy extends EventTarget {
         this.setModLfoToVolume(channel, note, scheduleTime);
         note.modLfo.start(note.startTime + voiceParams.delayModLFO);
         note.modLfo.connect(note.modLfoToFilterFc);
-
+        if (note.filterEnvelopeNode) {
+            note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+        }
         note.modLfo.connect(note.modLfoToPitch);
         note.modLfoToPitch.connect(note.bufferSource.detune);
         note.modLfo.connect(note.modLfoToVolume);
-        note.
+        const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+        note.modLfoToVolume.connect(volumeTarget.gain);
     }
     startVibrato(channel, note, scheduleTime) {
         const { voiceParams, noteNumber } = note;
@@ -1640,34 +2430,346 @@ class Midy extends EventTarget {
|
|
|
1640
2430
|
note.vibLfo.connect(note.vibLfoToPitch);
|
|
1641
2431
|
note.vibLfoToPitch.connect(note.bufferSource.detune);
|
|
1642
2432
|
}
|
|
1643
|
-
async
|
|
2433
|
+
async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
|
|
2434
|
+
const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
|
|
2435
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2436
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2437
|
+
const decayDuration = voiceParams.volDecay;
|
|
2438
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2439
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2440
|
+
const loopDuration = isLoop
|
|
2441
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2442
|
+
: 0;
|
|
2443
|
+
const loopCount = isLoop && adsDuration > loopStartTime
|
|
2444
|
+
? Math.ceil((adsDuration - loopStartTime) / loopDuration)
|
|
2445
|
+
: 0;
|
|
2446
|
+
const alignedLoopStart = loopStartTime + loopCount * loopDuration;
|
|
2447
|
+
const renderDuration = isLoop
|
|
2448
|
+
? alignedLoopStart + loopDuration
|
|
2449
|
+
: audioBuffer.duration;
|
|
2450
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
|
|
2451
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2452
|
+
bufferSource.buffer = audioBuffer;
|
|
2453
|
+
bufferSource.playbackRate.value = voiceParams.playbackRate;
|
|
2454
|
+
bufferSource.loop = isLoop;
|
|
2455
|
+
if (isLoop) {
|
|
2456
|
+
bufferSource.loopStart = loopStartTime;
|
|
2457
|
+
bufferSource.loopEnd = loopStartTime + loopDuration;
|
|
2458
|
+
}
|
|
2459
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2460
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2461
|
+
type: "lowpass",
|
|
2462
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2463
|
+
frequency: initialFreq,
|
|
2464
|
+
});
|
|
2465
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2466
|
+
const offlineNote = {
|
|
2467
|
+
...note,
|
|
2468
|
+
startTime: 0,
|
|
2469
|
+
bufferSource,
|
|
2470
|
+
filterEnvelopeNode,
|
|
2471
|
+
volumeEnvelopeNode,
|
|
2472
|
+
};
|
|
2473
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2474
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2475
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2476
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2477
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2478
|
+
if (voiceParams.sample.type === "compressed") {
|
|
2479
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2480
|
+
}
|
|
2481
|
+
else {
|
|
2482
|
+
bufferSource.start(0);
|
|
2483
|
+
}
|
|
2484
|
+
const buffer = await offlineContext.startRendering();
|
|
2485
|
+
return new RenderedBuffer(buffer, {
|
|
2486
|
+
isLoop,
|
|
2487
|
+
adsDuration,
|
|
2488
|
+
loopStart: alignedLoopStart,
|
|
2489
|
+
loopDuration,
|
|
2490
|
+
});
|
|
2491
|
+
}
|
|
2492
|
+
async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
|
|
2493
|
+
const isLoop = voiceParams.sampleModes % 2 !== 0;
|
|
2494
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2495
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2496
|
+
const decayDuration = voiceParams.volDecay;
|
|
2497
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2498
|
+
const releaseDuration = voiceParams.volRelease;
|
|
2499
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2500
|
+
const loopDuration = isLoop
|
|
2501
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2502
|
+
: 0;
|
|
2503
|
+
const noteLoopCount = isLoop && noteDuration > loopStartTime
|
|
2504
|
+
? Math.ceil((noteDuration - loopStartTime) / loopDuration)
|
|
2505
|
+
: 0;
|
|
2506
|
+
const alignedNoteEnd = isLoop
|
|
2507
|
+
? loopStartTime + noteLoopCount * loopDuration
|
|
2508
|
+
: noteDuration;
|
|
2509
|
+
const noteOffTime = alignedNoteEnd;
|
|
2510
|
+
const totalDuration = noteOffTime + releaseDuration;
|
|
2511
|
+
const sampleRate = this.audioContext.sampleRate;
|
|
2512
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
|
|
2513
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2514
|
+
bufferSource.buffer = audioBuffer;
|
|
2515
|
+
bufferSource.playbackRate.value = voiceParams.playbackRate;
|
|
2516
|
+
bufferSource.loop = isLoop;
|
|
2517
|
+
if (isLoop) {
|
|
2518
|
+
bufferSource.loopStart = loopStartTime;
|
|
2519
|
+
bufferSource.loopEnd = loopStartTime + loopDuration;
|
|
2520
|
+
}
|
|
2521
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2522
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2523
|
+
type: "lowpass",
|
|
2524
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2525
|
+
frequency: initialFreq,
|
|
2526
|
+
});
|
|
2527
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2528
|
+
const offlineNote = {
|
|
2529
|
+
...note,
|
|
2530
|
+
startTime: 0,
|
|
2531
|
+
bufferSource,
|
|
2532
|
+
filterEnvelopeNode,
|
|
2533
|
+
volumeEnvelopeNode,
|
|
2534
|
+
};
|
|
2535
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2536
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2537
|
+
const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
|
|
2538
|
+
const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
|
|
2539
|
+
const volDelayTime = voiceParams.volDelay;
|
|
2540
|
+
const volAttackTime = volDelayTime + voiceParams.volAttack;
|
|
2541
|
+
const volHoldTime = volAttackTime + voiceParams.volHold;
|
|
2542
|
+
let gainAtNoteOff;
|
|
2543
|
+
if (noteOffTime <= volDelayTime) {
|
|
2544
|
+
gainAtNoteOff = 0;
|
|
2545
|
+
}
|
|
2546
|
+
else if (noteOffTime <= volAttackTime) {
|
|
2547
|
+
gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
|
|
2548
|
+
(noteOffTime - volDelayTime) / voiceParams.volAttack;
|
|
2549
|
+
}
|
|
2550
|
+
else if (noteOffTime <= volHoldTime) {
|
|
2551
|
+
gainAtNoteOff = attackVolume;
|
|
2552
|
+
}
|
|
2553
|
+
else {
|
|
2554
|
+
const decayElapsed = noteOffTime - volHoldTime;
|
|
2555
|
+
gainAtNoteOff = sustainVolume +
|
|
2556
|
+
(attackVolume - sustainVolume) *
|
|
2557
|
+
Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
|
|
2558
|
+
}
|
|
2559
|
+
volumeEnvelopeNode.gain
|
|
2560
|
+
.cancelScheduledValues(noteOffTime)
|
|
2561
|
+
.setValueAtTime(gainAtNoteOff, noteOffTime)
|
|
2562
|
+
.setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
|
|
2563
|
+
filterEnvelopeNode.frequency
|
|
2564
|
+
.cancelScheduledValues(noteOffTime)
|
|
2565
|
+
.setValueAtTime(initialFreq, noteOffTime)
|
|
2566
|
+
.setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
|
|
2567
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2568
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2569
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2570
|
+
if (isLoop) {
|
|
2571
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2572
|
+
}
|
|
2573
|
+
else {
|
|
2574
|
+
bufferSource.start(0);
|
|
2575
|
+
}
|
|
2576
|
+
const buffer = await offlineContext.startRendering();
|
|
2577
|
+
return new RenderedBuffer(buffer, {
|
|
2578
|
+
isLoop: false,
|
|
2579
|
+
isFull: false,
|
|
2580
|
+
adsDuration,
|
|
2581
|
+
noteDuration: noteOffTime,
|
|
2582
|
+
releaseDuration,
|
|
2583
|
+
});
|
|
2584
|
+
}
+    async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+        const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+        const ch = note.channel ?? 0;
+        const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+        const totalDuration = noteDuration + releaseEndDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const offlinePlayer = new this.constructor(offlineContext, {
+            cacheMode: "none",
+        });
+        offlineContext.suspend = () => Promise.resolve();
+        offlineContext.resume = () => Promise.resolve();
+        offlinePlayer.soundFonts = this.soundFonts;
+        offlinePlayer.soundFontTable = this.soundFontTable;
+        const dstChannel = offlinePlayer.channels[ch];
+        dstChannel.state.array.set(channel.state.array);
+        dstChannel.isDrum = channel.isDrum;
+        dstChannel.programNumber = channel.programNumber;
+        dstChannel.modulationDepthRange = channel.modulationDepthRange;
+        dstChannel.detune = this.calcChannelDetune(dstChannel);
+        await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+        for (const event of noteEvents) {
+            const t = event.startTime / this.tempo - noteStartTime;
+            if (t < 0 || t > noteDuration)
+                continue;
+            switch (event.type) {
+                case "controller":
+                    offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+                    break;
+                case "pitchBend":
+                    offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+                    break;
+                case "sysEx":
+                    offlinePlayer.handleSysEx(event.data, t);
+                    break;
+                case "channelAftertouch":
+                    offlinePlayer.setChannelPressure(ch, event.amount, t);
+                    break;
+                case "noteAftertouch":
+                    offlinePlayer.setPolyphonicKeyPressure(ch, event.noteNumber, event.amount, t);
+            }
+        }
+        offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: true,
+            noteDuration: noteDuration,
+            releaseDuration: releaseEndDuration,
+        });
+    }
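The pattern here is worth noting: instead of re-implementing the envelope math offline, the method instantiates a second player of the same class (`new this.constructor(...)`) in "none" mode inside an OfflineAudioContext, copies the channel state across, replays the note's MIDI events at their relative times, and renders. A minimal sketch of driving such an offline render directly; the SoundFont-loading step is omitted because the loading API is not shown in this hunk:

```js
// Sketch: render a single 1.5 s note into a plain AudioBuffer.
// Midy and the cacheMode option come from this diff; the rest is assumed setup.
const sampleRate = 44100;
const offline = new OfflineAudioContext(2, 2 * sampleRate, sampleRate);
const player = new Midy(offline, { cacheMode: "none" }); // real-time path, no caches
// ...load SoundFonts into `player` here (API not shown in this hunk)...
await player.noteOn(0, 60, 100, 0);  // middle C, velocity 100, at t = 0
player.noteOff(0, 60, 0, 1.5, true); // note-off baked in at t = 1.5 s
const audioBuffer = await offline.startRendering();
```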
+    async getAudioBuffer(channel, note, realtime) {
+        const cacheMode = this.cacheMode;
+        const { noteNumber, velocity } = note;
         const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+        if (!realtime) {
+            if (cacheMode === "note") {
+                return await this.getFullCachedBuffer(note, audioBufferId);
+            }
+            else if (cacheMode === "adsr") {
+                return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
+            }
+        }
+        if (cacheMode === "none") {
+            return await this.createAudioBuffer(note.voiceParams);
+        }
+        // fallback to ADS cache:
+        // - "ads" (realtime or not)
+        // - "adsr" + realtime
+        // - "note" + realtime
+        return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+    }
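The dispatch above means the constructor's `cacheMode` option alone decides the path a note takes, with `realtime` (no explicit start time) forcing a fallback because future events are unknown for live input. A usage sketch; the option name and constructor shape follow this diff, the rest is assumed setup:

```js
// cacheMode: "none" | "ads" | "adsr" | "note"
const midy = new Midy(new AudioContext(), { cacheMode: "adsr" });
// Scheduled notes (startTime given) -> "note"/"adsr" caches are eligible.
// Live notes (startTime undefined)  -> ADS cache, or uncached in "none" mode.
```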
+    async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+        const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+        const voiceParams = note.voiceParams;
         if (realtime) {
-            const
-            if (
-                return
-            const
-            this.
-
+            const cached = this.realtimeVoiceCache.get(cacheKey);
+            if (cached)
+                return cached;
+            const rawBuffer = await this.createAudioBuffer(voiceParams);
+            const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+            this.realtimeVoiceCache.set(cacheKey, rendered);
+            return rendered;
         }
         else {
-            const cache = this.voiceCache.get(
+            const cache = this.voiceCache.get(cacheKey);
             if (cache) {
                 cache.counter += 1;
                 if (cache.maxCount <= cache.counter) {
-                    this.voiceCache.delete(
+                    this.voiceCache.delete(cacheKey);
                 }
                 return cache.audioBuffer;
             }
             else {
-                const maxCount = this.voiceCounter.get(
-                const
-                const
-
-
+                const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+                const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                this.voiceCache.set(cacheKey, cache);
+                return rendered;
             }
         }
     }
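The ADS cache key is plain integer arithmetic: `audioBufferId + (noteNumber << 1) + 1`. Assuming `getVoiceId` leaves the low bits of its ids free (an inference, not stated in this hunk), shifting the note number left by one and setting the low bit keeps per-note entries from colliding with the raw voice id:

```js
// Illustrative key construction; the audioBufferId value is made up.
const audioBufferId = 0x10000; // as returned by getVoiceId (shape assumed)
const noteNumber = 60;
const cacheKey = audioBufferId + (noteNumber << 1) + 1; // 0x10000 + 121
```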
+    async getAdsrCachedBuffer(channel, note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+        const safeTicks = noteDurationTicks === Infinity
+            ? 0xffffffffn
+            : BigInt(noteDurationTicks);
+        const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+        const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+        const cacheKey = (BigInt(audioBufferId) << 160n) |
+            (playbackRateBits << 96n) |
+            (safeTicks << 64n) |
+            volReleaseBits;
+        let durationMap = this.adsrVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.adsrVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            return buf;
+        }
+        const noteDuration = noteEvent?.duration ?? 0;
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        return await renderPromise;
+    }
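The ADSR key is a single BigInt with non-overlapping bit fields: the 64 volRelease bits at positions 0-63, the tick count (which the Infinity guard caps at 0xffffffff) at 64-95, the 64 playbackRate bits at 96-159, and the voice id from bit 160 up. `f64ToBigInt` is not shown in this section; a common implementation reinterprets the double's raw bits, e.g.:

```js
// Assumed shape of f64ToBigInt: reinterpret an f64's 64 bits as a BigInt.
function f64ToBigInt(x) {
  const view = new DataView(new ArrayBuffer(8));
  view.setFloat64(0, x);
  return view.getBigUint64(0);
}

// Field layout implied by the shifts above:
// [ audioBufferId | playbackRate: 64 bits | ticks: 32 bits | volRelease: 64 bits ]
const key = (BigInt(12345) << 160n) |
    (f64ToBigInt(1.059463) << 96n) |
    (480n << 64n) |
    f64ToBigInt(0.25);
```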
+    async getFullCachedBuffer(note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDuration = noteEvent?.duration ?? 0;
+        const cacheKey = timelineIndex;
+        let durationMap = this.fullVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.fullVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            note.fullCacheVoiceId = audioBufferId;
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            note.fullCacheVoiceId = audioBufferId;
+            return buf;
+        }
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createFullRenderedBuffer(note, voiceParams, rawBuffer, noteDuration, noteEvent);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        const rendered = await renderPromise;
+        note.fullCacheVoiceId = audioBufferId;
+        return rendered;
+    }
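Both getAdsrCachedBuffer and getFullCachedBuffer store the in-flight Promise in the cache before awaiting it, so concurrent requests for the same key share one render instead of each spinning up an OfflineAudioContext. The pattern in isolation:

```js
// Promise-as-placeholder cache: dedupes concurrent async renders (sketch).
const cache = new Map();
async function getOrRender(key, render) {
  const hit = cache.get(key);
  if (hit !== undefined) return await hit; // settled value or in-flight Promise
  const promise = (async () => {
    try {
      const value = await render();
      cache.set(key, value); // swap the Promise for the settled value
      return value;
    }
    catch (err) {
      cache.delete(key); // allow a later call to retry
      throw err;
    }
  })();
  cache.set(key, promise);
  return await promise;
}
```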
     async setNoteAudioNode(channel, note, realtime) {
         const audioContext = this.audioContext;
         const now = audioContext.currentTime;
@@ -1676,50 +2778,72 @@ class Midy extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity, 0);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
-        note.volumeEnvelopeNode = new GainNode(audioContext);
         note.volumeNode = new GainNode(audioContext);
-
-
-
-
-
-
-
-
-
-
-
-
-
-        this.
-
-
-
-
-
-
+        note.volumeNode.gain.setValueAtTime(1, now);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            const prevNote = channel.scheduledNotes.at(-1);
+            if (prevNote && prevNote.noteNumber !== noteNumber) {
+                note.portamentoNoteNumber = prevNote.noteNumber;
+            }
+            if (!channel.isDrum && this.isPortamento(channel, note)) {
+                this.setPortamentoVolumeEnvelope(channel, note, now);
+                this.setPortamentoFilterEnvelope(channel, note, now);
+                this.setPortamentoPitchEnvelope(channel, note, now);
+                this.setPortamentoDetune(channel, note, now);
+            }
+            else {
+                this.setVolumeEnvelope(channel, note, now);
+                this.setFilterEnvelope(channel, note, now);
+                this.setPitchEnvelope(note, now);
+                this.setDetune(channel, note, now);
+            }
+            if (0 < state.vibratoDepth) {
+                this.startVibrato(channel, note, now);
+            }
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            if (channel.mono && channel.currentBufferSource) {
+                channel.currentBufferSource.stop(startTime);
+                channel.currentBufferSource = note.bufferSource;
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
             this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
         }
-        if (0 < state.vibratoDepth) {
-            this.startVibrato(channel, note, now);
-        }
-        if (0 < state.modulationDepthMSB + state.modulationDepthLSB) {
-            this.startModulation(channel, note, now);
-        }
-        if (channel.mono && channel.currentBufferSource) {
-            channel.currentBufferSource.stop(startTime);
-            channel.currentBufferSource = note.bufferSource;
-        }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
-        note.volumeEnvelopeNode.connect(note.volumeNode);
-        this.setChorusSend(channel, note, now);
-        this.setReverbSend(channel, note, now);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
             note.bufferSource.start(startTime);
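The three branches differ only in how much of the Web Audio graph stays live: "none" keeps real envelope nodes, while the cached modes connect the source straight to the per-note gain because the curves are already in the samples. A sketch of the two topologies, using only standard Web Audio API calls:

```js
// "none" mode: live envelopes between source and per-note gain.
const ctx = new AudioContext();
const source = ctx.createBufferSource();
const filterEnvelope = new BiquadFilterNode(ctx, { type: "lowpass" });
const volumeEnvelope = new GainNode(ctx);
const volumeNode = new GainNode(ctx);
source.connect(filterEnvelope);
filterEnvelope.connect(volumeEnvelope);
volumeEnvelope.connect(volumeNode);

// "ads" / "adsr" / "note" modes: envelopes are baked into the buffer,
// so the source feeds volumeNode directly and the two nodes above are null.
const cachedSource = ctx.createBufferSource();
cachedSource.connect(volumeNode);
```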
@@ -1761,51 +2885,58 @@ class Midy extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
         const channel = this.channels[channelNumber];
-        const {
-        if (
-
-        let gainL = keyBasedGainLs[noteNumber];
-        let gainR = keyBasedGainRs[noteNumber];
-        if (!gainL) {
-            const audioNodes = this.createChannelAudioNodes(this.audioContext);
-            gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
-            gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
-        }
-        volumeNode.connect(gainL);
-        volumeNode.connect(gainR);
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
         }
         else {
-
-
-
-
-
+            if (channel.isDrum) {
+                const noteNumber = note.noteNumber;
+                const { keyBasedGainLs, keyBasedGainRs } = channel;
+                let gainL = keyBasedGainLs[noteNumber];
+                let gainR = keyBasedGainRs[noteNumber];
+                if (!gainL) {
+                    const audioNodes = this.createChannelAudioNodes(this.audioContext);
+                    gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+                    gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+                }
+                volumeNode.connect(gainL);
+                volumeNode.connect(gainR);
+            }
+            else {
+                volumeNode.connect(channel.gainL);
+                volumeNode.connect(channel.gainR);
+            }
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
         this.handleDrumExclusiveClass(note, channelNumber, startTime);
     }
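Routing thus splits three ways: fully pre-rendered notes go straight to masterVolume (their channel processing was baked offline), drum notes get per-key stereo gain pairs so each drum key can be treated independently, and melodic notes share the channel's gainL/gainR. The per-key pairs are created lazily, a pattern worth isolating as a sketch (the real createChannelAudioNodes also wires these into the channel's effect chain):

```js
// Lazily allocate a stereo gain pair per drum key (illustrative sketch).
const keyBasedGainLs = [];
const keyBasedGainRs = [];
function gainsForKey(ctx, noteNumber) {
  let gainL = keyBasedGainLs[noteNumber];
  let gainR = keyBasedGainRs[noteNumber];
  if (!gainL) { // first note on this drum key: allocate once, reuse after
    gainL = keyBasedGainLs[noteNumber] = new GainNode(ctx);
    gainR = keyBasedGainRs[noteNumber] = new GainNode(ctx);
  }
  return { gainL, gainR };
}
```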
     async noteOn(channelNumber, noteNumber, velocity, startTime) {
         if (this.mpeEnabled) {
-            const
+            const channel = this.channels[channelNumber];
+            const noteIndex = channel.scheduledNotes.length;
             if (!this.mpeState.channelToNotes.has(channelNumber)) {
                 this.mpeState.channelToNotes.set(channelNumber, new Set());
             }
-            this.mpeState.channelToNotes.get(channelNumber).add(
-            this.mpeState.noteToChannel.set(
-        }
-        else {
-            await this.startNote(channelNumber, noteNumber, velocity, startTime);
+            this.mpeState.channelToNotes.get(channelNumber).add(noteIndex);
+            this.mpeState.noteToChannel.set(noteIndex, channelNumber);
         }
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
     }
-
-
-        const realtime = startTime === undefined;
-        if (realtime)
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-
-        scheduledNotes.
+        note.channel = channelNumber;
+        const channel = this.channels[channelNumber];
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
         if (!bankTable)
@@ -1820,18 +2951,24 @@ class Midy extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        if (0.5 <= channel.state.sostenutoPedal) {
+            channel.sostenutoNotes.push(note);
+        }
         return note;
     }
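noteOn is now a thin wrapper: createNote registers the Note in scheduledNotes immediately, and setupNote resolves the voice and builds the audio graph. Whether startTime was passed at all is the same realtime switch that getAudioBuffer saw earlier. Usage sketch, assuming a constructed `midy` instance:

```js
await midy.noteOn(0, 64, 100); // live input: realtime path, starts at currentTime
await midy.noteOn(0, 64, 100, midy.audioContext.currentTime + 0.5); // scheduled path
```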
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
         note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
@@ -1849,17 +2986,114 @@ class Midy extends EventTarget {
             note.chorusSend.disconnect();
         }
     }
+    releaseFullCache(note) {
+        if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+            return;
+        const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+        if (!durationMap)
+            return;
+        const entry = durationMap.get(note.timelineIndex);
+        if (entry instanceof RenderedBuffer) {
+            durationMap.delete(note.timelineIndex);
+            if (durationMap.size === 0) {
+                this.fullVoiceCache.delete(note.fullCacheVoiceId);
+            }
+        }
+    }
     releaseNote(channel, note, endTime) {
         endTime ??= this.audioContext.currentTime;
+        if (note.renderedBuffer?.isFull) {
+            const rb = note.renderedBuffer;
+            const naturalEndTime = note.startTime + rb.buffer.duration;
+            const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+            const isEarlyCut = endTime < noteOffTime;
+            if (isEarlyCut) {
+                const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
+                const volDuration = note.voiceParams.volRelease * releaseTime;
+                const volRelease = endTime + volDuration;
+                note.volumeNode.gain
+                    .cancelScheduledValues(endTime)
+                    .setValueAtTime(1, endTime)
+                    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        note.bufferSource.loop = false;
+                        note.bufferSource.stop(volRelease);
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, volRelease);
+                });
+            }
+            else {
+                const now = this.audioContext.currentTime;
+                if (naturalEndTime <= now) {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    return Promise.resolve();
+                }
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, naturalEndTime);
+                });
+            }
+        }
         const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
         const volDuration = note.voiceParams.volRelease * releaseTime;
         const volRelease = endTime + volDuration;
-        note.
-        .
-
-
-        .
-
+        if (note.volumeEnvelopeNode) { // "none" mode
+            note.filterEnvelopeNode.frequency
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+            note.volumeEnvelopeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        else { // "ads" / "adsr" mode
+            const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                !note.renderedBuffer.isFull;
+            if (isAdsr) {
+                const rb = note.renderedBuffer;
+                const naturalEndTime = note.startTime + rb.buffer.duration;
+                const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                const isEarlyCut = endTime < noteOffTime;
+                if (isEarlyCut) {
+                    const volRelease = endTime + volDuration;
+                    note.volumeNode.gain
+                        .cancelScheduledValues(endTime)
+                        .setValueAtTime(1, endTime)
+                        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop(volRelease);
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, volRelease);
+                    });
+                }
+                else {
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop();
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, naturalEndTime);
+                    });
+                }
+            }
+            note.volumeNode.gain
+                .cancelScheduledValues(endTime)
+                .setValueAtTime(1, endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
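Every release here is scheduled with setTargetAtTime, which decays exponentially toward its target: g(t) = g0 * exp(-(t - t0) / timeConstant). With timeConstant = volDuration * releaseCurve, the gain falls below 1% of g0 after roughly five time constants, which lines up with the `volRelease * releaseCurve * 5` tail reserved in createFullRenderedBuffer. Numerically (the releaseCurve value is assumed for illustration):

```js
const releaseCurve = 1 / 3;             // assumed value, for illustration only
const volDuration = 0.3;                // seconds of SoundFont release
const tau = volDuration * releaseCurve; // setTargetAtTime time constant: 0.1 s
const residual = Math.exp(-5);          // gain ratio remaining after 5 * tau
console.log(residual.toFixed(4));       // "0.0067" -> effectively silent
```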
         return new Promise((resolve) => {
             this.scheduleTask(() => {
                 const bufferSource = note.bufferSource;
@@ -2096,7 +3330,7 @@ class Midy extends EventTarget {
         this.applyVoiceParams(channel, 14, scheduleTime);
     }
     setModLfoToPitch(channel, note, scheduleTime) {
-        if (note.
+        if (note.modLfoToPitch) {
             const { modulationDepthMSB, modulationDepthLSB } = channel.state;
             const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
             const modLfoToPitch = note.voiceParams.modLfoToPitch +
@@ -2261,7 +3495,7 @@ class Midy extends EventTarget {
         reverbEffectsSend: (channel, note, scheduleTime) => {
             this.setReverbSend(channel, note, scheduleTime);
         },
-        delayModLFO: (
+        delayModLFO: (channel, note, _scheduleTime) => {
             const { modulationDepthMSB, modulationDepthLSB } = channel.state;
             if (0 < modulationDepthMSB + modulationDepthLSB) {
                 this.setDelayModLFO(note);
@@ -2299,11 +3533,12 @@ class Midy extends EventTarget {
         state[2] = velocity / 127;
         state[3] = noteNumber / 127;
         state[10] = polyphonicKeyPressure / 127;
-        state[13] = state.channelPressure / 127;
         return state;
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity, note.pressure);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
@@ -2410,8 +3645,8 @@ class Midy extends EventTarget {
         const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
         const depth = modulationDepth * channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
-            if (note.
-                note.
+            if (note.modLfoToPitch) {
+                note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
             else {
                 this.startModulation(channel, note, scheduleTime);
@@ -2566,11 +3801,15 @@ class Midy extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
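Pedal state is stored normalized (value / 127), so MIDI's conventional on-threshold of 64 becomes 0.5; comparing prevValue against 0.5 makes the capture edge-triggered, so notes are collected exactly once per pedal press rather than on every repeated CC64 message. The sostenuto hunk that follows uses the same pattern, except it snapshots only the notes active at the moment of the press. In isolation:

```js
// Edge-triggered sustain latch (sketch of the logic above).
function setSustain(channel, value, captureNotes) {
  const prev = channel.state.sustainPedal;
  channel.state.sustainPedal = value / 127;  // normalize 0..127 -> 0..1
  const pressed = 64 <= value;               // MIDI CC64 on-threshold
  if (pressed && prev < 0.5) captureNotes(); // only on the up -> down edge
}
```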
@@ -2594,13 +3833,17 @@ class Midy extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sostenutoPedal;
+        state.sostenutoPedal = value / 127;
         if (64 <= value) {
-
-
-
-
-
+            if (prevValue < 0.5) {
+                const sostenutoNotes = [];
+                this.processActiveNotes(channel, scheduleTime, (note) => {
+                    sostenutoNotes.push(note);
+                });
+                channel.sostenutoNotes = sostenutoNotes;
+            }
         }
         else {
             this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
@@ -2969,10 +4212,8 @@ class Midy extends EventTarget {
                 state[key] = defaultValue;
             }
         }
-
-
-        }
-        this.resetChannelTable(channel);
+        channel.resetSettings(this.constructor.channelSettings);
+        channel.resetTable();
         this.mode = "GM2";
         this.masterFineTuning = 0; // cent
         this.masterCoarseTuning = 0; // cent
@@ -3135,7 +4376,7 @@ class Midy extends EventTarget {
             case 9:
                 switch (data[3]) {
                     case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
-                        return this.handleChannelPressureSysEx(data,
+                        return this.handleChannelPressureSysEx(data, scheduleTime);
                     case 2: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
                         return this.handlePolyphonicKeyPressureSysEx(data, scheduleTime);
                     case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf