@marmooo/midy 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -2
- package/esm/midy-GM1.d.ts +86 -10
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1190 -101
- package/esm/midy-GM2.d.ts +103 -10
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1402 -162
- package/esm/midy-GMLite.d.ts +84 -9
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1183 -98
- package/esm/midy.d.ts +77 -15
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1416 -175
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +86 -10
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1190 -101
- package/script/midy-GM2.d.ts +103 -10
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1402 -162
- package/script/midy-GMLite.d.ts +84 -9
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1183 -98
- package/script/midy.d.ts +77 -15
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1416 -175
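
The headline change in 0.5.0 is a configurable note-cache strategy: the Midy constructor now accepts options.cacheMode ("none" | "ads" | "adsr" | "note" | "audio", default "ads"), and the "audio" mode adds an explicit render() step plus "rendering"/"rendered" events. A minimal usage sketch based on the midy.js diff below; the import path and the SoundFont/MIDI loading calls are assumptions about the pre-0.5.0 API and are not part of this diff:

// Sketch only: cacheMode, render(), start(), and the "rendering"/"rendered"
// events come from the midy.js diff below; loadSoundFont()/loadMIDI() are
// assumed loader names from the existing 0.4.x API and may differ.
import { Midy } from "@marmooo/midy";

const audioContext = new AudioContext();
const midy = new Midy(audioContext, { cacheMode: "audio" }); // default: "ads"

midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));

// await midy.loadSoundFont("soundfont.sf3"); // assumed API
// await midy.loadMIDI("song.midi");          // "audio" mode renders on load
await midy.render(); // explicit pre-render before start() (re-renders if called again)
await midy.start();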
package/esm/midy.js
CHANGED
|
@@ -1,6 +1,55 @@
|
|
|
1
1
|
import { parseMidi } from "midi-file";
|
|
2
2
|
import { parse, SoundFont } from "@marmooo/soundfont-parser";
|
|
3
3
|
import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
|
|
4
|
+
// Cache mode
|
|
5
|
+
// - "none" for full real-time control (dynamic CC, LFO, pitch)
|
|
6
|
+
// - "ads" for real-time playback with higher cache hit rate
|
|
7
|
+
// - "adsr" for real-time playback with accurate release envelope
|
|
8
|
+
// - "note" for efficient playback when note behavior is fixed
|
|
9
|
+
// - "audio" for fully pre-rendered playback (lowest CPU)
|
|
10
|
+
//
|
|
11
|
+
// "none"
|
|
12
|
+
// No caching. Envelope processing is done in real time on every note.
|
|
13
|
+
// Uses Web Audio API nodes directly, so LFO and pitch envelope are
|
|
14
|
+
// fully supported. Higher CPU usage.
|
|
15
|
+
// "ads"
|
|
16
|
+
// Pre-renders the ADS (Attack-Decay-Sustain) phase into an
|
|
17
|
+
// OfflineAudioContext and caches the result. The sustain tail is
|
|
18
|
+
// aligned to the loop boundary as a fixed buffer. Release is
|
|
19
|
+
// handled by fading volumeNode gain to 0 at note-off.
|
|
20
|
+
// LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
|
|
21
|
+
// vibLfoToPitch) are applied in real time after playback starts.
|
|
22
|
+
// "adsr"
|
|
23
|
+
// Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
|
|
24
|
+
// into an OfflineAudioContext. The cache key includes the note
|
|
25
|
+
// duration in ticks (tempo-independent) and the volRelease parameter,
|
|
26
|
+
// so notes with the same duration and release shape share a buffer.
|
|
27
|
+
// LFO effects are applied in real time after playback starts,
|
|
28
|
+
// same as "ads" mode. Higher cache hit rate than "note" mode
|
|
29
|
+
// because LFO variations do not produce separate cache entries.
|
|
30
|
+
// "note"
|
|
31
|
+
// Renders the full noteOn-to-noteOff duration per note in an
|
|
32
|
+
// OfflineAudioContext. All events during the note (volume,
|
|
33
|
+
// expression, pitch bend, LFO, CC#1) are baked into the buffer,
|
|
34
|
+
// so no real-time processing is needed during playback. Greatly
|
|
35
|
+
// reduces CPU load for songs with many simultaneous notes.
|
|
36
|
+
// MIDI file playback only — does not respond to real-time CC changes.
|
|
37
|
+
// "audio"
|
|
38
|
+
// Renders the entire MIDI file into a single AudioBuffer offline.
|
|
39
|
+
// Call render() to complete rendering before calling start().
|
|
40
|
+
// Playback simply streams an AudioBufferSourceNode, so CPU usage
|
|
41
|
+
// is near zero. Seek and tempo changes are handled in real time.
|
|
42
|
+
// A "rendering" event is dispatched when rendering starts, and a
|
|
43
|
+
// "rendered" event is dispatched when rendering completes.
|
|
44
|
+
/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
|
|
45
|
+
const DEFAULT_CACHE_MODE = "ads";
|
|
46
|
+
const _f64Buf = new ArrayBuffer(8);
|
|
47
|
+
const _f64Array = new Float64Array(_f64Buf);
|
|
48
|
+
const _u64Array = new BigUint64Array(_f64Buf);
|
|
49
|
+
function f64ToBigInt(value) {
|
|
50
|
+
_f64Array[0] = value;
|
|
51
|
+
return _u64Array[0];
|
|
52
|
+
}
|
|
4
53
|
let decoderPromise = null;
|
|
5
54
|
let decoderQueue = Promise.resolve();
|
|
6
55
|
function initDecoder() {
|
|
@@ -48,6 +97,24 @@ class Note {
|
|
|
48
97
|
writable: true,
|
|
49
98
|
value: void 0
|
|
50
99
|
});
|
|
100
|
+
Object.defineProperty(this, "timelineIndex", {
|
|
101
|
+
enumerable: true,
|
|
102
|
+
configurable: true,
|
|
103
|
+
writable: true,
|
|
104
|
+
value: null
|
|
105
|
+
});
|
|
106
|
+
Object.defineProperty(this, "renderedBuffer", {
|
|
107
|
+
enumerable: true,
|
|
108
|
+
configurable: true,
|
|
109
|
+
writable: true,
|
|
110
|
+
value: null
|
|
111
|
+
});
|
|
112
|
+
Object.defineProperty(this, "fullCacheVoiceId", {
|
|
113
|
+
enumerable: true,
|
|
114
|
+
configurable: true,
|
|
115
|
+
writable: true,
|
|
116
|
+
value: null
|
|
117
|
+
});
|
|
51
118
|
Object.defineProperty(this, "filterEnvelopeNode", {
|
|
52
119
|
enumerable: true,
|
|
53
120
|
configurable: true,
|
|
@@ -134,6 +201,173 @@ class Note {
|
|
|
134
201
|
});
|
|
135
202
|
}
|
|
136
203
|
}
|
|
204
|
+
class Channel {
|
|
205
|
+
constructor(audioNodes, settings) {
|
|
206
|
+
Object.defineProperty(this, "isDrum", {
|
|
207
|
+
enumerable: true,
|
|
208
|
+
configurable: true,
|
|
209
|
+
writable: true,
|
|
210
|
+
value: false
|
|
211
|
+
});
|
|
212
|
+
Object.defineProperty(this, "programNumber", {
|
|
213
|
+
enumerable: true,
|
|
214
|
+
configurable: true,
|
|
215
|
+
writable: true,
|
|
216
|
+
value: 0
|
|
217
|
+
});
|
|
218
|
+
Object.defineProperty(this, "scheduleIndex", {
|
|
219
|
+
enumerable: true,
|
|
220
|
+
configurable: true,
|
|
221
|
+
writable: true,
|
|
222
|
+
value: 0
|
|
223
|
+
});
|
|
224
|
+
Object.defineProperty(this, "detune", {
|
|
225
|
+
enumerable: true,
|
|
226
|
+
configurable: true,
|
|
227
|
+
writable: true,
|
|
228
|
+
value: 0
|
|
229
|
+
});
|
|
230
|
+
Object.defineProperty(this, "bankMSB", {
|
|
231
|
+
enumerable: true,
|
|
232
|
+
configurable: true,
|
|
233
|
+
writable: true,
|
|
234
|
+
value: 121
|
|
235
|
+
});
|
|
236
|
+
Object.defineProperty(this, "bankLSB", {
|
|
237
|
+
enumerable: true,
|
|
238
|
+
configurable: true,
|
|
239
|
+
writable: true,
|
|
240
|
+
value: 0
|
|
241
|
+
});
|
|
242
|
+
Object.defineProperty(this, "dataMSB", {
|
|
243
|
+
enumerable: true,
|
|
244
|
+
configurable: true,
|
|
245
|
+
writable: true,
|
|
246
|
+
value: 0
|
|
247
|
+
});
|
|
248
|
+
Object.defineProperty(this, "dataLSB", {
|
|
249
|
+
enumerable: true,
|
|
250
|
+
configurable: true,
|
|
251
|
+
writable: true,
|
|
252
|
+
value: 0
|
|
253
|
+
});
|
|
254
|
+
Object.defineProperty(this, "rpnMSB", {
|
|
255
|
+
enumerable: true,
|
|
256
|
+
configurable: true,
|
|
257
|
+
writable: true,
|
|
258
|
+
value: 127
|
|
259
|
+
});
|
|
260
|
+
Object.defineProperty(this, "rpnLSB", {
|
|
261
|
+
enumerable: true,
|
|
262
|
+
configurable: true,
|
|
263
|
+
writable: true,
|
|
264
|
+
value: 127
|
|
265
|
+
});
|
|
266
|
+
Object.defineProperty(this, "mono", {
|
|
267
|
+
enumerable: true,
|
|
268
|
+
configurable: true,
|
|
269
|
+
writable: true,
|
|
270
|
+
value: false
|
|
271
|
+
}); // CC#124, CC#125
|
|
272
|
+
Object.defineProperty(this, "modulationDepthRange", {
|
|
273
|
+
enumerable: true,
|
|
274
|
+
configurable: true,
|
|
275
|
+
writable: true,
|
|
276
|
+
value: 50
|
|
277
|
+
}); // cent
|
|
278
|
+
Object.defineProperty(this, "fineTuning", {
|
|
279
|
+
enumerable: true,
|
|
280
|
+
configurable: true,
|
|
281
|
+
writable: true,
|
|
282
|
+
value: 0
|
|
283
|
+
}); // cent
|
|
284
|
+
Object.defineProperty(this, "coarseTuning", {
|
|
285
|
+
enumerable: true,
|
|
286
|
+
configurable: true,
|
|
287
|
+
writable: true,
|
|
288
|
+
value: 0
|
|
289
|
+
}); // cent
|
|
290
|
+
Object.defineProperty(this, "scheduledNotes", {
|
|
291
|
+
enumerable: true,
|
|
292
|
+
configurable: true,
|
|
293
|
+
writable: true,
|
|
294
|
+
value: []
|
|
295
|
+
});
|
|
296
|
+
Object.defineProperty(this, "sustainNotes", {
|
|
297
|
+
enumerable: true,
|
|
298
|
+
configurable: true,
|
|
299
|
+
writable: true,
|
|
300
|
+
value: []
|
|
301
|
+
});
|
|
302
|
+
Object.defineProperty(this, "sostenutoNotes", {
|
|
303
|
+
enumerable: true,
|
|
304
|
+
configurable: true,
|
|
305
|
+
writable: true,
|
|
306
|
+
value: []
|
|
307
|
+
});
|
|
308
|
+
Object.defineProperty(this, "controlTable", {
|
|
309
|
+
enumerable: true,
|
|
310
|
+
configurable: true,
|
|
311
|
+
writable: true,
|
|
312
|
+
value: new Int8Array(defaultControlValues)
|
|
313
|
+
});
|
|
314
|
+
Object.defineProperty(this, "scaleOctaveTuningTable", {
|
|
315
|
+
enumerable: true,
|
|
316
|
+
configurable: true,
|
|
317
|
+
writable: true,
|
|
318
|
+
value: new Float32Array(12)
|
|
319
|
+
}); // [-100, 100] cent
|
|
320
|
+
Object.defineProperty(this, "channelPressureTable", {
|
|
321
|
+
enumerable: true,
|
|
322
|
+
configurable: true,
|
|
323
|
+
writable: true,
|
|
324
|
+
value: new Int8Array(defaultPressureValues)
|
|
325
|
+
});
|
|
326
|
+
Object.defineProperty(this, "polyphonicKeyPressureTable", {
|
|
327
|
+
enumerable: true,
|
|
328
|
+
configurable: true,
|
|
329
|
+
writable: true,
|
|
330
|
+
value: new Int8Array(defaultPressureValues)
|
|
331
|
+
});
|
|
332
|
+
Object.defineProperty(this, "keyBasedTable", {
|
|
333
|
+
enumerable: true,
|
|
334
|
+
configurable: true,
|
|
335
|
+
writable: true,
|
|
336
|
+
value: new Int8Array(128 * 128).fill(-1)
|
|
337
|
+
});
|
|
338
|
+
Object.defineProperty(this, "keyBasedGainLs", {
|
|
339
|
+
enumerable: true,
|
|
340
|
+
configurable: true,
|
|
341
|
+
writable: true,
|
|
342
|
+
value: new Array(128)
|
|
343
|
+
});
|
|
344
|
+
Object.defineProperty(this, "keyBasedGainRs", {
|
|
345
|
+
enumerable: true,
|
|
346
|
+
configurable: true,
|
|
347
|
+
writable: true,
|
|
348
|
+
value: new Array(128)
|
|
349
|
+
});
|
|
350
|
+
Object.defineProperty(this, "currentBufferSource", {
|
|
351
|
+
enumerable: true,
|
|
352
|
+
configurable: true,
|
|
353
|
+
writable: true,
|
|
354
|
+
value: null
|
|
355
|
+
});
|
|
356
|
+
Object.assign(this, audioNodes);
|
|
357
|
+
Object.assign(this, settings);
|
|
358
|
+
this.state = new ControllerState();
|
|
359
|
+
}
|
|
360
|
+
resetSettings(settings) {
|
|
361
|
+
Object.assign(this, settings);
|
|
362
|
+
}
|
|
363
|
+
resetTable() {
|
|
364
|
+
this.controlTable.set(defaultControlValues);
|
|
365
|
+
this.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
|
|
366
|
+
this.channelPressureTable.set(defaultPressureValues);
|
|
367
|
+
this.polyphonicKeyPressureTable.set(defaultPressureValues);
|
|
368
|
+
this.keyBasedTable.fill(-1);
|
|
369
|
+
}
|
|
370
|
+
}
|
|
137
371
|
const drumExclusiveClassesByKit = new Array(57);
|
|
138
372
|
const drumExclusiveClassCount = 10;
|
|
139
373
|
const standardSet = new Uint8Array(128);
|
|
@@ -283,13 +517,73 @@ const defaultControlValues = new Int8Array([
|
|
|
283
517
|
...[-1, -1, -1, -1, -1, -1],
|
|
284
518
|
...defaultPressureValues,
|
|
285
519
|
]);
|
|
520
|
+
class RenderedBuffer {
|
|
521
|
+
constructor(buffer, meta = {}) {
|
|
522
|
+
Object.defineProperty(this, "buffer", {
|
|
523
|
+
enumerable: true,
|
|
524
|
+
configurable: true,
|
|
525
|
+
writable: true,
|
|
526
|
+
value: void 0
|
|
527
|
+
});
|
|
528
|
+
Object.defineProperty(this, "isLoop", {
|
|
529
|
+
enumerable: true,
|
|
530
|
+
configurable: true,
|
|
531
|
+
writable: true,
|
|
532
|
+
value: void 0
|
|
533
|
+
});
|
|
534
|
+
Object.defineProperty(this, "isFull", {
|
|
535
|
+
enumerable: true,
|
|
536
|
+
configurable: true,
|
|
537
|
+
writable: true,
|
|
538
|
+
value: void 0
|
|
539
|
+
});
|
|
540
|
+
Object.defineProperty(this, "adsDuration", {
|
|
541
|
+
enumerable: true,
|
|
542
|
+
configurable: true,
|
|
543
|
+
writable: true,
|
|
544
|
+
value: void 0
|
|
545
|
+
});
|
|
546
|
+
Object.defineProperty(this, "loopStart", {
|
|
547
|
+
enumerable: true,
|
|
548
|
+
configurable: true,
|
|
549
|
+
writable: true,
|
|
550
|
+
value: void 0
|
|
551
|
+
});
|
|
552
|
+
Object.defineProperty(this, "loopDuration", {
|
|
553
|
+
enumerable: true,
|
|
554
|
+
configurable: true,
|
|
555
|
+
writable: true,
|
|
556
|
+
value: void 0
|
|
557
|
+
});
|
|
558
|
+
Object.defineProperty(this, "noteDuration", {
|
|
559
|
+
enumerable: true,
|
|
560
|
+
configurable: true,
|
|
561
|
+
writable: true,
|
|
562
|
+
value: void 0
|
|
563
|
+
});
|
|
564
|
+
Object.defineProperty(this, "releaseDuration", {
|
|
565
|
+
enumerable: true,
|
|
566
|
+
configurable: true,
|
|
567
|
+
writable: true,
|
|
568
|
+
value: void 0
|
|
569
|
+
});
|
|
570
|
+
this.buffer = buffer;
|
|
571
|
+
this.isLoop = meta.isLoop ?? false;
|
|
572
|
+
this.isFull = meta.isFull ?? false;
|
|
573
|
+
this.adsDuration = meta.adsDuration;
|
|
574
|
+
this.loopStart = meta.loopStart;
|
|
575
|
+
this.loopDuration = meta.loopDuration;
|
|
576
|
+
this.noteDuration = meta.noteDuration;
|
|
577
|
+
this.releaseDuration = meta.releaseDuration;
|
|
578
|
+
}
|
|
579
|
+
}
|
|
286
580
|
function cbToRatio(cb) {
|
|
287
581
|
return Math.pow(10, cb / 200);
|
|
288
582
|
}
|
|
289
583
|
const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
|
|
290
584
|
const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
|
|
291
585
|
export class Midy extends EventTarget {
|
|
292
|
-
constructor(audioContext) {
|
|
586
|
+
constructor(audioContext, options = {}) {
|
|
293
587
|
super();
|
|
294
588
|
// https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
|
|
295
589
|
// https://pubmed.ncbi.nlm.nih.gov/12488797/
|
|
@@ -471,9 +765,7 @@ export class Midy extends EventTarget {
|
|
|
471
765
|
enumerable: true,
|
|
472
766
|
configurable: true,
|
|
473
767
|
writable: true,
|
|
474
|
-
value: new Set([
|
|
475
|
-
"noteOff",
|
|
476
|
-
])
|
|
768
|
+
value: new Set(["noteOff"])
|
|
477
769
|
});
|
|
478
770
|
Object.defineProperty(this, "tempo", {
|
|
479
771
|
enumerable: true,
|
|
@@ -529,6 +821,51 @@ export class Midy extends EventTarget {
|
|
|
529
821
|
writable: true,
|
|
530
822
|
value: new Array(this.numChannels * drumExclusiveClassCount)
|
|
531
823
|
});
|
|
824
|
+
// "adsr" mode
|
|
825
|
+
Object.defineProperty(this, "adsrVoiceCache", {
|
|
826
|
+
enumerable: true,
|
|
827
|
+
configurable: true,
|
|
828
|
+
writable: true,
|
|
829
|
+
value: new Map()
|
|
830
|
+
});
|
|
831
|
+
// "note" mode
|
|
832
|
+
Object.defineProperty(this, "noteOnDurations", {
|
|
833
|
+
enumerable: true,
|
|
834
|
+
configurable: true,
|
|
835
|
+
writable: true,
|
|
836
|
+
value: new Map()
|
|
837
|
+
});
|
|
838
|
+
Object.defineProperty(this, "noteOnEvents", {
|
|
839
|
+
enumerable: true,
|
|
840
|
+
configurable: true,
|
|
841
|
+
writable: true,
|
|
842
|
+
value: new Map()
|
|
843
|
+
});
|
|
844
|
+
Object.defineProperty(this, "fullVoiceCache", {
|
|
845
|
+
enumerable: true,
|
|
846
|
+
configurable: true,
|
|
847
|
+
writable: true,
|
|
848
|
+
value: new Map()
|
|
849
|
+
});
|
|
850
|
+
// "audio" mode
|
|
851
|
+
Object.defineProperty(this, "renderedAudioBuffer", {
|
|
852
|
+
enumerable: true,
|
|
853
|
+
configurable: true,
|
|
854
|
+
writable: true,
|
|
855
|
+
value: null
|
|
856
|
+
});
|
|
857
|
+
Object.defineProperty(this, "isRendering", {
|
|
858
|
+
enumerable: true,
|
|
859
|
+
configurable: true,
|
|
860
|
+
writable: true,
|
|
861
|
+
value: false
|
|
862
|
+
});
|
|
863
|
+
Object.defineProperty(this, "audioModeBufferSource", {
|
|
864
|
+
enumerable: true,
|
|
865
|
+
configurable: true,
|
|
866
|
+
writable: true,
|
|
867
|
+
value: null
|
|
868
|
+
});
|
|
532
869
|
Object.defineProperty(this, "mpeEnabled", {
|
|
533
870
|
enumerable: true,
|
|
534
871
|
configurable: true,
|
|
@@ -556,10 +893,8 @@ export class Midy extends EventTarget {
|
|
|
556
893
|
noteToChannel: new Map(),
|
|
557
894
|
}
|
|
558
895
|
});
|
|
559
|
-
this.decoder = new OggVorbisDecoderWebWorker();
|
|
560
|
-
this.decoderReady = this.decoder.ready;
|
|
561
|
-
this.decoderQueue = Promise.resolve();
|
|
562
896
|
this.audioContext = audioContext;
|
|
897
|
+
this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
|
|
563
898
|
this.masterVolume = new GainNode(audioContext);
|
|
564
899
|
this.scheduler = new GainNode(audioContext, { gain: 0 });
|
|
565
900
|
this.schedulerBuffer = new AudioBuffer({
|
|
@@ -635,9 +970,178 @@ export class Midy extends EventTarget {
|
|
|
635
970
|
this.instruments = midiData.instruments;
|
|
636
971
|
this.timeline = midiData.timeline;
|
|
637
972
|
this.totalTime = this.calcTotalTime();
|
|
973
|
+
if (this.cacheMode === "audio") {
|
|
974
|
+
await this.render();
|
|
975
|
+
}
|
|
976
|
+
}
|
|
977
|
+
buildNoteOnDurations() {
|
|
978
|
+
const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
|
|
979
|
+
noteOnDurations.clear();
|
|
980
|
+
noteOnEvents.clear();
|
|
981
|
+
const inverseTempo = 1 / this.tempo;
|
|
982
|
+
const sustainPedal = new Uint8Array(numChannels);
|
|
983
|
+
const sostenutoPedal = new Uint8Array(numChannels);
|
|
984
|
+
const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
|
|
985
|
+
const activeNotes = new Map();
|
|
986
|
+
const pendingOff = new Map();
|
|
987
|
+
const finalizeEntry = (entry, endTime, endTicks) => {
|
|
988
|
+
const duration = Math.max(0, endTime - entry.startTime);
|
|
989
|
+
const durationTicks = (endTicks == null || endTicks === Infinity)
|
|
990
|
+
? Infinity
|
|
991
|
+
: Math.max(0, endTicks - entry.startTicks);
|
|
992
|
+
noteOnDurations.set(entry.idx, duration);
|
|
993
|
+
noteOnEvents.set(entry.idx, {
|
|
994
|
+
duration,
|
|
995
|
+
durationTicks,
|
|
996
|
+
startTime: entry.startTime,
|
|
997
|
+
events: entry.events,
|
|
998
|
+
});
|
|
999
|
+
};
|
|
1000
|
+
for (let i = 0; i < timeline.length; i++) {
|
|
1001
|
+
const event = timeline[i];
|
|
1002
|
+
const t = event.startTime * inverseTempo;
|
|
1003
|
+
switch (event.type) {
|
|
1004
|
+
case "noteOn": {
|
|
1005
|
+
const key = event.noteNumber * numChannels + event.channel;
|
|
1006
|
+
if (!activeNotes.has(key))
|
|
1007
|
+
activeNotes.set(key, []);
|
|
1008
|
+
activeNotes.get(key).push({
|
|
1009
|
+
idx: i,
|
|
1010
|
+
startTime: t,
|
|
1011
|
+
startTicks: event.ticks,
|
|
1012
|
+
events: [],
|
|
1013
|
+
});
|
|
1014
|
+
const pendingStack = pendingOff.get(key);
|
|
1015
|
+
if (pendingStack && pendingStack.length > 0)
|
|
1016
|
+
pendingStack.shift();
|
|
1017
|
+
break;
|
|
1018
|
+
}
|
|
1019
|
+
case "noteOff": {
|
|
1020
|
+
const ch = event.channel;
|
|
1021
|
+
const key = event.noteNumber * numChannels + ch;
|
|
1022
|
+
const isSostenuto = sostenutoKeys[ch].has(key);
|
|
1023
|
+
if (sustainPedal[ch] || isSostenuto) {
|
|
1024
|
+
if (!pendingOff.has(key))
|
|
1025
|
+
pendingOff.set(key, []);
|
|
1026
|
+
pendingOff.get(key).push({ t, ticks: event.ticks });
|
|
1027
|
+
}
|
|
1028
|
+
else {
|
|
1029
|
+
const stack = activeNotes.get(key);
|
|
1030
|
+
if (stack && stack.length > 0) {
|
|
1031
|
+
finalizeEntry(stack.shift(), t, event.ticks);
|
|
1032
|
+
if (stack.length === 0)
|
|
1033
|
+
activeNotes.delete(key);
|
|
1034
|
+
}
|
|
1035
|
+
}
|
|
1036
|
+
break;
|
|
1037
|
+
}
|
|
1038
|
+
case "controller": {
|
|
1039
|
+
const ch = event.channel;
|
|
1040
|
+
for (const [key, entries] of activeNotes) {
|
|
1041
|
+
if (key % numChannels !== ch)
|
|
1042
|
+
continue;
|
|
1043
|
+
for (const entry of entries)
|
|
1044
|
+
entry.events.push(event);
|
|
1045
|
+
}
|
|
1046
|
+
switch (event.controllerType) {
|
|
1047
|
+
case 64: { // Sustain Pedal
|
|
1048
|
+
const on = event.value >= 64;
|
|
1049
|
+
sustainPedal[ch] = on ? 1 : 0;
|
|
1050
|
+
if (!on) {
|
|
1051
|
+
for (const [key, offItems] of pendingOff) {
|
|
1052
|
+
if (key % numChannels !== ch)
|
|
1053
|
+
continue;
|
|
1054
|
+
const activeStack = activeNotes.get(key);
|
|
1055
|
+
for (const { t: offTime, ticks: offTicks } of offItems) {
|
|
1056
|
+
if (activeStack && activeStack.length > 0) {
|
|
1057
|
+
finalizeEntry(activeStack.shift(), offTime, offTicks);
|
|
1058
|
+
if (activeStack.length === 0)
|
|
1059
|
+
activeNotes.delete(key);
|
|
1060
|
+
}
|
|
1061
|
+
}
|
|
1062
|
+
pendingOff.delete(key);
|
|
1063
|
+
}
|
|
1064
|
+
}
|
|
1065
|
+
break;
|
|
1066
|
+
}
|
|
1067
|
+
case 66: { // Sostenuto Pedal
|
|
1068
|
+
const on = event.value >= 64;
|
|
1069
|
+
if (on && !sostenutoPedal[ch]) {
|
|
1070
|
+
for (const [key] of activeNotes) {
|
|
1071
|
+
if (key % numChannels === ch)
|
|
1072
|
+
sostenutoKeys[ch].add(key);
|
|
1073
|
+
}
|
|
1074
|
+
}
|
|
1075
|
+
else if (!on) {
|
|
1076
|
+
sostenutoKeys[ch].clear();
|
|
1077
|
+
}
|
|
1078
|
+
sostenutoPedal[ch] = on ? 1 : 0;
|
|
1079
|
+
break;
|
|
1080
|
+
}
|
|
1081
|
+
case 121: // Reset All Controllers
|
|
1082
|
+
sustainPedal[ch] = 0;
|
|
1083
|
+
sostenutoPedal[ch] = 0;
|
|
1084
|
+
sostenutoKeys[ch].clear();
|
|
1085
|
+
break;
|
|
1086
|
+
case 120: // All Sound Off
|
|
1087
|
+
case 123: { // All Notes Off
|
|
1088
|
+
for (const [key, stack] of activeNotes) {
|
|
1089
|
+
if (key % numChannels !== ch)
|
|
1090
|
+
continue;
|
|
1091
|
+
for (const entry of stack)
|
|
1092
|
+
finalizeEntry(entry, t, event.ticks);
|
|
1093
|
+
activeNotes.delete(key);
|
|
1094
|
+
}
|
|
1095
|
+
for (const key of pendingOff.keys()) {
|
|
1096
|
+
if (key % numChannels === ch)
|
|
1097
|
+
pendingOff.delete(key);
|
|
1098
|
+
}
|
|
1099
|
+
break;
|
|
1100
|
+
}
|
|
1101
|
+
}
|
|
1102
|
+
break;
|
|
1103
|
+
}
|
|
1104
|
+
case "sysEx":
|
|
1105
|
+
if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
|
|
1106
|
+
// GM1 System On / GM2 System On
|
|
1107
|
+
if (event.data[3] === 1 || event.data[3] === 3) {
|
|
1108
|
+
sustainPedal.fill(0);
|
|
1109
|
+
pendingOff.clear();
|
|
1110
|
+
for (const [, stack] of activeNotes) {
|
|
1111
|
+
for (const entry of stack)
|
|
1112
|
+
finalizeEntry(entry, t, event.ticks);
|
|
1113
|
+
}
|
|
1114
|
+
activeNotes.clear();
|
|
1115
|
+
}
|
|
1116
|
+
}
|
|
1117
|
+
else {
|
|
1118
|
+
for (const [, entries] of activeNotes) {
|
|
1119
|
+
for (const entry of entries)
|
|
1120
|
+
entry.events.push(event);
|
|
1121
|
+
}
|
|
1122
|
+
}
|
|
1123
|
+
break;
|
|
1124
|
+
case "pitchBend":
|
|
1125
|
+
case "programChange":
|
|
1126
|
+
case "channelAftertouch":
|
|
1127
|
+
case "noteAftertouch": {
|
|
1128
|
+
const ch = event.channel;
|
|
1129
|
+
for (const [key, entries] of activeNotes) {
|
|
1130
|
+
if (key % numChannels !== ch)
|
|
1131
|
+
continue;
|
|
1132
|
+
for (const entry of entries)
|
|
1133
|
+
entry.events.push(event);
|
|
1134
|
+
}
|
|
1135
|
+
}
|
|
1136
|
+
}
|
|
1137
|
+
}
|
|
1138
|
+
for (const [, stack] of activeNotes) {
|
|
1139
|
+
for (const entry of stack)
|
|
1140
|
+
finalizeEntry(entry, totalTime, Infinity);
|
|
1141
|
+
}
|
|
638
1142
|
}
|
|
639
1143
|
cacheVoiceIds() {
|
|
640
|
-
const { channels, timeline, voiceCounter } = this;
|
|
1144
|
+
const { channels, timeline, voiceCounter, cacheMode } = this;
|
|
641
1145
|
for (let i = 0; i < timeline.length; i++) {
|
|
642
1146
|
const event = timeline[i];
|
|
643
1147
|
switch (event.type) {
|
|
@@ -663,6 +1167,9 @@ export class Midy extends EventTarget {
|
|
|
663
1167
|
voiceCounter.delete(audioBufferId);
|
|
664
1168
|
}
|
|
665
1169
|
this.GM2SystemOn();
|
|
1170
|
+
if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
|
|
1171
|
+
this.buildNoteOnDurations();
|
|
1172
|
+
}
|
|
666
1173
|
}
|
|
667
1174
|
getVoiceId(channel, noteNumber, velocity) {
|
|
668
1175
|
const programNumber = channel.programNumber;
|
|
@@ -681,7 +1188,8 @@ export class Midy extends EventTarget {
|
|
|
681
1188
|
const soundFont = this.soundFonts[soundFontIndex];
|
|
682
1189
|
const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
|
|
683
1190
|
const { instrument, sampleID } = voice.generators;
|
|
684
|
-
return soundFontIndex * (2 **
|
|
1191
|
+
return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
|
|
1192
|
+
(sampleID << 8);
|
|
685
1193
|
}
|
|
686
1194
|
createChannelAudioNodes(audioContext) {
|
|
687
1195
|
const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
|
|
@@ -691,40 +1199,11 @@ export class Midy extends EventTarget {
|
|
|
691
1199
|
gainL.connect(merger, 0, 0);
|
|
692
1200
|
gainR.connect(merger, 0, 1);
|
|
693
1201
|
merger.connect(this.masterVolume);
|
|
694
|
-
return {
|
|
695
|
-
gainL,
|
|
696
|
-
gainR,
|
|
697
|
-
merger,
|
|
698
|
-
};
|
|
699
|
-
}
|
|
700
|
-
resetChannelTable(channel) {
|
|
701
|
-
channel.controlTable.set(defaultControlValues);
|
|
702
|
-
channel.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
|
|
703
|
-
channel.channelPressureTable.set(defaultPressureValues);
|
|
704
|
-
channel.polyphonicKeyPressureTable.set(defaultPressureValues);
|
|
705
|
-
channel.keyBasedTable.fill(-1);
|
|
1202
|
+
return { gainL, gainR, merger };
|
|
706
1203
|
}
|
|
707
1204
|
createChannels(audioContext) {
|
|
708
|
-
const
|
|
709
|
-
|
|
710
|
-
currentBufferSource: null,
|
|
711
|
-
isDrum: false,
|
|
712
|
-
state: new ControllerState(),
|
|
713
|
-
...this.constructor.channelSettings,
|
|
714
|
-
...this.createChannelAudioNodes(audioContext),
|
|
715
|
-
scheduledNotes: [],
|
|
716
|
-
sustainNotes: [],
|
|
717
|
-
sostenutoNotes: [],
|
|
718
|
-
controlTable: new Int8Array(defaultControlValues),
|
|
719
|
-
scaleOctaveTuningTable: new Float32Array(12), // [-100, 100] cent
|
|
720
|
-
channelPressureTable: new Int8Array(defaultPressureValues),
|
|
721
|
-
polyphonicKeyPressureTable: new Int8Array(defaultPressureValues),
|
|
722
|
-
keyBasedTable: new Int8Array(128 * 128).fill(-1),
|
|
723
|
-
keyBasedGainLs: new Array(128),
|
|
724
|
-
keyBasedGainRs: new Array(128),
|
|
725
|
-
};
|
|
726
|
-
});
|
|
727
|
-
return channels;
|
|
1205
|
+
const settings = this.constructor.channelSettings;
|
|
1206
|
+
return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
|
|
728
1207
|
}
|
|
729
1208
|
decodeOggVorbis(sample) {
|
|
730
1209
|
const task = decoderQueue.then(async () => {
|
|
@@ -783,15 +1262,26 @@ export class Midy extends EventTarget {
|
|
|
783
1262
|
return ((programNumber === 48 && noteNumber === 88) ||
|
|
784
1263
|
(programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
|
|
785
1264
|
}
|
|
786
|
-
createBufferSource(channel, noteNumber, voiceParams,
|
|
1265
|
+
createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
|
|
1266
|
+
const isRendered = renderedOrRaw instanceof RenderedBuffer;
|
|
1267
|
+
const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
|
|
787
1268
|
const bufferSource = new AudioBufferSourceNode(this.audioContext);
|
|
788
1269
|
bufferSource.buffer = audioBuffer;
|
|
789
|
-
|
|
1270
|
+
const isDrumLoop = channel.isDrum
|
|
790
1271
|
? this.isLoopDrum(channel, noteNumber)
|
|
791
|
-
:
|
|
1272
|
+
: voiceParams.sampleModes % 2 !== 0;
|
|
1273
|
+
const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
|
|
1274
|
+
bufferSource.loop = isLoop;
|
|
792
1275
|
if (bufferSource.loop) {
|
|
793
|
-
|
|
794
|
-
|
|
1276
|
+
if (isRendered && renderedOrRaw.adsDuration != null) {
|
|
1277
|
+
bufferSource.loopStart = renderedOrRaw.loopStart;
|
|
1278
|
+
bufferSource.loopEnd = renderedOrRaw.loopStart +
|
|
1279
|
+
renderedOrRaw.loopDuration;
|
|
1280
|
+
}
|
|
1281
|
+
else {
|
|
1282
|
+
bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
|
|
1283
|
+
bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
|
|
1284
|
+
}
|
|
795
1285
|
}
|
|
796
1286
|
return bufferSource;
|
|
797
1287
|
}
|
|
@@ -808,15 +1298,14 @@ export class Midy extends EventTarget {
|
|
|
808
1298
|
break;
|
|
809
1299
|
const startTime = t + schedulingOffset;
|
|
810
1300
|
switch (event.type) {
|
|
811
|
-
case "noteOn":
|
|
812
|
-
this.
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
|
|
1301
|
+
case "noteOn": {
|
|
1302
|
+
const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
|
|
1303
|
+
note.timelineIndex = queueIndex;
|
|
1304
|
+
this.setupNote(event.channel, note, startTime);
|
|
816
1305
|
break;
|
|
817
1306
|
}
|
|
818
|
-
case "
|
|
819
|
-
this.
|
|
1307
|
+
case "noteOff":
|
|
1308
|
+
this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
|
|
820
1309
|
break;
|
|
821
1310
|
case "controller":
|
|
822
1311
|
this.setControlChange(event.channel, event.controllerType, event.value, startTime);
|
|
@@ -824,14 +1313,17 @@ export class Midy extends EventTarget {
|
|
|
824
1313
|
case "programChange":
|
|
825
1314
|
this.setProgramChange(event.channel, event.programNumber, startTime);
|
|
826
1315
|
break;
|
|
827
|
-
case "channelAftertouch":
|
|
828
|
-
this.setChannelPressure(event.channel, event.amount, startTime);
|
|
829
|
-
break;
|
|
830
1316
|
case "pitchBend":
|
|
831
1317
|
this.setPitchBend(event.channel, event.value + 8192, startTime);
|
|
832
1318
|
break;
|
|
833
1319
|
case "sysEx":
|
|
834
1320
|
this.handleSysEx(event.data, startTime);
|
|
1321
|
+
break;
|
|
1322
|
+
case "channelAftertouch":
|
|
1323
|
+
this.setChannelPressure(event.channel, event.amount, startTime);
|
|
1324
|
+
break;
|
|
1325
|
+
case "noteAftertouch":
|
|
1326
|
+
this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
|
|
835
1327
|
}
|
|
836
1328
|
queueIndex++;
|
|
837
1329
|
}
|
|
@@ -852,6 +1344,7 @@ export class Midy extends EventTarget {
|
|
|
852
1344
|
this.drumExclusiveClassNotes.fill(undefined);
|
|
853
1345
|
this.voiceCache.clear();
|
|
854
1346
|
this.realtimeVoiceCache.clear();
|
|
1347
|
+
this.adsrVoiceCache.clear();
|
|
855
1348
|
const channels = this.channels;
|
|
856
1349
|
for (let ch = 0; ch < channels.length; ch++) {
|
|
857
1350
|
channels[ch].scheduledNotes = [];
|
|
@@ -878,14 +1371,104 @@ export class Midy extends EventTarget {
|
|
|
878
1371
|
break;
|
|
879
1372
|
case "sysEx":
|
|
880
1373
|
this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
|
|
1374
|
+
break;
|
|
1375
|
+
case "channelAftertouch":
|
|
1376
|
+
this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
|
|
1377
|
+
break;
|
|
1378
|
+
case "noteAftertouch":
|
|
1379
|
+
this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, now - resumeTime + event.startTime * inverseTempo);
|
|
881
1380
|
}
|
|
882
1381
|
}
|
|
883
1382
|
}
|
|
1383
|
+
async playAudioBuffer() {
|
|
1384
|
+
const audioContext = this.audioContext;
|
|
1385
|
+
const paused = this.isPaused;
|
|
1386
|
+
this.isPlaying = true;
|
|
1387
|
+
this.isPaused = false;
|
|
1388
|
+
this.startTime = audioContext.currentTime;
|
|
1389
|
+
if (paused) {
|
|
1390
|
+
this.dispatchEvent(new Event("resumed"));
|
|
1391
|
+
}
|
|
1392
|
+
else {
|
|
1393
|
+
this.dispatchEvent(new Event("started"));
|
|
1394
|
+
}
|
|
1395
|
+
let exitReason;
|
|
1396
|
+
outer: while (true) {
|
|
1397
|
+
const buffer = this.renderedAudioBuffer;
|
|
1398
|
+
const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
|
|
1399
|
+
bufferSource.playbackRate.value = this.tempo;
|
|
1400
|
+
bufferSource.connect(this.masterVolume);
|
|
1401
|
+
const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
|
|
1402
|
+
bufferSource.start(audioContext.currentTime, offset);
|
|
1403
|
+
this.audioModeBufferSource = bufferSource;
|
|
1404
|
+
let naturalEnded = false;
|
|
1405
|
+
bufferSource.onended = () => {
|
|
1406
|
+
naturalEnded = true;
|
|
1407
|
+
};
|
|
1408
|
+
while (true) {
|
|
1409
|
+
const now = audioContext.currentTime;
|
|
1410
|
+
await this.scheduleTask(() => { }, now + this.noteCheckInterval);
|
|
1411
|
+
if (naturalEnded || this.currentTime() >= this.totalTime) {
|
|
1412
|
+
bufferSource.disconnect();
|
|
1413
|
+
this.audioModeBufferSource = null;
|
|
1414
|
+
if (this.loop) {
|
|
1415
|
+
this.resumeTime = 0;
|
|
1416
|
+
this.startTime = audioContext.currentTime;
|
|
1417
|
+
this.dispatchEvent(new Event("looped"));
|
|
1418
|
+
continue outer;
|
|
1419
|
+
}
|
|
1420
|
+
await audioContext.suspend();
|
|
1421
|
+
exitReason = "ended";
|
|
1422
|
+
break outer;
|
|
1423
|
+
}
|
|
1424
|
+
if (this.isPausing) {
|
|
1425
|
+
this.resumeTime = this.currentTime();
|
|
1426
|
+
bufferSource.stop();
|
|
1427
|
+
bufferSource.disconnect();
|
|
1428
|
+
this.audioModeBufferSource = null;
|
|
1429
|
+
await audioContext.suspend();
|
|
1430
|
+
this.isPausing = false;
|
|
1431
|
+
exitReason = "paused";
|
|
1432
|
+
break outer;
|
|
1433
|
+
}
|
|
1434
|
+
else if (this.isStopping) {
|
|
1435
|
+
bufferSource.stop();
|
|
1436
|
+
bufferSource.disconnect();
|
|
1437
|
+
this.audioModeBufferSource = null;
|
|
1438
|
+
await audioContext.suspend();
|
|
1439
|
+
this.isStopping = false;
|
|
1440
|
+
exitReason = "stopped";
|
|
1441
|
+
break outer;
|
|
1442
|
+
}
|
|
1443
|
+
else if (this.isSeeking) {
|
|
1444
|
+
bufferSource.stop();
|
|
1445
|
+
bufferSource.disconnect();
|
|
1446
|
+
this.audioModeBufferSource = null;
|
|
1447
|
+
this.startTime = audioContext.currentTime;
|
|
1448
|
+
this.isSeeking = false;
|
|
1449
|
+
this.dispatchEvent(new Event("seeked"));
|
|
1450
|
+
continue outer;
|
|
1451
|
+
}
|
|
1452
|
+
}
|
|
1453
|
+
}
|
|
1454
|
+
this.isPlaying = false;
|
|
1455
|
+
if (exitReason === "paused") {
|
|
1456
|
+
this.isPaused = true;
|
|
1457
|
+
this.dispatchEvent(new Event("paused"));
|
|
1458
|
+
}
|
|
1459
|
+
else if (exitReason !== undefined) {
|
|
1460
|
+
this.isPaused = false;
|
|
1461
|
+
this.dispatchEvent(new Event(exitReason));
|
|
1462
|
+
}
|
|
1463
|
+
}
|
|
884
1464
|
async playNotes() {
|
|
885
1465
|
const audioContext = this.audioContext;
|
|
886
1466
|
if (audioContext.state === "suspended") {
|
|
887
1467
|
await audioContext.resume();
|
|
888
1468
|
}
|
|
1469
|
+
if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
|
|
1470
|
+
return await this.playAudioBuffer();
|
|
1471
|
+
}
|
|
889
1472
|
const paused = this.isPaused;
|
|
890
1473
|
this.isPlaying = true;
|
|
891
1474
|
this.isPaused = false;
|
|
@@ -1025,12 +1608,12 @@ export class Midy extends EventTarget {
|
|
|
1025
1608
|
if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
|
|
1026
1609
|
switch (data[3]) {
|
|
1027
1610
|
case 1:
|
|
1028
|
-
this.GM1SystemOn(
|
|
1611
|
+
this.GM1SystemOn();
|
|
1029
1612
|
break;
|
|
1030
1613
|
case 2: // GM System Off
|
|
1031
1614
|
break;
|
|
1032
1615
|
case 3:
|
|
1033
|
-
this.GM2SystemOn(
|
|
1616
|
+
this.GM2SystemOn();
|
|
1034
1617
|
break;
|
|
1035
1618
|
default:
|
|
1036
1619
|
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
@@ -1097,6 +1680,193 @@ export class Midy extends EventTarget {
|
|
|
1097
1680
|
this.notePromises = [];
|
|
1098
1681
|
return stopPromise;
|
|
1099
1682
|
}
|
|
1683
|
+
async render() {
|
|
1684
|
+
if (this.isRendering)
|
|
1685
|
+
return;
|
|
1686
|
+
if (this.timeline.length === 0)
|
|
1687
|
+
return;
|
|
1688
|
+
if (this.voiceCounter.size === 0)
|
|
1689
|
+
this.cacheVoiceIds();
|
|
1690
|
+
this.isRendering = true;
|
|
1691
|
+
this.renderedAudioBuffer = null;
|
|
1692
|
+
this.dispatchEvent(new Event("rendering"));
|
|
1693
|
+
const sampleRate = this.audioContext.sampleRate;
|
|
1694
|
+
const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
|
|
1695
|
+
const renderBankMSB = new Uint8Array(this.numChannels);
|
|
1696
|
+
const renderBankLSB = new Uint8Array(this.numChannels);
|
|
1697
|
+
const renderProgramNumber = new Uint8Array(this.numChannels);
|
|
1698
|
+
const renderIsDrum = new Uint8Array(this.numChannels);
|
|
1699
|
+
const renderNoteAftertouch = new Uint8Array(this.numChannels * 128);
|
|
1700
|
+
renderBankMSB.fill(121);
|
|
1701
|
+
renderIsDrum[9] = 1;
|
|
1702
|
+
renderBankMSB[9] = 120;
|
|
1703
|
+
const renderControllerStates = Array.from({ length: this.numChannels }, () => {
|
|
1704
|
+
const state = new Float32Array(256);
|
|
1705
|
+
for (const { type, defaultValue } of Object.values(defaultControllerState)) {
|
|
1706
|
+
state[type] = defaultValue;
|
|
1707
|
+
}
|
|
1708
|
+
return state;
|
|
1709
|
+
});
|
|
1710
|
+
const tasks = [];
|
|
1711
|
+
const timeline = this.timeline;
|
|
1712
|
+
const inverseTempo = 1 / this.tempo;
|
|
1713
|
+
for (let i = 0; i < timeline.length; i++) {
|
|
1714
|
+
const event = timeline[i];
|
|
1715
|
+
const ch = event.channel;
|
|
1716
|
+
switch (event.type) {
|
|
1717
|
+
case "noteOn": {
|
|
1718
|
+
const noteEvent = this.noteOnEvents.get(i);
|
|
1719
|
+
const noteDuration = noteEvent?.duration ??
|
|
1720
|
+
this.noteOnDurations.get(i) ??
|
|
1721
|
+
0;
|
|
1722
|
+
if (noteDuration <= 0)
|
|
1723
|
+
continue;
|
|
1724
|
+
const { noteNumber, velocity } = event;
|
|
1725
|
+
const isDrum = renderIsDrum[ch] === 1;
|
|
1726
|
+
const programNumber = renderProgramNumber[ch];
|
|
1727
|
+
const bankTable = this.soundFontTable[programNumber];
|
|
1728
|
+
if (!bankTable)
|
|
1729
|
+
continue;
|
|
1730
|
+
let bank = isDrum ? 128 : renderBankLSB[ch];
|
|
1731
|
+
if (bankTable[bank] === undefined) {
|
|
1732
|
+
if (isDrum)
|
|
1733
|
+
continue;
|
|
1734
|
+
bank = 0;
|
|
1735
|
+
}
|
|
1736
|
+
const soundFontIndex = bankTable[bank];
|
|
1737
|
+
if (soundFontIndex === undefined)
|
|
1738
|
+
continue;
|
|
1739
|
+
const soundFont = this.soundFonts[soundFontIndex];
|
|
1740
|
+
const pressure = renderNoteAftertouch[ch * 128 + noteNumber];
|
|
1741
|
+
const fakeChannel = {
|
|
1742
|
+
state: { array: renderControllerStates[ch].slice() },
|
|
1743
|
+
programNumber,
|
|
1744
|
+
isDrum,
|
|
1745
|
+
modulationDepthRange: 50,
|
|
1746
|
+
detune: 0,
|
|
1747
|
+
};
|
|
1748
|
+
const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity, pressure);
|
|
1749
|
+
const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
|
|
1750
|
+
if (!voice)
|
|
1751
|
+
continue;
|
|
1752
|
+
const voiceParams = voice.getAllParams(controllerState);
|
|
1753
|
+
const t = event.startTime * inverseTempo + this.startDelay;
|
|
1754
|
+
const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
|
|
1755
|
+
const promise = (async () => {
|
|
1756
|
+
try {
|
|
1757
|
+
return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
|
|
1758
|
+
}
|
|
1759
|
+
catch (err) {
|
|
1760
|
+
console.warn("render: note render failed", err);
|
|
1761
|
+
return null;
|
|
1762
|
+
}
|
|
1763
|
+
})();
|
|
1764
|
+
tasks.push({ t, promise, fakeChannel });
|
|
1765
|
+
break;
|
|
1766
|
+
}
|
|
1767
|
+
case "controller": {
|
|
1768
|
+
const { controllerType, value } = event;
|
|
1769
|
+
switch (controllerType) {
|
|
1770
|
+
case 0: // bankMSB
|
|
1771
|
+
renderBankMSB[ch] = value;
|
|
1772
|
+
if (this.mode === "GM2") {
|
|
1773
|
+
if (value === 120) {
|
|
1774
|
+
renderIsDrum[ch] = 1;
|
|
1775
|
+
}
|
|
1776
|
+
else if (value === 121) {
|
|
1777
|
+
renderIsDrum[ch] = 0;
|
|
1778
|
+
}
|
|
1779
|
+
}
|
|
1780
|
+
break;
|
|
1781
|
+
case 32: // bankLSB
|
|
1782
|
+
renderBankLSB[ch] = value;
|
|
1783
|
+
break;
|
|
1784
|
+
default: {
|
|
1785
|
+
const stateIndex = 128 + controllerType;
|
|
1786
|
+
if (stateIndex < 256) {
|
|
1787
|
+
renderControllerStates[ch][stateIndex] = value / 127;
|
|
1788
|
+
}
|
|
1789
|
+
break;
|
|
1790
|
+
}
|
|
1791
|
+
}
|
|
1792
|
+
break;
|
|
1793
|
+
}
|
|
1794
|
+
case "pitchBend":
|
|
1795
|
+
renderControllerStates[ch][14] = (event.value + 8192) / 16383;
|
|
1796
|
+
break;
|
|
1797
|
+
case "programChange":
|
|
1798
|
+
renderProgramNumber[ch] = event.programNumber;
|
|
1799
|
+
if (this.mode === "GM2") {
|
|
1800
|
+
if (renderBankMSB[ch] === 120) {
|
|
1801
|
+
renderIsDrum[ch] = 1;
|
|
1802
|
+
}
|
|
1803
|
+
else if (renderBankMSB[ch] === 121) {
|
|
1804
|
+
renderIsDrum[ch] = 0;
|
|
1805
|
+
}
|
|
1806
|
+
}
|
|
1807
|
+
break;
|
|
1808
|
+
case "sysEx": {
|
|
1809
|
+
const data = event.data;
|
|
1810
|
+
if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
|
|
1811
|
+
if (data[3] === 1) { // GM1 System On
|
|
1812
|
+
renderBankMSB.fill(0);
|
|
1813
|
+
renderBankLSB.fill(0);
|
|
1814
|
+
renderProgramNumber.fill(0);
|
|
1815
|
+
renderIsDrum.fill(0);
|
|
1816
|
+
renderIsDrum[9] = 1;
|
|
1817
|
+
renderBankMSB[9] = 1;
|
|
1818
|
+
for (let c = 0; c < this.numChannels; c++) {
|
|
1819
|
+
for (const { type, defaultValue } of Object.values(defaultControllerState)) {
|
|
1820
|
+
renderControllerStates[c][type] = defaultValue;
|
|
1821
|
+
}
|
|
1822
|
+
}
|
|
1823
|
+
renderNoteAftertouch.fill(0);
|
|
1824
|
+
}
|
|
1825
|
+
else if (data[3] === 3) { // GM2 System On
|
|
1826
|
+
renderBankMSB.fill(121);
|
|
1827
|
+
renderBankLSB.fill(0);
|
|
1828
|
+
renderProgramNumber.fill(0);
|
|
1829
|
+
renderIsDrum.fill(0);
|
|
1830
|
+
renderIsDrum[9] = 1;
|
|
1831
|
+
renderBankMSB[9] = 120;
|
|
1832
|
+
for (let c = 0; c < this.numChannels; c++) {
|
|
1833
|
+
for (const { type, defaultValue } of Object.values(defaultControllerState)) {
|
|
1834
|
+
renderControllerStates[c][type] = defaultValue;
|
|
1835
|
+
}
|
|
1836
|
+
}
|
|
1837
|
+
renderNoteAftertouch.fill(0);
|
|
1838
|
+
}
|
|
1839
|
+
}
|
|
1840
|
+
break;
|
|
1841
|
+
}
|
|
1842
|
+
case "channelAftertouch":
|
|
1843
|
+
renderControllerStates[ch][13] = event.amount / 127;
|
|
1844
|
+
break;
|
|
1845
|
+
case "noteAftertouch":
|
|
1846
|
+
renderNoteAftertouch[ch * 128 + event.noteNumber] = event.amount;
|
|
1847
|
+
break;
|
|
1848
|
+
}
|
|
1849
|
+
}
|
|
1850
|
+
const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
|
|
1851
|
+
for (let i = 0; i < tasks.length; i++) {
|
|
1852
|
+
const { t, promise } = tasks[i];
|
|
1853
|
+
const noteBuffer = await promise;
|
|
1854
|
+
if (!noteBuffer)
|
|
1855
|
+
continue;
|
|
1856
|
+
const audioBuffer = noteBuffer instanceof RenderedBuffer
|
|
1857
|
+
? noteBuffer.buffer
|
|
1858
|
+
: noteBuffer;
|
|
1859
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext, {
|
|
1860
|
+
buffer: audioBuffer,
|
|
1861
|
+
});
|
|
1862
|
+
bufferSource.connect(offlineContext.destination);
|
|
1863
|
+
bufferSource.start(t);
|
|
1864
|
+
}
|
|
1865
|
+
this.renderedAudioBuffer = await offlineContext.startRendering();
|
|
1866
|
+
this.isRendering = false;
|
|
1867
|
+
this.dispatchEvent(new Event("rendered"));
|
|
1868
|
+
return this.renderedAudioBuffer;
|
|
1869
|
+
}
|
|
1100
1870
|
async start() {
|
|
1101
1871
|
if (this.isPlaying || this.isPaused)
|
|
1102
1872
|
return;
|
|
@@ -1133,11 +1903,22 @@ export class Midy extends EventTarget {
|
|
|
1133
1903
|
}
|
|
1134
1904
|
}
|
|
1135
1905
|
tempoChange(tempo) {
|
|
1906
|
+
const cacheMode = this.cacheMode;
|
|
1136
1907
|
const timeScale = this.tempo / tempo;
|
|
1137
1908
|
this.resumeTime = this.resumeTime * timeScale;
|
|
1138
1909
|
this.tempo = tempo;
|
|
1139
1910
|
this.totalTime = this.calcTotalTime();
|
|
1140
1911
|
this.seekTo(this.currentTime() * timeScale);
|
|
1912
|
+
if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
|
|
1913
|
+
this.buildNoteOnDurations();
|
|
1914
|
+
this.fullVoiceCache.clear();
|
|
1915
|
+
this.adsrVoiceCache.clear();
|
|
1916
|
+
}
|
|
1917
|
+
if (cacheMode === "audio") {
|
|
1918
|
+
if (this.audioModeBufferSource) {
|
|
1919
|
+
this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
|
|
1920
|
+
}
|
|
1921
|
+
}
|
|
1141
1922
|
}
|
|
1142
1923
|
calcTotalTime() {
|
|
1143
1924
|
const totalTimeEventTypes = this.totalTimeEventTypes;
|
|
@@ -1158,6 +1939,9 @@ export class Midy extends EventTarget {
|
|
|
1158
1939
|
if (!this.isPlaying)
|
|
1159
1940
|
return this.resumeTime;
|
|
1160
1941
|
const now = this.audioContext.currentTime;
|
|
1942
|
+
if (this.cacheMode === "audio") {
|
|
1943
|
+
return this.resumeTime + (now - this.startTime) * this.tempo;
|
|
1944
|
+
}
|
|
1161
1945
|
return now + this.resumeTime - this.startTime;
|
|
1162
1946
|
}
|
|
1163
1947
|
async processScheduledNotes(channel, callback) {
|
|
@@ -1371,6 +2155,8 @@ export class Midy extends EventTarget {
|
|
|
1371
2155
|
}
|
|
1372
2156
|
updateChannelDetune(channel, scheduleTime) {
|
|
1373
2157
|
this.processScheduledNotes(channel, (note) => {
|
|
2158
|
+
if (note.renderedBuffer?.isFull)
|
|
2159
|
+
return;
|
|
1374
2160
|
if (this.isPortamento(channel, note)) {
|
|
1375
2161
|
this.setPortamentoDetune(channel, note, scheduleTime);
|
|
1376
2162
|
}
|
|
@@ -1462,6 +2248,8 @@ export class Midy extends EventTarget {
|
|
|
1462
2248
|
.exponentialRampToValueAtTime(sustainVolume, portamentoTime);
|
|
1463
2249
|
}
|
|
1464
2250
|
setVolumeEnvelope(channel, note, scheduleTime) {
|
|
2251
|
+
if (!note.volumeEnvelopeNode)
|
|
2252
|
+
return;
|
|
1465
2253
|
const { voiceParams, startTime, noteNumber } = note;
|
|
1466
2254
|
const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
|
|
1467
2255
|
(1 + this.getChannelAmplitudeControl(channel));
|
|
@@ -1507,9 +2295,6 @@ export class Midy extends EventTarget {
|
|
|
1507
2295
|
}
|
|
1508
2296
|
setDetune(channel, note, scheduleTime) {
|
|
1509
2297
|
const detune = this.calcNoteDetune(channel, note);
|
|
1510
|
-
note.bufferSource.detune
|
|
1511
|
-
.cancelScheduledValues(scheduleTime)
|
|
1512
|
-
.setValueAtTime(detune, scheduleTime);
|
|
1513
2298
|
const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
|
|
1514
2299
|
note.bufferSource.detune
|
|
1515
2300
|
.cancelAndHoldAtTime(scheduleTime)
|
|
@@ -1572,6 +2357,8 @@ export class Midy extends EventTarget {
|
|
|
1572
2357
|
.exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
|
|
1573
2358
|
}
|
|
1574
2359
|
setFilterEnvelope(channel, note, scheduleTime) {
|
|
2360
|
+
if (!note.filterEnvelopeNode)
|
|
2361
|
+
return;
|
|
1575
2362
|
const { voiceParams, startTime, noteNumber } = note;
|
|
1576
2363
|
const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
|
|
1577
2364
|
const baseCent = voiceParams.initialFilterFc +
|
|
@@ -1617,11 +2404,14 @@ export class Midy extends EventTarget {
|
|
|
1617
2404
|
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1618
2405
|
note.modLfo.start(note.startTime + voiceParams.delayModLFO);
|
|
1619
2406
|
note.modLfo.connect(note.modLfoToFilterFc);
|
|
1620
|
-
|
|
2407
|
+
if (note.filterEnvelopeNode) {
|
|
2408
|
+
note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
|
|
2409
|
+
}
|
|
1621
2410
|
note.modLfo.connect(note.modLfoToPitch);
|
|
1622
2411
|
note.modLfoToPitch.connect(note.bufferSource.detune);
|
|
1623
2412
|
note.modLfo.connect(note.modLfoToVolume);
|
|
1624
|
-
note.
|
|
2413
|
+
const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
|
|
2414
|
+
note.modLfoToVolume.connect(volumeTarget.gain);
|
|
1625
2415
|
}
|
|
1626
2416
|
startVibrato(channel, note, scheduleTime) {
|
|
1627
2417
|
const { voiceParams, noteNumber } = note;
|
|
@@ -1637,34 +2427,346 @@ export class Midy extends EventTarget {
|
|
|
1637
2427
|
note.vibLfo.connect(note.vibLfoToPitch);
|
|
1638
2428
|
note.vibLfoToPitch.connect(note.bufferSource.detune);
|
|
1639
2429
|
}
|
|
1640
|
-
async
|
|
2430
|
+
async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
|
|
2431
|
+
const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
|
|
2432
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2433
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2434
|
+
const decayDuration = voiceParams.volDecay;
|
|
2435
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2436
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2437
|
+
const loopDuration = isLoop
|
|
2438
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2439
|
+
: 0;
|
|
2440
|
+
const loopCount = isLoop && adsDuration > loopStartTime
|
|
2441
|
+
? Math.ceil((adsDuration - loopStartTime) / loopDuration)
|
|
2442
|
+
: 0;
|
|
2443
|
+
const alignedLoopStart = loopStartTime + loopCount * loopDuration;
|
|
2444
|
+
const renderDuration = isLoop
|
|
2445
|
+
? alignedLoopStart + loopDuration
|
|
2446
|
+
: audioBuffer.duration;
|
|
2447
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
|
|
2448
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2449
|
+
bufferSource.buffer = audioBuffer;
|
|
2450
|
+
bufferSource.playbackRate.value = voiceParams.playbackRate;
|
|
2451
|
+
bufferSource.loop = isLoop;
|
|
2452
|
+
if (isLoop) {
|
|
2453
|
+
bufferSource.loopStart = loopStartTime;
|
|
2454
|
+
bufferSource.loopEnd = loopStartTime + loopDuration;
|
|
2455
|
+
}
|
|
2456
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2457
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2458
|
+
type: "lowpass",
|
|
2459
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2460
|
+
frequency: initialFreq,
|
|
2461
|
+
});
|
|
2462
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2463
|
+
const offlineNote = {
|
|
2464
|
+
...note,
|
|
2465
|
+
startTime: 0,
|
|
2466
|
+
bufferSource,
|
|
2467
|
+
filterEnvelopeNode,
|
|
2468
|
+
volumeEnvelopeNode,
|
|
2469
|
+
};
|
|
2470
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2471
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2472
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2473
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2474
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2475
|
+
if (voiceParams.sample.type === "compressed") {
|
|
2476
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2477
|
+
}
|
|
2478
|
+
else {
|
|
2479
|
+
bufferSource.start(0);
|
|
2480
|
+
}
|
|
2481
|
+
const buffer = await offlineContext.startRendering();
|
|
2482
|
+
return new RenderedBuffer(buffer, {
|
|
2483
|
+
isLoop,
|
|
2484
|
+
adsDuration,
|
|
2485
|
+
loopStart: alignedLoopStart,
|
|
2486
|
+
loopDuration,
|
|
2487
|
+
});
|
|
2488
|
+
}
|
|
2489
|
+
async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
|
|
2490
|
+
const isLoop = voiceParams.sampleModes % 2 !== 0;
|
|
2491
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2492
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2493
|
+
const decayDuration = voiceParams.volDecay;
|
|
2494
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2495
|
+
const releaseDuration = voiceParams.volRelease;
|
|
2496
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2497
|
+
const loopDuration = isLoop
|
|
2498
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2499
|
+
: 0;
|
|
2500
|
+
const noteLoopCount = isLoop && noteDuration > loopStartTime
|
|
2501
|
+
? Math.ceil((noteDuration - loopStartTime) / loopDuration)
|
|
2502
|
+
: 0;
|
|
2503
|
+
const alignedNoteEnd = isLoop
|
|
2504
|
+
? loopStartTime + noteLoopCount * loopDuration
|
|
2505
|
+
: noteDuration;
|
|
2506
|
+
const noteOffTime = alignedNoteEnd;
|
|
2507
|
+
const totalDuration = noteOffTime + releaseDuration;
|
|
2508
|
+
const sampleRate = this.audioContext.sampleRate;
|
|
2509
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
|
|
2510
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2511
|
+
bufferSource.buffer = audioBuffer;
|
|
2512
|
+
bufferSource.playbackRate.value = voiceParams.playbackRate;
|
|
2513
|
+
bufferSource.loop = isLoop;
|
|
2514
|
+
if (isLoop) {
|
|
2515
|
+
bufferSource.loopStart = loopStartTime;
|
|
2516
|
+
bufferSource.loopEnd = loopStartTime + loopDuration;
|
|
2517
|
+
}
|
|
2518
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2519
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2520
|
+
type: "lowpass",
|
|
2521
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2522
|
+
frequency: initialFreq,
|
|
2523
|
+
});
|
|
2524
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2525
|
+
const offlineNote = {
|
|
2526
|
+
...note,
|
|
2527
|
+
startTime: 0,
|
|
2528
|
+
bufferSource,
|
|
2529
|
+
filterEnvelopeNode,
|
|
2530
|
+
volumeEnvelopeNode,
|
|
2531
|
+
};
|
|
2532
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2533
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2534
|
+
const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
|
|
2535
|
+
const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
|
|
2536
|
+
const volDelayTime = voiceParams.volDelay;
|
|
2537
|
+
const volAttackTime = volDelayTime + voiceParams.volAttack;
|
|
2538
|
+
const volHoldTime = volAttackTime + voiceParams.volHold;
|
|
2539
|
+
let gainAtNoteOff;
|
|
2540
|
+
if (noteOffTime <= volDelayTime) {
|
|
2541
|
+
gainAtNoteOff = 0;
|
|
2542
|
+
}
|
|
2543
|
+
else if (noteOffTime <= volAttackTime) {
|
|
2544
|
+
gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
|
|
2545
|
+
(noteOffTime - volDelayTime) / voiceParams.volAttack;
|
|
2546
|
+
}
|
|
2547
|
+
else if (noteOffTime <= volHoldTime) {
|
|
2548
|
+
gainAtNoteOff = attackVolume;
|
|
2549
|
+
}
|
|
2550
|
+
else {
|
|
2551
|
+
const decayElapsed = noteOffTime - volHoldTime;
|
|
2552
|
+
gainAtNoteOff = sustainVolume +
|
|
2553
|
+
(attackVolume - sustainVolume) *
|
|
2554
|
+
Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
|
|
2555
|
+
}
|
|
2556
|
+
volumeEnvelopeNode.gain
|
|
2557
|
+
.cancelScheduledValues(noteOffTime)
|
|
2558
|
+
.setValueAtTime(gainAtNoteOff, noteOffTime)
|
|
2559
|
+
.setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
|
|
2560
|
+
filterEnvelopeNode.frequency
|
|
2561
|
+
.cancelScheduledValues(noteOffTime)
|
|
2562
|
+
.setValueAtTime(initialFreq, noteOffTime)
|
|
2563
|
+
.setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
|
|
2564
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2565
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2566
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2567
|
+
if (isLoop) {
|
|
2568
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2569
|
+
}
|
|
2570
|
+
else {
|
|
2571
|
+
bufferSource.start(0);
|
|
2572
|
+
}
|
|
2573
|
+
const buffer = await offlineContext.startRendering();
|
|
2574
|
+
return new RenderedBuffer(buffer, {
|
|
2575
|
+
isLoop: false,
|
|
2576
|
+
isFull: false,
|
|
2577
|
+
adsDuration,
|
|
2578
|
+
noteDuration: noteOffTime,
|
|
2579
|
+
releaseDuration,
|
|
2580
|
+
});
|
|
2581
|
+
}
|
|
     2582 | +   async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
     2583 | +     const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
     2584 | +     const ch = note.channel ?? 0;
     2585 | +     const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
     2586 | +     const totalDuration = noteDuration + releaseEndDuration;
     2587 | +     const sampleRate = this.audioContext.sampleRate;
     2588 | +     const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
     2589 | +     const offlinePlayer = new this.constructor(offlineContext, {
     2590 | +       cacheMode: "none",
     2591 | +     });
     2592 | +     offlineContext.suspend = () => Promise.resolve();
     2593 | +     offlineContext.resume = () => Promise.resolve();
     2594 | +     offlinePlayer.soundFonts = this.soundFonts;
     2595 | +     offlinePlayer.soundFontTable = this.soundFontTable;
     2596 | +     const dstChannel = offlinePlayer.channels[ch];
     2597 | +     dstChannel.state.array.set(channel.state.array);
     2598 | +     dstChannel.isDrum = channel.isDrum;
     2599 | +     dstChannel.programNumber = channel.programNumber;
     2600 | +     dstChannel.modulationDepthRange = channel.modulationDepthRange;
     2601 | +     dstChannel.detune = this.calcChannelDetune(dstChannel);
     2602 | +     await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
     2603 | +     for (const event of noteEvents) {
     2604 | +       const t = event.startTime / this.tempo - noteStartTime;
     2605 | +       if (t < 0 || t > noteDuration)
     2606 | +         continue;
     2607 | +       switch (event.type) {
     2608 | +         case "controller":
     2609 | +           offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
     2610 | +           break;
     2611 | +         case "pitchBend":
     2612 | +           offlinePlayer.setPitchBend(ch, event.value + 8192, t);
     2613 | +           break;
     2614 | +         case "sysEx":
     2615 | +           offlinePlayer.handleSysEx(event.data, t);
     2616 | +           break;
     2617 | +         case "channelAftertouch":
     2618 | +           offlinePlayer.setChannelPressure(ch, event.amount, t);
     2619 | +           break;
     2620 | +         case "noteAftertouch":
     2621 | +           offlinePlayer.setPolyphonicKeyPressure(ch, event.noteNumber, event.amount, t);
     2622 | +       }
     2623 | +     }
     2624 | +     offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
     2625 | +     const buffer = await offlineContext.startRendering();
     2626 | +     return new RenderedBuffer(buffer, {
     2627 | +       isLoop: false,
     2628 | +       isFull: true,
     2629 | +       noteDuration: noteDuration,
     2630 | +       releaseDuration: releaseEndDuration,
     2631 | +     });
     2632 | +   }
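
createFullRenderedBuffer follows the standard Web Audio pre-rendering pattern: build the note's graph in an OfflineAudioContext (here via a nested player with cacheMode "none"), replay the note's events into it, render once, and later play the resulting AudioBuffer through the realtime context. A minimal standalone sketch of that general pattern (not midy's code, names are illustrative):

async function prerender(realtimeCtx, buildGraph, seconds) {
  const sampleRate = realtimeCtx.sampleRate;
  const offline = new OfflineAudioContext(2, Math.ceil(seconds * sampleRate), sampleRate);
  buildGraph(offline);                       // connect sources/effects to offline.destination
  const rendered = await offline.startRendering();
  const src = new AudioBufferSourceNode(realtimeCtx, { buffer: rendered });
  src.connect(realtimeCtx.destination);
  return src;                                // caller decides when to call start()
}
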
     2633 | +   async getAudioBuffer(channel, note, realtime) {
     2634 | +     const cacheMode = this.cacheMode;
     2635 | +     const { noteNumber, velocity } = note;
1641 2636 |       const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
     2637 | +     if (!realtime) {
     2638 | +       if (cacheMode === "note") {
     2639 | +         return await this.getFullCachedBuffer(note, audioBufferId);
     2640 | +       }
     2641 | +       else if (cacheMode === "adsr") {
     2642 | +         return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
     2643 | +       }
     2644 | +     }
     2645 | +     if (cacheMode === "none") {
     2646 | +       return await this.createAudioBuffer(note.voiceParams);
     2647 | +     }
     2648 | +     // fallback to ADS cache:
     2649 | +     // - "ads" (realtime or not)
     2650 | +     // - "adsr" + realtime
     2651 | +     // - "note" + realtime
     2652 | +     return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
     2653 | +   }
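
getAudioBuffer is where the cacheMode chosen at construction time takes effect: scheduled playback goes to the "note" or "adsr" caches, realtime noteOn() calls fall back to the ADS cache, and "none" renders every note live. A hedged usage sketch; the import specifier and named export are assumed from the package name and the class name in the hunk headers, only the cacheMode option is confirmed by this diff:

import { Midy } from "@marmooo/midy"; // assumed entry point

const audioContext = new AudioContext();
// "adsr" pre-renders the full envelope per (voice, duration, release) key,
// while realtime noteOn() still works via the ADS fallback above.
const midy = new Midy(audioContext, { cacheMode: "adsr" });
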
     2654 | +   async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
     2655 | +     const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
     2656 | +     const voiceParams = note.voiceParams;
1642 2657 |       if (realtime) {
1643      | -       const
1644      | -       if (
1645      | -       return
1646      | -       const
1647      | -       this.
1648      | -
     2658 | +       const cached = this.realtimeVoiceCache.get(cacheKey);
     2659 | +       if (cached)
     2660 | +         return cached;
     2661 | +       const rawBuffer = await this.createAudioBuffer(voiceParams);
     2662 | +       const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
     2663 | +       this.realtimeVoiceCache.set(cacheKey, rendered);
     2664 | +       return rendered;
1649 2665 |       }
1650 2666 |       else {
1651      | -       const cache = this.voiceCache.get(
     2667 | +       const cache = this.voiceCache.get(cacheKey);
1652 2668 |         if (cache) {
1653 2669 |           cache.counter += 1;
1654 2670 |           if (cache.maxCount <= cache.counter) {
1655      | -           this.voiceCache.delete(
     2671 | +           this.voiceCache.delete(cacheKey);
1656 2672 |           }
1657 2673 |           return cache.audioBuffer;
1658 2674 |         }
1659 2675 |         else {
1660      | -         const maxCount = this.voiceCounter.get(
1661      | -         const
1662      | -         const
1663      | -
1664      | -
     2676 | +         const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
     2677 | +         const rawBuffer = await this.createAudioBuffer(voiceParams);
     2678 | +         const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
     2679 | +         const cache = { audioBuffer: rendered, maxCount, counter: 1 };
     2680 | +         this.voiceCache.set(cacheKey, cache);
     2681 | +         return rendered;
1665 2682 |         }
1666 2683 |       }
1667 2684 |     }
     2685 | +   async getAdsrCachedBuffer(channel, note, audioBufferId) {
     2686 | +     const voiceParams = note.voiceParams;
     2687 | +     const timelineIndex = note.timelineIndex;
     2688 | +     const noteEvent = this.noteOnEvents.get(timelineIndex);
     2689 | +     const noteDurationTicks = noteEvent?.durationTicks ?? 0;
     2690 | +     const safeTicks = noteDurationTicks === Infinity
     2691 | +       ? 0xffffffffn
     2692 | +       : BigInt(noteDurationTicks);
     2693 | +     const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
     2694 | +     const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
     2695 | +     const cacheKey = (BigInt(audioBufferId) << 160n) |
     2696 | +       (playbackRateBits << 96n) |
     2697 | +       (safeTicks << 64n) |
     2698 | +       volReleaseBits;
     2699 | +     let durationMap = this.adsrVoiceCache.get(audioBufferId);
     2700 | +     if (!durationMap) {
     2701 | +       durationMap = new Map();
     2702 | +       this.adsrVoiceCache.set(audioBufferId, durationMap);
     2703 | +     }
     2704 | +     const cached = durationMap.get(cacheKey);
     2705 | +     if (cached instanceof RenderedBuffer) {
     2706 | +       return cached;
     2707 | +     }
     2708 | +     if (cached instanceof Promise) {
     2709 | +       const buf = await cached;
     2710 | +       if (buf == null)
     2711 | +         return await this.createAudioBuffer(voiceParams);
     2712 | +       return buf;
     2713 | +     }
     2714 | +     const noteDuration = noteEvent?.duration ?? 0;
     2715 | +     const renderPromise = (async () => {
     2716 | +       try {
     2717 | +         const rawBuffer = await this.createAudioBuffer(voiceParams);
     2718 | +         const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
     2719 | +         durationMap.set(cacheKey, rendered);
     2720 | +         return rendered;
     2721 | +       }
     2722 | +       catch (err) {
     2723 | +         durationMap.delete(cacheKey);
     2724 | +         throw err;
     2725 | +       }
     2726 | +     })();
     2727 | +     durationMap.set(cacheKey, renderPromise);
     2728 | +     return await renderPromise;
     2729 | +   }
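
The ADSR cache key packs four values into one BigInt so a single Map lookup covers voice, playback rate, note duration (in tempo-independent ticks) and release shape. A sketch of the bit layout, assuming f64ToBigInt returns the 64-bit IEEE-754 bit pattern of a double (the real helper is defined elsewhere in midy.js):

// Assumed helper: IEEE-754 bit pattern of a double as a BigInt.
function f64ToBigInt(x) {
  const view = new DataView(new ArrayBuffer(8));
  view.setFloat64(0, x);
  return view.getBigUint64(0);
}

// Layout used above, low to high bits:
// volRelease (64) | ticks (32) | playbackRate (64) | audioBufferId (remaining high bits)
const key = (BigInt(123) << 160n) |
  (f64ToBigInt(1.0594630943592953) << 96n) |
  (480n << 64n) |
  f64ToBigInt(0.25);
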
     2730 | +   async getFullCachedBuffer(note, audioBufferId) {
     2731 | +     const voiceParams = note.voiceParams;
     2732 | +     const timelineIndex = note.timelineIndex;
     2733 | +     const noteEvent = this.noteOnEvents.get(timelineIndex);
     2734 | +     const noteDuration = noteEvent?.duration ?? 0;
     2735 | +     const cacheKey = timelineIndex;
     2736 | +     let durationMap = this.fullVoiceCache.get(audioBufferId);
     2737 | +     if (!durationMap) {
     2738 | +       durationMap = new Map();
     2739 | +       this.fullVoiceCache.set(audioBufferId, durationMap);
     2740 | +     }
     2741 | +     const cached = durationMap.get(cacheKey);
     2742 | +     if (cached instanceof RenderedBuffer) {
     2743 | +       note.fullCacheVoiceId = audioBufferId;
     2744 | +       return cached;
     2745 | +     }
     2746 | +     if (cached instanceof Promise) {
     2747 | +       const buf = await cached;
     2748 | +       if (buf == null)
     2749 | +         return await this.createAudioBuffer(voiceParams);
     2750 | +       note.fullCacheVoiceId = audioBufferId;
     2751 | +       return buf;
     2752 | +     }
     2753 | +     const renderPromise = (async () => {
     2754 | +       try {
     2755 | +         const rawBuffer = await this.createAudioBuffer(voiceParams);
     2756 | +         const rendered = await this.createFullRenderedBuffer(note, voiceParams, rawBuffer, noteDuration, noteEvent);
     2757 | +         durationMap.set(cacheKey, rendered);
     2758 | +         return rendered;
     2759 | +       }
     2760 | +       catch (err) {
     2761 | +         durationMap.delete(cacheKey);
     2762 | +         throw err;
     2763 | +       }
     2764 | +     })();
     2765 | +     durationMap.set(cacheKey, renderPromise);
     2766 | +     const rendered = await renderPromise;
     2767 | +     note.fullCacheVoiceId = audioBufferId;
     2768 | +     return rendered;
     2769 | +   }
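
Both getAdsrCachedBuffer and getFullCachedBuffer store the in-flight Promise in the cache before awaiting it, so concurrent requests for the same key share one offline render instead of starting duplicates, and a failed render is evicted so it can be retried. The pattern in isolation (illustrative sketch, not the package's code):

const cache = new Map();

async function getOrRender(key, render) {
  const hit = cache.get(key);
  if (hit) return hit instanceof Promise ? await hit : hit;
  const promise = (async () => {
    try {
      const value = await render();
      cache.set(key, value);   // replace the Promise with the resolved value
      return value;
    } catch (err) {
      cache.delete(key);       // let a later call retry after a failure
      throw err;
    }
  })();
  cache.set(key, promise);
  return await promise;
}
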
1668 2770 |     async setNoteAudioNode(channel, note, realtime) {
1669 2771 |       const audioContext = this.audioContext;
1670 2772 |       const now = audioContext.currentTime;
@@ -1673,50 +2775,72 @@ export class Midy extends EventTarget {
1673 2775 |       const controllerState = this.getControllerState(channel, noteNumber, velocity, 0);
1674 2776 |       const voiceParams = note.voice.getAllParams(controllerState);
1675 2777 |       note.voiceParams = voiceParams;
1676      | -     const audioBuffer = await this.getAudioBuffer(channel,
     2778 | +     const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
     2779 | +     const isRendered = audioBuffer instanceof RenderedBuffer;
     2780 | +     note.renderedBuffer = isRendered ? audioBuffer : null;
1677 2781 |       note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
1678      | -     note.volumeEnvelopeNode = new GainNode(audioContext);
1679 2782 |       note.volumeNode = new GainNode(audioContext);
1680      | -
1681      | -
1682      | -
1683      | -
1684      | -
1685      | -
1686      | -
1687      | -
1688      | -
1689      | -
1690      | -
1691      | -
1692      | -
1693      | -     this.
1694      | -
1695      | -
1696      | -
1697      | -
1698      | -
1699      | -
     2783 | +     note.volumeNode.gain.setValueAtTime(1, now);
     2784 | +     const cacheMode = this.cacheMode;
     2785 | +     const isFullCached = isRendered && audioBuffer.isFull === true;
     2786 | +     if (cacheMode === "none") {
     2787 | +       note.volumeEnvelopeNode = new GainNode(audioContext);
     2788 | +       note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
     2789 | +         type: "lowpass",
     2790 | +         Q: voiceParams.initialFilterQ / 10, // dB
     2791 | +       });
     2792 | +       const prevNote = channel.scheduledNotes.at(-1);
     2793 | +       if (prevNote && prevNote.noteNumber !== noteNumber) {
     2794 | +         note.portamentoNoteNumber = prevNote.noteNumber;
     2795 | +       }
     2796 | +       if (!channel.isDrum && this.isPortamento(channel, note)) {
     2797 | +         this.setPortamentoVolumeEnvelope(channel, note, now);
     2798 | +         this.setPortamentoFilterEnvelope(channel, note, now);
     2799 | +         this.setPortamentoPitchEnvelope(channel, note, now);
     2800 | +         this.setPortamentoDetune(channel, note, now);
     2801 | +       }
     2802 | +       else {
     2803 | +         this.setVolumeEnvelope(channel, note, now);
     2804 | +         this.setFilterEnvelope(channel, note, now);
     2805 | +         this.setPitchEnvelope(note, now);
     2806 | +         this.setDetune(channel, note, now);
     2807 | +       }
     2808 | +       if (0 < state.vibratoDepth) {
     2809 | +         this.startVibrato(channel, note, now);
     2810 | +       }
     2811 | +       if (0 < state.modulationDepthMSB) {
     2812 | +         this.startModulation(channel, note, now);
     2813 | +       }
     2814 | +       if (channel.mono && channel.currentBufferSource) {
     2815 | +         channel.currentBufferSource.stop(startTime);
     2816 | +         channel.currentBufferSource = note.bufferSource;
     2817 | +       }
     2818 | +       note.bufferSource.connect(note.filterEnvelopeNode);
     2819 | +       note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
     2820 | +       note.volumeEnvelopeNode.connect(note.volumeNode);
     2821 | +       this.setChorusSend(channel, note, now);
     2822 | +       this.setReverbSend(channel, note, now);
     2823 | +     }
     2824 | +     else if (isFullCached) { // "note" mode
     2825 | +       note.volumeEnvelopeNode = null;
     2826 | +       note.filterEnvelopeNode = null;
     2827 | +       note.bufferSource.connect(note.volumeNode);
     2828 | +       this.setChorusSend(channel, note, now);
     2829 | +       this.setReverbSend(channel, note, now);
     2830 | +     }
     2831 | +     else { // "ads" / "asdr" mode
     2832 | +       note.volumeEnvelopeNode = null;
     2833 | +       note.filterEnvelopeNode = null;
1700 2834 |         this.setDetune(channel, note, now);
     2835 | +       if (0 < state.modulationDepthMSB) {
     2836 | +         this.startModulation(channel, note, now);
     2837 | +       }
     2838 | +       note.bufferSource.connect(note.volumeNode);
     2839 | +       this.setChorusSend(channel, note, now);
     2840 | +       this.setReverbSend(channel, note, now);
1701 2841 |       }
1702      | -     if (0 < state.vibratoDepth) {
1703      | -       this.startVibrato(channel, note, now);
1704      | -     }
1705      | -     if (0 < state.modulationDepthMSB + state.modulationDepthLSB) {
1706      | -       this.startModulation(channel, note, now);
1707      | -     }
1708      | -     if (channel.mono && channel.currentBufferSource) {
1709      | -       channel.currentBufferSource.stop(startTime);
1710      | -       channel.currentBufferSource = note.bufferSource;
1711      | -     }
1712      | -     note.bufferSource.connect(note.filterEnvelopeNode);
1713      | -     note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
1714      | -     note.volumeEnvelopeNode.connect(note.volumeNode);
1715      | -     this.setChorusSend(channel, note, now);
1716      | -     this.setReverbSend(channel, note, now);
1717 2842 |       if (voiceParams.sample.type === "compressed") {
1718      | -
1719      | -       note.bufferSource.start(startTime, offset);
     2843 | +       note.bufferSource.start(startTime);
1720 2844 |       }
1721 2845 |       else {
1722 2846 |         note.bufferSource.start(startTime);
@@ -1758,51 +2882,58 @@ export class Midy extends EventTarget {
1758 2882 |     }
1759 2883 |     setNoteRouting(channelNumber, note, startTime) {
1760 2884 |       const channel = this.channels[channelNumber];
1761      | -     const {
1762      | -     if (
1763      | -
1764      | -       let gainL = keyBasedGainLs[noteNumber];
1765      | -       let gainR = keyBasedGainRs[noteNumber];
1766      | -       if (!gainL) {
1767      | -         const audioNodes = this.createChannelAudioNodes(this.audioContext);
1768      | -         gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
1769      | -         gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
1770      | -       }
1771      | -       volumeNode.connect(gainL);
1772      | -       volumeNode.connect(gainR);
     2885 | +     const { volumeNode } = note;
     2886 | +     if (note.renderedBuffer?.isFull) {
     2887 | +       volumeNode.connect(this.masterVolume);
1773 2888 |       }
1774 2889 |       else {
1775      | -
1776      | -
1777      | -
1778      | -
1779      | -
     2890 | +       if (channel.isDrum) {
     2891 | +         const noteNumber = note.noteNumber;
     2892 | +         const { keyBasedGainLs, keyBasedGainRs } = channel;
     2893 | +         let gainL = keyBasedGainLs[noteNumber];
     2894 | +         let gainR = keyBasedGainRs[noteNumber];
     2895 | +         if (!gainL) {
     2896 | +           const audioNodes = this.createChannelAudioNodes(this.audioContext);
     2897 | +           gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
     2898 | +           gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
     2899 | +         }
     2900 | +         volumeNode.connect(gainL);
     2901 | +         volumeNode.connect(gainR);
     2902 | +       }
     2903 | +       else {
     2904 | +         volumeNode.connect(channel.gainL);
     2905 | +         volumeNode.connect(channel.gainR);
     2906 | +       }
1780 2907 |       }
1781 2908 |       this.handleExclusiveClass(note, channelNumber, startTime);
1782 2909 |       this.handleDrumExclusiveClass(note, channelNumber, startTime);
1783 2910 |     }
1784 2911 |     async noteOn(channelNumber, noteNumber, velocity, startTime) {
1785 2912 |       if (this.mpeEnabled) {
1786      | -       const
     2913 | +       const channel = this.channels[channelNumber];
     2914 | +       const noteIndex = channel.scheduledNotes.length;
1787 2915 |         if (!this.mpeState.channelToNotes.has(channelNumber)) {
1788 2916 |           this.mpeState.channelToNotes.set(channelNumber, new Set());
1789 2917 |         }
1790      | -       this.mpeState.channelToNotes.get(channelNumber).add(
1791      | -       this.mpeState.noteToChannel.set(
1792      | -     }
1793      | -     else {
1794      | -       await this.startNote(channelNumber, noteNumber, velocity, startTime);
     2918 | +       this.mpeState.channelToNotes.get(channelNumber).add(noteIndex);
     2919 | +       this.mpeState.noteToChannel.set(noteIndex, channelNumber);
1795 2920 |       }
     2921 | +     const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
     2922 | +     return await this.setupNote(channelNumber, note, startTime);
1796 2923 |     }
1797      | -
1798      | -
1799      | -   const realtime = startTime === undefined;
1800      | -   if (realtime)
     2924 | +   createNote(channelNumber, noteNumber, velocity, startTime) {
     2925 | +     if (!(0 <= startTime))
1801 2926 |         startTime = this.audioContext.currentTime;
1802 2927 |       const note = new Note(noteNumber, velocity, startTime);
1803      | -
1804      | -
1805      | -   scheduledNotes.
     2928 | +     note.channel = channelNumber;
     2929 | +     const channel = this.channels[channelNumber];
     2930 | +     note.index = channel.scheduledNotes.length;
     2931 | +     channel.scheduledNotes.push(note);
     2932 | +     return note;
     2933 | +   }
     2934 | +   async setupNote(channelNumber, note, startTime) {
     2935 | +     const realtime = startTime === undefined;
     2936 | +     const channel = this.channels[channelNumber];
1806 2937 |       const programNumber = channel.programNumber;
1807 2938 |       const bankTable = this.soundFontTable[programNumber];
1808 2939 |       if (!bankTable)
@@ -1817,18 +2948,24 @@ export class Midy extends EventTarget {
1817 2948 |       if (soundFontIndex === undefined)
1818 2949 |         return;
1819 2950 |       const soundFont = this.soundFonts[soundFontIndex];
1820      | -     note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
     2951 | +     note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
1821 2952 |       if (!note.voice)
1822 2953 |         return;
1823 2954 |       await this.setNoteAudioNode(channel, note, realtime);
1824 2955 |       this.setNoteRouting(channelNumber, note, startTime);
1825 2956 |       note.resolveReady();
     2957 | +     if (0.5 <= channel.state.sustainPedal) {
     2958 | +       channel.sustainNotes.push(note);
     2959 | +     }
     2960 | +     if (0.5 <= channel.state.sostenutoPedal) {
     2961 | +       channel.sostenutoNotes.push(note);
     2962 | +     }
1826 2963 |       return note;
1827 2964 |     }
1828 2965 |     disconnectNote(note) {
1829 2966 |       note.bufferSource.disconnect();
1830      | -     note.filterEnvelopeNode
1831      | -     note.volumeEnvelopeNode
     2967 | +     note.filterEnvelopeNode?.disconnect();
     2968 | +     note.volumeEnvelopeNode?.disconnect();
1832 2969 |       note.volumeNode.disconnect();
1833 2970 |       if (note.modLfoToPitch) {
1834 2971 |         note.modLfoToVolume.disconnect();
@@ -1846,17 +2983,114 @@ export class Midy extends EventTarget {
1846 2983 |         note.chorusSend.disconnect();
1847 2984 |       }
1848 2985 |     }
     2986 | +   releaseFullCache(note) {
     2987 | +     if (note.timelineIndex == null || note.fullCacheVoiceId == null)
     2988 | +       return;
     2989 | +     const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
     2990 | +     if (!durationMap)
     2991 | +       return;
     2992 | +     const entry = durationMap.get(note.timelineIndex);
     2993 | +     if (entry instanceof RenderedBuffer) {
     2994 | +       durationMap.delete(note.timelineIndex);
     2995 | +       if (durationMap.size === 0) {
     2996 | +         this.fullVoiceCache.delete(note.fullCacheVoiceId);
     2997 | +       }
     2998 | +     }
     2999 | +   }
1849 3000 |     releaseNote(channel, note, endTime) {
1850 3001 |       endTime ??= this.audioContext.currentTime;
     3002 | +     if (note.renderedBuffer?.isFull) {
     3003 | +       const rb = note.renderedBuffer;
     3004 | +       const naturalEndTime = note.startTime + rb.buffer.duration;
     3005 | +       const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
     3006 | +       const isEarlyCut = endTime < noteOffTime;
     3007 | +       if (isEarlyCut) {
     3008 | +         const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
     3009 | +         const volDuration = note.voiceParams.volRelease * releaseTime;
     3010 | +         const volRelease = endTime + volDuration;
     3011 | +         note.volumeNode.gain
     3012 | +           .cancelScheduledValues(endTime)
     3013 | +           .setValueAtTime(1, endTime)
     3014 | +           .setTargetAtTime(0, endTime, volDuration * releaseCurve);
     3015 | +         return new Promise((resolve) => {
     3016 | +           this.scheduleTask(() => {
     3017 | +             note.bufferSource.loop = false;
     3018 | +             note.bufferSource.stop(volRelease);
     3019 | +             this.disconnectNote(note);
     3020 | +             channel.scheduledNotes[note.index] = undefined;
     3021 | +             this.releaseFullCache(note);
     3022 | +             resolve();
     3023 | +           }, volRelease);
     3024 | +         });
     3025 | +       }
     3026 | +       else {
     3027 | +         const now = this.audioContext.currentTime;
     3028 | +         if (naturalEndTime <= now) {
     3029 | +           this.disconnectNote(note);
     3030 | +           channel.scheduledNotes[note.index] = undefined;
     3031 | +           this.releaseFullCache(note);
     3032 | +           return Promise.resolve();
     3033 | +         }
     3034 | +         return new Promise((resolve) => {
     3035 | +           this.scheduleTask(() => {
     3036 | +             this.disconnectNote(note);
     3037 | +             channel.scheduledNotes[note.index] = undefined;
     3038 | +             this.releaseFullCache(note);
     3039 | +             resolve();
     3040 | +           }, naturalEndTime);
     3041 | +         });
     3042 | +       }
     3043 | +     }
1851 3044 |       const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
1852 3045 |       const volDuration = note.voiceParams.volRelease * releaseTime;
1853 3046 |       const volRelease = endTime + volDuration;
1854      | -     note.
1855      | -       .
1856      | -
1857      | -
1858      | -       .
1859      | -
     3047 | +     if (note.volumeEnvelopeNode) { // "none" mode
     3048 | +       note.filterEnvelopeNode.frequency
     3049 | +         .cancelScheduledValues(endTime)
     3050 | +         .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
     3051 | +       note.volumeEnvelopeNode.gain
     3052 | +         .cancelScheduledValues(endTime)
     3053 | +         .setTargetAtTime(0, endTime, volDuration * releaseCurve);
     3054 | +     }
     3055 | +     else { // "ads" / "adsr" mode
     3056 | +       const isAdsr = note.renderedBuffer?.releaseDuration != null &&
     3057 | +         !note.renderedBuffer.isFull;
     3058 | +       if (isAdsr) {
     3059 | +         const rb = note.renderedBuffer;
     3060 | +         const naturalEndTime = note.startTime + rb.buffer.duration;
     3061 | +         const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
     3062 | +         const isEarlyCut = endTime < noteOffTime;
     3063 | +         if (isEarlyCut) {
     3064 | +           const volRelease = endTime + volDuration;
     3065 | +           note.volumeNode.gain
     3066 | +             .cancelScheduledValues(endTime)
     3067 | +             .setValueAtTime(1, endTime)
     3068 | +             .setTargetAtTime(0, endTime, volDuration * releaseCurve);
     3069 | +           return new Promise((resolve) => {
     3070 | +             this.scheduleTask(() => {
     3071 | +               note.bufferSource.stop(volRelease);
     3072 | +               this.disconnectNote(note);
     3073 | +               channel.scheduledNotes[note.index] = undefined;
     3074 | +               resolve();
     3075 | +             }, volRelease);
     3076 | +           });
     3077 | +         }
     3078 | +         else {
     3079 | +           return new Promise((resolve) => {
     3080 | +             this.scheduleTask(() => {
     3081 | +               note.bufferSource.stop();
     3082 | +               this.disconnectNote(note);
     3083 | +               channel.scheduledNotes[note.index] = undefined;
     3084 | +               resolve();
     3085 | +             }, naturalEndTime);
     3086 | +           });
     3087 | +         }
     3088 | +       }
     3089 | +       note.volumeNode.gain
     3090 | +         .cancelScheduledValues(endTime)
     3091 | +         .setValueAtTime(1, endTime)
     3092 | +         .setTargetAtTime(0, endTime, volDuration * releaseCurve);
     3093 | +     }
1860 3094 |       return new Promise((resolve) => {
1861 3095 |         this.scheduleTask(() => {
1862 3096 |           const bufferSource = note.bufferSource;
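
The release paths above rely on setTargetAtTime, which approaches the target exponentially with the given time constant; after about five time constants the value is within roughly 0.7% of the target, which is why the pre-rendered buffers reserve volRelease * releaseCurve * 5 of tail (line 2585 above). A quick numeric check of that rule of thumb:

// setTargetAtTime(target, t0, tau) follows v(t) = target + (v0 - target) * exp(-(t - t0) / tau)
const v0 = 1, target = 0, tau = 0.2;
const remaining = (t) => target + (v0 - target) * Math.exp(-t / tau);
console.log(remaining(5 * tau)); // ≈ 0.0067, effectively silent
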
@@ -2093,7 +3327,7 @@ export class Midy extends EventTarget {
2093 3327 |       this.applyVoiceParams(channel, 14, scheduleTime);
2094 3328 |     }
2095 3329 |     setModLfoToPitch(channel, note, scheduleTime) {
2096      | -     if (note.
     3330 | +     if (note.modLfoToPitch) {
2097 3331 |         const { modulationDepthMSB, modulationDepthLSB } = channel.state;
2098 3332 |         const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
2099 3333 |         const modLfoToPitch = note.voiceParams.modLfoToPitch +
@@ -2258,7 +3492,7 @@ export class Midy extends EventTarget {
2258 3492 |       reverbEffectsSend: (channel, note, scheduleTime) => {
2259 3493 |         this.setReverbSend(channel, note, scheduleTime);
2260 3494 |       },
2261      | -     delayModLFO: (
     3495 | +     delayModLFO: (channel, note, _scheduleTime) => {
2262 3496 |         const { modulationDepthMSB, modulationDepthLSB } = channel.state;
2263 3497 |         if (0 < modulationDepthMSB + modulationDepthLSB) {
2264 3498 |           this.setDelayModLFO(note);
@@ -2296,11 +3530,12 @@ export class Midy extends EventTarget {
2296 3530 |       state[2] = velocity / 127;
2297 3531 |       state[3] = noteNumber / 127;
2298 3532 |       state[10] = polyphonicKeyPressure / 127;
2299      | -     state[13] = state.channelPressure / 127;
2300 3533 |       return state;
2301 3534 |     }
2302 3535 |     applyVoiceParams(channel, controllerType, scheduleTime) {
2303 3536 |       this.processScheduledNotes(channel, (note) => {
     3537 | +       if (note.renderedBuffer?.isFull)
     3538 | +         return;
2304 3539 |         const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity, note.pressure);
2305 3540 |         const voiceParams = note.voice.getParams(controllerType, controllerState);
2306 3541 |         let applyVolumeEnvelope = false;
@@ -2407,8 +3642,8 @@ export class Midy extends EventTarget {
2407 3642 |       const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
2408 3643 |       const depth = modulationDepth * channel.modulationDepthRange;
2409 3644 |       this.processScheduledNotes(channel, (note) => {
2410      | -       if (note.
2411      | -         note.
     3645 | +       if (note.modLfoToPitch) {
     3646 | +         note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
2412 3647 |         }
2413 3648 |         else {
2414 3649 |           this.startModulation(channel, note, scheduleTime);
@@ -2563,11 +3798,15 @@ export class Midy extends EventTarget {
2563 3798 |         return;
2564 3799 |       if (!(0 <= scheduleTime))
2565 3800 |         scheduleTime = this.audioContext.currentTime;
2566      | -
     3801 | +     const state = channel.state;
     3802 | +     const prevValue = state.sustainPedal;
     3803 | +     state.sustainPedal = value / 127;
2567 3804 |       if (64 <= value) {
2568      | -
2569      | -
2570      | -
     3805 | +       if (prevValue < 0.5) {
     3806 | +         this.processScheduledNotes(channel, (note) => {
     3807 | +           channel.sustainNotes.push(note);
     3808 | +         });
     3809 | +       }
2571 3810 |       }
2572 3811 |       else {
2573 3812 |         this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -2591,13 +3830,17 @@ export class Midy extends EventTarget {
2591 3830 |         return;
2592 3831 |       if (!(0 <= scheduleTime))
2593 3832 |         scheduleTime = this.audioContext.currentTime;
2594      | -
     3833 | +     const state = channel.state;
     3834 | +     const prevValue = state.sostenutoPedal;
     3835 | +     state.sostenutoPedal = value / 127;
2595 3836 |       if (64 <= value) {
2596      | -
2597      | -
2598      | -
2599      | -
2600      | -
     3837 | +       if (prevValue < 0.5) {
     3838 | +         const sostenutoNotes = [];
     3839 | +         this.processActiveNotes(channel, scheduleTime, (note) => {
     3840 | +           sostenutoNotes.push(note);
     3841 | +         });
     3842 | +         channel.sostenutoNotes = sostenutoNotes;
     3843 | +       }
2601 3844 |       }
2602 3845 |       else {
2603 3846 |         this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
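
Pedal state is now stored normalized (value / 127), so the MIDI convention "64 and above means pedal down" becomes the 0.5 threshold used above and in setupNote. A hedged sketch of driving the pedals through control changes, assuming midy is a Midy instance as in the earlier sketch and relying on the scheduleTime argument being optional as the diff shows:

// CC#64 = sustain (hold), CC#66 = sostenuto; values 0-127, >= 64 means "down".
midy.setControlChange(0, 64, 127); // sustain pedal down on channel 0
midy.setControlChange(0, 66, 127); // sostenuto: holds only the notes sounding right now
midy.setControlChange(0, 64, 0);   // sustain pedal up
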
@@ -2966,10 +4209,8 @@ export class Midy extends EventTarget {
2966 4209 |           state[key] = defaultValue;
2967 4210 |         }
2968 4211 |       }
2969      | -
2970      | -
2971      | -   }
2972      | -   this.resetChannelTable(channel);
     4212 | +     channel.resetSettings(this.constructor.channelSettings);
     4213 | +     channel.resetTable();
2973 4214 |       this.mode = "GM2";
2974 4215 |       this.masterFineTuning = 0; // cent
2975 4216 |       this.masterCoarseTuning = 0; // cent
@@ -3132,7 +4373,7 @@ export class Midy extends EventTarget {
3132 4373 |         case 9:
3133 4374 |           switch (data[3]) {
3134 4375 |             case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
3135      | -             return this.handleChannelPressureSysEx(data,
     4376 | +             return this.handleChannelPressureSysEx(data, scheduleTime);
3136 4377 |             case 2: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
3137 4378 |               return this.handlePolyphonicKeyPressureSysEx(data, scheduleTime);
3138 4379 |             case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf