@marmooo/midy 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -2
- package/esm/midy-GM1.d.ts +86 -10
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1190 -101
- package/esm/midy-GM2.d.ts +103 -10
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1402 -162
- package/esm/midy-GMLite.d.ts +84 -9
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1183 -98
- package/esm/midy.d.ts +77 -15
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1416 -175
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +86 -10
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1190 -101
- package/script/midy-GM2.d.ts +103 -10
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1402 -162
- package/script/midy-GMLite.d.ts +84 -9
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1183 -98
- package/script/midy.d.ts +77 -15
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1416 -175
package/esm/midy-GM2.js
CHANGED
@@ -1,6 +1,55 @@
  import { parseMidi } from "midi-file";
  import { parse, SoundFont } from "@marmooo/soundfont-parser";
  import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
+ // Cache mode
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads" for real-time playback with higher cache hit rate
+ // - "adsr" for real-time playback with accurate release envelope
+ // - "note" for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ // No caching. Envelope processing is done in real time on every note.
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ // fully supported. Higher CPU usage.
+ // "ads"
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ // OfflineAudioContext and caches the result. The sustain tail is
+ // aligned to the loop boundary as a fixed buffer. Release is
+ // handled by fading volumeNode gain to 0 at note-off.
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ // vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+ // into an OfflineAudioContext. The cache key includes the note
+ // duration in ticks (tempo-independent) and the volRelease parameter,
+ // so notes with the same duration and release shape share a buffer.
+ // LFO effects are applied in real time after playback starts,
+ // same as "ads" mode. Higher cache hit rate than "note" mode
+ // because LFO variations do not produce separate cache entries.
+ // "note"
+ // Renders the full noteOn-to-noteOff duration per note in an
+ // OfflineAudioContext. All events during the note (volume,
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ // so no real-time processing is needed during playback. Greatly
+ // reduces CPU load for songs with many simultaneous notes.
+ // MIDI file playback only — does not respond to real-time CC changes.
+ // "audio"
+ // Renders the entire MIDI file into a single AudioBuffer offline.
+ // Call render() to complete rendering before calling start().
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
+ // is near zero. Seek and tempo changes are handled in real time.
+ // A "rendering" event is dispatched when rendering starts, and a
+ // "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "ads";
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+ _f64Array[0] = value;
+ return _u64Array[0];
+ }
  let decoderPromise = null;
  let decoderQueue = Promise.resolve();
  function initDecoder() {
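The comment block above documents the new `cacheMode` option that the 0.5.0 constructor accepts (`new MidyGM2(audioContext, { cacheMode })`, default `"ads"`). A minimal usage sketch, assuming the import path and the MIDI-loading call (`loadMIDI()` here) match your build of the package; only the option, `render()`, `start()`, and the `"rendering"`/`"rendered"` events are taken from this diff:

```js
import { MidyGM2 } from "@marmooo/midy"; // import path may differ (e.g. esm/midy-GM2.js)

const audioContext = new AudioContext();
// "audio" pre-renders the whole file into one AudioBuffer;
// in this mode render() is awaited automatically after the file is loaded.
const midy = new MidyGM2(audioContext, { cacheMode: "audio" });
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));

await midy.loadMIDI("song.mid"); // hypothetical loader name
await midy.start();              // streams the pre-rendered buffer
```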
@@ -48,6 +97,24 @@ class Note {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "timelineIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "renderedBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "fullCacheVoiceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  Object.defineProperty(this, "filterEnvelopeNode", {
  enumerable: true,
  configurable: true,

@@ -122,6 +189,166 @@ class Note {
  });
  }
  }
+ class Channel {
+ constructor(audioNodes, settings) {
+ Object.defineProperty(this, "isDrum", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
+ Object.defineProperty(this, "programNumber", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
+ Object.defineProperty(this, "scheduleIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
+ Object.defineProperty(this, "detune", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
+ Object.defineProperty(this, "bankMSB", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 121
+ });
+ Object.defineProperty(this, "bankLSB", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
+ Object.defineProperty(this, "dataMSB", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
+ Object.defineProperty(this, "dataLSB", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
+ Object.defineProperty(this, "rpnMSB", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 127
+ });
+ Object.defineProperty(this, "rpnLSB", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 127
+ });
+ Object.defineProperty(this, "mono", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ }); // CC#124, CC#125
+ Object.defineProperty(this, "modulationDepthRange", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 50
+ }); // cent
+ Object.defineProperty(this, "fineTuning", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ }); // cent
+ Object.defineProperty(this, "coarseTuning", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ }); // cent
+ Object.defineProperty(this, "scheduledNotes", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: []
+ });
+ Object.defineProperty(this, "sustainNotes", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: []
+ });
+ Object.defineProperty(this, "sostenutoNotes", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: []
+ });
+ Object.defineProperty(this, "controlTable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Int8Array(defaultControlValues)
+ });
+ Object.defineProperty(this, "scaleOctaveTuningTable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Int8Array(12)
+ }); // [-64, 63] cent
+ Object.defineProperty(this, "channelPressureTable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Int8Array(defaultPressureValues)
+ });
+ Object.defineProperty(this, "keyBasedTable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Int8Array(128 * 128).fill(-1)
+ });
+ Object.defineProperty(this, "keyBasedGainLs", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Array(128)
+ });
+ Object.defineProperty(this, "keyBasedGainRs", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Array(128)
+ });
+ Object.defineProperty(this, "currentBufferSource", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.assign(this, audioNodes);
+ Object.assign(this, settings);
+ this.state = new ControllerState();
+ }
+ resetSettings(settings) {
+ Object.assign(this, settings);
+ }
+ resetTable() {
+ this.controlTable.set(defaultControlValues);
+ this.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
+ this.channelPressureTable.set(defaultPressureValues);
+ this.keyBasedTable.fill(-1);
+ }
+ }
  const drumExclusiveClassesByKit = new Array(57);
  const drumExclusiveClassCount = 10;
  const standardSet = new Uint8Array(128);

@@ -255,13 +482,73 @@ const defaultControlValues = new Int8Array([
  ...[-1, -1, -1, -1, -1, -1],
  ...defaultPressureValues,
  ]);
+ class RenderedBuffer {
+ constructor(buffer, meta = {}) {
+ Object.defineProperty(this, "buffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isLoop", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isFull", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "adsDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopStart", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "noteDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "releaseDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.buffer = buffer;
+ this.isLoop = meta.isLoop ?? false;
+ this.isFull = meta.isFull ?? false;
+ this.adsDuration = meta.adsDuration;
+ this.loopStart = meta.loopStart;
+ this.loopDuration = meta.loopDuration;
+ this.noteDuration = meta.noteDuration;
+ this.releaseDuration = meta.releaseDuration;
+ }
+ }
  function cbToRatio(cb) {
  return Math.pow(10, cb / 200);
  }
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
  export class MidyGM2 extends EventTarget {
- constructor(audioContext) {
+ constructor(audioContext, options = {}) {
  super();
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
  // https://pubmed.ncbi.nlm.nih.gov/12488797/

@@ -443,9 +730,7 @@ export class MidyGM2 extends EventTarget {
  enumerable: true,
  configurable: true,
  writable: true,
- value: new Set([
- "noteOff",
- ])
+ value: new Set(["noteOff"])
  });
  Object.defineProperty(this, "tempo", {
  enumerable: true,

@@ -495,7 +780,53 @@ export class MidyGM2 extends EventTarget {
  writable: true,
  value: new Array(this.numChannels * drumExclusiveClassCount)
  });
+ // "adsr" mode
+ Object.defineProperty(this, "adsrVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "note" mode
+ Object.defineProperty(this, "noteOnDurations", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "noteOnEvents", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "fullVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "audio" mode
+ Object.defineProperty(this, "renderedAudioBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "isRendering", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
+ Object.defineProperty(this, "audioModeBufferSource", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  this.audioContext = audioContext;
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
  this.masterVolume = new GainNode(audioContext);
  this.scheduler = new GainNode(audioContext, { gain: 0 });
  this.schedulerBuffer = new AudioBuffer({

@@ -571,9 +902,177 @@ export class MidyGM2 extends EventTarget {
  this.instruments = midiData.instruments;
  this.timeline = midiData.timeline;
  this.totalTime = this.calcTotalTime();
+ if (this.cacheMode === "audio") {
+ await this.render();
+ }
+ }
+ buildNoteOnDurations() {
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+ noteOnDurations.clear();
+ noteOnEvents.clear();
+ const inverseTempo = 1 / this.tempo;
+ const sustainPedal = new Uint8Array(numChannels);
+ const sostenutoPedal = new Uint8Array(numChannels);
+ const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+ const activeNotes = new Map();
+ const pendingOff = new Map();
+ const finalizeEntry = (entry, endTime, endTicks) => {
+ const duration = Math.max(0, endTime - entry.startTime);
+ const durationTicks = (endTicks == null || endTicks === Infinity)
+ ? Infinity
+ : Math.max(0, endTicks - entry.startTicks);
+ noteOnDurations.set(entry.idx, duration);
+ noteOnEvents.set(entry.idx, {
+ duration,
+ durationTicks,
+ startTime: entry.startTime,
+ events: entry.events,
+ });
+ };
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const t = event.startTime * inverseTempo;
+ switch (event.type) {
+ case "noteOn": {
+ const key = event.noteNumber * numChannels + event.channel;
+ if (!activeNotes.has(key))
+ activeNotes.set(key, []);
+ activeNotes.get(key).push({
+ idx: i,
+ startTime: t,
+ startTicks: event.ticks,
+ events: [],
+ });
+ const pendingStack = pendingOff.get(key);
+ if (pendingStack && pendingStack.length > 0)
+ pendingStack.shift();
+ break;
+ }
+ case "noteOff": {
+ const ch = event.channel;
+ const key = event.noteNumber * numChannels + ch;
+ const isSostenuto = sostenutoKeys[ch].has(key);
+ if (sustainPedal[ch] || isSostenuto) {
+ if (!pendingOff.has(key))
+ pendingOff.set(key, []);
+ pendingOff.get(key).push({ t, ticks: event.ticks });
+ }
+ else {
+ const stack = activeNotes.get(key);
+ if (stack && stack.length > 0) {
+ finalizeEntry(stack.shift(), t, event.ticks);
+ if (stack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ break;
+ }
+ case "controller": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ switch (event.controllerType) {
+ case 64: { // Sustain Pedal
+ const on = event.value >= 64;
+ sustainPedal[ch] = on ? 1 : 0;
+ if (!on) {
+ for (const [key, offItems] of pendingOff) {
+ if (key % numChannels !== ch)
+ continue;
+ const activeStack = activeNotes.get(key);
+ for (const { t: offTime, ticks: offTicks } of offItems) {
+ if (activeStack && activeStack.length > 0) {
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
+ if (activeStack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ pendingOff.delete(key);
+ }
+ }
+ break;
+ }
+ case 66: { // Sostenuto Pedal
+ const on = event.value >= 64;
+ if (on && !sostenutoPedal[ch]) {
+ for (const [key] of activeNotes) {
+ if (key % numChannels === ch)
+ sostenutoKeys[ch].add(key);
+ }
+ }
+ else if (!on) {
+ sostenutoKeys[ch].clear();
+ }
+ sostenutoPedal[ch] = on ? 1 : 0;
+ break;
+ }
+ case 121: // Reset All Controllers
+ sustainPedal[ch] = 0;
+ sostenutoPedal[ch] = 0;
+ sostenutoKeys[ch].clear();
+ break;
+ case 120: // All Sound Off
+ case 123: { // All Notes Off
+ for (const [key, stack] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ activeNotes.delete(key);
+ }
+ for (const key of pendingOff.keys()) {
+ if (key % numChannels === ch)
+ pendingOff.delete(key);
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "sysEx":
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+ // GM1 System On / GM2 System On
+ if (event.data[3] === 1 || event.data[3] === 3) {
+ sustainPedal.fill(0);
+ pendingOff.clear();
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ }
+ activeNotes.clear();
+ }
+ }
+ else {
+ for (const [, entries] of activeNotes) {
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ break;
+ case "pitchBend":
+ case "programChange":
+ case "channelAftertouch": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ }
+ }
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, totalTime, Infinity);
+ }
  }
  cacheVoiceIds() {
- const { channels, timeline, voiceCounter } = this;
+ const { channels, timeline, voiceCounter, cacheMode } = this;
  for (let i = 0; i < timeline.length; i++) {
  const event = timeline[i];
  switch (event.type) {
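`buildNoteOnDurations()` above walks the timeline once and, for the duration-dependent cache modes, stores one record per `noteOn` event keyed by its timeline index. A sketch of the shape it produces, with field meanings inferred from `finalizeEntry`:

```js
// noteOnEvents: Map<timelineIndex, {
//   duration,      // seconds from noteOn to the effective noteOff (pedals resolved)
//   durationTicks, // the same span in MIDI ticks; Infinity if the note never ends
//   startTime,     // noteOn time in seconds at the current tempo
//   events,        // controller / pitchBend / sysEx / aftertouch events during the note
// }>
// noteOnDurations: Map<timelineIndex, durationInSeconds>, kept alongside for quick lookups.
```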
@@ -599,6 +1098,9 @@ export class MidyGM2 extends EventTarget {
  voiceCounter.delete(audioBufferId);
  }
  this.GM2SystemOn();
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ }
  }
  getVoiceId(channel, noteNumber, velocity) {
  const programNumber = channel.programNumber;

@@ -617,7 +1119,8 @@ export class MidyGM2 extends EventTarget {
  const soundFont = this.soundFonts[soundFontIndex];
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
  const { instrument, sampleID } = voice.generators;
- return soundFontIndex * (2 **
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+ (sampleID << 8);
  }
  createChannelAudioNodes(audioContext) {
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
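The widened `getVoiceId()` return value packs three fields into a single number; a sketch of the layout implied by the new expression (the low bits stay free so that `getAdsCachedBuffer`, later in this diff, can add `(noteNumber << 1) + 1` on top):

```js
// voiceId = soundFontIndex * 2 ** 31   // which loaded SoundFont
//         + instrument    * 2 ** 24    // instrument index from voice.generators
//         + (sampleID << 8);           // sample id, shifted to leave the low byte free
```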
@@ -627,38 +1130,11 @@ export class MidyGM2 extends EventTarget {
  gainL.connect(merger, 0, 0);
  gainR.connect(merger, 0, 1);
  merger.connect(this.masterVolume);
- return {
- gainL,
- gainR,
- merger,
- };
- }
- resetChannelTable(channel) {
- channel.controlTable.set(defaultControlValues);
- channel.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
- channel.channelPressureTable.set(defaultPressureValues);
- channel.keyBasedTable.fill(-1);
+ return { gainL, gainR, merger };
  }
  createChannels(audioContext) {
- const
-
- currentBufferSource: null,
- isDrum: false,
- state: new ControllerState(),
- ...this.constructor.channelSettings,
- ...this.createChannelAudioNodes(audioContext),
- scheduledNotes: [],
- sustainNotes: [],
- sostenutoNotes: [],
- controlTable: new Int8Array(defaultControlValues),
- scaleOctaveTuningTable: new Int8Array(12), // [-64, 63] cent
- channelPressureTable: new Int8Array(defaultPressureValues),
- keyBasedTable: new Int8Array(128 * 128).fill(-1),
- keyBasedGainLs: new Array(128),
- keyBasedGainRs: new Array(128),
- };
- });
- return channels;
+ const settings = this.constructor.channelSettings;
+ return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
  }
  decodeOggVorbis(sample) {
  const task = decoderQueue.then(async () => {

@@ -717,15 +1193,26 @@ export class MidyGM2 extends EventTarget {
  return ((programNumber === 48 && noteNumber === 88) ||
  (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
  }
- createBufferSource(channel, noteNumber, voiceParams,
+ createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.buffer = audioBuffer;
-
+ const isDrumLoop = channel.isDrum
  ? this.isLoopDrum(channel, noteNumber)
- :
+ : voiceParams.sampleModes % 2 !== 0;
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+ bufferSource.loop = isLoop;
  if (bufferSource.loop) {
-
-
+ if (isRendered && renderedOrRaw.adsDuration != null) {
+ bufferSource.loopStart = renderedOrRaw.loopStart;
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
+ renderedOrRaw.loopDuration;
+ }
+ else {
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ }
  }
  return bufferSource;
  }

@@ -742,27 +1229,29 @@ export class MidyGM2 extends EventTarget {
  break;
  const startTime = t + schedulingOffset;
  switch (event.type) {
- case "noteOn":
- this.
+ case "noteOn": {
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+ note.timelineIndex = queueIndex;
+ this.setupNote(event.channel, note, startTime);
  break;
-
+ }
+ case "noteOff":
  this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
  break;
- }
  case "controller":
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
  break;
  case "programChange":
  this.setProgramChange(event.channel, event.programNumber, startTime);
  break;
- case "channelAftertouch":
- this.setChannelPressure(event.channel, event.amount, startTime);
- break;
  case "pitchBend":
  this.setPitchBend(event.channel, event.value + 8192, startTime);
  break;
  case "sysEx":
  this.handleSysEx(event.data, startTime);
+ break;
+ case "channelAftertouch":
+ this.setChannelPressure(event.channel, event.amount, startTime);
  }
  queueIndex++;
  }

@@ -783,6 +1272,7 @@ export class MidyGM2 extends EventTarget {
  this.drumExclusiveClassNotes.fill(undefined);
  this.voiceCache.clear();
  this.realtimeVoiceCache.clear();
+ this.adsrVoiceCache.clear();
  const channels = this.channels;
  for (let ch = 0; ch < channels.length; ch++) {
  channels[ch].scheduledNotes = [];

@@ -809,14 +1299,101 @@ export class MidyGM2 extends EventTarget {
  break;
  case "sysEx":
  this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+ break;
+ case "channelAftertouch":
+ this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
  }
  }
  }
+ async playAudioBuffer() {
+ const audioContext = this.audioContext;
+ const paused = this.isPaused;
+ this.isPlaying = true;
+ this.isPaused = false;
+ this.startTime = audioContext.currentTime;
+ if (paused) {
+ this.dispatchEvent(new Event("resumed"));
+ }
+ else {
+ this.dispatchEvent(new Event("started"));
+ }
+ let exitReason;
+ outer: while (true) {
+ const buffer = this.renderedAudioBuffer;
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+ bufferSource.playbackRate.value = this.tempo;
+ bufferSource.connect(this.masterVolume);
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+ bufferSource.start(audioContext.currentTime, offset);
+ this.audioModeBufferSource = bufferSource;
+ let naturalEnded = false;
+ bufferSource.onended = () => {
+ naturalEnded = true;
+ };
+ while (true) {
+ const now = audioContext.currentTime;
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ if (this.loop) {
+ this.resumeTime = 0;
+ this.startTime = audioContext.currentTime;
+ this.dispatchEvent(new Event("looped"));
+ continue outer;
+ }
+ await audioContext.suspend();
+ exitReason = "ended";
+ break outer;
+ }
+ if (this.isPausing) {
+ this.resumeTime = this.currentTime();
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isPausing = false;
+ exitReason = "paused";
+ break outer;
+ }
+ else if (this.isStopping) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isStopping = false;
+ exitReason = "stopped";
+ break outer;
+ }
+ else if (this.isSeeking) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ this.startTime = audioContext.currentTime;
+ this.isSeeking = false;
+ this.dispatchEvent(new Event("seeked"));
+ continue outer;
+ }
+ }
+ }
+ this.isPlaying = false;
+ if (exitReason === "paused") {
+ this.isPaused = true;
+ this.dispatchEvent(new Event("paused"));
+ }
+ else if (exitReason !== undefined) {
+ this.isPaused = false;
+ this.dispatchEvent(new Event(exitReason));
+ }
+ }
  async playNotes() {
  const audioContext = this.audioContext;
  if (audioContext.state === "suspended") {
  await audioContext.resume();
  }
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+ return await this.playAudioBuffer();
+ }
  const paused = this.isPaused;
  this.isPlaying = true;
  this.isPaused = false;
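In `"audio"` mode, `playAudioBuffer()` above drives playback from the pre-rendered buffer and reports state through the same events as regular playback. A sketch of a listener, reusing the `midy` instance from the earlier sketch and only the event names dispatched in this method:

```js
for (const type of ["started", "resumed", "looped", "seeked", "paused", "stopped", "ended"]) {
  midy.addEventListener(type, () => console.log(`audio-mode playback: ${type}`));
}
```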
@@ -949,12 +1526,12 @@ export class MidyGM2 extends EventTarget {
  if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
  switch (data[3]) {
  case 1:
- this.GM1SystemOn(
+ this.GM1SystemOn();
  break;
  case 2: // GM System Off
  break;
  case 3:
- this.GM2SystemOn(
+ this.GM2SystemOn();
  break;
  default:
  console.warn(`Unsupported Exclusive Message: ${data}`);

@@ -1021,6 +1598,186 @@ export class MidyGM2 extends EventTarget {
  this.notePromises = [];
  return stopPromise;
  }
+ async render() {
+ if (this.isRendering)
+ return;
+ if (this.timeline.length === 0)
+ return;
+ if (this.voiceCounter.size === 0)
+ this.cacheVoiceIds();
+ this.isRendering = true;
+ this.renderedAudioBuffer = null;
+ this.dispatchEvent(new Event("rendering"));
+ const sampleRate = this.audioContext.sampleRate;
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+ const renderBankMSB = new Uint8Array(this.numChannels);
+ const renderBankLSB = new Uint8Array(this.numChannels);
+ const renderProgramNumber = new Uint8Array(this.numChannels);
+ const renderIsDrum = new Uint8Array(this.numChannels);
+ renderBankMSB.fill(121);
+ renderIsDrum[9] = 1;
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+ const state = new Float32Array(256);
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ state[type] = defaultValue;
+ }
+ return state;
+ });
+ const tasks = [];
+ const timeline = this.timeline;
+ const inverseTempo = 1 / this.tempo;
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const ch = event.channel;
+ switch (event.type) {
+ case "noteOn": {
+ const noteEvent = this.noteOnEvents.get(i);
+ const noteDuration = noteEvent?.duration ??
+ this.noteOnDurations.get(i) ??
+ 0;
+ if (noteDuration <= 0)
+ continue;
+ const { noteNumber, velocity } = event;
+ const isDrum = renderIsDrum[ch] === 1;
+ const programNumber = renderProgramNumber[ch];
+ const bankTable = this.soundFontTable[programNumber];
+ if (!bankTable)
+ continue;
+ let bank = isDrum ? 128 : renderBankLSB[ch];
+ if (bankTable[bank] === undefined) {
+ if (isDrum)
+ continue;
+ bank = 0;
+ }
+ const soundFontIndex = bankTable[bank];
+ if (soundFontIndex === undefined)
+ continue;
+ const soundFont = this.soundFonts[soundFontIndex];
+ const fakeChannel = {
+ state: { array: renderControllerStates[ch].slice() },
+ programNumber,
+ isDrum,
+ modulationDepthRange: 50,
+ detune: 0,
+ };
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ if (!voice)
+ continue;
+ const voiceParams = voice.getAllParams(controllerState);
+ const t = event.startTime * inverseTempo + this.startDelay;
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+ const promise = (async () => {
+ try {
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+ }
+ catch (err) {
+ console.warn("render: note render failed", err);
+ return null;
+ }
+ })();
+ tasks.push({ t, promise, fakeChannel });
+ break;
+ }
+ case "controller": {
+ const { controllerType, value } = event;
+ switch (controllerType) {
+ case 0: // bankMSB
+ renderBankMSB[ch] = value;
+ if (this.mode === "GM2") {
+ if (value === 120) {
+ renderIsDrum[ch] = 1;
+ }
+ else if (value === 121) {
+ renderIsDrum[ch] = 0;
+ }
+ }
+ break;
+ case 32: // bankLSB
+ renderBankLSB[ch] = value;
+ break;
+ default: {
+ const stateIndex = 128 + controllerType;
+ if (stateIndex < 256) {
+ renderControllerStates[ch][stateIndex] = value / 127;
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "pitchBend":
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+ break;
+ case "programChange":
+ renderProgramNumber[ch] = event.programNumber;
+ if (this.mode === "GM2") {
+ if (renderBankMSB[ch] === 120) {
+ renderIsDrum[ch] = 1;
+ }
+ else if (renderBankMSB[ch] === 121) {
+ renderIsDrum[ch] = 0;
+ }
+ }
+ break;
+ case "sysEx": {
+ const data = event.data;
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+ if (data[3] === 1) { // GM1 System On
+ renderBankMSB.fill(0);
+ renderBankLSB.fill(0);
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ renderBankMSB[9] = 1;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ renderNoteAftertouch.fill(0);
+ }
+ else if (data[3] === 3) { // GM2 System On
+ renderBankMSB.fill(121);
+ renderBankLSB.fill(0);
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ renderBankMSB[9] = 120;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ renderNoteAftertouch.fill(0);
+ }
+ }
+ break;
+ }
+ case "channelAftertouch":
+ renderControllerStates[ch][13] = event.amount / 127;
+ }
+ }
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+ for (let i = 0; i < tasks.length; i++) {
+ const { t, promise } = tasks[i];
+ const noteBuffer = await promise;
+ if (!noteBuffer)
+ continue;
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
+ ? noteBuffer.buffer
+ : noteBuffer;
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
+ buffer: audioBuffer,
+ });
+ bufferSource.connect(offlineContext.destination);
+ bufferSource.start(t);
+ }
+ this.renderedAudioBuffer = await offlineContext.startRendering();
+ this.isRendering = false;
+ this.dispatchEvent(new Event("rendered"));
+ return this.renderedAudioBuffer;
+ }
  async start() {
  if (this.isPlaying || this.isPaused)
  return;

@@ -1057,11 +1814,22 @@ export class MidyGM2 extends EventTarget {
  }
  }
  tempoChange(tempo) {
+ const cacheMode = this.cacheMode;
  const timeScale = this.tempo / tempo;
  this.resumeTime = this.resumeTime * timeScale;
  this.tempo = tempo;
  this.totalTime = this.calcTotalTime();
  this.seekTo(this.currentTime() * timeScale);
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ this.fullVoiceCache.clear();
+ this.adsrVoiceCache.clear();
+ }
+ if (cacheMode === "audio") {
+ if (this.audioModeBufferSource) {
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+ }
+ }
  }
  calcTotalTime() {
  const totalTimeEventTypes = this.totalTimeEventTypes;
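`tempoChange()` now has cache-mode-dependent side effects; based only on the branches above, a caller can expect roughly this:

```js
// midy.tempoChange(newTempo) — tempo appears to act as a playback-rate multiplier:
// - "adsr" / "note" / "audio": per-note durations are rebuilt and the
//   fullVoiceCache / adsrVoiceCache entries are discarded (their keys depend on duration).
// - "audio": if a pre-rendered buffer is currently playing, its playbackRate
//   is retargeted to the new tempo instead of re-rendering the file.
midy.tempoChange(1.25); // e.g. play about 25% faster
```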
@@ -1082,6 +1850,9 @@ export class MidyGM2 extends EventTarget {
  if (!this.isPlaying)
  return this.resumeTime;
  const now = this.audioContext.currentTime;
+ if (this.cacheMode === "audio") {
+ return this.resumeTime + (now - this.startTime) * this.tempo;
+ }
  return now + this.resumeTime - this.startTime;
  }
  async processScheduledNotes(channel, callback) {

@@ -1279,6 +2050,8 @@ export class MidyGM2 extends EventTarget {
  }
  updateChannelDetune(channel, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  if (this.isPortamento(channel, note)) {
  this.setPortamentoDetune(channel, note, scheduleTime);
  }

@@ -1367,6 +2140,8 @@ export class MidyGM2 extends EventTarget {
  .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
  }
  setVolumeEnvelope(channel, note, scheduleTime) {
+ if (!note.volumeEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
  (1 + this.getAmplitudeControl(channel));

@@ -1395,9 +2170,6 @@ export class MidyGM2 extends EventTarget {
  }
  setDetune(channel, note, scheduleTime) {
  const detune = this.calcNoteDetune(channel, note);
- note.bufferSource.detune
- .cancelScheduledValues(scheduleTime)
- .setValueAtTime(detune, scheduleTime);
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
  note.bufferSource.detune
  .cancelAndHoldAtTime(scheduleTime)

@@ -1457,6 +2229,8 @@ export class MidyGM2 extends EventTarget {
  .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
  }
  setFilterEnvelope(channel, note, scheduleTime) {
+ if (!note.filterEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
  const baseCent = voiceParams.initialFilterFc +

@@ -1496,14 +2270,17 @@ export class MidyGM2 extends EventTarget {
  note.modLfoToPitch = new GainNode(audioContext);
  this.setModLfoToPitch(channel, note, scheduleTime);
  note.modLfoToVolume = new GainNode(audioContext);
- this.setModLfoToVolume(note, scheduleTime);
+ this.setModLfoToVolume(channel, note, scheduleTime);
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
  note.modLfo.connect(note.modLfoToFilterFc);
-
+ if (note.filterEnvelopeNode) {
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ }
  note.modLfo.connect(note.modLfoToPitch);
  note.modLfoToPitch.connect(note.bufferSource.detune);
  note.modLfo.connect(note.modLfoToVolume);
- note.
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+ note.modLfoToVolume.connect(volumeTarget.gain);
  }
  startVibrato(channel, note, scheduleTime) {
  const { voiceParams } = note;

@@ -1519,34 +2296,342 @@ export class MidyGM2 extends EventTarget {
  note.vibLfo.connect(note.vibLfoToPitch);
  note.vibLfoToPitch.connect(note.bufferSource.detune);
  }
- async
+ async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const loopCount = isLoop && adsDuration > loopStartTime
+ ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+ const renderDuration = isLoop
+ ? alignedLoopStart + loopDuration
+ : audioBuffer.duration;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(channel, offlineNote, 0);
+ this.setFilterEnvelope(channel, offlineNote, 0);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (voiceParams.sample.type === "compressed") {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop,
+ adsDuration,
+ loopStart: alignedLoopStart,
+ loopDuration,
+ });
+ }
+ async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const releaseDuration = voiceParams.volRelease;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedNoteEnd = isLoop
+ ? loopStartTime + noteLoopCount * loopDuration
+ : noteDuration;
+ const noteOffTime = alignedNoteEnd;
+ const totalDuration = noteOffTime + releaseDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(channel, offlineNote, 0);
+ this.setFilterEnvelope(channel, offlineNote, 0);
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+ const volDelayTime = voiceParams.volDelay;
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
+ const volHoldTime = volAttackTime + voiceParams.volHold;
+ let gainAtNoteOff;
+ if (noteOffTime <= volDelayTime) {
+ gainAtNoteOff = 0;
+ }
+ else if (noteOffTime <= volAttackTime) {
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
+ }
+ else if (noteOffTime <= volHoldTime) {
+ gainAtNoteOff = attackVolume;
+ }
+ else {
+ const decayElapsed = noteOffTime - volHoldTime;
+ gainAtNoteOff = sustainVolume +
+ (attackVolume - sustainVolume) *
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+ }
+ volumeEnvelopeNode.gain
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+ filterEnvelopeNode.frequency
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(initialFreq, noteOffTime)
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (isLoop) {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: false,
+ adsDuration,
+ noteDuration: noteOffTime,
+ releaseDuration,
+ });
+ }
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+ const ch = note.channel ?? 0;
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+ const totalDuration = noteDuration + releaseEndDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const offlinePlayer = new this.constructor(offlineContext, {
+ cacheMode: "none",
+ });
+ offlineContext.suspend = () => Promise.resolve();
+ offlineContext.resume = () => Promise.resolve();
+ offlinePlayer.soundFonts = this.soundFonts;
+ offlinePlayer.soundFontTable = this.soundFontTable;
+ const dstChannel = offlinePlayer.channels[ch];
+ dstChannel.state.array.set(channel.state.array);
+ dstChannel.isDrum = channel.isDrum;
+ dstChannel.programNumber = channel.programNumber;
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+ for (const event of noteEvents) {
+ const t = event.startTime / this.tempo - noteStartTime;
+ if (t < 0 || t > noteDuration)
+ continue;
+ switch (event.type) {
+ case "controller":
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+ break;
+ case "pitchBend":
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+ break;
+ case "sysEx":
+ offlinePlayer.handleSysEx(event.data, t);
+ break;
+ case "channelAftertouch":
+ offlinePlayer.setChannelPressure(ch, event.amount, t);
+ }
+ }
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: true,
+ noteDuration: noteDuration,
+ releaseDuration: releaseEndDuration,
+ });
+ }
+ async getAudioBuffer(channel, note, realtime) {
+ const cacheMode = this.cacheMode;
+ const { noteNumber, velocity } = note;
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+ if (!realtime) {
+ if (cacheMode === "note") {
+ return await this.getFullCachedBuffer(note, audioBufferId);
+ }
+ else if (cacheMode === "adsr") {
+ return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
+ }
+ }
+ if (cacheMode === "none") {
+ return await this.createAudioBuffer(note.voiceParams);
+ }
+ // fallback to ADS cache:
+ // - "ads" (realtime or not)
+ // - "adsr" + realtime
+ // - "note" + realtime
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+ }
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+ const voiceParams = note.voiceParams;
  if (realtime) {
- const
- if (
- return
- const
- this.
-
+ const cached = this.realtimeVoiceCache.get(cacheKey);
+ if (cached)
+ return cached;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+ this.realtimeVoiceCache.set(cacheKey, rendered);
+ return rendered;
  }
  else {
- const cache = this.voiceCache.get(
+ const cache = this.voiceCache.get(cacheKey);
  if (cache) {
  cache.counter += 1;
  if (cache.maxCount <= cache.counter) {
- this.voiceCache.delete(
+ this.voiceCache.delete(cacheKey);
  }
  return cache.audioBuffer;
  }
  else {
- const maxCount = this.voiceCounter.get(
- const
- const
-
-
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+ this.voiceCache.set(cacheKey, cache);
+ return rendered;
  }
  }
  }
+ async getAdsrCachedBuffer(channel, note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+ const safeTicks = noteDurationTicks === Infinity
+ ? 0xffffffffn
+ : BigInt(noteDurationTicks);
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
+ (playbackRateBits << 96n) |
+ (safeTicks << 64n) |
+ volReleaseBits;
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ return buf;
+ }
+ const noteDuration = noteEvent?.duration ?? 0;
|
2581
|
+
const renderPromise = (async () => {
|
|
2582
|
+
try {
|
|
2583
|
+
const rawBuffer = await this.createAudioBuffer(voiceParams);
|
|
2584
|
+
const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
|
|
2585
|
+
durationMap.set(cacheKey, rendered);
|
|
2586
|
+
return rendered;
|
|
2587
|
+
}
|
|
2588
|
+
catch (err) {
|
|
2589
|
+
durationMap.delete(cacheKey);
|
|
2590
|
+
throw err;
|
|
2591
|
+
}
|
|
2592
|
+
})();
|
|
2593
|
+
durationMap.set(cacheKey, renderPromise);
|
|
2594
|
+
return await renderPromise;
|
|
2595
|
+
}
|
|
2596
|
+
async getFullCachedBuffer(note, audioBufferId) {
|
|
2597
|
+
const voiceParams = note.voiceParams;
|
|
2598
|
+
const timelineIndex = note.timelineIndex;
|
|
2599
|
+
const noteEvent = this.noteOnEvents.get(timelineIndex);
|
|
2600
|
+
const noteDuration = noteEvent?.duration ?? 0;
|
|
2601
|
+
const cacheKey = timelineIndex;
|
|
2602
|
+
let durationMap = this.fullVoiceCache.get(audioBufferId);
|
|
2603
|
+
if (!durationMap) {
|
|
2604
|
+
durationMap = new Map();
|
|
2605
|
+
this.fullVoiceCache.set(audioBufferId, durationMap);
|
|
2606
|
+
}
|
|
2607
|
+
const cached = durationMap.get(cacheKey);
|
|
2608
|
+
if (cached instanceof RenderedBuffer) {
|
|
2609
|
+
note.fullCacheVoiceId = audioBufferId;
|
|
2610
|
+
return cached;
|
|
2611
|
+
}
|
|
2612
|
+
if (cached instanceof Promise) {
|
|
2613
|
+
const buf = await cached;
|
|
2614
|
+
if (buf == null)
|
|
2615
|
+
return await this.createAudioBuffer(voiceParams);
|
|
2616
|
+
note.fullCacheVoiceId = audioBufferId;
|
|
2617
|
+
return buf;
|
|
2618
|
+
}
|
|
2619
|
+
const renderPromise = (async () => {
|
|
2620
|
+
try {
|
|
2621
|
+
const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
|
|
2622
|
+
durationMap.set(cacheKey, rendered);
|
|
2623
|
+
return rendered;
|
|
2624
|
+
}
|
|
2625
|
+
catch (err) {
|
|
2626
|
+
durationMap.delete(cacheKey);
|
|
2627
|
+
throw err;
|
|
2628
|
+
}
|
|
2629
|
+
})();
|
|
2630
|
+
durationMap.set(cacheKey, renderPromise);
|
|
2631
|
+
const rendered = await renderPromise;
|
|
2632
|
+
note.fullCacheVoiceId = audioBufferId;
|
|
2633
|
+
return rendered;
|
|
2634
|
+
}
|
|
1550
2635
|
async setNoteAudioNode(channel, note, realtime) {
|
|
1551
2636
|
const audioContext = this.audioContext;
|
|
1552
2637
|
const now = audioContext.currentTime;
|
|
@@ -1555,46 +2640,72 @@ export class MidyGM2 extends EventTarget {
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
  const voiceParams = note.voice.getAllParams(controllerState);
  note.voiceParams = voiceParams;
- const audioBuffer = await this.getAudioBuffer(channel,
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+ const isRendered = audioBuffer instanceof RenderedBuffer;
+ note.renderedBuffer = isRendered ? audioBuffer : null;
  note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
- note.
- note.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ note.volumeNode = new GainNode(audioContext);
+ note.volumeNode.gain.setValueAtTime(1, now);
+ const cacheMode = this.cacheMode;
+ const isFullCached = isRendered && audioBuffer.isFull === true;
+ if (cacheMode === "none") {
+ note.volumeEnvelopeNode = new GainNode(audioContext);
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ });
+ const prevNote = channel.scheduledNotes.at(-1);
+ if (prevNote && prevNote.noteNumber !== noteNumber) {
+ note.portamentoNoteNumber = prevNote.noteNumber;
+ }
+ if (!channel.isDrum && this.isPortamento(channel, note)) {
+ this.setPortamentoVolumeEnvelope(channel, note, now);
+ this.setPortamentoFilterEnvelope(channel, note, now);
+ this.setPortamentoPitchEnvelope(channel, note, now);
+ this.setPortamentoDetune(channel, note, now);
+ }
+ else {
+ this.setVolumeEnvelope(channel, note, now);
+ this.setFilterEnvelope(channel, note, now);
+ this.setPitchEnvelope(note, now);
+ this.setDetune(channel, note, now);
+ }
+ if (0 < state.vibratoDepth) {
+ this.startVibrato(channel, note, now);
+ }
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ if (channel.mono && channel.currentBufferSource) {
+ channel.currentBufferSource.stop(startTime);
+ channel.currentBufferSource = note.bufferSource;
+ }
+ note.bufferSource.connect(note.filterEnvelopeNode);
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+ note.volumeEnvelopeNode.connect(note.volumeNode);
+ this.setChorusSend(channel, note, now);
+ this.setReverbSend(channel, note, now);
+ }
+ else if (isFullCached) { // "note" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ note.bufferSource.connect(note.volumeNode);
+ this.setChorusSend(channel, note, now);
+ this.setReverbSend(channel, note, now);
+ }
+ else { // "ads" / "adsr" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
  this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.volumeNode);
+ this.setChorusSend(channel, note, now);
+ this.setReverbSend(channel, note, now);
  }
- if (0 < state.vibratoDepth) {
- this.startVibrato(channel, note, now);
- }
- if (0 < state.modulationDepthMSB) {
- this.startModulation(channel, note, now);
- }
- if (channel.mono && channel.currentBufferSource) {
- channel.currentBufferSource.stop(startTime);
- channel.currentBufferSource = note.bufferSource;
- }
- note.bufferSource.connect(note.filterEnvelopeNode);
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
- this.setChorusSend(channel, note, now);
- this.setReverbSend(channel, note, now);
  if (voiceParams.sample.type === "compressed") {
-
- note.bufferSource.start(startTime, offset);
+ note.bufferSource.start(startTime);
  }
  else {
  note.bufferSource.start(startTime);
@@ -1636,40 +2747,53 @@ export class MidyGM2 extends EventTarget {
  }
  setNoteRouting(channelNumber, note, startTime) {
  const channel = this.channels[channelNumber];
- const {
- if (
-
- let gainL = keyBasedGainLs[noteNumber];
- let gainR = keyBasedGainRs[noteNumber];
- if (!gainL) {
- const audioNodes = this.createChannelAudioNodes(this.audioContext);
- gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
- gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
- }
- volumeEnvelopeNode.connect(gainL);
- volumeEnvelopeNode.connect(gainR);
+ const { volumeNode } = note;
+ if (note.renderedBuffer?.isFull) {
+ volumeNode.connect(this.masterVolume);
  }
  else {
-
-
-
-
-
+ if (channel.isDrum) {
+ const noteNumber = note.noteNumber;
+ const { keyBasedGainLs, keyBasedGainRs } = channel;
+ let gainL = keyBasedGainLs[noteNumber];
+ let gainR = keyBasedGainRs[noteNumber];
+ if (!gainL) {
+ const audioNodes = this.createChannelAudioNodes(this.audioContext);
+ gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+ gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+ }
+ volumeNode.connect(gainL);
+ volumeNode.connect(gainR);
+ }
+ else {
+ volumeNode.connect(channel.gainL);
+ volumeNode.connect(channel.gainR);
+ }
  }
  this.handleExclusiveClass(note, channelNumber, startTime);
  this.handleDrumExclusiveClass(note, channelNumber, startTime);
  }
  async noteOn(channelNumber, noteNumber, velocity, startTime) {
- const
-
-
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+ return await this.setupNote(channelNumber, note, startTime);
+ }
+ createNote(channelNumber, noteNumber, velocity, startTime) {
+ if (!(0 <= startTime))
  startTime = this.audioContext.currentTime;
  const note = new Note(noteNumber, velocity, startTime);
-
-
- scheduledNotes.
+ note.channel = channelNumber;
+ const channel = this.channels[channelNumber];
+ note.index = channel.scheduledNotes.length;
+ channel.scheduledNotes.push(note);
+ return note;
+ }
+ async setupNote(channelNumber, note, startTime) {
+ const realtime = startTime === undefined;
+ const channel = this.channels[channelNumber];
  const programNumber = channel.programNumber;
  const bankTable = this.soundFontTable[programNumber];
+ if (!bankTable)
+ return;
  let bank = channel.isDrum ? 128 : channel.bankLSB;
  if (bankTable[bank] === undefined) {
  if (channel.isDrum)
@@ -1680,17 +2804,25 @@ export class MidyGM2 extends EventTarget {
  if (soundFontIndex === undefined)
  return;
  const soundFont = this.soundFonts[soundFontIndex];
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
  if (!note.voice)
  return;
  await this.setNoteAudioNode(channel, note, realtime);
  this.setNoteRouting(channelNumber, note, startTime);
  note.resolveReady();
+ if (0.5 <= channel.state.sustainPedal) {
+ channel.sustainNotes.push(note);
+ }
+ if (0.5 <= channel.state.sostenutoPedal) {
+ channel.sostenutoNotes.push(note);
+ }
+ return note;
  }
  disconnectNote(note) {
  note.bufferSource.disconnect();
- note.filterEnvelopeNode
- note.volumeEnvelopeNode
+ note.filterEnvelopeNode?.disconnect();
+ note.volumeEnvelopeNode?.disconnect();
+ note.volumeNode.disconnect();
  if (note.modLfoToPitch) {
  note.modLfoToVolume.disconnect();
  note.modLfoToPitch.disconnect();
@@ -1707,16 +2839,112 @@ export class MidyGM2 extends EventTarget {
  note.chorusSend.disconnect();
  }
  }
+ releaseFullCache(note) {
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+ return;
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+ if (!durationMap)
+ return;
+ const entry = durationMap.get(note.timelineIndex);
+ if (entry instanceof RenderedBuffer) {
+ durationMap.delete(note.timelineIndex);
+ if (durationMap.size === 0) {
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
+ }
+ }
+ }
  releaseNote(channel, note, endTime) {
  endTime ??= this.audioContext.currentTime;
+ if (note.renderedBuffer?.isFull) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volDuration = note.voiceParams.volRelease;
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.loop = false;
+ note.bufferSource.stop(volRelease);
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ }, volRelease);
+ });
+ }
+ else {
+ const now = this.audioContext.currentTime;
+ if (naturalEndTime <= now) {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ return Promise.resolve();
+ }
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ }, naturalEndTime);
+ });
+ }
+ }
  const volDuration = note.voiceParams.volRelease;
  const volRelease = endTime + volDuration;
- note.
- .
-
-
- .
-
+ if (note.volumeEnvelopeNode) { // "none" mode
+ note.filterEnvelopeNode.frequency
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+ note.volumeEnvelopeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
+ else { // "ads" / "adsr" mode
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+ !note.renderedBuffer.isFull;
+ if (isAdsr) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.stop(volRelease);
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ }, volRelease);
+ });
+ }
+ else {
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.stop();
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ }, naturalEndTime);
+ });
+ }
+ }
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
  return new Promise((resolve) => {
  this.scheduleTask(() => {
  const bufferSource = note.bufferSource;
@@ -1935,7 +3163,7 @@ export class MidyGM2 extends EventTarget {
  if (!note.reverbSend) {
  if (0 < value) {
  note.reverbSend = new GainNode(this.audioContext, { gain: value });
- note.
+ note.volumeNode.connect(note.reverbSend);
  note.reverbSend.connect(this.reverbEffect.input);
  }
  }
@@ -1944,11 +3172,11 @@ export class MidyGM2 extends EventTarget {
  .cancelScheduledValues(scheduleTime)
  .setValueAtTime(value, scheduleTime);
  if (0 < value) {
- note.
+ note.volumeNode.connect(note.reverbSend);
  }
  else {
  try {
- note.
+ note.volumeNode.disconnect(note.reverbSend);
  }
  catch { /* empty */ }
  }
@@ -1965,7 +3193,7 @@ export class MidyGM2 extends EventTarget {
  if (!note.chorusSend) {
  if (0 < value) {
  note.chorusSend = new GainNode(this.audioContext, { gain: value });
- note.
+ note.volumeNode.connect(note.chorusSend);
  note.chorusSend.connect(this.chorusEffect.input);
  }
  }
@@ -1974,11 +3202,11 @@ export class MidyGM2 extends EventTarget {
  .cancelScheduledValues(scheduleTime)
  .setValueAtTime(value, scheduleTime);
  if (0 < value) {
- note.
+ note.volumeNode.connect(note.chorusSend);
  }
  else {
  try {
- note.
+ note.volumeNode.disconnect(note.chorusSend);
  }
  catch { /* empty */ }
  }
@@ -2041,7 +3269,7 @@ export class MidyGM2 extends EventTarget {
  reverbEffectsSend: (channel, note, scheduleTime) => {
  this.setReverbSend(channel, note, scheduleTime);
  },
- delayModLFO: (
+ delayModLFO: (channel, note, _scheduleTime) => {
  if (0 < channel.state.modulationDepthMSB) {
  this.setDelayModLFO(note);
  }
@@ -2076,11 +3304,12 @@ export class MidyGM2 extends EventTarget {
  state.set(channel.state.array);
  state[2] = velocity / 127;
  state[3] = noteNumber / 127;
- state[13] = state.channelPressure / 127;
  return state;
  }
  applyVoiceParams(channel, controllerType, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
  const voiceParams = note.voice.getParams(controllerType, controllerState);
  let applyVolumeEnvelope = false;
@@ -2164,6 +3393,8 @@ export class MidyGM2 extends EventTarget {
  const depth = channel.state.modulationDepthMSB *
  channel.modulationDepthRange;
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  if (note.modLfoToPitch) {
  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
  }
@@ -2300,11 +3531,15 @@ export class MidyGM2 extends EventTarget {
  return;
  if (!(0 <= scheduleTime))
  scheduleTime = this.audioContext.currentTime;
-
+ const state = channel.state;
+ const prevValue = state.sustainPedal;
+ state.sustainPedal = value / 127;
  if (64 <= value) {
-
-
-
+ if (prevValue < 0.5) {
+ this.processScheduledNotes(channel, (note) => {
+ channel.sustainNotes.push(note);
+ });
+ }
  }
  else {
  this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -2328,13 +3563,17 @@ export class MidyGM2 extends EventTarget {
  return;
  if (!(0 <= scheduleTime))
  scheduleTime = this.audioContext.currentTime;
-
+ const state = channel.state;
+ const prevValue = state.sostenutoPedal;
+ state.sostenutoPedal = value / 127;
  if (64 <= value) {
-
-
-
-
-
+ if (prevValue < 0.5) {
+ const sostenutoNotes = [];
+ this.processActiveNotes(channel, scheduleTime, (note) => {
+ sostenutoNotes.push(note);
+ });
+ channel.sostenutoNotes = sostenutoNotes;
+ }
  }
  else {
  this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
@@ -2530,10 +3769,8 @@ export class MidyGM2 extends EventTarget {
  state[key] = defaultValue;
  }
  }
-
-
- }
- this.resetChannelTable(channel);
+ channel.resetSettings(this.constructor.channelSettings);
+ channel.resetTable();
  this.mode = "GM2";
  this.masterFineTuning = 0; // cent
  this.masterCoarseTuning = 0; // cent
@@ -2671,7 +3908,7 @@ export class MidyGM2 extends EventTarget {
  case 9:
  switch (data[3]) {
  case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
- return this.handleChannelPressureSysEx(data,
+ return this.handleChannelPressureSysEx(data, scheduleTime);
  case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
  return this.handleControlChangeSysEx(data, scheduleTime);
  default:
@@ -2996,6 +4233,9 @@ export class MidyGM2 extends EventTarget {
  getChannelAmplitudeControl(channel) {
  return this.calcChannelEffectValue(channel, 2);
  }
+ getAmplitudeControl(channel) {
+ return this.calcEffectValue(channel, 2);
+ }
  getLFOPitchDepth(channel) {
  return this.calcEffectValue(channel, 3);
  }
@@ -3023,7 +4263,7 @@ export class MidyGM2 extends EventTarget {
  this.setFilterEnvelope(channel, note, scheduleTime);
  }
  };
- handlers[2] = (channel,
+ handlers[2] = (channel, _note, scheduleTime) => this.applyVolume(channel, scheduleTime);
  handlers[3] = (channel, note, scheduleTime) => this.setModLfoToPitch(channel, note, scheduleTime);
  handlers[4] = (channel, note, scheduleTime) => this.setModLfoToFilterFc(channel, note, scheduleTime);
  handlers[5] = (channel, note, scheduleTime) => this.setModLfoToVolume(channel, note, scheduleTime);