@marmooo/midy 0.2.6 → 0.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -13
- package/esm/midy-GM1.d.ts +73 -74
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +207 -218
- package/esm/midy-GM2.d.ts +125 -127
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +358 -418
- package/esm/midy-GMLite.d.ts +69 -70
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +195 -207
- package/esm/midy.d.ts +148 -150
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +404 -500
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +73 -74
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +207 -218
- package/script/midy-GM2.d.ts +125 -127
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +358 -418
- package/script/midy-GMLite.d.ts +69 -70
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +195 -207
- package/script/midy.d.ts +148 -150
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +404 -500
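
Most of the changes in the file below replace internal reads of `audioContext.currentTime` with an explicit `scheduleTime` parameter on the playback and controller methods (falling back to the current time when omitted). A minimal caller-side sketch of that pattern, assuming an already constructed `MidyGM2` instance named `midy` (construction is not shown in this diff):

    // Sketch only: `midy` is assumed to be a constructed MidyGM2 instance.
    // The signatures follow the diff below; scheduleTime defaults to
    // audioContext.currentTime when omitted.
    const t = midy.audioContext.currentTime + 0.5; // schedule half a second ahead
    midy.noteOn(0, 60, 100, t);                    // channel 0, middle C, velocity 100
    midy.noteOff(0, 60, 0, t + 1);                 // release one second later
    midy.handleMIDIMessage(0x90, 64, 100, t);      // raw MIDI message, also accepts scheduleTime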
package/esm/midy-GM2.js
CHANGED
|
@@ -284,18 +284,6 @@ export class MidyGM2 {
|
|
|
284
284
|
delayTimes: this.generateDistributedArray(0.02, 2, 0.5),
|
|
285
285
|
}
|
|
286
286
|
});
|
|
287
|
-
Object.defineProperty(this, "mono", {
|
|
288
|
-
enumerable: true,
|
|
289
|
-
configurable: true,
|
|
290
|
-
writable: true,
|
|
291
|
-
value: false
|
|
292
|
-
}); // CC#124, CC#125
|
|
293
|
-
Object.defineProperty(this, "omni", {
|
|
294
|
-
enumerable: true,
|
|
295
|
-
configurable: true,
|
|
296
|
-
writable: true,
|
|
297
|
-
value: false
|
|
298
|
-
}); // CC#126, CC#127
|
|
299
287
|
Object.defineProperty(this, "noteCheckInterval", {
|
|
300
288
|
enumerable: true,
|
|
301
289
|
configurable: true,
|
|
@@ -497,6 +485,7 @@ export class MidyGM2 {
|
|
|
497
485
|
controlTable: this.initControlTable(),
|
|
498
486
|
...this.setChannelAudioNodes(audioContext),
|
|
499
487
|
scheduledNotes: new SparseMap(128),
|
|
488
|
+
sustainNotes: [],
|
|
500
489
|
sostenutoNotes: new SparseMap(128),
|
|
501
490
|
scaleOctaveTuningTable: new Int8Array(12), // [-64, 63] cent
|
|
502
491
|
channelPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
|
|
@@ -579,14 +568,15 @@ export class MidyGM2 {
|
|
|
579
568
|
const portamentoTarget = this.findPortamentoTarget(queueIndex);
|
|
580
569
|
if (portamentoTarget)
|
|
581
570
|
portamentoTarget.portamento = true;
|
|
582
|
-
const notePromise = this.
|
|
571
|
+
const notePromise = this.scheduleNoteOff(event.channel, event.noteNumber, event.velocity, startTime, false, // force
|
|
572
|
+
portamentoTarget?.noteNumber);
|
|
583
573
|
if (notePromise) {
|
|
584
574
|
this.notePromises.push(notePromise);
|
|
585
575
|
}
|
|
586
576
|
break;
|
|
587
577
|
}
|
|
588
578
|
case "controller":
|
|
589
|
-
this.handleControlChange(
|
|
579
|
+
this.handleControlChange(event.channel, event.controllerType, event.value, startTime);
|
|
590
580
|
break;
|
|
591
581
|
case "programChange":
|
|
592
582
|
this.handleProgramChange(event.channel, event.programNumber, startTime);
|
|
@@ -629,10 +619,11 @@ export class MidyGM2 {
|
|
|
629
619
|
resolve();
|
|
630
620
|
return;
|
|
631
621
|
}
|
|
632
|
-
const
|
|
622
|
+
const now = this.audioContext.currentTime;
|
|
623
|
+
const t = now + offset;
|
|
633
624
|
queueIndex = await this.scheduleTimelineEvents(t, offset, queueIndex);
|
|
634
625
|
if (this.isPausing) {
|
|
635
|
-
await this.stopNotes(0, true);
|
|
626
|
+
await this.stopNotes(0, true, now);
|
|
636
627
|
this.notePromises = [];
|
|
637
628
|
resolve();
|
|
638
629
|
this.isPausing = false;
|
|
@@ -640,7 +631,7 @@ export class MidyGM2 {
|
|
|
640
631
|
return;
|
|
641
632
|
}
|
|
642
633
|
else if (this.isStopping) {
|
|
643
|
-
await this.stopNotes(0, true);
|
|
634
|
+
await this.stopNotes(0, true, now);
|
|
644
635
|
this.notePromises = [];
|
|
645
636
|
this.exclusiveClassMap.clear();
|
|
646
637
|
this.audioBufferCache.clear();
|
|
@@ -650,7 +641,7 @@ export class MidyGM2 {
|
|
|
650
641
|
return;
|
|
651
642
|
}
|
|
652
643
|
else if (this.isSeeking) {
|
|
653
|
-
this.stopNotes(0, true);
|
|
644
|
+
this.stopNotes(0, true, now);
|
|
654
645
|
this.exclusiveClassMap.clear();
|
|
655
646
|
this.startTime = this.audioContext.currentTime;
|
|
656
647
|
queueIndex = this.getQueueIndex(this.resumeTime);
|
|
@@ -659,7 +650,6 @@ export class MidyGM2 {
|
|
|
659
650
|
await schedulePlayback();
|
|
660
651
|
}
|
|
661
652
|
else {
|
|
662
|
-
const now = this.audioContext.currentTime;
|
|
663
653
|
const waitTime = now + this.noteCheckInterval;
|
|
664
654
|
await this.scheduleTask(() => { }, waitTime);
|
|
665
655
|
await schedulePlayback();
|
|
@@ -779,25 +769,21 @@ export class MidyGM2 {
|
|
|
779
769
|
}
|
|
780
770
|
return { instruments, timeline };
|
|
781
771
|
}
|
|
782
|
-
|
|
783
|
-
const now = this.audioContext.currentTime;
|
|
772
|
+
stopChannelNotes(channelNumber, velocity, force, scheduleTime) {
|
|
784
773
|
const channel = this.channels[channelNumber];
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
const promise = this.scheduleNoteRelease(channelNumber, note.noteNumber, velocity, now, undefined, // portamentoNoteNumber
|
|
791
|
-
force);
|
|
792
|
-
this.notePromises.push(promise);
|
|
793
|
-
}
|
|
774
|
+
const promises = [];
|
|
775
|
+
this.processScheduledNotes(channel, (note) => {
|
|
776
|
+
const promise = this.scheduleNoteOff(channelNumber, note.noteNumber, velocity, scheduleTime, force, undefined);
|
|
777
|
+
this.notePromises.push(promise);
|
|
778
|
+
promises.push(promise);
|
|
794
779
|
});
|
|
795
780
|
channel.scheduledNotes.clear();
|
|
796
|
-
|
|
781
|
+
return Promise.all(promises);
|
|
797
782
|
}
|
|
798
|
-
stopNotes(velocity, force) {
|
|
783
|
+
stopNotes(velocity, force, scheduleTime) {
|
|
784
|
+
const promises = [];
|
|
799
785
|
for (let i = 0; i < this.channels.length; i++) {
|
|
800
|
-
this.stopChannelNotes(i, velocity, force);
|
|
786
|
+
promises.push(this.stopChannelNotes(i, velocity, force, scheduleTime));
|
|
801
787
|
}
|
|
802
788
|
return Promise.all(this.notePromises);
|
|
803
789
|
}
|
|
@@ -845,34 +831,32 @@ export class MidyGM2 {
|
|
|
845
831
|
const now = this.audioContext.currentTime;
|
|
846
832
|
return this.resumeTime + now - this.startTime - this.startDelay;
|
|
847
833
|
}
|
|
848
|
-
processScheduledNotes(channel,
|
|
834
|
+
processScheduledNotes(channel, callback) {
|
|
849
835
|
channel.scheduledNotes.forEach((noteList) => {
|
|
850
836
|
for (let i = 0; i < noteList.length; i++) {
|
|
851
837
|
const note = noteList[i];
|
|
852
838
|
if (!note)
|
|
853
839
|
continue;
|
|
854
|
-
if (scheduleTime < note.startTime)
|
|
855
|
-
continue;
|
|
856
840
|
callback(note);
|
|
857
841
|
}
|
|
858
842
|
});
|
|
859
843
|
}
|
|
860
|
-
getActiveNotes(channel,
|
|
844
|
+
getActiveNotes(channel, scheduleTime) {
|
|
861
845
|
const activeNotes = new SparseMap(128);
|
|
862
846
|
channel.scheduledNotes.forEach((noteList) => {
|
|
863
|
-
const activeNote = this.getActiveNote(noteList,
|
|
847
|
+
const activeNote = this.getActiveNote(noteList, scheduleTime);
|
|
864
848
|
if (activeNote) {
|
|
865
849
|
activeNotes.set(activeNote.noteNumber, activeNote);
|
|
866
850
|
}
|
|
867
851
|
});
|
|
868
852
|
return activeNotes;
|
|
869
853
|
}
|
|
870
|
-
getActiveNote(noteList,
|
|
854
|
+
getActiveNote(noteList, scheduleTime) {
|
|
871
855
|
for (let i = noteList.length - 1; i >= 0; i--) {
|
|
872
856
|
const note = noteList[i];
|
|
873
857
|
if (!note)
|
|
874
858
|
return;
|
|
875
|
-
if (
|
|
859
|
+
if (scheduleTime < note.startTime)
|
|
876
860
|
continue;
|
|
877
861
|
return (note.ending) ? null : note;
|
|
878
862
|
}
|
|
@@ -1032,43 +1016,35 @@ export class MidyGM2 {
|
|
|
1032
1016
|
calcNoteDetune(channel, note) {
|
|
1033
1017
|
return channel.scaleOctaveTuningTable[note.noteNumber % 12];
|
|
1034
1018
|
}
|
|
1035
|
-
updateChannelDetune(channel) {
|
|
1036
|
-
|
|
1037
|
-
|
|
1038
|
-
const note = noteList[i];
|
|
1039
|
-
if (!note)
|
|
1040
|
-
continue;
|
|
1041
|
-
this.updateDetune(channel, note);
|
|
1042
|
-
}
|
|
1019
|
+
updateChannelDetune(channel, scheduleTime) {
|
|
1020
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1021
|
+
this.updateDetune(channel, note, scheduleTime);
|
|
1043
1022
|
});
|
|
1044
1023
|
}
|
|
1045
|
-
updateDetune(channel, note) {
|
|
1046
|
-
const now = this.audioContext.currentTime;
|
|
1024
|
+
updateDetune(channel, note, scheduleTime) {
|
|
1047
1025
|
const noteDetune = this.calcNoteDetune(channel, note);
|
|
1048
1026
|
const detune = channel.detune + noteDetune;
|
|
1049
1027
|
note.bufferSource.detune
|
|
1050
|
-
.cancelScheduledValues(
|
|
1051
|
-
.setValueAtTime(detune,
|
|
1028
|
+
.cancelScheduledValues(scheduleTime)
|
|
1029
|
+
.setValueAtTime(detune, scheduleTime);
|
|
1052
1030
|
}
|
|
1053
1031
|
getPortamentoTime(channel) {
|
|
1054
1032
|
const factor = 5 * Math.log(10) / 127;
|
|
1055
1033
|
const time = channel.state.portamentoTime;
|
|
1056
1034
|
return Math.log(time) / factor;
|
|
1057
1035
|
}
|
|
1058
|
-
setPortamentoStartVolumeEnvelope(channel, note) {
|
|
1059
|
-
const now = this.audioContext.currentTime;
|
|
1036
|
+
setPortamentoStartVolumeEnvelope(channel, note, scheduleTime) {
|
|
1060
1037
|
const { voiceParams, startTime } = note;
|
|
1061
1038
|
const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation);
|
|
1062
1039
|
const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
|
|
1063
1040
|
const volDelay = startTime + voiceParams.volDelay;
|
|
1064
1041
|
const portamentoTime = volDelay + this.getPortamentoTime(channel);
|
|
1065
1042
|
note.volumeEnvelopeNode.gain
|
|
1066
|
-
.cancelScheduledValues(
|
|
1043
|
+
.cancelScheduledValues(scheduleTime)
|
|
1067
1044
|
.setValueAtTime(0, volDelay)
|
|
1068
1045
|
.linearRampToValueAtTime(sustainVolume, portamentoTime);
|
|
1069
1046
|
}
|
|
1070
|
-
setVolumeEnvelope(channel, note) {
|
|
1071
|
-
const now = this.audioContext.currentTime;
|
|
1047
|
+
setVolumeEnvelope(channel, note, scheduleTime) {
|
|
1072
1048
|
const { voiceParams, startTime } = note;
|
|
1073
1049
|
const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation) *
|
|
1074
1050
|
(1 + this.getAmplitudeControl(channel));
|
|
@@ -1078,7 +1054,7 @@ export class MidyGM2 {
|
|
|
1078
1054
|
const volHold = volAttack + voiceParams.volHold;
|
|
1079
1055
|
const volDecay = volHold + voiceParams.volDecay;
|
|
1080
1056
|
note.volumeEnvelopeNode.gain
|
|
1081
|
-
.cancelScheduledValues(
|
|
1057
|
+
.cancelScheduledValues(scheduleTime)
|
|
1082
1058
|
.setValueAtTime(0, startTime)
|
|
1083
1059
|
.setValueAtTime(1e-6, volDelay) // exponentialRampToValueAtTime() requires a non-zero value
|
|
1084
1060
|
.exponentialRampToValueAtTime(attackVolume, volAttack)
|
|
@@ -1086,7 +1062,6 @@ export class MidyGM2 {
|
|
|
1086
1062
|
.linearRampToValueAtTime(sustainVolume, volDecay);
|
|
1087
1063
|
}
|
|
1088
1064
|
setPitchEnvelope(note, scheduleTime) {
|
|
1089
|
-
scheduleTime ??= this.audioContext.currentTime;
|
|
1090
1065
|
const { voiceParams } = note;
|
|
1091
1066
|
const baseRate = voiceParams.playbackRate;
|
|
1092
1067
|
note.bufferSource.playbackRate
|
|
@@ -1113,8 +1088,7 @@ export class MidyGM2 {
|
|
|
1113
1088
|
const maxFrequency = 20000; // max Hz of initialFilterFc
|
|
1114
1089
|
return Math.max(minFrequency, Math.min(frequency, maxFrequency));
|
|
1115
1090
|
}
|
|
1116
|
-
setPortamentoStartFilterEnvelope(channel, note) {
|
|
1117
|
-
const now = this.audioContext.currentTime;
|
|
1091
|
+
setPortamentoStartFilterEnvelope(channel, note, scheduleTime) {
|
|
1118
1092
|
const state = channel.state;
|
|
1119
1093
|
const { voiceParams, noteNumber, startTime } = note;
|
|
1120
1094
|
const softPedalFactor = 1 -
|
|
@@ -1129,13 +1103,12 @@ export class MidyGM2 {
|
|
|
1129
1103
|
const portamentoTime = startTime + this.getPortamentoTime(channel);
|
|
1130
1104
|
const modDelay = startTime + voiceParams.modDelay;
|
|
1131
1105
|
note.filterNode.frequency
|
|
1132
|
-
.cancelScheduledValues(
|
|
1106
|
+
.cancelScheduledValues(scheduleTime)
|
|
1133
1107
|
.setValueAtTime(adjustedBaseFreq, startTime)
|
|
1134
1108
|
.setValueAtTime(adjustedBaseFreq, modDelay)
|
|
1135
1109
|
.linearRampToValueAtTime(adjustedSustainFreq, portamentoTime);
|
|
1136
1110
|
}
|
|
1137
|
-
setFilterEnvelope(channel, note) {
|
|
1138
|
-
const now = this.audioContext.currentTime;
|
|
1111
|
+
setFilterEnvelope(channel, note, scheduleTime) {
|
|
1139
1112
|
const state = channel.state;
|
|
1140
1113
|
const { voiceParams, noteNumber, startTime } = note;
|
|
1141
1114
|
const softPedalFactor = 1 -
|
|
@@ -1155,14 +1128,14 @@ export class MidyGM2 {
|
|
|
1155
1128
|
const modHold = modAttack + voiceParams.modHold;
|
|
1156
1129
|
const modDecay = modHold + voiceParams.modDecay;
|
|
1157
1130
|
note.filterNode.frequency
|
|
1158
|
-
.cancelScheduledValues(
|
|
1131
|
+
.cancelScheduledValues(scheduleTime)
|
|
1159
1132
|
.setValueAtTime(adjustedBaseFreq, startTime)
|
|
1160
1133
|
.setValueAtTime(adjustedBaseFreq, modDelay)
|
|
1161
1134
|
.exponentialRampToValueAtTime(adjustedPeekFreq, modAttack)
|
|
1162
1135
|
.setValueAtTime(adjustedPeekFreq, modHold)
|
|
1163
1136
|
.linearRampToValueAtTime(adjustedSustainFreq, modDecay);
|
|
1164
1137
|
}
|
|
1165
|
-
startModulation(channel, note,
|
|
1138
|
+
startModulation(channel, note, scheduleTime) {
|
|
1166
1139
|
const { voiceParams } = note;
|
|
1167
1140
|
note.modulationLFO = new OscillatorNode(this.audioContext, {
|
|
1168
1141
|
frequency: this.centToHz(voiceParams.freqModLFO),
|
|
@@ -1171,10 +1144,10 @@ export class MidyGM2 {
|
|
|
1171
1144
|
gain: voiceParams.modLfoToFilterFc,
|
|
1172
1145
|
});
|
|
1173
1146
|
note.modulationDepth = new GainNode(this.audioContext);
|
|
1174
|
-
this.setModLfoToPitch(channel, note);
|
|
1147
|
+
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1175
1148
|
note.volumeDepth = new GainNode(this.audioContext);
|
|
1176
|
-
this.setModLfoToVolume(
|
|
1177
|
-
note.modulationLFO.start(startTime + voiceParams.delayModLFO);
|
|
1149
|
+
this.setModLfoToVolume(note, scheduleTime);
|
|
1150
|
+
note.modulationLFO.start(note.startTime + voiceParams.delayModLFO);
|
|
1178
1151
|
note.modulationLFO.connect(note.filterDepth);
|
|
1179
1152
|
note.filterDepth.connect(note.filterNode.frequency);
|
|
1180
1153
|
note.modulationLFO.connect(note.modulationDepth);
|
|
@@ -1182,15 +1155,15 @@ export class MidyGM2 {
|
|
|
1182
1155
|
note.modulationLFO.connect(note.volumeDepth);
|
|
1183
1156
|
note.volumeDepth.connect(note.volumeEnvelopeNode.gain);
|
|
1184
1157
|
}
|
|
1185
|
-
startVibrato(channel, note,
|
|
1158
|
+
startVibrato(channel, note, scheduleTime) {
|
|
1186
1159
|
const { voiceParams } = note;
|
|
1187
1160
|
const state = channel.state;
|
|
1188
1161
|
note.vibratoLFO = new OscillatorNode(this.audioContext, {
|
|
1189
1162
|
frequency: this.centToHz(voiceParams.freqVibLFO) * state.vibratoRate * 2,
|
|
1190
1163
|
});
|
|
1191
|
-
note.vibratoLFO.start(startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
|
|
1164
|
+
note.vibratoLFO.start(note.startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
|
|
1192
1165
|
note.vibratoDepth = new GainNode(this.audioContext);
|
|
1193
|
-
this.setVibLfoToPitch(channel, note);
|
|
1166
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1194
1167
|
note.vibratoLFO.connect(note.vibratoDepth);
|
|
1195
1168
|
note.vibratoDepth.connect(note.bufferSource.detune);
|
|
1196
1169
|
}
|
|
@@ -1213,6 +1186,7 @@ export class MidyGM2 {
|
|
|
1213
1186
|
}
|
|
1214
1187
|
}
|
|
1215
1188
|
async createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3) {
|
|
1189
|
+
const now = this.audioContext.currentTime;
|
|
1216
1190
|
const state = channel.state;
|
|
1217
1191
|
const controllerState = this.getControllerState(channel, noteNumber, velocity);
|
|
1218
1192
|
const voiceParams = voice.getAllParams(controllerState);
|
|
@@ -1229,22 +1203,22 @@ export class MidyGM2 {
|
|
|
1229
1203
|
});
|
|
1230
1204
|
if (portamento) {
|
|
1231
1205
|
note.portamento = true;
|
|
1232
|
-
this.setPortamentoStartVolumeEnvelope(channel, note);
|
|
1233
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1206
|
+
this.setPortamentoStartVolumeEnvelope(channel, note, now);
|
|
1207
|
+
this.setPortamentoStartFilterEnvelope(channel, note, now);
|
|
1234
1208
|
}
|
|
1235
1209
|
else {
|
|
1236
1210
|
note.portamento = false;
|
|
1237
|
-
this.setVolumeEnvelope(channel, note);
|
|
1238
|
-
this.setFilterEnvelope(channel, note);
|
|
1211
|
+
this.setVolumeEnvelope(channel, note, now);
|
|
1212
|
+
this.setFilterEnvelope(channel, note, now);
|
|
1239
1213
|
}
|
|
1240
1214
|
if (0 < state.vibratoDepth) {
|
|
1241
|
-
this.startVibrato(channel, note,
|
|
1215
|
+
this.startVibrato(channel, note, now);
|
|
1242
1216
|
}
|
|
1243
|
-
this.setPitchEnvelope(note);
|
|
1217
|
+
this.setPitchEnvelope(note, now);
|
|
1244
1218
|
if (0 < state.modulationDepth) {
|
|
1245
|
-
this.startModulation(channel, note,
|
|
1219
|
+
this.startModulation(channel, note, now);
|
|
1246
1220
|
}
|
|
1247
|
-
if (
|
|
1221
|
+
if (channel.mono && channel.currentBufferSource) {
|
|
1248
1222
|
channel.currentBufferSource.stop(startTime);
|
|
1249
1223
|
channel.currentBufferSource = note.bufferSource;
|
|
1250
1224
|
}
|
|
@@ -1254,10 +1228,10 @@ export class MidyGM2 {
|
|
|
1254
1228
|
note.volumeNode.connect(note.gainL);
|
|
1255
1229
|
note.volumeNode.connect(note.gainR);
|
|
1256
1230
|
if (0 < channel.chorusSendLevel) {
|
|
1257
|
-
this.setChorusEffectsSend(channel, note, 0);
|
|
1231
|
+
this.setChorusEffectsSend(channel, note, 0, now);
|
|
1258
1232
|
}
|
|
1259
1233
|
if (0 < channel.reverbSendLevel) {
|
|
1260
|
-
this.setReverbEffectsSend(channel, note, 0);
|
|
1234
|
+
this.setReverbEffectsSend(channel, note, 0, now);
|
|
1261
1235
|
}
|
|
1262
1236
|
note.bufferSource.start(startTime);
|
|
1263
1237
|
return note;
|
|
@@ -1285,8 +1259,8 @@ export class MidyGM2 {
|
|
|
1285
1259
|
const note = await this.createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3);
|
|
1286
1260
|
note.gainL.connect(channel.gainL);
|
|
1287
1261
|
note.gainR.connect(channel.gainR);
|
|
1288
|
-
if (channel.state.
|
|
1289
|
-
channel.
|
|
1262
|
+
if (0.5 <= channel.state.sustainPedal) {
|
|
1263
|
+
channel.sustainNotes.push(note);
|
|
1290
1264
|
}
|
|
1291
1265
|
const exclusiveClass = note.voiceParams.exclusiveClass;
|
|
1292
1266
|
if (exclusiveClass !== 0) {
|
|
@@ -1294,9 +1268,9 @@ export class MidyGM2 {
|
|
|
1294
1268
|
const prevEntry = this.exclusiveClassMap.get(exclusiveClass);
|
|
1295
1269
|
const [prevNote, prevChannelNumber] = prevEntry;
|
|
1296
1270
|
if (!prevNote.ending) {
|
|
1297
|
-
this.
|
|
1298
|
-
startTime,
|
|
1299
|
-
|
|
1271
|
+
this.scheduleNoteOff(prevChannelNumber, prevNote.noteNumber, 0, // velocity,
|
|
1272
|
+
startTime, true, // force
|
|
1273
|
+
undefined);
|
|
1300
1274
|
}
|
|
1301
1275
|
}
|
|
1302
1276
|
this.exclusiveClassMap.set(exclusiveClass, [note, channelNumber]);
|
|
@@ -1309,9 +1283,9 @@ export class MidyGM2 {
|
|
|
1309
1283
|
scheduledNotes.set(noteNumber, [note]);
|
|
1310
1284
|
}
|
|
1311
1285
|
}
|
|
1312
|
-
noteOn(channelNumber, noteNumber, velocity,
|
|
1313
|
-
|
|
1314
|
-
return this.scheduleNoteOn(channelNumber, noteNumber, velocity,
|
|
1286
|
+
noteOn(channelNumber, noteNumber, velocity, scheduleTime) {
|
|
1287
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1288
|
+
return this.scheduleNoteOn(channelNumber, noteNumber, velocity, scheduleTime, false);
|
|
1315
1289
|
}
|
|
1316
1290
|
stopNote(endTime, stopTime, scheduledNotes, index) {
|
|
1317
1291
|
const note = scheduledNotes[index];
|
|
@@ -1351,11 +1325,11 @@ export class MidyGM2 {
|
|
|
1351
1325
|
note.bufferSource.stop(stopTime);
|
|
1352
1326
|
});
|
|
1353
1327
|
}
|
|
1354
|
-
|
|
1328
|
+
scheduleNoteOff(channelNumber, noteNumber, _velocity, endTime, force, portamentoNoteNumber) {
|
|
1355
1329
|
const channel = this.channels[channelNumber];
|
|
1356
1330
|
const state = channel.state;
|
|
1357
1331
|
if (!force) {
|
|
1358
|
-
if (0.5
|
|
1332
|
+
if (0.5 <= state.sustainPedal)
|
|
1359
1333
|
return;
|
|
1360
1334
|
if (channel.sostenutoNotes.has(noteNumber))
|
|
1361
1335
|
return;
|
|
@@ -1390,68 +1364,60 @@ export class MidyGM2 {
|
|
|
1390
1364
|
}
|
|
1391
1365
|
}
|
|
1392
1366
|
}
|
|
1393
|
-
|
|
1394
|
-
|
|
1395
|
-
return this.
|
|
1367
|
+
noteOff(channelNumber, noteNumber, velocity, scheduleTime) {
|
|
1368
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1369
|
+
return this.scheduleNoteOff(channelNumber, noteNumber, velocity, scheduleTime, false, // force
|
|
1370
|
+
undefined);
|
|
1396
1371
|
}
|
|
1397
|
-
releaseSustainPedal(channelNumber, halfVelocity) {
|
|
1372
|
+
releaseSustainPedal(channelNumber, halfVelocity, scheduleTime) {
|
|
1398
1373
|
const velocity = halfVelocity * 2;
|
|
1399
1374
|
const channel = this.channels[channelNumber];
|
|
1400
1375
|
const promises = [];
|
|
1401
|
-
channel.
|
|
1402
|
-
|
|
1403
|
-
|
|
1404
|
-
|
|
1405
|
-
|
|
1406
|
-
continue;
|
|
1407
|
-
const { noteNumber } = note;
|
|
1408
|
-
const promise = this.releaseNote(channelNumber, noteNumber, velocity);
|
|
1409
|
-
promises.push(promise);
|
|
1410
|
-
}
|
|
1411
|
-
});
|
|
1376
|
+
for (let i = 0; i < channel.sustainNotes.length; i++) {
|
|
1377
|
+
const promise = this.noteOff(channelNumber, channel.sustainNotes[i].noteNumber, velocity, scheduleTime);
|
|
1378
|
+
promises.push(promise);
|
|
1379
|
+
}
|
|
1380
|
+
channel.sustainNotes = [];
|
|
1412
1381
|
return promises;
|
|
1413
1382
|
}
|
|
1414
|
-
releaseSostenutoPedal(channelNumber, halfVelocity) {
|
|
1383
|
+
releaseSostenutoPedal(channelNumber, halfVelocity, scheduleTime) {
|
|
1415
1384
|
const velocity = halfVelocity * 2;
|
|
1416
1385
|
const channel = this.channels[channelNumber];
|
|
1417
1386
|
const promises = [];
|
|
1418
1387
|
channel.state.sostenutoPedal = 0;
|
|
1419
|
-
channel.sostenutoNotes.forEach((
|
|
1420
|
-
const
|
|
1421
|
-
const promise = this.releaseNote(channelNumber, noteNumber, velocity);
|
|
1388
|
+
channel.sostenutoNotes.forEach((note) => {
|
|
1389
|
+
const promise = this.noteOff(channelNumber, note.noteNumber, velocity, scheduleTime);
|
|
1422
1390
|
promises.push(promise);
|
|
1423
1391
|
});
|
|
1424
1392
|
channel.sostenutoNotes.clear();
|
|
1425
1393
|
return promises;
|
|
1426
1394
|
}
|
|
1427
|
-
handleMIDIMessage(statusByte, data1, data2) {
|
|
1428
|
-
const channelNumber =
|
|
1395
|
+
handleMIDIMessage(statusByte, data1, data2, scheduleTime) {
|
|
1396
|
+
const channelNumber = statusByte & 0x0F;
|
|
1429
1397
|
const messageType = statusByte & 0xF0;
|
|
1430
1398
|
switch (messageType) {
|
|
1431
1399
|
case 0x80:
|
|
1432
|
-
return this.
|
|
1400
|
+
return this.noteOff(channelNumber, data1, data2, scheduleTime);
|
|
1433
1401
|
case 0x90:
|
|
1434
|
-
return this.noteOn(channelNumber, data1, data2);
|
|
1402
|
+
return this.noteOn(channelNumber, data1, data2, scheduleTime);
|
|
1435
1403
|
case 0xB0:
|
|
1436
|
-
return this.handleControlChange(channelNumber, data1, data2);
|
|
1404
|
+
return this.handleControlChange(channelNumber, data1, data2, scheduleTime);
|
|
1437
1405
|
case 0xC0:
|
|
1438
|
-
return this.handleProgramChange(channelNumber, data1);
|
|
1406
|
+
return this.handleProgramChange(channelNumber, data1, scheduleTime);
|
|
1439
1407
|
case 0xD0:
|
|
1440
|
-
return this.handleChannelPressure(channelNumber, data1);
|
|
1408
|
+
return this.handleChannelPressure(channelNumber, data1, scheduleTime);
|
|
1441
1409
|
case 0xE0:
|
|
1442
|
-
return this.handlePitchBendMessage(channelNumber, data1, data2);
|
|
1410
|
+
return this.handlePitchBendMessage(channelNumber, data1, data2, scheduleTime);
|
|
1443
1411
|
default:
|
|
1444
1412
|
console.warn(`Unsupported MIDI message: ${messageType.toString(16)}`);
|
|
1445
1413
|
}
|
|
1446
1414
|
}
|
|
1447
|
-
handleProgramChange(channelNumber, program) {
|
|
1415
|
+
handleProgramChange(channelNumber, program, _scheduleTime) {
|
|
1448
1416
|
const channel = this.channels[channelNumber];
|
|
1449
1417
|
channel.bank = channel.bankMSB * 128 + channel.bankLSB;
|
|
1450
1418
|
channel.program = program;
|
|
1451
1419
|
}
|
|
1452
|
-
handleChannelPressure(channelNumber, value,
|
|
1453
|
-
if (!startTime)
|
|
1454
|
-
startTime = this.audioContext.currentTime;
|
|
1420
|
+
handleChannelPressure(channelNumber, value, scheduleTime) {
|
|
1455
1421
|
const channel = this.channels[channelNumber];
|
|
1456
1422
|
const prev = channel.state.channelPressure;
|
|
1457
1423
|
const next = value / 127;
|
|
@@ -1461,72 +1427,68 @@ export class MidyGM2 {
|
|
|
1461
1427
|
channel.detune += pressureDepth * (next - prev);
|
|
1462
1428
|
}
|
|
1463
1429
|
const table = channel.channelPressureTable;
|
|
1464
|
-
this.getActiveNotes(channel,
|
|
1430
|
+
this.getActiveNotes(channel, scheduleTime).forEach((note) => {
|
|
1465
1431
|
this.setControllerParameters(channel, note, table);
|
|
1466
1432
|
});
|
|
1467
1433
|
// this.applyVoiceParams(channel, 13);
|
|
1468
1434
|
}
|
|
1469
|
-
handlePitchBendMessage(channelNumber, lsb, msb) {
|
|
1435
|
+
handlePitchBendMessage(channelNumber, lsb, msb, scheduleTime) {
|
|
1470
1436
|
const pitchBend = msb * 128 + lsb;
|
|
1471
|
-
this.setPitchBend(channelNumber, pitchBend);
|
|
1437
|
+
this.setPitchBend(channelNumber, pitchBend, scheduleTime);
|
|
1472
1438
|
}
|
|
1473
|
-
setPitchBend(channelNumber, value) {
|
|
1439
|
+
setPitchBend(channelNumber, value, scheduleTime) {
|
|
1440
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1474
1441
|
const channel = this.channels[channelNumber];
|
|
1475
1442
|
const state = channel.state;
|
|
1476
1443
|
const prev = state.pitchWheel * 2 - 1;
|
|
1477
1444
|
const next = (value - 8192) / 8192;
|
|
1478
1445
|
state.pitchWheel = value / 16383;
|
|
1479
1446
|
channel.detune += (next - prev) * state.pitchWheelSensitivity * 12800;
|
|
1480
|
-
this.updateChannelDetune(channel);
|
|
1481
|
-
this.applyVoiceParams(channel, 14);
|
|
1447
|
+
this.updateChannelDetune(channel, scheduleTime);
|
|
1448
|
+
this.applyVoiceParams(channel, 14, scheduleTime);
|
|
1482
1449
|
}
|
|
1483
|
-
setModLfoToPitch(channel, note) {
|
|
1484
|
-
const now = this.audioContext.currentTime;
|
|
1450
|
+
setModLfoToPitch(channel, note, scheduleTime) {
|
|
1485
1451
|
const modLfoToPitch = note.voiceParams.modLfoToPitch +
|
|
1486
1452
|
this.getLFOPitchDepth(channel);
|
|
1487
1453
|
const baseDepth = Math.abs(modLfoToPitch) + channel.state.modulationDepth;
|
|
1488
1454
|
const modulationDepth = baseDepth * Math.sign(modLfoToPitch);
|
|
1489
1455
|
note.modulationDepth.gain
|
|
1490
|
-
.cancelScheduledValues(
|
|
1491
|
-
.setValueAtTime(modulationDepth,
|
|
1456
|
+
.cancelScheduledValues(scheduleTime)
|
|
1457
|
+
.setValueAtTime(modulationDepth, scheduleTime);
|
|
1492
1458
|
}
|
|
1493
|
-
setVibLfoToPitch(channel, note) {
|
|
1494
|
-
const now = this.audioContext.currentTime;
|
|
1459
|
+
setVibLfoToPitch(channel, note, scheduleTime) {
|
|
1495
1460
|
const vibLfoToPitch = note.voiceParams.vibLfoToPitch;
|
|
1496
1461
|
const vibratoDepth = Math.abs(vibLfoToPitch) * channel.state.vibratoDepth *
|
|
1497
1462
|
2;
|
|
1498
1463
|
const vibratoDepthSign = 0 < vibLfoToPitch;
|
|
1499
1464
|
note.vibratoDepth.gain
|
|
1500
|
-
.cancelScheduledValues(
|
|
1501
|
-
.setValueAtTime(vibratoDepth * vibratoDepthSign,
|
|
1465
|
+
.cancelScheduledValues(scheduleTime)
|
|
1466
|
+
.setValueAtTime(vibratoDepth * vibratoDepthSign, scheduleTime);
|
|
1502
1467
|
}
|
|
1503
|
-
setModLfoToFilterFc(channel, note) {
|
|
1504
|
-
const now = this.audioContext.currentTime;
|
|
1468
|
+
setModLfoToFilterFc(channel, note, scheduleTime) {
|
|
1505
1469
|
const modLfoToFilterFc = note.voiceParams.modLfoToFilterFc +
|
|
1506
1470
|
this.getLFOFilterDepth(channel);
|
|
1507
1471
|
note.filterDepth.gain
|
|
1508
|
-
.cancelScheduledValues(
|
|
1509
|
-
.setValueAtTime(modLfoToFilterFc,
|
|
1472
|
+
.cancelScheduledValues(scheduleTime)
|
|
1473
|
+
.setValueAtTime(modLfoToFilterFc, scheduleTime);
|
|
1510
1474
|
}
|
|
1511
|
-
setModLfoToVolume(channel, note) {
|
|
1512
|
-
const now = this.audioContext.currentTime;
|
|
1475
|
+
setModLfoToVolume(channel, note, scheduleTime) {
|
|
1513
1476
|
const modLfoToVolume = note.voiceParams.modLfoToVolume;
|
|
1514
1477
|
const baseDepth = this.cbToRatio(Math.abs(modLfoToVolume)) - 1;
|
|
1515
1478
|
const volumeDepth = baseDepth * Math.sign(modLfoToVolume) *
|
|
1516
1479
|
(1 + this.getLFOAmplitudeDepth(channel));
|
|
1517
1480
|
note.volumeDepth.gain
|
|
1518
|
-
.cancelScheduledValues(
|
|
1519
|
-
.setValueAtTime(volumeDepth,
|
|
1481
|
+
.cancelScheduledValues(scheduleTime)
|
|
1482
|
+
.setValueAtTime(volumeDepth, scheduleTime);
|
|
1520
1483
|
}
|
|
1521
|
-
setReverbEffectsSend(channel, note, prevValue) {
|
|
1484
|
+
setReverbEffectsSend(channel, note, prevValue, scheduleTime) {
|
|
1522
1485
|
if (0 < prevValue) {
|
|
1523
1486
|
if (0 < note.voiceParams.reverbEffectsSend) {
|
|
1524
|
-
const now = this.audioContext.currentTime;
|
|
1525
1487
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 91);
|
|
1526
1488
|
const value = note.voiceParams.reverbEffectsSend + keyBasedValue;
|
|
1527
1489
|
note.reverbEffectsSend.gain
|
|
1528
|
-
.cancelScheduledValues(
|
|
1529
|
-
.setValueAtTime(value,
|
|
1490
|
+
.cancelScheduledValues(scheduleTime)
|
|
1491
|
+
.setValueAtTime(value, scheduleTime);
|
|
1530
1492
|
}
|
|
1531
1493
|
else {
|
|
1532
1494
|
note.reverbEffectsSend.disconnect();
|
|
@@ -1544,15 +1506,14 @@ export class MidyGM2 {
|
|
|
1544
1506
|
}
|
|
1545
1507
|
}
|
|
1546
1508
|
}
|
|
1547
|
-
setChorusEffectsSend(channel, note, prevValue) {
|
|
1509
|
+
setChorusEffectsSend(channel, note, prevValue, scheduleTime) {
|
|
1548
1510
|
if (0 < prevValue) {
|
|
1549
1511
|
if (0 < note.voiceParams.chorusEffectsSend) {
|
|
1550
|
-
const now = this.audioContext.currentTime;
|
|
1551
1512
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 93);
|
|
1552
1513
|
const value = note.voiceParams.chorusEffectsSend + keyBasedValue;
|
|
1553
1514
|
note.chorusEffectsSend.gain
|
|
1554
|
-
.cancelScheduledValues(
|
|
1555
|
-
.setValueAtTime(value,
|
|
1515
|
+
.cancelScheduledValues(scheduleTime)
|
|
1516
|
+
.setValueAtTime(value, scheduleTime);
|
|
1556
1517
|
}
|
|
1557
1518
|
else {
|
|
1558
1519
|
note.chorusEffectsSend.disconnect();
|
|
@@ -1570,75 +1531,71 @@ export class MidyGM2 {
|
|
|
1570
1531
|
}
|
|
1571
1532
|
}
|
|
1572
1533
|
}
|
|
1573
|
-
setDelayModLFO(note) {
|
|
1574
|
-
const now = this.audioContext.currentTime;
|
|
1534
|
+
setDelayModLFO(note, scheduleTime) {
|
|
1575
1535
|
const startTime = note.startTime;
|
|
1576
|
-
if (startTime <
|
|
1536
|
+
if (startTime < scheduleTime)
|
|
1577
1537
|
return;
|
|
1578
|
-
note.modulationLFO.stop(
|
|
1538
|
+
note.modulationLFO.stop(scheduleTime);
|
|
1579
1539
|
note.modulationLFO.start(startTime + note.voiceParams.delayModLFO);
|
|
1580
1540
|
note.modulationLFO.connect(note.filterDepth);
|
|
1581
1541
|
}
|
|
1582
|
-
setFreqModLFO(note) {
|
|
1583
|
-
const now = this.audioContext.currentTime;
|
|
1542
|
+
setFreqModLFO(note, scheduleTime) {
|
|
1584
1543
|
const freqModLFO = note.voiceParams.freqModLFO;
|
|
1585
1544
|
note.modulationLFO.frequency
|
|
1586
|
-
.cancelScheduledValues(
|
|
1587
|
-
.setValueAtTime(freqModLFO,
|
|
1545
|
+
.cancelScheduledValues(scheduleTime)
|
|
1546
|
+
.setValueAtTime(freqModLFO, scheduleTime);
|
|
1588
1547
|
}
|
|
1589
|
-
setFreqVibLFO(channel, note) {
|
|
1590
|
-
const now = this.audioContext.currentTime;
|
|
1548
|
+
setFreqVibLFO(channel, note, scheduleTime) {
|
|
1591
1549
|
const freqVibLFO = note.voiceParams.freqVibLFO;
|
|
1592
1550
|
note.vibratoLFO.frequency
|
|
1593
|
-
.cancelScheduledValues(
|
|
1594
|
-
.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2,
|
|
1551
|
+
.cancelScheduledValues(scheduleTime)
|
|
1552
|
+
.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2, scheduleTime);
|
|
1595
1553
|
}
|
|
1596
1554
|
createVoiceParamsHandlers() {
|
|
1597
1555
|
return {
|
|
1598
|
-
modLfoToPitch: (channel, note, _prevValue) => {
|
|
1556
|
+
modLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
|
|
1599
1557
|
if (0 < channel.state.modulationDepth) {
|
|
1600
|
-
this.setModLfoToPitch(channel, note);
|
|
1558
|
+
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1601
1559
|
}
|
|
1602
1560
|
},
|
|
1603
|
-
vibLfoToPitch: (channel, note, _prevValue) => {
|
|
1561
|
+
vibLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
|
|
1604
1562
|
if (0 < channel.state.vibratoDepth) {
|
|
1605
|
-
this.setVibLfoToPitch(channel, note);
|
|
1563
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1606
1564
|
}
|
|
1607
1565
|
},
|
|
1608
|
-
modLfoToFilterFc: (channel, note, _prevValue) => {
|
|
1566
|
+
modLfoToFilterFc: (channel, note, _prevValue, scheduleTime) => {
|
|
1609
1567
|
if (0 < channel.state.modulationDepth) {
|
|
1610
|
-
this.setModLfoToFilterFc(channel, note);
|
|
1568
|
+
this.setModLfoToFilterFc(channel, note, scheduleTime);
|
|
1611
1569
|
}
|
|
1612
1570
|
},
|
|
1613
|
-
modLfoToVolume: (channel, note, _prevValue) => {
|
|
1571
|
+
modLfoToVolume: (channel, note, _prevValue, scheduleTime) => {
|
|
1614
1572
|
if (0 < channel.state.modulationDepth) {
|
|
1615
|
-
this.setModLfoToVolume(channel, note);
|
|
1573
|
+
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1616
1574
|
}
|
|
1617
1575
|
},
|
|
1618
|
-
chorusEffectsSend: (channel, note, prevValue) => {
|
|
1619
|
-
this.setChorusEffectsSend(channel, note, prevValue);
|
|
1576
|
+
chorusEffectsSend: (channel, note, prevValue, scheduleTime) => {
|
|
1577
|
+
this.setChorusEffectsSend(channel, note, prevValue, scheduleTime);
|
|
1620
1578
|
},
|
|
1621
|
-
reverbEffectsSend: (channel, note, prevValue) => {
|
|
1622
|
-
this.setReverbEffectsSend(channel, note, prevValue);
|
|
1579
|
+
reverbEffectsSend: (channel, note, prevValue, scheduleTime) => {
|
|
1580
|
+
this.setReverbEffectsSend(channel, note, prevValue, scheduleTime);
|
|
1623
1581
|
},
|
|
1624
|
-
delayModLFO: (_channel, note, _prevValue) => this.setDelayModLFO(note),
|
|
1625
|
-
freqModLFO: (_channel, note, _prevValue) => this.setFreqModLFO(note),
|
|
1626
|
-
delayVibLFO: (channel, note, prevValue) => {
|
|
1582
|
+
delayModLFO: (_channel, note, _prevValue, scheduleTime) => this.setDelayModLFO(note, scheduleTime),
|
|
1583
|
+
freqModLFO: (_channel, note, _prevValue, scheduleTime) => this.setFreqModLFO(note, scheduleTime),
|
|
1584
|
+
delayVibLFO: (channel, note, prevValue, scheduleTime) => {
|
|
1627
1585
|
if (0 < channel.state.vibratoDepth) {
|
|
1628
|
-
const now = this.audioContext.currentTime;
|
|
1629
1586
|
const vibratoDelay = channel.state.vibratoDelay * 2;
|
|
1630
1587
|
const prevStartTime = note.startTime + prevValue * vibratoDelay;
|
|
1631
|
-
if (
|
|
1588
|
+
if (scheduleTime < prevStartTime)
|
|
1632
1589
|
return;
|
|
1633
1590
|
const value = note.voiceParams.delayVibLFO;
|
|
1634
1591
|
const startTime = note.startTime + value * vibratoDelay;
|
|
1635
|
-
note.vibratoLFO.stop(
|
|
1592
|
+
note.vibratoLFO.stop(scheduleTime);
|
|
1636
1593
|
note.vibratoLFO.start(startTime);
|
|
1637
1594
|
}
|
|
1638
1595
|
},
|
|
1639
|
-
freqVibLFO: (channel, note, _prevValue) => {
|
|
1596
|
+
freqVibLFO: (channel, note, _prevValue, scheduleTime) => {
|
|
1640
1597
|
if (0 < channel.state.vibratoDepth) {
|
|
1641
|
-
this.setFreqVibLFO(channel, note);
|
|
1598
|
+
this.setFreqVibLFO(channel, note, scheduleTime);
|
|
1642
1599
|
}
|
|
1643
1600
|
},
|
|
1644
1601
|
};
|
|
@@ -1650,54 +1607,49 @@ export class MidyGM2 {
|
|
|
1650
1607
|
state[3] = noteNumber / 127;
|
|
1651
1608
|
return state;
|
|
1652
1609
|
}
|
|
1653
|
-
applyVoiceParams(channel, controllerType) {
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1610
|
+
applyVoiceParams(channel, controllerType, scheduleTime) {
|
|
1611
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1612
|
+
const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
|
|
1613
|
+
const voiceParams = note.voice.getParams(controllerType, controllerState);
|
|
1614
|
+
let appliedFilterEnvelope = false;
|
|
1615
|
+
let appliedVolumeEnvelope = false;
|
|
1616
|
+
for (const [key, value] of Object.entries(voiceParams)) {
|
|
1617
|
+
const prevValue = note.voiceParams[key];
|
|
1618
|
+
if (value === prevValue)
|
|
1658
1619
|
continue;
|
|
1659
|
-
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1665
|
-
if (value === prevValue)
|
|
1620
|
+
note.voiceParams[key] = value;
|
|
1621
|
+
if (key in this.voiceParamsHandlers) {
|
|
1622
|
+
this.voiceParamsHandlers[key](channel, note, prevValue, scheduleTime);
|
|
1623
|
+
}
|
|
1624
|
+
else if (filterEnvelopeKeySet.has(key)) {
|
|
1625
|
+
if (appliedFilterEnvelope)
|
|
1666
1626
|
continue;
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1627
|
+
appliedFilterEnvelope = true;
|
|
1628
|
+
const noteVoiceParams = note.voiceParams;
|
|
1629
|
+
for (let i = 0; i < filterEnvelopeKeys.length; i++) {
|
|
1630
|
+
const key = filterEnvelopeKeys[i];
|
|
1631
|
+
if (key in voiceParams)
|
|
1632
|
+
noteVoiceParams[key] = voiceParams[key];
|
|
1670
1633
|
}
|
|
1671
|
-
|
|
1672
|
-
|
|
1673
|
-
continue;
|
|
1674
|
-
appliedFilterEnvelope = true;
|
|
1675
|
-
const noteVoiceParams = note.voiceParams;
|
|
1676
|
-
for (let i = 0; i < filterEnvelopeKeys.length; i++) {
|
|
1677
|
-
const key = filterEnvelopeKeys[i];
|
|
1678
|
-
if (key in voiceParams)
|
|
1679
|
-
noteVoiceParams[key] = voiceParams[key];
|
|
1680
|
-
}
|
|
1681
|
-
if (note.portamento) {
|
|
1682
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1683
|
-
}
|
|
1684
|
-
else {
|
|
1685
|
-
this.setFilterEnvelope(channel, note);
|
|
1686
|
-
}
|
|
1687
|
-
this.setPitchEnvelope(note);
|
|
1634
|
+
if (note.portamento) {
|
|
1635
|
+
this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
|
|
1688
1636
|
}
|
|
1689
|
-
else
|
|
1690
|
-
|
|
1691
|
-
|
|
1692
|
-
|
|
1693
|
-
|
|
1694
|
-
|
|
1695
|
-
|
|
1696
|
-
|
|
1697
|
-
|
|
1698
|
-
|
|
1699
|
-
|
|
1637
|
+
else {
|
|
1638
|
+
this.setFilterEnvelope(channel, note, scheduleTime);
|
|
1639
|
+
}
|
|
1640
|
+
this.setPitchEnvelope(note, scheduleTime);
|
|
1641
|
+
}
|
|
1642
|
+
else if (volumeEnvelopeKeySet.has(key)) {
|
|
1643
|
+
if (appliedVolumeEnvelope)
|
|
1644
|
+
continue;
|
|
1645
|
+
appliedVolumeEnvelope = true;
|
|
1646
|
+
const noteVoiceParams = note.voiceParams;
|
|
1647
|
+
for (let i = 0; i < volumeEnvelopeKeys.length; i++) {
|
|
1648
|
+
const key = volumeEnvelopeKeys[i];
|
|
1649
|
+
if (key in voiceParams)
|
|
1650
|
+
noteVoiceParams[key] = voiceParams[key];
|
|
1700
1651
|
}
|
|
1652
|
+
this.setVolumeEnvelope(channel, note, scheduleTime);
|
|
1701
1653
|
}
|
|
1702
1654
|
}
|
|
1703
1655
|
});
|
|
@@ -1730,12 +1682,12 @@ export class MidyGM2 {
|
|
|
1730
1682
|
127: this.polyOn,
|
|
1731
1683
|
};
|
|
1732
1684
|
}
|
|
1733
|
-
handleControlChange(channelNumber, controllerType, value,
|
|
1685
|
+
handleControlChange(channelNumber, controllerType, value, scheduleTime) {
|
|
1734
1686
|
const handler = this.controlChangeHandlers[controllerType];
|
|
1735
1687
|
if (handler) {
|
|
1736
|
-
handler.call(this, channelNumber, value,
|
|
1688
|
+
handler.call(this, channelNumber, value, scheduleTime);
|
|
1737
1689
|
const channel = this.channels[channelNumber];
|
|
1738
|
-
this.applyVoiceParams(channel, controllerType + 128);
|
|
1690
|
+
this.applyVoiceParams(channel, controllerType + 128, scheduleTime);
|
|
1739
1691
|
this.applyControlTable(channel, controllerType);
|
|
1740
1692
|
}
|
|
1741
1693
|
else {
|
|
@@ -1746,9 +1698,8 @@ export class MidyGM2 {
|
|
|
1746
1698
|
this.channels[channelNumber].bankMSB = msb;
|
|
1747
1699
|
}
|
|
1748
1700
|
updateModulation(channel, scheduleTime) {
|
|
1749
|
-
scheduleTime ??= this.audioContext.currentTime;
|
|
1750
1701
|
const depth = channel.state.modulationDepth * channel.modulationDepthRange;
|
|
1751
|
-
this.processScheduledNotes(channel,
|
|
1702
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1752
1703
|
if (note.modulationDepth) {
|
|
1753
1704
|
note.modulationDepth.gain.setValueAtTime(depth, scheduleTime);
|
|
1754
1705
|
}
|
|
@@ -1759,6 +1710,7 @@ export class MidyGM2 {
|
|
|
1759
1710
|
});
|
|
1760
1711
|
}
|
|
1761
1712
|
setModulationDepth(channelNumber, modulation, scheduleTime) {
|
|
1713
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1762
1714
|
const channel = this.channels[channelNumber];
|
|
1763
1715
|
channel.state.modulationDepth = modulation / 127;
|
|
1764
1716
|
this.updateModulation(channel, scheduleTime);
|
|
@@ -1769,8 +1721,7 @@ export class MidyGM2 {
|
|
|
1769
1721
|
channel.state.portamentoTime = Math.exp(factor * portamentoTime);
|
|
1770
1722
|
}
|
|
1771
1723
|
setKeyBasedVolume(channel, scheduleTime) {
|
|
1772
|
-
|
|
1773
|
-
this.processScheduledNotes(channel, scheduleTime, (note) => {
|
|
1724
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1774
1725
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 7);
|
|
1775
1726
|
if (keyBasedValue !== 0) {
|
|
1776
1727
|
note.volumeNode.gain
|
|
@@ -1780,6 +1731,7 @@ export class MidyGM2 {
|
|
|
1780
1731
|
});
|
|
1781
1732
|
}
|
|
1782
1733
|
setVolume(channelNumber, volume, scheduleTime) {
|
|
1734
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1783
1735
|
const channel = this.channels[channelNumber];
|
|
1784
1736
|
channel.state.volume = volume / 127;
|
|
1785
1737
|
this.updateChannelVolume(channel, scheduleTime);
|
|
@@ -1793,8 +1745,7 @@ export class MidyGM2 {
|
|
|
1793
1745
|
};
|
|
1794
1746
|
}
|
|
1795
1747
|
setKeyBasedPan(channel, scheduleTime) {
|
|
1796
|
-
|
|
1797
|
-
this.processScheduledNotes(channel, scheduleTime, (note) => {
|
|
1748
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1798
1749
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 10);
|
|
1799
1750
|
if (keyBasedValue !== 0) {
|
|
1800
1751
|
const { gainLeft, gainRight } = this.panToGain((keyBasedValue + 1) / 2);
|
|
@@ -1808,12 +1759,14 @@ export class MidyGM2 {
|
|
|
1808
1759
|
});
|
|
1809
1760
|
}
|
|
1810
1761
|
setPan(channelNumber, pan, scheduleTime) {
|
|
1762
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1811
1763
|
const channel = this.channels[channelNumber];
|
|
1812
1764
|
channel.state.pan = pan / 127;
|
|
1813
1765
|
this.updateChannelVolume(channel, scheduleTime);
|
|
1814
1766
|
this.setKeyBasedPan(channel, scheduleTime);
|
|
1815
1767
|
}
|
|
1816
1768
|
setExpression(channelNumber, expression, scheduleTime) {
|
|
1769
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1817
1770
|
const channel = this.channels[channelNumber];
|
|
1818
1771
|
channel.state.expression = expression / 127;
|
|
1819
1772
|
this.updateChannelVolume(channel, scheduleTime);
|
|
@@ -1821,125 +1774,113 @@ export class MidyGM2 {
|
|
|
1821
1774
|
setBankLSB(channelNumber, lsb) {
|
|
1822
1775
|
this.channels[channelNumber].bankLSB = lsb;
|
|
1823
1776
|
}
|
|
1824
|
-
dataEntryLSB(channelNumber, value) {
|
|
1777
|
+
dataEntryLSB(channelNumber, value, scheduleTime) {
|
|
1825
1778
|
this.channels[channelNumber].dataLSB = value;
|
|
1826
|
-
this.handleRPN(channelNumber);
|
|
1779
|
+
this.handleRPN(channelNumber, scheduleTime);
|
|
1827
1780
|
}
|
|
1828
|
-
updateChannelVolume(channel) {
|
|
1829
|
-
const now = this.audioContext.currentTime;
|
|
1781
|
+
updateChannelVolume(channel, scheduleTime) {
|
|
1830
1782
|
const state = channel.state;
|
|
1831
1783
|
const volume = state.volume * state.expression;
|
|
1832
1784
|
const { gainLeft, gainRight } = this.panToGain(state.pan);
|
|
1833
1785
|
channel.gainL.gain
|
|
1834
|
-
.cancelScheduledValues(
|
|
1835
|
-
.setValueAtTime(volume * gainLeft,
|
|
1786
|
+
.cancelScheduledValues(scheduleTime)
|
|
1787
|
+
.setValueAtTime(volume * gainLeft, scheduleTime);
|
|
1836
1788
|
channel.gainR.gain
|
|
1837
|
-
.cancelScheduledValues(
|
|
1838
|
-
.setValueAtTime(volume * gainRight,
|
|
1789
|
+
.cancelScheduledValues(scheduleTime)
|
|
1790
|
+
.setValueAtTime(volume * gainRight, scheduleTime);
|
|
1839
1791
|
}
|
|
1840
|
-
setSustainPedal(channelNumber, value) {
|
|
1841
|
-
this.
|
|
1842
|
-
|
|
1843
|
-
|
|
1792
|
+
setSustainPedal(channelNumber, value, scheduleTime) {
|
|
1793
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1794
|
+
const channel = this.channels[channelNumber];
|
|
1795
|
+
channel.state.sustainPedal = value / 127;
|
|
1796
|
+
if (64 <= value) {
|
|
1797
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1798
|
+
channel.sustainNotes.push(note);
|
|
1799
|
+
});
|
|
1800
|
+
}
|
|
1801
|
+
else {
|
|
1802
|
+
this.releaseSustainPedal(channelNumber, value, scheduleTime);
|
|
1844
1803
|
}
|
|
1845
1804
|
}
|
|
1846
1805
|
setPortamento(channelNumber, value) {
|
|
1847
1806
|
this.channels[channelNumber].state.portamento = value / 127;
|
|
1848
1807
|
}
|
|
1849
|
-
setSostenutoPedal(channelNumber, value) {
|
|
1808
|
+
setSostenutoPedal(channelNumber, value, scheduleTime) {
|
|
1809
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1850
1810
|
const channel = this.channels[channelNumber];
|
|
1851
1811
|
channel.state.sostenutoPedal = value / 127;
|
|
1852
1812
|
if (64 <= value) {
|
|
1853
|
-
|
|
1854
|
-
channel.sostenutoNotes = this.getActiveNotes(channel, now);
|
|
1813
|
+
channel.sostenutoNotes = this.getActiveNotes(channel, scheduleTime);
|
|
1855
1814
|
}
|
|
1856
1815
|
else {
|
|
1857
|
-
this.releaseSostenutoPedal(channelNumber, value);
|
|
1816
|
+
this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
|
|
1858
1817
|
}
|
|
1859
1818
|
}
|
|
1860
|
-
setSoftPedal(channelNumber, softPedal) {
|
|
1819
|
+
setSoftPedal(channelNumber, softPedal, _scheduleTime) {
|
|
1861
1820
|
const channel = this.channels[channelNumber];
|
|
1862
1821
|
channel.state.softPedal = softPedal / 127;
|
|
1863
1822
|
}
|
|
1864
|
-
setReverbSendLevel(channelNumber, reverbSendLevel) {
|
|
1823
|
+
setReverbSendLevel(channelNumber, reverbSendLevel, scheduleTime) {
|
|
1824
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1865
1825
|
const channel = this.channels[channelNumber];
|
|
1866
1826
|
const state = channel.state;
|
|
1867
1827
|
const reverbEffect = this.reverbEffect;
|
|
1868
1828
|
if (0 < state.reverbSendLevel) {
|
|
1869
1829
|
if (0 < reverbSendLevel) {
|
|
1870
|
-
const now = this.audioContext.currentTime;
|
|
1871
1830
|
state.reverbSendLevel = reverbSendLevel / 127;
|
|
1872
|
-
reverbEffect.input.gain
|
|
1873
|
-
|
|
1831
|
+
reverbEffect.input.gain
|
|
1832
|
+
.cancelScheduledValues(scheduleTime)
|
|
1833
|
+
.setValueAtTime(state.reverbSendLevel, scheduleTime);
|
|
1874
1834
|
}
|
|
1875
1835
|
else {
|
|
1876
|
-
|
|
1877
|
-
|
|
1878
|
-
|
|
1879
|
-
|
|
1880
|
-
continue;
|
|
1881
|
-
if (note.voiceParams.reverbEffectsSend <= 0)
|
|
1882
|
-
continue;
|
|
1883
|
-
note.reverbEffectsSend.disconnect();
|
|
1884
|
-
}
|
|
1836
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1837
|
+
if (note.voiceParams.reverbEffectsSend <= 0)
|
|
1838
|
+
return false;
|
|
1839
|
+
note.reverbEffectsSend.disconnect();
|
|
1885
1840
|
});
|
|
1886
1841
|
}
|
|
1887
1842
|
}
|
|
1888
1843
|
else {
|
|
1889
1844
|
if (0 < reverbSendLevel) {
|
|
1890
|
-
|
|
1891
|
-
|
|
1892
|
-
for (let i = 0; i < noteList.length; i++) {
|
|
1893
|
-
const note = noteList[i];
|
|
1894
|
-
if (!note)
|
|
1895
|
-
continue;
|
|
1896
|
-
this.setReverbEffectsSend(channel, note, 0);
|
|
1897
|
-
}
|
|
1845
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1846
|
+
this.setReverbEffectsSend(channel, note, 0, scheduleTime);
|
|
1898
1847
|
});
|
|
1899
1848
|
state.reverbSendLevel = reverbSendLevel / 127;
|
|
1900
|
-
reverbEffect.input.gain
|
|
1901
|
-
|
|
1849
|
+
reverbEffect.input.gain
|
|
1850
|
+
.cancelScheduledValues(scheduleTime)
|
|
1851
|
+
.setValueAtTime(state.reverbSendLevel, scheduleTime);
|
|
1902
1852
|
}
|
|
1903
1853
|
}
|
|
1904
1854
|
}
|
|
1905
|
-
setChorusSendLevel(channelNumber, chorusSendLevel) {
|
|
1855
|
+
setChorusSendLevel(channelNumber, chorusSendLevel, scheduleTime) {
|
|
1856
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1906
1857
|
const channel = this.channels[channelNumber];
|
|
1907
1858
|
const state = channel.state;
|
|
1908
1859
|
const chorusEffect = this.chorusEffect;
|
|
1909
1860
|
if (0 < state.chorusSendLevel) {
|
|
1910
1861
|
if (0 < chorusSendLevel) {
|
|
1911
|
-
const now = this.audioContext.currentTime;
|
|
1912
1862
|
state.chorusSendLevel = chorusSendLevel / 127;
|
|
1913
|
-
chorusEffect.input.gain
|
|
1914
|
-
|
|
1863
|
+
chorusEffect.input.gain
|
|
1864
|
+
.cancelScheduledValues(scheduleTime)
|
|
1865
|
+
.setValueAtTime(state.chorusSendLevel, scheduleTime);
|
|
1915
1866
|
}
|
|
1916
1867
|
else {
|
|
1917
|
-
|
|
1918
|
-
|
|
1919
|
-
|
|
1920
|
-
|
|
1921
|
-
continue;
|
|
1922
|
-
if (note.voiceParams.chorusEffectsSend <= 0)
|
|
1923
|
-
continue;
|
|
1924
|
-
note.chorusEffectsSend.disconnect();
|
|
1925
|
-
}
|
|
1868
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1869
|
+
if (note.voiceParams.chorusEffectsSend <= 0)
|
|
1870
|
+
return false;
|
|
1871
|
+
note.chorusEffectsSend.disconnect();
|
|
1926
1872
|
});
|
|
1927
1873
|
}
|
|
1928
1874
|
}
|
|
1929
1875
|
else {
|
|
1930
1876
|
if (0 < chorusSendLevel) {
|
|
1931
|
-
|
|
1932
|
-
|
|
1933
|
-
for (let i = 0; i < noteList.length; i++) {
|
|
1934
|
-
const note = noteList[i];
|
|
1935
|
-
if (!note)
|
|
1936
|
-
continue;
|
|
1937
|
-
this.setChorusEffectsSend(channel, note, 0);
|
|
1938
|
-
}
|
|
1877
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1878
|
+
this.setChorusEffectsSend(channel, note, 0, scheduleTime);
|
|
1939
1879
|
});
|
|
1940
1880
|
state.chorusSendLevel = chorusSendLevel / 127;
|
|
1941
|
-
chorusEffect.input.gain
|
|
1942
|
-
|
|
1881
|
+
chorusEffect.input.gain
|
|
1882
|
+
.cancelScheduledValues(scheduleTime)
|
|
1883
|
+
.setValueAtTime(state.chorusSendLevel, scheduleTime);
|
|
1943
1884
|
}
|
|
1944
1885
|
}
|
|
1945
1886
|
}
|
|
@@ -1969,21 +1910,21 @@ export class MidyGM2 {
|
|
|
1969
1910
|
channel.dataMSB = minMSB;
|
|
1970
1911
|
}
|
|
1971
1912
|
}
|
|
1972
|
-
handleRPN(channelNumber) {
|
|
1913
|
+
handleRPN(channelNumber, scheduleTime) {
|
|
1973
1914
|
const channel = this.channels[channelNumber];
|
|
1974
1915
|
const rpn = channel.rpnMSB * 128 + channel.rpnLSB;
|
|
1975
1916
|
switch (rpn) {
|
|
1976
1917
|
case 0:
|
|
1977
|
-
this.handlePitchBendRangeRPN(channelNumber);
|
|
1918
|
+
this.handlePitchBendRangeRPN(channelNumber, scheduleTime);
|
|
1978
1919
|
break;
|
|
1979
1920
|
case 1:
|
|
1980
|
-
this.handleFineTuningRPN(channelNumber);
|
|
1921
|
+
this.handleFineTuningRPN(channelNumber, scheduleTime);
|
|
1981
1922
|
break;
|
|
1982
1923
|
case 2:
|
|
1983
|
-
this.handleCoarseTuningRPN(channelNumber);
|
|
1924
|
+
this.handleCoarseTuningRPN(channelNumber, scheduleTime);
|
|
1984
1925
|
break;
|
|
1985
1926
|
case 5:
|
|
1986
|
-
this.handleModulationDepthRangeRPN(channelNumber);
|
|
1927
|
+
this.handleModulationDepthRangeRPN(channelNumber, scheduleTime);
|
|
1987
1928
|
break;
|
|
1988
1929
|
default:
|
|
1989
1930
|
console.warn(`Channel ${channelNumber}: Unsupported RPN MSB=${channel.rpnMSB} LSB=${channel.rpnLSB}`);
|
|
@@ -1995,67 +1936,72 @@ export class MidyGM2 {
|
|
|
1995
1936
|
setRPNLSB(channelNumber, value) {
|
|
1996
1937
|
this.channels[channelNumber].rpnLSB = value;
|
|
1997
1938
|
}
|
|
1998
|
-
dataEntryMSB(channelNumber, value) {
|
|
1939
|
+
dataEntryMSB(channelNumber, value, scheduleTime) {
|
|
1999
1940
|
this.channels[channelNumber].dataMSB = value;
|
|
2000
|
-
this.handleRPN(channelNumber);
|
|
1941
|
+
this.handleRPN(channelNumber, scheduleTime);
|
|
2001
1942
|
}
|
|
2002
|
-
handlePitchBendRangeRPN(channelNumber) {
|
|
1943
|
+
handlePitchBendRangeRPN(channelNumber, scheduleTime) {
|
|
2003
1944
|
const channel = this.channels[channelNumber];
|
|
2004
1945
|
this.limitData(channel, 0, 127, 0, 99);
|
|
2005
1946
|
const pitchBendRange = channel.dataMSB + channel.dataLSB / 100;
|
|
2006
|
-
this.setPitchBendRange(channelNumber, pitchBendRange);
|
|
1947
|
+
this.setPitchBendRange(channelNumber, pitchBendRange, scheduleTime);
|
|
2007
1948
|
}
|
|
2008
|
-
setPitchBendRange(channelNumber, value) {
|
|
1949
|
+
setPitchBendRange(channelNumber, value, scheduleTime) {
|
|
1950
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
2009
1951
|
const channel = this.channels[channelNumber];
|
|
2010
1952
|
const state = channel.state;
|
|
2011
         const prev = state.pitchWheelSensitivity;
         const next = value / 128;
         state.pitchWheelSensitivity = next;
         channel.detune += (state.pitchWheel * 2 - 1) * (next - prev) * 12800;
-        this.updateChannelDetune(channel);
-        this.applyVoiceParams(channel, 16);
+        this.updateChannelDetune(channel, scheduleTime);
+        this.applyVoiceParams(channel, 16, scheduleTime);
     }
-    handleFineTuningRPN(channelNumber) {
+    handleFineTuningRPN(channelNumber, scheduleTime) {
         const channel = this.channels[channelNumber];
         this.limitData(channel, 0, 127, 0, 127);
         const fineTuning = channel.dataMSB * 128 + channel.dataLSB;
-        this.setFineTuning(channelNumber, fineTuning);
+        this.setFineTuning(channelNumber, fineTuning, scheduleTime);
     }
-    setFineTuning(channelNumber, value) {
+    setFineTuning(channelNumber, value, scheduleTime) {
+        scheduleTime ??= this.audioContext.currentTime;
         const channel = this.channels[channelNumber];
         const prev = channel.fineTuning;
         const next = (value - 8192) / 8.192; // cent
         channel.fineTuning = next;
         channel.detune += next - prev;
-        this.updateChannelDetune(channel);
+        this.updateChannelDetune(channel, scheduleTime);
     }
-    handleCoarseTuningRPN(channelNumber) {
+    handleCoarseTuningRPN(channelNumber, scheduleTime) {
         const channel = this.channels[channelNumber];
         this.limitDataMSB(channel, 0, 127);
         const coarseTuning = channel.dataMSB;
-        this.setCoarseTuning(channelNumber, coarseTuning);
+        this.setCoarseTuning(channelNumber, coarseTuning, scheduleTime);
     }
-    setCoarseTuning(channelNumber, value) {
+    setCoarseTuning(channelNumber, value, scheduleTime) {
+        scheduleTime ??= this.audioContext.currentTime;
         const channel = this.channels[channelNumber];
         const prev = channel.coarseTuning;
         const next = (value - 64) * 100; // cent
         channel.coarseTuning = next;
         channel.detune += next - prev;
-        this.updateChannelDetune(channel);
+        this.updateChannelDetune(channel, scheduleTime);
     }
-    handleModulationDepthRangeRPN(channelNumber) {
+    handleModulationDepthRangeRPN(channelNumber, scheduleTime) {
         const channel = this.channels[channelNumber];
         this.limitData(channel, 0, 127, 0, 127);
         const modulationDepthRange = (dataMSB + dataLSB / 128) * 100;
-        this.setModulationDepthRange(channelNumber, modulationDepthRange);
+        this.setModulationDepthRange(channelNumber, modulationDepthRange, scheduleTime);
     }
-    setModulationDepthRange(channelNumber, modulationDepthRange) {
+    setModulationDepthRange(channelNumber, modulationDepthRange, scheduleTime) {
+        scheduleTime ??= this.audioContext.currentTime;
         const channel = this.channels[channelNumber];
         channel.modulationDepthRange = modulationDepthRange;
-        this.updateModulation(channel);
+        this.updateModulation(channel, scheduleTime);
     }
-    allSoundOff(channelNumber) {
-
+    allSoundOff(channelNumber, _value, scheduleTime) {
+        scheduleTime ??= this.audioContext.currentTime;
+        return this.stopChannelNotes(channelNumber, 0, true, scheduleTime);
     }
     resetAllControllers(channelNumber) {
         const stateTypes = [
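
Throughout this hunk the setters gain an optional scheduleTime argument and default it to the current AudioContext time, so RPN changes can be queued ahead of playback instead of taking effect only "now". A minimal standalone sketch of that pattern, using a plain OscillatorNode rather than the package's internal voice graph (the class and node here are illustrative, not part of midy's API):

class ScheduledDetune {
  constructor(audioContext, oscillator) {
    this.audioContext = audioContext;
    this.oscillator = oscillator; // any node with a detune AudioParam
  }

  // Apply a detune (in cents) immediately or at a future audio-clock time.
  setDetune(cents, scheduleTime) {
    scheduleTime ??= this.audioContext.currentTime; // same defaulting as above
    this.oscillator.detune
      .cancelScheduledValues(scheduleTime)
      .setValueAtTime(cents, scheduleTime);
  }
}

// Usage: queue a +50 cent detune half a second ahead.
// const ctx = new AudioContext();
// const osc = new OscillatorNode(ctx);
// new ScheduledDetune(ctx, osc).setDetune(50, ctx.currentTime + 0.5);
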
@@ -2083,28 +2029,33 @@ export class MidyGM2 {
             channel[type] = this.constructor.channelSettings[type];
         }
     }
-    allNotesOff(channelNumber) {
-
+    allNotesOff(channelNumber, _value, scheduleTime) {
+        scheduleTime ??= this.audioContext.currentTime;
+        return this.stopChannelNotes(channelNumber, 0, false, scheduleTime);
     }
-    omniOff() {
-        this.
+    omniOff(channelNumber, value, scheduleTime) {
+        this.allNotesOff(channelNumber, value, scheduleTime);
     }
-    omniOn() {
-        this.
+    omniOn(channelNumber, value, scheduleTime) {
+        this.allNotesOff(channelNumber, value, scheduleTime);
     }
-    monoOn() {
-
+    monoOn(channelNumber, value, scheduleTime) {
+        const channel = this.channels[channelNumber];
+        this.allNotesOff(channelNumber, value, scheduleTime);
+        channel.mono = true;
     }
-    polyOn() {
-
+    polyOn(channelNumber, value, scheduleTime) {
+        const channel = this.channels[channelNumber];
+        this.allNotesOff(channelNumber, value, scheduleTime);
+        channel.mono = false;
     }
-    handleUniversalNonRealTimeExclusiveMessage(data) {
+    handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime) {
         switch (data[2]) {
             case 8:
                 switch (data[3]) {
                     case 8:
                         // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-                        return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false);
+                        return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false, scheduleTime);
                     default:
                         console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
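
omniOff, omniOn, monoOn, and polyOn now take a channel number and act per channel (the mono flag moves into the per-channel defaults later in this diff), matching the MIDI convention that the mode messages CC#124-127 also imply All Notes Off. A sketch of how such a controller number might be routed to these methods; the dispatcher itself is illustrative, only the method names and signatures come from the code above:

// Hypothetical routing of MIDI channel-mode controllers (CC#120, 123-127).
function routeChannelMode(synth, channelNumber, controllerType, value, scheduleTime) {
  switch (controllerType) {
    case 120: return synth.allSoundOff(channelNumber, value, scheduleTime);
    case 123: return synth.allNotesOff(channelNumber, value, scheduleTime);
    case 124: return synth.omniOff(channelNumber, value, scheduleTime);
    case 125: return synth.omniOn(channelNumber, value, scheduleTime);
    case 126: return synth.monoOn(channelNumber, value, scheduleTime);
    case 127: return synth.polyOn(channelNumber, value, scheduleTime);
    default: return undefined; // other controllers handled elsewhere
  }
}
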
@@ -2147,18 +2098,18 @@ export class MidyGM2 {
         this.channels[9].bankMSB = 120;
         this.channels[9].bank = 120 * 128;
     }
-    handleUniversalRealTimeExclusiveMessage(data) {
+    handleUniversalRealTimeExclusiveMessage(data, scheduleTime) {
         switch (data[2]) {
             case 4:
                 switch (data[3]) {
                     case 1:
-                        return this.handleMasterVolumeSysEx(data);
+                        return this.handleMasterVolumeSysEx(data, scheduleTime);
                     case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
-                        return this.handleMasterFineTuningSysEx(data);
+                        return this.handleMasterFineTuningSysEx(data, scheduleTime);
                     case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
-                        return this.handleMasterCoarseTuningSysEx(data);
+                        return this.handleMasterCoarseTuningSysEx(data, scheduleTime);
                     case 5: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca24.pdf
-                        return this.handleGlobalParameterControlSysEx(data);
+                        return this.handleGlobalParameterControlSysEx(data, scheduleTime);
                     default:
                         console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
@@ -2176,7 +2127,7 @@ export class MidyGM2 {
             case 10:
                 switch (data[3]) {
                     case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca23.pdf
-                        return this.handleKeyBasedInstrumentControlSysEx(data);
+                        return this.handleKeyBasedInstrumentControlSysEx(data, scheduleTime);
                     default:
                         console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
@@ -2185,49 +2136,50 @@ export class MidyGM2 {
             console.warn(`Unsupported Exclusive Message: ${data}`);
         }
     }
-    handleMasterVolumeSysEx(data) {
+    handleMasterVolumeSysEx(data, scheduleTime) {
         const volume = (data[5] * 128 + data[4]) / 16383;
-        this.setMasterVolume(volume);
+        this.setMasterVolume(volume, scheduleTime);
     }
-    setMasterVolume(volume) {
+    setMasterVolume(volume, scheduleTime) {
+        scheduleTime ??= this.audioContext.currentTime;
         if (volume < 0 && 1 < volume) {
             console.error("Master Volume is out of range");
         }
         else {
-
-
-
+            this.masterVolume.gain
+                .cancelScheduledValues(scheduleTime)
+                .setValueAtTime(volume * volume, scheduleTime);
         }
     }
-    handleMasterFineTuningSysEx(data) {
+    handleMasterFineTuningSysEx(data, scheduleTime) {
         const fineTuning = data[5] * 128 + data[4];
-        this.setMasterFineTuning(fineTuning);
+        this.setMasterFineTuning(fineTuning, scheduleTime);
     }
-    setMasterFineTuning(value) {
+    setMasterFineTuning(value, scheduleTime) {
         const prev = this.masterFineTuning;
         const next = (value - 8192) / 8.192; // cent
         this.masterFineTuning = next;
         channel.detune += next - prev;
-        this.updateChannelDetune(channel);
+        this.updateChannelDetune(channel, scheduleTime);
     }
-    handleMasterCoarseTuningSysEx(data) {
+    handleMasterCoarseTuningSysEx(data, scheduleTime) {
         const coarseTuning = data[4];
-        this.setMasterCoarseTuning(coarseTuning);
+        this.setMasterCoarseTuning(coarseTuning, scheduleTime);
     }
-    setMasterCoarseTuning(value) {
+    setMasterCoarseTuning(value, scheduleTime) {
         const prev = this.masterCoarseTuning;
         const next = (value - 64) * 100; // cent
         this.masterCoarseTuning = next;
         channel.detune += next - prev;
-        this.updateChannelDetune(channel);
+        this.updateChannelDetune(channel, scheduleTime);
     }
-    handleGlobalParameterControlSysEx(data) {
+    handleGlobalParameterControlSysEx(data, scheduleTime) {
         if (data[7] === 1) {
             switch (data[8]) {
                 case 1:
                     return this.handleReverbParameterSysEx(data);
                 case 2:
-                    return this.handleChorusParameterSysEx(data);
+                    return this.handleChorusParameterSysEx(data, scheduleTime);
                 default:
                     console.warn(`Unsupported Global Parameter Control Message: ${data}`);
             }
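
handleMasterVolumeSysEx reads the GM Master Volume message (Universal Real Time, sub-IDs 04 01), whose 14-bit value arrives LSB-first in data[4] and data[5], and setMasterVolume squares the normalized value before scheduling it on the master gain. A self-contained sketch of the same decoding against a bare GainNode; the node wiring and function name are illustrative, only the arithmetic mirrors the code above:

// data: SysEx body without the framing 0xF0/0xF7,
// e.g. [0x7F, deviceId, 0x04, 0x01, lsb, msb] for Master Volume.
function applyMasterVolume(data, masterGain, audioContext, scheduleTime) {
  scheduleTime ??= audioContext.currentTime;
  const value = data[5] * 128 + data[4]; // 14-bit value, 0..16383
  const volume = value / 16383;          // normalized 0..1
  masterGain.gain
    .cancelScheduledValues(scheduleTime)
    .setValueAtTime(volume * volume, scheduleTime); // squared, as in setMasterVolume above
}

// Usage:
// const ctx = new AudioContext();
// const master = new GainNode(ctx);
// applyMasterVolume([0x7F, 0x7F, 0x04, 0x01, 0x00, 0x40], master, ctx); // mid-scale volume
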
@@ -2306,88 +2258,84 @@ export class MidyGM2 {
     calcDelay(rt60, feedback) {
         return -rt60 * Math.log10(feedback) / 3;
     }
-    handleChorusParameterSysEx(data) {
+    handleChorusParameterSysEx(data, scheduleTime) {
         switch (data[9]) {
             case 0:
-                return this.setChorusType(data[10]);
+                return this.setChorusType(data[10], scheduleTime);
             case 1:
-                return this.setChorusModRate(data[10]);
+                return this.setChorusModRate(data[10], scheduleTime);
             case 2:
-                return this.setChorusModDepth(data[10]);
+                return this.setChorusModDepth(data[10], scheduleTime);
             case 3:
-                return this.setChorusFeedback(data[10]);
+                return this.setChorusFeedback(data[10], scheduleTime);
             case 4:
-                return this.setChorusSendToReverb(data[10]);
+                return this.setChorusSendToReverb(data[10], scheduleTime);
         }
     }
-    setChorusType(type) {
+    setChorusType(type, scheduleTime) {
         switch (type) {
             case 0:
-                return this.setChorusParameter(3, 5, 0, 0);
+                return this.setChorusParameter(3, 5, 0, 0, scheduleTime);
             case 1:
-                return this.setChorusParameter(9, 19, 5, 0);
+                return this.setChorusParameter(9, 19, 5, 0, scheduleTime);
             case 2:
-                return this.setChorusParameter(3, 19, 8, 0);
+                return this.setChorusParameter(3, 19, 8, 0, scheduleTime);
             case 3:
-                return this.setChorusParameter(9, 16, 16, 0);
+                return this.setChorusParameter(9, 16, 16, 0, scheduleTime);
             case 4:
-                return this.setChorusParameter(2, 24, 64, 0);
+                return this.setChorusParameter(2, 24, 64, 0, scheduleTime);
             case 5:
-                return this.setChorusParameter(1, 5, 112, 0);
+                return this.setChorusParameter(1, 5, 112, 0, scheduleTime);
             default:
                 console.warn(`Unsupported Chorus Type: ${type}`);
         }
     }
-    setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
-        this.setChorusModRate(modRate);
-        this.setChorusModDepth(modDepth);
-        this.setChorusFeedback(feedback);
-        this.setChorusSendToReverb(sendToReverb);
+    setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime) {
+        this.setChorusModRate(modRate, scheduleTime);
+        this.setChorusModDepth(modDepth, scheduleTime);
+        this.setChorusFeedback(feedback, scheduleTime);
+        this.setChorusSendToReverb(sendToReverb, scheduleTime);
     }
-    setChorusModRate(value) {
-        const now = this.audioContext.currentTime;
+    setChorusModRate(value, scheduleTime) {
         const modRate = this.getChorusModRate(value);
         this.chorus.modRate = modRate;
-        this.chorusEffect.lfo.frequency.setValueAtTime(modRate,
+        this.chorusEffect.lfo.frequency.setValueAtTime(modRate, scheduleTime);
     }
     getChorusModRate(value) {
         return value * 0.122; // Hz
     }
-    setChorusModDepth(value) {
-        const now = this.audioContext.currentTime;
+    setChorusModDepth(value, scheduleTime) {
         const modDepth = this.getChorusModDepth(value);
         this.chorus.modDepth = modDepth;
         this.chorusEffect.lfoGain.gain
-            .cancelScheduledValues(
-            .setValueAtTime(modDepth / 2,
+            .cancelScheduledValues(scheduleTime)
+            .setValueAtTime(modDepth / 2, scheduleTime);
     }
     getChorusModDepth(value) {
         return (value + 1) / 3200; // second
     }
-    setChorusFeedback(value) {
-        const now = this.audioContext.currentTime;
+    setChorusFeedback(value, scheduleTime) {
         const feedback = this.getChorusFeedback(value);
         this.chorus.feedback = feedback;
         const chorusEffect = this.chorusEffect;
         for (let i = 0; i < chorusEffect.feedbackGains.length; i++) {
             chorusEffect.feedbackGains[i].gain
-                .cancelScheduledValues(
-                .setValueAtTime(feedback,
+                .cancelScheduledValues(scheduleTime)
+                .setValueAtTime(feedback, scheduleTime);
         }
     }
     getChorusFeedback(value) {
         return value * 0.00763;
     }
-    setChorusSendToReverb(value) {
+    setChorusSendToReverb(value, scheduleTime) {
         const sendToReverb = this.getChorusSendToReverb(value);
         const sendGain = this.chorusEffect.sendGain;
         if (0 < this.chorus.sendToReverb) {
             this.chorus.sendToReverb = sendToReverb;
             if (0 < sendToReverb) {
-                const now = this.audioContext.currentTime;
                 sendGain.gain
-                    .cancelScheduledValues(
-                    .setValueAtTime(sendToReverb,
+                    .cancelScheduledValues(scheduleTime)
+                    .setValueAtTime(sendToReverb, scheduleTime);
             }
             else {
                 sendGain.disconnect();
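
The GM2 chorus presets above are stored as raw 0-127 parameter bytes and converted to physical units by getChorusModRate, getChorusModDepth, and getChorusFeedback. A small sketch that resolves a chorus type to concrete values; the preset table and scale factors are copied from the code above, while the helper name is illustrative and the sendToReverb byte is left raw because its conversion is not shown in this hunk:

// (modRate, modDepth, feedback, sendToReverb) bytes per GM2 chorus type, as in setChorusType.
const chorusPresets = [
  [3, 5, 0, 0],
  [9, 19, 5, 0],
  [3, 19, 8, 0],
  [9, 16, 16, 0],
  [2, 24, 64, 0],
  [1, 5, 112, 0],
];

function resolveChorusType(type) {
  const preset = chorusPresets[type];
  if (!preset) return undefined;
  const [modRate, modDepth, feedback, sendToReverb] = preset;
  return {
    modRate: modRate * 0.122,        // Hz
    modDepth: (modDepth + 1) / 3200, // seconds
    feedback: feedback * 0.00763,    // linear gain
    sendToReverb,                    // raw byte; conversion handled elsewhere
  };
}

// Example: resolveChorusType(5) gives roughly 0.122 Hz rate, 1.875 ms depth, 0.854 feedback.
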
@@ -2396,11 +2344,10 @@ export class MidyGM2 {
         else {
             this.chorus.sendToReverb = sendToReverb;
             if (0 < sendToReverb) {
-                const now = this.audioContext.currentTime;
                 sendGain.connect(this.reverbEffect.input);
                 sendGain.gain
-                    .cancelScheduledValues(
-                    .setValueAtTime(sendToReverb,
+                    .cancelScheduledValues(scheduleTime)
+                    .setValueAtTime(sendToReverb, scheduleTime);
             }
         }
     }
@@ -2426,7 +2373,7 @@ export class MidyGM2 {
         }
         return bitmap;
     }
-    handleScaleOctaveTuning1ByteFormatSysEx(data, realtime) {
+    handleScaleOctaveTuning1ByteFormatSysEx(data, realtime, scheduleTime) {
         if (data.length < 19) {
             console.error("Data length is too short");
             return;
@@ -2441,7 +2388,7 @@ export class MidyGM2 {
                 channel.scaleOctaveTuningTable[j] = centValue;
             }
             if (realtime)
-                this.updateChannelDetune(channel);
+                this.updateChannelDetune(channel, scheduleTime);
         }
     }
     getFilterCutoffControl(channel) {
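
handleScaleOctaveTuning1ByteFormatSysEx fills a per-channel table with one cent offset per pitch class. Given the length check (data.length >= 19), the message appears to carry a channel bitmap followed by 12 tuning bytes, and in the 1-byte format each byte encodes -64..+63 cents around a center of 64. A sketch of that decoding under those assumptions; the byte offsets and helper name are illustrative, not taken from the package:

// Decode the 12 pitch-class offsets of a 1-byte scale/octave tuning message.
// data: SysEx body without 0xF0/0xF7; bytes 7..18 assumed to hold the C..B offsets.
function decodeOctaveTuning(data) {
  const table = new Int8Array(12);
  for (let i = 0; i < 12; i++) {
    table[i] = data[7 + i] - 64; // cents, range -64..+63
  }
  return table;
}

// A note's extra detune is then just the entry for its pitch class:
// const cents = table[noteNumber % 12];
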
@@ -2485,7 +2432,7 @@ export class MidyGM2 {
         if (table[5] !== 0)
             this.setModLfoToVolume(channel, note);
     }
-
+    handlePressureSysEx(data, tableName) {
         const channelNumber = data[4];
         const table = this.channels[channelNumber][tableName];
         for (let i = 5; i < data.length - 1; i += 2) {
|
|
|
2509
2456
|
const slotSize = 6;
|
|
2510
2457
|
const offset = controllerType * slotSize;
|
|
2511
2458
|
const table = channel.controlTable.subarray(offset, offset + slotSize);
|
|
2512
|
-
|
|
2513
|
-
|
|
2514
|
-
const note = noteList[i];
|
|
2515
|
-
if (!note)
|
|
2516
|
-
continue;
|
|
2517
|
-
this.setControllerParameters(channel, note, table);
|
|
2518
|
-
}
|
|
2459
|
+
this.processScheduledNotes(channel, (note) => {
|
|
2460
|
+
this.setControllerParameters(channel, note, table);
|
|
2519
2461
|
});
|
|
2520
2462
|
}
|
|
2521
2463
|
handleControlChangeSysEx(data) {
|
|
@@ -2533,7 +2475,7 @@ export class MidyGM2 {
|
|
|
2533
2475
|
const controlValue = channel.keyBasedInstrumentControlTable[index];
|
|
2534
2476
|
return (controlValue + 64) / 64;
|
|
2535
2477
|
}
|
|
2536
|
-
handleKeyBasedInstrumentControlSysEx(data) {
|
|
2478
|
+
handleKeyBasedInstrumentControlSysEx(data, scheduleTime) {
|
|
2537
2479
|
const channelNumber = data[4];
|
|
2538
2480
|
const keyNumber = data[5];
|
|
2539
2481
|
const table = this.channels[channelNumber].keyBasedInstrumentControlTable;
|
|
@@ -2543,30 +2485,27 @@ export class MidyGM2 {
|
|
|
2543
2485
|
const index = keyNumber * 128 + controllerType;
|
|
2544
2486
|
table[index] = value - 64;
|
|
2545
2487
|
}
|
|
2546
|
-
this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127);
|
|
2488
|
+
this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127, scheduleTime);
|
|
2547
2489
|
}
|
|
2548
|
-
|
|
2549
|
-
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
2550
|
-
}
|
|
2551
|
-
handleSysEx(data) {
|
|
2490
|
+
handleSysEx(data, scheduleTime) {
|
|
2552
2491
|
switch (data[0]) {
|
|
2553
2492
|
case 126:
|
|
2554
|
-
return this.handleUniversalNonRealTimeExclusiveMessage(data);
|
|
2493
|
+
return this.handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime);
|
|
2555
2494
|
case 127:
|
|
2556
|
-
return this.handleUniversalRealTimeExclusiveMessage(data);
|
|
2495
|
+
return this.handleUniversalRealTimeExclusiveMessage(data, scheduleTime);
|
|
2557
2496
|
default:
|
|
2558
|
-
|
|
2497
|
+
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
2559
2498
|
}
|
|
2560
2499
|
}
|
|
2561
|
-
scheduleTask(callback,
|
|
2500
|
+
scheduleTask(callback, scheduleTime) {
|
|
2562
2501
|
return new Promise((resolve) => {
|
|
2563
2502
|
const bufferSource = new AudioBufferSourceNode(this.audioContext);
|
|
2564
2503
|
bufferSource.onended = () => {
|
|
2565
2504
|
callback();
|
|
2566
2505
|
resolve();
|
|
2567
2506
|
};
|
|
2568
|
-
bufferSource.start(
|
|
2569
|
-
bufferSource.stop(
|
|
2507
|
+
bufferSource.start(scheduleTime);
|
|
2508
|
+
bufferSource.stop(scheduleTime);
|
|
2570
2509
|
});
|
|
2571
2510
|
}
|
|
2572
2511
|
}
|
|
@@ -2585,6 +2524,7 @@ Object.defineProperty(MidyGM2, "channelSettings", {
|
|
|
2585
2524
|
dataLSB: 0,
|
|
2586
2525
|
rpnMSB: 127,
|
|
2587
2526
|
rpnLSB: 127,
|
|
2527
|
+
mono: false, // CC#124, CC#125
|
|
2588
2528
|
fineTuning: 0, // cb
|
|
2589
2529
|
coarseTuning: 0, // cb
|
|
2590
2530
|
modulationDepthRange: 50, // cent
|