@marmooo/midy 0.2.6 → 0.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -13
- package/esm/midy-GM1.d.ts +73 -74
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +207 -218
- package/esm/midy-GM2.d.ts +125 -127
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +358 -418
- package/esm/midy-GMLite.d.ts +69 -70
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +195 -207
- package/esm/midy.d.ts +148 -150
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +404 -500
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +73 -74
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +207 -218
- package/script/midy-GM2.d.ts +125 -127
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +358 -418
- package/script/midy-GMLite.d.ts +69 -70
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +195 -207
- package/script/midy.d.ts +148 -150
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +404 -500
package/script/midy.js
CHANGED
|
@@ -294,18 +294,6 @@ class Midy {
|
|
|
294
294
|
delayTimes: this.generateDistributedArray(0.02, 2, 0.5),
|
|
295
295
|
}
|
|
296
296
|
});
|
|
297
|
-
Object.defineProperty(this, "mono", {
|
|
298
|
-
enumerable: true,
|
|
299
|
-
configurable: true,
|
|
300
|
-
writable: true,
|
|
301
|
-
value: false
|
|
302
|
-
}); // CC#124, CC#125
|
|
303
|
-
Object.defineProperty(this, "omni", {
|
|
304
|
-
enumerable: true,
|
|
305
|
-
configurable: true,
|
|
306
|
-
writable: true,
|
|
307
|
-
value: false
|
|
308
|
-
}); // CC#126, CC#127
|
|
309
297
|
Object.defineProperty(this, "noteCheckInterval", {
|
|
310
298
|
enumerable: true,
|
|
311
299
|
configurable: true,
|
|
@@ -507,6 +495,7 @@ class Midy {
|
|
|
507
495
|
controlTable: this.initControlTable(),
|
|
508
496
|
...this.setChannelAudioNodes(audioContext),
|
|
509
497
|
scheduledNotes: new SparseMap(128),
|
|
498
|
+
sustainNotes: [],
|
|
510
499
|
sostenutoNotes: new SparseMap(128),
|
|
511
500
|
scaleOctaveTuningTable: new Float32Array(12), // [-100, 100] cent
|
|
512
501
|
channelPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
|
|
@@ -590,7 +579,8 @@ class Midy {
|
|
|
590
579
|
const portamentoTarget = this.findPortamentoTarget(queueIndex);
|
|
591
580
|
if (portamentoTarget)
|
|
592
581
|
portamentoTarget.portamento = true;
|
|
593
|
-
const notePromise = this.
|
|
582
|
+
const notePromise = this.scheduleNoteOff(event.channel, event.noteNumber, event.velocity, startTime, false, // force
|
|
583
|
+
portamentoTarget?.noteNumber);
|
|
594
584
|
if (notePromise) {
|
|
595
585
|
this.notePromises.push(notePromise);
|
|
596
586
|
}
|
|
@@ -600,7 +590,7 @@ class Midy {
|
|
|
600
590
|
this.handlePolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
|
|
601
591
|
break;
|
|
602
592
|
case "controller":
|
|
603
|
-
this.handleControlChange(
|
|
593
|
+
this.handleControlChange(event.channel, event.controllerType, event.value, startTime);
|
|
604
594
|
break;
|
|
605
595
|
case "programChange":
|
|
606
596
|
this.handleProgramChange(event.channel, event.programNumber, startTime);
|
|
@@ -643,10 +633,11 @@ class Midy {
|
|
|
643
633
|
resolve();
|
|
644
634
|
return;
|
|
645
635
|
}
|
|
646
|
-
const
|
|
636
|
+
const now = this.audioContext.currentTime;
|
|
637
|
+
const t = now + offset;
|
|
647
638
|
queueIndex = await this.scheduleTimelineEvents(t, offset, queueIndex);
|
|
648
639
|
if (this.isPausing) {
|
|
649
|
-
await this.stopNotes(0, true);
|
|
640
|
+
await this.stopNotes(0, true, now);
|
|
650
641
|
this.notePromises = [];
|
|
651
642
|
resolve();
|
|
652
643
|
this.isPausing = false;
|
|
@@ -654,7 +645,7 @@ class Midy {
|
|
|
654
645
|
return;
|
|
655
646
|
}
|
|
656
647
|
else if (this.isStopping) {
|
|
657
|
-
await this.stopNotes(0, true);
|
|
648
|
+
await this.stopNotes(0, true, now);
|
|
658
649
|
this.notePromises = [];
|
|
659
650
|
this.exclusiveClassMap.clear();
|
|
660
651
|
this.audioBufferCache.clear();
|
|
@@ -664,7 +655,7 @@ class Midy {
|
|
|
664
655
|
return;
|
|
665
656
|
}
|
|
666
657
|
else if (this.isSeeking) {
|
|
667
|
-
this.stopNotes(0, true);
|
|
658
|
+
this.stopNotes(0, true, now);
|
|
668
659
|
this.exclusiveClassMap.clear();
|
|
669
660
|
this.startTime = this.audioContext.currentTime;
|
|
670
661
|
queueIndex = this.getQueueIndex(this.resumeTime);
|
|
@@ -673,7 +664,6 @@ class Midy {
|
|
|
673
664
|
await schedulePlayback();
|
|
674
665
|
}
|
|
675
666
|
else {
|
|
676
|
-
const now = this.audioContext.currentTime;
|
|
677
667
|
const waitTime = now + this.noteCheckInterval;
|
|
678
668
|
await this.scheduleTask(() => { }, waitTime);
|
|
679
669
|
await schedulePlayback();
|
|
@@ -793,25 +783,21 @@ class Midy {
|
|
|
793
783
|
}
|
|
794
784
|
return { instruments, timeline };
|
|
795
785
|
}
|
|
796
|
-
|
|
797
|
-
const now = this.audioContext.currentTime;
|
|
786
|
+
stopChannelNotes(channelNumber, velocity, force, scheduleTime) {
|
|
798
787
|
const channel = this.channels[channelNumber];
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
const promise = this.scheduleNoteRelease(channelNumber, note.noteNumber, velocity, now, undefined, // portamentoNoteNumber
|
|
805
|
-
force);
|
|
806
|
-
this.notePromises.push(promise);
|
|
807
|
-
}
|
|
788
|
+
const promises = [];
|
|
789
|
+
this.processScheduledNotes(channel, (note) => {
|
|
790
|
+
const promise = this.scheduleNoteOff(channelNumber, note.noteNumber, velocity, scheduleTime, force, undefined);
|
|
791
|
+
this.notePromises.push(promise);
|
|
792
|
+
promises.push(promise);
|
|
808
793
|
});
|
|
809
794
|
channel.scheduledNotes.clear();
|
|
810
|
-
|
|
795
|
+
return Promise.all(promises);
|
|
811
796
|
}
|
|
812
|
-
stopNotes(velocity, force) {
|
|
797
|
+
stopNotes(velocity, force, scheduleTime) {
|
|
798
|
+
const promises = [];
|
|
813
799
|
for (let i = 0; i < this.channels.length; i++) {
|
|
814
|
-
this.stopChannelNotes(i, velocity, force);
|
|
800
|
+
promises.push(this.stopChannelNotes(i, velocity, force, scheduleTime));
|
|
815
801
|
}
|
|
816
802
|
return Promise.all(this.notePromises);
|
|
817
803
|
}
|
|
@@ -859,34 +845,32 @@ class Midy {
|
|
|
859
845
|
const now = this.audioContext.currentTime;
|
|
860
846
|
return this.resumeTime + now - this.startTime - this.startDelay;
|
|
861
847
|
}
|
|
862
|
-
processScheduledNotes(channel,
|
|
848
|
+
processScheduledNotes(channel, callback) {
|
|
863
849
|
channel.scheduledNotes.forEach((noteList) => {
|
|
864
850
|
for (let i = 0; i < noteList.length; i++) {
|
|
865
851
|
const note = noteList[i];
|
|
866
852
|
if (!note)
|
|
867
853
|
continue;
|
|
868
|
-
if (scheduleTime < note.startTime)
|
|
869
|
-
continue;
|
|
870
854
|
callback(note);
|
|
871
855
|
}
|
|
872
856
|
});
|
|
873
857
|
}
|
|
874
|
-
getActiveNotes(channel,
|
|
858
|
+
getActiveNotes(channel, scheduleTime) {
|
|
875
859
|
const activeNotes = new SparseMap(128);
|
|
876
860
|
channel.scheduledNotes.forEach((noteList) => {
|
|
877
|
-
const activeNote = this.getActiveNote(noteList,
|
|
861
|
+
const activeNote = this.getActiveNote(noteList, scheduleTime);
|
|
878
862
|
if (activeNote) {
|
|
879
863
|
activeNotes.set(activeNote.noteNumber, activeNote);
|
|
880
864
|
}
|
|
881
865
|
});
|
|
882
866
|
return activeNotes;
|
|
883
867
|
}
|
|
884
|
-
getActiveNote(noteList,
|
|
868
|
+
getActiveNote(noteList, scheduleTime) {
|
|
885
869
|
for (let i = noteList.length - 1; i >= 0; i--) {
|
|
886
870
|
const note = noteList[i];
|
|
887
871
|
if (!note)
|
|
888
872
|
return;
|
|
889
|
-
if (
|
|
873
|
+
if (scheduleTime < note.startTime)
|
|
890
874
|
continue;
|
|
891
875
|
return (note.ending) ? null : note;
|
|
892
876
|
}
|
|
@@ -1046,44 +1030,36 @@ class Midy {
|
|
|
1046
1030
|
calcNoteDetune(channel, note) {
|
|
1047
1031
|
return channel.scaleOctaveTuningTable[note.noteNumber % 12];
|
|
1048
1032
|
}
|
|
1049
|
-
updateChannelDetune(channel) {
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
const note = noteList[i];
|
|
1053
|
-
if (!note)
|
|
1054
|
-
continue;
|
|
1055
|
-
this.updateDetune(channel, note);
|
|
1056
|
-
}
|
|
1033
|
+
updateChannelDetune(channel, scheduleTime) {
|
|
1034
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1035
|
+
this.updateDetune(channel, note, scheduleTime);
|
|
1057
1036
|
});
|
|
1058
1037
|
}
|
|
1059
|
-
updateDetune(channel, note) {
|
|
1060
|
-
const now = this.audioContext.currentTime;
|
|
1038
|
+
updateDetune(channel, note, scheduleTime) {
|
|
1061
1039
|
const noteDetune = this.calcNoteDetune(channel, note);
|
|
1062
1040
|
const pitchControl = this.getPitchControl(channel, note);
|
|
1063
1041
|
const detune = channel.detune + noteDetune + pitchControl;
|
|
1064
1042
|
note.bufferSource.detune
|
|
1065
|
-
.cancelScheduledValues(
|
|
1066
|
-
.setValueAtTime(detune,
|
|
1043
|
+
.cancelScheduledValues(scheduleTime)
|
|
1044
|
+
.setValueAtTime(detune, scheduleTime);
|
|
1067
1045
|
}
|
|
1068
1046
|
getPortamentoTime(channel) {
|
|
1069
1047
|
const factor = 5 * Math.log(10) / 127;
|
|
1070
1048
|
const time = channel.state.portamentoTime;
|
|
1071
1049
|
return Math.log(time) / factor;
|
|
1072
1050
|
}
|
|
1073
|
-
setPortamentoStartVolumeEnvelope(channel, note) {
|
|
1074
|
-
const now = this.audioContext.currentTime;
|
|
1051
|
+
setPortamentoStartVolumeEnvelope(channel, note, scheduleTime) {
|
|
1075
1052
|
const { voiceParams, startTime } = note;
|
|
1076
1053
|
const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation);
|
|
1077
1054
|
const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
|
|
1078
1055
|
const volDelay = startTime + voiceParams.volDelay;
|
|
1079
1056
|
const portamentoTime = volDelay + this.getPortamentoTime(channel);
|
|
1080
1057
|
note.volumeEnvelopeNode.gain
|
|
1081
|
-
.cancelScheduledValues(
|
|
1058
|
+
.cancelScheduledValues(scheduleTime)
|
|
1082
1059
|
.setValueAtTime(0, volDelay)
|
|
1083
1060
|
.linearRampToValueAtTime(sustainVolume, portamentoTime);
|
|
1084
1061
|
}
|
|
1085
|
-
setVolumeEnvelope(channel, note) {
|
|
1086
|
-
const now = this.audioContext.currentTime;
|
|
1062
|
+
setVolumeEnvelope(channel, note, scheduleTime) {
|
|
1087
1063
|
const state = channel.state;
|
|
1088
1064
|
const { voiceParams, startTime } = note;
|
|
1089
1065
|
const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation) *
|
|
@@ -1094,7 +1070,7 @@ class Midy {
|
|
|
1094
1070
|
const volHold = volAttack + voiceParams.volHold;
|
|
1095
1071
|
const volDecay = volHold + voiceParams.volDecay * state.decayTime * 2;
|
|
1096
1072
|
note.volumeEnvelopeNode.gain
|
|
1097
|
-
.cancelScheduledValues(
|
|
1073
|
+
.cancelScheduledValues(scheduleTime)
|
|
1098
1074
|
.setValueAtTime(0, startTime)
|
|
1099
1075
|
.setValueAtTime(1e-6, volDelay) // exponentialRampToValueAtTime() requires a non-zero value
|
|
1100
1076
|
.exponentialRampToValueAtTime(attackVolume, volAttack)
|
|
@@ -1102,7 +1078,6 @@ class Midy {
|
|
|
1102
1078
|
.linearRampToValueAtTime(sustainVolume, volDecay);
|
|
1103
1079
|
}
|
|
1104
1080
|
setPitchEnvelope(note, scheduleTime) {
|
|
1105
|
-
scheduleTime ??= this.audioContext.currentTime;
|
|
1106
1081
|
const { voiceParams } = note;
|
|
1107
1082
|
const baseRate = voiceParams.playbackRate;
|
|
1108
1083
|
note.bufferSource.playbackRate
|
|
@@ -1129,8 +1104,7 @@ class Midy {
|
|
|
1129
1104
|
const maxFrequency = 20000; // max Hz of initialFilterFc
|
|
1130
1105
|
return Math.max(minFrequency, Math.min(frequency, maxFrequency));
|
|
1131
1106
|
}
|
|
1132
|
-
setPortamentoStartFilterEnvelope(channel, note) {
|
|
1133
|
-
const now = this.audioContext.currentTime;
|
|
1107
|
+
setPortamentoStartFilterEnvelope(channel, note, scheduleTime) {
|
|
1134
1108
|
const state = channel.state;
|
|
1135
1109
|
const { voiceParams, noteNumber, startTime } = note;
|
|
1136
1110
|
const softPedalFactor = 1 -
|
|
@@ -1146,13 +1120,12 @@ class Midy {
|
|
|
1146
1120
|
const portamentoTime = startTime + this.getPortamentoTime(channel);
|
|
1147
1121
|
const modDelay = startTime + voiceParams.modDelay;
|
|
1148
1122
|
note.filterNode.frequency
|
|
1149
|
-
.cancelScheduledValues(
|
|
1123
|
+
.cancelScheduledValues(scheduleTime)
|
|
1150
1124
|
.setValueAtTime(adjustedBaseFreq, startTime)
|
|
1151
1125
|
.setValueAtTime(adjustedBaseFreq, modDelay)
|
|
1152
1126
|
.linearRampToValueAtTime(adjustedSustainFreq, portamentoTime);
|
|
1153
1127
|
}
|
|
1154
|
-
setFilterEnvelope(channel, note) {
|
|
1155
|
-
const now = this.audioContext.currentTime;
|
|
1128
|
+
setFilterEnvelope(channel, note, scheduleTime) {
|
|
1156
1129
|
const state = channel.state;
|
|
1157
1130
|
const { voiceParams, noteNumber, startTime } = note;
|
|
1158
1131
|
const softPedalFactor = 1 -
|
|
@@ -1173,14 +1146,14 @@ class Midy {
|
|
|
1173
1146
|
const modHold = modAttack + voiceParams.modHold;
|
|
1174
1147
|
const modDecay = modHold + voiceParams.modDecay;
|
|
1175
1148
|
note.filterNode.frequency
|
|
1176
|
-
.cancelScheduledValues(
|
|
1149
|
+
.cancelScheduledValues(scheduleTime)
|
|
1177
1150
|
.setValueAtTime(adjustedBaseFreq, startTime)
|
|
1178
1151
|
.setValueAtTime(adjustedBaseFreq, modDelay)
|
|
1179
1152
|
.exponentialRampToValueAtTime(adjustedPeekFreq, modAttack)
|
|
1180
1153
|
.setValueAtTime(adjustedPeekFreq, modHold)
|
|
1181
1154
|
.linearRampToValueAtTime(adjustedSustainFreq, modDecay);
|
|
1182
1155
|
}
|
|
1183
|
-
startModulation(channel, note,
|
|
1156
|
+
startModulation(channel, note, scheduleTime) {
|
|
1184
1157
|
const { voiceParams } = note;
|
|
1185
1158
|
note.modulationLFO = new OscillatorNode(this.audioContext, {
|
|
1186
1159
|
frequency: this.centToHz(voiceParams.freqModLFO),
|
|
@@ -1189,10 +1162,10 @@ class Midy {
|
|
|
1189
1162
|
gain: voiceParams.modLfoToFilterFc,
|
|
1190
1163
|
});
|
|
1191
1164
|
note.modulationDepth = new GainNode(this.audioContext);
|
|
1192
|
-
this.setModLfoToPitch(channel, note);
|
|
1165
|
+
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1193
1166
|
note.volumeDepth = new GainNode(this.audioContext);
|
|
1194
|
-
this.setModLfoToVolume(channel, note);
|
|
1195
|
-
note.modulationLFO.start(startTime + voiceParams.delayModLFO);
|
|
1167
|
+
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1168
|
+
note.modulationLFO.start(note.startTime + voiceParams.delayModLFO);
|
|
1196
1169
|
note.modulationLFO.connect(note.filterDepth);
|
|
1197
1170
|
note.filterDepth.connect(note.filterNode.frequency);
|
|
1198
1171
|
note.modulationLFO.connect(note.modulationDepth);
|
|
@@ -1200,15 +1173,15 @@ class Midy {
|
|
|
1200
1173
|
note.modulationLFO.connect(note.volumeDepth);
|
|
1201
1174
|
note.volumeDepth.connect(note.volumeEnvelopeNode.gain);
|
|
1202
1175
|
}
|
|
1203
|
-
startVibrato(channel, note,
|
|
1176
|
+
startVibrato(channel, note, scheduleTime) {
|
|
1204
1177
|
const { voiceParams } = note;
|
|
1205
1178
|
const state = channel.state;
|
|
1206
1179
|
note.vibratoLFO = new OscillatorNode(this.audioContext, {
|
|
1207
1180
|
frequency: this.centToHz(voiceParams.freqVibLFO) * state.vibratoRate * 2,
|
|
1208
1181
|
});
|
|
1209
|
-
note.vibratoLFO.start(startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
|
|
1182
|
+
note.vibratoLFO.start(note.startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
|
|
1210
1183
|
note.vibratoDepth = new GainNode(this.audioContext);
|
|
1211
|
-
this.setVibLfoToPitch(channel, note);
|
|
1184
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1212
1185
|
note.vibratoLFO.connect(note.vibratoDepth);
|
|
1213
1186
|
note.vibratoDepth.connect(note.bufferSource.detune);
|
|
1214
1187
|
}
|
|
@@ -1231,6 +1204,7 @@ class Midy {
|
|
|
1231
1204
|
}
|
|
1232
1205
|
}
|
|
1233
1206
|
async createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3) {
|
|
1207
|
+
const now = this.audioContext.currentTime;
|
|
1234
1208
|
const state = channel.state;
|
|
1235
1209
|
const controllerState = this.getControllerState(channel, noteNumber, velocity);
|
|
1236
1210
|
const voiceParams = voice.getAllParams(controllerState);
|
|
@@ -1247,22 +1221,22 @@ class Midy {
|
|
|
1247
1221
|
});
|
|
1248
1222
|
if (portamento) {
|
|
1249
1223
|
note.portamento = true;
|
|
1250
|
-
this.setPortamentoStartVolumeEnvelope(channel, note);
|
|
1251
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1224
|
+
this.setPortamentoStartVolumeEnvelope(channel, note, now);
|
|
1225
|
+
this.setPortamentoStartFilterEnvelope(channel, note, now);
|
|
1252
1226
|
}
|
|
1253
1227
|
else {
|
|
1254
1228
|
note.portamento = false;
|
|
1255
|
-
this.setVolumeEnvelope(channel, note);
|
|
1256
|
-
this.setFilterEnvelope(channel, note);
|
|
1229
|
+
this.setVolumeEnvelope(channel, note, now);
|
|
1230
|
+
this.setFilterEnvelope(channel, note, now);
|
|
1257
1231
|
}
|
|
1258
1232
|
if (0 < state.vibratoDepth) {
|
|
1259
|
-
this.startVibrato(channel, note,
|
|
1233
|
+
this.startVibrato(channel, note, now);
|
|
1260
1234
|
}
|
|
1261
|
-
this.setPitchEnvelope(note);
|
|
1235
|
+
this.setPitchEnvelope(note, now);
|
|
1262
1236
|
if (0 < state.modulationDepth) {
|
|
1263
|
-
this.startModulation(channel, note,
|
|
1237
|
+
this.startModulation(channel, note, now);
|
|
1264
1238
|
}
|
|
1265
|
-
if (
|
|
1239
|
+
if (channel.mono && channel.currentBufferSource) {
|
|
1266
1240
|
channel.currentBufferSource.stop(startTime);
|
|
1267
1241
|
channel.currentBufferSource = note.bufferSource;
|
|
1268
1242
|
}
|
|
@@ -1272,10 +1246,10 @@ class Midy {
|
|
|
1272
1246
|
note.volumeNode.connect(note.gainL);
|
|
1273
1247
|
note.volumeNode.connect(note.gainR);
|
|
1274
1248
|
if (0 < channel.chorusSendLevel) {
|
|
1275
|
-
this.setChorusEffectsSend(channel, note, 0);
|
|
1249
|
+
this.setChorusEffectsSend(channel, note, 0, now);
|
|
1276
1250
|
}
|
|
1277
1251
|
if (0 < channel.reverbSendLevel) {
|
|
1278
|
-
this.setReverbEffectsSend(channel, note, 0);
|
|
1252
|
+
this.setReverbEffectsSend(channel, note, 0, now);
|
|
1279
1253
|
}
|
|
1280
1254
|
note.bufferSource.start(startTime);
|
|
1281
1255
|
return note;
|
|
@@ -1303,8 +1277,8 @@ class Midy {
|
|
|
1303
1277
|
const note = await this.createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3);
|
|
1304
1278
|
note.gainL.connect(channel.gainL);
|
|
1305
1279
|
note.gainR.connect(channel.gainR);
|
|
1306
|
-
if (channel.state.
|
|
1307
|
-
channel.
|
|
1280
|
+
if (0.5 <= channel.state.sustainPedal) {
|
|
1281
|
+
channel.sustainNotes.push(note);
|
|
1308
1282
|
}
|
|
1309
1283
|
const exclusiveClass = note.voiceParams.exclusiveClass;
|
|
1310
1284
|
if (exclusiveClass !== 0) {
|
|
@@ -1312,9 +1286,9 @@ class Midy {
|
|
|
1312
1286
|
const prevEntry = this.exclusiveClassMap.get(exclusiveClass);
|
|
1313
1287
|
const [prevNote, prevChannelNumber] = prevEntry;
|
|
1314
1288
|
if (!prevNote.ending) {
|
|
1315
|
-
this.
|
|
1316
|
-
startTime,
|
|
1317
|
-
|
|
1289
|
+
this.scheduleNoteOff(prevChannelNumber, prevNote.noteNumber, 0, // velocity,
|
|
1290
|
+
startTime, true, // force
|
|
1291
|
+
undefined);
|
|
1318
1292
|
}
|
|
1319
1293
|
}
|
|
1320
1294
|
this.exclusiveClassMap.set(exclusiveClass, [note, channelNumber]);
|
|
@@ -1327,9 +1301,9 @@ class Midy {
|
|
|
1327
1301
|
scheduledNotes.set(noteNumber, [note]);
|
|
1328
1302
|
}
|
|
1329
1303
|
}
|
|
1330
|
-
noteOn(channelNumber, noteNumber, velocity,
|
|
1331
|
-
|
|
1332
|
-
return this.scheduleNoteOn(channelNumber, noteNumber, velocity,
|
|
1304
|
+
noteOn(channelNumber, noteNumber, velocity, scheduleTime) {
|
|
1305
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1306
|
+
return this.scheduleNoteOn(channelNumber, noteNumber, velocity, scheduleTime, false);
|
|
1333
1307
|
}
|
|
1334
1308
|
stopNote(endTime, stopTime, scheduledNotes, index) {
|
|
1335
1309
|
const note = scheduledNotes[index];
|
|
@@ -1369,11 +1343,11 @@ class Midy {
|
|
|
1369
1343
|
note.bufferSource.stop(stopTime);
|
|
1370
1344
|
});
|
|
1371
1345
|
}
|
|
1372
|
-
|
|
1346
|
+
scheduleNoteOff(channelNumber, noteNumber, _velocity, endTime, force, portamentoNoteNumber) {
|
|
1373
1347
|
const channel = this.channels[channelNumber];
|
|
1374
1348
|
const state = channel.state;
|
|
1375
1349
|
if (!force) {
|
|
1376
|
-
if (0.5
|
|
1350
|
+
if (0.5 <= state.sustainPedal)
|
|
1377
1351
|
return;
|
|
1378
1352
|
if (channel.sostenutoNotes.has(noteNumber))
|
|
1379
1353
|
return;
|
|
@@ -1409,83 +1383,73 @@ class Midy {
|
|
|
1409
1383
|
}
|
|
1410
1384
|
}
|
|
1411
1385
|
}
|
|
1412
|
-
|
|
1413
|
-
|
|
1414
|
-
return this.
|
|
1386
|
+
noteOff(channelNumber, noteNumber, velocity, scheduleTime) {
|
|
1387
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1388
|
+
return this.scheduleNoteOff(channelNumber, noteNumber, velocity, scheduleTime, false, // force
|
|
1389
|
+
undefined);
|
|
1415
1390
|
}
|
|
1416
|
-
releaseSustainPedal(channelNumber, halfVelocity) {
|
|
1391
|
+
releaseSustainPedal(channelNumber, halfVelocity, scheduleTime) {
|
|
1417
1392
|
const velocity = halfVelocity * 2;
|
|
1418
1393
|
const channel = this.channels[channelNumber];
|
|
1419
1394
|
const promises = [];
|
|
1420
|
-
channel.
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
continue;
|
|
1426
|
-
const { noteNumber } = note;
|
|
1427
|
-
const promise = this.releaseNote(channelNumber, noteNumber, velocity);
|
|
1428
|
-
promises.push(promise);
|
|
1429
|
-
}
|
|
1430
|
-
});
|
|
1395
|
+
for (let i = 0; i < channel.sustainNotes.length; i++) {
|
|
1396
|
+
const promise = this.noteOff(channelNumber, channel.sustainNotes[i].noteNumber, velocity, scheduleTime);
|
|
1397
|
+
promises.push(promise);
|
|
1398
|
+
}
|
|
1399
|
+
channel.sustainNotes = [];
|
|
1431
1400
|
return promises;
|
|
1432
1401
|
}
|
|
1433
|
-
releaseSostenutoPedal(channelNumber, halfVelocity) {
|
|
1402
|
+
releaseSostenutoPedal(channelNumber, halfVelocity, scheduleTime) {
|
|
1434
1403
|
const velocity = halfVelocity * 2;
|
|
1435
1404
|
const channel = this.channels[channelNumber];
|
|
1436
1405
|
const promises = [];
|
|
1437
1406
|
channel.state.sostenutoPedal = 0;
|
|
1438
|
-
channel.sostenutoNotes.forEach((
|
|
1439
|
-
const
|
|
1440
|
-
const promise = this.releaseNote(channelNumber, noteNumber, velocity);
|
|
1407
|
+
channel.sostenutoNotes.forEach((note) => {
|
|
1408
|
+
const promise = this.noteOff(channelNumber, note.noteNumber, velocity, scheduleTime);
|
|
1441
1409
|
promises.push(promise);
|
|
1442
1410
|
});
|
|
1443
1411
|
channel.sostenutoNotes.clear();
|
|
1444
1412
|
return promises;
|
|
1445
1413
|
}
|
|
1446
|
-
handleMIDIMessage(statusByte, data1, data2) {
|
|
1447
|
-
const channelNumber =
|
|
1414
|
+
handleMIDIMessage(statusByte, data1, data2, scheduleTime) {
|
|
1415
|
+
const channelNumber = statusByte & 0x0F;
|
|
1448
1416
|
const messageType = statusByte & 0xF0;
|
|
1449
1417
|
switch (messageType) {
|
|
1450
1418
|
case 0x80:
|
|
1451
|
-
return this.
|
|
1419
|
+
return this.noteOff(channelNumber, data1, data2, scheduleTime);
|
|
1452
1420
|
case 0x90:
|
|
1453
|
-
return this.noteOn(channelNumber, data1, data2);
|
|
1421
|
+
return this.noteOn(channelNumber, data1, data2, scheduleTime);
|
|
1454
1422
|
case 0xA0:
|
|
1455
|
-
return this.handlePolyphonicKeyPressure(channelNumber, data1, data2);
|
|
1423
|
+
return this.handlePolyphonicKeyPressure(channelNumber, data1, data2, scheduleTime);
|
|
1456
1424
|
case 0xB0:
|
|
1457
|
-
return this.handleControlChange(channelNumber, data1, data2);
|
|
1425
|
+
return this.handleControlChange(channelNumber, data1, data2, scheduleTime);
|
|
1458
1426
|
case 0xC0:
|
|
1459
|
-
return this.handleProgramChange(channelNumber, data1);
|
|
1427
|
+
return this.handleProgramChange(channelNumber, data1, scheduleTime);
|
|
1460
1428
|
case 0xD0:
|
|
1461
|
-
return this.handleChannelPressure(channelNumber, data1);
|
|
1429
|
+
return this.handleChannelPressure(channelNumber, data1, scheduleTime);
|
|
1462
1430
|
case 0xE0:
|
|
1463
|
-
return this.handlePitchBendMessage(channelNumber, data1, data2);
|
|
1431
|
+
return this.handlePitchBendMessage(channelNumber, data1, data2, scheduleTime);
|
|
1464
1432
|
default:
|
|
1465
1433
|
console.warn(`Unsupported MIDI message: ${messageType.toString(16)}`);
|
|
1466
1434
|
}
|
|
1467
1435
|
}
|
|
1468
|
-
handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure,
|
|
1469
|
-
if (!startTime)
|
|
1470
|
-
startTime = this.audioContext.currentTime;
|
|
1436
|
+
handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure, scheduleTime) {
|
|
1471
1437
|
const channel = this.channels[channelNumber];
|
|
1472
1438
|
channel.state.polyphonicKeyPressure = pressure / 127;
|
|
1473
1439
|
const table = channel.polyphonicKeyPressureTable;
|
|
1474
|
-
const activeNotes = this.getActiveNotes(channel,
|
|
1440
|
+
const activeNotes = this.getActiveNotes(channel, scheduleTime);
|
|
1475
1441
|
if (activeNotes.has(noteNumber)) {
|
|
1476
1442
|
const note = activeNotes.get(noteNumber);
|
|
1477
1443
|
this.setControllerParameters(channel, note, table);
|
|
1478
1444
|
}
|
|
1479
1445
|
// this.applyVoiceParams(channel, 10);
|
|
1480
1446
|
}
|
|
1481
|
-
handleProgramChange(channelNumber, program) {
|
|
1447
|
+
handleProgramChange(channelNumber, program, _scheduleTime) {
|
|
1482
1448
|
const channel = this.channels[channelNumber];
|
|
1483
1449
|
channel.bank = channel.bankMSB * 128 + channel.bankLSB;
|
|
1484
1450
|
channel.program = program;
|
|
1485
1451
|
}
|
|
1486
|
-
handleChannelPressure(channelNumber, value,
|
|
1487
|
-
if (!startTime)
|
|
1488
|
-
startTime = this.audioContext.currentTime;
|
|
1452
|
+
handleChannelPressure(channelNumber, value, scheduleTime) {
|
|
1489
1453
|
const channel = this.channels[channelNumber];
|
|
1490
1454
|
const prev = channel.state.channelPressure;
|
|
1491
1455
|
const next = value / 127;
|
|
@@ -1495,72 +1459,68 @@ class Midy {
|
|
|
1495
1459
|
channel.detune += pressureDepth * (next - prev);
|
|
1496
1460
|
}
|
|
1497
1461
|
const table = channel.channelPressureTable;
|
|
1498
|
-
this.getActiveNotes(channel,
|
|
1462
|
+
this.getActiveNotes(channel, scheduleTime).forEach((note) => {
|
|
1499
1463
|
this.setControllerParameters(channel, note, table);
|
|
1500
1464
|
});
|
|
1501
1465
|
// this.applyVoiceParams(channel, 13);
|
|
1502
1466
|
}
|
|
1503
|
-
handlePitchBendMessage(channelNumber, lsb, msb) {
|
|
1467
|
+
handlePitchBendMessage(channelNumber, lsb, msb, scheduleTime) {
|
|
1504
1468
|
const pitchBend = msb * 128 + lsb;
|
|
1505
|
-
this.setPitchBend(channelNumber, pitchBend);
|
|
1469
|
+
this.setPitchBend(channelNumber, pitchBend, scheduleTime);
|
|
1506
1470
|
}
|
|
1507
|
-
setPitchBend(channelNumber, value) {
|
|
1471
|
+
setPitchBend(channelNumber, value, scheduleTime) {
|
|
1472
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1508
1473
|
const channel = this.channels[channelNumber];
|
|
1509
1474
|
const state = channel.state;
|
|
1510
1475
|
const prev = state.pitchWheel * 2 - 1;
|
|
1511
1476
|
const next = (value - 8192) / 8192;
|
|
1512
1477
|
state.pitchWheel = value / 16383;
|
|
1513
1478
|
channel.detune += (next - prev) * state.pitchWheelSensitivity * 12800;
|
|
1514
|
-
this.updateChannelDetune(channel);
|
|
1515
|
-
this.applyVoiceParams(channel, 14);
|
|
1479
|
+
this.updateChannelDetune(channel, scheduleTime);
|
|
1480
|
+
this.applyVoiceParams(channel, 14, scheduleTime);
|
|
1516
1481
|
}
|
|
1517
|
-
setModLfoToPitch(channel, note) {
|
|
1518
|
-
const now = this.audioContext.currentTime;
|
|
1482
|
+
setModLfoToPitch(channel, note, scheduleTime) {
|
|
1519
1483
|
const modLfoToPitch = note.voiceParams.modLfoToPitch +
|
|
1520
1484
|
this.getLFOPitchDepth(channel, note);
|
|
1521
1485
|
const baseDepth = Math.abs(modLfoToPitch) + channel.state.modulationDepth;
|
|
1522
1486
|
const modulationDepth = baseDepth * Math.sign(modLfoToPitch);
|
|
1523
1487
|
note.modulationDepth.gain
|
|
1524
|
-
.cancelScheduledValues(
|
|
1525
|
-
.setValueAtTime(modulationDepth,
|
|
1488
|
+
.cancelScheduledValues(scheduleTime)
|
|
1489
|
+
.setValueAtTime(modulationDepth, scheduleTime);
|
|
1526
1490
|
}
|
|
1527
|
-
setVibLfoToPitch(channel, note) {
|
|
1528
|
-
const now = this.audioContext.currentTime;
|
|
1491
|
+
setVibLfoToPitch(channel, note, scheduleTime) {
|
|
1529
1492
|
const vibLfoToPitch = note.voiceParams.vibLfoToPitch;
|
|
1530
1493
|
const vibratoDepth = Math.abs(vibLfoToPitch) * channel.state.vibratoDepth *
|
|
1531
1494
|
2;
|
|
1532
1495
|
const vibratoDepthSign = 0 < vibLfoToPitch;
|
|
1533
1496
|
note.vibratoDepth.gain
|
|
1534
|
-
.cancelScheduledValues(
|
|
1535
|
-
.setValueAtTime(vibratoDepth * vibratoDepthSign,
|
|
1497
|
+
.cancelScheduledValues(scheduleTime)
|
|
1498
|
+
.setValueAtTime(vibratoDepth * vibratoDepthSign, scheduleTime);
|
|
1536
1499
|
}
|
|
1537
|
-
setModLfoToFilterFc(channel, note) {
|
|
1538
|
-
const now = this.audioContext.currentTime;
|
|
1500
|
+
setModLfoToFilterFc(channel, note, scheduleTime) {
|
|
1539
1501
|
const modLfoToFilterFc = note.voiceParams.modLfoToFilterFc +
|
|
1540
1502
|
this.getLFOFilterDepth(channel, note);
|
|
1541
1503
|
note.filterDepth.gain
|
|
1542
|
-
.cancelScheduledValues(
|
|
1543
|
-
.setValueAtTime(modLfoToFilterFc,
|
|
1504
|
+
.cancelScheduledValues(scheduleTime)
|
|
1505
|
+
.setValueAtTime(modLfoToFilterFc, scheduleTime);
|
|
1544
1506
|
}
|
|
1545
|
-
setModLfoToVolume(channel, note) {
|
|
1546
|
-
const now = this.audioContext.currentTime;
|
|
1507
|
+
setModLfoToVolume(channel, note, scheduleTime) {
|
|
1547
1508
|
const modLfoToVolume = note.voiceParams.modLfoToVolume;
|
|
1548
1509
|
const baseDepth = this.cbToRatio(Math.abs(modLfoToVolume)) - 1;
|
|
1549
1510
|
const volumeDepth = baseDepth * Math.sign(modLfoToVolume) *
|
|
1550
1511
|
(1 + this.getLFOAmplitudeDepth(channel, note));
|
|
1551
1512
|
note.volumeDepth.gain
|
|
1552
|
-
.cancelScheduledValues(
|
|
1553
|
-
.setValueAtTime(volumeDepth,
|
|
1513
|
+
.cancelScheduledValues(scheduleTime)
|
|
1514
|
+
.setValueAtTime(volumeDepth, scheduleTime);
|
|
1554
1515
|
}
|
|
1555
|
-
setReverbEffectsSend(channel, note, prevValue) {
|
|
1516
|
+
setReverbEffectsSend(channel, note, prevValue, scheduleTime) {
|
|
1556
1517
|
if (0 < prevValue) {
|
|
1557
1518
|
if (0 < note.voiceParams.reverbEffectsSend) {
|
|
1558
|
-
const now = this.audioContext.currentTime;
|
|
1559
1519
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 91);
|
|
1560
1520
|
const value = note.voiceParams.reverbEffectsSend + keyBasedValue;
|
|
1561
1521
|
note.reverbEffectsSend.gain
|
|
1562
|
-
.cancelScheduledValues(
|
|
1563
|
-
.setValueAtTime(value,
|
|
1522
|
+
.cancelScheduledValues(scheduleTime)
|
|
1523
|
+
.setValueAtTime(value, scheduleTime);
|
|
1564
1524
|
}
|
|
1565
1525
|
else {
|
|
1566
1526
|
note.reverbEffectsSend.disconnect();
|
|
@@ -1578,15 +1538,14 @@ class Midy {
|
|
|
1578
1538
|
}
|
|
1579
1539
|
}
|
|
1580
1540
|
}
|
|
1581
|
-
setChorusEffectsSend(channel, note, prevValue) {
|
|
1541
|
+
setChorusEffectsSend(channel, note, prevValue, scheduleTime) {
|
|
1582
1542
|
if (0 < prevValue) {
|
|
1583
1543
|
if (0 < note.voiceParams.chorusEffectsSend) {
|
|
1584
|
-
const now = this.audioContext.currentTime;
|
|
1585
1544
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 93);
|
|
1586
1545
|
const value = note.voiceParams.chorusEffectsSend + keyBasedValue;
|
|
1587
1546
|
note.chorusEffectsSend.gain
|
|
1588
|
-
.cancelScheduledValues(
|
|
1589
|
-
.setValueAtTime(value,
|
|
1547
|
+
.cancelScheduledValues(scheduleTime)
|
|
1548
|
+
.setValueAtTime(value, scheduleTime);
|
|
1590
1549
|
}
|
|
1591
1550
|
else {
|
|
1592
1551
|
note.chorusEffectsSend.disconnect();
|
|
@@ -1604,75 +1563,71 @@ class Midy {
|
|
|
1604
1563
|
}
|
|
1605
1564
|
}
|
|
1606
1565
|
}
|
|
1607
|
-
setDelayModLFO(note) {
|
|
1608
|
-
const now = this.audioContext.currentTime;
|
|
1566
|
+
setDelayModLFO(note, scheduleTime) {
|
|
1609
1567
|
const startTime = note.startTime;
|
|
1610
|
-
if (startTime <
|
|
1568
|
+
if (startTime < scheduleTime)
|
|
1611
1569
|
return;
|
|
1612
|
-
note.modulationLFO.stop(
|
|
1570
|
+
note.modulationLFO.stop(scheduleTime);
|
|
1613
1571
|
note.modulationLFO.start(startTime + note.voiceParams.delayModLFO);
|
|
1614
1572
|
note.modulationLFO.connect(note.filterDepth);
|
|
1615
1573
|
}
|
|
1616
|
-
setFreqModLFO(note) {
|
|
1617
|
-
const now = this.audioContext.currentTime;
|
|
1574
|
+
setFreqModLFO(note, scheduleTime) {
|
|
1618
1575
|
const freqModLFO = note.voiceParams.freqModLFO;
|
|
1619
1576
|
note.modulationLFO.frequency
|
|
1620
|
-
.cancelScheduledValues(
|
|
1621
|
-
.setValueAtTime(freqModLFO,
|
|
1577
|
+
.cancelScheduledValues(scheduleTime)
|
|
1578
|
+
.setValueAtTime(freqModLFO, scheduleTime);
|
|
1622
1579
|
}
|
|
1623
|
-
setFreqVibLFO(channel, note) {
|
|
1624
|
-
const now = this.audioContext.currentTime;
|
|
1580
|
+
setFreqVibLFO(channel, note, scheduleTime) {
|
|
1625
1581
|
const freqVibLFO = note.voiceParams.freqVibLFO;
|
|
1626
1582
|
note.vibratoLFO.frequency
|
|
1627
|
-
.cancelScheduledValues(
|
|
1628
|
-
.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2,
|
|
1583
|
+
.cancelScheduledValues(scheduleTime)
|
|
1584
|
+
.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2, scheduleTime);
|
|
1629
1585
|
}
|
|
1630
1586
|
createVoiceParamsHandlers() {
|
|
1631
1587
|
return {
|
|
1632
|
-
modLfoToPitch: (channel, note, _prevValue) => {
|
|
1588
|
+
modLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
|
|
1633
1589
|
if (0 < channel.state.modulationDepth) {
|
|
1634
|
-
this.setModLfoToPitch(channel, note);
|
|
1590
|
+
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1635
1591
|
}
|
|
1636
1592
|
},
|
|
1637
|
-
vibLfoToPitch: (channel, note, _prevValue) => {
|
|
1593
|
+
vibLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
|
|
1638
1594
|
if (0 < channel.state.vibratoDepth) {
|
|
1639
|
-
this.setVibLfoToPitch(channel, note);
|
|
1595
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1640
1596
|
}
|
|
1641
1597
|
},
|
|
1642
|
-
modLfoToFilterFc: (channel, note, _prevValue) => {
|
|
1598
|
+
modLfoToFilterFc: (channel, note, _prevValue, scheduleTime) => {
|
|
1643
1599
|
if (0 < channel.state.modulationDepth) {
|
|
1644
|
-
this.setModLfoToFilterFc(channel, note);
|
|
1600
|
+
this.setModLfoToFilterFc(channel, note, scheduleTime);
|
|
1645
1601
|
}
|
|
1646
1602
|
},
|
|
1647
|
-
modLfoToVolume: (channel, note, _prevValue) => {
|
|
1603
|
+
modLfoToVolume: (channel, note, _prevValue, scheduleTime) => {
|
|
1648
1604
|
if (0 < channel.state.modulationDepth) {
|
|
1649
|
-
this.setModLfoToVolume(channel, note);
|
|
1605
|
+
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1650
1606
|
}
|
|
1651
1607
|
},
|
|
1652
|
-
chorusEffectsSend: (channel, note, prevValue) => {
|
|
1653
|
-
this.setChorusEffectsSend(channel, note, prevValue);
|
|
1608
|
+
chorusEffectsSend: (channel, note, prevValue, scheduleTime) => {
|
|
1609
|
+
this.setChorusEffectsSend(channel, note, prevValue, scheduleTime);
|
|
1654
1610
|
},
|
|
1655
|
-
reverbEffectsSend: (channel, note, prevValue) => {
|
|
1656
|
-
this.setReverbEffectsSend(channel, note, prevValue);
|
|
1611
|
+
reverbEffectsSend: (channel, note, prevValue, scheduleTime) => {
|
|
1612
|
+
this.setReverbEffectsSend(channel, note, prevValue, scheduleTime);
|
|
1657
1613
|
},
|
|
1658
|
-
delayModLFO: (_channel, note, _prevValue) => this.setDelayModLFO(note),
|
|
1659
|
-
freqModLFO: (_channel, note, _prevValue) => this.setFreqModLFO(note),
|
|
1660
|
-
delayVibLFO: (channel, note, prevValue) => {
|
|
1614
|
+
delayModLFO: (_channel, note, _prevValue, scheduleTime) => this.setDelayModLFO(note, scheduleTime),
|
|
1615
|
+
freqModLFO: (_channel, note, _prevValue, scheduleTime) => this.setFreqModLFO(note, scheduleTime),
|
|
1616
|
+
delayVibLFO: (channel, note, prevValue, scheduleTime) => {
|
|
1661
1617
|
if (0 < channel.state.vibratoDepth) {
|
|
1662
|
-
const now = this.audioContext.currentTime;
|
|
1663
1618
|
const vibratoDelay = channel.state.vibratoDelay * 2;
|
|
1664
1619
|
const prevStartTime = note.startTime + prevValue * vibratoDelay;
|
|
1665
|
-
if (
|
|
1620
|
+
if (scheduleTime < prevStartTime)
|
|
1666
1621
|
return;
|
|
1667
1622
|
const value = note.voiceParams.delayVibLFO;
|
|
1668
1623
|
const startTime = note.startTime + value * vibratoDelay;
|
|
1669
|
-
note.vibratoLFO.stop(
|
|
1624
|
+
note.vibratoLFO.stop(scheduleTime);
|
|
1670
1625
|
note.vibratoLFO.start(startTime);
|
|
1671
1626
|
}
|
|
1672
1627
|
},
|
|
1673
|
-
freqVibLFO: (channel, note, _prevValue) => {
|
|
1628
|
+
freqVibLFO: (channel, note, _prevValue, scheduleTime) => {
|
|
1674
1629
|
if (0 < channel.state.vibratoDepth) {
|
|
1675
|
-
this.setFreqVibLFO(channel, note);
|
|
1630
|
+
this.setFreqVibLFO(channel, note, scheduleTime);
|
|
1676
1631
|
}
|
|
1677
1632
|
},
|
|
1678
1633
|
};
|
|
@@ -1684,54 +1639,49 @@ class Midy {
|
|
|
1684
1639
|
state[3] = noteNumber / 127;
|
|
1685
1640
|
return state;
|
|
1686
1641
|
}
|
|
1687
|
-
applyVoiceParams(channel, controllerType) {
|
|
1688
|
-
|
|
1689
|
-
|
|
1690
|
-
|
|
1691
|
-
|
|
1642
|
+
applyVoiceParams(channel, controllerType, scheduleTime) {
|
|
1643
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1644
|
+
const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
|
|
1645
|
+
const voiceParams = note.voice.getParams(controllerType, controllerState);
|
|
1646
|
+
let appliedFilterEnvelope = false;
|
|
1647
|
+
let appliedVolumeEnvelope = false;
|
|
1648
|
+
for (const [key, value] of Object.entries(voiceParams)) {
|
|
1649
|
+
const prevValue = note.voiceParams[key];
|
|
1650
|
+
if (value === prevValue)
|
|
1692
1651
|
continue;
|
|
1693
|
-
|
|
1694
|
-
|
|
1695
|
-
|
|
1696
|
-
|
|
1697
|
-
|
|
1698
|
-
|
|
1699
|
-
if (value === prevValue)
|
|
1652
|
+
note.voiceParams[key] = value;
|
|
1653
|
+
if (key in this.voiceParamsHandlers) {
|
|
1654
|
+
this.voiceParamsHandlers[key](channel, note, prevValue, scheduleTime);
|
|
1655
|
+
}
|
|
1656
|
+
else if (filterEnvelopeKeySet.has(key)) {
|
|
1657
|
+
if (appliedFilterEnvelope)
|
|
1700
1658
|
continue;
|
|
1701
|
-
|
|
1702
|
-
|
|
1703
|
-
|
|
1659
|
+
appliedFilterEnvelope = true;
|
|
1660
|
+
const noteVoiceParams = note.voiceParams;
|
|
1661
|
+
for (let i = 0; i < filterEnvelopeKeys.length; i++) {
|
|
1662
|
+
const key = filterEnvelopeKeys[i];
|
|
1663
|
+
if (key in voiceParams)
|
|
1664
|
+
noteVoiceParams[key] = voiceParams[key];
|
|
1704
1665
|
}
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
continue;
|
|
1708
|
-
appliedFilterEnvelope = true;
|
|
1709
|
-
const noteVoiceParams = note.voiceParams;
|
|
1710
|
-
for (let i = 0; i < filterEnvelopeKeys.length; i++) {
|
|
1711
|
-
const key = filterEnvelopeKeys[i];
|
|
1712
|
-
if (key in voiceParams)
|
|
1713
|
-
noteVoiceParams[key] = voiceParams[key];
|
|
1714
|
-
}
|
|
1715
|
-
if (note.portamento) {
|
|
1716
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1717
|
-
}
|
|
1718
|
-
else {
|
|
1719
|
-
this.setFilterEnvelope(channel, note);
|
|
1720
|
-
}
|
|
1721
|
-
this.setPitchEnvelope(note);
|
|
1666
|
+
if (note.portamento) {
|
|
1667
|
+
this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
|
|
1722
1668
|
}
|
|
1723
|
-
else
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
|
|
1727
|
-
|
|
1728
|
-
|
|
1729
|
-
|
|
1730
|
-
|
|
1731
|
-
|
|
1732
|
-
|
|
1733
|
-
|
|
1669
|
+
else {
|
|
1670
|
+
this.setFilterEnvelope(channel, note, scheduleTime);
|
|
1671
|
+
}
|
|
1672
|
+
this.setPitchEnvelope(note, scheduleTime);
|
|
1673
|
+
}
|
|
1674
|
+
else if (volumeEnvelopeKeySet.has(key)) {
|
|
1675
|
+
if (appliedVolumeEnvelope)
|
|
1676
|
+
continue;
|
|
1677
|
+
appliedVolumeEnvelope = true;
|
|
1678
|
+
const noteVoiceParams = note.voiceParams;
|
|
1679
|
+
for (let i = 0; i < volumeEnvelopeKeys.length; i++) {
|
|
1680
|
+
const key = volumeEnvelopeKeys[i];
|
|
1681
|
+
if (key in voiceParams)
|
|
1682
|
+
noteVoiceParams[key] = voiceParams[key];
|
|
1734
1683
|
}
|
|
1684
|
+
this.setVolumeEnvelope(channel, note, scheduleTime);
|
|
1735
1685
|
}
|
|
1736
1686
|
}
|
|
1737
1687
|
});
|
|
@@ -1774,12 +1724,12 @@ class Midy {
|
|
|
1774
1724
|
127: this.polyOn,
|
|
1775
1725
|
};
|
|
1776
1726
|
}
|
|
1777
|
-
handleControlChange(channelNumber, controllerType, value,
|
|
1727
|
+
handleControlChange(channelNumber, controllerType, value, scheduleTime) {
|
|
1778
1728
|
const handler = this.controlChangeHandlers[controllerType];
|
|
1779
1729
|
if (handler) {
|
|
1780
|
-
handler.call(this, channelNumber, value,
|
|
1730
|
+
handler.call(this, channelNumber, value, scheduleTime);
|
|
1781
1731
|
const channel = this.channels[channelNumber];
|
|
1782
|
-
this.applyVoiceParams(channel, controllerType + 128);
|
|
1732
|
+
this.applyVoiceParams(channel, controllerType + 128, scheduleTime);
|
|
1783
1733
|
this.applyControlTable(channel, controllerType);
|
|
1784
1734
|
}
|
|
1785
1735
|
else {
|
|
@@ -1790,9 +1740,8 @@ class Midy {
|
|
|
1790
1740
|
this.channels[channelNumber].bankMSB = msb;
|
|
1791
1741
|
}
|
|
1792
1742
|
updateModulation(channel, scheduleTime) {
|
|
1793
|
-
scheduleTime ??= this.audioContext.currentTime;
|
|
1794
1743
|
const depth = channel.state.modulationDepth * channel.modulationDepthRange;
|
|
1795
|
-
this.processScheduledNotes(channel,
|
|
1744
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1796
1745
|
if (note.modulationDepth) {
|
|
1797
1746
|
note.modulationDepth.gain.setValueAtTime(depth, scheduleTime);
|
|
1798
1747
|
}
|
|
@@ -1803,6 +1752,7 @@ class Midy {
|
|
|
1803
1752
|
});
|
|
1804
1753
|
}
|
|
1805
1754
|
setModulationDepth(channelNumber, modulation, scheduleTime) {
|
|
1755
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1806
1756
|
const channel = this.channels[channelNumber];
|
|
1807
1757
|
channel.state.modulationDepth = modulation / 127;
|
|
1808
1758
|
this.updateModulation(channel, scheduleTime);
|
|
@@ -1813,8 +1763,7 @@ class Midy {
|
|
|
1813
1763
|
channel.state.portamentoTime = Math.exp(factor * portamentoTime);
|
|
1814
1764
|
}
|
|
1815
1765
|
setKeyBasedVolume(channel, scheduleTime) {
|
|
1816
|
-
|
|
1817
|
-
this.processScheduledNotes(channel, scheduleTime, (note) => {
|
|
1766
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1818
1767
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 7);
|
|
1819
1768
|
if (keyBasedValue !== 0) {
|
|
1820
1769
|
note.volumeNode.gain
|
|
@@ -1824,6 +1773,7 @@ class Midy {
|
|
|
1824
1773
|
});
|
|
1825
1774
|
}
|
|
1826
1775
|
setVolume(channelNumber, volume, scheduleTime) {
|
|
1776
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1827
1777
|
const channel = this.channels[channelNumber];
|
|
1828
1778
|
channel.state.volume = volume / 127;
|
|
1829
1779
|
this.updateChannelVolume(channel, scheduleTime);
|
|
@@ -1837,8 +1787,7 @@ class Midy {
|
|
|
1837
1787
|
};
|
|
1838
1788
|
}
|
|
1839
1789
|
setKeyBasedPan(channel, scheduleTime) {
|
|
1840
|
-
|
|
1841
|
-
this.processScheduledNotes(channel, scheduleTime, (note) => {
|
|
1790
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1842
1791
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 10);
|
|
1843
1792
|
if (keyBasedValue !== 0) {
|
|
1844
1793
|
const { gainLeft, gainRight } = this.panToGain((keyBasedValue + 1) / 2);
|
|
@@ -1852,12 +1801,14 @@ class Midy {
|
|
|
1852
1801
|
});
|
|
1853
1802
|
}
|
|
1854
1803
|
setPan(channelNumber, pan, scheduleTime) {
|
|
1804
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1855
1805
|
const channel = this.channels[channelNumber];
|
|
1856
1806
|
channel.state.pan = pan / 127;
|
|
1857
1807
|
this.updateChannelVolume(channel, scheduleTime);
|
|
1858
1808
|
this.setKeyBasedPan(channel, scheduleTime);
|
|
1859
1809
|
}
|
|
1860
1810
|
setExpression(channelNumber, expression, scheduleTime) {
|
|
1811
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1861
1812
|
const channel = this.channels[channelNumber];
|
|
1862
1813
|
channel.state.expression = expression / 127;
|
|
1863
1814
|
this.updateChannelVolume(channel, scheduleTime);
|
|
@@ -1865,241 +1816,195 @@ class Midy {
|
|
|
1865
1816
|
setBankLSB(channelNumber, lsb) {
|
|
1866
1817
|
this.channels[channelNumber].bankLSB = lsb;
|
|
1867
1818
|
}
|
|
1868
|
-
dataEntryLSB(channelNumber, value) {
|
|
1819
|
+
dataEntryLSB(channelNumber, value, scheduleTime) {
|
|
1869
1820
|
this.channels[channelNumber].dataLSB = value;
|
|
1870
|
-
this.handleRPN(channelNumber,
|
|
1821
|
+
this.handleRPN(channelNumber, scheduleTime);
|
|
1871
1822
|
}
|
|
1872
|
-
updateChannelVolume(channel) {
|
|
1873
|
-
const now = this.audioContext.currentTime;
|
|
1823
|
+
updateChannelVolume(channel, scheduleTime) {
|
|
1874
1824
|
const state = channel.state;
|
|
1875
1825
|
const volume = state.volume * state.expression;
|
|
1876
1826
|
const { gainLeft, gainRight } = this.panToGain(state.pan);
|
|
1877
1827
|
channel.gainL.gain
|
|
1878
|
-
.cancelScheduledValues(
|
|
1879
|
-
.setValueAtTime(volume * gainLeft,
|
|
1828
|
+
.cancelScheduledValues(scheduleTime)
|
|
1829
|
+
.setValueAtTime(volume * gainLeft, scheduleTime);
|
|
1880
1830
|
channel.gainR.gain
|
|
1881
|
-
.cancelScheduledValues(
|
|
1882
|
-
.setValueAtTime(volume * gainRight,
|
|
1831
|
+
.cancelScheduledValues(scheduleTime)
|
|
1832
|
+
.setValueAtTime(volume * gainRight, scheduleTime);
|
|
1883
1833
|
}
|
|
1884
|
-
setSustainPedal(channelNumber, value) {
|
|
1885
|
-
this.
|
|
1886
|
-
|
|
1887
|
-
|
|
1834
|
+
setSustainPedal(channelNumber, value, scheduleTime) {
|
|
1835
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1836
|
+
const channel = this.channels[channelNumber];
|
|
1837
|
+
channel.state.sustainPedal = value / 127;
|
|
1838
|
+
if (64 <= value) {
|
|
1839
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1840
|
+
channel.sustainNotes.push(note);
|
|
1841
|
+
});
|
|
1842
|
+
}
|
|
1843
|
+
else {
|
|
1844
|
+
this.releaseSustainPedal(channelNumber, value, scheduleTime);
|
|
1888
1845
|
}
|
|
1889
1846
|
}
|
|
1890
1847
|
setPortamento(channelNumber, value) {
|
|
1891
1848
|
this.channels[channelNumber].state.portamento = value / 127;
|
|
1892
1849
|
}
|
|
1893
|
-
setSostenutoPedal(channelNumber, value) {
|
|
1850
|
+
setSostenutoPedal(channelNumber, value, scheduleTime) {
|
|
1851
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1894
1852
|
const channel = this.channels[channelNumber];
|
|
1895
1853
|
channel.state.sostenutoPedal = value / 127;
|
|
1896
1854
|
if (64 <= value) {
|
|
1897
|
-
|
|
1898
|
-
channel.sostenutoNotes = this.getActiveNotes(channel, now);
|
|
1855
|
+
channel.sostenutoNotes = this.getActiveNotes(channel, scheduleTime);
|
|
1899
1856
|
}
|
|
1900
1857
|
else {
|
|
1901
|
-
this.releaseSostenutoPedal(channelNumber, value);
|
|
1858
|
+
this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
|
|
1902
1859
|
}
|
|
1903
1860
|
}
|
|
1904
|
-
setSoftPedal(channelNumber, softPedal) {
|
|
1861
|
+
setSoftPedal(channelNumber, softPedal, _scheduleTime) {
|
|
1905
1862
|
const channel = this.channels[channelNumber];
|
|
1906
1863
|
channel.state.softPedal = softPedal / 127;
|
|
1907
1864
|
}
|
|
1908
|
-
setFilterResonance(channelNumber, filterResonance) {
|
|
1909
|
-
|
|
1865
|
+
setFilterResonance(channelNumber, filterResonance, scheduleTime) {
|
|
1866
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1910
1867
|
const channel = this.channels[channelNumber];
|
|
1911
1868
|
const state = channel.state;
|
|
1912
1869
|
state.filterResonance = filterResonance / 64;
|
|
1913
|
-
|
|
1914
|
-
|
|
1915
|
-
|
|
1916
|
-
if (!note)
|
|
1917
|
-
continue;
|
|
1918
|
-
const Q = note.voiceParams.initialFilterQ / 5 * state.filterResonance;
|
|
1919
|
-
note.filterNode.Q.setValueAtTime(Q, now);
|
|
1920
|
-
}
|
|
1870
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1871
|
+
const Q = note.voiceParams.initialFilterQ / 5 * state.filterResonance;
|
|
1872
|
+
note.filterNode.Q.setValueAtTime(Q, scheduleTime);
|
|
1921
1873
|
});
|
|
1922
1874
|
}
|
|
1923
|
-
setReleaseTime(channelNumber, releaseTime) {
|
|
1875
|
+
setReleaseTime(channelNumber, releaseTime, _scheduleTime) {
|
|
1876
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1924
1877
|
const channel = this.channels[channelNumber];
|
|
1925
1878
|
channel.state.releaseTime = releaseTime / 64;
|
|
1926
1879
|
}
|
|
1927
|
-
setAttackTime(channelNumber, attackTime) {
|
|
1928
|
-
|
|
1880
|
+
setAttackTime(channelNumber, attackTime, scheduleTime) {
|
|
1881
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1929
1882
|
const channel = this.channels[channelNumber];
|
|
1930
1883
|
channel.state.attackTime = attackTime / 64;
|
|
1931
|
-
|
|
1932
|
-
|
|
1933
|
-
|
|
1934
|
-
|
|
1935
|
-
continue;
|
|
1936
|
-
if (note.startTime < now)
|
|
1937
|
-
continue;
|
|
1938
|
-
this.setVolumeEnvelope(channel, note);
|
|
1939
|
-
}
|
|
1884
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1885
|
+
if (note.startTime < scheduleTime)
|
|
1886
|
+
return false;
|
|
1887
|
+
this.setVolumeEnvelope(channel, note);
|
|
1940
1888
|
});
|
|
1941
1889
|
}
|
|
1942
|
-
setBrightness(channelNumber, brightness) {
|
|
1890
|
+
setBrightness(channelNumber, brightness, scheduleTime) {
|
|
1891
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1943
1892
|
const channel = this.channels[channelNumber];
|
|
1944
1893
|
channel.state.brightness = brightness / 64;
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
-
|
|
1951
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1952
|
-
}
|
|
1953
|
-
else {
|
|
1954
|
-
this.setFilterEnvelope(channel, note);
|
|
1955
|
-
}
|
|
1894
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1895
|
+
if (note.portamento) {
|
|
1896
|
+
this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
|
|
1897
|
+
}
|
|
1898
|
+
else {
|
|
1899
|
+
this.setFilterEnvelope(channel, note);
|
|
1956
1900
|
}
|
|
1957
1901
|
});
|
|
1958
1902
|
}
|
|
1959
|
-
setDecayTime(channelNumber, dacayTime) {
|
|
1903
|
+
setDecayTime(channelNumber, dacayTime, scheduleTime) {
|
|
1904
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1960
1905
|
const channel = this.channels[channelNumber];
|
|
1961
1906
|
channel.state.decayTime = dacayTime / 64;
|
|
1962
|
-
|
|
1963
|
-
|
|
1964
|
-
const note = noteList[i];
|
|
1965
|
-
if (!note)
|
|
1966
|
-
continue;
|
|
1967
|
-
this.setVolumeEnvelope(channel, note);
|
|
1968
|
-
}
|
|
1907
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1908
|
+
this.setVolumeEnvelope(channel, note, scheduleTime);
|
|
1969
1909
|
});
|
|
1970
1910
|
}
|
|
1971
|
-
setVibratoRate(channelNumber, vibratoRate) {
|
|
1911
|
+
setVibratoRate(channelNumber, vibratoRate, scheduleTime) {
|
|
1912
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1972
1913
|
const channel = this.channels[channelNumber];
|
|
1973
1914
|
channel.state.vibratoRate = vibratoRate / 64;
|
|
1974
1915
|
if (channel.vibratoDepth <= 0)
|
|
1975
1916
|
return;
|
|
1976
|
-
|
|
1977
|
-
|
|
1978
|
-
const note = noteList[i];
|
|
1979
|
-
if (!note)
|
|
1980
|
-
continue;
|
|
1981
|
-
this.setVibLfoToPitch(channel, note);
|
|
1982
|
-
}
|
|
1917
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1918
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1983
1919
|
});
|
|
1984
1920
|
}
|
|
1985
|
-
setVibratoDepth(channelNumber, vibratoDepth) {
|
|
1921
|
+
setVibratoDepth(channelNumber, vibratoDepth, scheduleTime) {
|
|
1922
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1986
1923
|
const channel = this.channels[channelNumber];
|
|
1987
1924
|
const prev = channel.state.vibratoDepth;
|
|
1988
1925
|
channel.state.vibratoDepth = vibratoDepth / 64;
|
|
1989
1926
|
if (0 < prev) {
|
|
1990
|
-
|
|
1991
|
-
|
|
1992
|
-
const note = noteList[i];
|
|
1993
|
-
if (!note)
|
|
1994
|
-
continue;
|
|
1995
|
-
this.setFreqVibLFO(channel, note);
|
|
1996
|
-
}
|
|
1927
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1928
|
+
this.setFreqVibLFO(channel, note, scheduleTime);
|
|
1997
1929
|
});
|
|
1998
1930
|
}
|
|
1999
1931
|
else {
|
|
2000
|
-
|
|
2001
|
-
|
|
2002
|
-
const note = noteList[i];
|
|
2003
|
-
if (!note)
|
|
2004
|
-
continue;
|
|
2005
|
-
this.startVibrato(channel, note, note.startTime);
|
|
2006
|
-
}
|
|
1932
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1933
|
+
this.startVibrato(channel, note, scheduleTime);
|
|
2007
1934
|
});
|
|
2008
1935
|
}
|
|
2009
1936
|
}
|
|
2010
1937
|
setVibratoDelay(channelNumber, vibratoDelay) {
|
|
1938
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
2011
1939
|
const channel = this.channels[channelNumber];
|
|
2012
1940
|
channel.state.vibratoDelay = vibratoDelay / 64;
|
|
2013
1941
|
if (0 < channel.state.vibratoDepth) {
|
|
2014
|
-
|
|
2015
|
-
|
|
2016
|
-
-const note = noteList[i];
-if (!note)
-continue;
-this.startVibrato(channel, note, note.startTime);
-}
+this.processScheduledNotes(channel, (note) => {
+this.startVibrato(channel, note, scheduleTime);
 });
 }
 }
-setReverbSendLevel(channelNumber, reverbSendLevel) {
+setReverbSendLevel(channelNumber, reverbSendLevel, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 const state = channel.state;
 const reverbEffect = this.reverbEffect;
 if (0 < state.reverbSendLevel) {
 if (0 < reverbSendLevel) {
-const now = this.audioContext.currentTime;
 state.reverbSendLevel = reverbSendLevel / 127;
-reverbEffect.input.gain
-
+reverbEffect.input.gain
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(state.reverbSendLevel, scheduleTime);
 }
 else {
-
-
-
-
-continue;
-if (note.voiceParams.reverbEffectsSend <= 0)
-continue;
-note.reverbEffectsSend.disconnect();
-}
+this.processScheduledNotes(channel, (note) => {
+if (note.voiceParams.reverbEffectsSend <= 0)
+return false;
+note.reverbEffectsSend.disconnect();
 });
 }
 }
 else {
 if (0 < reverbSendLevel) {
-
-
-for (let i = 0; i < noteList.length; i++) {
-const note = noteList[i];
-if (!note)
-continue;
-this.setReverbEffectsSend(channel, note, 0);
-}
+this.processScheduledNotes(channel, (note) => {
+this.setReverbEffectsSend(channel, note, 0, scheduleTime);
 });
 state.reverbSendLevel = reverbSendLevel / 127;
-reverbEffect.input.gain
-
+reverbEffect.input.gain
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(state.reverbSendLevel, scheduleTime);
 }
 }
 }
-setChorusSendLevel(channelNumber, chorusSendLevel) {
+setChorusSendLevel(channelNumber, chorusSendLevel, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 const state = channel.state;
 const chorusEffect = this.chorusEffect;
 if (0 < state.chorusSendLevel) {
 if (0 < chorusSendLevel) {
-const now = this.audioContext.currentTime;
 state.chorusSendLevel = chorusSendLevel / 127;
-chorusEffect.input.gain
-
+chorusEffect.input.gain
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(state.chorusSendLevel, scheduleTime);
 }
 else {
-
-
-
-
-continue;
-if (note.voiceParams.chorusEffectsSend <= 0)
-continue;
-note.chorusEffectsSend.disconnect();
-}
+this.processScheduledNotes(channel, (note) => {
+if (note.voiceParams.chorusEffectsSend <= 0)
+return false;
+note.chorusEffectsSend.disconnect();
 });
 }
 }
 else {
 if (0 < chorusSendLevel) {
-
-
-for (let i = 0; i < noteList.length; i++) {
-const note = noteList[i];
-if (!note)
-continue;
-this.setChorusEffectsSend(channel, note, 0);
-}
+this.processScheduledNotes(channel, (note) => {
+this.setChorusEffectsSend(channel, note, 0, scheduleTime);
 });
 state.chorusSendLevel = chorusSendLevel / 127;
-chorusEffect.input.gain
-
+chorusEffect.input.gain
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(state.chorusSendLevel, scheduleTime);
 }
 }
 }
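Usage sketch for the new optional scheduleTime argument, which falls back to audioContext.currentTime when omitted; `midy` and `audioContext` are illustrative names for a Midy instance and its AudioContext, not identifiers taken from this diff:

const t = audioContext.currentTime;
midy.setReverbSendLevel(0, 64);           // no scheduleTime: applied at currentTime
midy.setChorusSendLevel(0, 40, t + 0.5);  // explicit scheduleTime: applied 0.5 s later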
@@ -2129,25 +2034,25 @@ class Midy {
 channel.dataMSB = minMSB;
 }
 }
-handleRPN(channelNumber, value) {
+handleRPN(channelNumber, value, scheduleTime) {
 const channel = this.channels[channelNumber];
 const rpn = channel.rpnMSB * 128 + channel.rpnLSB;
 switch (rpn) {
 case 0:
 channel.dataLSB += value;
-this.handlePitchBendRangeRPN(channelNumber);
+this.handlePitchBendRangeRPN(channelNumber, scheduleTime);
 break;
 case 1:
 channel.dataLSB += value;
-this.handleFineTuningRPN(channelNumber);
+this.handleFineTuningRPN(channelNumber, scheduleTime);
 break;
 case 2:
 channel.dataMSB += value;
-this.handleCoarseTuningRPN(channelNumber);
+this.handleCoarseTuningRPN(channelNumber, scheduleTime);
 break;
 case 5:
 channel.dataLSB += value;
-this.handleModulationDepthRangeRPN(channelNumber);
+this.handleModulationDepthRangeRPN(channelNumber, scheduleTime);
 break;
 default:
 console.warn(`Channel ${channelNumber}: Unsupported RPN MSB=${channel.rpnMSB} LSB=${channel.rpnLSB}`);
@@ -2167,67 +2072,72 @@ class Midy {
 setRPNLSB(channelNumber, value) {
 this.channels[channelNumber].rpnLSB = value;
 }
-dataEntryMSB(channelNumber, value) {
+dataEntryMSB(channelNumber, value, scheduleTime) {
 this.channels[channelNumber].dataMSB = value;
-this.handleRPN(channelNumber,
+this.handleRPN(channelNumber, scheduleTime);
 }
-handlePitchBendRangeRPN(channelNumber) {
+handlePitchBendRangeRPN(channelNumber, scheduleTime) {
 const channel = this.channels[channelNumber];
 this.limitData(channel, 0, 127, 0, 99);
 const pitchBendRange = channel.dataMSB + channel.dataLSB / 100;
-this.setPitchBendRange(channelNumber, pitchBendRange);
+this.setPitchBendRange(channelNumber, pitchBendRange, scheduleTime);
 }
-setPitchBendRange(channelNumber, value) {
+setPitchBendRange(channelNumber, value, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 const state = channel.state;
 const prev = state.pitchWheelSensitivity;
 const next = value / 128;
 state.pitchWheelSensitivity = next;
 channel.detune += (state.pitchWheel * 2 - 1) * (next - prev) * 12800;
-this.updateChannelDetune(channel);
-this.applyVoiceParams(channel, 16);
+this.updateChannelDetune(channel, scheduleTime);
+this.applyVoiceParams(channel, 16, scheduleTime);
 }
-handleFineTuningRPN(channelNumber) {
+handleFineTuningRPN(channelNumber, scheduleTime) {
 const channel = this.channels[channelNumber];
 this.limitData(channel, 0, 127, 0, 127);
 const fineTuning = channel.dataMSB * 128 + channel.dataLSB;
-this.setFineTuning(channelNumber, fineTuning);
+this.setFineTuning(channelNumber, fineTuning, scheduleTime);
 }
-setFineTuning(channelNumber, value) {
+setFineTuning(channelNumber, value, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 const prev = channel.fineTuning;
 const next = (value - 8192) / 8.192; // cent
 channel.fineTuning = next;
 channel.detune += next - prev;
-this.updateChannelDetune(channel);
+this.updateChannelDetune(channel, scheduleTime);
 }
-handleCoarseTuningRPN(channelNumber) {
+handleCoarseTuningRPN(channelNumber, scheduleTime) {
 const channel = this.channels[channelNumber];
 this.limitDataMSB(channel, 0, 127);
 const coarseTuning = channel.dataMSB;
-this.setCoarseTuning(channelNumber, coarseTuning);
+this.setCoarseTuning(channelNumber, coarseTuning, scheduleTime);
 }
-setCoarseTuning(channelNumber, value) {
+setCoarseTuning(channelNumber, value, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 const prev = channel.coarseTuning;
 const next = (value - 64) * 100; // cent
 channel.coarseTuning = next;
 channel.detune += next - prev;
-this.updateChannelDetune(channel);
+this.updateChannelDetune(channel, scheduleTime);
 }
-handleModulationDepthRangeRPN(channelNumber) {
+handleModulationDepthRangeRPN(channelNumber, scheduleTime) {
 const channel = this.channels[channelNumber];
 this.limitData(channel, 0, 127, 0, 127);
 const modulationDepthRange = (dataMSB + dataLSB / 128) * 100;
-this.setModulationDepthRange(channelNumber, modulationDepthRange);
+this.setModulationDepthRange(channelNumber, modulationDepthRange, scheduleTime);
 }
-setModulationDepthRange(channelNumber, modulationDepthRange) {
+setModulationDepthRange(channelNumber, modulationDepthRange, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 channel.modulationDepthRange = modulationDepthRange;
-this.updateModulation(channel);
+this.updateModulation(channel, scheduleTime);
 }
-allSoundOff(channelNumber) {
-
+allSoundOff(channelNumber, _value, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
+return this.stopChannelNotes(channelNumber, 0, true, scheduleTime);
 }
 resetAllControllers(channelNumber) {
 const stateTypes = [
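The tuning setters thread the same optional scheduleTime down to updateChannelDetune and applyVoiceParams. A minimal sketch, continuing with the illustrative `midy`/`audioContext` names:

// (66 - 64) * 100 = +200 cents on channel 0, applied 0.1 s ahead of now.
midy.setCoarseTuning(0, 66, audioContext.currentTime + 0.1);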
@@ -2255,31 +2165,36 @@ class Midy {
 channel[type] = this.constructor.channelSettings[type];
 }
 }
-allNotesOff(channelNumber) {
-
+allNotesOff(channelNumber, _value, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
+return this.stopChannelNotes(channelNumber, 0, false, scheduleTime);
 }
-omniOff() {
-this.
+omniOff(channelNumber, value, scheduleTime) {
+this.allNotesOff(channelNumber, value, scheduleTime);
 }
-omniOn() {
-this.
+omniOn(channelNumber, value, scheduleTime) {
+this.allNotesOff(channelNumber, value, scheduleTime);
 }
-monoOn() {
-
+monoOn(channelNumber, value, scheduleTime) {
+const channel = this.channels[channelNumber];
+this.allNotesOff(channelNumber, value, scheduleTime);
+channel.mono = true;
 }
-polyOn() {
-
+polyOn(channelNumber, value, scheduleTime) {
+const channel = this.channels[channelNumber];
+this.allNotesOff(channelNumber, value, scheduleTime);
+channel.mono = false;
 }
-handleUniversalNonRealTimeExclusiveMessage(data) {
+handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime) {
 switch (data[2]) {
 case 8:
 switch (data[3]) {
 case 8:
 // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false);
+return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false, scheduleTime);
 case 9:
 // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false);
+return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false, scheduleTime);
 default:
 console.warn(`Unsupported Exclusive Message: ${data}`);
 }
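The channel-mode handlers are now per channel: they take (channelNumber, value, scheduleTime), and monoOn/polyOn flip channel.mono (whose default is added to channelSettings at the bottom of this diff) instead of an instance-wide flag. A hedged sketch with illustrative arguments:

const t = audioContext.currentTime;
midy.allNotesOff(0, 0, t); // release channel 0's scheduled notes at time t
midy.monoOn(0, 0, t);      // allNotesOff, then channel.mono = true
midy.polyOn(0, 0, t);      // allNotesOff, then channel.mono = false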
@@ -2322,18 +2237,18 @@ class Midy {
 this.channels[9].bankMSB = 120;
 this.channels[9].bank = 120 * 128;
 }
-handleUniversalRealTimeExclusiveMessage(data) {
+handleUniversalRealTimeExclusiveMessage(data, scheduleTime) {
 switch (data[2]) {
 case 4:
 switch (data[3]) {
 case 1:
-return this.handleMasterVolumeSysEx(data);
+return this.handleMasterVolumeSysEx(data, scheduleTime);
 case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
-return this.handleMasterFineTuningSysEx(data);
+return this.handleMasterFineTuningSysEx(data, scheduleTime);
 case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
-return this.handleMasterCoarseTuningSysEx(data);
+return this.handleMasterCoarseTuningSysEx(data, scheduleTime);
 case 5: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca24.pdf
-return this.handleGlobalParameterControlSysEx(data);
+return this.handleGlobalParameterControlSysEx(data, scheduleTime);
 default:
 console.warn(`Unsupported Exclusive Message: ${data}`);
 }
@@ -2341,10 +2256,10 @@ class Midy {
 case 8:
 switch (data[3]) {
 case 8: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true);
+return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true, scheduleTime);
 case 9:
 // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true);
+return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true, scheduleTime);
 default:
 console.warn(`Unsupported Exclusive Message: ${data}`);
 }
@@ -2364,7 +2279,7 @@ class Midy {
 case 10:
 switch (data[3]) {
 case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca23.pdf
-return this.handleKeyBasedInstrumentControlSysEx(data);
+return this.handleKeyBasedInstrumentControlSysEx(data, scheduleTime);
 default:
 console.warn(`Unsupported Exclusive Message: ${data}`);
 }
@@ -2373,49 +2288,50 @@ class Midy {
 console.warn(`Unsupported Exclusive Message: ${data}`);
 }
 }
-handleMasterVolumeSysEx(data) {
+handleMasterVolumeSysEx(data, scheduleTime) {
 const volume = (data[5] * 128 + data[4]) / 16383;
-this.setMasterVolume(volume);
+this.setMasterVolume(volume, scheduleTime);
 }
-setMasterVolume(volume) {
+setMasterVolume(volume, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 if (volume < 0 && 1 < volume) {
 console.error("Master Volume is out of range");
 }
 else {
-
-
-
+this.masterVolume.gain
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(volume * volume, scheduleTime);
 }
 }
-handleMasterFineTuningSysEx(data) {
+handleMasterFineTuningSysEx(data, scheduleTime) {
 const fineTuning = data[5] * 128 + data[4];
-this.setMasterFineTuning(fineTuning);
+this.setMasterFineTuning(fineTuning, scheduleTime);
 }
-setMasterFineTuning(value) {
+setMasterFineTuning(value, scheduleTime) {
 const prev = this.masterFineTuning;
 const next = (value - 8192) / 8.192; // cent
 this.masterFineTuning = next;
 channel.detune += next - prev;
-this.updateChannelDetune(channel);
+this.updateChannelDetune(channel, scheduleTime);
 }
-handleMasterCoarseTuningSysEx(data) {
+handleMasterCoarseTuningSysEx(data, scheduleTime) {
 const coarseTuning = data[4];
-this.setMasterCoarseTuning(coarseTuning);
+this.setMasterCoarseTuning(coarseTuning, scheduleTime);
 }
-setMasterCoarseTuning(value) {
+setMasterCoarseTuning(value, scheduleTime) {
 const prev = this.masterCoarseTuning;
 const next = (value - 64) * 100; // cent
 this.masterCoarseTuning = next;
 channel.detune += next - prev;
-this.updateChannelDetune(channel);
+this.updateChannelDetune(channel, scheduleTime);
 }
-handleGlobalParameterControlSysEx(data) {
+handleGlobalParameterControlSysEx(data, scheduleTime) {
 if (data[7] === 1) {
 switch (data[8]) {
 case 1:
 return this.handleReverbParameterSysEx(data);
 case 2:
-return this.handleChorusParameterSysEx(data);
+return this.handleChorusParameterSysEx(data, scheduleTime);
 default:
 console.warn(`Unsupported Global Parameter Control Message: ${data}`);
 }
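setMasterVolume also accepts the scheduleTime and writes volume * volume to the master gain at that time. Sketch with illustrative values:

// Set master volume to 0.5 (applied as 0.5 * 0.5 = 0.25 gain) half a second from now;
// volume must stay within [0, 1].
midy.setMasterVolume(0.5, audioContext.currentTime + 0.5);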
@@ -2494,88 +2410,84 @@ class Midy {
 calcDelay(rt60, feedback) {
 return -rt60 * Math.log10(feedback) / 3;
 }
-handleChorusParameterSysEx(data) {
+handleChorusParameterSysEx(data, scheduleTime) {
 switch (data[9]) {
 case 0:
-return this.setChorusType(data[10]);
+return this.setChorusType(data[10], scheduleTime);
 case 1:
-return this.setChorusModRate(data[10]);
+return this.setChorusModRate(data[10], scheduleTime);
 case 2:
-return this.setChorusModDepth(data[10]);
+return this.setChorusModDepth(data[10], scheduleTime);
 case 3:
-return this.setChorusFeedback(data[10]);
+return this.setChorusFeedback(data[10], scheduleTime);
 case 4:
-return this.setChorusSendToReverb(data[10]);
+return this.setChorusSendToReverb(data[10], scheduleTime);
 }
 }
-setChorusType(type) {
+setChorusType(type, scheduleTime) {
 switch (type) {
 case 0:
-return this.setChorusParameter(3, 5, 0, 0);
+return this.setChorusParameter(3, 5, 0, 0, scheduleTime);
 case 1:
-return this.setChorusParameter(9, 19, 5, 0);
+return this.setChorusParameter(9, 19, 5, 0, scheduleTime);
 case 2:
-return this.setChorusParameter(3, 19, 8, 0);
+return this.setChorusParameter(3, 19, 8, 0, scheduleTime);
 case 3:
-return this.setChorusParameter(9, 16, 16, 0);
+return this.setChorusParameter(9, 16, 16, 0, scheduleTime);
 case 4:
-return this.setChorusParameter(2, 24, 64, 0);
+return this.setChorusParameter(2, 24, 64, 0, scheduleTime);
 case 5:
-return this.setChorusParameter(1, 5, 112, 0);
+return this.setChorusParameter(1, 5, 112, 0, scheduleTime);
 default:
 console.warn(`Unsupported Chorus Type: ${type}`);
 }
 }
-setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
-this.setChorusModRate(modRate);
-this.setChorusModDepth(modDepth);
-this.setChorusFeedback(feedback);
-this.setChorusSendToReverb(sendToReverb);
+setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime) {
+this.setChorusModRate(modRate, scheduleTime);
+this.setChorusModDepth(modDepth, scheduleTime);
+this.setChorusFeedback(feedback, scheduleTime);
+this.setChorusSendToReverb(sendToReverb, scheduleTime);
 }
-setChorusModRate(value) {
-const now = this.audioContext.currentTime;
+setChorusModRate(value, scheduleTime) {
 const modRate = this.getChorusModRate(value);
 this.chorus.modRate = modRate;
-this.chorusEffect.lfo.frequency.setValueAtTime(modRate,
+this.chorusEffect.lfo.frequency.setValueAtTime(modRate, scheduleTime);
 }
 getChorusModRate(value) {
 return value * 0.122; // Hz
 }
-setChorusModDepth(value) {
-const now = this.audioContext.currentTime;
+setChorusModDepth(value, scheduleTime) {
 const modDepth = this.getChorusModDepth(value);
 this.chorus.modDepth = modDepth;
 this.chorusEffect.lfoGain.gain
-.cancelScheduledValues(
-.setValueAtTime(modDepth / 2,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(modDepth / 2, scheduleTime);
 }
 getChorusModDepth(value) {
 return (value + 1) / 3200; // second
 }
-setChorusFeedback(value) {
-const now = this.audioContext.currentTime;
+setChorusFeedback(value, scheduleTime) {
 const feedback = this.getChorusFeedback(value);
 this.chorus.feedback = feedback;
 const chorusEffect = this.chorusEffect;
 for (let i = 0; i < chorusEffect.feedbackGains.length; i++) {
 chorusEffect.feedbackGains[i].gain
-.cancelScheduledValues(
-.setValueAtTime(feedback,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(feedback, scheduleTime);
 }
 }
 getChorusFeedback(value) {
 return value * 0.00763;
 }
-setChorusSendToReverb(value) {
+setChorusSendToReverb(value, scheduleTime) {
 const sendToReverb = this.getChorusSendToReverb(value);
 const sendGain = this.chorusEffect.sendGain;
 if (0 < this.chorus.sendToReverb) {
 this.chorus.sendToReverb = sendToReverb;
 if (0 < sendToReverb) {
-const now = this.audioContext.currentTime;
 sendGain.gain
-.cancelScheduledValues(
-.setValueAtTime(sendToReverb,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(sendToReverb, scheduleTime);
 }
 else {
 sendGain.disconnect();
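The GM2 chorus presets can now be switched at a scheduled time as well; each preset expands to setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime). Sketch:

// Chorus 3 (modRate 9, modDepth 16, feedback 16, sendToReverb 0), two seconds ahead.
midy.setChorusType(3, audioContext.currentTime + 2);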
@@ -2584,11 +2496,10 @@ class Midy {
 else {
 this.chorus.sendToReverb = sendToReverb;
 if (0 < sendToReverb) {
-const now = this.audioContext.currentTime;
 sendGain.connect(this.reverbEffect.input);
 sendGain.gain
-.cancelScheduledValues(
-.setValueAtTime(sendToReverb,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(sendToReverb, scheduleTime);
 }
 }
 }
@@ -2614,7 +2525,7 @@ class Midy {
 }
 return bitmap;
 }
-handleScaleOctaveTuning1ByteFormatSysEx(data, realtime) {
+handleScaleOctaveTuning1ByteFormatSysEx(data, realtime, scheduleTime) {
 if (data.length < 19) {
 console.error("Data length is too short");
 return;
@@ -2629,10 +2540,10 @@ class Midy {
 channel.scaleOctaveTuningTable[j] = centValue;
 }
 if (realtime)
-this.updateChannelDetune(channel);
+this.updateChannelDetune(channel, scheduleTime);
 }
 }
-handleScaleOctaveTuning2ByteFormatSysEx(data, realtime) {
+handleScaleOctaveTuning2ByteFormatSysEx(data, realtime, scheduleTime) {
 if (data.length < 31) {
 console.error("Data length is too short");
 return;
@@ -2651,7 +2562,7 @@ class Midy {
 channel.scaleOctaveTuningTable[j] = centValue;
 }
 if (realtime)
-this.updateChannelDetune(channel);
+this.updateChannelDetune(channel, scheduleTime);
 }
 }
 getPitchControl(channel, note) {
@@ -2710,7 +2621,7 @@ class Midy {
 if (table[5] !== 0)
 this.setModLfoToVolume(channel, note);
 }
-
+handlePressureSysEx(data, tableName) {
 const channelNumber = data[4];
 const table = this.channels[channelNumber][tableName];
 for (let i = 5; i < data.length - 1; i += 2) {
@@ -2734,13 +2645,8 @@ class Midy {
 const slotSize = 6;
 const offset = controllerType * slotSize;
 const table = channel.controlTable.subarray(offset, offset + slotSize);
-
-
-const note = noteList[i];
-if (!note)
-continue;
-this.setControllerParameters(channel, note, table);
-}
+this.processScheduledNotes(channel, (note) => {
+this.setControllerParameters(channel, note, table);
 });
 }
 handleControlChangeSysEx(data) {
@@ -2758,7 +2664,7 @@ class Midy {
 const controlValue = channel.keyBasedInstrumentControlTable[index];
 return (controlValue + 64) / 64;
 }
-handleKeyBasedInstrumentControlSysEx(data) {
+handleKeyBasedInstrumentControlSysEx(data, scheduleTime) {
 const channelNumber = data[4];
 const keyNumber = data[5];
 const table = this.channels[channelNumber].keyBasedInstrumentControlTable;
@@ -2768,30 +2674,27 @@ class Midy {
 const index = keyNumber * 128 + controllerType;
 table[index] = value - 64;
 }
-this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127);
-}
-handleExclusiveMessage(data) {
-console.warn(`Unsupported Exclusive Message: ${data}`);
+this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127, scheduleTime);
 }
-handleSysEx(data) {
+handleSysEx(data, scheduleTime) {
 switch (data[0]) {
 case 126:
-return this.handleUniversalNonRealTimeExclusiveMessage(data);
+return this.handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime);
 case 127:
-return this.handleUniversalRealTimeExclusiveMessage(data);
+return this.handleUniversalRealTimeExclusiveMessage(data, scheduleTime);
 default:
-
+console.warn(`Unsupported Exclusive Message: ${data}`);
 }
 }
-scheduleTask(callback,
+scheduleTask(callback, scheduleTime) {
 return new Promise((resolve) => {
 const bufferSource = new AudioBufferSourceNode(this.audioContext);
 bufferSource.onended = () => {
 callback();
 resolve();
 };
-bufferSource.start(
-bufferSource.stop(
+bufferSource.start(scheduleTime);
+bufferSource.stop(scheduleTime);
 });
 }
 }
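handleSysEx forwards scheduleTime into the universal SysEx handlers, and scheduleTask resolves once the scheduled time is reached. A sketch, assuming the data array is the SysEx payload without the leading 0xF0 byte (as the data[0] === 126/127 checks above suggest):

const t = audioContext.currentTime;
// Universal Real Time, device 0x7F, Master Volume (04 01), 14-bit value 0x3FFF (full volume).
midy.handleSysEx(new Uint8Array([0x7f, 0x7f, 0x04, 0x01, 0x7f, 0x7f, 0xf7]), t);
// Run a callback when the AudioContext clock reaches t + 1.
midy.scheduleTask(() => console.log("one second later"), t + 1);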
@@ -2811,6 +2714,7 @@ Object.defineProperty(Midy, "channelSettings", {
 dataLSB: 0,
 rpnMSB: 127,
 rpnLSB: 127,
+mono: false, // CC#124, CC#125
 fineTuning: 0, // cb
 coarseTuning: 0, // cb
 modulationDepthRange: 50, // cent