@marmooo/midy 0.2.6 → 0.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +17 -12
- package/esm/midy-GM1.d.ts +68 -69
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +143 -156
- package/esm/midy-GM2.d.ts +104 -105
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +251 -280
- package/esm/midy-GMLite.d.ts +68 -69
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +143 -156
- package/esm/midy.d.ts +127 -128
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +274 -307
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +68 -69
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +143 -156
- package/script/midy-GM2.d.ts +104 -105
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +251 -280
- package/script/midy-GMLite.d.ts +68 -69
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +143 -156
- package/script/midy.d.ts +127 -128
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +274 -307
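The diff of package/script/midy.js below replaces per-call `this.audioContext.currentTime` lookups with an explicit `scheduleTime` argument threaded through the note, controller, and SysEx handling methods. A minimal caller-side sketch of that pattern, assuming only the `noteOn`/`noteOff` signatures visible in the diff; the import name, exported class name, and constructor shape are assumptions, not taken from the package:

```js
// Hypothetical usage sketch: schedule events on the Web Audio clock by passing
// an explicit scheduleTime instead of relying on an implicit
// audioContext.currentTime lookup inside the library.
import { Midy } from "@marmooo/midy"; // export name assumed

const audioContext = new AudioContext();
const midy = new Midy(audioContext); // constructor shape assumed

const when = audioContext.currentTime + 0.5; // half a second ahead
midy.noteOn(0, 60, 100, when);       // channel 0, middle C, velocity 100
midy.noteOff(0, 60, 0, when + 1.0);  // release one second after onset
```

When the last argument is omitted, the diff shows these methods falling back to `scheduleTime ??= this.audioContext.currentTime`, so existing call sites keep their previous behavior.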
package/script/midy.js
CHANGED
@@ -590,7 +590,8 @@ class Midy {
|
|
|
590
590
|
const portamentoTarget = this.findPortamentoTarget(queueIndex);
|
|
591
591
|
if (portamentoTarget)
|
|
592
592
|
portamentoTarget.portamento = true;
|
|
593
|
-
const notePromise = this.
|
|
593
|
+
const notePromise = this.scheduleNoteOff(this.omni ? 0 : event.channel, event.noteNumber, event.velocity, startTime, false, // force
|
|
594
|
+
portamentoTarget?.noteNumber);
|
|
594
595
|
if (notePromise) {
|
|
595
596
|
this.notePromises.push(notePromise);
|
|
596
597
|
}
|
|
@@ -643,10 +644,11 @@ class Midy {
|
|
|
643
644
|
resolve();
|
|
644
645
|
return;
|
|
645
646
|
}
|
|
646
|
-
const
|
|
647
|
+
const now = this.audioContext.currentTime;
|
|
648
|
+
const t = now + offset;
|
|
647
649
|
queueIndex = await this.scheduleTimelineEvents(t, offset, queueIndex);
|
|
648
650
|
if (this.isPausing) {
|
|
649
|
-
await this.stopNotes(0, true);
|
|
651
|
+
await this.stopNotes(0, true, now);
|
|
650
652
|
this.notePromises = [];
|
|
651
653
|
resolve();
|
|
652
654
|
this.isPausing = false;
|
|
@@ -654,7 +656,7 @@ class Midy {
|
|
|
654
656
|
return;
|
|
655
657
|
}
|
|
656
658
|
else if (this.isStopping) {
|
|
657
|
-
await this.stopNotes(0, true);
|
|
659
|
+
await this.stopNotes(0, true, now);
|
|
658
660
|
this.notePromises = [];
|
|
659
661
|
this.exclusiveClassMap.clear();
|
|
660
662
|
this.audioBufferCache.clear();
|
|
@@ -664,7 +666,7 @@ class Midy {
|
|
|
664
666
|
return;
|
|
665
667
|
}
|
|
666
668
|
else if (this.isSeeking) {
|
|
667
|
-
this.stopNotes(0, true);
|
|
669
|
+
this.stopNotes(0, true, now);
|
|
668
670
|
this.exclusiveClassMap.clear();
|
|
669
671
|
this.startTime = this.audioContext.currentTime;
|
|
670
672
|
queueIndex = this.getQueueIndex(this.resumeTime);
|
|
@@ -673,7 +675,6 @@ class Midy {
|
|
|
673
675
|
await schedulePlayback();
|
|
674
676
|
}
|
|
675
677
|
else {
|
|
676
|
-
const now = this.audioContext.currentTime;
|
|
677
678
|
const waitTime = now + this.noteCheckInterval;
|
|
678
679
|
await this.scheduleTask(() => { }, waitTime);
|
|
679
680
|
await schedulePlayback();
|
|
@@ -793,25 +794,26 @@ class Midy {
|
|
|
793
794
|
}
|
|
794
795
|
return { instruments, timeline };
|
|
795
796
|
}
|
|
796
|
-
|
|
797
|
-
const now = this.audioContext.currentTime;
|
|
797
|
+
stopChannelNotes(channelNumber, velocity, force, scheduleTime) {
|
|
798
798
|
const channel = this.channels[channelNumber];
|
|
799
|
+
const promises = [];
|
|
799
800
|
channel.scheduledNotes.forEach((noteList) => {
|
|
800
801
|
for (let i = 0; i < noteList.length; i++) {
|
|
801
802
|
const note = noteList[i];
|
|
802
803
|
if (!note)
|
|
803
804
|
continue;
|
|
804
|
-
const promise = this.
|
|
805
|
-
force);
|
|
805
|
+
const promise = this.scheduleNoteOff(channelNumber, note.noteNumber, velocity, scheduleTime, force, undefined);
|
|
806
806
|
this.notePromises.push(promise);
|
|
807
|
+
promises.push(promise);
|
|
807
808
|
}
|
|
808
809
|
});
|
|
809
810
|
channel.scheduledNotes.clear();
|
|
810
|
-
|
|
811
|
+
return Promise.all(promises);
|
|
811
812
|
}
|
|
812
|
-
stopNotes(velocity, force) {
|
|
813
|
+
stopNotes(velocity, force, scheduleTime) {
|
|
814
|
+
const promises = [];
|
|
813
815
|
for (let i = 0; i < this.channels.length; i++) {
|
|
814
|
-
this.stopChannelNotes(i, velocity, force);
|
|
816
|
+
promises.push(this.stopChannelNotes(i, velocity, force, scheduleTime));
|
|
815
817
|
}
|
|
816
818
|
return Promise.all(this.notePromises);
|
|
817
819
|
}
|
|
@@ -871,22 +873,22 @@ class Midy {
|
|
|
871
873
|
}
|
|
872
874
|
});
|
|
873
875
|
}
|
|
874
|
-
getActiveNotes(channel,
|
|
876
|
+
getActiveNotes(channel, scheduleTime) {
|
|
875
877
|
const activeNotes = new SparseMap(128);
|
|
876
878
|
channel.scheduledNotes.forEach((noteList) => {
|
|
877
|
-
const activeNote = this.getActiveNote(noteList,
|
|
879
|
+
const activeNote = this.getActiveNote(noteList, scheduleTime);
|
|
878
880
|
if (activeNote) {
|
|
879
881
|
activeNotes.set(activeNote.noteNumber, activeNote);
|
|
880
882
|
}
|
|
881
883
|
});
|
|
882
884
|
return activeNotes;
|
|
883
885
|
}
|
|
884
|
-
getActiveNote(noteList,
|
|
886
|
+
getActiveNote(noteList, scheduleTime) {
|
|
885
887
|
for (let i = noteList.length - 1; i >= 0; i--) {
|
|
886
888
|
const note = noteList[i];
|
|
887
889
|
if (!note)
|
|
888
890
|
return;
|
|
889
|
-
if (
|
|
891
|
+
if (scheduleTime < note.startTime)
|
|
890
892
|
continue;
|
|
891
893
|
return (note.ending) ? null : note;
|
|
892
894
|
}
|
|
@@ -1046,44 +1048,36 @@ class Midy {
|
|
|
1046
1048
|
calcNoteDetune(channel, note) {
|
|
1047
1049
|
return channel.scaleOctaveTuningTable[note.noteNumber % 12];
|
|
1048
1050
|
}
|
|
1049
|
-
updateChannelDetune(channel) {
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
const note = noteList[i];
|
|
1053
|
-
if (!note)
|
|
1054
|
-
continue;
|
|
1055
|
-
this.updateDetune(channel, note);
|
|
1056
|
-
}
|
|
1051
|
+
updateChannelDetune(channel, scheduleTime) {
|
|
1052
|
+
this.processScheduledNotes(channel, scheduleTime, (note) => {
|
|
1053
|
+
this.updateDetune(channel, note, scheduleTime);
|
|
1057
1054
|
});
|
|
1058
1055
|
}
|
|
1059
|
-
updateDetune(channel, note) {
|
|
1060
|
-
const now = this.audioContext.currentTime;
|
|
1056
|
+
updateDetune(channel, note, scheduleTime) {
|
|
1061
1057
|
const noteDetune = this.calcNoteDetune(channel, note);
|
|
1062
1058
|
const pitchControl = this.getPitchControl(channel, note);
|
|
1063
1059
|
const detune = channel.detune + noteDetune + pitchControl;
|
|
1064
1060
|
note.bufferSource.detune
|
|
1065
|
-
.cancelScheduledValues(
|
|
1066
|
-
.setValueAtTime(detune,
|
|
1061
|
+
.cancelScheduledValues(scheduleTime)
|
|
1062
|
+
.setValueAtTime(detune, scheduleTime);
|
|
1067
1063
|
}
|
|
1068
1064
|
getPortamentoTime(channel) {
|
|
1069
1065
|
const factor = 5 * Math.log(10) / 127;
|
|
1070
1066
|
const time = channel.state.portamentoTime;
|
|
1071
1067
|
return Math.log(time) / factor;
|
|
1072
1068
|
}
|
|
1073
|
-
setPortamentoStartVolumeEnvelope(channel, note) {
|
|
1074
|
-
const now = this.audioContext.currentTime;
|
|
1069
|
+
setPortamentoStartVolumeEnvelope(channel, note, scheduleTime) {
|
|
1075
1070
|
const { voiceParams, startTime } = note;
|
|
1076
1071
|
const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation);
|
|
1077
1072
|
const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
|
|
1078
1073
|
const volDelay = startTime + voiceParams.volDelay;
|
|
1079
1074
|
const portamentoTime = volDelay + this.getPortamentoTime(channel);
|
|
1080
1075
|
note.volumeEnvelopeNode.gain
|
|
1081
|
-
.cancelScheduledValues(
|
|
1076
|
+
.cancelScheduledValues(scheduleTime)
|
|
1082
1077
|
.setValueAtTime(0, volDelay)
|
|
1083
1078
|
.linearRampToValueAtTime(sustainVolume, portamentoTime);
|
|
1084
1079
|
}
|
|
1085
|
-
setVolumeEnvelope(channel, note) {
|
|
1086
|
-
const now = this.audioContext.currentTime;
|
|
1080
|
+
setVolumeEnvelope(channel, note, scheduleTime) {
|
|
1087
1081
|
const state = channel.state;
|
|
1088
1082
|
const { voiceParams, startTime } = note;
|
|
1089
1083
|
const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation) *
|
|
@@ -1094,7 +1088,7 @@ class Midy {
|
|
|
1094
1088
|
const volHold = volAttack + voiceParams.volHold;
|
|
1095
1089
|
const volDecay = volHold + voiceParams.volDecay * state.decayTime * 2;
|
|
1096
1090
|
note.volumeEnvelopeNode.gain
|
|
1097
|
-
.cancelScheduledValues(
|
|
1091
|
+
.cancelScheduledValues(scheduleTime)
|
|
1098
1092
|
.setValueAtTime(0, startTime)
|
|
1099
1093
|
.setValueAtTime(1e-6, volDelay) // exponentialRampToValueAtTime() requires a non-zero value
|
|
1100
1094
|
.exponentialRampToValueAtTime(attackVolume, volAttack)
|
|
@@ -1102,7 +1096,6 @@ class Midy {
|
|
|
1102
1096
|
.linearRampToValueAtTime(sustainVolume, volDecay);
|
|
1103
1097
|
}
|
|
1104
1098
|
setPitchEnvelope(note, scheduleTime) {
|
|
1105
|
-
scheduleTime ??= this.audioContext.currentTime;
|
|
1106
1099
|
const { voiceParams } = note;
|
|
1107
1100
|
const baseRate = voiceParams.playbackRate;
|
|
1108
1101
|
note.bufferSource.playbackRate
|
|
@@ -1129,8 +1122,7 @@ class Midy {
|
|
|
1129
1122
|
const maxFrequency = 20000; // max Hz of initialFilterFc
|
|
1130
1123
|
return Math.max(minFrequency, Math.min(frequency, maxFrequency));
|
|
1131
1124
|
}
|
|
1132
|
-
setPortamentoStartFilterEnvelope(channel, note) {
|
|
1133
|
-
const now = this.audioContext.currentTime;
|
|
1125
|
+
setPortamentoStartFilterEnvelope(channel, note, scheduleTime) {
|
|
1134
1126
|
const state = channel.state;
|
|
1135
1127
|
const { voiceParams, noteNumber, startTime } = note;
|
|
1136
1128
|
const softPedalFactor = 1 -
|
|
@@ -1146,13 +1138,12 @@ class Midy {
|
|
|
1146
1138
|
const portamentoTime = startTime + this.getPortamentoTime(channel);
|
|
1147
1139
|
const modDelay = startTime + voiceParams.modDelay;
|
|
1148
1140
|
note.filterNode.frequency
|
|
1149
|
-
.cancelScheduledValues(
|
|
1141
|
+
.cancelScheduledValues(scheduleTime)
|
|
1150
1142
|
.setValueAtTime(adjustedBaseFreq, startTime)
|
|
1151
1143
|
.setValueAtTime(adjustedBaseFreq, modDelay)
|
|
1152
1144
|
.linearRampToValueAtTime(adjustedSustainFreq, portamentoTime);
|
|
1153
1145
|
}
|
|
1154
|
-
setFilterEnvelope(channel, note) {
|
|
1155
|
-
const now = this.audioContext.currentTime;
|
|
1146
|
+
setFilterEnvelope(channel, note, scheduleTime) {
|
|
1156
1147
|
const state = channel.state;
|
|
1157
1148
|
const { voiceParams, noteNumber, startTime } = note;
|
|
1158
1149
|
const softPedalFactor = 1 -
|
|
@@ -1173,14 +1164,14 @@ class Midy {
|
|
|
1173
1164
|
const modHold = modAttack + voiceParams.modHold;
|
|
1174
1165
|
const modDecay = modHold + voiceParams.modDecay;
|
|
1175
1166
|
note.filterNode.frequency
|
|
1176
|
-
.cancelScheduledValues(
|
|
1167
|
+
.cancelScheduledValues(scheduleTime)
|
|
1177
1168
|
.setValueAtTime(adjustedBaseFreq, startTime)
|
|
1178
1169
|
.setValueAtTime(adjustedBaseFreq, modDelay)
|
|
1179
1170
|
.exponentialRampToValueAtTime(adjustedPeekFreq, modAttack)
|
|
1180
1171
|
.setValueAtTime(adjustedPeekFreq, modHold)
|
|
1181
1172
|
.linearRampToValueAtTime(adjustedSustainFreq, modDecay);
|
|
1182
1173
|
}
|
|
1183
|
-
startModulation(channel, note,
|
|
1174
|
+
startModulation(channel, note, scheduleTime) {
|
|
1184
1175
|
const { voiceParams } = note;
|
|
1185
1176
|
note.modulationLFO = new OscillatorNode(this.audioContext, {
|
|
1186
1177
|
frequency: this.centToHz(voiceParams.freqModLFO),
|
|
@@ -1189,10 +1180,10 @@ class Midy {
|
|
|
1189
1180
|
gain: voiceParams.modLfoToFilterFc,
|
|
1190
1181
|
});
|
|
1191
1182
|
note.modulationDepth = new GainNode(this.audioContext);
|
|
1192
|
-
this.setModLfoToPitch(channel, note);
|
|
1183
|
+
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1193
1184
|
note.volumeDepth = new GainNode(this.audioContext);
|
|
1194
|
-
this.setModLfoToVolume(channel, note);
|
|
1195
|
-
note.modulationLFO.start(startTime + voiceParams.delayModLFO);
|
|
1185
|
+
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1186
|
+
note.modulationLFO.start(note.startTime + voiceParams.delayModLFO);
|
|
1196
1187
|
note.modulationLFO.connect(note.filterDepth);
|
|
1197
1188
|
note.filterDepth.connect(note.filterNode.frequency);
|
|
1198
1189
|
note.modulationLFO.connect(note.modulationDepth);
|
|
@@ -1200,15 +1191,15 @@ class Midy {
|
|
|
1200
1191
|
note.modulationLFO.connect(note.volumeDepth);
|
|
1201
1192
|
note.volumeDepth.connect(note.volumeEnvelopeNode.gain);
|
|
1202
1193
|
}
|
|
1203
|
-
startVibrato(channel, note,
|
|
1194
|
+
startVibrato(channel, note, scheduleTime) {
|
|
1204
1195
|
const { voiceParams } = note;
|
|
1205
1196
|
const state = channel.state;
|
|
1206
1197
|
note.vibratoLFO = new OscillatorNode(this.audioContext, {
|
|
1207
1198
|
frequency: this.centToHz(voiceParams.freqVibLFO) * state.vibratoRate * 2,
|
|
1208
1199
|
});
|
|
1209
|
-
note.vibratoLFO.start(startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
|
|
1200
|
+
note.vibratoLFO.start(note.startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
|
|
1210
1201
|
note.vibratoDepth = new GainNode(this.audioContext);
|
|
1211
|
-
this.setVibLfoToPitch(channel, note);
|
|
1202
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1212
1203
|
note.vibratoLFO.connect(note.vibratoDepth);
|
|
1213
1204
|
note.vibratoDepth.connect(note.bufferSource.detune);
|
|
1214
1205
|
}
|
|
@@ -1231,6 +1222,7 @@ class Midy {
|
|
|
1231
1222
|
}
|
|
1232
1223
|
}
|
|
1233
1224
|
async createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3) {
|
|
1225
|
+
const now = this.audioContext.currentTime;
|
|
1234
1226
|
const state = channel.state;
|
|
1235
1227
|
const controllerState = this.getControllerState(channel, noteNumber, velocity);
|
|
1236
1228
|
const voiceParams = voice.getAllParams(controllerState);
|
|
@@ -1247,20 +1239,20 @@ class Midy {
|
|
|
1247
1239
|
});
|
|
1248
1240
|
if (portamento) {
|
|
1249
1241
|
note.portamento = true;
|
|
1250
|
-
this.setPortamentoStartVolumeEnvelope(channel, note);
|
|
1251
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1242
|
+
this.setPortamentoStartVolumeEnvelope(channel, note, now);
|
|
1243
|
+
this.setPortamentoStartFilterEnvelope(channel, note, now);
|
|
1252
1244
|
}
|
|
1253
1245
|
else {
|
|
1254
1246
|
note.portamento = false;
|
|
1255
|
-
this.setVolumeEnvelope(channel, note);
|
|
1256
|
-
this.setFilterEnvelope(channel, note);
|
|
1247
|
+
this.setVolumeEnvelope(channel, note, now);
|
|
1248
|
+
this.setFilterEnvelope(channel, note, now);
|
|
1257
1249
|
}
|
|
1258
1250
|
if (0 < state.vibratoDepth) {
|
|
1259
|
-
this.startVibrato(channel, note,
|
|
1251
|
+
this.startVibrato(channel, note, now);
|
|
1260
1252
|
}
|
|
1261
|
-
this.setPitchEnvelope(note);
|
|
1253
|
+
this.setPitchEnvelope(note, now);
|
|
1262
1254
|
if (0 < state.modulationDepth) {
|
|
1263
|
-
this.startModulation(channel, note,
|
|
1255
|
+
this.startModulation(channel, note, now);
|
|
1264
1256
|
}
|
|
1265
1257
|
if (this.mono && channel.currentBufferSource) {
|
|
1266
1258
|
channel.currentBufferSource.stop(startTime);
|
|
@@ -1272,10 +1264,10 @@ class Midy {
|
|
|
1272
1264
|
note.volumeNode.connect(note.gainL);
|
|
1273
1265
|
note.volumeNode.connect(note.gainR);
|
|
1274
1266
|
if (0 < channel.chorusSendLevel) {
|
|
1275
|
-
this.setChorusEffectsSend(channel, note, 0);
|
|
1267
|
+
this.setChorusEffectsSend(channel, note, 0, now);
|
|
1276
1268
|
}
|
|
1277
1269
|
if (0 < channel.reverbSendLevel) {
|
|
1278
|
-
this.setReverbEffectsSend(channel, note, 0);
|
|
1270
|
+
this.setReverbEffectsSend(channel, note, 0, now);
|
|
1279
1271
|
}
|
|
1280
1272
|
note.bufferSource.start(startTime);
|
|
1281
1273
|
return note;
|
|
@@ -1312,9 +1304,9 @@ class Midy {
|
|
|
1312
1304
|
const prevEntry = this.exclusiveClassMap.get(exclusiveClass);
|
|
1313
1305
|
const [prevNote, prevChannelNumber] = prevEntry;
|
|
1314
1306
|
if (!prevNote.ending) {
|
|
1315
|
-
this.
|
|
1316
|
-
startTime,
|
|
1317
|
-
|
|
1307
|
+
this.scheduleNoteOff(prevChannelNumber, prevNote.noteNumber, 0, // velocity,
|
|
1308
|
+
startTime, true, // force
|
|
1309
|
+
undefined);
|
|
1318
1310
|
}
|
|
1319
1311
|
}
|
|
1320
1312
|
this.exclusiveClassMap.set(exclusiveClass, [note, channelNumber]);
|
|
@@ -1327,9 +1319,9 @@ class Midy {
|
|
|
1327
1319
|
scheduledNotes.set(noteNumber, [note]);
|
|
1328
1320
|
}
|
|
1329
1321
|
}
|
|
1330
|
-
noteOn(channelNumber, noteNumber, velocity,
|
|
1331
|
-
|
|
1332
|
-
return this.scheduleNoteOn(channelNumber, noteNumber, velocity,
|
|
1322
|
+
noteOn(channelNumber, noteNumber, velocity, scheduleTime) {
|
|
1323
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1324
|
+
return this.scheduleNoteOn(channelNumber, noteNumber, velocity, scheduleTime, false);
|
|
1333
1325
|
}
|
|
1334
1326
|
stopNote(endTime, stopTime, scheduledNotes, index) {
|
|
1335
1327
|
const note = scheduledNotes[index];
|
|
@@ -1369,7 +1361,7 @@ class Midy {
|
|
|
1369
1361
|
note.bufferSource.stop(stopTime);
|
|
1370
1362
|
});
|
|
1371
1363
|
}
|
|
1372
|
-
|
|
1364
|
+
scheduleNoteOff(channelNumber, noteNumber, _velocity, endTime, force, portamentoNoteNumber) {
|
|
1373
1365
|
const channel = this.channels[channelNumber];
|
|
1374
1366
|
const state = channel.state;
|
|
1375
1367
|
if (!force) {
|
|
@@ -1409,24 +1401,19 @@ class Midy {
|
|
|
1409
1401
|
}
|
|
1410
1402
|
}
|
|
1411
1403
|
}
|
|
1412
|
-
|
|
1413
|
-
|
|
1414
|
-
return this.
|
|
1404
|
+
noteOff(channelNumber, noteNumber, velocity, scheduleTime) {
|
|
1405
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1406
|
+
return this.scheduleNoteOff(channelNumber, noteNumber, velocity, scheduleTime, false, // force
|
|
1407
|
+
undefined);
|
|
1415
1408
|
}
|
|
1416
|
-
releaseSustainPedal(channelNumber, halfVelocity) {
|
|
1409
|
+
releaseSustainPedal(channelNumber, halfVelocity, scheduleTime) {
|
|
1417
1410
|
const velocity = halfVelocity * 2;
|
|
1418
1411
|
const channel = this.channels[channelNumber];
|
|
1419
1412
|
const promises = [];
|
|
1420
|
-
channel
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
if (!note)
|
|
1425
|
-
continue;
|
|
1426
|
-
const { noteNumber } = note;
|
|
1427
|
-
const promise = this.releaseNote(channelNumber, noteNumber, velocity);
|
|
1428
|
-
promises.push(promise);
|
|
1429
|
-
}
|
|
1413
|
+
this.processScheduledNotes(channel, scheduleTime, (note) => {
|
|
1414
|
+
const { noteNumber } = note;
|
|
1415
|
+
const promise = this.noteOff(channelNumber, noteNumber, velocity);
|
|
1416
|
+
promises.push(promise);
|
|
1430
1417
|
});
|
|
1431
1418
|
return promises;
|
|
1432
1419
|
}
|
|
@@ -1437,55 +1424,51 @@ class Midy {
|
|
|
1437
1424
|
channel.state.sostenutoPedal = 0;
|
|
1438
1425
|
channel.sostenutoNotes.forEach((activeNote) => {
|
|
1439
1426
|
const { noteNumber } = activeNote;
|
|
1440
|
-
const promise = this.
|
|
1427
|
+
const promise = this.noteOff(channelNumber, noteNumber, velocity);
|
|
1441
1428
|
promises.push(promise);
|
|
1442
1429
|
});
|
|
1443
1430
|
channel.sostenutoNotes.clear();
|
|
1444
1431
|
return promises;
|
|
1445
1432
|
}
|
|
1446
|
-
handleMIDIMessage(statusByte, data1, data2) {
|
|
1433
|
+
handleMIDIMessage(statusByte, data1, data2, scheduleTime) {
|
|
1447
1434
|
const channelNumber = omni ? 0 : statusByte & 0x0F;
|
|
1448
1435
|
const messageType = statusByte & 0xF0;
|
|
1449
1436
|
switch (messageType) {
|
|
1450
1437
|
case 0x80:
|
|
1451
|
-
return this.
|
|
1438
|
+
return this.noteOff(channelNumber, data1, data2, scheduleTime);
|
|
1452
1439
|
case 0x90:
|
|
1453
|
-
return this.noteOn(channelNumber, data1, data2);
|
|
1440
|
+
return this.noteOn(channelNumber, data1, data2, scheduleTime);
|
|
1454
1441
|
case 0xA0:
|
|
1455
|
-
return this.handlePolyphonicKeyPressure(channelNumber, data1, data2);
|
|
1442
|
+
return this.handlePolyphonicKeyPressure(channelNumber, data1, data2, scheduleTime);
|
|
1456
1443
|
case 0xB0:
|
|
1457
|
-
return this.handleControlChange(channelNumber, data1, data2);
|
|
1444
|
+
return this.handleControlChange(channelNumber, data1, data2, scheduleTime);
|
|
1458
1445
|
case 0xC0:
|
|
1459
|
-
return this.handleProgramChange(channelNumber, data1);
|
|
1446
|
+
return this.handleProgramChange(channelNumber, data1, scheduleTime);
|
|
1460
1447
|
case 0xD0:
|
|
1461
|
-
return this.handleChannelPressure(channelNumber, data1);
|
|
1448
|
+
return this.handleChannelPressure(channelNumber, data1, scheduleTime);
|
|
1462
1449
|
case 0xE0:
|
|
1463
|
-
return this.handlePitchBendMessage(channelNumber, data1, data2);
|
|
1450
|
+
return this.handlePitchBendMessage(channelNumber, data1, data2, scheduleTime);
|
|
1464
1451
|
default:
|
|
1465
1452
|
console.warn(`Unsupported MIDI message: ${messageType.toString(16)}`);
|
|
1466
1453
|
}
|
|
1467
1454
|
}
|
|
1468
|
-
handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure,
|
|
1469
|
-
if (!startTime)
|
|
1470
|
-
startTime = this.audioContext.currentTime;
|
|
1455
|
+
handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure, scheduleTime) {
|
|
1471
1456
|
const channel = this.channels[channelNumber];
|
|
1472
1457
|
channel.state.polyphonicKeyPressure = pressure / 127;
|
|
1473
1458
|
const table = channel.polyphonicKeyPressureTable;
|
|
1474
|
-
const activeNotes = this.getActiveNotes(channel,
|
|
1459
|
+
const activeNotes = this.getActiveNotes(channel, scheduleTime);
|
|
1475
1460
|
if (activeNotes.has(noteNumber)) {
|
|
1476
1461
|
const note = activeNotes.get(noteNumber);
|
|
1477
1462
|
this.setControllerParameters(channel, note, table);
|
|
1478
1463
|
}
|
|
1479
1464
|
// this.applyVoiceParams(channel, 10);
|
|
1480
1465
|
}
|
|
1481
|
-
handleProgramChange(channelNumber, program) {
|
|
1466
|
+
handleProgramChange(channelNumber, program, _scheduleTime) {
|
|
1482
1467
|
const channel = this.channels[channelNumber];
|
|
1483
1468
|
channel.bank = channel.bankMSB * 128 + channel.bankLSB;
|
|
1484
1469
|
channel.program = program;
|
|
1485
1470
|
}
|
|
1486
|
-
handleChannelPressure(channelNumber, value,
|
|
1487
|
-
if (!startTime)
|
|
1488
|
-
startTime = this.audioContext.currentTime;
|
|
1471
|
+
handleChannelPressure(channelNumber, value, scheduleTime) {
|
|
1489
1472
|
const channel = this.channels[channelNumber];
|
|
1490
1473
|
const prev = channel.state.channelPressure;
|
|
1491
1474
|
const next = value / 127;
|
|
@@ -1495,72 +1478,68 @@ class Midy {
|
|
|
1495
1478
|
channel.detune += pressureDepth * (next - prev);
|
|
1496
1479
|
}
|
|
1497
1480
|
const table = channel.channelPressureTable;
|
|
1498
|
-
this.getActiveNotes(channel,
|
|
1481
|
+
this.getActiveNotes(channel, scheduleTime).forEach((note) => {
|
|
1499
1482
|
this.setControllerParameters(channel, note, table);
|
|
1500
1483
|
});
|
|
1501
1484
|
// this.applyVoiceParams(channel, 13);
|
|
1502
1485
|
}
|
|
1503
|
-
handlePitchBendMessage(channelNumber, lsb, msb) {
|
|
1486
|
+
handlePitchBendMessage(channelNumber, lsb, msb, scheduleTime) {
|
|
1504
1487
|
const pitchBend = msb * 128 + lsb;
|
|
1505
|
-
this.setPitchBend(channelNumber, pitchBend);
|
|
1488
|
+
this.setPitchBend(channelNumber, pitchBend, scheduleTime);
|
|
1506
1489
|
}
|
|
1507
|
-
setPitchBend(channelNumber, value) {
|
|
1490
|
+
setPitchBend(channelNumber, value, scheduleTime) {
|
|
1491
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1508
1492
|
const channel = this.channels[channelNumber];
|
|
1509
1493
|
const state = channel.state;
|
|
1510
1494
|
const prev = state.pitchWheel * 2 - 1;
|
|
1511
1495
|
const next = (value - 8192) / 8192;
|
|
1512
1496
|
state.pitchWheel = value / 16383;
|
|
1513
1497
|
channel.detune += (next - prev) * state.pitchWheelSensitivity * 12800;
|
|
1514
|
-
this.updateChannelDetune(channel);
|
|
1515
|
-
this.applyVoiceParams(channel, 14);
|
|
1498
|
+
this.updateChannelDetune(channel, scheduleTime);
|
|
1499
|
+
this.applyVoiceParams(channel, 14, scheduleTime);
|
|
1516
1500
|
}
|
|
1517
|
-
setModLfoToPitch(channel, note) {
|
|
1518
|
-
const now = this.audioContext.currentTime;
|
|
1501
|
+
setModLfoToPitch(channel, note, scheduleTime) {
|
|
1519
1502
|
const modLfoToPitch = note.voiceParams.modLfoToPitch +
|
|
1520
1503
|
this.getLFOPitchDepth(channel, note);
|
|
1521
1504
|
const baseDepth = Math.abs(modLfoToPitch) + channel.state.modulationDepth;
|
|
1522
1505
|
const modulationDepth = baseDepth * Math.sign(modLfoToPitch);
|
|
1523
1506
|
note.modulationDepth.gain
|
|
1524
|
-
.cancelScheduledValues(
|
|
1525
|
-
.setValueAtTime(modulationDepth,
|
|
1507
|
+
.cancelScheduledValues(scheduleTime)
|
|
1508
|
+
.setValueAtTime(modulationDepth, scheduleTime);
|
|
1526
1509
|
}
|
|
1527
|
-
setVibLfoToPitch(channel, note) {
|
|
1528
|
-
const now = this.audioContext.currentTime;
|
|
1510
|
+
setVibLfoToPitch(channel, note, scheduleTime) {
|
|
1529
1511
|
const vibLfoToPitch = note.voiceParams.vibLfoToPitch;
|
|
1530
1512
|
const vibratoDepth = Math.abs(vibLfoToPitch) * channel.state.vibratoDepth *
|
|
1531
1513
|
2;
|
|
1532
1514
|
const vibratoDepthSign = 0 < vibLfoToPitch;
|
|
1533
1515
|
note.vibratoDepth.gain
|
|
1534
|
-
.cancelScheduledValues(
|
|
1535
|
-
.setValueAtTime(vibratoDepth * vibratoDepthSign,
|
|
1516
|
+
.cancelScheduledValues(scheduleTime)
|
|
1517
|
+
.setValueAtTime(vibratoDepth * vibratoDepthSign, scheduleTime);
|
|
1536
1518
|
}
|
|
1537
|
-
setModLfoToFilterFc(channel, note) {
|
|
1538
|
-
const now = this.audioContext.currentTime;
|
|
1519
|
+
setModLfoToFilterFc(channel, note, scheduleTime) {
|
|
1539
1520
|
const modLfoToFilterFc = note.voiceParams.modLfoToFilterFc +
|
|
1540
1521
|
this.getLFOFilterDepth(channel, note);
|
|
1541
1522
|
note.filterDepth.gain
|
|
1542
|
-
.cancelScheduledValues(
|
|
1543
|
-
.setValueAtTime(modLfoToFilterFc,
|
|
1523
|
+
.cancelScheduledValues(scheduleTime)
|
|
1524
|
+
.setValueAtTime(modLfoToFilterFc, scheduleTime);
|
|
1544
1525
|
}
|
|
1545
|
-
setModLfoToVolume(channel, note) {
|
|
1546
|
-
const now = this.audioContext.currentTime;
|
|
1526
|
+
setModLfoToVolume(channel, note, scheduleTime) {
|
|
1547
1527
|
const modLfoToVolume = note.voiceParams.modLfoToVolume;
|
|
1548
1528
|
const baseDepth = this.cbToRatio(Math.abs(modLfoToVolume)) - 1;
|
|
1549
1529
|
const volumeDepth = baseDepth * Math.sign(modLfoToVolume) *
|
|
1550
1530
|
(1 + this.getLFOAmplitudeDepth(channel, note));
|
|
1551
1531
|
note.volumeDepth.gain
|
|
1552
|
-
.cancelScheduledValues(
|
|
1553
|
-
.setValueAtTime(volumeDepth,
|
|
1532
|
+
.cancelScheduledValues(scheduleTime)
|
|
1533
|
+
.setValueAtTime(volumeDepth, scheduleTime);
|
|
1554
1534
|
}
|
|
1555
|
-
setReverbEffectsSend(channel, note, prevValue) {
|
|
1535
|
+
setReverbEffectsSend(channel, note, prevValue, scheduleTime) {
|
|
1556
1536
|
if (0 < prevValue) {
|
|
1557
1537
|
if (0 < note.voiceParams.reverbEffectsSend) {
|
|
1558
|
-
const now = this.audioContext.currentTime;
|
|
1559
1538
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 91);
|
|
1560
1539
|
const value = note.voiceParams.reverbEffectsSend + keyBasedValue;
|
|
1561
1540
|
note.reverbEffectsSend.gain
|
|
1562
|
-
.cancelScheduledValues(
|
|
1563
|
-
.setValueAtTime(value,
|
|
1541
|
+
.cancelScheduledValues(scheduleTime)
|
|
1542
|
+
.setValueAtTime(value, scheduleTime);
|
|
1564
1543
|
}
|
|
1565
1544
|
else {
|
|
1566
1545
|
note.reverbEffectsSend.disconnect();
|
|
@@ -1578,15 +1557,14 @@ class Midy {
|
|
|
1578
1557
|
}
|
|
1579
1558
|
}
|
|
1580
1559
|
}
|
|
1581
|
-
setChorusEffectsSend(channel, note, prevValue) {
|
|
1560
|
+
setChorusEffectsSend(channel, note, prevValue, scheduleTime) {
|
|
1582
1561
|
if (0 < prevValue) {
|
|
1583
1562
|
if (0 < note.voiceParams.chorusEffectsSend) {
|
|
1584
|
-
const now = this.audioContext.currentTime;
|
|
1585
1563
|
const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 93);
|
|
1586
1564
|
const value = note.voiceParams.chorusEffectsSend + keyBasedValue;
|
|
1587
1565
|
note.chorusEffectsSend.gain
|
|
1588
|
-
.cancelScheduledValues(
|
|
1589
|
-
.setValueAtTime(value,
|
|
1566
|
+
.cancelScheduledValues(scheduleTime)
|
|
1567
|
+
.setValueAtTime(value, scheduleTime);
|
|
1590
1568
|
}
|
|
1591
1569
|
else {
|
|
1592
1570
|
note.chorusEffectsSend.disconnect();
|
|
@@ -1604,75 +1582,71 @@ class Midy {
|
|
|
1604
1582
|
}
|
|
1605
1583
|
}
|
|
1606
1584
|
}
|
|
1607
|
-
setDelayModLFO(note) {
|
|
1608
|
-
const now = this.audioContext.currentTime;
|
|
1585
|
+
setDelayModLFO(note, scheduleTime) {
|
|
1609
1586
|
const startTime = note.startTime;
|
|
1610
|
-
if (startTime <
|
|
1587
|
+
if (startTime < scheduleTime)
|
|
1611
1588
|
return;
|
|
1612
|
-
note.modulationLFO.stop(
|
|
1589
|
+
note.modulationLFO.stop(scheduleTime);
|
|
1613
1590
|
note.modulationLFO.start(startTime + note.voiceParams.delayModLFO);
|
|
1614
1591
|
note.modulationLFO.connect(note.filterDepth);
|
|
1615
1592
|
}
|
|
1616
|
-
setFreqModLFO(note) {
|
|
1617
|
-
const now = this.audioContext.currentTime;
|
|
1593
|
+
setFreqModLFO(note, scheduleTime) {
|
|
1618
1594
|
const freqModLFO = note.voiceParams.freqModLFO;
|
|
1619
1595
|
note.modulationLFO.frequency
|
|
1620
|
-
.cancelScheduledValues(
|
|
1621
|
-
.setValueAtTime(freqModLFO,
|
|
1596
|
+
.cancelScheduledValues(scheduleTime)
|
|
1597
|
+
.setValueAtTime(freqModLFO, scheduleTime);
|
|
1622
1598
|
}
|
|
1623
|
-
setFreqVibLFO(channel, note) {
|
|
1624
|
-
const now = this.audioContext.currentTime;
|
|
1599
|
+
setFreqVibLFO(channel, note, scheduleTime) {
|
|
1625
1600
|
const freqVibLFO = note.voiceParams.freqVibLFO;
|
|
1626
1601
|
note.vibratoLFO.frequency
|
|
1627
|
-
.cancelScheduledValues(
|
|
1628
|
-
.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2,
|
|
1602
|
+
.cancelScheduledValues(scheduleTime)
|
|
1603
|
+
.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2, scheduleTime);
|
|
1629
1604
|
}
|
|
1630
1605
|
createVoiceParamsHandlers() {
|
|
1631
1606
|
return {
|
|
1632
|
-
modLfoToPitch: (channel, note, _prevValue) => {
|
|
1607
|
+
modLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
|
|
1633
1608
|
if (0 < channel.state.modulationDepth) {
|
|
1634
|
-
this.setModLfoToPitch(channel, note);
|
|
1609
|
+
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1635
1610
|
}
|
|
1636
1611
|
},
|
|
1637
|
-
vibLfoToPitch: (channel, note, _prevValue) => {
|
|
1612
|
+
vibLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
|
|
1638
1613
|
if (0 < channel.state.vibratoDepth) {
|
|
1639
|
-
this.setVibLfoToPitch(channel, note);
|
|
1614
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1640
1615
|
}
|
|
1641
1616
|
},
|
|
1642
|
-
modLfoToFilterFc: (channel, note, _prevValue) => {
|
|
1617
|
+
modLfoToFilterFc: (channel, note, _prevValue, scheduleTime) => {
|
|
1643
1618
|
if (0 < channel.state.modulationDepth) {
|
|
1644
|
-
this.setModLfoToFilterFc(channel, note);
|
|
1619
|
+
this.setModLfoToFilterFc(channel, note, scheduleTime);
|
|
1645
1620
|
}
|
|
1646
1621
|
},
|
|
1647
|
-
modLfoToVolume: (channel, note, _prevValue) => {
|
|
1622
|
+
modLfoToVolume: (channel, note, _prevValue, scheduleTime) => {
|
|
1648
1623
|
if (0 < channel.state.modulationDepth) {
|
|
1649
|
-
this.setModLfoToVolume(channel, note);
|
|
1624
|
+
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1650
1625
|
}
|
|
1651
1626
|
},
|
|
1652
|
-
chorusEffectsSend: (channel, note, prevValue) => {
|
|
1653
|
-
this.setChorusEffectsSend(channel, note, prevValue);
|
|
1627
|
+
chorusEffectsSend: (channel, note, prevValue, scheduleTime) => {
|
|
1628
|
+
this.setChorusEffectsSend(channel, note, prevValue, scheduleTime);
|
|
1654
1629
|
},
|
|
1655
|
-
reverbEffectsSend: (channel, note, prevValue) => {
|
|
1656
|
-
this.setReverbEffectsSend(channel, note, prevValue);
|
|
1630
|
+
reverbEffectsSend: (channel, note, prevValue, scheduleTime) => {
|
|
1631
|
+
this.setReverbEffectsSend(channel, note, prevValue, scheduleTime);
|
|
1657
1632
|
},
|
|
1658
|
-
delayModLFO: (_channel, note, _prevValue) => this.setDelayModLFO(note),
|
|
1659
|
-
freqModLFO: (_channel, note, _prevValue) => this.setFreqModLFO(note),
|
|
1660
|
-
delayVibLFO: (channel, note, prevValue) => {
|
|
1633
|
+
delayModLFO: (_channel, note, _prevValue, scheduleTime) => this.setDelayModLFO(note, scheduleTime),
|
|
1634
|
+
freqModLFO: (_channel, note, _prevValue, scheduleTime) => this.setFreqModLFO(note, scheduleTime),
|
|
1635
|
+
delayVibLFO: (channel, note, prevValue, scheduleTime) => {
|
|
1661
1636
|
if (0 < channel.state.vibratoDepth) {
|
|
1662
|
-
const now = this.audioContext.currentTime;
|
|
1663
1637
|
const vibratoDelay = channel.state.vibratoDelay * 2;
|
|
1664
1638
|
const prevStartTime = note.startTime + prevValue * vibratoDelay;
|
|
1665
|
-
if (
|
|
1639
|
+
if (scheduleTime < prevStartTime)
|
|
1666
1640
|
return;
|
|
1667
1641
|
const value = note.voiceParams.delayVibLFO;
|
|
1668
1642
|
const startTime = note.startTime + value * vibratoDelay;
|
|
1669
|
-
note.vibratoLFO.stop(
|
|
1643
|
+
note.vibratoLFO.stop(scheduleTime);
|
|
1670
1644
|
note.vibratoLFO.start(startTime);
|
|
1671
1645
|
}
|
|
1672
1646
|
},
|
|
1673
|
-
freqVibLFO: (channel, note, _prevValue) => {
|
|
1647
|
+
freqVibLFO: (channel, note, _prevValue, scheduleTime) => {
|
|
1674
1648
|
if (0 < channel.state.vibratoDepth) {
|
|
1675
|
-
this.setFreqVibLFO(channel, note);
|
|
1649
|
+
this.setFreqVibLFO(channel, note, scheduleTime);
|
|
1676
1650
|
}
|
|
1677
1651
|
},
|
|
1678
1652
|
};
|
|
@@ -1684,7 +1658,7 @@ class Midy {
|
|
|
1684
1658
|
state[3] = noteNumber / 127;
|
|
1685
1659
|
return state;
|
|
1686
1660
|
}
|
|
1687
|
-
applyVoiceParams(channel, controllerType) {
|
|
1661
|
+
applyVoiceParams(channel, controllerType, scheduleTime) {
|
|
1688
1662
|
channel.scheduledNotes.forEach((noteList) => {
|
|
1689
1663
|
for (let i = 0; i < noteList.length; i++) {
|
|
1690
1664
|
const note = noteList[i];
|
|
@@ -1700,7 +1674,7 @@ class Midy {
|
|
|
1700
1674
|
continue;
|
|
1701
1675
|
note.voiceParams[key] = value;
|
|
1702
1676
|
if (key in this.voiceParamsHandlers) {
|
|
1703
|
-
this.voiceParamsHandlers[key](channel, note, prevValue);
|
|
1677
|
+
this.voiceParamsHandlers[key](channel, note, prevValue, scheduleTime);
|
|
1704
1678
|
}
|
|
1705
1679
|
else if (filterEnvelopeKeySet.has(key)) {
|
|
1706
1680
|
if (appliedFilterEnvelope)
|
|
@@ -1713,12 +1687,12 @@ class Midy {
|
|
|
1713
1687
|
noteVoiceParams[key] = voiceParams[key];
|
|
1714
1688
|
}
|
|
1715
1689
|
if (note.portamento) {
|
|
1716
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1690
|
+
this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
|
|
1717
1691
|
}
|
|
1718
1692
|
else {
|
|
1719
|
-
this.setFilterEnvelope(channel, note);
|
|
1693
|
+
this.setFilterEnvelope(channel, note, scheduleTime);
|
|
1720
1694
|
}
|
|
1721
|
-
this.setPitchEnvelope(note);
|
|
1695
|
+
this.setPitchEnvelope(note, scheduleTime);
|
|
1722
1696
|
}
|
|
1723
1697
|
else if (volumeEnvelopeKeySet.has(key)) {
|
|
1724
1698
|
if (appliedVolumeEnvelope)
|
|
@@ -1730,7 +1704,7 @@ class Midy {
|
|
|
1730
1704
|
if (key in voiceParams)
|
|
1731
1705
|
noteVoiceParams[key] = voiceParams[key];
|
|
1732
1706
|
}
|
|
1733
|
-
this.setVolumeEnvelope(channel, note);
|
|
1707
|
+
this.setVolumeEnvelope(channel, note, scheduleTime);
|
|
1734
1708
|
}
|
|
1735
1709
|
}
|
|
1736
1710
|
}
|
|
@@ -1774,12 +1748,12 @@ class Midy {
|
|
|
1774
1748
|
127: this.polyOn,
|
|
1775
1749
|
};
|
|
1776
1750
|
}
|
|
1777
|
-
handleControlChange(channelNumber, controllerType, value,
|
|
1751
|
+
handleControlChange(channelNumber, controllerType, value, scheduleTime) {
|
|
1778
1752
|
const handler = this.controlChangeHandlers[controllerType];
|
|
1779
1753
|
if (handler) {
|
|
1780
|
-
handler.call(this, channelNumber, value,
|
|
1754
|
+
handler.call(this, channelNumber, value, scheduleTime);
|
|
1781
1755
|
const channel = this.channels[channelNumber];
|
|
1782
|
-
this.applyVoiceParams(channel, controllerType + 128);
|
|
1756
|
+
this.applyVoiceParams(channel, controllerType + 128, scheduleTime);
|
|
1783
1757
|
this.applyControlTable(channel, controllerType);
|
|
1784
1758
|
}
|
|
1785
1759
|
else {
|
|
@@ -1865,48 +1839,46 @@ class Midy {
|
|
|
1865
1839
|
setBankLSB(channelNumber, lsb) {
|
|
1866
1840
|
this.channels[channelNumber].bankLSB = lsb;
|
|
1867
1841
|
}
|
|
1868
|
-
dataEntryLSB(channelNumber, value) {
|
|
1842
|
+
dataEntryLSB(channelNumber, value, scheduleTime) {
|
|
1869
1843
|
this.channels[channelNumber].dataLSB = value;
|
|
1870
|
-
this.handleRPN(channelNumber,
|
|
1844
|
+
this.handleRPN(channelNumber, scheduleTime);
|
|
1871
1845
|
}
|
|
1872
|
-
updateChannelVolume(channel) {
|
|
1873
|
-
const now = this.audioContext.currentTime;
|
|
1846
|
+
updateChannelVolume(channel, scheduleTime) {
|
|
1874
1847
|
const state = channel.state;
|
|
1875
1848
|
const volume = state.volume * state.expression;
|
|
1876
1849
|
const { gainLeft, gainRight } = this.panToGain(state.pan);
|
|
1877
1850
|
channel.gainL.gain
|
|
1878
|
-
.cancelScheduledValues(
|
|
1879
|
-
.setValueAtTime(volume * gainLeft,
|
|
1851
|
+
.cancelScheduledValues(scheduleTime)
|
|
1852
|
+
.setValueAtTime(volume * gainLeft, scheduleTime);
|
|
1880
1853
|
channel.gainR.gain
|
|
1881
|
-
.cancelScheduledValues(
|
|
1882
|
-
.setValueAtTime(volume * gainRight,
|
|
1854
|
+
.cancelScheduledValues(scheduleTime)
|
|
1855
|
+
.setValueAtTime(volume * gainRight, scheduleTime);
|
|
1883
1856
|
}
|
|
1884
|
-
setSustainPedal(channelNumber, value) {
|
|
1857
|
+
setSustainPedal(channelNumber, value, scheduleTime) {
|
|
1858
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1885
1859
|
this.channels[channelNumber].state.sustainPedal = value / 127;
|
|
1886
1860
|
if (value < 64) {
|
|
1887
|
-
this.releaseSustainPedal(channelNumber, value);
|
|
1861
|
+
this.releaseSustainPedal(channelNumber, value, scheduleTime);
|
|
1888
1862
|
}
|
|
1889
1863
|
}
|
|
1890
1864
|
setPortamento(channelNumber, value) {
|
|
1891
1865
|
this.channels[channelNumber].state.portamento = value / 127;
|
|
1892
1866
|
}
|
|
1893
|
-
setSostenutoPedal(channelNumber, value) {
|
|
1867
|
+
setSostenutoPedal(channelNumber, value, scheduleTime) {
|
|
1894
1868
|
const channel = this.channels[channelNumber];
|
|
1895
1869
|
channel.state.sostenutoPedal = value / 127;
|
|
1896
1870
|
if (64 <= value) {
|
|
1897
|
-
|
|
1898
|
-
channel.sostenutoNotes = this.getActiveNotes(channel, now);
|
|
1871
|
+
channel.sostenutoNotes = this.getActiveNotes(channel, scheduleTime);
|
|
1899
1872
|
}
|
|
1900
1873
|
else {
|
|
1901
1874
|
this.releaseSostenutoPedal(channelNumber, value);
|
|
1902
1875
|
}
|
|
1903
1876
|
}
|
|
1904
|
-
setSoftPedal(channelNumber, softPedal) {
|
|
1877
|
+
setSoftPedal(channelNumber, softPedal, _scheduleTime) {
|
|
1905
1878
|
const channel = this.channels[channelNumber];
|
|
1906
1879
|
channel.state.softPedal = softPedal / 127;
|
|
1907
1880
|
}
|
|
1908
|
-
setFilterResonance(channelNumber, filterResonance) {
|
|
1909
|
-
const now = this.audioContext.currentTime;
|
|
1881
|
+
setFilterResonance(channelNumber, filterResonance, scheduleTime) {
|
|
1910
1882
|
const channel = this.channels[channelNumber];
|
|
1911
1883
|
const state = channel.state;
|
|
1912
1884
|
state.filterResonance = filterResonance / 64;
|
|
@@ -1916,16 +1888,15 @@ class Midy {
|
|
|
1916
1888
|
if (!note)
|
|
1917
1889
|
continue;
|
|
1918
1890
|
const Q = note.voiceParams.initialFilterQ / 5 * state.filterResonance;
|
|
1919
|
-
note.filterNode.Q.setValueAtTime(Q,
|
|
1891
|
+
note.filterNode.Q.setValueAtTime(Q, scheduleTime);
|
|
1920
1892
|
}
|
|
1921
1893
|
});
|
|
1922
1894
|
}
|
|
1923
|
-
setReleaseTime(channelNumber, releaseTime) {
|
|
1895
|
+
setReleaseTime(channelNumber, releaseTime, _scheduleTime) {
|
|
1924
1896
|
const channel = this.channels[channelNumber];
|
|
1925
1897
|
channel.state.releaseTime = releaseTime / 64;
|
|
1926
1898
|
}
|
|
1927
|
-
setAttackTime(channelNumber, attackTime) {
|
|
1928
|
-
const now = this.audioContext.currentTime;
|
|
1899
|
+
setAttackTime(channelNumber, attackTime, scheduleTime) {
|
|
1929
1900
|
const channel = this.channels[channelNumber];
|
|
1930
1901
|
channel.state.attackTime = attackTime / 64;
|
|
1931
1902
|
channel.scheduledNotes.forEach((noteList) => {
|
|
@@ -1933,13 +1904,13 @@ class Midy {
|
|
|
1933
1904
|
const note = noteList[i];
|
|
1934
1905
|
if (!note)
|
|
1935
1906
|
continue;
|
|
1936
|
-
if (note.startTime <
|
|
1907
|
+
if (note.startTime < scheduleTime)
|
|
1937
1908
|
continue;
|
|
1938
1909
|
this.setVolumeEnvelope(channel, note);
|
|
1939
1910
|
}
|
|
1940
1911
|
});
|
|
1941
1912
|
}
|
|
1942
|
-
setBrightness(channelNumber, brightness) {
|
|
1913
|
+
setBrightness(channelNumber, brightness, scheduleTime) {
|
|
1943
1914
|
const channel = this.channels[channelNumber];
|
|
1944
1915
|
channel.state.brightness = brightness / 64;
|
|
1945
1916
|
channel.scheduledNotes.forEach((noteList) => {
|
|
@@ -1948,7 +1919,7 @@ class Midy {
|
|
|
1948
1919
|
if (!note)
|
|
1949
1920
|
continue;
|
|
1950
1921
|
if (note.portamento) {
|
|
1951
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1922
|
+
this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
|
|
1952
1923
|
}
|
|
1953
1924
|
else {
|
|
1954
1925
|
this.setFilterEnvelope(channel, note);
|
|
@@ -1956,7 +1927,7 @@ class Midy {
|
|
|
1956
1927
|
}
|
|
1957
1928
|
});
|
|
1958
1929
|
}
|
|
1959
|
-
setDecayTime(channelNumber, dacayTime) {
|
|
1930
|
+
setDecayTime(channelNumber, dacayTime, scheduleTime) {
|
|
1960
1931
|
const channel = this.channels[channelNumber];
|
|
1961
1932
|
channel.state.decayTime = dacayTime / 64;
|
|
1962
1933
|
channel.scheduledNotes.forEach((noteList) => {
|
|
@@ -1964,11 +1935,11 @@ class Midy {
|
|
|
1964
1935
|
const note = noteList[i];
|
|
1965
1936
|
if (!note)
|
|
1966
1937
|
continue;
|
|
1967
|
-
this.setVolumeEnvelope(channel, note);
|
|
1938
|
+
this.setVolumeEnvelope(channel, note, scheduleTime);
|
|
1968
1939
|
}
|
|
1969
1940
|
});
|
|
1970
1941
|
}
|
|
1971
|
-
setVibratoRate(channelNumber, vibratoRate) {
|
|
1942
|
+
setVibratoRate(channelNumber, vibratoRate, scheduleTime) {
|
|
1972
1943
|
const channel = this.channels[channelNumber];
|
|
1973
1944
|
channel.state.vibratoRate = vibratoRate / 64;
|
|
1974
1945
|
if (channel.vibratoDepth <= 0)
|
|
@@ -1978,11 +1949,11 @@ class Midy {
|
|
|
1978
1949
|
const note = noteList[i];
|
|
1979
1950
|
if (!note)
|
|
1980
1951
|
continue;
|
|
1981
|
-
this.setVibLfoToPitch(channel, note);
|
|
1952
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1982
1953
|
}
|
|
1983
1954
|
});
|
|
1984
1955
|
}
|
|
1985
|
-
setVibratoDepth(channelNumber, vibratoDepth) {
|
|
1956
|
+
setVibratoDepth(channelNumber, vibratoDepth, scheduleTime) {
|
|
1986
1957
|
const channel = this.channels[channelNumber];
|
|
1987
1958
|
const prev = channel.state.vibratoDepth;
|
|
1988
1959
|
channel.state.vibratoDepth = vibratoDepth / 64;
|
|
@@ -1992,7 +1963,7 @@ class Midy {
|
|
|
1992
1963
|
const note = noteList[i];
|
|
1993
1964
|
if (!note)
|
|
1994
1965
|
continue;
|
|
1995
|
-
this.setFreqVibLFO(channel, note);
|
|
1966
|
+
this.setFreqVibLFO(channel, note, scheduleTime);
|
|
1996
1967
|
}
|
|
1997
1968
|
});
|
|
1998
1969
|
}
|
|
@@ -2002,7 +1973,7 @@ class Midy {
|
|
|
2002
1973
|
const note = noteList[i];
|
|
2003
1974
|
if (!note)
|
|
2004
1975
|
continue;
|
|
2005
|
-
this.startVibrato(channel, note,
|
|
1976
|
+
this.startVibrato(channel, note, scheduleTime);
|
|
2006
1977
|
}
|
|
2007
1978
|
});
|
|
2008
1979
|
}
|
|
@@ -2016,21 +1987,21 @@ class Midy {
|
|
|
2016
1987
|
const note = noteList[i];
|
|
2017
1988
|
if (!note)
|
|
2018
1989
|
continue;
|
|
2019
|
-
this.startVibrato(channel, note,
|
|
1990
|
+
this.startVibrato(channel, note, scheduleTime);
|
|
2020
1991
|
}
|
|
2021
1992
|
});
|
|
2022
1993
|
}
|
|
2023
1994
|
}
|
|
2024
|
-
setReverbSendLevel(channelNumber, reverbSendLevel) {
|
|
1995
|
+
setReverbSendLevel(channelNumber, reverbSendLevel, scheduleTime) {
|
|
2025
1996
|
const channel = this.channels[channelNumber];
|
|
2026
1997
|
const state = channel.state;
|
|
2027
1998
|
const reverbEffect = this.reverbEffect;
|
|
2028
1999
|
if (0 < state.reverbSendLevel) {
|
|
2029
2000
|
if (0 < reverbSendLevel) {
|
|
2030
|
-
const now = this.audioContext.currentTime;
|
|
2031
2001
|
state.reverbSendLevel = reverbSendLevel / 127;
|
|
2032
|
-
reverbEffect.input.gain
|
|
2033
|
-
|
|
2002
|
+
reverbEffect.input.gain
|
|
2003
|
+
.cancelScheduledValues(scheduleTime)
|
|
2004
|
+
.setValueAtTime(state.reverbSendLevel, scheduleTime);
|
|
2034
2005
|
}
|
|
2035
2006
|
else {
|
|
2036
2007
|
channel.scheduledNotes.forEach((noteList) => {
|
|
@@ -2047,31 +2018,31 @@ class Midy {
|
|
|
2047
2018
|
}
|
|
2048
2019
|
else {
|
|
2049
2020
|
if (0 < reverbSendLevel) {
|
|
2050
|
-
const now = this.audioContext.currentTime;
|
|
2051
2021
|
channel.scheduledNotes.forEach((noteList) => {
|
|
2052
2022
|
for (let i = 0; i < noteList.length; i++) {
|
|
2053
2023
|
const note = noteList[i];
|
|
2054
2024
|
if (!note)
|
|
2055
2025
|
continue;
|
|
2056
|
-
this.setReverbEffectsSend(channel, note, 0);
|
|
2026
|
+
this.setReverbEffectsSend(channel, note, 0, scheduleTime);
|
|
2057
2027
|
}
|
|
2058
2028
|
});
|
|
2059
2029
|
state.reverbSendLevel = reverbSendLevel / 127;
|
|
2060
|
-
reverbEffect.input.gain
|
|
2061
|
-
|
|
2030
|
+
reverbEffect.input.gain
|
|
2031
|
+
.cancelScheduledValues(scheduleTime)
|
|
2032
|
+
.setValueAtTime(state.reverbSendLevel, scheduleTime);
|
|
2062
2033
|
}
|
|
2063
2034
|
}
|
|
2064
2035
|
}
|
|
2065
|
-
setChorusSendLevel(channelNumber, chorusSendLevel) {
|
|
2036
|
+
setChorusSendLevel(channelNumber, chorusSendLevel, scheduleTime) {
|
|
2066
2037
|
const channel = this.channels[channelNumber];
|
|
2067
2038
|
const state = channel.state;
|
|
2068
2039
|
const chorusEffect = this.chorusEffect;
|
|
2069
2040
|
if (0 < state.chorusSendLevel) {
|
|
2070
2041
|
if (0 < chorusSendLevel) {
|
|
2071
|
-
const now = this.audioContext.currentTime;
|
|
2072
2042
|
state.chorusSendLevel = chorusSendLevel / 127;
|
|
2073
|
-
chorusEffect.input.gain
|
|
2074
|
-
|
|
2043
|
+
chorusEffect.input.gain
|
|
2044
|
+
.cancelScheduledValues(scheduleTime)
|
|
2045
|
+
.setValueAtTime(state.chorusSendLevel, scheduleTime);
|
|
2075
2046
|
}
|
|
2076
2047
|
else {
|
|
2077
2048
|
channel.scheduledNotes.forEach((noteList) => {
|
|
@@ -2088,18 +2059,18 @@ class Midy {
|
|
|
2088
2059
|
}
|
|
2089
2060
|
else {
|
|
2090
2061
|
if (0 < chorusSendLevel) {
|
|
2091
|
-
const now = this.audioContext.currentTime;
|
|
2092
2062
|
channel.scheduledNotes.forEach((noteList) => {
|
|
2093
2063
|
for (let i = 0; i < noteList.length; i++) {
|
|
2094
2064
|
const note = noteList[i];
|
|
2095
2065
|
if (!note)
|
|
2096
2066
|
continue;
|
|
2097
|
-
this.setChorusEffectsSend(channel, note, 0);
|
|
2067
|
+
this.setChorusEffectsSend(channel, note, 0, scheduleTime);
|
|
2098
2068
|
}
|
|
2099
2069
|
});
|
|
2100
2070
|
state.chorusSendLevel = chorusSendLevel / 127;
|
|
2101
|
-
chorusEffect.input.gain
|
|
2102
|
-
|
|
2071
|
+
chorusEffect.input.gain
|
|
2072
|
+
.cancelScheduledValues(scheduleTime)
|
|
2073
|
+
.setValueAtTime(state.chorusSendLevel, scheduleTime);
|
|
2103
2074
|
}
|
|
2104
2075
|
}
|
|
2105
2076
|
}
|
|
@@ -2129,13 +2100,13 @@ class Midy {
|
|
|
2129
2100
|
channel.dataMSB = minMSB;
|
|
2130
2101
|
}
|
|
2131
2102
|
}
|
|
2132
|
-
handleRPN(channelNumber, value) {
|
|
2103
|
+
handleRPN(channelNumber, value, scheduleTime) {
|
|
2133
2104
|
const channel = this.channels[channelNumber];
|
|
2134
2105
|
const rpn = channel.rpnMSB * 128 + channel.rpnLSB;
|
|
2135
2106
|
switch (rpn) {
|
|
2136
2107
|
case 0:
|
|
2137
2108
|
channel.dataLSB += value;
|
|
2138
|
-
this.handlePitchBendRangeRPN(channelNumber);
|
|
2109
|
+
this.handlePitchBendRangeRPN(channelNumber, scheduleTime);
|
|
2139
2110
|
break;
|
|
2140
2111
|
case 1:
|
|
2141
2112
|
channel.dataLSB += value;
|
|
@@ -2167,25 +2138,26 @@ class Midy {
|
|
|
2167
2138
|
setRPNLSB(channelNumber, value) {
|
|
2168
2139
|
this.channels[channelNumber].rpnLSB = value;
|
|
2169
2140
|
}
|
|
2170
|
-
dataEntryMSB(channelNumber, value) {
|
|
2141
|
+
dataEntryMSB(channelNumber, value, scheduleTime) {
|
|
2171
2142
|
this.channels[channelNumber].dataMSB = value;
|
|
2172
|
-
this.handleRPN(channelNumber,
|
|
2143
|
+
this.handleRPN(channelNumber, scheduleTime);
|
|
2173
2144
|
}
|
|
2174
|
-
handlePitchBendRangeRPN(channelNumber) {
|
|
2145
|
+
handlePitchBendRangeRPN(channelNumber, scheduleTime) {
|
|
2175
2146
|
const channel = this.channels[channelNumber];
|
|
2176
2147
|
this.limitData(channel, 0, 127, 0, 99);
|
|
2177
2148
|
const pitchBendRange = channel.dataMSB + channel.dataLSB / 100;
|
|
2178
|
-
this.setPitchBendRange(channelNumber, pitchBendRange);
|
|
2149
|
+
this.setPitchBendRange(channelNumber, pitchBendRange, scheduleTime);
|
|
2179
2150
|
}
|
|
2180
|
-
setPitchBendRange(channelNumber, value) {
|
|
2151
|
+
setPitchBendRange(channelNumber, value, scheduleTime) {
|
|
2152
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
2181
2153
|
const channel = this.channels[channelNumber];
|
|
2182
2154
|
const state = channel.state;
|
|
2183
2155
|
const prev = state.pitchWheelSensitivity;
|
|
2184
2156
|
const next = value / 128;
|
|
2185
2157
|
state.pitchWheelSensitivity = next;
|
|
2186
2158
|
channel.detune += (state.pitchWheel * 2 - 1) * (next - prev) * 12800;
|
|
2187
|
-
this.updateChannelDetune(channel);
|
|
2188
|
-
this.applyVoiceParams(channel, 16);
|
|
2159
|
+
this.updateChannelDetune(channel, scheduleTime);
|
|
2160
|
+
this.applyVoiceParams(channel, 16, scheduleTime);
|
|
2189
2161
|
}
|
|
2190
2162
|
handleFineTuningRPN(channelNumber) {
|
|
2191
2163
|
const channel = this.channels[channelNumber];
|
|
@@ -2226,8 +2198,9 @@ class Midy {
|
|
|
2226
2198
|
channel.modulationDepthRange = modulationDepthRange;
|
|
2227
2199
|
this.updateModulation(channel);
|
|
2228
2200
|
}
|
|
2229
|
-
allSoundOff(channelNumber) {
|
|
2230
|
-
|
|
2201
|
+
allSoundOff(channelNumber, _value, scheduleTime) {
|
|
2202
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
2203
|
+
return this.stopChannelNotes(channelNumber, 0, true, scheduleTime);
|
|
2231
2204
|
}
|
|
2232
2205
|
resetAllControllers(channelNumber) {
|
|
2233
2206
|
const stateTypes = [
|
|
@@ -2255,8 +2228,9 @@ class Midy {
|
|
|
2255
2228
|
channel[type] = this.constructor.channelSettings[type];
|
|
2256
2229
|
}
|
|
2257
2230
|
}
|
|
2258
|
-
allNotesOff(channelNumber) {
|
|
2259
|
-
|
|
2231
|
+
allNotesOff(channelNumber, _value, scheduleTime) {
|
|
2232
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
2233
|
+
return this.stopChannelNotes(channelNumber, 0, false, scheduleTime);
|
|
2260
2234
|
}
|
|
2261
2235
|
omniOff() {
|
|
2262
2236
|
this.omni = false;
|
|
@@ -2270,16 +2244,16 @@ class Midy {
|
|
|
2270
2244
|
polyOn() {
|
|
2271
2245
|
this.mono = false;
|
|
2272
2246
|
}
|
|
2273
|
-
handleUniversalNonRealTimeExclusiveMessage(data) {
|
|
2247
|
+
handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime) {
|
|
2274
2248
|
switch (data[2]) {
|
|
2275
2249
|
case 8:
|
|
2276
2250
|
switch (data[3]) {
|
|
2277
2251
|
case 8:
|
|
2278
2252
|
// https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
|
|
2279
|
-
return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false);
|
|
2253
|
+
return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false, scheduleTime);
|
|
2280
2254
|
case 9:
|
|
2281
2255
|
// https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
|
|
2282
|
-
return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false);
|
|
2256
|
+
return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false, scheduleTime);
|
|
2283
2257
|
default:
|
|
2284
2258
|
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
2285
2259
|
}
|
|
@@ -2322,18 +2296,18 @@ class Midy {
|
|
|
2322
2296
|
this.channels[9].bankMSB = 120;
|
|
2323
2297
|
this.channels[9].bank = 120 * 128;
|
|
2324
2298
|
}
|
|
2325
|
-
handleUniversalRealTimeExclusiveMessage(data) {
|
|
2299
|
+
handleUniversalRealTimeExclusiveMessage(data, scheduleTime) {
|
|
2326
2300
|
switch (data[2]) {
|
|
2327
2301
|
case 4:
|
|
2328
2302
|
switch (data[3]) {
|
|
2329
2303
|
case 1:
|
|
2330
|
-
return this.handleMasterVolumeSysEx(data);
|
|
2304
|
+
return this.handleMasterVolumeSysEx(data, scheduleTime);
|
|
2331
2305
|
case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
|
|
2332
|
-
return this.handleMasterFineTuningSysEx(data);
|
|
2306
|
+
return this.handleMasterFineTuningSysEx(data, scheduleTime);
|
|
2333
2307
|
case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
|
|
2334
|
-
return this.handleMasterCoarseTuningSysEx(data);
|
|
2308
|
+
return this.handleMasterCoarseTuningSysEx(data, scheduleTime);
|
|
2335
2309
|
case 5: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca24.pdf
|
|
2336
|
-
return this.handleGlobalParameterControlSysEx(data);
|
|
2310
|
+
return this.handleGlobalParameterControlSysEx(data, scheduleTime);
|
|
2337
2311
|
default:
|
|
2338
2312
|
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
2339
2313
|
}
|
|
@@ -2341,10 +2315,10 @@ class Midy {
|
|
|
2341
2315
|
case 8:
|
|
2342
2316
|
switch (data[3]) {
|
|
2343
2317
|
case 8: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
|
|
2344
|
-
return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true);
|
|
2318
|
+
return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true, scheduleTime);
|
|
2345
2319
|
case 9:
|
|
2346
2320
|
// https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
|
|
2347
|
-
return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true);
|
|
2321
|
+
return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true, scheduleTime);
|
|
2348
2322
|
default:
|
|
2349
2323
|
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
2350
2324
|
}
|
|
@@ -2364,7 +2338,7 @@ class Midy {
|
|
|
2364
2338
|
case 10:
|
|
2365
2339
|
switch (data[3]) {
|
|
2366
2340
|
case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca23.pdf
|
|
2367
|
-
return this.handleKeyBasedInstrumentControlSysEx(data);
|
|
2341
|
+
return this.handleKeyBasedInstrumentControlSysEx(data, scheduleTime);
|
|
2368
2342
|
default:
|
|
2369
2343
|
console.warn(`Unsupported Exclusive Message: ${data}`);
|
|
2370
2344
|
}
|
|
@@ -2373,49 +2347,50 @@ class Midy {
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
- handleMasterVolumeSysEx(data) {
+ handleMasterVolumeSysEx(data, scheduleTime) {
  const volume = (data[5] * 128 + data[4]) / 16383;
- this.setMasterVolume(volume);
+ this.setMasterVolume(volume, scheduleTime);
  }
- setMasterVolume(volume) {
+ setMasterVolume(volume, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  if (volume < 0 && 1 < volume) {
  console.error("Master Volume is out of range");
  }
  else {
-
-
-
+ this.masterVolume.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(volume * volume, scheduleTime);
  }
  }
- handleMasterFineTuningSysEx(data) {
+ handleMasterFineTuningSysEx(data, scheduleTime) {
  const fineTuning = data[5] * 128 + data[4];
- this.setMasterFineTuning(fineTuning);
+ this.setMasterFineTuning(fineTuning, scheduleTime);
  }
- setMasterFineTuning(value) {
+ setMasterFineTuning(value, scheduleTime) {
  const prev = this.masterFineTuning;
  const next = (value - 8192) / 8.192; // cent
  this.masterFineTuning = next;
  channel.detune += next - prev;
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
- handleMasterCoarseTuningSysEx(data) {
+ handleMasterCoarseTuningSysEx(data, scheduleTime) {
  const coarseTuning = data[4];
- this.setMasterCoarseTuning(coarseTuning);
+ this.setMasterCoarseTuning(coarseTuning, scheduleTime);
  }
- setMasterCoarseTuning(value) {
+ setMasterCoarseTuning(value, scheduleTime) {
  const prev = this.masterCoarseTuning;
  const next = (value - 64) * 100; // cent
  this.masterCoarseTuning = next;
  channel.detune += next - prev;
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
- handleGlobalParameterControlSysEx(data) {
+ handleGlobalParameterControlSysEx(data, scheduleTime) {
  if (data[7] === 1) {
  switch (data[8]) {
  case 1:
  return this.handleReverbParameterSysEx(data);
  case 2:
- return this.handleChorusParameterSysEx(data);
+ return this.handleChorusParameterSysEx(data, scheduleTime);
  default:
  console.warn(`Unsupported Global Parameter Control Message: ${data}`);
  }
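Note: the hunk above threads an optional scheduleTime argument through the master volume and master tuning handlers instead of reading the clock inside each setter, and setMasterVolume now defaults it with scheduleTime ??= this.audioContext.currentTime. A minimal usage sketch of the new signature; the midy variable stands for an already-constructed player instance and the 0.5 s offset is illustrative, not taken from this diff:

// Schedule a master-volume change half a second ahead on the audio clock.
const when = midy.audioContext.currentTime + 0.5;
midy.setMasterVolume(0.8, when); // cancels automation at or after `when`, then sets gain to 0.8 * 0.8 there
midy.setMasterVolume(0.8);       // omitted scheduleTime falls back to audioContext.currentTime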
@@ -2494,88 +2469,84 @@ class Midy {
  calcDelay(rt60, feedback) {
  return -rt60 * Math.log10(feedback) / 3;
  }
- handleChorusParameterSysEx(data) {
+ handleChorusParameterSysEx(data, scheduleTime) {
  switch (data[9]) {
  case 0:
- return this.setChorusType(data[10]);
+ return this.setChorusType(data[10], scheduleTime);
  case 1:
- return this.setChorusModRate(data[10]);
+ return this.setChorusModRate(data[10], scheduleTime);
  case 2:
- return this.setChorusModDepth(data[10]);
+ return this.setChorusModDepth(data[10], scheduleTime);
  case 3:
- return this.setChorusFeedback(data[10]);
+ return this.setChorusFeedback(data[10], scheduleTime);
  case 4:
- return this.setChorusSendToReverb(data[10]);
+ return this.setChorusSendToReverb(data[10], scheduleTime);
  }
  }
- setChorusType(type) {
+ setChorusType(type, scheduleTime) {
  switch (type) {
  case 0:
- return this.setChorusParameter(3, 5, 0, 0);
+ return this.setChorusParameter(3, 5, 0, 0, scheduleTime);
  case 1:
- return this.setChorusParameter(9, 19, 5, 0);
+ return this.setChorusParameter(9, 19, 5, 0, scheduleTime);
  case 2:
- return this.setChorusParameter(3, 19, 8, 0);
+ return this.setChorusParameter(3, 19, 8, 0, scheduleTime);
  case 3:
- return this.setChorusParameter(9, 16, 16, 0);
+ return this.setChorusParameter(9, 16, 16, 0, scheduleTime);
  case 4:
- return this.setChorusParameter(2, 24, 64, 0);
+ return this.setChorusParameter(2, 24, 64, 0, scheduleTime);
  case 5:
- return this.setChorusParameter(1, 5, 112, 0);
+ return this.setChorusParameter(1, 5, 112, 0, scheduleTime);
  default:
  console.warn(`Unsupported Chorus Type: ${type}`);
  }
  }
- setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
- this.setChorusModRate(modRate);
- this.setChorusModDepth(modDepth);
- this.setChorusFeedback(feedback);
- this.setChorusSendToReverb(sendToReverb);
+ setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime) {
+ this.setChorusModRate(modRate, scheduleTime);
+ this.setChorusModDepth(modDepth, scheduleTime);
+ this.setChorusFeedback(feedback, scheduleTime);
+ this.setChorusSendToReverb(sendToReverb, scheduleTime);
  }
- setChorusModRate(value) {
- const now = this.audioContext.currentTime;
+ setChorusModRate(value, scheduleTime) {
  const modRate = this.getChorusModRate(value);
  this.chorus.modRate = modRate;
- this.chorusEffect.lfo.frequency.setValueAtTime(modRate,
+ this.chorusEffect.lfo.frequency.setValueAtTime(modRate, scheduleTime);
  }
  getChorusModRate(value) {
  return value * 0.122; // Hz
  }
- setChorusModDepth(value) {
- const now = this.audioContext.currentTime;
+ setChorusModDepth(value, scheduleTime) {
  const modDepth = this.getChorusModDepth(value);
  this.chorus.modDepth = modDepth;
  this.chorusEffect.lfoGain.gain
- .cancelScheduledValues(
- .setValueAtTime(modDepth / 2,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(modDepth / 2, scheduleTime);
  }
  getChorusModDepth(value) {
  return (value + 1) / 3200; // second
  }
- setChorusFeedback(value) {
- const now = this.audioContext.currentTime;
+ setChorusFeedback(value, scheduleTime) {
  const feedback = this.getChorusFeedback(value);
  this.chorus.feedback = feedback;
  const chorusEffect = this.chorusEffect;
  for (let i = 0; i < chorusEffect.feedbackGains.length; i++) {
  chorusEffect.feedbackGains[i].gain
- .cancelScheduledValues(
- .setValueAtTime(feedback,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(feedback, scheduleTime);
  }
  }
  getChorusFeedback(value) {
  return value * 0.00763;
  }
- setChorusSendToReverb(value) {
+ setChorusSendToReverb(value, scheduleTime) {
  const sendToReverb = this.getChorusSendToReverb(value);
  const sendGain = this.chorusEffect.sendGain;
  if (0 < this.chorus.sendToReverb) {
  this.chorus.sendToReverb = sendToReverb;
  if (0 < sendToReverb) {
- const now = this.audioContext.currentTime;
  sendGain.gain
- .cancelScheduledValues(
- .setValueAtTime(sendToReverb,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(sendToReverb, scheduleTime);
  }
  else {
  sendGain.disconnect();
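Note: the chorus setters in the hunk above now take the same scheduleTime, and the helpers getChorusModRate, getChorusModDepth and getChorusFeedback convert the 7-bit SysEx data values into physical units. A small worked example using the preset values of chorus type 2 from this diff (setChorusParameter(3, 19, 8, 0)); the printed values are approximations for illustration only:

// Chorus type 2 preset: modRate = 3, modDepth = 19, feedback = 8, sendToReverb = 0
console.log(3 * 0.122);       // ≈ 0.366 Hz   (getChorusModRate)
console.log((19 + 1) / 3200); // ≈ 0.00625 s  (getChorusModDepth)
console.log(8 * 0.00763);     // ≈ 0.061      (getChorusFeedback)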
@@ -2584,11 +2555,10 @@ class Midy {
  else {
  this.chorus.sendToReverb = sendToReverb;
  if (0 < sendToReverb) {
- const now = this.audioContext.currentTime;
  sendGain.connect(this.reverbEffect.input);
  sendGain.gain
- .cancelScheduledValues(
- .setValueAtTime(sendToReverb,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(sendToReverb, scheduleTime);
  }
  }
  }
@@ -2614,7 +2584,7 @@ class Midy {
  }
  return bitmap;
  }
- handleScaleOctaveTuning1ByteFormatSysEx(data, realtime) {
+ handleScaleOctaveTuning1ByteFormatSysEx(data, realtime, scheduleTime) {
  if (data.length < 19) {
  console.error("Data length is too short");
  return;
@@ -2629,10 +2599,10 @@ class Midy {
  channel.scaleOctaveTuningTable[j] = centValue;
  }
  if (realtime)
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
  }
- handleScaleOctaveTuning2ByteFormatSysEx(data, realtime) {
+ handleScaleOctaveTuning2ByteFormatSysEx(data, realtime, scheduleTime) {
  if (data.length < 31) {
  console.error("Data length is too short");
  return;
@@ -2651,7 +2621,7 @@ class Midy {
  channel.scaleOctaveTuningTable[j] = centValue;
  }
  if (realtime)
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
  }
  getPitchControl(channel, note) {
@@ -2710,7 +2680,7 @@ class Midy {
  if (table[5] !== 0)
  this.setModLfoToVolume(channel, note);
  }
-
+ handlePressureSysEx(data, tableName) {
  const channelNumber = data[4];
  const table = this.channels[channelNumber][tableName];
  for (let i = 5; i < data.length - 1; i += 2) {
@@ -2758,7 +2728,7 @@ class Midy {
  const controlValue = channel.keyBasedInstrumentControlTable[index];
  return (controlValue + 64) / 64;
  }
- handleKeyBasedInstrumentControlSysEx(data) {
+ handleKeyBasedInstrumentControlSysEx(data, scheduleTime) {
  const channelNumber = data[4];
  const keyNumber = data[5];
  const table = this.channels[channelNumber].keyBasedInstrumentControlTable;
@@ -2768,30 +2738,27 @@ class Midy {
  const index = keyNumber * 128 + controllerType;
  table[index] = value - 64;
  }
- this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127);
+ this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127, scheduleTime);
  }
-
- console.warn(`Unsupported Exclusive Message: ${data}`);
- }
- handleSysEx(data) {
+ handleSysEx(data, scheduleTime) {
  switch (data[0]) {
  case 126:
- return this.handleUniversalNonRealTimeExclusiveMessage(data);
+ return this.handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime);
  case 127:
- return this.handleUniversalRealTimeExclusiveMessage(data);
+ return this.handleUniversalRealTimeExclusiveMessage(data, scheduleTime);
  default:
-
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
- scheduleTask(callback,
+ scheduleTask(callback, scheduleTime) {
  return new Promise((resolve) => {
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.onended = () => {
  callback();
  resolve();
  };
- bufferSource.start(
- bufferSource.stop(
+ bufferSource.start(scheduleTime);
+ bufferSource.stop(scheduleTime);
  });
  }
  }
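Note: the final hunk's scheduleTask(callback, scheduleTime) relies on a common Web Audio trick: an AudioBufferSourceNode started and immediately stopped at a given time fires its onended handler close to that point on the audio clock, which gives clock-accurate timing for plain JavaScript callbacks. A standalone sketch of that technique, written against a bare AudioContext rather than the Midy instance; apart from the Web Audio API itself, the names here are illustrative:

// Run `callback` and resolve a promise at `when` seconds on the AudioContext clock.
function scheduleTask(audioContext, callback, when) {
  return new Promise((resolve) => {
    // A buffer-less source node is silent; starting and stopping it at `when`
    // makes `onended` fire roughly at that time on the audio clock.
    const bufferSource = new AudioBufferSourceNode(audioContext);
    bufferSource.onended = () => {
      callback();
      resolve();
    };
    bufferSource.start(when);
    bufferSource.stop(when);
  });
}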