@marmooo/midy 0.2.6 → 0.2.8
This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +18 -13
- package/esm/midy-GM1.d.ts +73 -74
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +207 -218
- package/esm/midy-GM2.d.ts +125 -127
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +358 -418
- package/esm/midy-GMLite.d.ts +69 -70
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +195 -207
- package/esm/midy.d.ts +148 -150
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +404 -500
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +73 -74
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +207 -218
- package/script/midy-GM2.d.ts +125 -127
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +358 -418
- package/script/midy-GMLite.d.ts +69 -70
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +195 -207
- package/script/midy.d.ts +148 -150
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +404 -500
package/esm/midy.js
CHANGED
@@ -291,18 +291,6 @@ export class Midy {
 delayTimes: this.generateDistributedArray(0.02, 2, 0.5),
 }
 });
-Object.defineProperty(this, "mono", {
-enumerable: true,
-configurable: true,
-writable: true,
-value: false
-}); // CC#124, CC#125
-Object.defineProperty(this, "omni", {
-enumerable: true,
-configurable: true,
-writable: true,
-value: false
-}); // CC#126, CC#127
 Object.defineProperty(this, "noteCheckInterval", {
 enumerable: true,
 configurable: true,
@@ -504,6 +492,7 @@ export class Midy {
 controlTable: this.initControlTable(),
 ...this.setChannelAudioNodes(audioContext),
 scheduledNotes: new SparseMap(128),
+sustainNotes: [],
 sostenutoNotes: new SparseMap(128),
 scaleOctaveTuningTable: new Float32Array(12), // [-100, 100] cent
 channelPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
@@ -587,7 +576,8 @@ export class Midy {
 const portamentoTarget = this.findPortamentoTarget(queueIndex);
 if (portamentoTarget)
 portamentoTarget.portamento = true;
-const notePromise = this.
+const notePromise = this.scheduleNoteOff(event.channel, event.noteNumber, event.velocity, startTime, false, // force
+portamentoTarget?.noteNumber);
 if (notePromise) {
 this.notePromises.push(notePromise);
 }
@@ -597,7 +587,7 @@ export class Midy {
 this.handlePolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
 break;
 case "controller":
-this.handleControlChange(
+this.handleControlChange(event.channel, event.controllerType, event.value, startTime);
 break;
 case "programChange":
 this.handleProgramChange(event.channel, event.programNumber, startTime);
@@ -640,10 +630,11 @@ export class Midy {
 resolve();
 return;
 }
-const
+const now = this.audioContext.currentTime;
+const t = now + offset;
 queueIndex = await this.scheduleTimelineEvents(t, offset, queueIndex);
 if (this.isPausing) {
-await this.stopNotes(0, true);
+await this.stopNotes(0, true, now);
 this.notePromises = [];
 resolve();
 this.isPausing = false;
@@ -651,7 +642,7 @@ export class Midy {
 return;
 }
 else if (this.isStopping) {
-await this.stopNotes(0, true);
+await this.stopNotes(0, true, now);
 this.notePromises = [];
 this.exclusiveClassMap.clear();
 this.audioBufferCache.clear();
@@ -661,7 +652,7 @@ export class Midy {
 return;
 }
 else if (this.isSeeking) {
-this.stopNotes(0, true);
+this.stopNotes(0, true, now);
 this.exclusiveClassMap.clear();
 this.startTime = this.audioContext.currentTime;
 queueIndex = this.getQueueIndex(this.resumeTime);
@@ -670,7 +661,6 @@ export class Midy {
 await schedulePlayback();
 }
 else {
-const now = this.audioContext.currentTime;
 const waitTime = now + this.noteCheckInterval;
 await this.scheduleTask(() => { }, waitTime);
 await schedulePlayback();
@@ -790,25 +780,21 @@ export class Midy {
 }
 return { instruments, timeline };
 }
-
-const now = this.audioContext.currentTime;
+stopChannelNotes(channelNumber, velocity, force, scheduleTime) {
 const channel = this.channels[channelNumber];
-
-
-
-
-
-const promise = this.scheduleNoteRelease(channelNumber, note.noteNumber, velocity, now, undefined, // portamentoNoteNumber
-force);
-this.notePromises.push(promise);
-}
+const promises = [];
+this.processScheduledNotes(channel, (note) => {
+const promise = this.scheduleNoteOff(channelNumber, note.noteNumber, velocity, scheduleTime, force, undefined);
+this.notePromises.push(promise);
+promises.push(promise);
 });
 channel.scheduledNotes.clear();
-
+return Promise.all(promises);
 }
-stopNotes(velocity, force) {
+stopNotes(velocity, force, scheduleTime) {
+const promises = [];
 for (let i = 0; i < this.channels.length; i++) {
-this.stopChannelNotes(i, velocity, force);
+promises.push(this.stopChannelNotes(i, velocity, force, scheduleTime));
 }
 return Promise.all(this.notePromises);
 }
@@ -856,34 +842,32 @@ export class Midy {
 const now = this.audioContext.currentTime;
 return this.resumeTime + now - this.startTime - this.startDelay;
 }
-processScheduledNotes(channel,
+processScheduledNotes(channel, callback) {
 channel.scheduledNotes.forEach((noteList) => {
 for (let i = 0; i < noteList.length; i++) {
 const note = noteList[i];
 if (!note)
 continue;
-if (scheduleTime < note.startTime)
-continue;
 callback(note);
 }
 });
 }
-getActiveNotes(channel,
+getActiveNotes(channel, scheduleTime) {
 const activeNotes = new SparseMap(128);
 channel.scheduledNotes.forEach((noteList) => {
-const activeNote = this.getActiveNote(noteList,
+const activeNote = this.getActiveNote(noteList, scheduleTime);
 if (activeNote) {
 activeNotes.set(activeNote.noteNumber, activeNote);
 }
 });
 return activeNotes;
 }
-getActiveNote(noteList,
+getActiveNote(noteList, scheduleTime) {
 for (let i = noteList.length - 1; i >= 0; i--) {
 const note = noteList[i];
 if (!note)
 return;
-if (
+if (scheduleTime < note.startTime)
 continue;
 return (note.ending) ? null : note;
 }
@@ -1043,44 +1027,36 @@ export class Midy {
 calcNoteDetune(channel, note) {
 return channel.scaleOctaveTuningTable[note.noteNumber % 12];
 }
-updateChannelDetune(channel) {
-
-
-const note = noteList[i];
-if (!note)
-continue;
-this.updateDetune(channel, note);
-}
+updateChannelDetune(channel, scheduleTime) {
+this.processScheduledNotes(channel, (note) => {
+this.updateDetune(channel, note, scheduleTime);
 });
 }
-updateDetune(channel, note) {
-const now = this.audioContext.currentTime;
+updateDetune(channel, note, scheduleTime) {
 const noteDetune = this.calcNoteDetune(channel, note);
 const pitchControl = this.getPitchControl(channel, note);
 const detune = channel.detune + noteDetune + pitchControl;
 note.bufferSource.detune
-.cancelScheduledValues(
-.setValueAtTime(detune,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(detune, scheduleTime);
 }
 getPortamentoTime(channel) {
 const factor = 5 * Math.log(10) / 127;
 const time = channel.state.portamentoTime;
 return Math.log(time) / factor;
 }
-setPortamentoStartVolumeEnvelope(channel, note) {
-const now = this.audioContext.currentTime;
+setPortamentoStartVolumeEnvelope(channel, note, scheduleTime) {
 const { voiceParams, startTime } = note;
 const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation);
 const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
 const volDelay = startTime + voiceParams.volDelay;
 const portamentoTime = volDelay + this.getPortamentoTime(channel);
 note.volumeEnvelopeNode.gain
-.cancelScheduledValues(
+.cancelScheduledValues(scheduleTime)
 .setValueAtTime(0, volDelay)
 .linearRampToValueAtTime(sustainVolume, portamentoTime);
 }
-setVolumeEnvelope(channel, note) {
-const now = this.audioContext.currentTime;
+setVolumeEnvelope(channel, note, scheduleTime) {
 const state = channel.state;
 const { voiceParams, startTime } = note;
 const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation) *
@@ -1091,7 +1067,7 @@ export class Midy {
 const volHold = volAttack + voiceParams.volHold;
 const volDecay = volHold + voiceParams.volDecay * state.decayTime * 2;
 note.volumeEnvelopeNode.gain
-.cancelScheduledValues(
+.cancelScheduledValues(scheduleTime)
 .setValueAtTime(0, startTime)
 .setValueAtTime(1e-6, volDelay) // exponentialRampToValueAtTime() requires a non-zero value
 .exponentialRampToValueAtTime(attackVolume, volAttack)
@@ -1099,7 +1075,6 @@ export class Midy {
 .linearRampToValueAtTime(sustainVolume, volDecay);
 }
 setPitchEnvelope(note, scheduleTime) {
-scheduleTime ??= this.audioContext.currentTime;
 const { voiceParams } = note;
 const baseRate = voiceParams.playbackRate;
 note.bufferSource.playbackRate
@@ -1126,8 +1101,7 @@ export class Midy {
 const maxFrequency = 20000; // max Hz of initialFilterFc
 return Math.max(minFrequency, Math.min(frequency, maxFrequency));
 }
-setPortamentoStartFilterEnvelope(channel, note) {
-const now = this.audioContext.currentTime;
+setPortamentoStartFilterEnvelope(channel, note, scheduleTime) {
 const state = channel.state;
 const { voiceParams, noteNumber, startTime } = note;
 const softPedalFactor = 1 -
@@ -1143,13 +1117,12 @@ export class Midy {
 const portamentoTime = startTime + this.getPortamentoTime(channel);
 const modDelay = startTime + voiceParams.modDelay;
 note.filterNode.frequency
-.cancelScheduledValues(
+.cancelScheduledValues(scheduleTime)
 .setValueAtTime(adjustedBaseFreq, startTime)
 .setValueAtTime(adjustedBaseFreq, modDelay)
 .linearRampToValueAtTime(adjustedSustainFreq, portamentoTime);
 }
-setFilterEnvelope(channel, note) {
-const now = this.audioContext.currentTime;
+setFilterEnvelope(channel, note, scheduleTime) {
 const state = channel.state;
 const { voiceParams, noteNumber, startTime } = note;
 const softPedalFactor = 1 -
@@ -1170,14 +1143,14 @@ export class Midy {
 const modHold = modAttack + voiceParams.modHold;
 const modDecay = modHold + voiceParams.modDecay;
 note.filterNode.frequency
-.cancelScheduledValues(
+.cancelScheduledValues(scheduleTime)
 .setValueAtTime(adjustedBaseFreq, startTime)
 .setValueAtTime(adjustedBaseFreq, modDelay)
 .exponentialRampToValueAtTime(adjustedPeekFreq, modAttack)
 .setValueAtTime(adjustedPeekFreq, modHold)
 .linearRampToValueAtTime(adjustedSustainFreq, modDecay);
 }
-startModulation(channel, note,
+startModulation(channel, note, scheduleTime) {
 const { voiceParams } = note;
 note.modulationLFO = new OscillatorNode(this.audioContext, {
 frequency: this.centToHz(voiceParams.freqModLFO),
@@ -1186,10 +1159,10 @@ export class Midy {
 gain: voiceParams.modLfoToFilterFc,
 });
 note.modulationDepth = new GainNode(this.audioContext);
-this.setModLfoToPitch(channel, note);
+this.setModLfoToPitch(channel, note, scheduleTime);
 note.volumeDepth = new GainNode(this.audioContext);
-this.setModLfoToVolume(channel, note);
-note.modulationLFO.start(startTime + voiceParams.delayModLFO);
+this.setModLfoToVolume(channel, note, scheduleTime);
+note.modulationLFO.start(note.startTime + voiceParams.delayModLFO);
 note.modulationLFO.connect(note.filterDepth);
 note.filterDepth.connect(note.filterNode.frequency);
 note.modulationLFO.connect(note.modulationDepth);
@@ -1197,15 +1170,15 @@ export class Midy {
 note.modulationLFO.connect(note.volumeDepth);
 note.volumeDepth.connect(note.volumeEnvelopeNode.gain);
 }
-startVibrato(channel, note,
+startVibrato(channel, note, scheduleTime) {
 const { voiceParams } = note;
 const state = channel.state;
 note.vibratoLFO = new OscillatorNode(this.audioContext, {
 frequency: this.centToHz(voiceParams.freqVibLFO) * state.vibratoRate * 2,
 });
-note.vibratoLFO.start(startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
+note.vibratoLFO.start(note.startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
 note.vibratoDepth = new GainNode(this.audioContext);
-this.setVibLfoToPitch(channel, note);
+this.setVibLfoToPitch(channel, note, scheduleTime);
 note.vibratoLFO.connect(note.vibratoDepth);
 note.vibratoDepth.connect(note.bufferSource.detune);
 }
@@ -1228,6 +1201,7 @@ export class Midy {
 }
 }
 async createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3) {
+const now = this.audioContext.currentTime;
 const state = channel.state;
 const controllerState = this.getControllerState(channel, noteNumber, velocity);
 const voiceParams = voice.getAllParams(controllerState);
@@ -1244,22 +1218,22 @@ export class Midy {
 });
 if (portamento) {
 note.portamento = true;
-this.setPortamentoStartVolumeEnvelope(channel, note);
-this.setPortamentoStartFilterEnvelope(channel, note);
+this.setPortamentoStartVolumeEnvelope(channel, note, now);
+this.setPortamentoStartFilterEnvelope(channel, note, now);
 }
 else {
 note.portamento = false;
-this.setVolumeEnvelope(channel, note);
-this.setFilterEnvelope(channel, note);
+this.setVolumeEnvelope(channel, note, now);
+this.setFilterEnvelope(channel, note, now);
 }
 if (0 < state.vibratoDepth) {
-this.startVibrato(channel, note,
+this.startVibrato(channel, note, now);
 }
-this.setPitchEnvelope(note);
+this.setPitchEnvelope(note, now);
 if (0 < state.modulationDepth) {
-this.startModulation(channel, note,
+this.startModulation(channel, note, now);
 }
-if (
+if (channel.mono && channel.currentBufferSource) {
 channel.currentBufferSource.stop(startTime);
 channel.currentBufferSource = note.bufferSource;
 }
@@ -1269,10 +1243,10 @@ export class Midy {
 note.volumeNode.connect(note.gainL);
 note.volumeNode.connect(note.gainR);
 if (0 < channel.chorusSendLevel) {
-this.setChorusEffectsSend(channel, note, 0);
+this.setChorusEffectsSend(channel, note, 0, now);
 }
 if (0 < channel.reverbSendLevel) {
-this.setReverbEffectsSend(channel, note, 0);
+this.setReverbEffectsSend(channel, note, 0, now);
 }
 note.bufferSource.start(startTime);
 return note;
@@ -1300,8 +1274,8 @@ export class Midy {
 const note = await this.createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3);
 note.gainL.connect(channel.gainL);
 note.gainR.connect(channel.gainR);
-if (channel.state.
-channel.
+if (0.5 <= channel.state.sustainPedal) {
+channel.sustainNotes.push(note);
 }
 const exclusiveClass = note.voiceParams.exclusiveClass;
 if (exclusiveClass !== 0) {
@@ -1309,9 +1283,9 @@ export class Midy {
 const prevEntry = this.exclusiveClassMap.get(exclusiveClass);
 const [prevNote, prevChannelNumber] = prevEntry;
 if (!prevNote.ending) {
-this.
-startTime,
-
+this.scheduleNoteOff(prevChannelNumber, prevNote.noteNumber, 0, // velocity,
+startTime, true, // force
+undefined);
 }
 }
 this.exclusiveClassMap.set(exclusiveClass, [note, channelNumber]);
@@ -1324,9 +1298,9 @@ export class Midy {
 scheduledNotes.set(noteNumber, [note]);
 }
 }
-noteOn(channelNumber, noteNumber, velocity,
-
-return this.scheduleNoteOn(channelNumber, noteNumber, velocity,
+noteOn(channelNumber, noteNumber, velocity, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
+return this.scheduleNoteOn(channelNumber, noteNumber, velocity, scheduleTime, false);
 }
 stopNote(endTime, stopTime, scheduledNotes, index) {
 const note = scheduledNotes[index];
@@ -1366,11 +1340,11 @@ export class Midy {
 note.bufferSource.stop(stopTime);
 });
 }
-
+scheduleNoteOff(channelNumber, noteNumber, _velocity, endTime, force, portamentoNoteNumber) {
 const channel = this.channels[channelNumber];
 const state = channel.state;
 if (!force) {
-if (0.5
+if (0.5 <= state.sustainPedal)
 return;
 if (channel.sostenutoNotes.has(noteNumber))
 return;
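The hunk above makes `scheduleTime` an explicit optional argument of `noteOn()`: when omitted it falls back to `this.audioContext.currentTime` via `??=`, otherwise the caller's value is forwarded to `scheduleNoteOn()`. A minimal usage sketch based only on the signature visible in this diff; the `midy` instance, its loaded soundfont, and `audioContext` are assumptions, not shown in this package diff:

```js
// Sketch only: assumes `midy` is an initialized Midy instance created with
// `audioContext` and that a soundfont has already been loaded elsewhere.
const now = audioContext.currentTime;

midy.noteOn(0, 60, 100);            // scheduleTime omitted -> defaults to currentTime
midy.noteOn(0, 64, 100, now + 0.5); // explicit start time on the audio clock
```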
@@ -1406,83 +1380,73 @@ export class Midy {
 }
 }
 }
-
-
-return this.
+noteOff(channelNumber, noteNumber, velocity, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
+return this.scheduleNoteOff(channelNumber, noteNumber, velocity, scheduleTime, false, // force
+undefined);
 }
-releaseSustainPedal(channelNumber, halfVelocity) {
+releaseSustainPedal(channelNumber, halfVelocity, scheduleTime) {
 const velocity = halfVelocity * 2;
 const channel = this.channels[channelNumber];
 const promises = [];
-channel.
-
-
-
-
-continue;
-const { noteNumber } = note;
-const promise = this.releaseNote(channelNumber, noteNumber, velocity);
-promises.push(promise);
-}
-});
+for (let i = 0; i < channel.sustainNotes.length; i++) {
+const promise = this.noteOff(channelNumber, channel.sustainNotes[i].noteNumber, velocity, scheduleTime);
+promises.push(promise);
+}
+channel.sustainNotes = [];
 return promises;
 }
-releaseSostenutoPedal(channelNumber, halfVelocity) {
+releaseSostenutoPedal(channelNumber, halfVelocity, scheduleTime) {
 const velocity = halfVelocity * 2;
 const channel = this.channels[channelNumber];
 const promises = [];
 channel.state.sostenutoPedal = 0;
-channel.sostenutoNotes.forEach((
-const
-const promise = this.releaseNote(channelNumber, noteNumber, velocity);
+channel.sostenutoNotes.forEach((note) => {
+const promise = this.noteOff(channelNumber, note.noteNumber, velocity, scheduleTime);
 promises.push(promise);
 });
 channel.sostenutoNotes.clear();
 return promises;
 }
-handleMIDIMessage(statusByte, data1, data2) {
-const channelNumber =
+handleMIDIMessage(statusByte, data1, data2, scheduleTime) {
+const channelNumber = statusByte & 0x0F;
 const messageType = statusByte & 0xF0;
 switch (messageType) {
 case 0x80:
-return this.
+return this.noteOff(channelNumber, data1, data2, scheduleTime);
 case 0x90:
-return this.noteOn(channelNumber, data1, data2);
+return this.noteOn(channelNumber, data1, data2, scheduleTime);
 case 0xA0:
-return this.handlePolyphonicKeyPressure(channelNumber, data1, data2);
+return this.handlePolyphonicKeyPressure(channelNumber, data1, data2, scheduleTime);
 case 0xB0:
-return this.handleControlChange(channelNumber, data1, data2);
+return this.handleControlChange(channelNumber, data1, data2, scheduleTime);
 case 0xC0:
-return this.handleProgramChange(channelNumber, data1);
+return this.handleProgramChange(channelNumber, data1, scheduleTime);
 case 0xD0:
-return this.handleChannelPressure(channelNumber, data1);
+return this.handleChannelPressure(channelNumber, data1, scheduleTime);
 case 0xE0:
-return this.handlePitchBendMessage(channelNumber, data1, data2);
+return this.handlePitchBendMessage(channelNumber, data1, data2, scheduleTime);
 default:
 console.warn(`Unsupported MIDI message: ${messageType.toString(16)}`);
 }
 }
-handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure,
-if (!startTime)
-startTime = this.audioContext.currentTime;
+handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure, scheduleTime) {
 const channel = this.channels[channelNumber];
 channel.state.polyphonicKeyPressure = pressure / 127;
 const table = channel.polyphonicKeyPressureTable;
-const activeNotes = this.getActiveNotes(channel,
+const activeNotes = this.getActiveNotes(channel, scheduleTime);
 if (activeNotes.has(noteNumber)) {
 const note = activeNotes.get(noteNumber);
 this.setControllerParameters(channel, note, table);
 }
 // this.applyVoiceParams(channel, 10);
 }
-handleProgramChange(channelNumber, program) {
+handleProgramChange(channelNumber, program, _scheduleTime) {
 const channel = this.channels[channelNumber];
 channel.bank = channel.bankMSB * 128 + channel.bankLSB;
 channel.program = program;
 }
-handleChannelPressure(channelNumber, value,
-if (!startTime)
-startTime = this.audioContext.currentTime;
+handleChannelPressure(channelNumber, value, scheduleTime) {
 const channel = this.channels[channelNumber];
 const prev = channel.state.channelPressure;
 const next = value / 127;
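The hunk above gives `noteOff()` the same optional `scheduleTime` defaulting and has `handleMIDIMessage()` thread a `scheduleTime` through to every per-message handler. A hedged sketch of dispatching raw MIDI bytes (for example from a Web MIDI `midimessage` event) with a small look-ahead; everything outside the `handleMIDIMessage()` call itself is an assumption, not part of this package:

```js
// Sketch only: `midy` is assumed to be a ready Midy instance created with `audioContext`.
function onMIDIMessage(event) {
  const [statusByte, data1, data2] = event.data; // e.g. 0x90 = note on; channel is in the low nibble
  const scheduleTime = audioContext.currentTime + 0.05; // small look-ahead on the audio clock
  midy.handleMIDIMessage(statusByte, data1, data2, scheduleTime);
}
```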
@@ -1492,72 +1456,68 @@ export class Midy {
 channel.detune += pressureDepth * (next - prev);
 }
 const table = channel.channelPressureTable;
-this.getActiveNotes(channel,
+this.getActiveNotes(channel, scheduleTime).forEach((note) => {
 this.setControllerParameters(channel, note, table);
 });
 // this.applyVoiceParams(channel, 13);
 }
-handlePitchBendMessage(channelNumber, lsb, msb) {
+handlePitchBendMessage(channelNumber, lsb, msb, scheduleTime) {
 const pitchBend = msb * 128 + lsb;
-this.setPitchBend(channelNumber, pitchBend);
+this.setPitchBend(channelNumber, pitchBend, scheduleTime);
 }
-setPitchBend(channelNumber, value) {
+setPitchBend(channelNumber, value, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 const state = channel.state;
 const prev = state.pitchWheel * 2 - 1;
 const next = (value - 8192) / 8192;
 state.pitchWheel = value / 16383;
 channel.detune += (next - prev) * state.pitchWheelSensitivity * 12800;
-this.updateChannelDetune(channel);
-this.applyVoiceParams(channel, 14);
+this.updateChannelDetune(channel, scheduleTime);
+this.applyVoiceParams(channel, 14, scheduleTime);
 }
-setModLfoToPitch(channel, note) {
-const now = this.audioContext.currentTime;
+setModLfoToPitch(channel, note, scheduleTime) {
 const modLfoToPitch = note.voiceParams.modLfoToPitch +
 this.getLFOPitchDepth(channel, note);
 const baseDepth = Math.abs(modLfoToPitch) + channel.state.modulationDepth;
 const modulationDepth = baseDepth * Math.sign(modLfoToPitch);
 note.modulationDepth.gain
-.cancelScheduledValues(
-.setValueAtTime(modulationDepth,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(modulationDepth, scheduleTime);
 }
-setVibLfoToPitch(channel, note) {
-const now = this.audioContext.currentTime;
+setVibLfoToPitch(channel, note, scheduleTime) {
 const vibLfoToPitch = note.voiceParams.vibLfoToPitch;
 const vibratoDepth = Math.abs(vibLfoToPitch) * channel.state.vibratoDepth *
 2;
 const vibratoDepthSign = 0 < vibLfoToPitch;
 note.vibratoDepth.gain
-.cancelScheduledValues(
-.setValueAtTime(vibratoDepth * vibratoDepthSign,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(vibratoDepth * vibratoDepthSign, scheduleTime);
 }
-setModLfoToFilterFc(channel, note) {
-const now = this.audioContext.currentTime;
+setModLfoToFilterFc(channel, note, scheduleTime) {
 const modLfoToFilterFc = note.voiceParams.modLfoToFilterFc +
 this.getLFOFilterDepth(channel, note);
 note.filterDepth.gain
-.cancelScheduledValues(
-.setValueAtTime(modLfoToFilterFc,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(modLfoToFilterFc, scheduleTime);
 }
-setModLfoToVolume(channel, note) {
-const now = this.audioContext.currentTime;
+setModLfoToVolume(channel, note, scheduleTime) {
 const modLfoToVolume = note.voiceParams.modLfoToVolume;
 const baseDepth = this.cbToRatio(Math.abs(modLfoToVolume)) - 1;
 const volumeDepth = baseDepth * Math.sign(modLfoToVolume) *
 (1 + this.getLFOAmplitudeDepth(channel, note));
 note.volumeDepth.gain
-.cancelScheduledValues(
-.setValueAtTime(volumeDepth,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(volumeDepth, scheduleTime);
 }
-setReverbEffectsSend(channel, note, prevValue) {
+setReverbEffectsSend(channel, note, prevValue, scheduleTime) {
 if (0 < prevValue) {
 if (0 < note.voiceParams.reverbEffectsSend) {
-const now = this.audioContext.currentTime;
 const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 91);
 const value = note.voiceParams.reverbEffectsSend + keyBasedValue;
 note.reverbEffectsSend.gain
-.cancelScheduledValues(
-.setValueAtTime(value,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(value, scheduleTime);
 }
 else {
 note.reverbEffectsSend.disconnect();
@@ -1575,15 +1535,14 @@ export class Midy {
 }
 }
 }
-setChorusEffectsSend(channel, note, prevValue) {
+setChorusEffectsSend(channel, note, prevValue, scheduleTime) {
 if (0 < prevValue) {
 if (0 < note.voiceParams.chorusEffectsSend) {
-const now = this.audioContext.currentTime;
 const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 93);
 const value = note.voiceParams.chorusEffectsSend + keyBasedValue;
 note.chorusEffectsSend.gain
-.cancelScheduledValues(
-.setValueAtTime(value,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(value, scheduleTime);
 }
 else {
 note.chorusEffectsSend.disconnect();
@@ -1601,75 +1560,71 @@ export class Midy {
 }
 }
 }
-setDelayModLFO(note) {
-const now = this.audioContext.currentTime;
+setDelayModLFO(note, scheduleTime) {
 const startTime = note.startTime;
-if (startTime <
+if (startTime < scheduleTime)
 return;
-note.modulationLFO.stop(
+note.modulationLFO.stop(scheduleTime);
 note.modulationLFO.start(startTime + note.voiceParams.delayModLFO);
 note.modulationLFO.connect(note.filterDepth);
 }
-setFreqModLFO(note) {
-const now = this.audioContext.currentTime;
+setFreqModLFO(note, scheduleTime) {
 const freqModLFO = note.voiceParams.freqModLFO;
 note.modulationLFO.frequency
-.cancelScheduledValues(
-.setValueAtTime(freqModLFO,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(freqModLFO, scheduleTime);
 }
-setFreqVibLFO(channel, note) {
-const now = this.audioContext.currentTime;
+setFreqVibLFO(channel, note, scheduleTime) {
 const freqVibLFO = note.voiceParams.freqVibLFO;
 note.vibratoLFO.frequency
-.cancelScheduledValues(
-.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2,
+.cancelScheduledValues(scheduleTime)
+.setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2, scheduleTime);
 }
 createVoiceParamsHandlers() {
 return {
-modLfoToPitch: (channel, note, _prevValue) => {
+modLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
 if (0 < channel.state.modulationDepth) {
-this.setModLfoToPitch(channel, note);
+this.setModLfoToPitch(channel, note, scheduleTime);
 }
 },
-vibLfoToPitch: (channel, note, _prevValue) => {
+vibLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
 if (0 < channel.state.vibratoDepth) {
-this.setVibLfoToPitch(channel, note);
+this.setVibLfoToPitch(channel, note, scheduleTime);
 }
 },
-modLfoToFilterFc: (channel, note, _prevValue) => {
+modLfoToFilterFc: (channel, note, _prevValue, scheduleTime) => {
 if (0 < channel.state.modulationDepth) {
-this.setModLfoToFilterFc(channel, note);
+this.setModLfoToFilterFc(channel, note, scheduleTime);
 }
 },
-modLfoToVolume: (channel, note, _prevValue) => {
+modLfoToVolume: (channel, note, _prevValue, scheduleTime) => {
 if (0 < channel.state.modulationDepth) {
-this.setModLfoToVolume(channel, note);
+this.setModLfoToVolume(channel, note, scheduleTime);
 }
 },
-chorusEffectsSend: (channel, note, prevValue) => {
-this.setChorusEffectsSend(channel, note, prevValue);
+chorusEffectsSend: (channel, note, prevValue, scheduleTime) => {
+this.setChorusEffectsSend(channel, note, prevValue, scheduleTime);
 },
-reverbEffectsSend: (channel, note, prevValue) => {
-this.setReverbEffectsSend(channel, note, prevValue);
+reverbEffectsSend: (channel, note, prevValue, scheduleTime) => {
+this.setReverbEffectsSend(channel, note, prevValue, scheduleTime);
 },
-delayModLFO: (_channel, note, _prevValue) => this.setDelayModLFO(note),
-freqModLFO: (_channel, note, _prevValue) => this.setFreqModLFO(note),
-delayVibLFO: (channel, note, prevValue) => {
+delayModLFO: (_channel, note, _prevValue, scheduleTime) => this.setDelayModLFO(note, scheduleTime),
+freqModLFO: (_channel, note, _prevValue, scheduleTime) => this.setFreqModLFO(note, scheduleTime),
+delayVibLFO: (channel, note, prevValue, scheduleTime) => {
 if (0 < channel.state.vibratoDepth) {
-const now = this.audioContext.currentTime;
 const vibratoDelay = channel.state.vibratoDelay * 2;
 const prevStartTime = note.startTime + prevValue * vibratoDelay;
-if (
+if (scheduleTime < prevStartTime)
 return;
 const value = note.voiceParams.delayVibLFO;
 const startTime = note.startTime + value * vibratoDelay;
-note.vibratoLFO.stop(
+note.vibratoLFO.stop(scheduleTime);
 note.vibratoLFO.start(startTime);
 }
 },
-freqVibLFO: (channel, note, _prevValue) => {
+freqVibLFO: (channel, note, _prevValue, scheduleTime) => {
 if (0 < channel.state.vibratoDepth) {
-this.setFreqVibLFO(channel, note);
+this.setFreqVibLFO(channel, note, scheduleTime);
 }
 },
 };
@@ -1681,54 +1636,49 @@ export class Midy {
 state[3] = noteNumber / 127;
 return state;
 }
-applyVoiceParams(channel, controllerType) {
-
-
-
-
+applyVoiceParams(channel, controllerType, scheduleTime) {
+this.processScheduledNotes(channel, (note) => {
+const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
+const voiceParams = note.voice.getParams(controllerType, controllerState);
+let appliedFilterEnvelope = false;
+let appliedVolumeEnvelope = false;
+for (const [key, value] of Object.entries(voiceParams)) {
+const prevValue = note.voiceParams[key];
+if (value === prevValue)
 continue;
-
-
-
-
-
-
-if (value === prevValue)
+note.voiceParams[key] = value;
+if (key in this.voiceParamsHandlers) {
+this.voiceParamsHandlers[key](channel, note, prevValue, scheduleTime);
+}
+else if (filterEnvelopeKeySet.has(key)) {
+if (appliedFilterEnvelope)
 continue;
-
-
-
+appliedFilterEnvelope = true;
+const noteVoiceParams = note.voiceParams;
+for (let i = 0; i < filterEnvelopeKeys.length; i++) {
+const key = filterEnvelopeKeys[i];
+if (key in voiceParams)
+noteVoiceParams[key] = voiceParams[key];
 }
-
-
-continue;
-appliedFilterEnvelope = true;
-const noteVoiceParams = note.voiceParams;
-for (let i = 0; i < filterEnvelopeKeys.length; i++) {
-const key = filterEnvelopeKeys[i];
-if (key in voiceParams)
-noteVoiceParams[key] = voiceParams[key];
-}
-if (note.portamento) {
-this.setPortamentoStartFilterEnvelope(channel, note);
-}
-else {
-this.setFilterEnvelope(channel, note);
-}
-this.setPitchEnvelope(note);
+if (note.portamento) {
+this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
 }
-else
-
-
-
-
-
-
-
-
-
-
+else {
+this.setFilterEnvelope(channel, note, scheduleTime);
+}
+this.setPitchEnvelope(note, scheduleTime);
+}
+else if (volumeEnvelopeKeySet.has(key)) {
+if (appliedVolumeEnvelope)
+continue;
+appliedVolumeEnvelope = true;
+const noteVoiceParams = note.voiceParams;
+for (let i = 0; i < volumeEnvelopeKeys.length; i++) {
+const key = volumeEnvelopeKeys[i];
+if (key in voiceParams)
+noteVoiceParams[key] = voiceParams[key];
 }
+this.setVolumeEnvelope(channel, note, scheduleTime);
 }
 }
 });
@@ -1771,12 +1721,12 @@ export class Midy {
 127: this.polyOn,
 };
 }
-handleControlChange(channelNumber, controllerType, value,
+handleControlChange(channelNumber, controllerType, value, scheduleTime) {
 const handler = this.controlChangeHandlers[controllerType];
 if (handler) {
-handler.call(this, channelNumber, value,
+handler.call(this, channelNumber, value, scheduleTime);
 const channel = this.channels[channelNumber];
-this.applyVoiceParams(channel, controllerType + 128);
+this.applyVoiceParams(channel, controllerType + 128, scheduleTime);
 this.applyControlTable(channel, controllerType);
 }
 else {
@@ -1787,9 +1737,8 @@ export class Midy {
 this.channels[channelNumber].bankMSB = msb;
 }
 updateModulation(channel, scheduleTime) {
-scheduleTime ??= this.audioContext.currentTime;
 const depth = channel.state.modulationDepth * channel.modulationDepthRange;
-this.processScheduledNotes(channel,
+this.processScheduledNotes(channel, (note) => {
 if (note.modulationDepth) {
 note.modulationDepth.gain.setValueAtTime(depth, scheduleTime);
 }
@@ -1800,6 +1749,7 @@ export class Midy {
 });
 }
 setModulationDepth(channelNumber, modulation, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 channel.state.modulationDepth = modulation / 127;
 this.updateModulation(channel, scheduleTime);
@@ -1810,8 +1760,7 @@ export class Midy {
 channel.state.portamentoTime = Math.exp(factor * portamentoTime);
 }
 setKeyBasedVolume(channel, scheduleTime) {
-
-this.processScheduledNotes(channel, scheduleTime, (note) => {
+this.processScheduledNotes(channel, (note) => {
 const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 7);
 if (keyBasedValue !== 0) {
 note.volumeNode.gain
@@ -1821,6 +1770,7 @@ export class Midy {
 });
 }
 setVolume(channelNumber, volume, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 channel.state.volume = volume / 127;
 this.updateChannelVolume(channel, scheduleTime);
@@ -1834,8 +1784,7 @@ export class Midy {
 };
 }
 setKeyBasedPan(channel, scheduleTime) {
-
-this.processScheduledNotes(channel, scheduleTime, (note) => {
+this.processScheduledNotes(channel, (note) => {
 const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 10);
 if (keyBasedValue !== 0) {
 const { gainLeft, gainRight } = this.panToGain((keyBasedValue + 1) / 2);
@@ -1849,12 +1798,14 @@ export class Midy {
 });
 }
 setPan(channelNumber, pan, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 channel.state.pan = pan / 127;
 this.updateChannelVolume(channel, scheduleTime);
 this.setKeyBasedPan(channel, scheduleTime);
 }
 setExpression(channelNumber, expression, scheduleTime) {
+scheduleTime ??= this.audioContext.currentTime;
 const channel = this.channels[channelNumber];
 channel.state.expression = expression / 127;
 this.updateChannelVolume(channel, scheduleTime);
@@ -1862,241 +1813,195 @@ export class Midy {
|
|
|
1862
1813
|
setBankLSB(channelNumber, lsb) {
|
|
1863
1814
|
this.channels[channelNumber].bankLSB = lsb;
|
|
1864
1815
|
}
|
|
1865
|
-
dataEntryLSB(channelNumber, value) {
|
|
1816
|
+
dataEntryLSB(channelNumber, value, scheduleTime) {
|
|
1866
1817
|
this.channels[channelNumber].dataLSB = value;
|
|
1867
|
-
this.handleRPN(channelNumber,
|
|
1818
|
+
this.handleRPN(channelNumber, scheduleTime);
|
|
1868
1819
|
}
|
|
1869
|
-
updateChannelVolume(channel) {
|
|
1870
|
-
const now = this.audioContext.currentTime;
|
|
1820
|
+
updateChannelVolume(channel, scheduleTime) {
|
|
1871
1821
|
const state = channel.state;
|
|
1872
1822
|
const volume = state.volume * state.expression;
|
|
1873
1823
|
const { gainLeft, gainRight } = this.panToGain(state.pan);
|
|
1874
1824
|
channel.gainL.gain
|
|
1875
|
-
.cancelScheduledValues(
|
|
1876
|
-
.setValueAtTime(volume * gainLeft,
|
|
1825
|
+
.cancelScheduledValues(scheduleTime)
|
|
1826
|
+
.setValueAtTime(volume * gainLeft, scheduleTime);
|
|
1877
1827
|
channel.gainR.gain
|
|
1878
|
-
.cancelScheduledValues(
|
|
1879
|
-
.setValueAtTime(volume * gainRight,
|
|
1828
|
+
.cancelScheduledValues(scheduleTime)
|
|
1829
|
+
.setValueAtTime(volume * gainRight, scheduleTime);
|
|
1880
1830
|
}
|
|
1881
|
-
setSustainPedal(channelNumber, value) {
|
|
1882
|
-
this.
|
|
1883
|
-
|
|
1884
|
-
|
|
1831
|
+
setSustainPedal(channelNumber, value, scheduleTime) {
|
|
1832
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1833
|
+
const channel = this.channels[channelNumber];
|
|
1834
|
+
channel.state.sustainPedal = value / 127;
|
|
1835
|
+
if (64 <= value) {
|
|
1836
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1837
|
+
channel.sustainNotes.push(note);
|
|
1838
|
+
});
|
|
1839
|
+
}
|
|
1840
|
+
else {
|
|
1841
|
+
this.releaseSustainPedal(channelNumber, value, scheduleTime);
|
|
1885
1842
|
}
|
|
1886
1843
|
}
|
|
1887
1844
|
setPortamento(channelNumber, value) {
|
|
1888
1845
|
this.channels[channelNumber].state.portamento = value / 127;
|
|
1889
1846
|
}
|
|
1890
|
-
setSostenutoPedal(channelNumber, value) {
|
|
1847
|
+
setSostenutoPedal(channelNumber, value, scheduleTime) {
|
|
1848
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1891
1849
|
const channel = this.channels[channelNumber];
|
|
1892
1850
|
channel.state.sostenutoPedal = value / 127;
|
|
1893
1851
|
if (64 <= value) {
|
|
1894
|
-
|
|
1895
|
-
channel.sostenutoNotes = this.getActiveNotes(channel, now);
|
|
1852
|
+
channel.sostenutoNotes = this.getActiveNotes(channel, scheduleTime);
|
|
1896
1853
|
}
|
|
1897
1854
|
else {
|
|
1898
|
-
this.releaseSostenutoPedal(channelNumber, value);
|
|
1855
|
+
this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
|
|
1899
1856
|
}
|
|
1900
1857
|
}
|
|
1901
|
-
setSoftPedal(channelNumber, softPedal) {
|
|
1858
|
+
setSoftPedal(channelNumber, softPedal, _scheduleTime) {
|
|
1902
1859
|
const channel = this.channels[channelNumber];
|
|
1903
1860
|
channel.state.softPedal = softPedal / 127;
|
|
1904
1861
|
}
|
|
1905
|
-
setFilterResonance(channelNumber, filterResonance) {
|
|
1906
|
-
|
|
1862
|
+
setFilterResonance(channelNumber, filterResonance, scheduleTime) {
|
|
1863
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1907
1864
|
const channel = this.channels[channelNumber];
|
|
1908
1865
|
const state = channel.state;
|
|
1909
1866
|
state.filterResonance = filterResonance / 64;
|
|
1910
|
-
|
|
1911
|
-
|
|
1912
|
-
|
|
1913
|
-
if (!note)
|
|
1914
|
-
continue;
|
|
1915
|
-
const Q = note.voiceParams.initialFilterQ / 5 * state.filterResonance;
|
|
1916
|
-
note.filterNode.Q.setValueAtTime(Q, now);
|
|
1917
|
-
}
|
|
1867
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1868
|
+
const Q = note.voiceParams.initialFilterQ / 5 * state.filterResonance;
|
|
1869
|
+
note.filterNode.Q.setValueAtTime(Q, scheduleTime);
|
|
1918
1870
|
});
|
|
1919
1871
|
}
|
|
1920
|
-
setReleaseTime(channelNumber, releaseTime) {
|
|
1872
|
+
setReleaseTime(channelNumber, releaseTime, _scheduleTime) {
|
|
1873
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1921
1874
|
const channel = this.channels[channelNumber];
|
|
1922
1875
|
channel.state.releaseTime = releaseTime / 64;
|
|
1923
1876
|
}
|
|
1924
|
-
setAttackTime(channelNumber, attackTime) {
|
|
1925
|
-
|
|
1877
|
+
setAttackTime(channelNumber, attackTime, scheduleTime) {
|
|
1878
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1926
1879
|
const channel = this.channels[channelNumber];
|
|
1927
1880
|
channel.state.attackTime = attackTime / 64;
|
|
1928
|
-
|
|
1929
|
-
|
|
1930
|
-
|
|
1931
|
-
|
|
1932
|
-
continue;
|
|
1933
|
-
if (note.startTime < now)
|
|
1934
|
-
continue;
|
|
1935
|
-
this.setVolumeEnvelope(channel, note);
|
|
1936
|
-
}
|
|
1881
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1882
|
+
if (note.startTime < scheduleTime)
|
|
1883
|
+
return false;
|
|
1884
|
+
this.setVolumeEnvelope(channel, note);
|
|
1937
1885
|
});
|
|
1938
1886
|
}
|
|
1939
|
-
setBrightness(channelNumber, brightness) {
|
|
1887
|
+
setBrightness(channelNumber, brightness, scheduleTime) {
|
|
1888
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1940
1889
|
const channel = this.channels[channelNumber];
|
|
1941
1890
|
channel.state.brightness = brightness / 64;
|
|
1942
|
-
|
|
1943
|
-
|
|
1944
|
-
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
this.setPortamentoStartFilterEnvelope(channel, note);
|
|
1949
|
-
}
|
|
1950
|
-
else {
|
|
1951
|
-
this.setFilterEnvelope(channel, note);
|
|
1952
|
-
}
|
|
1891
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1892
|
+
if (note.portamento) {
|
|
1893
|
+
this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
|
|
1894
|
+
}
|
|
1895
|
+
else {
|
|
1896
|
+
this.setFilterEnvelope(channel, note);
|
|
1953
1897
|
}
|
|
1954
1898
|
});
|
|
1955
1899
|
}
|
|
1956
|
-
setDecayTime(channelNumber, dacayTime) {
|
|
1900
|
+
setDecayTime(channelNumber, dacayTime, scheduleTime) {
|
|
1901
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1957
1902
|
const channel = this.channels[channelNumber];
|
|
1958
1903
|
channel.state.decayTime = dacayTime / 64;
|
|
1959
|
-
|
|
1960
|
-
|
|
1961
|
-
const note = noteList[i];
|
|
1962
|
-
if (!note)
|
|
1963
|
-
continue;
|
|
1964
|
-
this.setVolumeEnvelope(channel, note);
|
|
1965
|
-
}
|
|
1904
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1905
|
+
this.setVolumeEnvelope(channel, note, scheduleTime);
|
|
1966
1906
|
});
|
|
1967
1907
|
}
|
|
1968
|
-
setVibratoRate(channelNumber, vibratoRate) {
|
|
1908
|
+
setVibratoRate(channelNumber, vibratoRate, scheduleTime) {
|
|
1909
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1969
1910
|
const channel = this.channels[channelNumber];
|
|
1970
1911
|
channel.state.vibratoRate = vibratoRate / 64;
|
|
1971
1912
|
if (channel.vibratoDepth <= 0)
|
|
1972
1913
|
return;
|
|
1973
|
-
|
|
1974
|
-
|
|
1975
|
-
const note = noteList[i];
|
|
1976
|
-
if (!note)
|
|
1977
|
-
continue;
|
|
1978
|
-
this.setVibLfoToPitch(channel, note);
|
|
1979
|
-
}
|
|
1914
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1915
|
+
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1980
1916
|
});
|
|
1981
1917
|
}
|
|
1982
|
-
setVibratoDepth(channelNumber, vibratoDepth) {
|
|
1918
|
+
setVibratoDepth(channelNumber, vibratoDepth, scheduleTime) {
|
|
1919
|
+
scheduleTime ??= this.audioContext.currentTime;
|
|
1983
1920
|
const channel = this.channels[channelNumber];
|
|
1984
1921
|
const prev = channel.state.vibratoDepth;
|
|
1985
1922
|
channel.state.vibratoDepth = vibratoDepth / 64;
|
|
1986
1923
|
if (0 < prev) {
|
|
1987
|
-
|
|
1988
|
-
|
|
1989
|
-
const note = noteList[i];
|
|
1990
|
-
if (!note)
|
|
1991
|
-
continue;
|
|
1992
|
-
this.setFreqVibLFO(channel, note);
|
|
1993
|
-
}
|
|
1924
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1925
|
+
this.setFreqVibLFO(channel, note, scheduleTime);
|
|
1994
1926
|
});
|
|
1995
1927
|
}
|
|
1996
1928
|
else {
|
|
1997
|
-
|
|
1998
|
-
|
|
1999
|
-
const note = noteList[i];
|
|
2000
|
-
if (!note)
|
|
2001
|
-
continue;
|
|
2002
|
-
this.startVibrato(channel, note, note.startTime);
|
|
2003
|
-
}
|
|
1929
|
+
this.processScheduledNotes(channel, (note) => {
|
|
1930
|
+
this.startVibrato(channel, note, scheduleTime);
|
|
2004
1931
|
});
|
|
2005
1932
|
}
|
|
2006
1933
|
}
|
|
2007
1934
|
setVibratoDelay(channelNumber, vibratoDelay) {
|
|
1935
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     channel.state.vibratoDelay = vibratoDelay / 64;
     if (0 < channel.state.vibratoDepth) {
-
-
-        const note = noteList[i];
-        if (!note)
-          continue;
-        this.startVibrato(channel, note, note.startTime);
-      }
+      this.processScheduledNotes(channel, (note) => {
+        this.startVibrato(channel, note, scheduleTime);
       });
     }
   }
-  setReverbSendLevel(channelNumber, reverbSendLevel) {
+  setReverbSendLevel(channelNumber, reverbSendLevel, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     const state = channel.state;
     const reverbEffect = this.reverbEffect;
     if (0 < state.reverbSendLevel) {
       if (0 < reverbSendLevel) {
-        const now = this.audioContext.currentTime;
         state.reverbSendLevel = reverbSendLevel / 127;
-        reverbEffect.input.gain
-
+        reverbEffect.input.gain
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(state.reverbSendLevel, scheduleTime);
       }
       else {
-
-
-
-
-            continue;
-          if (note.voiceParams.reverbEffectsSend <= 0)
-            continue;
-          note.reverbEffectsSend.disconnect();
-        }
+        this.processScheduledNotes(channel, (note) => {
+          if (note.voiceParams.reverbEffectsSend <= 0)
+            return false;
+          note.reverbEffectsSend.disconnect();
         });
       }
     }
     else {
       if (0 < reverbSendLevel) {
-
-
-        for (let i = 0; i < noteList.length; i++) {
-          const note = noteList[i];
-          if (!note)
-            continue;
-          this.setReverbEffectsSend(channel, note, 0);
-        }
+        this.processScheduledNotes(channel, (note) => {
+          this.setReverbEffectsSend(channel, note, 0, scheduleTime);
         });
         state.reverbSendLevel = reverbSendLevel / 127;
-        reverbEffect.input.gain
-
+        reverbEffect.input.gain
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(state.reverbSendLevel, scheduleTime);
       }
     }
   }
-  setChorusSendLevel(channelNumber, chorusSendLevel) {
+  setChorusSendLevel(channelNumber, chorusSendLevel, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     const state = channel.state;
     const chorusEffect = this.chorusEffect;
     if (0 < state.chorusSendLevel) {
       if (0 < chorusSendLevel) {
-        const now = this.audioContext.currentTime;
         state.chorusSendLevel = chorusSendLevel / 127;
-        chorusEffect.input.gain
-
+        chorusEffect.input.gain
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(state.chorusSendLevel, scheduleTime);
       }
       else {
-
-
-
-
-            continue;
-          if (note.voiceParams.chorusEffectsSend <= 0)
-            continue;
-          note.chorusEffectsSend.disconnect();
-        }
+        this.processScheduledNotes(channel, (note) => {
+          if (note.voiceParams.chorusEffectsSend <= 0)
+            return false;
+          note.chorusEffectsSend.disconnect();
         });
       }
     }
     else {
       if (0 < chorusSendLevel) {
-
-
-        for (let i = 0; i < noteList.length; i++) {
-          const note = noteList[i];
-          if (!note)
-            continue;
-          this.setChorusEffectsSend(channel, note, 0);
-        }
+        this.processScheduledNotes(channel, (note) => {
+          this.setChorusEffectsSend(channel, note, 0, scheduleTime);
         });
         state.chorusSendLevel = chorusSendLevel / 127;
-        chorusEffect.input.gain
-
+        chorusEffect.input.gain
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(state.chorusSendLevel, scheduleTime);
       }
     }
   }
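In 0.2.8 the send-level setters take an optional scheduleTime and default it to audioContext.currentTime, so effect-send changes can be queued on the audio clock instead of applying immediately. A minimal usage sketch, assuming `midy` is an initialized Midy instance with a running AudioContext (the channel number and 0-127 values below are illustrative, in the same range carried by CC#91/CC#93):

    // Illustrative only: schedule reverb/chorus send changes half a second ahead.
    const at = midy.audioContext.currentTime + 0.5;
    midy.setReverbSendLevel(0, 64, at); // channel 0, reverb send level 64
    midy.setChorusSendLevel(0, 32, at); // channel 0, chorus send level 32
    midy.setReverbSendLevel(0, 64);     // omitting scheduleTime applies it now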
@@ -2126,25 +2031,25 @@ export class Midy {
       channel.dataMSB = minMSB;
     }
   }
-  handleRPN(channelNumber, value) {
+  handleRPN(channelNumber, value, scheduleTime) {
     const channel = this.channels[channelNumber];
     const rpn = channel.rpnMSB * 128 + channel.rpnLSB;
     switch (rpn) {
       case 0:
         channel.dataLSB += value;
-        this.handlePitchBendRangeRPN(channelNumber);
+        this.handlePitchBendRangeRPN(channelNumber, scheduleTime);
         break;
       case 1:
         channel.dataLSB += value;
-        this.handleFineTuningRPN(channelNumber);
+        this.handleFineTuningRPN(channelNumber, scheduleTime);
         break;
       case 2:
         channel.dataMSB += value;
-        this.handleCoarseTuningRPN(channelNumber);
+        this.handleCoarseTuningRPN(channelNumber, scheduleTime);
         break;
       case 5:
         channel.dataLSB += value;
-        this.handleModulationDepthRangeRPN(channelNumber);
+        this.handleModulationDepthRangeRPN(channelNumber, scheduleTime);
         break;
       default:
         console.warn(`Channel ${channelNumber}: Unsupported RPN MSB=${channel.rpnMSB} LSB=${channel.rpnLSB}`);
@@ -2164,67 +2069,72 @@ export class Midy {
   setRPNLSB(channelNumber, value) {
     this.channels[channelNumber].rpnLSB = value;
   }
-  dataEntryMSB(channelNumber, value) {
+  dataEntryMSB(channelNumber, value, scheduleTime) {
     this.channels[channelNumber].dataMSB = value;
-    this.handleRPN(channelNumber,
+    this.handleRPN(channelNumber, scheduleTime);
   }
-  handlePitchBendRangeRPN(channelNumber) {
+  handlePitchBendRangeRPN(channelNumber, scheduleTime) {
     const channel = this.channels[channelNumber];
     this.limitData(channel, 0, 127, 0, 99);
     const pitchBendRange = channel.dataMSB + channel.dataLSB / 100;
-    this.setPitchBendRange(channelNumber, pitchBendRange);
+    this.setPitchBendRange(channelNumber, pitchBendRange, scheduleTime);
   }
-  setPitchBendRange(channelNumber, value) {
+  setPitchBendRange(channelNumber, value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     const state = channel.state;
     const prev = state.pitchWheelSensitivity;
     const next = value / 128;
     state.pitchWheelSensitivity = next;
     channel.detune += (state.pitchWheel * 2 - 1) * (next - prev) * 12800;
-    this.updateChannelDetune(channel);
-    this.applyVoiceParams(channel, 16);
+    this.updateChannelDetune(channel, scheduleTime);
+    this.applyVoiceParams(channel, 16, scheduleTime);
   }
-  handleFineTuningRPN(channelNumber) {
+  handleFineTuningRPN(channelNumber, scheduleTime) {
     const channel = this.channels[channelNumber];
     this.limitData(channel, 0, 127, 0, 127);
     const fineTuning = channel.dataMSB * 128 + channel.dataLSB;
-    this.setFineTuning(channelNumber, fineTuning);
+    this.setFineTuning(channelNumber, fineTuning, scheduleTime);
   }
-  setFineTuning(channelNumber, value) {
+  setFineTuning(channelNumber, value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     const prev = channel.fineTuning;
     const next = (value - 8192) / 8.192; // cent
     channel.fineTuning = next;
     channel.detune += next - prev;
-    this.updateChannelDetune(channel);
+    this.updateChannelDetune(channel, scheduleTime);
   }
-  handleCoarseTuningRPN(channelNumber) {
+  handleCoarseTuningRPN(channelNumber, scheduleTime) {
     const channel = this.channels[channelNumber];
     this.limitDataMSB(channel, 0, 127);
     const coarseTuning = channel.dataMSB;
-    this.setCoarseTuning(channelNumber, coarseTuning);
+    this.setCoarseTuning(channelNumber, coarseTuning, scheduleTime);
   }
-  setCoarseTuning(channelNumber, value) {
+  setCoarseTuning(channelNumber, value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     const prev = channel.coarseTuning;
     const next = (value - 64) * 100; // cent
     channel.coarseTuning = next;
     channel.detune += next - prev;
-    this.updateChannelDetune(channel);
+    this.updateChannelDetune(channel, scheduleTime);
   }
-  handleModulationDepthRangeRPN(channelNumber) {
+  handleModulationDepthRangeRPN(channelNumber, scheduleTime) {
     const channel = this.channels[channelNumber];
     this.limitData(channel, 0, 127, 0, 127);
     const modulationDepthRange = (dataMSB + dataLSB / 128) * 100;
-    this.setModulationDepthRange(channelNumber, modulationDepthRange);
+    this.setModulationDepthRange(channelNumber, modulationDepthRange, scheduleTime);
   }
-  setModulationDepthRange(channelNumber, modulationDepthRange) {
+  setModulationDepthRange(channelNumber, modulationDepthRange, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     const channel = this.channels[channelNumber];
     channel.modulationDepthRange = modulationDepthRange;
-    this.updateModulation(channel);
+    this.updateModulation(channel, scheduleTime);
   }
-  allSoundOff(channelNumber) {
-
+  allSoundOff(channelNumber, _value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
+    return this.stopChannelNotes(channelNumber, 0, true, scheduleTime);
   }
   resetAllControllers(channelNumber) {
     const stateTypes = [
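Each RPN handler now threads scheduleTime through to its setter, and the setters default it to audioContext.currentTime when called directly. A sketch of a direct call, assuming `midy` is an initialized Midy instance (the channel number and range below are illustrative):

    // Illustrative only: widen the pitch-bend range to 12 semitones on channel 1.
    midy.setPitchBendRange(1, 12); // applies at audioContext.currentTime
    // Or schedule the same change two seconds ahead on the audio clock:
    midy.setPitchBendRange(1, 12, midy.audioContext.currentTime + 2);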
@@ -2252,31 +2162,36 @@ export class Midy {
       channel[type] = this.constructor.channelSettings[type];
     }
   }
-  allNotesOff(channelNumber) {
-
+  allNotesOff(channelNumber, _value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
+    return this.stopChannelNotes(channelNumber, 0, false, scheduleTime);
   }
-  omniOff() {
-    this.
+  omniOff(channelNumber, value, scheduleTime) {
+    this.allNotesOff(channelNumber, value, scheduleTime);
   }
-  omniOn() {
-    this.
+  omniOn(channelNumber, value, scheduleTime) {
+    this.allNotesOff(channelNumber, value, scheduleTime);
   }
-  monoOn() {
-
+  monoOn(channelNumber, value, scheduleTime) {
+    const channel = this.channels[channelNumber];
+    this.allNotesOff(channelNumber, value, scheduleTime);
+    channel.mono = true;
   }
-  polyOn() {
-
+  polyOn(channelNumber, value, scheduleTime) {
+    const channel = this.channels[channelNumber];
+    this.allNotesOff(channelNumber, value, scheduleTime);
+    channel.mono = false;
   }
-  handleUniversalNonRealTimeExclusiveMessage(data) {
+  handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime) {
     switch (data[2]) {
       case 8:
         switch (data[3]) {
           case 8:
             // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-            return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false);
+            return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false, scheduleTime);
           case 9:
             // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-            return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false);
+            return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false, scheduleTime);
           default:
             console.warn(`Unsupported Exclusive Message: ${data}`);
         }
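The channel-mode handlers (allSoundOff, allNotesOff, omniOff/omniOn, monoOn/polyOn) now share the (channelNumber, value, scheduleTime) shape, and the mono flag moves from the instance onto the per-channel settings (see the channelSettings hunk at the end of this file). A usage sketch, assuming `midy` is an initialized Midy instance; the value argument is unused by these handlers and passed as 0 here:

    // Illustrative only: silence channel 0 now, then switch it to mono mode
    // one second later on the audio clock.
    const t = midy.audioContext.currentTime;
    midy.allSoundOff(0, 0, t);     // CC#120 behaviour
    midy.allNotesOff(0, 0, t + 1); // CC#123 behaviour
    midy.monoOn(0, 0, t + 1);      // CC#126 behaviour, sets channel.mono = true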
@@ -2319,18 +2234,18 @@ export class Midy {
     this.channels[9].bankMSB = 120;
     this.channels[9].bank = 120 * 128;
   }
-  handleUniversalRealTimeExclusiveMessage(data) {
+  handleUniversalRealTimeExclusiveMessage(data, scheduleTime) {
     switch (data[2]) {
       case 4:
         switch (data[3]) {
           case 1:
-            return this.handleMasterVolumeSysEx(data);
+            return this.handleMasterVolumeSysEx(data, scheduleTime);
           case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
-            return this.handleMasterFineTuningSysEx(data);
+            return this.handleMasterFineTuningSysEx(data, scheduleTime);
           case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
-            return this.handleMasterCoarseTuningSysEx(data);
+            return this.handleMasterCoarseTuningSysEx(data, scheduleTime);
           case 5: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca24.pdf
-            return this.handleGlobalParameterControlSysEx(data);
+            return this.handleGlobalParameterControlSysEx(data, scheduleTime);
           default:
             console.warn(`Unsupported Exclusive Message: ${data}`);
         }
@@ -2338,10 +2253,10 @@ export class Midy {
       case 8:
         switch (data[3]) {
           case 8: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-            return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true);
+            return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true, scheduleTime);
           case 9:
             // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
-            return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true);
+            return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true, scheduleTime);
           default:
             console.warn(`Unsupported Exclusive Message: ${data}`);
         }
@@ -2361,7 +2276,7 @@ export class Midy {
       case 10:
         switch (data[3]) {
           case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca23.pdf
-            return this.handleKeyBasedInstrumentControlSysEx(data);
+            return this.handleKeyBasedInstrumentControlSysEx(data, scheduleTime);
           default:
             console.warn(`Unsupported Exclusive Message: ${data}`);
         }
@@ -2370,49 +2285,50 @@ export class Midy {
         console.warn(`Unsupported Exclusive Message: ${data}`);
     }
   }
-  handleMasterVolumeSysEx(data) {
+  handleMasterVolumeSysEx(data, scheduleTime) {
     const volume = (data[5] * 128 + data[4]) / 16383;
-    this.setMasterVolume(volume);
+    this.setMasterVolume(volume, scheduleTime);
   }
-  setMasterVolume(volume) {
+  setMasterVolume(volume, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
     if (volume < 0 && 1 < volume) {
       console.error("Master Volume is out of range");
     }
     else {
-
-
-
+      this.masterVolume.gain
+        .cancelScheduledValues(scheduleTime)
+        .setValueAtTime(volume * volume, scheduleTime);
     }
   }
-  handleMasterFineTuningSysEx(data) {
+  handleMasterFineTuningSysEx(data, scheduleTime) {
     const fineTuning = data[5] * 128 + data[4];
-    this.setMasterFineTuning(fineTuning);
+    this.setMasterFineTuning(fineTuning, scheduleTime);
   }
-  setMasterFineTuning(value) {
+  setMasterFineTuning(value, scheduleTime) {
     const prev = this.masterFineTuning;
     const next = (value - 8192) / 8.192; // cent
     this.masterFineTuning = next;
     channel.detune += next - prev;
-    this.updateChannelDetune(channel);
+    this.updateChannelDetune(channel, scheduleTime);
   }
-  handleMasterCoarseTuningSysEx(data) {
+  handleMasterCoarseTuningSysEx(data, scheduleTime) {
     const coarseTuning = data[4];
-    this.setMasterCoarseTuning(coarseTuning);
+    this.setMasterCoarseTuning(coarseTuning, scheduleTime);
   }
-  setMasterCoarseTuning(value) {
+  setMasterCoarseTuning(value, scheduleTime) {
     const prev = this.masterCoarseTuning;
     const next = (value - 64) * 100; // cent
     this.masterCoarseTuning = next;
     channel.detune += next - prev;
-    this.updateChannelDetune(channel);
+    this.updateChannelDetune(channel, scheduleTime);
   }
-  handleGlobalParameterControlSysEx(data) {
+  handleGlobalParameterControlSysEx(data, scheduleTime) {
     if (data[7] === 1) {
       switch (data[8]) {
         case 1:
           return this.handleReverbParameterSysEx(data);
         case 2:
-          return this.handleChorusParameterSysEx(data);
+          return this.handleChorusParameterSysEx(data, scheduleTime);
         default:
           console.warn(`Unsupported Global Parameter Control Message: ${data}`);
       }
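handleMasterVolumeSysEx reads the 14-bit volume from data[4] (LSB) and data[5] (MSB), and setMasterVolume applies it as a squared gain at scheduleTime. A sketch of feeding a GM Master Volume message through the handleSysEx dispatcher shown later in this diff, assuming (based on the `switch (data[0])` there) that the array excludes the leading 0xF0 status byte, so data[0] is the 0x7F universal real-time ID:

    // Illustrative only: set master volume to roughly half scale, half a second ahead.
    // Bytes: 0x7F (real time), 0x7F (device), 0x04 0x01 (master volume), LSB, MSB, 0xF7.
    const sysEx = new Uint8Array([0x7F, 0x7F, 0x04, 0x01, 0x00, 0x40, 0xF7]);
    midy.handleSysEx(sysEx, midy.audioContext.currentTime + 0.5);
    // volume = (0x40 * 128 + 0x00) / 16383 ≈ 0.5, applied as gain = volume * volume ≈ 0.25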
@@ -2491,88 +2407,84 @@ export class Midy {
   calcDelay(rt60, feedback) {
     return -rt60 * Math.log10(feedback) / 3;
   }
-  handleChorusParameterSysEx(data) {
+  handleChorusParameterSysEx(data, scheduleTime) {
     switch (data[9]) {
       case 0:
-        return this.setChorusType(data[10]);
+        return this.setChorusType(data[10], scheduleTime);
       case 1:
-        return this.setChorusModRate(data[10]);
+        return this.setChorusModRate(data[10], scheduleTime);
       case 2:
-        return this.setChorusModDepth(data[10]);
+        return this.setChorusModDepth(data[10], scheduleTime);
       case 3:
-        return this.setChorusFeedback(data[10]);
+        return this.setChorusFeedback(data[10], scheduleTime);
       case 4:
-        return this.setChorusSendToReverb(data[10]);
+        return this.setChorusSendToReverb(data[10], scheduleTime);
     }
   }
-  setChorusType(type) {
+  setChorusType(type, scheduleTime) {
     switch (type) {
       case 0:
-        return this.setChorusParameter(3, 5, 0, 0);
+        return this.setChorusParameter(3, 5, 0, 0, scheduleTime);
       case 1:
-        return this.setChorusParameter(9, 19, 5, 0);
+        return this.setChorusParameter(9, 19, 5, 0, scheduleTime);
       case 2:
-        return this.setChorusParameter(3, 19, 8, 0);
+        return this.setChorusParameter(3, 19, 8, 0, scheduleTime);
       case 3:
-        return this.setChorusParameter(9, 16, 16, 0);
+        return this.setChorusParameter(9, 16, 16, 0, scheduleTime);
       case 4:
-        return this.setChorusParameter(2, 24, 64, 0);
+        return this.setChorusParameter(2, 24, 64, 0, scheduleTime);
       case 5:
-        return this.setChorusParameter(1, 5, 112, 0);
+        return this.setChorusParameter(1, 5, 112, 0, scheduleTime);
       default:
         console.warn(`Unsupported Chorus Type: ${type}`);
     }
   }
-  setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
-    this.setChorusModRate(modRate);
-    this.setChorusModDepth(modDepth);
-    this.setChorusFeedback(feedback);
-    this.setChorusSendToReverb(sendToReverb);
+  setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime) {
+    this.setChorusModRate(modRate, scheduleTime);
+    this.setChorusModDepth(modDepth, scheduleTime);
+    this.setChorusFeedback(feedback, scheduleTime);
+    this.setChorusSendToReverb(sendToReverb, scheduleTime);
   }
-  setChorusModRate(value) {
-    const now = this.audioContext.currentTime;
+  setChorusModRate(value, scheduleTime) {
     const modRate = this.getChorusModRate(value);
     this.chorus.modRate = modRate;
-    this.chorusEffect.lfo.frequency.setValueAtTime(modRate,
+    this.chorusEffect.lfo.frequency.setValueAtTime(modRate, scheduleTime);
   }
   getChorusModRate(value) {
     return value * 0.122; // Hz
   }
-  setChorusModDepth(value) {
-    const now = this.audioContext.currentTime;
+  setChorusModDepth(value, scheduleTime) {
     const modDepth = this.getChorusModDepth(value);
     this.chorus.modDepth = modDepth;
     this.chorusEffect.lfoGain.gain
-      .cancelScheduledValues(
-      .setValueAtTime(modDepth / 2,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(modDepth / 2, scheduleTime);
   }
   getChorusModDepth(value) {
     return (value + 1) / 3200; // second
   }
-  setChorusFeedback(value) {
-    const now = this.audioContext.currentTime;
+  setChorusFeedback(value, scheduleTime) {
     const feedback = this.getChorusFeedback(value);
     this.chorus.feedback = feedback;
     const chorusEffect = this.chorusEffect;
     for (let i = 0; i < chorusEffect.feedbackGains.length; i++) {
       chorusEffect.feedbackGains[i].gain
-        .cancelScheduledValues(
-        .setValueAtTime(feedback,
+        .cancelScheduledValues(scheduleTime)
+        .setValueAtTime(feedback, scheduleTime);
     }
   }
   getChorusFeedback(value) {
     return value * 0.00763;
   }
-  setChorusSendToReverb(value) {
+  setChorusSendToReverb(value, scheduleTime) {
     const sendToReverb = this.getChorusSendToReverb(value);
     const sendGain = this.chorusEffect.sendGain;
     if (0 < this.chorus.sendToReverb) {
       this.chorus.sendToReverb = sendToReverb;
       if (0 < sendToReverb) {
-        const now = this.audioContext.currentTime;
         sendGain.gain
-          .cancelScheduledValues(
-          .setValueAtTime(sendToReverb,
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(sendToReverb, scheduleTime);
       }
       else {
         sendGain.disconnect();
@@ -2581,11 +2493,10 @@ export class Midy {
     else {
       this.chorus.sendToReverb = sendToReverb;
       if (0 < sendToReverb) {
-        const now = this.audioContext.currentTime;
         sendGain.connect(this.reverbEffect.input);
         sendGain.gain
-          .cancelScheduledValues(
-          .setValueAtTime(sendToReverb,
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(sendToReverb, scheduleTime);
       }
     }
   }
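setChorusType maps the chorus-type byte onto setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime) presets, which in turn schedule the LFO rate, depth, feedback, and send-to-reverb gains at scheduleTime. A sketch of selecting a preset directly, assuming `midy` is an initialized Midy instance (type numbering follows the switch above; in the GM2 default table type 3 corresponds to "Chorus 4"):

    // Illustrative only: switch to chorus type 3 one second from now;
    // per the switch above this expands to setChorusParameter(9, 16, 16, 0, t).
    const t = midy.audioContext.currentTime + 1;
    midy.setChorusType(3, t);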
@@ -2611,7 +2522,7 @@ export class Midy {
     }
     return bitmap;
   }
-  handleScaleOctaveTuning1ByteFormatSysEx(data, realtime) {
+  handleScaleOctaveTuning1ByteFormatSysEx(data, realtime, scheduleTime) {
     if (data.length < 19) {
       console.error("Data length is too short");
       return;
@@ -2626,10 +2537,10 @@ export class Midy {
         channel.scaleOctaveTuningTable[j] = centValue;
       }
       if (realtime)
-        this.updateChannelDetune(channel);
+        this.updateChannelDetune(channel, scheduleTime);
     }
   }
-  handleScaleOctaveTuning2ByteFormatSysEx(data, realtime) {
+  handleScaleOctaveTuning2ByteFormatSysEx(data, realtime, scheduleTime) {
     if (data.length < 31) {
       console.error("Data length is too short");
       return;
@@ -2648,7 +2559,7 @@ export class Midy {
         channel.scaleOctaveTuningTable[j] = centValue;
       }
       if (realtime)
-        this.updateChannelDetune(channel);
+        this.updateChannelDetune(channel, scheduleTime);
     }
   }
   getPitchControl(channel, note) {
@@ -2707,7 +2618,7 @@ export class Midy {
     if (table[5] !== 0)
       this.setModLfoToVolume(channel, note);
   }
-
+  handlePressureSysEx(data, tableName) {
     const channelNumber = data[4];
     const table = this.channels[channelNumber][tableName];
     for (let i = 5; i < data.length - 1; i += 2) {
@@ -2731,13 +2642,8 @@ export class Midy {
     const slotSize = 6;
     const offset = controllerType * slotSize;
     const table = channel.controlTable.subarray(offset, offset + slotSize);
-
-
-      const note = noteList[i];
-      if (!note)
-        continue;
-      this.setControllerParameters(channel, note, table);
-    }
+    this.processScheduledNotes(channel, (note) => {
+      this.setControllerParameters(channel, note, table);
     });
   }
   handleControlChangeSysEx(data) {
@@ -2755,7 +2661,7 @@ export class Midy {
     const controlValue = channel.keyBasedInstrumentControlTable[index];
     return (controlValue + 64) / 64;
   }
-  handleKeyBasedInstrumentControlSysEx(data) {
+  handleKeyBasedInstrumentControlSysEx(data, scheduleTime) {
     const channelNumber = data[4];
     const keyNumber = data[5];
     const table = this.channels[channelNumber].keyBasedInstrumentControlTable;
@@ -2765,30 +2671,27 @@ export class Midy {
       const index = keyNumber * 128 + controllerType;
       table[index] = value - 64;
     }
-    this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127);
-  }
-  handleExclusiveMessage(data) {
-    console.warn(`Unsupported Exclusive Message: ${data}`);
+    this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127, scheduleTime);
   }
-  handleSysEx(data) {
+  handleSysEx(data, scheduleTime) {
     switch (data[0]) {
       case 126:
-        return this.handleUniversalNonRealTimeExclusiveMessage(data);
+        return this.handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime);
       case 127:
-        return this.handleUniversalRealTimeExclusiveMessage(data);
+        return this.handleUniversalRealTimeExclusiveMessage(data, scheduleTime);
       default:
-
+        console.warn(`Unsupported Exclusive Message: ${data}`);
     }
   }
-  scheduleTask(callback,
+  scheduleTask(callback, scheduleTime) {
     return new Promise((resolve) => {
       const bufferSource = new AudioBufferSourceNode(this.audioContext);
       bufferSource.onended = () => {
         callback();
         resolve();
       };
-      bufferSource.start(
-      bufferSource.stop(
+      bufferSource.start(scheduleTime);
+      bufferSource.stop(scheduleTime);
     });
   }
 }
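scheduleTask now takes the scheduling time as an explicit scheduleTime argument. It rides a throwaway AudioBufferSourceNode whose onended callback fires once the node, started and immediately stopped at scheduleTime, finishes, which keeps the callback on the audio clock rather than on setTimeout. A usage sketch, assuming `midy` is an initialized Midy instance:

    // Illustrative only: run a callback roughly two seconds from now,
    // synchronized to the AudioContext clock.
    await midy.scheduleTask(() => {
      console.log("audio-clock timer fired");
    }, midy.audioContext.currentTime + 2);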
@@ -2807,6 +2710,7 @@ Object.defineProperty(Midy, "channelSettings", {
     dataLSB: 0,
     rpnMSB: 127,
     rpnLSB: 127,
+    mono: false, // CC#124, CC#125
     fineTuning: 0, // cb
     coarseTuning: 0, // cb
     modulationDepthRange: 50, // cent
|