@marmooo/midy 0.2.5 → 0.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +17 -12
- package/esm/midy-GM1.d.ts +85 -87
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +188 -237
- package/esm/midy-GM2.d.ts +129 -126
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +374 -390
- package/esm/midy-GMLite.d.ts +85 -84
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +191 -190
- package/esm/midy.d.ts +153 -150
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +420 -442
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +85 -87
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +188 -237
- package/script/midy-GM2.d.ts +129 -126
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +374 -390
- package/script/midy-GMLite.d.ts +85 -84
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +191 -190
- package/script/midy.d.ts +153 -150
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +420 -442
package/script/midy.js
CHANGED
@@ -69,31 +69,37 @@ class Note {
       writable: true,
       value: void 0
     });
+    Object.defineProperty(this, "filterDepth", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
     Object.defineProperty(this, "volumeEnvelopeNode", {
       enumerable: true,
       configurable: true,
       writable: true,
       value: void 0
     });
-    Object.defineProperty(this, "
+    Object.defineProperty(this, "volumeDepth", {
       enumerable: true,
       configurable: true,
       writable: true,
       value: void 0
     });
-    Object.defineProperty(this, "
+    Object.defineProperty(this, "volumeNode", {
       enumerable: true,
       configurable: true,
       writable: true,
       value: void 0
     });
-    Object.defineProperty(this, "
+    Object.defineProperty(this, "gainL", {
       enumerable: true,
       configurable: true,
       writable: true,
       value: void 0
     });
-    Object.defineProperty(this, "
+    Object.defineProperty(this, "gainR", {
       enumerable: true,
       configurable: true,
       writable: true,
@@ -502,6 +508,10 @@ class Midy {
         ...this.setChannelAudioNodes(audioContext),
         scheduledNotes: new SparseMap(128),
         sostenutoNotes: new SparseMap(128),
+        scaleOctaveTuningTable: new Float32Array(12), // [-100, 100] cent
+        channelPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
+        polyphonicKeyPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
+        keyBasedInstrumentControlTable: new Int8Array(128 * 128), // [-64, 63]
       };
     });
     return channels;
@@ -568,10 +578,11 @@ class Midy {
       const event = this.timeline[queueIndex];
       if (event.startTime > t + this.lookAhead)
         break;
+      const startTime = event.startTime + this.startDelay - offset;
      switch (event.type) {
        case "noteOn":
          if (event.velocity !== 0) {
-            await this.scheduleNoteOn(event.channel, event.noteNumber, event.velocity,
+            await this.scheduleNoteOn(event.channel, event.noteNumber, event.velocity, startTime, event.portamento);
            break;
          }
          /* falls through */
@@ -579,29 +590,30 @@ class Midy {
          const portamentoTarget = this.findPortamentoTarget(queueIndex);
          if (portamentoTarget)
            portamentoTarget.portamento = true;
-          const notePromise = this.
+          const notePromise = this.scheduleNoteOff(this.omni ? 0 : event.channel, event.noteNumber, event.velocity, startTime, false, // force
+          portamentoTarget?.noteNumber);
          if (notePromise) {
            this.notePromises.push(notePromise);
          }
          break;
        }
        case "noteAftertouch":
-          this.handlePolyphonicKeyPressure(event.channel, event.noteNumber, event.amount);
+          this.handlePolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
          break;
        case "controller":
-          this.handleControlChange(this.omni ? 0 : event.channel, event.controllerType, event.value);
+          this.handleControlChange(this.omni ? 0 : event.channel, event.controllerType, event.value, startTime);
          break;
        case "programChange":
-          this.handleProgramChange(event.channel, event.programNumber);
+          this.handleProgramChange(event.channel, event.programNumber, startTime);
          break;
        case "channelAftertouch":
-          this.handleChannelPressure(event.channel, event.amount);
+          this.handleChannelPressure(event.channel, event.amount, startTime);
          break;
        case "pitchBend":
-          this.setPitchBend(event.channel, event.value + 8192);
+          this.setPitchBend(event.channel, event.value + 8192, startTime);
          break;
        case "sysEx":
-          this.handleSysEx(event.data);
+          this.handleSysEx(event.data, startTime);
      }
      queueIndex++;
    }
@@ -632,10 +644,11 @@ class Midy {
        resolve();
        return;
      }
-      const
+      const now = this.audioContext.currentTime;
+      const t = now + offset;
      queueIndex = await this.scheduleTimelineEvents(t, offset, queueIndex);
      if (this.isPausing) {
-        await this.stopNotes(0, true);
+        await this.stopNotes(0, true, now);
        this.notePromises = [];
        resolve();
        this.isPausing = false;
@@ -643,7 +656,7 @@ class Midy {
        return;
      }
      else if (this.isStopping) {
-        await this.stopNotes(0, true);
+        await this.stopNotes(0, true, now);
        this.notePromises = [];
        this.exclusiveClassMap.clear();
        this.audioBufferCache.clear();
@@ -653,7 +666,7 @@ class Midy {
        return;
      }
      else if (this.isSeeking) {
-        this.stopNotes(0, true);
+        this.stopNotes(0, true, now);
        this.exclusiveClassMap.clear();
        this.startTime = this.audioContext.currentTime;
        queueIndex = this.getQueueIndex(this.resumeTime);
@@ -662,7 +675,6 @@ class Midy {
        await schedulePlayback();
      }
      else {
-        const now = this.audioContext.currentTime;
        const waitTime = now + this.noteCheckInterval;
        await this.scheduleTask(() => { }, waitTime);
        await schedulePlayback();
@@ -782,25 +794,26 @@ class Midy {
    }
    return { instruments, timeline };
  }
-
-    const now = this.audioContext.currentTime;
+  stopChannelNotes(channelNumber, velocity, force, scheduleTime) {
    const channel = this.channels[channelNumber];
+    const promises = [];
    channel.scheduledNotes.forEach((noteList) => {
      for (let i = 0; i < noteList.length; i++) {
        const note = noteList[i];
        if (!note)
          continue;
-        const promise = this.
-        force);
+        const promise = this.scheduleNoteOff(channelNumber, note.noteNumber, velocity, scheduleTime, force, undefined);
        this.notePromises.push(promise);
+        promises.push(promise);
      }
    });
    channel.scheduledNotes.clear();
-
+    return Promise.all(promises);
  }
-  stopNotes(velocity, force) {
+  stopNotes(velocity, force, scheduleTime) {
+    const promises = [];
    for (let i = 0; i < this.channels.length; i++) {
-      this.stopChannelNotes(i, velocity, force);
+      promises.push(this.stopChannelNotes(i, velocity, force, scheduleTime));
    }
    return Promise.all(this.notePromises);
  }
@@ -848,22 +861,34 @@ class Midy {
    const now = this.audioContext.currentTime;
    return this.resumeTime + now - this.startTime - this.startDelay;
  }
-
+  processScheduledNotes(channel, scheduleTime, callback) {
+    channel.scheduledNotes.forEach((noteList) => {
+      for (let i = 0; i < noteList.length; i++) {
+        const note = noteList[i];
+        if (!note)
+          continue;
+        if (scheduleTime < note.startTime)
+          continue;
+        callback(note);
+      }
+    });
+  }
+  getActiveNotes(channel, scheduleTime) {
    const activeNotes = new SparseMap(128);
    channel.scheduledNotes.forEach((noteList) => {
-      const activeNote = this.getActiveNote(noteList,
+      const activeNote = this.getActiveNote(noteList, scheduleTime);
      if (activeNote) {
        activeNotes.set(activeNote.noteNumber, activeNote);
      }
    });
    return activeNotes;
  }
-  getActiveNote(noteList,
+  getActiveNote(noteList, scheduleTime) {
    for (let i = noteList.length - 1; i >= 0; i--) {
      const note = noteList[i];
      if (!note)
        return;
-      if (
+      if (scheduleTime < note.startTime)
        continue;
      return (note.ending) ? null : note;
    }
@@ -1023,74 +1048,66 @@ class Midy {
  calcNoteDetune(channel, note) {
    return channel.scaleOctaveTuningTable[note.noteNumber % 12];
  }
-  updateChannelDetune(channel) {
-
-
-        const note = noteList[i];
-        if (!note)
-          continue;
-        this.updateDetune(channel, note, 0);
-      }
+  updateChannelDetune(channel, scheduleTime) {
+    this.processScheduledNotes(channel, scheduleTime, (note) => {
+      this.updateDetune(channel, note, scheduleTime);
    });
  }
-  updateDetune(channel, note,
-    const now = this.audioContext.currentTime;
+  updateDetune(channel, note, scheduleTime) {
    const noteDetune = this.calcNoteDetune(channel, note);
-    const
+    const pitchControl = this.getPitchControl(channel, note);
+    const detune = channel.detune + noteDetune + pitchControl;
    note.bufferSource.detune
-      .cancelScheduledValues(
-      .setValueAtTime(detune,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(detune, scheduleTime);
  }
  getPortamentoTime(channel) {
    const factor = 5 * Math.log(10) / 127;
    const time = channel.state.portamentoTime;
    return Math.log(time) / factor;
  }
-  setPortamentoStartVolumeEnvelope(channel, note) {
-    const now = this.audioContext.currentTime;
+  setPortamentoStartVolumeEnvelope(channel, note, scheduleTime) {
    const { voiceParams, startTime } = note;
    const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation);
    const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
    const volDelay = startTime + voiceParams.volDelay;
    const portamentoTime = volDelay + this.getPortamentoTime(channel);
    note.volumeEnvelopeNode.gain
-      .cancelScheduledValues(
+      .cancelScheduledValues(scheduleTime)
      .setValueAtTime(0, volDelay)
      .linearRampToValueAtTime(sustainVolume, portamentoTime);
  }
-  setVolumeEnvelope(channel, note,
-    const now = this.audioContext.currentTime;
+  setVolumeEnvelope(channel, note, scheduleTime) {
    const state = channel.state;
    const { voiceParams, startTime } = note;
    const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation) *
-      (1 +
+      (1 + this.getAmplitudeControl(channel, note));
    const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
    const volDelay = startTime + voiceParams.volDelay;
    const volAttack = volDelay + voiceParams.volAttack * state.attackTime * 2;
    const volHold = volAttack + voiceParams.volHold;
    const volDecay = volHold + voiceParams.volDecay * state.decayTime * 2;
    note.volumeEnvelopeNode.gain
-      .cancelScheduledValues(
+      .cancelScheduledValues(scheduleTime)
      .setValueAtTime(0, startTime)
      .setValueAtTime(1e-6, volDelay) // exponentialRampToValueAtTime() requires a non-zero value
      .exponentialRampToValueAtTime(attackVolume, volAttack)
      .setValueAtTime(attackVolume, volHold)
      .linearRampToValueAtTime(sustainVolume, volDecay);
  }
-  setPitchEnvelope(note) {
-    const now = this.audioContext.currentTime;
+  setPitchEnvelope(note, scheduleTime) {
    const { voiceParams } = note;
    const baseRate = voiceParams.playbackRate;
    note.bufferSource.playbackRate
-      .cancelScheduledValues(
-      .setValueAtTime(baseRate,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(baseRate, scheduleTime);
    const modEnvToPitch = voiceParams.modEnvToPitch;
    if (modEnvToPitch === 0)
      return;
    const basePitch = this.rateToCent(baseRate);
    const peekPitch = basePitch + modEnvToPitch;
    const peekRate = this.centToRate(peekPitch);
-    const modDelay = startTime + voiceParams.modDelay;
+    const modDelay = note.startTime + voiceParams.modDelay;
    const modAttack = modDelay + voiceParams.modAttack;
    const modHold = modAttack + voiceParams.modHold;
    const modDecay = modHold + voiceParams.modDecay;
@@ -1105,8 +1122,7 @@ class Midy {
    const maxFrequency = 20000; // max Hz of initialFilterFc
    return Math.max(minFrequency, Math.min(frequency, maxFrequency));
  }
-  setPortamentoStartFilterEnvelope(channel, note) {
-    const now = this.audioContext.currentTime;
+  setPortamentoStartFilterEnvelope(channel, note, scheduleTime) {
    const state = channel.state;
    const { voiceParams, noteNumber, startTime } = note;
    const softPedalFactor = 1 -
@@ -1122,18 +1138,18 @@ class Midy {
    const portamentoTime = startTime + this.getPortamentoTime(channel);
    const modDelay = startTime + voiceParams.modDelay;
    note.filterNode.frequency
-      .cancelScheduledValues(
+      .cancelScheduledValues(scheduleTime)
      .setValueAtTime(adjustedBaseFreq, startTime)
      .setValueAtTime(adjustedBaseFreq, modDelay)
      .linearRampToValueAtTime(adjustedSustainFreq, portamentoTime);
  }
-  setFilterEnvelope(channel, note,
-    const now = this.audioContext.currentTime;
+  setFilterEnvelope(channel, note, scheduleTime) {
    const state = channel.state;
    const { voiceParams, noteNumber, startTime } = note;
    const softPedalFactor = 1 -
      (0.1 + (noteNumber / 127) * 0.2) * state.softPedal;
-    const baseCent = voiceParams.initialFilterFc +
+    const baseCent = voiceParams.initialFilterFc +
+      this.getFilterCutoffControl(channel, note);
    const baseFreq = this.centToHz(baseCent) * softPedalFactor *
      state.brightness * 2;
    const peekFreq = this.centToHz(baseCent + voiceParams.modEnvToFilterFc) *
@@ -1148,14 +1164,14 @@ class Midy {
    const modHold = modAttack + voiceParams.modHold;
    const modDecay = modHold + voiceParams.modDecay;
    note.filterNode.frequency
-      .cancelScheduledValues(
+      .cancelScheduledValues(scheduleTime)
      .setValueAtTime(adjustedBaseFreq, startTime)
      .setValueAtTime(adjustedBaseFreq, modDelay)
      .exponentialRampToValueAtTime(adjustedPeekFreq, modAttack)
      .setValueAtTime(adjustedPeekFreq, modHold)
      .linearRampToValueAtTime(adjustedSustainFreq, modDecay);
  }
-  startModulation(channel, note,
+  startModulation(channel, note, scheduleTime) {
    const { voiceParams } = note;
    note.modulationLFO = new OscillatorNode(this.audioContext, {
      frequency: this.centToHz(voiceParams.freqModLFO),
@@ -1164,10 +1180,10 @@ class Midy {
      gain: voiceParams.modLfoToFilterFc,
    });
    note.modulationDepth = new GainNode(this.audioContext);
-    this.setModLfoToPitch(channel, note,
+    this.setModLfoToPitch(channel, note, scheduleTime);
    note.volumeDepth = new GainNode(this.audioContext);
-    this.setModLfoToVolume(note,
-    note.modulationLFO.start(startTime + voiceParams.delayModLFO);
+    this.setModLfoToVolume(channel, note, scheduleTime);
+    note.modulationLFO.start(note.startTime + voiceParams.delayModLFO);
    note.modulationLFO.connect(note.filterDepth);
    note.filterDepth.connect(note.filterNode.frequency);
    note.modulationLFO.connect(note.modulationDepth);
@@ -1175,15 +1191,15 @@ class Midy {
    note.modulationLFO.connect(note.volumeDepth);
    note.volumeDepth.connect(note.volumeEnvelopeNode.gain);
  }
-  startVibrato(channel, note,
+  startVibrato(channel, note, scheduleTime) {
    const { voiceParams } = note;
    const state = channel.state;
    note.vibratoLFO = new OscillatorNode(this.audioContext, {
      frequency: this.centToHz(voiceParams.freqVibLFO) * state.vibratoRate * 2,
    });
-    note.vibratoLFO.start(startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
+    note.vibratoLFO.start(note.startTime + voiceParams.delayVibLFO * state.vibratoDelay * 2);
    note.vibratoDepth = new GainNode(this.audioContext);
-    this.setVibLfoToPitch(channel, note);
+    this.setVibLfoToPitch(channel, note, scheduleTime);
    note.vibratoLFO.connect(note.vibratoDepth);
    note.vibratoDepth.connect(note.bufferSource.detune);
  }
@@ -1206,6 +1222,7 @@ class Midy {
    }
  }
  async createNote(channel, voice, noteNumber, velocity, startTime, portamento, isSF3) {
+    const now = this.audioContext.currentTime;
    const state = channel.state;
    const controllerState = this.getControllerState(channel, noteNumber, velocity);
    const voiceParams = voice.getAllParams(controllerState);
@@ -1222,20 +1239,20 @@ class Midy {
    });
    if (portamento) {
      note.portamento = true;
-      this.setPortamentoStartVolumeEnvelope(channel, note);
-      this.setPortamentoStartFilterEnvelope(channel, note);
+      this.setPortamentoStartVolumeEnvelope(channel, note, now);
+      this.setPortamentoStartFilterEnvelope(channel, note, now);
    }
    else {
      note.portamento = false;
-      this.setVolumeEnvelope(channel, note,
-      this.setFilterEnvelope(channel, note,
+      this.setVolumeEnvelope(channel, note, now);
+      this.setFilterEnvelope(channel, note, now);
    }
    if (0 < state.vibratoDepth) {
-      this.startVibrato(channel, note,
+      this.startVibrato(channel, note, now);
    }
-    this.setPitchEnvelope(note);
+    this.setPitchEnvelope(note, now);
    if (0 < state.modulationDepth) {
-      this.startModulation(channel, note,
+      this.startModulation(channel, note, now);
    }
    if (this.mono && channel.currentBufferSource) {
      channel.currentBufferSource.stop(startTime);
@@ -1247,10 +1264,10 @@ class Midy {
    note.volumeNode.connect(note.gainL);
    note.volumeNode.connect(note.gainR);
    if (0 < channel.chorusSendLevel) {
-      this.setChorusEffectsSend(channel, note, 0);
+      this.setChorusEffectsSend(channel, note, 0, now);
    }
    if (0 < channel.reverbSendLevel) {
-      this.setReverbEffectsSend(channel, note, 0);
+      this.setReverbEffectsSend(channel, note, 0, now);
    }
    note.bufferSource.start(startTime);
    return note;
@@ -1287,9 +1304,9 @@ class Midy {
      const prevEntry = this.exclusiveClassMap.get(exclusiveClass);
      const [prevNote, prevChannelNumber] = prevEntry;
      if (!prevNote.ending) {
-        this.
-        startTime,
-
+        this.scheduleNoteOff(prevChannelNumber, prevNote.noteNumber, 0, // velocity,
+        startTime, true, // force
+        undefined);
      }
    }
    this.exclusiveClassMap.set(exclusiveClass, [note, channelNumber]);
@@ -1302,9 +1319,9 @@ class Midy {
      scheduledNotes.set(noteNumber, [note]);
    }
  }
-  noteOn(channelNumber, noteNumber, velocity,
-
-    return this.scheduleNoteOn(channelNumber, noteNumber, velocity,
+  noteOn(channelNumber, noteNumber, velocity, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
+    return this.scheduleNoteOn(channelNumber, noteNumber, velocity, scheduleTime, false);
  }
  stopNote(endTime, stopTime, scheduledNotes, index) {
    const note = scheduledNotes[index];
@@ -1344,7 +1361,7 @@ class Midy {
      note.bufferSource.stop(stopTime);
    });
  }
-
+  scheduleNoteOff(channelNumber, noteNumber, _velocity, endTime, force, portamentoNoteNumber) {
    const channel = this.channels[channelNumber];
    const state = channel.state;
    if (!force) {
@@ -1384,24 +1401,19 @@ class Midy {
      }
    }
  }
-
-
-    return this.
+  noteOff(channelNumber, noteNumber, velocity, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
+    return this.scheduleNoteOff(channelNumber, noteNumber, velocity, scheduleTime, false, // force
+    undefined);
  }
-  releaseSustainPedal(channelNumber, halfVelocity) {
+  releaseSustainPedal(channelNumber, halfVelocity, scheduleTime) {
    const velocity = halfVelocity * 2;
    const channel = this.channels[channelNumber];
    const promises = [];
-    channel
-
-
-
-      if (!note)
-        continue;
-      const { noteNumber } = note;
-      const promise = this.releaseNote(channelNumber, noteNumber, velocity);
-      promises.push(promise);
-    }
+    this.processScheduledNotes(channel, scheduleTime, (note) => {
+      const { noteNumber } = note;
+      const promise = this.noteOff(channelNumber, noteNumber, velocity);
+      promises.push(promise);
    });
    return promises;
  }
@@ -1412,53 +1424,51 @@ class Midy {
    channel.state.sostenutoPedal = 0;
    channel.sostenutoNotes.forEach((activeNote) => {
      const { noteNumber } = activeNote;
-      const promise = this.
+      const promise = this.noteOff(channelNumber, noteNumber, velocity);
      promises.push(promise);
    });
    channel.sostenutoNotes.clear();
    return promises;
  }
-  handleMIDIMessage(statusByte, data1, data2) {
+  handleMIDIMessage(statusByte, data1, data2, scheduleTime) {
    const channelNumber = omni ? 0 : statusByte & 0x0F;
    const messageType = statusByte & 0xF0;
    switch (messageType) {
      case 0x80:
-        return this.
+        return this.noteOff(channelNumber, data1, data2, scheduleTime);
      case 0x90:
-        return this.noteOn(channelNumber, data1, data2);
+        return this.noteOn(channelNumber, data1, data2, scheduleTime);
      case 0xA0:
-        return this.handlePolyphonicKeyPressure(channelNumber, data1, data2);
+        return this.handlePolyphonicKeyPressure(channelNumber, data1, data2, scheduleTime);
      case 0xB0:
-        return this.handleControlChange(channelNumber, data1, data2);
+        return this.handleControlChange(channelNumber, data1, data2, scheduleTime);
      case 0xC0:
-        return this.handleProgramChange(channelNumber, data1);
+        return this.handleProgramChange(channelNumber, data1, scheduleTime);
      case 0xD0:
-        return this.handleChannelPressure(channelNumber, data1);
+        return this.handleChannelPressure(channelNumber, data1, scheduleTime);
      case 0xE0:
-        return this.handlePitchBendMessage(channelNumber, data1, data2);
+        return this.handlePitchBendMessage(channelNumber, data1, data2, scheduleTime);
      default:
        console.warn(`Unsupported MIDI message: ${messageType.toString(16)}`);
    }
  }
-  handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure) {
-    const now = this.audioContext.currentTime;
+  handlePolyphonicKeyPressure(channelNumber, noteNumber, pressure, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.polyphonicKeyPressure = pressure / 127;
    const table = channel.polyphonicKeyPressureTable;
-    const activeNotes = this.getActiveNotes(channel,
+    const activeNotes = this.getActiveNotes(channel, scheduleTime);
    if (activeNotes.has(noteNumber)) {
      const note = activeNotes.get(noteNumber);
-      this.
+      this.setControllerParameters(channel, note, table);
    }
    // this.applyVoiceParams(channel, 10);
  }
-  handleProgramChange(channelNumber, program) {
+  handleProgramChange(channelNumber, program, _scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.bank = channel.bankMSB * 128 + channel.bankLSB;
    channel.program = program;
  }
-  handleChannelPressure(channelNumber, value) {
-    const now = this.audioContext.currentTime;
+  handleChannelPressure(channelNumber, value, scheduleTime) {
    const channel = this.channels[channelNumber];
    const prev = channel.state.channelPressure;
    const next = value / 127;
@@ -1468,69 +1478,68 @@ class Midy {
      channel.detune += pressureDepth * (next - prev);
    }
    const table = channel.channelPressureTable;
-    this.getActiveNotes(channel,
-      this.
+    this.getActiveNotes(channel, scheduleTime).forEach((note) => {
+      this.setControllerParameters(channel, note, table);
    });
    // this.applyVoiceParams(channel, 13);
  }
-  handlePitchBendMessage(channelNumber, lsb, msb) {
+  handlePitchBendMessage(channelNumber, lsb, msb, scheduleTime) {
    const pitchBend = msb * 128 + lsb;
-    this.setPitchBend(channelNumber, pitchBend);
+    this.setPitchBend(channelNumber, pitchBend, scheduleTime);
  }
-  setPitchBend(channelNumber, value) {
+  setPitchBend(channelNumber, value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
    const channel = this.channels[channelNumber];
    const state = channel.state;
    const prev = state.pitchWheel * 2 - 1;
    const next = (value - 8192) / 8192;
    state.pitchWheel = value / 16383;
    channel.detune += (next - prev) * state.pitchWheelSensitivity * 12800;
-    this.updateChannelDetune(channel);
-    this.applyVoiceParams(channel, 14);
+    this.updateChannelDetune(channel, scheduleTime);
+    this.applyVoiceParams(channel, 14, scheduleTime);
  }
-  setModLfoToPitch(channel, note,
-    const
-
+  setModLfoToPitch(channel, note, scheduleTime) {
+    const modLfoToPitch = note.voiceParams.modLfoToPitch +
+      this.getLFOPitchDepth(channel, note);
    const baseDepth = Math.abs(modLfoToPitch) + channel.state.modulationDepth;
    const modulationDepth = baseDepth * Math.sign(modLfoToPitch);
    note.modulationDepth.gain
-      .cancelScheduledValues(
-      .setValueAtTime(modulationDepth,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(modulationDepth, scheduleTime);
  }
-  setVibLfoToPitch(channel, note) {
-    const now = this.audioContext.currentTime;
+  setVibLfoToPitch(channel, note, scheduleTime) {
    const vibLfoToPitch = note.voiceParams.vibLfoToPitch;
    const vibratoDepth = Math.abs(vibLfoToPitch) * channel.state.vibratoDepth *
      2;
    const vibratoDepthSign = 0 < vibLfoToPitch;
    note.vibratoDepth.gain
-      .cancelScheduledValues(
-      .setValueAtTime(vibratoDepth * vibratoDepthSign,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(vibratoDepth * vibratoDepthSign, scheduleTime);
  }
-  setModLfoToFilterFc(note,
-    const
-
+  setModLfoToFilterFc(channel, note, scheduleTime) {
+    const modLfoToFilterFc = note.voiceParams.modLfoToFilterFc +
+      this.getLFOFilterDepth(channel, note);
    note.filterDepth.gain
-      .cancelScheduledValues(
-      .setValueAtTime(modLfoToFilterFc,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(modLfoToFilterFc, scheduleTime);
  }
-  setModLfoToVolume(note,
-    const now = this.audioContext.currentTime;
+  setModLfoToVolume(channel, note, scheduleTime) {
    const modLfoToVolume = note.voiceParams.modLfoToVolume;
    const baseDepth = this.cbToRatio(Math.abs(modLfoToVolume)) - 1;
-    const volumeDepth = baseDepth * Math.sign(modLfoToVolume) *
+    const volumeDepth = baseDepth * Math.sign(modLfoToVolume) *
+      (1 + this.getLFOAmplitudeDepth(channel, note));
    note.volumeDepth.gain
-      .cancelScheduledValues(
-      .setValueAtTime(volumeDepth,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(volumeDepth, scheduleTime);
  }
-  setReverbEffectsSend(channel, note, prevValue) {
+  setReverbEffectsSend(channel, note, prevValue, scheduleTime) {
    if (0 < prevValue) {
      if (0 < note.voiceParams.reverbEffectsSend) {
-        const now = this.audioContext.currentTime;
        const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 91);
        const value = note.voiceParams.reverbEffectsSend + keyBasedValue;
        note.reverbEffectsSend.gain
-          .cancelScheduledValues(
-          .setValueAtTime(value,
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(value, scheduleTime);
      }
      else {
        note.reverbEffectsSend.disconnect();
@@ -1548,15 +1557,14 @@ class Midy {
      }
    }
  }
-  setChorusEffectsSend(channel, note, prevValue) {
+  setChorusEffectsSend(channel, note, prevValue, scheduleTime) {
    if (0 < prevValue) {
      if (0 < note.voiceParams.chorusEffectsSend) {
-        const now = this.audioContext.currentTime;
        const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 93);
        const value = note.voiceParams.chorusEffectsSend + keyBasedValue;
        note.chorusEffectsSend.gain
-          .cancelScheduledValues(
-          .setValueAtTime(value,
+          .cancelScheduledValues(scheduleTime)
+          .setValueAtTime(value, scheduleTime);
      }
      else {
        note.chorusEffectsSend.disconnect();
@@ -1574,75 +1582,71 @@ class Midy {
      }
    }
  }
-  setDelayModLFO(note) {
-    const now = this.audioContext.currentTime;
+  setDelayModLFO(note, scheduleTime) {
    const startTime = note.startTime;
-    if (startTime <
+    if (startTime < scheduleTime)
      return;
-    note.modulationLFO.stop(
+    note.modulationLFO.stop(scheduleTime);
    note.modulationLFO.start(startTime + note.voiceParams.delayModLFO);
    note.modulationLFO.connect(note.filterDepth);
  }
-  setFreqModLFO(note) {
-    const now = this.audioContext.currentTime;
+  setFreqModLFO(note, scheduleTime) {
    const freqModLFO = note.voiceParams.freqModLFO;
    note.modulationLFO.frequency
-      .cancelScheduledValues(
-      .setValueAtTime(freqModLFO,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(freqModLFO, scheduleTime);
  }
-  setFreqVibLFO(channel, note) {
-    const now = this.audioContext.currentTime;
+  setFreqVibLFO(channel, note, scheduleTime) {
    const freqVibLFO = note.voiceParams.freqVibLFO;
    note.vibratoLFO.frequency
-      .cancelScheduledValues(
-      .setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(freqVibLFO * channel.state.vibratoRate * 2, scheduleTime);
  }
  createVoiceParamsHandlers() {
    return {
-      modLfoToPitch: (channel, note, _prevValue) => {
+      modLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
        if (0 < channel.state.modulationDepth) {
-          this.setModLfoToPitch(channel, note,
+          this.setModLfoToPitch(channel, note, scheduleTime);
        }
      },
-      vibLfoToPitch: (channel, note, _prevValue) => {
+      vibLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
        if (0 < channel.state.vibratoDepth) {
-          this.setVibLfoToPitch(channel, note);
+          this.setVibLfoToPitch(channel, note, scheduleTime);
        }
      },
-      modLfoToFilterFc: (channel, note, _prevValue) => {
+      modLfoToFilterFc: (channel, note, _prevValue, scheduleTime) => {
        if (0 < channel.state.modulationDepth) {
-          this.setModLfoToFilterFc(note,
+          this.setModLfoToFilterFc(channel, note, scheduleTime);
        }
      },
-      modLfoToVolume: (channel, note, _prevValue) => {
+      modLfoToVolume: (channel, note, _prevValue, scheduleTime) => {
        if (0 < channel.state.modulationDepth) {
-          this.setModLfoToVolume(note,
+          this.setModLfoToVolume(channel, note, scheduleTime);
        }
      },
-      chorusEffectsSend: (channel, note, prevValue) => {
-        this.setChorusEffectsSend(channel, note, prevValue);
+      chorusEffectsSend: (channel, note, prevValue, scheduleTime) => {
+        this.setChorusEffectsSend(channel, note, prevValue, scheduleTime);
      },
-      reverbEffectsSend: (channel, note, prevValue) => {
-        this.setReverbEffectsSend(channel, note, prevValue);
+      reverbEffectsSend: (channel, note, prevValue, scheduleTime) => {
+        this.setReverbEffectsSend(channel, note, prevValue, scheduleTime);
      },
-      delayModLFO: (_channel, note, _prevValue) => this.setDelayModLFO(note),
-      freqModLFO: (_channel, note, _prevValue) => this.setFreqModLFO(note),
-      delayVibLFO: (channel, note, prevValue) => {
+      delayModLFO: (_channel, note, _prevValue, scheduleTime) => this.setDelayModLFO(note, scheduleTime),
+      freqModLFO: (_channel, note, _prevValue, scheduleTime) => this.setFreqModLFO(note, scheduleTime),
+      delayVibLFO: (channel, note, prevValue, scheduleTime) => {
        if (0 < channel.state.vibratoDepth) {
-          const now = this.audioContext.currentTime;
          const vibratoDelay = channel.state.vibratoDelay * 2;
          const prevStartTime = note.startTime + prevValue * vibratoDelay;
-          if (
+          if (scheduleTime < prevStartTime)
            return;
          const value = note.voiceParams.delayVibLFO;
          const startTime = note.startTime + value * vibratoDelay;
-          note.vibratoLFO.stop(
+          note.vibratoLFO.stop(scheduleTime);
          note.vibratoLFO.start(startTime);
        }
      },
-      freqVibLFO: (channel, note, _prevValue) => {
+      freqVibLFO: (channel, note, _prevValue, scheduleTime) => {
        if (0 < channel.state.vibratoDepth) {
-          this.setFreqVibLFO(channel, note);
+          this.setFreqVibLFO(channel, note, scheduleTime);
        }
      },
    };
@@ -1654,7 +1658,7 @@ class Midy {
    state[3] = noteNumber / 127;
    return state;
  }
-  applyVoiceParams(channel, controllerType) {
+  applyVoiceParams(channel, controllerType, scheduleTime) {
    channel.scheduledNotes.forEach((noteList) => {
      for (let i = 0; i < noteList.length; i++) {
        const note = noteList[i];
@@ -1670,7 +1674,7 @@ class Midy {
          continue;
        note.voiceParams[key] = value;
        if (key in this.voiceParamsHandlers) {
-          this.voiceParamsHandlers[key](channel, note, prevValue);
+          this.voiceParamsHandlers[key](channel, note, prevValue, scheduleTime);
        }
        else if (filterEnvelopeKeySet.has(key)) {
          if (appliedFilterEnvelope)
@@ -1683,12 +1687,12 @@ class Midy {
            noteVoiceParams[key] = voiceParams[key];
          }
          if (note.portamento) {
-            this.setPortamentoStartFilterEnvelope(channel, note);
+            this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
          }
          else {
-            this.setFilterEnvelope(channel, note,
+            this.setFilterEnvelope(channel, note, scheduleTime);
          }
-          this.setPitchEnvelope(note);
+          this.setPitchEnvelope(note, scheduleTime);
        }
        else if (volumeEnvelopeKeySet.has(key)) {
          if (appliedVolumeEnvelope)
@@ -1700,7 +1704,7 @@ class Midy {
            if (key in voiceParams)
              noteVoiceParams[key] = voiceParams[key];
          }
-          this.setVolumeEnvelope(channel, note,
+          this.setVolumeEnvelope(channel, note, scheduleTime);
        }
      }
    }
@@ -1744,12 +1748,12 @@ class Midy {
      127: this.polyOn,
    };
  }
-  handleControlChange(channelNumber, controllerType, value) {
+  handleControlChange(channelNumber, controllerType, value, scheduleTime) {
    const handler = this.controlChangeHandlers[controllerType];
    if (handler) {
-      handler.call(this, channelNumber, value);
+      handler.call(this, channelNumber, value, scheduleTime);
      const channel = this.channels[channelNumber];
-      this.applyVoiceParams(channel, controllerType + 128);
+      this.applyVoiceParams(channel, controllerType + 128, scheduleTime);
      this.applyControlTable(channel, controllerType);
    }
    else {
@@ -1759,55 +1763,45 @@ class Midy {
  setBankMSB(channelNumber, msb) {
    this.channels[channelNumber].bankMSB = msb;
  }
-  updateModulation(channel) {
-
+  updateModulation(channel, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
    const depth = channel.state.modulationDepth * channel.modulationDepthRange;
-
-
-
-
-
-
-
-      }
-      else {
-        this.setPitchEnvelope(note);
-        this.startModulation(channel, note, now);
-      }
+    this.processScheduledNotes(channel, scheduleTime, (note) => {
+      if (note.modulationDepth) {
+        note.modulationDepth.gain.setValueAtTime(depth, scheduleTime);
+      }
+      else {
+        this.setPitchEnvelope(note, scheduleTime);
+        this.startModulation(channel, note, scheduleTime);
      }
    });
  }
-  setModulationDepth(channelNumber, modulation) {
+  setModulationDepth(channelNumber, modulation, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.modulationDepth = modulation / 127;
-    this.updateModulation(channel);
+    this.updateModulation(channel, scheduleTime);
  }
  setPortamentoTime(channelNumber, portamentoTime) {
    const channel = this.channels[channelNumber];
    const factor = 5 * Math.log(10) / 127;
    channel.state.portamentoTime = Math.exp(factor * portamentoTime);
  }
-  setKeyBasedVolume(channel) {
-
-
-
-
-      if (!note)
-        continue;
-      const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 7);
-      if (keyBasedValue === 0)
-        continue;
+  setKeyBasedVolume(channel, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
+    this.processScheduledNotes(channel, scheduleTime, (note) => {
+      const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 7);
+      if (keyBasedValue !== 0) {
      note.volumeNode.gain
-        .cancelScheduledValues(
-        .setValueAtTime(1 + keyBasedValue,
+        .cancelScheduledValues(scheduleTime)
+        .setValueAtTime(1 + keyBasedValue, scheduleTime);
      }
    });
  }
-  setVolume(channelNumber, volume) {
+  setVolume(channelNumber, volume, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.volume = volume / 127;
-    this.updateChannelVolume(channel);
-    this.setKeyBasedVolume(channel);
+    this.updateChannelVolume(channel, scheduleTime);
+    this.setKeyBasedVolume(channel, scheduleTime);
  }
  panToGain(pan) {
    const theta = Math.PI / 2 * Math.max(0, pan * 127 - 1) / 126;
@@ -1816,82 +1810,75 @@ class Midy {
      gainRight: Math.sin(theta),
    };
  }
-  setKeyBasedPan(channel) {
-
-
-
-
-      if (!note)
-        continue;
-      const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 10);
-      if (keyBasedValue === 0)
-        continue;
+  setKeyBasedPan(channel, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
+    this.processScheduledNotes(channel, scheduleTime, (note) => {
+      const keyBasedValue = this.getKeyBasedInstrumentControlValue(channel, note.noteNumber, 10);
+      if (keyBasedValue !== 0) {
      const { gainLeft, gainRight } = this.panToGain((keyBasedValue + 1) / 2);
      note.gainL.gain
-        .cancelScheduledValues(
-        .setValueAtTime(gainLeft,
+        .cancelScheduledValues(scheduleTime)
+        .setValueAtTime(gainLeft, scheduleTime);
      note.gainR.gain
-        .cancelScheduledValues(
-        .setValueAtTime(gainRight,
+        .cancelScheduledValues(scheduleTime)
+        .setValueAtTime(gainRight, scheduleTime);
      }
    });
  }
-  setPan(channelNumber, pan) {
+  setPan(channelNumber, pan, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.pan = pan / 127;
-    this.updateChannelVolume(channel);
-    this.setKeyBasedPan(channel);
+    this.updateChannelVolume(channel, scheduleTime);
+    this.setKeyBasedPan(channel, scheduleTime);
  }
-  setExpression(channelNumber, expression) {
+  setExpression(channelNumber, expression, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.expression = expression / 127;
-    this.updateChannelVolume(channel);
+    this.updateChannelVolume(channel, scheduleTime);
  }
  setBankLSB(channelNumber, lsb) {
    this.channels[channelNumber].bankLSB = lsb;
  }
-  dataEntryLSB(channelNumber, value) {
+  dataEntryLSB(channelNumber, value, scheduleTime) {
    this.channels[channelNumber].dataLSB = value;
-    this.handleRPN(channelNumber,
+    this.handleRPN(channelNumber, scheduleTime);
  }
-  updateChannelVolume(channel) {
-    const now = this.audioContext.currentTime;
+  updateChannelVolume(channel, scheduleTime) {
    const state = channel.state;
    const volume = state.volume * state.expression;
    const { gainLeft, gainRight } = this.panToGain(state.pan);
    channel.gainL.gain
-      .cancelScheduledValues(
-      .setValueAtTime(volume * gainLeft,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(volume * gainLeft, scheduleTime);
    channel.gainR.gain
-      .cancelScheduledValues(
-      .setValueAtTime(volume * gainRight,
+      .cancelScheduledValues(scheduleTime)
+      .setValueAtTime(volume * gainRight, scheduleTime);
  }
-  setSustainPedal(channelNumber, value) {
+  setSustainPedal(channelNumber, value, scheduleTime) {
+    scheduleTime ??= this.audioContext.currentTime;
    this.channels[channelNumber].state.sustainPedal = value / 127;
    if (value < 64) {
-      this.releaseSustainPedal(channelNumber, value);
+      this.releaseSustainPedal(channelNumber, value, scheduleTime);
    }
  }
  setPortamento(channelNumber, value) {
    this.channels[channelNumber].state.portamento = value / 127;
  }
-  setSostenutoPedal(channelNumber, value) {
+  setSostenutoPedal(channelNumber, value, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.sostenutoPedal = value / 127;
    if (64 <= value) {
-
-      channel.sostenutoNotes = this.getActiveNotes(channel, now);
+      channel.sostenutoNotes = this.getActiveNotes(channel, scheduleTime);
    }
    else {
      this.releaseSostenutoPedal(channelNumber, value);
    }
  }
-  setSoftPedal(channelNumber, softPedal) {
+  setSoftPedal(channelNumber, softPedal, _scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.softPedal = softPedal / 127;
  }
-  setFilterResonance(channelNumber, filterResonance) {
-    const now = this.audioContext.currentTime;
+  setFilterResonance(channelNumber, filterResonance, scheduleTime) {
    const channel = this.channels[channelNumber];
    const state = channel.state;
    state.filterResonance = filterResonance / 64;
@@ -1901,16 +1888,15 @@ class Midy {
        if (!note)
          continue;
        const Q = note.voiceParams.initialFilterQ / 5 * state.filterResonance;
-        note.filterNode.Q.setValueAtTime(Q,
+        note.filterNode.Q.setValueAtTime(Q, scheduleTime);
      }
    });
  }
-  setReleaseTime(channelNumber, releaseTime) {
+  setReleaseTime(channelNumber, releaseTime, _scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.releaseTime = releaseTime / 64;
  }
-  setAttackTime(channelNumber, attackTime) {
-    const now = this.audioContext.currentTime;
+  setAttackTime(channelNumber, attackTime, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.attackTime = attackTime / 64;
    channel.scheduledNotes.forEach((noteList) => {
@@ -1918,13 +1904,13 @@ class Midy {
        const note = noteList[i];
        if (!note)
          continue;
-        if (note.startTime <
+        if (note.startTime < scheduleTime)
          continue;
-        this.setVolumeEnvelope(channel, note
+        this.setVolumeEnvelope(channel, note);
      }
    });
  }
-  setBrightness(channelNumber, brightness) {
+  setBrightness(channelNumber, brightness, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.brightness = brightness / 64;
    channel.scheduledNotes.forEach((noteList) => {
@@ -1933,15 +1919,15 @@ class Midy {
        if (!note)
          continue;
        if (note.portamento) {
-          this.setPortamentoStartFilterEnvelope(channel, note);
+          this.setPortamentoStartFilterEnvelope(channel, note, scheduleTime);
        }
        else {
-          this.setFilterEnvelope(channel, note
+          this.setFilterEnvelope(channel, note);
        }
      }
    });
  }
-  setDecayTime(channelNumber, dacayTime) {
+  setDecayTime(channelNumber, dacayTime, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.decayTime = dacayTime / 64;
    channel.scheduledNotes.forEach((noteList) => {
@@ -1949,11 +1935,11 @@ class Midy {
        const note = noteList[i];
        if (!note)
          continue;
-        this.setVolumeEnvelope(channel, note,
+        this.setVolumeEnvelope(channel, note, scheduleTime);
      }
    });
  }
-  setVibratoRate(channelNumber, vibratoRate) {
+  setVibratoRate(channelNumber, vibratoRate, scheduleTime) {
    const channel = this.channels[channelNumber];
    channel.state.vibratoRate = vibratoRate / 64;
    if (channel.vibratoDepth <= 0)
@@ -1963,11 +1949,11 @@ class Midy {
        const note = noteList[i];
        if (!note)
          continue;
-        this.setVibLfoToPitch(channel, note);
+        this.setVibLfoToPitch(channel, note, scheduleTime);
      }
    });
  }
-  setVibratoDepth(channelNumber, vibratoDepth) {
+  setVibratoDepth(channelNumber, vibratoDepth, scheduleTime) {
    const channel = this.channels[channelNumber];
    const prev = channel.state.vibratoDepth;
    channel.state.vibratoDepth = vibratoDepth / 64;
@@ -1977,7 +1963,7 @@ class Midy {
        const note = noteList[i];
        if (!note)
          continue;
-        this.setFreqVibLFO(channel, note);
+        this.setFreqVibLFO(channel, note, scheduleTime);
      }
    });
  }
@@ -1987,7 +1973,7 @@ class Midy {
        const note = noteList[i];
        if (!note)
          continue;
-        this.startVibrato(channel, note,
+        this.startVibrato(channel, note, scheduleTime);
      }
    });
  }
@@ -2001,21 +1987,21 @@ class Midy {
  const note = noteList[i];
  if (!note)
  continue;
- this.startVibrato(channel, note,
+ this.startVibrato(channel, note, scheduleTime);
  }
  });
  }
  }
- setReverbSendLevel(channelNumber, reverbSendLevel) {
+ setReverbSendLevel(channelNumber, reverbSendLevel, scheduleTime) {
  const channel = this.channels[channelNumber];
  const state = channel.state;
  const reverbEffect = this.reverbEffect;
  if (0 < state.reverbSendLevel) {
  if (0 < reverbSendLevel) {
- const now = this.audioContext.currentTime;
  state.reverbSendLevel = reverbSendLevel / 127;
- reverbEffect.input.gain
-
+ reverbEffect.input.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(state.reverbSendLevel, scheduleTime);
  }
  else {
  channel.scheduledNotes.forEach((noteList) => {
@@ -2032,31 +2018,31 @@ class Midy {
  }
  else {
  if (0 < reverbSendLevel) {
- const now = this.audioContext.currentTime;
  channel.scheduledNotes.forEach((noteList) => {
  for (let i = 0; i < noteList.length; i++) {
  const note = noteList[i];
  if (!note)
  continue;
- this.setReverbEffectsSend(channel, note, 0);
+ this.setReverbEffectsSend(channel, note, 0, scheduleTime);
  }
  });
  state.reverbSendLevel = reverbSendLevel / 127;
- reverbEffect.input.gain
-
+ reverbEffect.input.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(state.reverbSendLevel, scheduleTime);
  }
  }
  }
- setChorusSendLevel(channelNumber, chorusSendLevel) {
+ setChorusSendLevel(channelNumber, chorusSendLevel, scheduleTime) {
  const channel = this.channels[channelNumber];
  const state = channel.state;
  const chorusEffect = this.chorusEffect;
  if (0 < state.chorusSendLevel) {
  if (0 < chorusSendLevel) {
- const now = this.audioContext.currentTime;
  state.chorusSendLevel = chorusSendLevel / 127;
- chorusEffect.input.gain
-
+ chorusEffect.input.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(state.chorusSendLevel, scheduleTime);
  }
  else {
  channel.scheduledNotes.forEach((noteList) => {
@@ -2073,18 +2059,18 @@ class Midy {
  }
  else {
  if (0 < chorusSendLevel) {
- const now = this.audioContext.currentTime;
  channel.scheduledNotes.forEach((noteList) => {
  for (let i = 0; i < noteList.length; i++) {
  const note = noteList[i];
  if (!note)
  continue;
- this.setChorusEffectsSend(channel, note, 0);
+ this.setChorusEffectsSend(channel, note, 0, scheduleTime);
  }
  });
  state.chorusSendLevel = chorusSendLevel / 127;
- chorusEffect.input.gain
-
+ chorusEffect.input.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(state.chorusSendLevel, scheduleTime);
  }
  }
  }
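
Both send-level setters now drive the effect input gain with the standard Web Audio two-step at the requested time: cancel any pending automation from scheduleTime onward, then pin the new value exactly there. The same pattern works on any AudioParam; a standalone sketch (plain Web Audio, not package code):

    const ctx = new AudioContext();
    const sendGain = new GainNode(ctx, { gain: 0 });
    // level is a 0-127 MIDI value, mapped to 0-1 the same way the diff does with "/ 127"
    function setSendLevel(level, when = ctx.currentTime) {
      sendGain.gain
        .cancelScheduledValues(when)
        .setValueAtTime(level / 127, when);
    }

cancelScheduledValues and setValueAtTime both return the AudioParam, which is why the calls chain.
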
@@ -2114,13 +2100,13 @@ class Midy {
  channel.dataMSB = minMSB;
  }
  }
- handleRPN(channelNumber, value) {
+ handleRPN(channelNumber, value, scheduleTime) {
  const channel = this.channels[channelNumber];
  const rpn = channel.rpnMSB * 128 + channel.rpnLSB;
  switch (rpn) {
  case 0:
  channel.dataLSB += value;
- this.handlePitchBendRangeRPN(channelNumber);
+ this.handlePitchBendRangeRPN(channelNumber, scheduleTime);
  break;
  case 1:
  channel.dataLSB += value;
@@ -2152,25 +2138,26 @@ class Midy {
  setRPNLSB(channelNumber, value) {
  this.channels[channelNumber].rpnLSB = value;
  }
- dataEntryMSB(channelNumber, value) {
+ dataEntryMSB(channelNumber, value, scheduleTime) {
  this.channels[channelNumber].dataMSB = value;
- this.handleRPN(channelNumber,
+ this.handleRPN(channelNumber, scheduleTime);
  }
- handlePitchBendRangeRPN(channelNumber) {
+ handlePitchBendRangeRPN(channelNumber, scheduleTime) {
  const channel = this.channels[channelNumber];
  this.limitData(channel, 0, 127, 0, 99);
  const pitchBendRange = channel.dataMSB + channel.dataLSB / 100;
- this.setPitchBendRange(channelNumber, pitchBendRange);
+ this.setPitchBendRange(channelNumber, pitchBendRange, scheduleTime);
  }
- setPitchBendRange(channelNumber, value) {
+ setPitchBendRange(channelNumber, value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  const state = channel.state;
  const prev = state.pitchWheelSensitivity;
  const next = value / 128;
  state.pitchWheelSensitivity = next;
  channel.detune += (state.pitchWheel * 2 - 1) * (next - prev) * 12800;
- this.updateChannelDetune(channel);
- this.applyVoiceParams(channel, 16);
+ this.updateChannelDetune(channel, scheduleTime);
+ this.applyVoiceParams(channel, 16, scheduleTime);
  }
  handleFineTuningRPN(channelNumber) {
  const channel = this.channels[channelNumber];
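
setPitchBendRange is one of several methods that now default the new parameter with scheduleTime ??= this.audioContext.currentTime, so callers that omit it keep the old apply-immediately behaviour. A reminder of the ??= semantics the default relies on (standalone sketch, not package code):

    const audioContext = new AudioContext();
    let scheduleTime;                          // undefined when the caller omits it
    scheduleTime ??= audioContext.currentTime; // assigned, because it was undefined
    let t = 0;                                 // an explicit time of 0 is a valid value
    t ??= audioContext.currentTime;            // t stays 0; ??= only fires on null/undefined

Because ??= ignores falsy-but-defined values, an explicit time of 0 (the start of the AudioContext timeline) is still respected, which a ||= default would silently overwrite.
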
@@ -2211,8 +2198,9 @@ class Midy {
  channel.modulationDepthRange = modulationDepthRange;
  this.updateModulation(channel);
  }
- allSoundOff(channelNumber) {
-
+ allSoundOff(channelNumber, _value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ return this.stopChannelNotes(channelNumber, 0, true, scheduleTime);
  }
  resetAllControllers(channelNumber) {
  const stateTypes = [
@@ -2240,8 +2228,9 @@ class Midy {
  channel[type] = this.constructor.channelSettings[type];
  }
  }
- allNotesOff(channelNumber) {
-
+ allNotesOff(channelNumber, _value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ return this.stopChannelNotes(channelNumber, 0, false, scheduleTime);
  }
  omniOff() {
  this.omni = false;
@@ -2255,16 +2244,16 @@ class Midy {
  polyOn() {
  this.mono = false;
  }
- handleUniversalNonRealTimeExclusiveMessage(data) {
+ handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime) {
  switch (data[2]) {
  case 8:
  switch (data[3]) {
  case 8:
  // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
- return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false);
+ return this.handleScaleOctaveTuning1ByteFormatSysEx(data, false, scheduleTime);
  case 9:
  // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
- return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false);
+ return this.handleScaleOctaveTuning2ByteFormatSysEx(data, false, scheduleTime);
  default:
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
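
allSoundOff and allNotesOff now take (channelNumber, _value, scheduleTime). The unused _value slot presumably keeps their shape aligned with other control-change handlers (CC 120 and CC 123 both carry a data byte that is meaningless here), and both fall back to the current time when no schedule time is passed. A hedged usage sketch (instance and variable names illustrative, not from the package):

    midy.allSoundOff(3);                        // cut channel 3 immediately
    midy.allNotesOff(3, undefined, barEndTime); // schedule a normal note-off for later

The boolean third argument they forward to stopChannelNotes (true for allSoundOff, false for allNotesOff) is presumably the flag that distinguishes the abrupt cut from a regular note-off.
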
@@ -2307,18 +2296,18 @@ class Midy {
  this.channels[9].bankMSB = 120;
  this.channels[9].bank = 120 * 128;
  }
- handleUniversalRealTimeExclusiveMessage(data) {
+ handleUniversalRealTimeExclusiveMessage(data, scheduleTime) {
  switch (data[2]) {
  case 4:
  switch (data[3]) {
  case 1:
- return this.handleMasterVolumeSysEx(data);
+ return this.handleMasterVolumeSysEx(data, scheduleTime);
  case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
- return this.handleMasterFineTuningSysEx(data);
+ return this.handleMasterFineTuningSysEx(data, scheduleTime);
  case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
- return this.handleMasterCoarseTuningSysEx(data);
+ return this.handleMasterCoarseTuningSysEx(data, scheduleTime);
  case 5: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca24.pdf
- return this.handleGlobalParameterControlSysEx(data);
+ return this.handleGlobalParameterControlSysEx(data, scheduleTime);
  default:
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
@@ -2326,10 +2315,10 @@ class Midy {
  case 8:
  switch (data[3]) {
  case 8: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
- return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true);
+ return this.handleScaleOctaveTuning1ByteFormatSysEx(data, true, scheduleTime);
  case 9:
  // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca21.pdf
- return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true);
+ return this.handleScaleOctaveTuning2ByteFormatSysEx(data, true, scheduleTime);
  default:
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
@@ -2349,7 +2338,7 @@ class Midy {
  case 10:
  switch (data[3]) {
  case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca23.pdf
- return this.handleKeyBasedInstrumentControlSysEx(data);
+ return this.handleKeyBasedInstrumentControlSysEx(data, scheduleTime);
  default:
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
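
These handlers receive the SysEx payload with the leading 0xF0 status byte already stripped: data[0] is the universal sub-ID (126 non-real-time, 127 real-time, as the handleSysEx switch later in this diff shows), data[2] and data[3] select the function, and the payload follows. A hedged sketch of scheduling a GM Master Volume message one second out (the 0x7f device ID in the second byte and the omission of the trailing 0xF7 are assumptions about the expected layout, not confirmed by this hunk):

    // Master Volume at full scale: 14-bit value 16383 -> LSB 0x7f, MSB 0x7f
    const masterVolume = new Uint8Array([0x7f, 0x7f, 0x04, 0x01, 0x7f, 0x7f]);
    midy.handleSysEx(masterVolume, midy.audioContext.currentTime + 1);

handleMasterVolumeSysEx (next hunk) decodes this as (data[5] * 128 + data[4]) / 16383 and applies it at the given scheduleTime.
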
@@ -2358,49 +2347,50 @@ class Midy {
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
- handleMasterVolumeSysEx(data) {
+ handleMasterVolumeSysEx(data, scheduleTime) {
  const volume = (data[5] * 128 + data[4]) / 16383;
- this.setMasterVolume(volume);
+ this.setMasterVolume(volume, scheduleTime);
  }
- setMasterVolume(volume) {
+ setMasterVolume(volume, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  if (volume < 0 && 1 < volume) {
  console.error("Master Volume is out of range");
  }
  else {
-
-
-
+ this.masterVolume.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(volume * volume, scheduleTime);
  }
  }
- handleMasterFineTuningSysEx(data) {
+ handleMasterFineTuningSysEx(data, scheduleTime) {
  const fineTuning = data[5] * 128 + data[4];
- this.setMasterFineTuning(fineTuning);
+ this.setMasterFineTuning(fineTuning, scheduleTime);
  }
- setMasterFineTuning(value) {
+ setMasterFineTuning(value, scheduleTime) {
  const prev = this.masterFineTuning;
  const next = (value - 8192) / 8.192; // cent
  this.masterFineTuning = next;
  channel.detune += next - prev;
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
- handleMasterCoarseTuningSysEx(data) {
+ handleMasterCoarseTuningSysEx(data, scheduleTime) {
  const coarseTuning = data[4];
- this.setMasterCoarseTuning(coarseTuning);
+ this.setMasterCoarseTuning(coarseTuning, scheduleTime);
  }
- setMasterCoarseTuning(value) {
+ setMasterCoarseTuning(value, scheduleTime) {
  const prev = this.masterCoarseTuning;
  const next = (value - 64) * 100; // cent
  this.masterCoarseTuning = next;
  channel.detune += next - prev;
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
- handleGlobalParameterControlSysEx(data) {
+ handleGlobalParameterControlSysEx(data, scheduleTime) {
  if (data[7] === 1) {
  switch (data[8]) {
  case 1:
  return this.handleReverbParameterSysEx(data);
  case 2:
- return this.handleChorusParameterSysEx(data);
+ return this.handleChorusParameterSysEx(data, scheduleTime);
  default:
  console.warn(`Unsupported Global Parameter Control Message: ${data}`);
  }
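
setMasterVolume squares the normalized value before it reaches the gain node, presumably as a simple perceptual taper: half volume on the MIDI side becomes a quarter of the linear gain. A plain-arithmetic check of the decode path shown above (not package code):

    const data = [0x7f, 0x7f, 0x04, 0x01, 0x00, 0x40]; // MSB 0x40, LSB 0x00
    const volume = (data[5] * 128 + data[4]) / 16383;  // 8192 / 16383 ≈ 0.50003
    const gain = volume * volume;                      // ≈ 0.25, the value setValueAtTime receives

The fine and coarse tuning handlers follow the same shape: convert the data bytes to cents ((value - 8192) / 8.192 and (value - 64) * 100 respectively), fold the delta into channel.detune, and let updateChannelDetune apply it at scheduleTime.
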
@@ -2479,88 +2469,84 @@ class Midy {
  calcDelay(rt60, feedback) {
  return -rt60 * Math.log10(feedback) / 3;
  }
- handleChorusParameterSysEx(data) {
+ handleChorusParameterSysEx(data, scheduleTime) {
  switch (data[9]) {
  case 0:
- return this.setChorusType(data[10]);
+ return this.setChorusType(data[10], scheduleTime);
  case 1:
- return this.setChorusModRate(data[10]);
+ return this.setChorusModRate(data[10], scheduleTime);
  case 2:
- return this.setChorusModDepth(data[10]);
+ return this.setChorusModDepth(data[10], scheduleTime);
  case 3:
- return this.setChorusFeedback(data[10]);
+ return this.setChorusFeedback(data[10], scheduleTime);
  case 4:
- return this.setChorusSendToReverb(data[10]);
+ return this.setChorusSendToReverb(data[10], scheduleTime);
  }
  }
- setChorusType(type) {
+ setChorusType(type, scheduleTime) {
  switch (type) {
  case 0:
- return this.setChorusParameter(3, 5, 0, 0);
+ return this.setChorusParameter(3, 5, 0, 0, scheduleTime);
  case 1:
- return this.setChorusParameter(9, 19, 5, 0);
+ return this.setChorusParameter(9, 19, 5, 0, scheduleTime);
  case 2:
- return this.setChorusParameter(3, 19, 8, 0);
+ return this.setChorusParameter(3, 19, 8, 0, scheduleTime);
  case 3:
- return this.setChorusParameter(9, 16, 16, 0);
+ return this.setChorusParameter(9, 16, 16, 0, scheduleTime);
  case 4:
- return this.setChorusParameter(2, 24, 64, 0);
+ return this.setChorusParameter(2, 24, 64, 0, scheduleTime);
  case 5:
- return this.setChorusParameter(1, 5, 112, 0);
+ return this.setChorusParameter(1, 5, 112, 0, scheduleTime);
  default:
  console.warn(`Unsupported Chorus Type: ${type}`);
  }
  }
- setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
- this.setChorusModRate(modRate);
- this.setChorusModDepth(modDepth);
- this.setChorusFeedback(feedback);
- this.setChorusSendToReverb(sendToReverb);
+ setChorusParameter(modRate, modDepth, feedback, sendToReverb, scheduleTime) {
+ this.setChorusModRate(modRate, scheduleTime);
+ this.setChorusModDepth(modDepth, scheduleTime);
+ this.setChorusFeedback(feedback, scheduleTime);
+ this.setChorusSendToReverb(sendToReverb, scheduleTime);
  }
- setChorusModRate(value) {
- const now = this.audioContext.currentTime;
+ setChorusModRate(value, scheduleTime) {
  const modRate = this.getChorusModRate(value);
  this.chorus.modRate = modRate;
- this.chorusEffect.lfo.frequency.setValueAtTime(modRate,
+ this.chorusEffect.lfo.frequency.setValueAtTime(modRate, scheduleTime);
  }
  getChorusModRate(value) {
  return value * 0.122; // Hz
  }
- setChorusModDepth(value) {
- const now = this.audioContext.currentTime;
+ setChorusModDepth(value, scheduleTime) {
  const modDepth = this.getChorusModDepth(value);
  this.chorus.modDepth = modDepth;
  this.chorusEffect.lfoGain.gain
- .cancelScheduledValues(
- .setValueAtTime(modDepth / 2,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(modDepth / 2, scheduleTime);
  }
  getChorusModDepth(value) {
  return (value + 1) / 3200; // second
  }
- setChorusFeedback(value) {
- const now = this.audioContext.currentTime;
+ setChorusFeedback(value, scheduleTime) {
  const feedback = this.getChorusFeedback(value);
  this.chorus.feedback = feedback;
  const chorusEffect = this.chorusEffect;
  for (let i = 0; i < chorusEffect.feedbackGains.length; i++) {
  chorusEffect.feedbackGains[i].gain
- .cancelScheduledValues(
- .setValueAtTime(feedback,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(feedback, scheduleTime);
  }
  }
  getChorusFeedback(value) {
  return value * 0.00763;
  }
- setChorusSendToReverb(value) {
+ setChorusSendToReverb(value, scheduleTime) {
  const sendToReverb = this.getChorusSendToReverb(value);
  const sendGain = this.chorusEffect.sendGain;
  if (0 < this.chorus.sendToReverb) {
  this.chorus.sendToReverb = sendToReverb;
  if (0 < sendToReverb) {
- const now = this.audioContext.currentTime;
  sendGain.gain
- .cancelScheduledValues(
- .setValueAtTime(sendToReverb,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(sendToReverb, scheduleTime);
  }
  else {
  sendGain.disconnect();
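
setChorusType expands each preset number into the raw value bytes above, and the individual setters convert those with the formulas shown: value * 0.122 Hz for the LFO rate, (value + 1) / 3200 seconds for the modulation depth, and value * 0.00763 for the feedback gain (sendToReverb is 0 for every preset in this table). Working that through for chorus type 2 (plain arithmetic, not package code):

    const [rate, depth, feedback] = [3, 19, 8];   // from setChorusParameter(3, 19, 8, 0, ...)
    const modRateHz = rate * 0.122;               // 0.366 Hz LFO rate
    const modDepthSec = (depth + 1) / 3200;       // 0.00625 s of delay modulation
    const feedbackGain = feedback * 0.00763;      // ≈ 0.061 applied to each feedback gain node

With scheduleTime threaded through, a chorus preset change requested from a SysEx event lands on the same audio-clock instant as the notes it should colour.
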
@@ -2569,11 +2555,10 @@ class Midy {
  else {
  this.chorus.sendToReverb = sendToReverb;
  if (0 < sendToReverb) {
- const now = this.audioContext.currentTime;
  sendGain.connect(this.reverbEffect.input);
  sendGain.gain
- .cancelScheduledValues(
- .setValueAtTime(sendToReverb,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(sendToReverb, scheduleTime);
  }
  }
  }
@@ -2599,7 +2584,7 @@ class Midy {
  }
  return bitmap;
  }
- handleScaleOctaveTuning1ByteFormatSysEx(data, realtime) {
+ handleScaleOctaveTuning1ByteFormatSysEx(data, realtime, scheduleTime) {
  if (data.length < 19) {
  console.error("Data length is too short");
  return;
@@ -2614,10 +2599,10 @@ class Midy {
  channel.scaleOctaveTuningTable[j] = centValue;
  }
  if (realtime)
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
  }
- handleScaleOctaveTuning2ByteFormatSysEx(data, realtime) {
+ handleScaleOctaveTuning2ByteFormatSysEx(data, realtime, scheduleTime) {
  if (data.length < 31) {
  console.error("Data length is too short");
  return;
@@ -2636,66 +2621,66 @@ class Midy {
  channel.scaleOctaveTuningTable[j] = centValue;
  }
  if (realtime)
- this.updateChannelDetune(channel);
- }
- }
-
-
-
-
-
-
-
-
+ this.updateChannelDetune(channel, scheduleTime);
+ }
+ }
+ getPitchControl(channel, note) {
+ const polyphonicKeyPressure = (channel.polyphonicKeyPressureTable[0] - 64) *
+ note.pressure;
+ return polyphonicKeyPressure * note.pressure / 37.5; // 2400 / 64;
+ }
+ getFilterCutoffControl(channel, note) {
+ const channelPressure = (channel.channelPressureTable[1] - 64) *
+ channel.state.channelPressure;
+ const polyphonicKeyPressure = (channel.polyphonicKeyPressureTable[1] - 64) *
+ note.pressure;
+ return (channelPressure + polyphonicKeyPressure) * 15;
+ }
+ getAmplitudeControl(channel, note) {
+ const channelPressure = channel.channelPressureTable[2] *
+ channel.state.channelPressure;
+ const polyphonicKeyPressure = channel.polyphonicKeyPressureTable[2] *
+ note.pressure;
+ return (channelPressure + polyphonicKeyPressure) / 128;
+ }
+ getLFOPitchDepth(channel, note) {
+ const channelPressure = channel.channelPressureTable[3] *
+ channel.state.channelPressure;
+ const polyphonicKeyPressure = channel.polyphonicKeyPressureTable[3] *
+ note.pressure;
+ return (channelPressure + polyphonicKeyPressure) / 254 * 600;
+ }
+ getLFOFilterDepth(channel, note) {
+ const channelPressure = channel.channelPressureTable[4] *
+ channel.state.channelPressure;
+ const polyphonicKeyPressure = channel.polyphonicKeyPressureTable[4] *
+ note.pressure;
+ return (channelPressure + polyphonicKeyPressure) / 254 * 2400;
+ }
+ getLFOAmplitudeDepth(channel, note) {
+ const channelPressure = channel.channelPressureTable[5] *
+ channel.state.channelPressure;
+ const polyphonicKeyPressure = channel.polyphonicKeyPressureTable[5] *
+ note.pressure;
+ return (channelPressure + polyphonicKeyPressure) / 254;
+ }
+ setControllerParameters(channel, note, table) {
+ if (table[0] !== 64)
+ this.updateDetune(channel, note);
  if (!note.portamento) {
- if (table[1] !== 64)
-
-
-
-
-
-
-
-
-
-
-
-
- ? channel.polyphonicKeyPressureTable[2] * note.pressure
- : 0;
- const pressure = (channelPressure + polyphonicKeyPressure) / 128;
- this.setVolumeEnvelope(channel, note, pressure);
- }
- }
- if (table[3] !== 0) {
- const channelPressure = channel.channelPressureTable[3] *
- channel.state.channelPressure;
- const polyphonicKeyPressure = (0 < note.pressure)
- ? channel.polyphonicKeyPressureTable[3] * note.pressure
- : 0;
- const pressure = (channelPressure + polyphonicKeyPressure) / 254 * 600;
- this.setModLfoToPitch(channel, note, pressure);
- }
- if (table[4] !== 0) {
- const channelPressure = channel.channelPressureTable[4] *
- channel.state.channelPressure;
- const polyphonicKeyPressure = (0 < note.pressure)
- ? channel.polyphonicKeyPressureTable[4] * note.pressure
- : 0;
- const pressure = (channelPressure + polyphonicKeyPressure) / 254 * 2400;
- this.setModLfoToFilterFc(note, pressure);
- }
- if (table[5] !== 0) {
- const channelPressure = channel.channelPressureTable[5] *
- channel.state.channelPressure;
- const polyphonicKeyPressure = (0 < note.pressure)
- ? channel.polyphonicKeyPressureTable[5] * note.pressure
- : 0;
- const pressure = (channelPressure + polyphonicKeyPressure) / 254;
- this.setModLfoToVolume(note, pressure);
- }
- }
- handleChannelPressureSysEx(data, tableName) {
+ if (table[1] !== 64)
+ this.setFilterEnvelope(channel, note);
+ if (table[2] !== 64)
+ this.setVolumeEnvelope(channel, note);
+ }
+ if (table[3] !== 0)
+ this.setModLfoToPitch(channel, note);
+ if (table[4] !== 0)
+ this.setModLfoToFilterFc(channel, note);
+ if (table[5] !== 0)
+ this.setModLfoToVolume(channel, note);
+ }
+ handlePressureSysEx(data, tableName) {
  const channelNumber = data[4];
  const table = this.channels[channelNumber][tableName];
  for (let i = 5; i < data.length - 1; i += 2) {
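
The inline per-destination math removed here reappears as the named getters added above. Slots 0-2 of the pressure tables (pitch, filter cutoff, amplitude) are centred on 64 and slots 3-5 (LFO pitch, filter, and amplitude depth) default to 0, matching the Uint8Array([64, 64, 64, 0, 0, 0]) defaults removed at the end of this file; setControllerParameters then only re-triggers a voice parameter when its slot has moved off the default. A standalone restatement of one getter to make the scaling explicit (illustrative only; pressures assumed to be stored normalized to 0-1):

    // Mirrors getLFOPitchDepth: table entries are 0-127, output is in cents.
    function lfoPitchDepthCents(chanTableValue, chanPressure, keyTableValue, keyPressure) {
      const combined = chanTableValue * chanPressure + keyTableValue * keyPressure;
      return combined / 254 * 600; // full deflection (127 + 127) / 254 * 600 = 600 cents
    }
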
@@ -2724,7 +2709,7 @@ class Midy {
  const note = noteList[i];
  if (!note)
  continue;
- this.
+ this.setControllerParameters(channel, note, table);
  }
  });
  }
@@ -2743,7 +2728,7 @@ class Midy {
  const controlValue = channel.keyBasedInstrumentControlTable[index];
  return (controlValue + 64) / 64;
  }
- handleKeyBasedInstrumentControlSysEx(data) {
+ handleKeyBasedInstrumentControlSysEx(data, scheduleTime) {
  const channelNumber = data[4];
  const keyNumber = data[5];
  const table = this.channels[channelNumber].keyBasedInstrumentControlTable;
@@ -2753,30 +2738,27 @@ class Midy {
  const index = keyNumber * 128 + controllerType;
  table[index] = value - 64;
  }
- this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127);
- }
- handleExclusiveMessage(data) {
- console.warn(`Unsupported Exclusive Message: ${data}`);
+ this.handleChannelPressure(channelNumber, channel.state.channelPressure * 127, scheduleTime);
  }
- handleSysEx(data) {
+ handleSysEx(data, scheduleTime) {
  switch (data[0]) {
  case 126:
- return this.handleUniversalNonRealTimeExclusiveMessage(data);
+ return this.handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime);
  case 127:
- return this.handleUniversalRealTimeExclusiveMessage(data);
+ return this.handleUniversalRealTimeExclusiveMessage(data, scheduleTime);
  default:
-
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
- scheduleTask(callback,
+ scheduleTask(callback, scheduleTime) {
  return new Promise((resolve) => {
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.onended = () => {
  callback();
  resolve();
  };
- bufferSource.start(
- bufferSource.stop(
+ bufferSource.start(scheduleTime);
+ bufferSource.stop(scheduleTime);
  });
  }
  }
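
scheduleTask shows a common Web Audio trick for running a JavaScript callback at a point on the audio clock: start and stop a bufferless AudioBufferSourceNode at scheduleTime and let its ended event act as the timer, which follows the AudioContext timeline more closely than setTimeout. A standalone sketch of the same idea (not package code):

    function atAudioTime(audioContext, time, callback) {
      const source = new AudioBufferSourceNode(audioContext); // no buffer assigned: plays silence
      source.onended = () => callback();
      source.start(time);
      source.stop(time);
    }

    // e.g. fire a UI update in step with a note scheduled two seconds from now:
    // atAudioTime(ctx, ctx.currentTime + 2, () => console.log("beat"));

The callback still runs on the main thread, so it can be late under load, but it will not fire early.
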
@@ -2788,10 +2770,6 @@ Object.defineProperty(Midy, "channelSettings", {
  value: {
  currentBufferSource: null,
  detune: 0,
- scaleOctaveTuningTable: new Float32Array(12), // [-100, 100] cent
- channelPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
- polyphonicKeyPressureTable: new Uint8Array([64, 64, 64, 0, 0, 0]),
- keyBasedInstrumentControlTable: new Int8Array(128 * 128), // [-64, 63]
  program: 0,
  bank: 121 * 128,
  bankMSB: 121,