@marmooo/midy 0.0.9 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/midy-GM1.d.ts +0 -2
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +5 -9
- package/esm/midy-GM2.d.ts +46 -24
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +301 -92
- package/esm/midy-GMLite.js +5 -5
- package/esm/midy.d.ts +46 -24
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +300 -91
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +0 -2
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +5 -9
- package/script/midy-GM2.d.ts +46 -24
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +301 -92
- package/script/midy-GMLite.js +5 -5
- package/script/midy.d.ts +46 -24
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +300 -91
package/script/midy.js
CHANGED
@@ -67,12 +67,6 @@ class Midy {
             writable: true,
             value: 0
         });
-        Object.defineProperty(this, "reverbFactor", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: 0.1
-        });
         Object.defineProperty(this, "masterFineTuning", {
             enumerable: true,
             configurable: true,
@@ -85,6 +79,27 @@ class Midy {
             writable: true,
             value: 0
         }); // cb
+        Object.defineProperty(this, "reverb", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: {
+                time: this.getReverbTime(64),
+                feedback: 0.25,
+            }
+        });
+        Object.defineProperty(this, "chorus", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: {
+                modRate: this.getChorusModRate(3),
+                modDepth: this.getChorusModDepth(19),
+                feedback: this.getChorusFeedback(8),
+                sendToReverb: this.getChorusSendToReverb(0),
+                delayTimes: this.generateDistributedArray(0.02, 2, 0.5),
+            }
+        });
         Object.defineProperty(this, "mono", {
             enumerable: true,
             configurable: true,
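The numeric arguments above are 7-bit GM2 parameter values; the conversion helpers added later in this diff map them to physical units. A rough sketch of the resulting defaults (not part of the package):

// Defaults expressed in physical units, using the getters added further down.
const reverbTime = Math.pow(Math.E, (64 - 40) * 0.025); // getReverbTime(64) ≈ 1.82 s
const chorusModRate = 3 * 0.122;                        // getChorusModRate(3) ≈ 0.37 Hz
const chorusModDepth = (19 + 1) / 3200;                 // getChorusModDepth(19) = 0.00625 s
const chorusFeedback = 8 * 0.00763;                     // getChorusFeedback(8) ≈ 0.061
const chorusSendToReverb = 0 * 0.00787;                 // getChorusSendToReverb(0) = 0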
@@ -193,8 +208,19 @@ class Midy {
             writable: true,
             value: {
                 reverbAlgorithm: (audioContext) => {
-
-
+                    const { time: rt60, feedback } = this.reverb;
+                    // const delay = this.calcDelay(rt60, feedback);
+                    // const impulse = this.createConvolutionReverbImpulse(
+                    //   audioContext,
+                    //   rt60,
+                    //   delay,
+                    // );
+                    // return this.createConvolutionReverb(audioContext, impulse);
+                    const combFeedbacks = this.generateDistributedArray(feedback, 4);
+                    const combDelays = combFeedbacks.map((feedback) => this.calcDelay(rt60, feedback));
+                    const allpassFeedbacks = this.generateDistributedArray(feedback, 4);
+                    const allpassDelays = allpassFeedbacks.map((feedback) => this.calcDelay(rt60, feedback));
+                    return this.createSchroederReverb(audioContext, combFeedbacks, combDelays, allpassFeedbacks, allpassDelays);
                 },
             }
         });
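Since `options.reverbAlgorithm` is re-invoked whenever the reverb is reconfigured (see `setReverbType()`/`setReverbTime()` later in this diff), it can be swapped for the convolution-based path that the default keeps commented out. A minimal sketch, assuming `midy` is an already constructed instance and that `options` is mutable as declared above:

midy.options.reverbAlgorithm = (audioContext) => {
    const { time: rt60, feedback } = midy.reverb;
    const delay = midy.calcDelay(rt60, feedback);
    const impulse = midy.createConvolutionReverbImpulse(audioContext, rt60, delay);
    return midy.createConvolutionReverb(audioContext, impulse);
};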
@@ -599,12 +625,7 @@ class Midy {
         }
         return noteList[0];
     }
-
-        const { decay = 0.8, preDecay = 0, } = options;
-        const input = new GainNode(audioContext);
-        const output = new GainNode(audioContext);
-        const dryGain = new GainNode(audioContext);
-        const wetGain = new GainNode(audioContext);
+    createConvolutionReverbImpulse(audioContext, decay, preDecay) {
         const sampleRate = audioContext.sampleRate;
         const length = sampleRate * decay;
         const impulse = new AudioBuffer({
@@ -624,18 +645,17 @@ class Midy {
                 channelData[i] = (Math.random() * 2 - 1) * attenuation;
             }
         }
+        return impulse;
+    }
+    createConvolutionReverb(audioContext, impulse) {
+        const output = new GainNode(audioContext);
         const convolverNode = new ConvolverNode(audioContext, {
             buffer: impulse,
         });
-
-        convolverNode.connect(wetGain);
-        wetGain.connect(output);
-        dryGain.connect(output);
+        convolverNode.connect(output);
         return {
-            input,
+            input: convolverNode,
             output,
-            dryGain,
-            wetGain,
             convolverNode,
         };
     }
@@ -663,17 +683,22 @@ class Midy {
         delayNode.connect(passGain);
         return passGain;
     }
+    generateDistributedArray(center, count, varianceRatio = 0.1, randomness = 0.05) {
+        const variance = center * varianceRatio;
+        const array = new Array(count);
+        for (let i = 0; i < count; i++) {
+            const fraction = i / (count - 1 || 1);
+            const value = center - variance + fraction * 2 * variance;
+            array[i] = value * (1 - (Math.random() * 2 - 1) * randomness);
+        }
+        return array;
+    }
     // https://hajim.rochester.edu/ece/sites/zduan/teaching/ece472/reading/Schroeder_1962.pdf
     // M.R.Schroeder, "Natural Sounding Artificial Reverberation", J.Audio Eng. Soc., vol.10, p.219, 1962
-    createSchroederReverb(audioContext,
-        const { combDelays = [0.31, 0.34, 0.37, 0.40], combFeedbacks = [0.86, 0.87, 0.88, 0.89], allpassDelays = [0.02, 0.05], allpassFeedbacks = [0.7, 0.7], mix = 0.5, } = options;
+    createSchroederReverb(audioContext, combDelays, combFeedbacks, allpassDelays, allpassFeedbacks) {
         const input = new GainNode(audioContext);
         const output = new GainNode(audioContext);
-        const mergerGain = new GainNode(audioContext
-            gain: 1 / (combDelays.length * 2),
-        });
-        const dryGain = new GainNode(audioContext, { gain: 1 - mix });
-        const wetGain = new GainNode(audioContext, { gain: mix });
+        const mergerGain = new GainNode(audioContext);
         for (let i = 0; i < combDelays.length; i++) {
             const comb = this.createCombFilter(audioContext, input, combDelays[i], combFeedbacks[i]);
             comb.connect(mergerGain);
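For reference, `generateDistributedArray(center, count)` spreads `count` values evenly across `center ± varianceRatio * center` and jitters each by up to `± randomness`. A quick sketch of typical output (assuming `midy` is a constructed instance; values are approximate since the jitter is random):

const combFeedbacks = midy.generateDistributedArray(0.25, 4);
// ≈ [0.225, 0.242, 0.258, 0.275], each jittered by up to ±5%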
@@ -683,68 +708,65 @@ class Midy {
             const allpass = this.createAllpassFilter(audioContext, (i === 0) ? mergerGain : allpasses.at(-1), allpassDelays[i], allpassFeedbacks[i]);
             allpasses.push(allpass);
         }
-        allpasses.at(-1).connect(
-        input
-        dryGain.connect(output);
-        wetGain.connect(output);
-        return { input, output, dryGain, wetGain };
+        allpasses.at(-1).connect(output);
+        return { input, output };
     }
-    createChorusEffect(audioContext
-        const
-        const lfo = new OscillatorNode(audioContext, { frequency: chorusRate });
-        const lfoGain = new GainNode(audioContext, { gain: chorusDepth });
+    createChorusEffect(audioContext) {
+        const input = new GainNode(audioContext);
         const output = new GainNode(audioContext);
-        const
+        const sendGain = new GainNode(audioContext);
+        const lfo = new OscillatorNode(audioContext, {
+            frequency: this.chorus.modRate,
+        });
+        const lfoGain = new GainNode(audioContext, {
+            gain: this.chorus.modDepth / 2,
+        });
+        const delayTimes = this.chorus.delayTimes;
         const delayNodes = [];
-        const
-        for (let i = 0; i <
-        const
-        const delayTime = (i + 1) * delay + randomDelayFactor;
+        const feedbackGains = [];
+        for (let i = 0; i < delayTimes.length; i++) {
+            const delayTime = delayTimes[i];
             const delayNode = new DelayNode(audioContext, {
-                maxDelayTime: delayTime
+                maxDelayTime: 0.1, // generally, 5ms < delayTime < 50ms
+                delayTime,
+            });
+            const feedbackGain = new GainNode(audioContext, {
+                gain: this.chorus.feedback,
             });
-            const chorusGain = new GainNode(audioContext, { gain: baseGain });
             delayNodes.push(delayNode);
-
+            feedbackGains.push(feedbackGain);
+            input.connect(delayNode);
             lfoGain.connect(delayNode.delayTime);
-            delayNode.connect(
-
+            delayNode.connect(feedbackGain);
+            feedbackGain.connect(delayNode);
+            delayNode.connect(output);
         }
+        output.connect(sendGain);
         lfo.connect(lfoGain);
         lfo.start();
         return {
+            input,
+            output,
+            sendGain,
             lfo,
             lfoGain,
             delayNodes,
-
-            output,
+            feedbackGains,
         };
     }
     connectEffects(channel, gainNode) {
         gainNode.connect(channel.merger);
-
-
-
-
-        else { // chorus
-            channel.chorusEffect.delayNodes.forEach((delayNode) => {
-                channel.merger.connect(delayNode);
-            });
-            channel.chorusEffect.output.connect(this.masterGain);
-        }
+        channel.merger.connect(this.masterGain);
+        if (0 < channel.reverbSendLevel) {
+            channel.merger.connect(channel.reverbEffect.input);
+            channel.reverbEffect.output.connect(this.masterGain);
         }
-
-
-
-
-
-
-        channel.chorusEffect.delayNodes.forEach((delayNode) => {
-            channel.merger.connect(delayNode);
-        });
-        channel.merger.connect(channel.reverbEffect.input);
-        channel.reverbEffect.output.connect(this.masterGain);
-        }
+        if (0 < channel.chorusSendLevel) {
+            channel.merger.connect(channel.chorusEffect.input);
+            channel.reverbEffect.output.connect(this.masterGain);
+        }
+        if (0 < this.chorus.sendToReverb) {
+            channel.chorusEffect.sendGain.connect(channel.reverbEffect.input);
         }
     }
     cbToRatio(cb) {
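The per-voice routing built by `createChorusEffect()` and wired up in `connectEffects()` can be summarized as follows (a sketch of the graph, not code from the package):

// For each delayTime in this.chorus.delayTimes:
//   input -> delayNode -> output -> sendGain
//   delayNode -> feedbackGain -> delayNode      (feedback loop)
//   lfo -> lfoGain -> delayNode.delayTime       (delay-time modulation)
// connectEffects() then taps channel.merger into reverbEffect.input and
// chorusEffect.input when the corresponding send level is above zero, and
// routes chorusEffect.sendGain into reverbEffect.input when
// chorus.sendToReverb is above zero.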
@@ -1190,23 +1212,25 @@ class Midy {
             this.releaseSustainPedal(channelNumber, value);
         }
     }
+    // TODO
     setPortamento(channelNumber, value) {
         this.channels[channelNumber].portamento = value >= 64;
     }
-    setReverbSendLevel(channelNumber,
+    setReverbSendLevel(channelNumber, reverbSendLevel) {
         const now = this.audioContext.currentTime;
         const channel = this.channels[channelNumber];
         const reverbEffect = channel.reverbEffect;
-        channel.
-        reverbEffect.
-        reverbEffect.
-        reverbEffect.wetGain.gain.cancelScheduledValues(now);
-        reverbEffect.wetGain.gain.setValueAtTime(channel.reverb, now);
+        channel.reverbSendLevel = reverbSendLevel / 127;
+        reverbEffect.output.gain.cancelScheduledValues(now);
+        reverbEffect.output.gain.setValueAtTime(channel.reverbSendLevel, now);
     }
-    setChorusSendLevel(channelNumber,
+    setChorusSendLevel(channelNumber, chorusSendLevel) {
+        const now = this.audioContext.currentTime;
         const channel = this.channels[channelNumber];
-
-        channel.
+        const chorusEffect = channel.chorusEffect;
+        channel.chorusSendLevel = chorusSendLevel / 127;
+        chorusEffect.output.gain.cancelScheduledValues(now);
+        chorusEffect.output.gain.setValueAtTime(channel.chorusSendLevel, now);
     }
     setSostenutoPedal(channelNumber, value) {
         const isOn = value >= 64;
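Both setters map the 7-bit controller value linearly onto the effect's output gain; in GM these levels are normally driven by CC#91 (reverb send) and CC#93 (chorus send), though that routing is outside this hunk. A small usage sketch (assuming `midy` is a constructed instance):

midy.setReverbSendLevel(0, 40);  // channel 0: reverbEffect.output gain = 40 / 127 ≈ 0.31
midy.setChorusSendLevel(0, 127); // channel 0: chorusEffect.output gain = 127 / 127 = 1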
@@ -1430,11 +1454,11 @@ class Midy {
                         this.GM2SystemOn();
                         break;
                     default:
-                        console.warn(`Unsupported Exclusive Message ${data}`);
+                        console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
                 break;
             default:
-                console.warn(`Unsupported Exclusive Message ${data}`);
+                console.warn(`Unsupported Exclusive Message: ${data}`);
         }
     }
     GM1SystemOn() {
@@ -1465,9 +1489,10 @@ class Midy {
                         return this.handleMasterFineTuningSysEx(data);
                     case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
                         return this.handleMasterCoarseTuningSysEx(data);
-
+                    case 5:
+                        return this.handleGlobalParameterControlSysEx(data);
                     default:
-                        console.warn(`Unsupported Exclusive Message ${data}`);
+                        console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
                 break;
             case 8:
@@ -1476,7 +1501,7 @@ class Midy {
                     // // TODO
                     // return this.handleScaleOctaveTuning1ByteFormat();
                     default:
-                        console.warn(`Unsupported Exclusive Message ${data}`);
+                        console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
                 break;
             case 9:
@@ -1488,7 +1513,7 @@ class Midy {
                     // // TODO
                     // return this.setControlChange();
                     default:
-                        console.warn(`Unsupported Exclusive Message ${data}`);
+                        console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
                 break;
             case 10:
@@ -1497,11 +1522,11 @@ class Midy {
                     // // TODO
                     // return this.handleKeyBasedInstrumentControl();
                     default:
-                        console.warn(`Unsupported Exclusive Message ${data}`);
+                        console.warn(`Unsupported Exclusive Message: ${data}`);
                 }
                 break;
             default:
-                console.warn(`Unsupported Exclusive Message ${data}`);
+                console.warn(`Unsupported Exclusive Message: ${data}`);
         }
     }
     handleMasterVolumeSysEx(data) {
@@ -1542,8 +1567,192 @@ class Midy {
             this.masterCoarseTuning = coarseTuning - 64;
         }
     }
+    handleGlobalParameterControlSysEx(data) {
+        if (data[7] === 1) {
+            switch (data[8]) {
+                case 1:
+                    return this.handleReverbParameterSysEx(data);
+                case 2:
+                    return this.handleChorusParameterSysEx(data);
+                default:
+                    console.warn(`Unsupported Global Parameter Control Message: ${data}`);
+            }
+        }
+        else {
+            console.warn(`Unsupported Global Parameter Control Message: ${data}`);
+        }
+    }
+    handleReverbParameterSysEx(data) {
+        switch (data[9]) {
+            case 0:
+                return this.setReverbType(data[10]);
+            case 1:
+                return this.setReverbTime(data[10]);
+        }
+    }
+    setReverbType(type) {
+        this.reverb.time = this.getReverbTimeFromType(type);
+        this.reverb.feedback = (type === 8) ? 0.1 : 0.2;
+        const { audioContext, channels, options } = this;
+        for (let i = 0; i < channels.length; i++) {
+            channels[i].reverbEffect = options.reverbAlgorithm(audioContext);
+        }
+    }
+    getReverbTimeFromType(type) {
+        switch (type) {
+            case 0:
+                return this.getReverbTime(44);
+            case 1:
+                return this.getReverbTime(50);
+            case 2:
+                return this.getReverbTime(56);
+            case 3:
+                return this.getReverbTime(64);
+            case 4:
+                return this.getReverbTime(64);
+            case 8:
+                return this.getReverbTime(50);
+            default:
+                console.warn(`Unsupported Reverb Time: ${type}`);
+        }
+    }
+    setReverbTime(value) {
+        this.reverb.time = this.getReverbTime(value);
+        const { audioContext, channels, options } = this;
+        for (let i = 0; i < channels.length; i++) {
+            channels[i].reverbEffect = options.reverbAlgorithm(audioContext);
+        }
+    }
+    getReverbTime(value) {
+        return Math.pow(Math.E, (value - 40) * 0.025);
+    }
+    // mean free path equation
+    // https://repository.dl.itc.u-tokyo.ac.jp/record/8550/files/A31912.pdf
+    // 江田和司, 拡散性制御に基づく室内音響設計に向けた音場解析に関する研究, 2015
+    // V: room size (m^3)
+    // S: room surface area (m^2)
+    // meanFreePath = 4V / S (m)
+    // delay estimation using mean free path
+    // t: degree Celsius, generally used 20
+    // c: speed of sound = 331.5 + 0.61t = 331.5 * 0.61 * 20 = 343.7 (m/s)
+    // delay = meanFreePath / c (s)
+    // feedback equation
+    // RT60 means that the energy is reduced to Math.pow(10, -6).
+    // Since energy is proportional to the square of the amplitude,
+    // the amplitude is reduced to Math.pow(10, -3).
+    // When this is done through n feedbacks,
+    // Math.pow(feedback, n) = Math.pow(10, -3)
+    // Math.pow(feedback, RT60 / delay) = Math.pow(10, -3)
+    // RT60 / delay * Math.log10(feedback) = -3
+    // RT60 = -3 * delay / Math.log10(feedback)
+    // feedback = Math.pow(10, -3 * delay / RT60)
+    // delay estimation using ideal feedback
+    // The structure of a concert hall is complex,
+    // so estimates based on mean free path are unstable.
+    // It is easier to determine the delay based on ideal feedback.
+    // The average sound absorption coefficient
+    // suitable for playing musical instruments is 0.18 to 0.28.
+    // delay = -RT60 * Math.log10(feedback) / 3
+    calcDelay(rt60, feedback) {
+        return -rt60 * Math.log10(feedback) / 3;
+    }
+    handleChorusParameterSysEx(data) {
+        switch (data[9]) {
+            case 0:
+                return this.setChorusType(data[10]);
+            case 1:
+                return this.setChorusModRate(data[10]);
+            case 2:
+                return this.setChorusModDepth(data[10]);
+            case 3:
+                return this.setChorusFeedback(data[10]);
+            case 4:
+                return this.setChorusSendToReverb(data[10]);
+        }
+    }
+    setChorusType(type) {
+        switch (type) {
+            case 0:
+                return this.setChorusParameter(3, 5, 0, 0);
+            case 1:
+                return this.setChorusParameter(9, 19, 5, 0);
+            case 2:
+                return this.setChorusParameter(3, 19, 8, 0);
+            case 3:
+                return this.setChorusParameter(9, 16, 16, 0);
+            case 4:
+                return this.setChorusParameter(2, 24, 64, 0);
+            case 5:
+                return this.setChorusParameter(1, 5, 112, 0);
+            default:
+                console.warn(`Unsupported Chorus Type: ${type}`);
+        }
+    }
+    setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
+        this.setChorusModRate(modRate);
+        this.setChorusModDepth(modDepth);
+        this.setChorusFeedback(feedback);
+        this.setChorusSendToReverb(sendToReverb);
+    }
+    setChorusModRate(value) {
+        const now = this.audioContext.currentTime;
+        const modRate = this.getChorusModRate(value);
+        this.chorus.modRate = modRate;
+        for (let i = 0; i < this.channels.length; i++) {
+            const lfo = this.channels[i].chorusEffect.lfo;
+            lfo.frequency.setValueAtTime(modRate, now);
+        }
+    }
+    getChorusModRate(value) {
+        return value * 0.122; // Hz
+    }
+    setChorusModDepth(value) {
+        const now = this.audioContext.currentTime;
+        const modDepth = this.getChorusModDepth(value);
+        this.chorus.modDepth = modDepth;
+        for (let i = 0; i < this.channels.length; i++) {
+            const chorusEffect = this.channels[i].chorusEffect;
+            chorusEffect.lfoGain.gain
+                .cancelScheduledValues(now)
+                .setValueAtTime(modDepth / 2, now);
+        }
+    }
+    getChorusModDepth(value) {
+        return (value + 1) / 3200; // second
+    }
+    setChorusFeedback(value) {
+        const now = this.audioContext.currentTime;
+        const feedback = this.getChorusFeedback(value);
+        this.chorus.feedback = feedback;
+        for (let i = 0; i < this.channels.length; i++) {
+            const chorusEffect = this.channels[i].chorusEffect;
+            for (let j = 0; j < chorusEffect.feedbackGains.length; j++) {
+                const feedbackGain = chorusEffect.feedbackGains[j];
+                feedbackGain.gain
+                    .cancelScheduledValues(now)
+                    .setValueAtTime(feedback, now);
+            }
+        }
+    }
+    getChorusFeedback(value) {
+        return value * 0.00763;
+    }
+    setChorusSendToReverb(value) {
+        const now = this.audioContext.currentTime;
+        const sendToReverb = this.getChorusSendToReverb(value);
+        this.chorus.sendToReverb = sendToReverb;
+        for (let i = 0; i < this.channels.length; i++) {
+            const chorusEffect = this.channels[i].chorusEffect;
+            chorusEffect.sendGain.gain
+                .cancelScheduledValues(now)
+                .setValueAtTime(sendToReverb, now);
+        }
+    }
+    getChorusSendToReverb(value) {
+        return value * 0.00787;
+    }
     handleExclusiveMessage(data) {
-        console.warn(`Unsupported Exclusive Message ${data}`);
+        console.warn(`Unsupported Exclusive Message: ${data}`);
     }
     handleSysEx(data) {
         switch (data[0]) {
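Worked through for the defaults (reverb time value 64, feedback 0.25), the relationships in the comment block give (a sketch, not package code):

const rt60 = Math.pow(Math.E, (64 - 40) * 0.025);  // getReverbTime(64) ≈ 1.82 s
const delay = -rt60 * Math.log10(0.25) / 3;        // calcDelay(rt60, 0.25) ≈ 0.37 s
const feedback = Math.pow(10, -3 * delay / rt60);  // back to 0.25, as in the derivation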
@@ -1577,8 +1786,8 @@ Object.defineProperty(Midy, "channelSettings", {
         volume: 100 / 127,
         pan: 64,
         portamentoTime: 0,
-
-
+        reverbSendLevel: 0,
+        chorusSendLevel: 0,
         vibratoRate: 5,
         vibratoDepth: 0.5,
         vibratoDelay: 2.5,