@marmooo/midy 0.0.9 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/midy-GM2.js CHANGED
@@ -64,12 +64,6 @@ export class MidyGM2 {
  writable: true,
  value: 0
  });
- Object.defineProperty(this, "reverbFactor", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: 0.1
- });
  Object.defineProperty(this, "masterFineTuning", {
  enumerable: true,
  configurable: true,
@@ -82,6 +76,27 @@ export class MidyGM2 {
  writable: true,
  value: 0
  }); // cb
+ Object.defineProperty(this, "reverb", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {
+ time: this.getReverbTime(64),
+ feedback: 0.25,
+ }
+ });
+ Object.defineProperty(this, "chorus", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {
+ modRate: this.getChorusModRate(3),
+ modDepth: this.getChorusModDepth(19),
+ feedback: this.getChorusFeedback(8),
+ sendToReverb: this.getChorusSendToReverb(0),
+ delayTimes: this.generateDistributedArray(0.02, 2, 0.5),
+ }
+ });
  Object.defineProperty(this, "mono", {
  enumerable: true,
  configurable: true,
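
For reference, the new defaults resolve to concrete values through the conversion helpers added later in this diff (getReverbTime, getChorusModRate, getChorusModDepth, getChorusFeedback, getChorusSendToReverb). A rough sketch of the resulting numbers, assuming those formulas; this snippet is not part of the package:

// Sketch: the constructor defaults evaluated with the new helpers.
const reverbTime = Math.pow(Math.E, (64 - 40) * 0.025); // getReverbTime(64) ≈ 1.82 s
const chorusModRate = 3 * 0.122;                        // getChorusModRate(3) = 0.366 Hz
const chorusModDepth = (19 + 1) / 3200;                 // getChorusModDepth(19) = 0.00625 s
const chorusFeedback = 8 * 0.00763;                     // getChorusFeedback(8) ≈ 0.061
const chorusSendToReverb = 0 * 0.00787;                 // getChorusSendToReverb(0) = 0
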
@@ -190,8 +205,19 @@ export class MidyGM2 {
  writable: true,
  value: {
  reverbAlgorithm: (audioContext) => {
- // return this.createConvolutionReverb(audioContext);
- return this.createSchroederReverb(audioContext);
+ const { time: rt60, feedback } = this.reverb;
+ // const delay = this.calcDelay(rt60, feedback);
+ // const impulse = this.createConvolutionReverbImpulse(
+ // audioContext,
+ // rt60,
+ // delay,
+ // );
+ // return this.createConvolutionReverb(audioContext, impulse);
+ const combFeedbacks = this.generateDistributedArray(feedback, 4);
+ const combDelays = combFeedbacks.map((feedback) => this.calcDelay(rt60, feedback));
+ const allpassFeedbacks = this.generateDistributedArray(feedback, 4);
+ const allpassDelays = allpassFeedbacks.map((feedback) => this.calcDelay(rt60, feedback));
+ return this.createSchroederReverb(audioContext, combFeedbacks, combDelays, allpassFeedbacks, allpassDelays);
  },
  }
  });
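
A rough sketch of what the new reverbAlgorithm produces for the default reverb of { time: ≈1.82 s, feedback: 0.25 }: generateDistributedArray spreads the feedback into four nearby values (±10% plus ±5% jitter by its defaults), and calcDelay, defined further down in this diff, turns each into a delay time. Not part of the package:

// Sketch: one comb stage's parameters for the default settings.
const rt60 = 1.82;                                 // ≈ this.reverb.time for getReverbTime(64)
const feedback = 0.25;                             // this.reverb.feedback default
const delay = -rt60 * Math.log10(feedback) / 3;    // calcDelay(rt60, feedback) ≈ 0.37 s
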
@@ -590,12 +616,7 @@ export class MidyGM2 {
  }
  return noteList[0];
  }
- createConvolutionReverb(audioContext, options = {}) {
- const { decay = 0.8, preDecay = 0, } = options;
- const input = new GainNode(audioContext);
- const output = new GainNode(audioContext);
- const dryGain = new GainNode(audioContext);
- const wetGain = new GainNode(audioContext);
+ createConvolutionReverbImpulse(audioContext, decay, preDecay) {
  const sampleRate = audioContext.sampleRate;
  const length = sampleRate * decay;
  const impulse = new AudioBuffer({
@@ -615,18 +636,17 @@ export class MidyGM2 {
  channelData[i] = (Math.random() * 2 - 1) * attenuation;
  }
  }
+ return impulse;
+ }
+ createConvolutionReverb(audioContext, impulse) {
+ const output = new GainNode(audioContext);
  const convolverNode = new ConvolverNode(audioContext, {
  buffer: impulse,
  });
- input.connect(convolverNode);
- convolverNode.connect(wetGain);
- wetGain.connect(output);
- dryGain.connect(output);
+ convolverNode.connect(output);
  return {
- input,
+ input: convolverNode,
  output,
- dryGain,
- wetGain,
  convolverNode,
  };
  }
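
The convolution path is now split into impulse generation and node wiring; the commented-out branch in reverbAlgorithm above shows the intended pairing. A short usage sketch, assuming an existing AudioContext and a MidyGM2 instance named midy (names are illustrative, not from the package):

// Sketch: using the refactored pair together.
const rt60 = midy.reverb.time;
const delay = midy.calcDelay(rt60, midy.reverb.feedback);
const impulse = midy.createConvolutionReverbImpulse(audioContext, rt60, delay);
const { input, output } = midy.createConvolutionReverb(audioContext, impulse);
// input is the ConvolverNode itself; route a source into input and output onward.
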
@@ -654,17 +674,22 @@ export class MidyGM2 {
  delayNode.connect(passGain);
  return passGain;
  }
+ generateDistributedArray(center, count, varianceRatio = 0.1, randomness = 0.05) {
+ const variance = center * varianceRatio;
+ const array = new Array(count);
+ for (let i = 0; i < count; i++) {
+ const fraction = i / (count - 1 || 1);
+ const value = center - variance + fraction * 2 * variance;
+ array[i] = value * (1 - (Math.random() * 2 - 1) * randomness);
+ }
+ return array;
+ }
  // https://hajim.rochester.edu/ece/sites/zduan/teaching/ece472/reading/Schroeder_1962.pdf
  // M.R.Schroeder, "Natural Sounding Artificial Reverberation", J.Audio Eng. Soc., vol.10, p.219, 1962
- createSchroederReverb(audioContext, options = {}) {
- const { combDelays = [0.31, 0.34, 0.37, 0.40], combFeedbacks = [0.86, 0.87, 0.88, 0.89], allpassDelays = [0.02, 0.05], allpassFeedbacks = [0.7, 0.7], mix = 0.5, } = options;
+ createSchroederReverb(audioContext, combDelays, combFeedbacks, allpassDelays, allpassFeedbacks) {
  const input = new GainNode(audioContext);
  const output = new GainNode(audioContext);
- const mergerGain = new GainNode(audioContext, {
- gain: 1 / (combDelays.length * 2),
- });
- const dryGain = new GainNode(audioContext, { gain: 1 - mix });
- const wetGain = new GainNode(audioContext, { gain: mix });
+ const mergerGain = new GainNode(audioContext);
  for (let i = 0; i < combDelays.length; i++) {
  const comb = this.createCombFilter(audioContext, input, combDelays[i], combFeedbacks[i]);
  comb.connect(mergerGain);
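
generateDistributedArray spreads `count` values evenly across ±(center * varianceRatio) around `center`, each multiplied by up to ±`randomness` jitter. For the chorus delayTimes default from the constructor, that means roughly 10 ms and 30 ms; a hedged sketch (Math.random makes the exact values vary), not part of the package:

// Sketch: expected spread of the chorus delayTimes default.
const delayTimes = midy.generateDistributedArray(0.02, 2, 0.5);
// variance = 0.02 * 0.5 = 0.01; i = 0 -> 0.01 s, i = 1 -> 0.03 s, each within ±5% jitter
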
@@ -674,68 +699,65 @@ export class MidyGM2 {
  const allpass = this.createAllpassFilter(audioContext, (i === 0) ? mergerGain : allpasses.at(-1), allpassDelays[i], allpassFeedbacks[i]);
  allpasses.push(allpass);
  }
- allpasses.at(-1).connect(wetGain);
- input.connect(dryGain);
- dryGain.connect(output);
- wetGain.connect(output);
- return { input, output, dryGain, wetGain };
- }
- createChorusEffect(audioContext, options = {}) {
- const { chorusCount = 2, chorusRate = 0.6, chorusDepth = 0.15, delay = 0.01, variance = delay * 0.1, } = options;
- const lfo = new OscillatorNode(audioContext, { frequency: chorusRate });
- const lfoGain = new GainNode(audioContext, { gain: chorusDepth });
+ allpasses.at(-1).connect(output);
+ return { input, output };
+ }
+ createChorusEffect(audioContext) {
+ const input = new GainNode(audioContext);
  const output = new GainNode(audioContext);
- const chorusGains = [];
+ const sendGain = new GainNode(audioContext);
+ const lfo = new OscillatorNode(audioContext, {
+ frequency: this.chorus.modRate,
+ });
+ const lfoGain = new GainNode(audioContext, {
+ gain: this.chorus.modDepth / 2,
+ });
+ const delayTimes = this.chorus.delayTimes;
  const delayNodes = [];
- const baseGain = 1 / chorusCount;
- for (let i = 0; i < chorusCount; i++) {
- const randomDelayFactor = (Math.random() - 0.5) * variance;
- const delayTime = (i + 1) * delay + randomDelayFactor;
+ const feedbackGains = [];
+ for (let i = 0; i < delayTimes.length; i++) {
+ const delayTime = delayTimes[i];
  const delayNode = new DelayNode(audioContext, {
- maxDelayTime: delayTime,
+ maxDelayTime: 0.1, // generally, 5ms < delayTime < 50ms
+ delayTime,
+ });
+ const feedbackGain = new GainNode(audioContext, {
+ gain: this.chorus.feedback,
  });
- const chorusGain = new GainNode(audioContext, { gain: baseGain });
  delayNodes.push(delayNode);
- chorusGains.push(chorusGain);
+ feedbackGains.push(feedbackGain);
+ input.connect(delayNode);
  lfoGain.connect(delayNode.delayTime);
- delayNode.connect(chorusGain);
- chorusGain.connect(output);
+ delayNode.connect(feedbackGain);
+ feedbackGain.connect(delayNode);
+ delayNode.connect(output);
  }
+ output.connect(sendGain);
  lfo.connect(lfoGain);
  lfo.start();
  return {
+ input,
+ output,
+ sendGain,
  lfo,
  lfoGain,
  delayNodes,
- chorusGains,
- output,
+ feedbackGains,
  };
  }
  connectEffects(channel, gainNode) {
  gainNode.connect(channel.merger);
- if (channel.reverb === 0) {
- if (channel.chorus === 0) { // no effect
- channel.merger.connect(this.masterGain);
- }
- else { // chorus
- channel.chorusEffect.delayNodes.forEach((delayNode) => {
- channel.merger.connect(delayNode);
- });
- channel.chorusEffect.output.connect(this.masterGain);
- }
+ channel.merger.connect(this.masterGain);
+ if (0 < channel.reverbSendLevel) {
+ channel.merger.connect(channel.reverbEffect.input);
+ channel.reverbEffect.output.connect(this.masterGain);
  }
- else {
- if (channel.chorus === 0) { // reverb
- channel.merger.connect(channel.reverbEffect.input);
- channel.reverbEffect.output.connect(this.masterGain);
- }
- else { // reverb + chorus
- channel.chorusEffect.delayNodes.forEach((delayNode) => {
- channel.merger.connect(delayNode);
- });
- channel.merger.connect(channel.reverbEffect.input);
- channel.reverbEffect.output.connect(this.masterGain);
- }
+ if (0 < channel.chorusSendLevel) {
+ channel.merger.connect(channel.chorusEffect.input);
+ channel.reverbEffect.output.connect(this.masterGain);
+ }
+ if (0 < this.chorus.sendToReverb) {
+ channel.chorusEffect.sendGain.connect(channel.reverbEffect.input);
  }
  }
  cbToRatio(cb) {
@@ -1136,23 +1158,25 @@ export class MidyGM2 {
  this.releaseSustainPedal(channelNumber, value);
  }
  }
+ // TODO
  setPortamento(channelNumber, value) {
  this.channels[channelNumber].portamento = value >= 64;
  }
- setReverbSendLevel(channelNumber, reverb) {
+ setReverbSendLevel(channelNumber, reverbSendLevel) {
  const now = this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  const reverbEffect = channel.reverbEffect;
- channel.reverb = reverb / 127 * this.reverbFactor;
- reverbEffect.dryGain.gain.cancelScheduledValues(now);
- reverbEffect.dryGain.gain.setValueAtTime(1 - channel.reverb, now);
- reverbEffect.wetGain.gain.cancelScheduledValues(now);
- reverbEffect.wetGain.gain.setValueAtTime(channel.reverb, now);
+ channel.reverbSendLevel = reverbSendLevel / 127;
+ reverbEffect.output.gain.cancelScheduledValues(now);
+ reverbEffect.output.gain.setValueAtTime(channel.reverbSendLevel, now);
  }
- setChorusSendLevel(channelNumber, chorus) {
+ setChorusSendLevel(channelNumber, chorusSendLevel) {
+ const now = this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
- channel.chorus = chorus / 127;
- channel.chorusEffect.lfoGain = channel.chorus;
+ const chorusEffect = channel.chorusEffect;
+ channel.chorusSendLevel = chorusSendLevel / 127;
+ chorusEffect.output.gain.cancelScheduledValues(now);
+ chorusEffect.output.gain.setValueAtTime(channel.chorusSendLevel, now);
  }
  setSostenutoPedal(channelNumber, value) {
  const isOn = value >= 64;
@@ -1348,11 +1372,11 @@ export class MidyGM2 {
  this.GM2SystemOn();
  break;
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
  GM1SystemOn() {
@@ -1383,9 +1407,10 @@ export class MidyGM2 {
  return this.handleMasterFineTuningSysEx(data);
  case 4: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca25.pdf
  return this.handleMasterCoarseTuningSysEx(data);
- // case 5: // TODO: Global Parameter Control
+ case 5:
+ return this.handleGlobalParameterControlSysEx(data);
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  case 8:
@@ -1394,7 +1419,7 @@ export class MidyGM2 {
  // // TODO
  // return this.handleScaleOctaveTuning1ByteFormat();
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  case 9:
@@ -1406,7 +1431,7 @@ export class MidyGM2 {
  // // TODO
  // return this.setControlChange();
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  case 10:
@@ -1415,11 +1440,11 @@ export class MidyGM2 {
  // // TODO
  // return this.handleKeyBasedInstrumentControl();
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
  handleMasterVolumeSysEx(data) {
@@ -1460,8 +1485,192 @@ export class MidyGM2 {
  this.masterCoarseTuning = coarseTuning - 64;
  }
  }
+ handleGlobalParameterControlSysEx(data) {
+ if (data[7] === 1) {
+ switch (data[8]) {
+ case 1:
+ return this.handleReverbParameterSysEx(data);
+ case 2:
+ return this.handleChorusParameterSysEx(data);
+ default:
+ console.warn(`Unsupported Global Parameter Control Message: ${data}`);
+ }
+ }
+ else {
+ console.warn(`Unsupported Global Parameter Control Message: ${data}`);
+ }
+ }
+ handleReverbParameterSysEx(data) {
+ switch (data[9]) {
+ case 0:
+ return this.setReverbType(data[10]);
+ case 1:
+ return this.setReverbTime(data[10]);
+ }
+ }
+ setReverbType(type) {
+ this.reverb.time = this.getReverbTimeFromType(type);
+ this.reverb.feedback = (type === 8) ? 0.1 : 0.2;
+ const { audioContext, channels, options } = this;
+ for (let i = 0; i < channels.length; i++) {
+ channels[i].reverbEffect = options.reverbAlgorithm(audioContext);
+ }
+ }
+ getReverbTimeFromType(type) {
+ switch (type) {
+ case 0:
+ return this.getReverbTime(44);
+ case 1:
+ return this.getReverbTime(50);
+ case 2:
+ return this.getReverbTime(56);
+ case 3:
+ return this.getReverbTime(64);
+ case 4:
+ return this.getReverbTime(64);
+ case 8:
+ return this.getReverbTime(50);
+ default:
+ console.warn(`Unsupported Reverb Time: ${type}`);
+ }
+ }
+ setReverbTime(value) {
+ this.reverb.time = this.getReverbTime(value);
+ const { audioContext, channels, options } = this;
+ for (let i = 0; i < channels.length; i++) {
+ channels[i].reverbEffect = options.reverbAlgorithm(audioContext);
+ }
+ }
+ getReverbTime(value) {
+ return Math.pow(Math.E, (value - 40) * 0.025);
+ }
+ // mean free path equation
+ // https://repository.dl.itc.u-tokyo.ac.jp/record/8550/files/A31912.pdf
+ // 江田和司, 拡散性制御に基づく室内音響設計に向けた音場解析に関する研究, 2015
+ // V: room size (m^3)
+ // S: room surface area (m^2)
+ // meanFreePath = 4V / S (m)
+ // delay estimation using mean free path
+ // t: degree Celsius, generally used 20
+ // c: speed of sound = 331.5 + 0.61t = 331.5 + 0.61 * 20 = 343.7 (m/s)
+ // delay = meanFreePath / c (s)
+ // feedback equation
+ // RT60 means that the energy is reduced to Math.pow(10, -6).
+ // Since energy is proportional to the square of the amplitude,
+ // the amplitude is reduced to Math.pow(10, -3).
+ // When this is done through n feedbacks,
+ // Math.pow(feedback, n) = Math.pow(10, -3)
+ // Math.pow(feedback, RT60 / delay) = Math.pow(10, -3)
+ // RT60 / delay * Math.log10(feedback) = -3
+ // RT60 = -3 * delay / Math.log10(feedback)
+ // feedback = Math.pow(10, -3 * delay / RT60)
+ // delay estimation using ideal feedback
+ // The structure of a concert hall is complex,
+ // so estimates based on mean free path are unstable.
+ // It is easier to determine the delay based on ideal feedback.
+ // The average sound absorption coefficient
+ // suitable for playing musical instruments is 0.18 to 0.28.
+ // delay = -RT60 * Math.log10(feedback) / 3
+ calcDelay(rt60, feedback) {
+ return -rt60 * Math.log10(feedback) / 3;
+ }
+ handleChorusParameterSysEx(data) {
+ switch (data[9]) {
+ case 0:
+ return this.setChorusType(data[10]);
+ case 1:
+ return this.setChorusModRate(data[10]);
+ case 2:
+ return this.setChorusModDepth(data[10]);
+ case 3:
+ return this.setChorusFeedback(data[10]);
+ case 4:
+ return this.setChorusSendToReverb(data[10]);
+ }
+ }
+ setChorusType(type) {
+ switch (type) {
+ case 0:
+ return this.setChorusParameter(3, 5, 0, 0);
+ case 1:
+ return this.setChorusParameter(9, 19, 5, 0);
+ case 2:
+ return this.setChorusParameter(3, 19, 8, 0);
+ case 3:
+ return this.setChorusParameter(9, 16, 16, 0);
+ case 4:
+ return this.setChorusParameter(2, 24, 64, 0);
+ case 5:
+ return this.setChorusParameter(1, 5, 112, 0);
+ default:
+ console.warn(`Unsupported Chorus Type: ${type}`);
+ }
+ }
+ setChorusParameter(modRate, modDepth, feedback, sendToReverb) {
+ this.setChorusModRate(modRate);
+ this.setChorusModDepth(modDepth);
+ this.setChorusFeedback(feedback);
+ this.setChorusSendToReverb(sendToReverb);
+ }
+ setChorusModRate(value) {
+ const now = this.audioContext.currentTime;
+ const modRate = this.getChorusModRate(value);
+ this.chorus.modRate = modRate;
+ for (let i = 0; i < this.channels.length; i++) {
+ const lfo = this.channels[i].chorusEffect.lfo;
+ lfo.frequency.setValueAtTime(modRate, now);
+ }
+ }
+ getChorusModRate(value) {
+ return value * 0.122; // Hz
+ }
+ setChorusModDepth(value) {
+ const now = this.audioContext.currentTime;
+ const modDepth = this.getChorusModDepth(value);
+ this.chorus.modDepth = modDepth;
+ for (let i = 0; i < this.channels.length; i++) {
+ const chorusEffect = this.channels[i].chorusEffect;
+ chorusEffect.lfoGain.gain
+ .cancelScheduledValues(now)
+ .setValueAtTime(modDepth / 2, now);
+ }
+ }
+ getChorusModDepth(value) {
+ return (value + 1) / 3200; // second
+ }
+ setChorusFeedback(value) {
+ const now = this.audioContext.currentTime;
+ const feedback = this.getChorusFeedback(value);
+ this.chorus.feedback = feedback;
+ for (let i = 0; i < this.channels.length; i++) {
+ const chorusEffect = this.channels[i].chorusEffect;
+ for (let j = 0; j < chorusEffect.feedbackGains.length; j++) {
+ const feedbackGain = chorusEffect.feedbackGains[j];
+ feedbackGain.gain
+ .cancelScheduledValues(now)
+ .setValueAtTime(feedback, now);
+ }
+ }
+ }
+ getChorusFeedback(value) {
+ return value * 0.00763;
+ }
+ setChorusSendToReverb(value) {
+ const now = this.audioContext.currentTime;
+ const sendToReverb = this.getChorusSendToReverb(value);
+ this.chorus.sendToReverb = sendToReverb;
+ for (let i = 0; i < this.channels.length; i++) {
+ const chorusEffect = this.channels[i].chorusEffect;
+ chorusEffect.sendGain.gain
+ .cancelScheduledValues(now)
+ .setValueAtTime(sendToReverb, now);
+ }
+ }
+ getChorusSendToReverb(value) {
+ return value * 0.00787;
+ }
  handleExclusiveMessage(data) {
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  handleSysEx(data) {
  switch (data[0]) {
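
The indices checked in handleGlobalParameterControlSysEx match the layout of a GM2 Global Parameter Control universal real-time SysEx (sub-IDs 04 05, one slot-path pair, then parameter/value pairs). A hedged sketch of a message that would reach setReverbTime, assuming data holds the bytes after the opening F0; this is illustrative and not from the package:

// Sketch: full message F0 7F 7F 04 05 01 01 01 01 01 01 40 F7
const data = [0x7F, 0x7F, 0x04, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x40, 0xF7];
// data[7] === 1 and data[8] === 1 select the reverb slot (01 02 would select chorus),
// data[9] === 1 selects Reverb Time, and data[10] = 0x40 is passed to setReverbTime(64).
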
@@ -1494,8 +1703,8 @@ Object.defineProperty(MidyGM2, "channelSettings", {
  volume: 100 / 127,
  pan: 64,
  portamentoTime: 0,
- reverb: 0,
- chorus: 0,
+ reverbSendLevel: 0,
+ chorusSendLevel: 0,
  bank: 121 * 128,
  bankMSB: 121,
  bankLSB: 0,
@@ -893,11 +893,11 @@ export class MidyGMLite {
  case 2: // GM System Off
  break;
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
  GM1SystemOn() {
@@ -916,11 +916,11 @@ export class MidyGMLite {
  case 1:
  return this.handleMasterVolumeSysEx(data);
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  break;
  default:
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
  handleMasterVolumeSysEx(data) {
@@ -938,7 +938,7 @@ export class MidyGMLite {
  }
  }
  handleExclusiveMessage(data) {
- console.warn(`Unsupported Exclusive Message ${data}`);
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  handleSysEx(data) {
  switch (data[0]) {