pxt-core 8.2.1 → 8.2.4

This diff shows the content of publicly available package versions released to one of the supported registries; it is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
@@ -689,10 +689,10 @@ var pxt;
  docs.prepTemplate = prepTemplate;
  function setupRenderer(renderer) {
  renderer.image = function (href, title, text) {
- const endpointName = "makecode-lucas-testing-makecodetempmediaservice-usea";
+ const endpointName = "makecodeprodmediaeastus-usea";
  if (href.startsWith("youtube:")) {
- let out = '<div class="tutorial-video-embed"><iframe src="https://www.youtube.com/embed/' + href.split(":").pop()
- + '" title="' + title + '" frameborder="0" ' + 'allowFullScreen ' + 'allow="autoplay; picture-in-picture"></iframe></div>';
+ let out = '<div class="tutorial-video-embed"><iframe class="yt-embed" src="https://www.youtube.com/embed/' + href.split(":").pop()
+ + '" title="' + text + '" frameborder="0" ' + 'allowFullScreen ' + 'allow="autoplay; picture-in-picture"></iframe></div>';
  return out;
  }
  else if (href.startsWith("azuremedia:")) {
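A hedged sketch of the renderer hook patched above, assuming marked's classic `renderer.image(href, title, text)` signature (the one this compiled code uses). For markdown like `![Intro video](youtube:VIDEOID "optional title")`, `text` carries the alt text while `title` is the optional quoted title and is often null, which is why the iframe's `title` attribute now uses `text`:

```ts
// Illustrative only: a standalone image hook for the "youtube:" pseudo-protocol.
// Returning false would let a default image renderer handle ordinary hrefs.
function renderYouTubeImage(href: string, title: string | null, text: string): string | false {
    if (!href.startsWith("youtube:"))
        return false;
    const id = href.split(":").pop();
    return '<div class="tutorial-video-embed"><iframe class="yt-embed" src="https://www.youtube.com/embed/' + id
        + '" title="' + text + '" frameborder="0" allowFullScreen allow="autoplay; picture-in-picture"></iframe></div>';
}
```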
package/built/gdb.js CHANGED
@@ -38,16 +38,16 @@ function getBMPSerialPortsAsync() {
  });
  }
  else if (process.platform == "darwin") {
- return cpExecAsync("ioreg -p IOUSB -l -w 0")
+ return cpExecAsync("system_profiler SPUSBDataType")
  .then(({ stdout, stderr }) => {
  let res = [];
  let inBMP = false;
  stdout.split(/\n/).forEach(ln => {
- if (ln.indexOf("+-o Black Magic Probe") >= 0)
+ if (ln.indexOf(" Black Magic Probe") >= 0)
  inBMP = true;
  if (!inBMP)
  return;
- let m = /"USB Serial Number" = "(\w+)"/.exec(ln);
+ let m = / Serial Number: (\w+)/.exec(ln);
  if (m) {
  inBMP = false;
  res.push("/dev/cu.usbmodem" + m[1] + "1");
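The macOS probe detection now shells out to `system_profiler` instead of `ioreg`. A hedged, self-contained sketch of the same scan (port naming follows the diff above; exact `system_profiler` formatting can vary by macOS version):

```ts
import { exec } from "child_process";
import { promisify } from "util";

const execAsync = promisify(exec);

// Scan `system_profiler SPUSBDataType` output for a "Black Magic Probe" entry and
// derive its GDB serial port from the "Serial Number:" field, mirroring the diff above.
async function getBMPSerialPortsAsync(): Promise<string[]> {
    const { stdout } = await execAsync("system_profiler SPUSBDataType");
    const ports: string[] = [];
    let inBMP = false;
    for (const ln of stdout.split(/\n/)) {
        if (ln.indexOf(" Black Magic Probe") >= 0)
            inBMP = true;
        if (!inBMP)
            continue;
        const m = / Serial Number: (\w+)/.exec(ln);
        if (m) {
            inBMP = false;
            ports.push("/dev/cu.usbmodem" + m[1] + "1");
        }
    }
    return ports;
}
```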
package/built/pxt.js CHANGED
@@ -106399,10 +106399,10 @@ var pxt;
  docs.prepTemplate = prepTemplate;
  function setupRenderer(renderer) {
  renderer.image = function (href, title, text) {
- const endpointName = "makecode-lucas-testing-makecodetempmediaservice-usea";
+ const endpointName = "makecodeprodmediaeastus-usea";
  if (href.startsWith("youtube:")) {
- let out = '<div class="tutorial-video-embed"><iframe src="https://www.youtube.com/embed/' + href.split(":").pop()
- + '" title="' + title + '" frameborder="0" ' + 'allowFullScreen ' + 'allow="autoplay; picture-in-picture"></iframe></div>';
+ let out = '<div class="tutorial-video-embed"><iframe class="yt-embed" src="https://www.youtube.com/embed/' + href.split(":").pop()
+ + '" title="' + text + '" frameborder="0" ' + 'allowFullScreen ' + 'allow="autoplay; picture-in-picture"></iframe></div>';
  return out;
  }
  else if (href.startsWith("azuremedia:")) {
@@ -108186,14 +108186,17 @@ var pxt;
  return false;
  }
  function isRepoBanned(repo, config) {
+ var _a, _b;
  if (isOrgBanned(repo, config))
  return true;
  if (!config)
  return false; // don't know
- if (!repo || !repo.fullName)
+ if (!repo)
  return true;
+ const repoFull = (_a = repo.fullName) === null || _a === void 0 ? void 0 : _a.toLowerCase();
+ const repoSlug = (_b = repo.slug) === null || _b === void 0 ? void 0 : _b.toLowerCase();
  if (config.bannedRepos
- && config.bannedRepos.some(fn => fn.toLowerCase() == repo.fullName.toLowerCase()))
+ && config.bannedRepos.some(fn => fn && (fn.toLowerCase() == repoFull || fn.toLowerCase() == repoSlug)))
  return true;
  return false;
  }
@@ -108207,13 +108210,15 @@ var pxt;
  return false;
  }
  function isRepoApproved(repo, config) {
- var _a;
+ var _a, _b;
  if (isOrgApproved(repo, config))
  return true;
- if (!repo || !config)
+ const repoFull = (_a = repo === null || repo === void 0 ? void 0 : repo.fullName) === null || _a === void 0 ? void 0 : _a.toLowerCase();
+ const repoSlug = (_b = repo === null || repo === void 0 ? void 0 : repo.slug) === null || _b === void 0 ? void 0 : _b.toLowerCase();
+ if (!(config === null || config === void 0 ? void 0 : config.approvedRepoLib) || !(repoFull || repoSlug))
  return false;
- if (repo.fullName
- && ((_a = config.approvedRepoLib) === null || _a === void 0 ? void 0 : _a[repo.fullName.toLowerCase()]))
+ if (config.approvedRepoLib[repoFull]
+ || config.approvedRepoLib[repoSlug])
  return true;
  return false;
  }
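A hedged TypeScript sketch of the logic above (the diff shows the compiled JS). It illustrates the new behavior: both `fullName` and `slug` are now matched case-insensitively, and missing fields no longer throw. The interfaces below are simplified stand-ins for the pxt types, and the `isOrgBanned`/`isOrgApproved` short-circuits from the original are omitted:

```ts
interface RepoLike { fullName?: string; slug?: string; }
interface PackagesConfigLike { bannedRepos?: string[]; approvedRepoLib?: { [id: string]: unknown }; }

function isRepoBanned(repo: RepoLike, config: PackagesConfigLike): boolean {
    if (!config) return false;          // don't know
    if (!repo) return true;
    const repoFull = repo.fullName?.toLowerCase();
    const repoSlug = repo.slug?.toLowerCase();
    return !!config.bannedRepos?.some(fn =>
        fn && (fn.toLowerCase() === repoFull || fn.toLowerCase() === repoSlug));
}

function isRepoApproved(repo: RepoLike, config: PackagesConfigLike): boolean {
    const repoFull = repo?.fullName?.toLowerCase();
    const repoSlug = repo?.slug?.toLowerCase();
    if (!config?.approvedRepoLib || !(repoFull || repoSlug)) return false;
    return !!((repoFull && config.approvedRepoLib[repoFull])
        || (repoSlug && config.approvedRepoLib[repoSlug]));
}
```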
@@ -114591,6 +114596,97 @@ var pxt;
  return outParts.join(" ");
  }
  }
+ function soundToInstructionBuffer(sound, fxSteps, fxRange) {
+ const { startFrequency, endFrequency, startVolume, endVolume, interpolation, duration } = sound;
+ const steps = [];
+ // Optimize the simple case
+ if (sound.interpolation === "linear" && sound.effect === "none") {
+ steps.push({
+ frequency: startFrequency,
+ volume: (startVolume / assets.MAX_VOLUME) * 1024,
+ });
+ steps.push({
+ frequency: endFrequency,
+ volume: (endVolume / assets.MAX_VOLUME) * 1024,
+ });
+ }
+ else {
+ fxSteps = Math.min(fxSteps, Math.floor(duration / 5));
+ const getVolumeAt = (t) => ((startVolume + t * (endVolume - startVolume) / duration) / assets.MAX_VOLUME) * 1024;
+ let getFrequencyAt;
+ switch (interpolation) {
+ case "linear":
+ getFrequencyAt = t => startFrequency + t * (endFrequency - startFrequency) / duration;
+ break;
+ case "curve":
+ getFrequencyAt = t => startFrequency + (endFrequency - startFrequency) * Math.sin(t / duration * (Math.PI / 2));
+ break;
+ case "logarithmic":
+ getFrequencyAt = t => startFrequency + Math.log10(1 + 9 * (t / duration)) * (endFrequency - startFrequency);
+ break;
+ }
+ const timeSlice = duration / fxSteps;
+ for (let i = 0; i < fxSteps; i++) {
+ const newStep = {
+ frequency: Math.max(getFrequencyAt(i * timeSlice), 1),
+ volume: getVolumeAt(i * timeSlice)
+ };
+ if (sound.effect === "tremolo") {
+ if (i % 2 === 0) {
+ newStep.volume = Math.max(newStep.volume - fxRange * 500, 0);
+ }
+ else {
+ newStep.volume = Math.min(newStep.volume + fxRange * 500, 1023);
+ }
+ }
+ else if (sound.effect === "vibrato") {
+ if (i % 2 === 0) {
+ newStep.frequency = Math.max(newStep.frequency - fxRange * 100, 1);
+ }
+ else {
+ newStep.frequency = newStep.frequency + fxRange * 100;
+ }
+ }
+ else if (sound.effect === "warble") {
+ if (i % 2 === 0) {
+ newStep.frequency = Math.max(newStep.frequency - fxRange * 1000, 1);
+ }
+ else {
+ newStep.frequency = newStep.frequency + fxRange * 1000;
+ }
+ }
+ steps.push(newStep);
+ }
+ }
+ const out = new Uint8Array(12 * (steps.length - 1));
+ const stepDuration = Math.floor(duration / (steps.length - 1));
+ for (let i = 0; i < steps.length - 1; i++) {
+ const offset = i * 12;
+ out[offset] = waveToValue(sound.wave);
+ set16BitNumber(out, offset + 2, steps[i].frequency);
+ set16BitNumber(out, offset + 4, stepDuration);
+ set16BitNumber(out, offset + 6, steps[i].volume);
+ set16BitNumber(out, offset + 8, steps[i + 1].volume);
+ set16BitNumber(out, offset + 10, steps[i + 1].frequency);
+ }
+ return out;
+ }
+ assets.soundToInstructionBuffer = soundToInstructionBuffer;
+ function waveToValue(wave) {
+ switch (wave) {
+ case "square": return 15;
+ case "sine": return 3;
+ case "triangle": return 1;
+ case "noise": return 18;
+ case "sawtooth": return 2;
+ }
+ }
+ function set16BitNumber(buf, offset, value) {
+ const temp = new Uint8Array(2);
+ new Uint16Array(temp.buffer)[0] = value | 0;
+ buf[offset] = temp[0];
+ buf[offset + 1] = temp[1];
+ }
  })(assets = pxt.assets || (pxt.assets = {}));
  })(pxt || (pxt = {}));
  // See https://github.com/microsoft/TouchDevelop-backend/blob/master/docs/streams.md
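The encoder above emits one fixed-size record per interpolation step. A hedged sketch of the per-step layout it implies (field names are descriptive labels, not pxt API; byte 1 reads as unused padding, and the 16-bit fields follow `set16BitNumber`, which is little-endian on typical platforms):

```ts
interface SoundStep {
    wave: number;            // byte 0: waveToValue() code (e.g. 15 = square, 3 = sine)
    startFrequency: number;  // bytes 2-3
    duration: number;        // bytes 4-5, milliseconds
    startVolume: number;     // bytes 6-7, 0-1023
    endVolume: number;       // bytes 8-9, 0-1023
    endFrequency: number;    // bytes 10-11
}

// Decode one 12-byte step starting at `offset`, mirroring the writes above.
function decodeStep(buf: Uint8Array, offset: number): SoundStep {
    const u16 = (o: number) => buf[o] | (buf[o + 1] << 8);  // little-endian read
    return {
        wave: buf[offset],
        startFrequency: u16(offset + 2),
        duration: u16(offset + 4),
        startVolume: u16(offset + 6),
        endVolume: u16(offset + 8),
        endFrequency: u16(offset + 10),
    };
}
```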
@@ -153272,7 +153368,7 @@ var pxsim;
  }
  }
  createFrame(url) {
- var _a;
+ var _a, _b;
  const wrapper = document.createElement("div");
  wrapper.className = `simframe ui embed`;
  const frame = document.createElement('iframe');
@@ -153282,12 +153378,18 @@ var pxsim;
  frame.setAttribute('allow', 'autoplay;microphone');
  frame.setAttribute('sandbox', 'allow-same-origin allow-scripts');
  frame.className = 'no-select';
- const furl = (url || this.getSimUrl()) + '#' + frame.id;
+ let furl = url || this.getSimUrl().toString();
+ if ((_a = this._runOptions) === null || _a === void 0 ? void 0 : _a.hideSimButtons) {
+ const urlObject = new URL(furl);
+ urlObject.searchParams.append("hideSimButtons", "1");
+ furl = urlObject.toString();
+ }
+ furl += '#' + frame.id;
  frame.src = furl;
  frame.frameBorder = "0";
  frame.dataset['runid'] = this.runId;
  frame.dataset['origin'] = new URL(furl).origin || "*";
- if ((_a = this._runOptions) === null || _a === void 0 ? void 0 : _a.autofocus)
+ if ((_b = this._runOptions) === null || _b === void 0 ? void 0 : _b.autofocus)
  frame.setAttribute("autofocus", "true");
  wrapper.appendChild(frame);
  const i = document.createElement("i");
@@ -153524,8 +153626,7 @@ var pxsim;
  msg.frameCounter = ++this.frameCounter;
  msg.options = {
  theme: this.themes[this.nextFrameId++ % this.themes.length],
- mpRole: (_b = (_a = /[\&\?]mp=(server|client)/i.exec(window.location.href)) === null || _a === void 0 ? void 0 : _a[1]) === null || _b === void 0 ? void 0 : _b.toLowerCase(),
- hideSimButtons: /hidesimbuttons(?:[:=])1/i.test(window.location.href)
+ mpRole: (_b = (_a = /[\&\?]mp=(server|client)/i.exec(window.location.href)) === null || _a === void 0 ? void 0 : _a[1]) === null || _b === void 0 ? void 0 : _b.toLowerCase()
  };
  msg.id = `${msg.options.theme}-${this.nextId()}`;
  frame.dataset['runid'] = this.runId;
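Taken together, the hunks above move the hideSimButtons flag out of the run-message options and onto the simulator iframe URL as a query parameter. A hedged sketch of the URL construction (the function and parameter names here are illustrative, not pxsim API):

```ts
// Append ?hideSimButtons=1 before the frame id hash, as createFrame now does.
function buildSimFrameUrl(baseUrl: string, frameId: string, hideSimButtons?: boolean): string {
    let furl = baseUrl;
    if (hideSimButtons) {
        const urlObject = new URL(furl);
        urlObject.searchParams.append("hideSimButtons", "1");
        furl = urlObject.toString();
    }
    return furl + "#" + frameId;   // the frame id rides in the hash, after the query string
}

// buildSimFrameUrl("https://example.com/---simulator", "sim-frame-0", true)
//   -> "https://example.com/---simulator?hideSimButtons=1#sim-frame-0"
```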
@@ -154222,92 +154323,10 @@ var pxsim;
  .then(() => {
  if (prevStop != instrStopId)
  return Promise.resolve();
- return playInstructionsAsync(b);
+ return playInstructionsAsync(b.data);
  });
  }
  AudioContextManager.queuePlayInstructions = queuePlayInstructions;
- function playInstructionsAsync(b) {
- const prevStop = instrStopId;
- let ctx = context();
- let idx = 0;
- let ch = new Channel();
- let currWave = -1;
- let currFreq = -1;
- let timeOff = 0;
- if (channels.length > 5)
- channels[0].remove();
- channels.push(ch);
- /** Square waves are perceved as much louder than other sounds, so scale it down a bit to make it less jarring **/
- const scaleVol = (n, isSqWave) => (n / 1024) / 4 * (isSqWave ? .5 : 1);
- const finish = () => {
- ch.disconnectNodes();
- timeOff = 0;
- currWave = -1;
- currFreq = -1;
- };
- const loopAsync = () => {
- if (idx >= b.data.length || !b.data[idx])
- return pxsim.U.delay(timeOff).then(finish);
- const soundWaveIdx = b.data[idx];
- const freq = pxsim.BufferMethods.getNumber(b, pxsim.BufferMethods.NumberFormat.UInt16LE, idx + 2);
- const duration = pxsim.BufferMethods.getNumber(b, pxsim.BufferMethods.NumberFormat.UInt16LE, idx + 4);
- const startVol = pxsim.BufferMethods.getNumber(b, pxsim.BufferMethods.NumberFormat.UInt16LE, idx + 6);
- const endVol = pxsim.BufferMethods.getNumber(b, pxsim.BufferMethods.NumberFormat.UInt16LE, idx + 8);
- const endFreq = pxsim.BufferMethods.getNumber(b, pxsim.BufferMethods.NumberFormat.UInt16LE, idx + 10);
- const isSquareWave = 11 <= soundWaveIdx && soundWaveIdx <= 15;
- const isFilteredNoise = soundWaveIdx == 4 || (16 <= soundWaveIdx && soundWaveIdx <= 18);
- const scaledStart = scaleVol(startVol, isSquareWave);
- const scaledEnd = scaleVol(endVol, isSquareWave);
- if (!ctx || prevStop != instrStopId)
- return pxsim.U.delay(duration);
- if (currWave != soundWaveIdx || currFreq != freq || freq != endFreq) {
- if (ch.generator) {
- return pxsim.U.delay(timeOff)
- .then(() => {
- finish();
- return loopAsync();
- });
- }
- ch.generator = getGenerator(soundWaveIdx, freq);
- if (!ch.generator)
- return pxsim.U.delay(duration);
- currWave = soundWaveIdx;
- currFreq = freq;
- ch.gain = ctx.createGain();
- ch.gain.gain.value = 0;
- ch.gain.gain.setTargetAtTime(scaledStart, _context.currentTime, 0.015);
- if (endFreq != freq) {
- if (ch.generator.frequency != undefined) {
- // If generator is an OscillatorNode
- const param = ch.generator.frequency;
- param.linearRampToValueAtTime(endFreq, ctx.currentTime + ((timeOff + duration) / 1000));
- }
- else if (ch.generator.playbackRate != undefined) {
- // If generator is an AudioBufferSourceNode
- const param = ch.generator.playbackRate;
- const bufferSamplesPerWave = isFilteredNoise ? 4 : 1024;
- param.linearRampToValueAtTime(endFreq / (context().sampleRate / bufferSamplesPerWave), ctx.currentTime + ((timeOff + duration) / 1000));
- }
- }
- ch.generator.connect(ch.gain);
- ch.gain.connect(destination);
- ch.generator.start();
- }
- idx += 12;
- ch.gain.gain.setValueAtTime(scaledStart, ctx.currentTime + (timeOff / 1000));
- timeOff += duration;
- // To prevent clipping, we ramp to this value slightly earlier than intended. This is so that we
- // can go for a smooth ramp to 0 in ch.mute() without this operation interrupting it. If we had
- // more accurate timing this would not be necessary, but we'd probably have to do something like
- // running a metronome in a webworker to get the level of precision we need
- const endTime = scaledEnd !== 0 && duration > 50 ? ((timeOff - 50) / 1000) : ((timeOff - 10) / 1000);
- ch.gain.gain.linearRampToValueAtTime(scaledEnd, ctx.currentTime + endTime);
- return loopAsync();
- };
- return loopAsync()
- .then(() => ch.remove());
- }
- AudioContextManager.playInstructionsAsync = playInstructionsAsync;
  function tone(frequency, gain) {
  if (frequency < 0)
  return;
@@ -154446,6 +154465,126 @@ var pxsim;
  function frequencyFromMidiNoteNumber(note) {
  return 440 * Math.pow(2, (note - 69) / 12);
  }
+ function playInstructionsAsync(instructions, isCancelled, onPull) {
+ return new Promise(async (resolve) => {
+ let resolved = false;
+ let ctx = context();
+ let channel = new Channel();
+ if (channels.length > 5)
+ channels[0].remove();
+ channels.push(channel);
+ channel.gain = ctx.createGain();
+ channel.gain.gain.value = 1;
+ channel.gain.connect(destination);
+ const oscillators = {};
+ const gains = {};
+ let startTime = ctx.currentTime;
+ let currentTime = startTime;
+ let currentWave = 0;
+ let totalDuration = 0;
+ /** Square waves are perceved as much louder than other sounds, so scale it down a bit to make it less jarring **/
+ const scaleVol = (n, isSqWave) => (n / 1024) / 4 * (isSqWave ? .5 : 1);
+ const disconnectNodes = () => {
+ if (resolved)
+ return;
+ resolved = true;
+ channel.disconnectNodes();
+ for (const wave of Object.keys(oscillators)) {
+ oscillators[wave].stop();
+ oscillators[wave].disconnect();
+ gains[wave].disconnect();
+ }
+ resolve();
+ };
+ for (let i = 0; i < instructions.length; i += 12) {
+ const wave = instructions[i];
+ const startFrequency = readUint16(instructions, i + 2);
+ const duration = readUint16(instructions, i + 4) / 1000;
+ const startVolume = readUint16(instructions, i + 6);
+ const endVolume = readUint16(instructions, i + 8);
+ const endFrequency = readUint16(instructions, i + 10);
+ totalDuration += duration;
+ const isSquareWave = 11 <= wave && wave <= 15;
+ if (!oscillators[wave]) {
+ oscillators[wave] = getGenerator(wave, startFrequency);
+ gains[wave] = ctx.createGain();
+ gains[wave].gain.value = 0;
+ gains[wave].connect(channel.gain);
+ oscillators[wave].connect(gains[wave]);
+ oscillators[wave].start();
+ }
+ if (currentWave && wave !== currentWave) {
+ gains[currentWave].gain.setTargetAtTime(0, currentTime, 0.015);
+ }
+ const osc = oscillators[wave];
+ const gain = gains[wave];
+ if (osc instanceof OscillatorNode) {
+ osc.frequency.setValueAtTime(startFrequency, currentTime);
+ osc.frequency.linearRampToValueAtTime(endFrequency, currentTime + duration);
+ }
+ else {
+ const isFilteredNoise = wave == 4 || (16 <= wave && wave <= 18);
+ if (isFilteredNoise)
+ osc.playbackRate.linearRampToValueAtTime(endFrequency / (ctx.sampleRate / 4), currentTime + duration);
+ else if (wave != 5)
+ osc.playbackRate.linearRampToValueAtTime(endFrequency / (ctx.sampleRate / 1024), currentTime + duration);
+ }
+ gain.gain.setValueAtTime(scaleVol(startVolume, isSquareWave), currentTime);
+ gain.gain.linearRampToValueAtTime(scaleVol(endVolume, isSquareWave), currentTime + duration);
+ currentWave = wave;
+ currentTime += duration;
+ }
+ channel.gain.gain.setTargetAtTime(0, currentTime, 0.015);
+ if (isCancelled || onPull) {
+ const handleAnimationFrame = () => {
+ const time = ctx.currentTime;
+ if (time > startTime + totalDuration) {
+ return;
+ }
+ if (isCancelled && isCancelled()) {
+ disconnectNodes();
+ return;
+ }
+ const { frequency, volume } = findFrequencyAndVolumeAtTime((time - startTime) * 1000, instructions);
+ onPull(frequency, volume / 1024);
+ requestAnimationFrame(handleAnimationFrame);
+ };
+ requestAnimationFrame(handleAnimationFrame);
+ }
+ await pxsim.U.delay(totalDuration * 1000);
+ disconnectNodes();
+ });
+ }
+ AudioContextManager.playInstructionsAsync = playInstructionsAsync;
+ function readUint16(buf, offset) {
+ const temp = new Uint8Array(2);
+ temp[0] = buf[offset];
+ temp[1] = buf[offset + 1];
+ return new Uint16Array(temp.buffer)[0];
+ }
+ function findFrequencyAndVolumeAtTime(millis, instructions) {
+ let currentTime = 0;
+ for (let i = 0; i < instructions.length; i += 12) {
+ const startFrequency = readUint16(instructions, i + 2);
+ const duration = readUint16(instructions, i + 4);
+ const startVolume = readUint16(instructions, i + 6);
+ const endVolume = readUint16(instructions, i + 8);
+ const endFrequency = readUint16(instructions, i + 10);
+ if (currentTime + duration < millis) {
+ currentTime += duration;
+ continue;
+ }
+ const offset = (millis - currentTime) / duration;
+ return {
+ frequency: startFrequency + (endFrequency - startFrequency) * offset,
+ volume: startVolume + (endVolume - startVolume) * offset,
+ };
+ }
+ return {
+ frequency: -1,
+ volume: -1
+ };
+ }
  function sendMidiMessage(buf) {
  const data = buf.data;
  if (!data.length) // garbage.
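The rewritten playInstructionsAsync schedules every step up front on the WebAudio clock and can report progress once per animation frame. A hedged usage sketch against the signature visible above (the declaration mirrors the diff; the cancel flag and logging are illustrative only):

```ts
declare const AudioContextManager: {
    playInstructionsAsync(
        instructions: Uint8Array,
        isCancelled?: () => boolean,
        onPull?: (frequency: number, volume: number) => void
    ): Promise<void>;
};

let cancelled = false;

async function previewSound(instructions: Uint8Array): Promise<void> {
    await AudioContextManager.playInstructionsAsync(
        instructions,
        () => cancelled,                         // polled once per animation frame
        (frequency, volume) => {
            // onPull receives volume already scaled to 0..1 (raw 0..1023 divided by 1024)
            console.log(`~${Math.round(frequency)} Hz at ${Math.round(volume * 100)}%`);
        });
}
```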
@@ -15424,6 +15424,8 @@ var pxtblockly;
  this.options.interpolationFieldName = "interpolation";
  if (!this.options.effectFieldName)
  this.options.effectFieldName = "effect";
+ if (!this.options.useMixerSynthesizer)
+ this.options.useMixerSynthesizer = false;
  this.redrawPreview();
  if (this.sourceBlock_.workspace) {
  this.workspace = this.sourceBlock_.workspace;
@@ -15540,7 +15542,8 @@ var pxtblockly;
  this.updateSiblingBlocks(newSound);
  this.redrawPreview();
  },
- initialSound: initialSound
+ initialSound: initialSound,
+ useMixerSynthesizer: isTrue(this.options.useMixerSynthesizer)
  };
  const fv = pxt.react.getFieldEditorView("soundeffect-editor", initialSound, opts, widgetDiv);
  const block = this.sourceBlock_;
@@ -15734,6 +15737,23 @@ var pxtblockly;
  function reverseLookup(map, value) {
  return Object.keys(map).find(k => map[k] === value);
  }
+ function isTrue(value) {
+ if (!value)
+ return false;
+ if (typeof value === "string") {
+ switch (value.toLowerCase().trim()) {
+ case "1":
+ case "yes":
+ case "y":
+ case "on":
+ case "true":
+ return true;
+ default:
+ return false;
+ }
+ }
+ return !!value;
+ }
  })(pxtblockly || (pxtblockly = {}));
  /// <reference path="../../localtypings/blockly.d.ts"/>
  /// <reference path="../../built/pxtsim.d.ts"/>
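The new `isTrue` helper normalizes the string-valued options that block definitions pass to the field editor. A hedged illustration of its behavior (the declaration exists here only so the snippet stands alone; the implementation is the one added in the hunk above):

```ts
declare function isTrue(value: any): boolean;

isTrue("yes");   // true
isTrue("On");    // true  (case-insensitive, trimmed)
isTrue("0");     // false (any string outside the accepted set)
isTrue("");      // false (falsy values short-circuit)
isTrue(1);       // true  (non-strings fall back to !!value)
```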
@@ -1187,6 +1187,7 @@ declare namespace pxtblockly {
  waveFieldName: string;
  interpolationFieldName: string;
  effectFieldName: string;
+ useMixerSynthesizer: any;
  }
  class FieldSoundEffect extends FieldBase<FieldSoundEffectParams> {
  protected mostRecentValue: pxt.assets.Sound;
@@ -11862,6 +11862,8 @@ var pxtblockly;
  this.options.interpolationFieldName = "interpolation";
  if (!this.options.effectFieldName)
  this.options.effectFieldName = "effect";
+ if (!this.options.useMixerSynthesizer)
+ this.options.useMixerSynthesizer = false;
  this.redrawPreview();
  if (this.sourceBlock_.workspace) {
  this.workspace = this.sourceBlock_.workspace;
@@ -11978,7 +11980,8 @@ var pxtblockly;
  this.updateSiblingBlocks(newSound);
  this.redrawPreview();
  },
- initialSound: initialSound
+ initialSound: initialSound,
+ useMixerSynthesizer: isTrue(this.options.useMixerSynthesizer)
  };
  const fv = pxt.react.getFieldEditorView("soundeffect-editor", initialSound, opts, widgetDiv);
  const block = this.sourceBlock_;
@@ -12172,6 +12175,23 @@ var pxtblockly;
  function reverseLookup(map, value) {
  return Object.keys(map).find(k => map[k] === value);
  }
+ function isTrue(value) {
+ if (!value)
+ return false;
+ if (typeof value === "string") {
+ switch (value.toLowerCase().trim()) {
+ case "1":
+ case "yes":
+ case "y":
+ case "on":
+ case "true":
+ return true;
+ default:
+ return false;
+ }
+ }
+ return !!value;
+ }
  })(pxtblockly || (pxtblockly = {}));
  /// <reference path="../../localtypings/blockly.d.ts"/>
  /// <reference path="../../built/pxtsim.d.ts"/>
@@ -1055,7 +1055,8 @@ var pxt;
  onClose: () => this.fv.hide(),
  onSoundChange: (newValue) => this.value = newValue,
  initialSound: this.value,
- useFlex: true
+ useFlex: true,
+ useMixerSynthesizer: pxt.appTarget.id !== "microbit" // FIXME
  };
  }
  }
package/built/pxtlib.d.ts CHANGED
@@ -2333,6 +2333,7 @@ declare namespace pxt.assets {
  const MAX_VOLUME = 255;
  function renderSoundPath(sound: pxt.assets.Sound, width: number, height: number): string;
  function renderWaveSnapshot(frequency: number, volume: number, wave: SoundWaveForm, width: number, height: number, timeBase: number): string;
+ function soundToInstructionBuffer(sound: Sound, fxSteps: number, fxRange: number): Uint8Array;
  }
  declare namespace pxt.streams {
  interface JsonStreamField {
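Taken together, the new pieces in this diff form a small pipeline: `pxt.assets.soundToInstructionBuffer` turns a sound-effect description into the 12-byte-per-step instruction buffer, and the simulator's rewritten `playInstructionsAsync` schedules it through WebAudio. A hedged end-to-end sketch, assuming the pxt/pxsim typings are available; the field values are illustrative, the `Sound` fields listed are the ones `soundToInstructionBuffer` reads, and the `pxsim.AudioContextManager` path is inferred from the pxsim code shown earlier:

```ts
async function previewSlideDown() {
    const sound = {
        wave: "sine",
        startFrequency: 880,
        endFrequency: 220,
        startVolume: 255,        // pxt.assets.MAX_VOLUME
        endVolume: 0,
        duration: 500,           // ms
        interpolation: "logarithmic",
        effect: "none"
    } as pxt.assets.Sound;

    // 20 interpolation steps, effect depth 1; linear + "none" sounds collapse to a single step
    const instructions = pxt.assets.soundToInstructionBuffer(sound, 20, 1);
    console.log(`${instructions.length / 12} instruction step(s)`);
    await pxsim.AudioContextManager.playInstructionsAsync(instructions);
}
```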