pxt-common-packages 10.3.1 → 10.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/libs/azureiot/built/debug/binary.js +461 -461
- package/libs/color/built/debug/binary.js +8 -8
- package/libs/color-sensor/built/debug/binary.js +8 -8
- package/libs/controller/built/debug/binary.js +6969 -6969
- package/libs/controller---none/built/debug/binary.js +6949 -6949
- package/libs/datalogger/built/debug/binary.js +63 -63
- package/libs/edge-connector/built/debug/binary.js +9 -9
- package/libs/esp32/built/debug/binary.js +462 -462
- package/libs/game/assetTemplates.ts +10 -0
- package/libs/game/built/debug/binary.js +6888 -6888
- package/libs/lcd/built/debug/binary.js +8 -8
- package/libs/light-spectrum-sensor/built/debug/binary.js +8 -8
- package/libs/lora/built/debug/binary.js +8 -8
- package/libs/matrix-keypad/built/debug/binary.js +8 -8
- package/libs/mixer/instrument.ts +776 -0
- package/libs/mixer/melody.h +1 -1
- package/libs/mixer/melody.ts +29 -1
- package/libs/mixer/pxt.json +2 -0
- package/libs/mixer/sequencer.ts +88 -0
- package/libs/mqtt/built/debug/binary.js +176 -176
- package/libs/net/built/debug/binary.js +176 -176
- package/libs/net-game/built/debug/binary.js +8477 -8477
- package/libs/palette/built/debug/binary.js +6887 -6887
- package/libs/pixel/built/debug/binary.js +8 -8
- package/libs/power/built/debug/binary.js +8 -8
- package/libs/proximity/built/debug/binary.js +8 -8
- package/libs/radio/built/debug/binary.js +8 -8
- package/libs/radio-broadcast/built/debug/binary.js +8 -8
- package/libs/rotary-encoder/built/debug/binary.js +8 -8
- package/libs/screen/built/debug/binary.js +50 -50
- package/libs/servo/built/debug/binary.js +8 -8
- package/libs/sprite-scaling/built/debug/binary.js +6887 -6887
- package/libs/storyboard/built/debug/binary.js +6887 -6887
- package/package.json +1 -1
package/libs/mixer/instrument.ts

```diff
@@ -0,0 +1,776 @@
+namespace music.sequencer {
+    const BUFFER_SIZE = 12;
+
+    /**
+     * Byte encoding format for songs
+     * FIXME: should this all be word aligned?
+     *
+     * song(7 + length of all tracks bytes)
+     * 0 version
+     * 1 beats per minute
+     * 3 beats per measure
+     * 4 ticks per beat
+     * 5 measures
+     * 6 number of tracks
+     * ...tracks
+     *
+     * track(6 + instrument length + note length bytes)
+     * 0 id
+     * 1 flags
+     * 2 instruments byte length
+     * 4...instrument
+     * notes byte length
+     * ...note events
+     *
+     * instrument(27 bytes)
+     * 0 waveform
+     * 1 amp attack
+     * 3 amp decay
+     * 5 amp sustain
+     * 7 amp release
+     * 9 amp amp
+     * 11 pitch attack
+     * 13 pitch decay
+     * 15 pitch sustain
+     * 17 pitch release
+     * 19 pitch amp
+     * 21 amp lfo freq
+     * 22 amp lfo amp
+     * 24 pitch lfo freq
+     * 25 pitch lfo amp
+     *
+     * drum(5 + 7 * steps bytes)
+     * 0 steps
+     * 1 start freq
+     * 3 start amp
+     * 5...steps
+     *
+     * drum step(7 bytes)
+     * 0 waveform
+     * 1 freq
+     * 3 volume
+     * 5 duration
+     *
+     * note event(5 + 1 * polyphony bytes)
+     * 0 start tick
+     * 2 end tick
+     * 4 polyphony
+     * 5...notes(1 byte each)
+     *
+     */
+
+    export class Song {
+        tracks: Track[];
+
+        constructor(public buf: Buffer) {
+            this.tracks = [];
+
+            let currentOffset = 7;
+            for (let i = 0; i < this.numberOfTracks; i++) {
+                let track: Track = new MelodicTrack(this.buf, currentOffset);
+
+                if (!track.isMelodicTrack) {
+                    track = new DrumTrack(this.buf, currentOffset)
+                }
+
+                this.tracks.push(track);
+                currentOffset += track.byteLength;
+            }
+        }
+
+        get version(): number {
+            return this.buf[0];
+        }
+
+        set version(value: number) {
+            this.buf[0] = value;
+        }
+
+        get beatsPerMinute(): number {
+            return this.buf.getNumber(NumberFormat.UInt16LE, 1);
+        }
+
+        set beatsPerMinute(value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, 1, value);
+        }
+
+        get beatsPerMeasure(): number {
+            return this.buf[3];
+        }
+
+        set beatsPerMeasure(value: number) {
+            this.buf[3] = value;
+        }
+
+        get ticksPerBeat(): number {
+            return this.buf[4];
+        }
+
+        set ticksPerBeat(value: number) {
+            this.buf[4] = value;
+        }
+
+        get measures(): number {
+            return this.buf[5];
+        }
+
+        set measures(value: number) {
+            this.buf[5] = value;
+        }
+
+        get numberOfTracks(): number {
+            return this.buf[6];
+        }
+    }
+
+    export class Envelope {
+        constructor(public buf?: Buffer, public offset?: number) {
+            if (!buf) this.buf = control.createBuffer(10);
+            this.offset = this.offset || 0;
+        }
+
+        // The time in ms for the envelope to reach its maximum value
+        get attack(): number {
+            return this.getValue(0);
+        }
+
+        set attack(value: number) {
+            this.setValue(0, value);
+        }
+
+        // The time in ms for the envelope to reach its sustain value after reaching its maximum value
+        get decay(): number {
+            return this.getValue(2);
+        }
+
+        set decay(value: number) {
+            this.setValue(2, value);
+        }
+
+        // The value (0-1024) to hold at during the sustain stage
+        get sustain(): number {
+            return this.getValue(4);
+        }
+
+        set sustain(value: number) {
+            this.setValue(4, value);
+        }
+
+        // The time in ms for the envelope to reach 0 after the gate length ends
+        get release(): number {
+            return this.getValue(6);
+        }
+
+        set release(value: number) {
+            this.setValue(6, value);
+        }
+
+        // The maximum value that this envelope will reach
+        get amplitude(): number {
+            return this.getValue(8);
+        }
+
+        set amplitude(value: number) {
+            this.setValue(8, value);
+        }
+
+        protected getValue(offset: number) {
+            return this.buf.getNumber(NumberFormat.UInt16LE, this.offset + offset);
+        }
+
+        protected setValue(offset: number, value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, this.offset + offset, value);
+        }
+    }
+
+    export class LFO {
+        constructor(public buf?: Buffer, public offset?: number) {
+            if (!buf) this.buf = control.createBuffer(3);
+            this.offset = this.offset || 0;
+        }
+
+        get frequency(): number {
+            return this.buf[this.offset];
+        }
+
+        set frequency(value: number) {
+            this.buf[this.offset] = value;
+        }
+
+        get amplitude(): number {
+            return this.buf.getNumber(NumberFormat.UInt16LE, this.offset + 1);
+        }
+
+        set amplitude(value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, this.offset + 1, value);
+        }
+    }
+
+    export class NoteEvent {
+        constructor(public buf: Buffer, public offset: number) {
+
+        }
+
+        get startTick(): number {
+            return this.getValue(0);
+        }
+
+        set startTick(value: number) {
+            this.setValue(0, value);
+        }
+
+        get endTick(): number {
+            return this.getValue(2);
+        }
+
+        set endTick(value: number) {
+            this.setValue(2, value);
+        }
+
+        get polyphony(): number {
+            return this.buf[this.offset + 4];
+        }
+
+        set polyphony(value: number) {
+            this.buf[this.offset + 4] = value;
+        }
+
+        get byteLength() {
+            return this.polyphony + 5;
+        }
+
+        getNote(offset: number) {
+            return this.buf[this.offset + offset + 5];
+        }
+
+        protected getValue(offset: number) {
+            return this.buf.getNumber(NumberFormat.UInt16LE, this.offset + offset);
+        }
+
+        protected setValue(offset: number, value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, this.offset + offset, value);
+        }
+    }
+
+    export class Track {
+        currentNoteEvent: NoteEvent;
+
+        constructor(public buf: Buffer, public offset: number) {
+            this.currentNoteEvent = new NoteEvent(this.buf, this.noteEventStart + 2);
+        }
+
+        get isMelodicTrack(): boolean {
+            return this.flags === 0;
+        }
+
+        get id(): number {
+            return this.buf[this.offset];
+        }
+
+        set id(value: number) {
+            this.buf[this.offset] = value;
+        }
+
+        get flags(): number {
+            return this.buf[this.offset + 1];
+        }
+
+        set flags(value: number) {
+            this.buf[this.offset + 1] = value;
+        }
+
+        get instrumentByteLength(): number {
+            return this.getValue(this.offset + 2);
+        }
+
+        set instrumentByteLength(value: number) {
+            this.setValue(this.offset + 2, value);
+        }
+
+        get noteEventStart(): number {
+            return this.offset + this.instrumentByteLength + 4;
+        }
+
+        get noteEventByteLength(): number {
+            return this.getValue(this.noteEventStart);
+        }
+
+        set noteEventByteLength(value: number) {
+            this.setValue(this.noteEventStart, value);
+        }
+
+        get byteLength() {
+            return this.noteEventByteLength + this.instrumentByteLength + 6;
+        }
+
+        advanceNoteEvent() {
+            this.currentNoteEvent.offset += this.currentNoteEvent.byteLength;
+
+            if (this.currentNoteEvent.offset >= this.offset + this.byteLength) {
+                this.currentNoteEvent.offset = this.noteEventStart + 2;
+            }
+        }
+
+        protected getValue(offset: number) {
+            return this.buf.getNumber(NumberFormat.UInt16LE, offset);
+        }
+
+        protected setValue(offset: number, value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, offset, value);
+        }
+    }
+
+    export class MelodicTrack extends Track {
+        instrument: Instrument;
+
+        constructor(buf: Buffer, offset: number) {
+            super(buf, offset);
+            this.instrument = new Instrument(this.buf, this.offset + 4);
+        }
+    }
+
+    export class DrumTrack extends Track {
+        drums: DrumInstrument[];
+
+        constructor(buf: Buffer, offset: number) {
+            super(buf, offset);
+            this.drums = [];
+
+            let currentOffset = 0;
+            while (currentOffset < this.instrumentByteLength) {
+                this.drums.push(new DrumInstrument(this.buf, this.offset + 4 + currentOffset));
+                currentOffset += this.drums[this.drums.length - 1].byteLength;
+            }
+        }
+    }
+
+    export class Instrument {
+        ampEnvelope: Envelope;
+        pitchEnvelope: Envelope;
+        ampLFO: LFO;
+        pitchLFO: LFO;
+
+        constructor(public buf?: Buffer, public offset?: number) {
+            if (!buf) this.buf = control.createBuffer(27);
+            this.offset = this.offset || 0;
+            this.ampEnvelope = new Envelope(this.buf, this.offset + 1);
+            this.pitchEnvelope = new Envelope(this.buf, this.offset + 11);
+            this.ampLFO = new LFO(this.buf, this.offset + 21);
+            this.pitchLFO = new LFO(this.buf, this.offset + 24)
+        }
+
+        get waveform(): number {
+            return this.buf[this.offset];
+        }
+
+        set waveform(value: number) {
+            this.buf[this.offset] = value;
+        }
+    }
+
+    export class DrumInstrument {
+        steps: DrumStep[];
+
+        constructor(public buf: Buffer, public offset: number) {
+            this.steps = [];
+
+            for (let i = 0; i < this.numSteps; i++) {
+                this.steps.push(new DrumStep(this.buf, this.offset + 5 + i * 7))
+            }
+        }
+
+        get byteLength(): number {
+            return 5 + this.numSteps * 7;
+        }
+
+        get numSteps(): number {
+            return this.buf[this.offset];
+        }
+
+        set numSteps(value: number) {
+            this.buf[this.offset] = value;
+        }
+
+        get startFrequency(): number {
+            return this.getValue(1);
+        }
+
+        set startFrequency(value: number) {
+            this.setValue(1, value);
+        }
+
+        get startVolume(): number {
+            return this.getValue(3);
+        }
+
+        set startVolume(value: number) {
+            this.setValue(3, value);
+        }
+
+        protected getValue(offset: number) {
+            return this.buf.getNumber(NumberFormat.UInt16LE, this.offset + offset);
+        }
+
+        protected setValue(offset: number, value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, this.offset + offset, value);
+        }
+    }
+
+    export class DrumStep {
+        constructor(public buf?: Buffer, public offset?: number) {
+            if (!buf) this.buf = control.createBuffer(7);
+            this.offset = this.offset || 0;
+        }
+
+        get waveform(): number {
+            return this.buf[this.offset];
+        }
+
+        set waveform(value: number) {
+            this.buf[this.offset] = value;
+        }
+
+        get frequency(): number {
+            return this.getValue(1);
+        }
+
+        set frequency(value: number) {
+            this.setValue(1, value);
+        }
+
+        get volume(): number {
+            return this.getValue(3);
+        }
+
+        set volume(value: number) {
+            this.setValue(3, value);
+        }
+
+        get duration(): number {
+            return this.getValue(5);
+        }
+
+        set duration(value: number) {
+            this.setValue(5, value);
+        }
+
+        protected getValue(offset: number) {
+            return this.buf.getNumber(NumberFormat.UInt16LE, this.offset + offset);
+        }
+
+        protected setValue(offset: number, value: number) {
+            this.buf.setNumber(NumberFormat.UInt16LE, this.offset + offset, value);
+        }
+    }
+
+    /**
+     * Renders a single note played on an instrument into a buffer of sound instructions.
+     *
+     * @param instrument The instrument being played
+     * @param noteFrequency The frequency of the note being played. In other words, "the key being pressed on the piano"
+     * @param gateLength The length of time that the "piano key" is held down in ms. The total duration
+     *      of the sound instructions will be longer than this if the amplitude envelope of the
+     *      instrument has a nonzero release time
+     * @param volume The peak volume of the note to play (0-1024). Also called the "velocity"
+     */
+    export function renderInstrument(instrument: Instrument, noteFrequency: number, gateLength: number, volume: number) {
+        // We cut off the sound at the end of the amplitude envelope's release time. This is to prevent
+        // the amp envelope from making the sound keep playing forever
+        const totalDuration = gateLength + instrument.ampEnvelope.release;
+
+        // Our goal is to calculate the frequency and amplitude at all of the inflection points in this note's lifetime
+
+        // For the ADSR envelopes, the inflection points are:
+        // 1. The end of the envelope atack (which is when the decay begins)
+        // 2. The end of the envelope decay (which is when the sustain begins)
+        // 3. The end of the gateLength (which is when the release begins)
+        // 4. The end of the envelope release
+        // If the gateLength ends before any of these stages (e.g. it's shorter than the envelope's attack), then
+        // we ignore the other stages and go straight to the release stage.
+
+        // For the triangle LFOs, the inflections points occur every time the slope goes from positive to negative. In
+        // other words, it's half the period of the triangle wave.
+
+        const ampLFOInterval = instrument.ampLFO.amplitude ? Math.max(500 / instrument.ampLFO.frequency, 50) : 50;
+        const pitchLFOInterval = instrument.pitchLFO.amplitude ? Math.max(500 / instrument.pitchLFO.frequency, 50) : 50;
+
+        // We're going to add the timepoints to this array in order so that it doesn't need to be sorted
+        let timePoints = [0];
+
+        // For each LFO and envelope, keep track of the next inflection point. If any of the LFOs or envelopes have
+        // an amplitude of 0, we can ignore them entirely.
+        let nextAETime = instrument.ampEnvelope.attack;
+        let nextPETime = instrument.pitchEnvelope.amplitude ? instrument.pitchEnvelope.attack : totalDuration;
+        let nextPLTime = instrument.pitchLFO.amplitude ? pitchLFOInterval : totalDuration;
+        let nextALTime = instrument.ampLFO.amplitude ? ampLFOInterval : totalDuration;
+
+        let time = 0;
+        while (time < totalDuration) {
+            // Amp envelope
+            if (nextAETime <= nextPETime && nextAETime <= nextPLTime && nextAETime <= nextALTime) {
+                time = nextAETime;
+                timePoints.push(nextAETime);
+
+                // Check if the end of the decay stage is next
+                if (time < instrument.ampEnvelope.attack + instrument.ampEnvelope.decay && instrument.ampEnvelope.attack + instrument.ampEnvelope.decay < gateLength) {
+                    nextAETime = instrument.ampEnvelope.attack + instrument.ampEnvelope.decay;
+                }
+                // Then check for the end of the sustain stage
+                else if (time < gateLength) {
+                    nextAETime = gateLength;
+                }
+                // Otherwise it must be the end of the release
+                else {
+                    nextAETime = totalDuration;
+                }
+            }
+            // Pitch envelope
+            else if (nextPETime <= nextPLTime && nextPETime <= nextALTime && nextPETime < totalDuration) {
+                time = nextPETime;
+                timePoints.push(nextPETime);
+
+                // Check if the end of the decay stage is next
+                if (time < instrument.pitchEnvelope.attack + instrument.pitchEnvelope.decay && instrument.pitchEnvelope.attack + instrument.pitchEnvelope.decay < gateLength) {
+                    nextPETime = instrument.pitchEnvelope.attack + instrument.pitchEnvelope.decay;
+                }
+                // Then check for the end of the sustain stage
+                else if (time < gateLength) {
+                    nextPETime = gateLength;
+                }
+                // Otherwise it must be the end of the release
+                else if (time < gateLength + instrument.pitchEnvelope.release) {
+                    nextPETime = Math.min(totalDuration, gateLength + instrument.pitchEnvelope.release);
+                }
+                // If we reach the end of the release before the amp envelope is finished, bail out
+                else {
+                    nextPETime = totalDuration
+                }
+            }
+            // Pitch LFO
+            else if (nextPLTime <= nextALTime && nextPLTime < totalDuration) {
+                time = nextPLTime;
+                timePoints.push(nextPLTime);
+                nextPLTime += pitchLFOInterval;
+            }
+            // Amp LFO
+            else if (nextALTime < totalDuration) {
+                time = nextALTime;
+                timePoints.push(nextALTime);
+                nextALTime += ampLFOInterval;
+            }
+
+
+            if (time >= totalDuration) {
+                break;
+            }
+
+            // Now that we've advanced the time, we need to check all of the envelopes/LFOs again
+            // to see if any of them also need to be pushed forward (e.g. they had the same inflection point
+            // as the one we just added to the array)
+            if (nextAETime <= time) {
+                if (time < instrument.ampEnvelope.attack + instrument.ampEnvelope.decay && instrument.ampEnvelope.attack + instrument.ampEnvelope.decay < gateLength) {
+                    nextAETime = instrument.ampEnvelope.attack + instrument.ampEnvelope.decay;
+                }
+                else if (time < gateLength) {
+                    nextAETime = gateLength;
+                }
+                else {
+                    nextAETime = totalDuration;
+                }
+            }
+            if (nextPETime <= time) {
+                if (time < instrument.pitchEnvelope.attack + instrument.pitchEnvelope.decay && instrument.pitchEnvelope.attack + instrument.pitchEnvelope.decay < gateLength) {
+                    nextPETime = instrument.pitchEnvelope.attack + instrument.pitchEnvelope.decay;
+                }
+                else if (time < gateLength) {
+                    nextPETime = gateLength;
+                }
+                else if (time < gateLength + instrument.pitchEnvelope.release) {
+                    nextPETime = Math.min(totalDuration, gateLength + instrument.pitchEnvelope.release);
+                }
+                else {
+                    nextPETime = totalDuration
+                }
+            }
+            while (nextALTime <= time) {
+                nextALTime += ampLFOInterval;
+            }
+            while (nextPLTime <= time) {
+                nextPLTime += pitchLFOInterval;
+            }
+        }
+
+        // Once we've calculated the inflection points, calculate the frequency and amplitude at
+        // each step and interpolate between them with sound instructions
+        let prevAmp = instrumentVolumeAtTime(instrument, gateLength, 0, volume) | 0;
+        let prevPitch = instrumentPitchAtTime(instrument, noteFrequency, gateLength, 0) | 0;
+        let prevTime = 0;
+
+        let nextAmp: number;
+        let nextPitch: number;
+        const out = control.createBuffer(BUFFER_SIZE * timePoints.length);
+        for (let i = 1; i < timePoints.length; i++) {
+            if (timePoints[i] - prevTime < 5) {
+                prevTime = timePoints[i];
+                continue;
+            }
+
+            nextAmp = instrumentVolumeAtTime(instrument, gateLength, timePoints[i], volume) | 0;
+            nextPitch = instrumentPitchAtTime(instrument, noteFrequency, gateLength, timePoints[i]) | 0
+            addNote(
+                out,
+                (i - 1) * 12,
+                (timePoints[i] - prevTime) | 0,
+                prevAmp,
+                nextAmp,
+                instrument.waveform,
+                prevPitch,
+                255,
+                nextPitch
+            )
+
+            prevAmp = nextAmp;
+            prevPitch = nextPitch;
+            prevTime = timePoints[i];
+        }
+
+        // Finally, add one extra step to move the amplitude to 0 without
+        // clipping just in case the amp LFO caused it to be nonzero
+        addNote(
+            out,
+            (timePoints.length - 1) * 12,
+            10,
+            prevAmp,
+            0,
+            instrument.waveform,
+            prevPitch,
+            255,
+            prevPitch
+        )
+        return out;
+    }
+
+    export function renderDrumInstrument(sound: DrumInstrument, volume: number) {
+        // Drum instruments are rendered just like melodic instruments, but the inflection
+        // points are already calculated for us
+        let prevAmp = sound.startVolume;
+        let prevFreq = sound.startFrequency;
+
+        const scaleVolume = (value: number) => (value / 1024) * volume;
+
+        let out = control.createBuffer((sound.steps.length + 1) * BUFFER_SIZE);
+
+        for (let i = 0; i < sound.steps.length; i++) {
+            addNote(
+                out,
+                i * BUFFER_SIZE,
+                sound.steps[i].duration,
+                scaleVolume(prevAmp),
+                scaleVolume(sound.steps[i].volume),
+                sound.steps[i].waveform,
+                prevFreq,
+                255,
+                sound.steps[i].frequency
+            );
+            prevAmp = sound.steps[i].volume;
+            prevFreq = sound.steps[i].frequency
+        }
+
+        addNote(
+            out,
+            sound.steps.length * BUFFER_SIZE,
+            10,
+            scaleVolume(prevAmp),
+            0,
+            sound.steps[sound.steps.length - 1].waveform,
+            prevFreq,
+            255,
+            prevFreq
+        );
+
+        return out;
+    }
+
+    function instrumentPitchAtTime(instrument: Instrument, noteFrequency: number, gateLength: number, time: number) {
+        let mod = 0;
+        if (instrument.pitchEnvelope.amplitude) {
+            mod += envelopeValueAtTime(instrument.pitchEnvelope, time, gateLength)
+        }
+        if (instrument.pitchLFO.amplitude) {
+            mod += lfoValueAtTime(instrument.pitchLFO, time)
+        }
+        return Math.max(noteFrequency + mod, 0);
+    }
+
+    function instrumentVolumeAtTime(instrument: Instrument, gateLength: number, time: number, maxVolume: number) {
+        let mod = 0;
+        if (instrument.ampEnvelope.amplitude) {
+            mod += envelopeValueAtTime(instrument.ampEnvelope, time, gateLength)
+        }
+        if (instrument.ampLFO.amplitude) {
+            mod += lfoValueAtTime(instrument.ampLFO, time)
+        }
+        return ((Math.max(Math.min(mod, instrument.ampEnvelope.amplitude), 0) / 1024) * maxVolume) | 0;
+    }
+
+    /**
+     * Calculates the value of an ADSR envelope at the given time for a given gate length.
+     *
+     * @param envelope The ADSR envelope
+     * @param time The point and time to calculate the value at
+     * @param gateLength The length of time that the "piano key" is held down in ms. The total duration
+     *      of the sound instructions will be longer than this if the amplitude envelope of the
+     *      instrument has a nonzero release time
+     */
+    function envelopeValueAtTime(envelope: Envelope, time: number, gateLength: number) {
+        // ADSR envelopes consist of 4 stages. They are (in order):
+        // 1. The attack stage, where the value starts at 0 and rises to the maximum value
+        // 2. The decay stage, where the value falls from the maximum value to the sustain value
+        // 3. The sustain stage, where the value holds steady at the sustain value until the gate length ends
+        // 4. The release stage, where the value falls to 0 after the gate length ends
+        // If the gate length ends before the sustain stage, we immediately skip to the release stage. All stages
+        // use a linear function for the value
+        const adjustedSustain = (envelope.sustain / 1024) * envelope.amplitude;
+
+        // First check to see if we are already in the release stage
+        if (time > gateLength) {
+            if (time - gateLength > envelope.release) return 0;
+
+            // Did the gate length end before the attack stage finished?
+            else if (time < envelope.attack) {
+                const height = (envelope.amplitude / envelope.attack) * gateLength;
+                return height - ((height / envelope.release) * (time - gateLength))
+            }
+            // Did the gate length end before the decay stage finished?
+            else if (time < envelope.attack + envelope.decay) {
+                const height2 = envelope.amplitude - ((envelope.amplitude - adjustedSustain) / envelope.decay) * (gateLength - envelope.attack);
+                return height2 - ((height2 / envelope.release) * (time - gateLength))
+            }
+            else {
+                return adjustedSustain - (adjustedSustain / envelope.release) * (time - gateLength)
+            }
+        }
+        else if (time < envelope.attack) {
+            return (envelope.amplitude / envelope.attack) * time
+        }
+        else if (time < envelope.attack + envelope.decay) {
+            return envelope.amplitude - ((envelope.amplitude - adjustedSustain) / envelope.decay) * (time - envelope.attack)
+        }
+        else {
+            return adjustedSustain;
+        }
+    }
+
+    /**
+     * Calculates the value of the LFO at the given time.
+     *
+     * TODO: might be nice to give options to shift the phase of the LFO or let it run free
+     *
+     * @param lfo The LFO to calculate the value of
+     * @param time The time to calculate the value at
+     */
+    function lfoValueAtTime(lfo: LFO, time: number) {
+        // Use cosine to smooth out the value somewhat
+        return Math.cos(((time / 1000) * lfo.frequency) * 2 * Math.PI) * lfo.amplitude
+    }
+}
```