@songram/songram-daw-engine 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2016 @@
1
+ import { BaseContext } from 'tone';
2
+ import { Context } from 'tone';
3
+ import { Gain } from 'tone';
4
+ import { Generator as Generator_2 } from 'soundfont2';
5
+ import { GeneratorType } from 'soundfont2';
6
+ import { SynthOptions } from 'tone';
7
+ import { ToneAudioNode } from 'tone';
8
+ import { Volume } from 'tone';
9
+ import { ZoneMap } from 'soundfont2';
10
+
11
+ /**
12
+ * An action control shown on annotation items (e.g., delete, split).
13
+ */
14
+ export declare interface AnnotationAction {
15
+ class?: string;
16
+ text?: string;
17
+ title: string;
18
+ action: (annotation: AnnotationData, index: number, annotations: AnnotationData[], opts: AnnotationActionOptions) => void;
19
+ }
20
+
21
+ /**
22
+ * Configuration options passed to annotation action handlers.
23
+ * Used by both browser and annotations packages.
24
+ */
25
+ export declare interface AnnotationActionOptions {
26
+ /** Whether annotation endpoints are linked (moving one endpoint moves the other) */
27
+ linkEndpoints?: boolean;
28
+ /** Whether to continue playing after an annotation ends */
29
+ continuousPlay?: boolean;
30
+ /** Additional custom properties */
31
+ [key: string]: unknown;
32
+ }
33
+
34
+ /**
35
+ * Shared annotation types used across Songram DAW modules
36
+ */
37
+ /**
38
+ * Base annotation data structure
39
+ */
40
+ export declare interface AnnotationData {
41
+ id: string;
42
+ start: number;
43
+ end: number;
44
+ lines: string[];
45
+ language?: string;
46
+ }
47
+
48
+ /**
49
+ * Event handlers for annotation operations
50
+ */
51
+ export declare interface AnnotationEventMap {
52
+ 'annotation-select': (annotation: AnnotationData) => void;
53
+ 'annotation-update': (annotation: AnnotationData) => void;
54
+ 'annotation-delete': (id: string) => void;
55
+ 'annotation-create': (annotation: AnnotationData) => void;
56
+ }
57
+
58
+ /**
59
+ * Annotation format definition for parsing/serializing
60
+ */
61
+ export declare interface AnnotationFormat {
62
+ name: string;
63
+ parse: (data: unknown) => AnnotationData[];
64
+ serialize: (annotations: AnnotationData[]) => unknown;
65
+ }
66
+
67
+ /**
68
+ * Options for annotation list behavior
69
+ */
70
+ export declare interface AnnotationListOptions {
71
+ editable?: boolean;
72
+ linkEndpoints?: boolean;
73
+ isContinuousPlay?: boolean;
74
+ }
75
+
76
+ /**
77
+ * Apply a fade in to an AudioParam
78
+ *
79
+ * @param param - The AudioParam to apply the fade to (usually gain)
80
+ * @param startTime - When the fade starts (in seconds, AudioContext time)
81
+ * @param duration - Duration of the fade in seconds
82
+ * @param type - Type of fade curve
83
+ * @param startValue - Starting value (default: 0)
84
+ * @param endValue - Ending value (default: 1)
85
+ */
86
+ export declare function applyFadeIn(param: AudioParam, startTime: number, duration: number, type?: FadeType, startValue?: number, endValue?: number): void;
87
+
88
+ /**
89
+ * Apply a fade out to an AudioParam
90
+ *
91
+ * @param param - The AudioParam to apply the fade to (usually gain)
92
+ * @param startTime - When the fade starts (in seconds, AudioContext time)
93
+ * @param duration - Duration of the fade in seconds
94
+ * @param type - Type of fade curve
95
+ * @param startValue - Starting value (default: 1)
96
+ * @param endValue - Ending value (default: 0)
97
+ */
98
+ export declare function applyFadeOut(param: AudioParam, startTime: number, duration: number, type?: FadeType, startValue?: number, endValue?: number): void;
99
+
100
+ export declare interface AssistantPlugin extends EnginePlugin {
101
+ kind: 'assistant';
102
+ capabilities?: string[];
103
+ }
104
+
105
+ export declare interface AssistantRequestEvent {
106
+ assistantId: string;
107
+ action: string;
108
+ payload?: unknown;
109
+ }
110
+
111
+ declare interface AudioBuffer_2 {
112
+ length: number;
113
+ duration: number;
114
+ numberOfChannels: number;
115
+ sampleRate: number;
116
+ getChannelData(channel: number): Float32Array;
117
+ }
118
+ export { AudioBuffer_2 as AudioBuffer }
119
+
120
+ /**
121
+ * Represents a single audio clip on the timeline
122
+ *
123
+ * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)
124
+ * to avoid floating-point precision errors. Convert to seconds only when
125
+ * needed for playback using: seconds = samples / sampleRate
126
+ *
127
+ * Clips can be created with just waveformData (for instant visual rendering)
128
+ * and have audioBuffer added later when audio finishes loading.
129
+ */
130
+ export declare interface AudioClip {
131
+ /** Unique identifier for this clip */
132
+ id: string;
133
+ /**
134
+ * The audio buffer containing the audio data.
135
+ * Optional for peaks-first rendering - can be added later.
136
+ * Required for playback and editing operations.
137
+ */
138
+ audioBuffer?: AudioBuffer;
139
+ /** Position on timeline where this clip starts (in samples at timeline sampleRate) */
140
+ startSample: number;
141
+ /** Duration of this clip (in samples) - how much of the audio buffer to play */
142
+ durationSamples: number;
143
+ /** Offset into the audio buffer where playback starts (in samples) - the "trim start" point */
144
+ offsetSamples: number;
145
+ /**
146
+ * Sample rate for this clip's audio.
147
+ * Required when audioBuffer is not provided (for peaks-first rendering).
148
+ * When audioBuffer is present, this should match audioBuffer.sampleRate.
149
+ */
150
+ sampleRate: number;
151
+ /**
152
+ * Total duration of the source audio in samples.
153
+ * Required when audioBuffer is not provided (for trim bounds calculation).
154
+ * When audioBuffer is present, this should equal audioBuffer.length.
155
+ */
156
+ sourceDurationSamples: number;
157
+ /** Optional fade in effect */
158
+ fadeIn?: Fade;
159
+ /** Optional fade out effect */
160
+ fadeOut?: Fade;
161
+ /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */
162
+ gain: number;
163
+ /** Optional label/name for this clip */
164
+ name?: string;
165
+ /** Optional color for visual distinction */
166
+ color?: string;
167
+ /**
168
+ * Pre-computed waveform data from waveform-data.js library.
169
+ * When provided, the library will use this instead of computing peaks from the audioBuffer.
170
+ * Supports resampling to different zoom levels and slicing for clip trimming.
171
+ * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`
172
+ */
173
+ waveformData?: WaveformDataObject;
174
+ /**
175
+ * MIDI note data — when present, this clip plays MIDI instead of audio.
176
+ * The playout adapter uses this field to detect MIDI clips and route them
177
+ * to MidiToneTrack (PolySynth) instead of ToneTrack (AudioBufferSourceNode).
178
+ */
179
+ midiNotes?: MidiNoteData[];
180
+ /** MIDI channel (0-indexed). Channel 9 = GM percussion. */
181
+ midiChannel?: number;
182
+ /** MIDI program number (0-127). GM instrument number for SoundFont playback. */
183
+ midiProgram?: number;
184
+ }
185
+
186
+ /**
187
+ * Bits type - number of bits for peak data
188
+ */
189
+ export declare type Bits = 8 | 16;
190
+
191
+ /**
192
+ * Calculate total timeline duration in seconds from all tracks/clips.
193
+ * Iterates all clips, finds the furthest clip end (startSample + durationSamples),
194
+ * converts to seconds using each clip's sampleRate.
195
+ *
196
+ * @param tracks - Array of clip tracks
197
+ * @returns Duration in seconds
198
+ */
199
+ export declare function calculateDuration(tracks: ClipTrack[]): number;
200
+
201
+ /**
202
+ * Calculate playback rate for a MIDI note using the SF2 generator chain.
203
+ *
204
+ * SF2 root key resolution priority:
205
+ * 1. OverridingRootKey generator (per-zone, most specific)
206
+ * 2. sample.header.originalPitch (sample header)
207
+ * 3. MIDI note 60 (middle C fallback)
208
+ *
209
+ * Tuning adjustments:
210
+ * - CoarseTune generator (semitones, additive)
211
+ * - FineTune generator (cents, additive)
212
+ * - sample.header.pitchCorrection (cents, additive)
213
+ */
214
+ export declare function calculatePlaybackRate(params: PlaybackRateParams): number;
215
+
216
+ /**
217
+ * Snap a split sample position to the nearest pixel boundary.
218
+ *
219
+ * @param splitSample - The sample position to snap
220
+ * @param samplesPerPixel - Current zoom level (samples per pixel)
221
+ * @returns Snapped sample position
222
+ */
223
+ export declare function calculateSplitPoint(splitSample: number, samplesPerPixel: number): number;
224
+
225
+ /**
226
+ * Viewport operations for virtual scrolling.
227
+ *
228
+ * Pure math helpers that determine which portion of the timeline
229
+ * is visible and which canvas chunks need to be mounted.
230
+ */
231
+ /**
232
+ * Calculate the visible region with an overscan buffer for virtual scrolling.
233
+ *
234
+ * The buffer extends the visible range on both sides so that chunks are
235
+ * mounted slightly before they scroll into view, preventing flicker.
236
+ *
237
+ * @param scrollLeft - Current horizontal scroll position in pixels
238
+ * @param containerWidth - Width of the scroll container in pixels
239
+ * @param bufferRatio - Multiplier for buffer size (default 1.5x container width)
240
+ * @returns Object with visibleStart and visibleEnd in pixels
241
+ */
242
+ export declare function calculateViewportBounds(scrollLeft: number, containerWidth: number, bufferRatio?: number): {
243
+ visibleStart: number;
244
+ visibleEnd: number;
245
+ };
246
+
247
+ /**
248
+ * Keep viewport centered during zoom changes.
249
+ * Calculates center time from old zoom, computes new pixel position at new zoom,
250
+ * and returns new scrollLeft clamped to >= 0.
251
+ *
252
+ * @param oldSamplesPerPixel - Previous zoom level
253
+ * @param newSamplesPerPixel - New zoom level
254
+ * @param scrollLeft - Current horizontal scroll position
255
+ * @param containerWidth - Viewport width in pixels
256
+ * @param sampleRate - Audio sample rate
257
+ * @param controlWidth - Width of track controls panel (defaults to 0)
258
+ * @returns New scrollLeft value
259
+ */
260
+ export declare function calculateZoomScrollPosition(oldSamplesPerPixel: number, newSamplesPerPixel: number, scrollLeft: number, containerWidth: number, sampleRate: number, controlWidth?: number): number;
261
+
262
+ /**
263
+ * Check whether a clip can be split at the given sample position.
264
+ *
265
+ * The split point must be strictly inside the clip (not at start or end),
266
+ * and both resulting clips must meet the minimum duration requirement.
267
+ *
268
+ * @param clip - The clip to check
269
+ * @param sample - The timeline sample position to test
270
+ * @param minDurationSamples - Minimum allowed clip duration in samples
271
+ * @returns true if the split is valid
272
+ */
273
+ export declare function canSplitAt(clip: AudioClip, sample: number, minDurationSamples: number): boolean;
274
+
275
+ /**
276
+ * Clamp a seek position to the valid range [0, duration].
277
+ *
278
+ * @param time - Requested seek time in seconds
279
+ * @param duration - Maximum duration in seconds
280
+ * @returns Clamped time value
281
+ */
282
+ export declare function clampSeekPosition(time: number, duration: number): number;
283
+
284
+ /** Clip duration in seconds */
285
+ export declare function clipDurationTime(clip: AudioClip): number;
286
+
287
+ /** Clip end position in seconds (start + duration) */
288
+ export declare function clipEndTime(clip: AudioClip): number;
289
+
290
+ declare interface ClipInfo {
291
+ buffer: AudioBuffer;
292
+ startTime: number;
293
+ duration: number;
294
+ offset: number;
295
+ fadeIn?: Fade;
296
+ fadeOut?: Fade;
297
+ gain: number;
298
+ }
299
+
300
+ /** Clip offset into source audio in seconds */
301
+ export declare function clipOffsetTime(clip: AudioClip): number;
302
+
303
+ /**
304
+ * Clip width in pixels at a given samplesPerPixel.
305
+ * Shared by Clip.tsx (container sizing) and ChannelWithProgress.tsx (progress overlay)
306
+ * to ensure pixel-perfect alignment. Floor-based endpoint subtraction guarantees
307
+ * adjacent clips have no pixel gaps.
308
+ */
309
+ export declare function clipPixelWidth(startSample: number, durationSamples: number, samplesPerPixel: number): number;
310
+
311
+ /**
312
+ * Utility: Check if two clips overlap
313
+ */
314
+ export declare function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean;
315
+
316
+ /** Clip start position in seconds */
317
+ export declare function clipStartTime(clip: AudioClip): number;
318
+
319
+ /**
320
+ * Represents a track containing multiple audio clips
321
+ */
322
+ export declare interface ClipTrack {
323
+ /** Unique identifier for this track */
324
+ id: string;
325
+ /** Display name for this track */
326
+ name: string;
327
+ /** Array of audio clips on this track */
328
+ clips: AudioClip[];
329
+ /** Whether this track is muted */
330
+ muted: boolean;
331
+ /** Whether this track is soloed */
332
+ soloed: boolean;
333
+ /** Track volume (0.0 to 1.0+) */
334
+ volume: number;
335
+ /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */
336
+ pan: number;
337
+ /** Optional track color for visual distinction */
338
+ color?: string;
339
+ /** Track height in pixels (for UI) */
340
+ height?: number;
341
+ /** Optional effects function for this track */
342
+ effects?: TrackEffectsFunction_2;
343
+ /** Visualization render mode. Default: 'waveform' */
344
+ renderMode?: RenderMode;
345
+ /** Per-track spectrogram configuration (FFT size, window, frequency scale, etc.) */
346
+ spectrogramConfig?: SpectrogramConfig;
347
+ /** Per-track spectrogram color map name or custom color array */
348
+ spectrogramColorMap?: ColorMapValue;
349
+ }
350
+
351
+ /**
352
+ * Close the global AudioContext
353
+ * Should only be called when the application is shutting down
354
+ */
355
+ export declare function closeGlobalAudioContext(): Promise<void>;
356
+
357
+ /** A single color map entry: [r, g, b] or [r, g, b, a] */
358
+ export declare type ColorMapEntry = [number, number, number] | [number, number, number, number];
359
+
360
+ /** Built-in color map names */
361
+ export declare type ColorMapName = 'viridis' | 'magma' | 'inferno' | 'grayscale' | 'igray' | 'roseus';
362
+
363
+ /** Color map can be a named preset or a custom array of [r, g, b, a?] entries */
364
+ export declare type ColorMapValue = ColorMapName | ColorMapEntry[];
365
+
366
+ /**
367
+ * Constrain boundary trim delta for left or right edge of a clip.
368
+ *
369
+ * LEFT boundary: delta moves the left edge (positive = shrink, negative = expand)
370
+ * - startSample += delta, offsetSamples += delta, durationSamples -= delta
371
+ *
372
+ * RIGHT boundary: delta applied to durationSamples (positive = expand, negative = shrink)
373
+ * - durationSamples += delta
374
+ *
375
+ * @param clip - The clip being trimmed
376
+ * @param deltaSamples - Requested trim delta in samples
377
+ * @param boundary - Which edge is being trimmed: 'left' or 'right'
378
+ * @param sortedClips - All clips on the track, sorted by startSample
379
+ * @param clipIndex - Index of the trimmed clip in sortedClips
380
+ * @param minDurationSamples - Minimum allowed clip duration in samples
381
+ * @returns Constrained delta
382
+ */
383
+ export declare function constrainBoundaryTrim(clip: AudioClip, deltaSamples: number, boundary: 'left' | 'right', sortedClips: AudioClip[], clipIndex: number, minDurationSamples: number): number;
384
+
385
+ /**
386
+ * Constrain clip movement delta to prevent overlaps with adjacent clips
387
+ * and going before sample 0.
388
+ *
389
+ * @param clip - The clip being dragged
390
+ * @param deltaSamples - Requested movement in samples (negative = left, positive = right)
391
+ * @param sortedClips - All clips on the track, sorted by startSample
392
+ * @param clipIndex - Index of the dragged clip in sortedClips
393
+ * @returns Constrained delta that prevents overlaps
394
+ */
395
+ export declare function constrainClipDrag(clip: AudioClip, deltaSamples: number, sortedClips: AudioClip[], clipIndex: number): number;
396
+
397
+ /**
398
+ * Creates a new AudioClip with sensible defaults (using sample counts)
399
+ *
400
+ * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:
401
+ * - Provided explicitly via options
402
+ * - Derived from waveformData (sample_rate and duration properties)
403
+ */
404
+ export declare function createClip(options: CreateClipOptions): AudioClip;
405
+
406
+ /**
407
+ * Creates a new AudioClip from time-based values (convenience function)
408
+ * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate
409
+ *
410
+ * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:
411
+ * - Provided explicitly via options
412
+ * - Derived from waveformData (sample_rate and duration properties)
413
+ */
414
+ export declare function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip;
415
+
416
+ /**
417
+ * Options for creating a new audio clip (using sample counts)
418
+ *
419
+ * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.
420
+ * Providing waveformData without audioBuffer enables peaks-first rendering.
421
+ */
422
+ export declare interface CreateClipOptions {
423
+ /** Audio buffer - optional for peaks-first rendering */
424
+ audioBuffer?: AudioBuffer;
425
+ startSample: number;
426
+ durationSamples?: number;
427
+ offsetSamples?: number;
428
+ gain?: number;
429
+ name?: string;
430
+ color?: string;
431
+ fadeIn?: Fade;
432
+ fadeOut?: Fade;
433
+ /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */
434
+ waveformData?: WaveformDataObject;
435
+ /** Sample rate - required if audioBuffer not provided */
436
+ sampleRate?: number;
437
+ /** Total source audio duration in samples - required if audioBuffer not provided */
438
+ sourceDurationSamples?: number;
439
+ /** MIDI note data — passed through to the created AudioClip */
440
+ midiNotes?: MidiNoteData[];
441
+ /** MIDI channel (0-indexed). Channel 9 = GM percussion. */
442
+ midiChannel?: number;
443
+ /** MIDI program number (0-127). GM instrument for SoundFont playback. */
444
+ midiProgram?: number;
445
+ }
446
+
447
+ /**
448
+ * Options for creating a new audio clip (using seconds for convenience)
449
+ *
450
+ * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.
451
+ * Providing waveformData without audioBuffer enables peaks-first rendering.
452
+ */
453
+ export declare interface CreateClipOptionsSeconds {
454
+ /** Audio buffer - optional for peaks-first rendering */
455
+ audioBuffer?: AudioBuffer;
456
+ startTime: number;
457
+ duration?: number;
458
+ offset?: number;
459
+ gain?: number;
460
+ name?: string;
461
+ color?: string;
462
+ fadeIn?: Fade;
463
+ fadeOut?: Fade;
464
+ /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */
465
+ waveformData?: WaveformDataObject;
466
+ /** Sample rate - required if audioBuffer not provided */
467
+ sampleRate?: number;
468
+ /** Total source audio duration in seconds - required if audioBuffer not provided */
469
+ sourceDuration?: number;
470
+ /** MIDI note data — passed through to the created AudioClip */
471
+ midiNotes?: MidiNoteData[];
472
+ /** MIDI channel (0-indexed). Channel 9 = GM percussion. */
473
+ midiChannel?: number;
474
+ /** MIDI program number (0-127). GM instrument for SoundFont playback. */
475
+ midiProgram?: number;
476
+ }
477
+
478
+ /**
479
+ * Create a control change message
480
+ */
481
+ export declare function createControlChange(channel: number, cc: number, value: number): number[];
482
+
483
+ /**
484
+ * Create a note off message
485
+ */
486
+ export declare function createNoteOff(channel: number, note: number, velocity?: number): number[];
487
+
488
+ /**
489
+ * Create a configured note on message
490
+ */
491
+ export declare function createNoteOn(channel: number, note: number, velocity: number): number[];
492
+
493
+ export declare function createSongramEngine(options?: SongramEngineOptions): SongramEngine;
494
+
495
+ /**
496
+ * Creates a new Timeline with sensible defaults
497
+ */
498
+ export declare function createTimeline(tracks: ClipTrack[], sampleRate?: number, options?: {
499
+ name?: string;
500
+ tempo?: number;
501
+ timeSignature?: {
502
+ numerator: number;
503
+ denominator: number;
504
+ };
505
+ }): Timeline;
506
+
507
+ export declare function createToneAdapter(options?: ToneAdapterOptions): PlayoutAdapter;
508
+
509
+ /**
510
+ * Creates a new ClipTrack with sensible defaults
511
+ */
512
+ export declare function createTrack(options: CreateTrackOptions): ClipTrack;
513
+
514
+ /**
515
+ * Options for creating a new track
516
+ */
517
+ export declare interface CreateTrackOptions {
518
+ name: string;
519
+ clips?: AudioClip[];
520
+ muted?: boolean;
521
+ soloed?: boolean;
522
+ volume?: number;
523
+ pan?: number;
524
+ color?: string;
525
+ height?: number;
526
+ spectrogramConfig?: SpectrogramConfig;
527
+ spectrogramColorMap?: ColorMapValue;
528
+ }
529
+
530
+ /**
531
+ * Convert a dB value to a normalized range.
532
+ *
533
+ * Maps dB values linearly: floor → 0, 0 dB → 1.
534
+ * Values above 0 dB map to > 1 (e.g., +5 dB → 1.05 with default floor).
535
+ *
536
+ * @param dB - Decibel value (typically -Infinity to +5)
537
+ * @param floor - Minimum dB value mapped to 0. Default: -100 (Firefox compat)
538
+ * @returns Normalized value (0 at floor, 1 at 0 dB, >1 above 0 dB)
539
+ */
540
+ export declare function dBToNormalized(dB: number, floor?: number): number;
541
+
542
+ export declare interface EffectPlugin extends EnginePlugin {
543
+ kind: 'effect';
544
+ }
545
+
546
+ export declare type EffectsFunction = (masterGainNode: Volume, destination: ToneAudioNode, isOffline: boolean) => void | (() => void);
547
+
548
+ /**
549
+ * Events emitted by PlaylistEngine.
550
+ */
551
+ export declare interface EngineEvents {
552
+ statechange: (state: EngineState) => void;
553
+ timeupdate: (time: number) => void;
554
+ play: () => void;
555
+ pause: () => void;
556
+ stop: () => void;
557
+ }
558
+
559
+ export declare interface EnginePlugin {
560
+ id: string;
561
+ kind?: SongramPluginKind;
562
+ setup?: (context: SongramPluginContext) => void | (() => void | Promise<void>) | Promise<void>;
563
+ }
564
+
565
+ /**
566
+ * Snapshot of playlist engine state, emitted on every state change.
567
+ */
568
+ export declare interface EngineState {
569
+ tracks: ClipTrack[];
570
+ /** Monotonic counter incremented on any tracks mutation (setTracks, addTrack, removeTrack, moveClip, trimClip, splitClip). */
571
+ tracksVersion: number;
572
+ duration: number;
573
+ currentTime: number;
574
+ isPlaying: boolean;
575
+ samplesPerPixel: number;
576
+ sampleRate: number;
577
+ selectedTrackId: string | null;
578
+ zoomIndex: number;
579
+ canZoomIn: boolean;
580
+ canZoomOut: boolean;
581
+ /** Start of the audio selection range. Guaranteed: selectionStart <= selectionEnd. */
582
+ selectionStart: number;
583
+ /** End of the audio selection range. Guaranteed: selectionStart <= selectionEnd. */
584
+ selectionEnd: number;
585
+ /** Master output volume, 0.0–1.0. */
586
+ masterVolume: number;
587
+ /** Start of the loop region. Guaranteed: loopStart <= loopEnd. */
588
+ loopStart: number;
589
+ /** End of the loop region. Guaranteed: loopStart <= loopEnd. */
590
+ loopEnd: number;
591
+ /** Whether loop playback is active. */
592
+ isLoopEnabled: boolean;
593
+ }
594
+
595
+ export declare class EventBus<TEvents extends object> {
596
+ private readonly emitter;
597
+ on<K extends keyof TEvents>(event: K, listener: TEvents[K]): void;
598
+ off<K extends keyof TEvents>(event: K, listener: TEvents[K]): void;
599
+ once<K extends keyof TEvents>(event: K, listener: TEvents[K]): void;
600
+ emit<K extends keyof TEvents>(event: K, ...args: TEvents[K] extends (...params: infer TArgs) => void ? TArgs : never): void;
601
+ removeAllListeners(): void;
602
+ }
603
+
604
+ declare type EventName = keyof EngineEvents;
605
+
606
+ /**
607
+ * Extract loop points and volume envelope data from per-zone generators.
608
+ *
609
+ * Loop points are stored as absolute indices into the SF2 sample pool.
610
+ * We convert to AudioBuffer-relative seconds by subtracting header.start
611
+ * and dividing by sampleRate.
612
+ *
613
+ * Volume envelope times are in SF2 timecents; sustain is centibels attenuation.
614
+ */
615
+ export declare function extractLoopAndEnvelope(params: LoopAndEnvelopeParams): Omit<SoundFontSample, 'buffer' | 'playbackRate'>;
616
+
617
+ /**
618
+ * Simple fade configuration
619
+ */
620
+ export declare interface Fade {
621
+ /** Duration of the fade in seconds */
622
+ duration: number;
623
+ /** Type of fade curve (default: 'linear') */
624
+ type?: FadeType_2;
625
+ }
626
+
627
+ /**
628
+ * Simple fade configuration - just duration and type
629
+ */
630
+ export declare interface FadeConfig {
631
+ /** Duration of the fade in seconds */
632
+ duration: number;
633
+ /** Type of fade curve (default: 'linear') */
634
+ type?: FadeType;
635
+ }
636
+
637
+ export declare type FadeType = 'linear' | 'logarithmic' | 'exponential' | 'sCurve';
638
+
639
+ declare type FadeType_2 = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';
640
+
641
+ /**
642
+ * Spectrogram Types
643
+ *
644
+ * Types for frequency-domain visualization of audio data.
645
+ */
646
+ /** Valid FFT sizes (must be power of 2, 256–8192) */
647
+ export declare type FFTSize = 256 | 512 | 1024 | 2048 | 4096 | 8192;
648
+
649
+ /**
650
+ * Find the zoom level index closest to a given samplesPerPixel.
651
+ * Returns exact match if found, otherwise the index whose value is
652
+ * nearest to the target (by absolute difference).
653
+ *
654
+ * @param targetSamplesPerPixel - The samplesPerPixel value to find
655
+ * @param zoomLevels - Array of available zoom levels (samplesPerPixel values)
656
+ * @returns Index into the zoomLevels array
657
+ */
658
+ export declare function findClosestZoomIndex(targetSamplesPerPixel: number, zoomLevels: number[]): number;
659
+
660
+ export declare function findGaps(track: ClipTrack): Gap[];
661
+
662
+ /**
663
+ * Convert a linear gain value (0-1+) to normalized 0-1 via dB.
664
+ *
665
+ * Combines gain-to-dB (20 * log10) with dBToNormalized for a consistent
666
+ * mapping from raw AudioWorklet peak/RMS values to the 0-1 range used
667
+ * by UI meter components.
668
+ *
669
+ * @param gain - Linear gain value (typically 0 to 1, can exceed 1)
670
+ * @param floor - Minimum dB value mapped to 0. Default: -100
671
+ * @returns Normalized value (0 at silence/floor, 1 at 0 dB, >1 above 0 dB)
672
+ */
673
+ export declare function gainToNormalized(gain: number, floor?: number): number;
674
+
675
+ /**
676
+ * Utility: Find gaps between clips (silent regions)
677
+ */
678
+ export declare interface Gap {
679
+ startSample: number;
680
+ endSample: number;
681
+ durationSamples: number;
682
+ }
683
+
684
+ /**
685
+ * Utility: Get all clips at a specific sample position
686
+ */
687
+ export declare function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[];
688
+
689
+ /**
690
+ * Utility: Get all clips within a sample range
691
+ */
692
+ export declare function getClipsInRange(track: ClipTrack, startSample: number, endSample: number): AudioClip[];
693
+
694
+ /**
695
+ * Get a numeric generator value from a zone map.
696
+ */
697
+ export declare function getGeneratorValue(generators: ZoneMap<Generator_2>, type: GeneratorType): number | undefined;
698
+
699
+ /**
700
+ * Get or create the global AudioContext
701
+ * Uses Tone.js Context for cross-browser compatibility
702
+ * @returns The global AudioContext instance (rawContext from Tone.Context)
703
+ */
704
+ export declare function getGlobalAudioContext(): AudioContext;
705
+
706
+ /**
707
+ * Get the current state of the global AudioContext
708
+ * @returns The AudioContext state ('suspended', 'running', or 'closed')
709
+ */
710
+ export declare function getGlobalAudioContextState(): AudioContextState;
711
+
712
+ /**
713
+ * Get the global Tone.js Context
714
+ * This is the main context for cross-browser audio operations.
715
+ * Use context.createAudioWorkletNode(), context.createMediaStreamSource(), etc.
716
+ * @returns The Tone.js Context instance
717
+ */
718
+ export declare function getGlobalContext(): Context;
719
+
720
+ /**
721
+ * @deprecated Use getGlobalContext() instead
722
+ * Get the Tone.js Context's rawContext typed as IAudioContext
723
+ * @returns The rawContext cast as IAudioContext
724
+ */
725
+ export declare function getGlobalToneContext(): Context;
726
+
727
+ /**
728
+ * Get GM instrument name from program number
729
+ */
730
+ export declare function getInstrumentName(programNumber: number): string;
731
+
732
+ /**
733
+ * MediaStreamSource Manager
734
+ *
735
+ * Manages MediaStreamAudioSourceNode instances to ensure only one source
736
+ * is created per MediaStream per AudioContext.
737
+ *
738
+ * Web Audio API constraint: You can only create one MediaStreamAudioSourceNode
739
+ * per MediaStream per AudioContext. Multiple attempts will fail or disconnect
740
+ * previous sources.
741
+ *
742
+ * This manager ensures a single source is shared across multiple consumers
743
+ * (e.g., AnalyserNode for VU meter, AudioWorkletNode for recording).
744
+ *
745
+ * NOTE: With Tone.js Context, you can also use context.createMediaStreamSource()
746
+ * directly, which handles cross-browser compatibility internally.
747
+ */
748
+ /**
749
+ * Get or create a MediaStreamAudioSourceNode for the given stream
750
+ *
751
+ * @param stream - The MediaStream to create a source for
752
+ * @returns MediaStreamAudioSourceNode that can be connected to multiple nodes
753
+ *
754
+ * @example
755
+ * ```typescript
756
+ * const source = getMediaStreamSource(stream);
757
+ *
758
+ * // Multiple consumers can connect to the same source
759
+ * source.connect(analyserNode); // For VU meter
760
+ * source.connect(workletNode); // For recording
761
+ * ```
762
+ */
763
+ export declare function getMediaStreamSource(stream: MediaStream): MediaStreamAudioSourceNode;
764
+
765
+ /**
766
+ * Get list of connected MIDI input devices
767
+ */
768
+ export declare function getMidiInputs(access: MIDIAccess): MidiInputDevice[];
769
+
770
+ /**
771
+ * Get list of connected MIDI output devices
772
+ */
773
+ export declare function getMidiOutputs(access: MIDIAccess): MidiOutputDevice[];
774
+
775
+ /**
776
+ * Fade utilities for Web Audio API
777
+ *
778
+ * Applies fade in/out envelopes to AudioParam (typically gain)
779
+ * using various curve types.
780
+ */
781
+ export declare function getUnderlyingAudioParam(signal: unknown): AudioParam | undefined;
782
+
783
+ /**
784
+ * Get an array of chunk indices that overlap the visible viewport.
785
+ *
786
+ * Chunks are fixed-width segments of the total timeline width. Only chunks
787
+ * that intersect [visibleStart, visibleEnd) are included. The last chunk
788
+ * may be narrower than chunkWidth if totalWidth is not evenly divisible.
789
+ *
790
+ * @param totalWidth - Total width of the timeline in pixels
791
+ * @param chunkWidth - Width of each chunk in pixels
792
+ * @param visibleStart - Left edge of the visible region in pixels
793
+ * @param visibleEnd - Right edge of the visible region in pixels
794
+ * @returns Array of chunk indices (0-based) that are visible
795
+ */
796
+ export declare function getVisibleChunkIndices(totalWidth: number, chunkWidth: number, visibleStart: number, visibleEnd: number): number[];
797
+
798
+ /**
799
+ * Check if a MediaStreamSource exists for the given stream
800
+ *
801
+ * @param stream - The MediaStream to check
802
+ * @returns true if a source exists for this stream
803
+ */
804
+ export declare function hasMediaStreamSource(stream: MediaStream): boolean;
805
+
806
+ /**
+  * Engine plugin that contributes an instrument.
+  * Narrowed from EnginePlugin by the `kind: 'instrument'` discriminant
+  * (see SongramPluginKind).
+  */
+ export declare interface InstrumentPlugin extends EnginePlugin {
807
+ /** Discriminant marking this plugin as an instrument */
+ kind: 'instrument';
808
+ }
809
+
810
+ /**
811
+ * Convert Int16Array sample data to Float32Array.
812
+ * SF2 samples are 16-bit signed integers; Web Audio needs Float32 [-1, 1].
813
+ */
814
+ export declare function int16ToFloat32(samples: Int16Array): Float32Array;
815
+
816
+ /**
+  * Active pointer-interaction mode for the timeline/waveform UI:
+  * plain cursor, region selection, clip shifting, or fade-in/fade-out
+  * handle editing.
+  * NOTE(review): exact per-mode behavior is defined by the consuming
+  * UI package — confirm there.
+  */
+ export declare enum InteractionState {
817
+ Cursor = "cursor",
818
+ Select = "select",
819
+ Shift = "shift",
820
+ FadeIn = "fadein",
821
+ FadeOut = "fadeout"
822
+ }
823
+
824
+ /**
825
+ * Check if Web MIDI API is supported
826
+ */
827
+ export declare function isWebMidiSupported(): boolean;
828
+
829
+ /**
830
+ * Input parameters for loop and envelope extraction.
831
+ */
832
+ export declare interface LoopAndEnvelopeParams {
833
+ /** SF2 generators zone map */
834
+ generators: ZoneMap<Generator_2>;
835
+ /** Sample header with loop points and sample rate */
836
+ header: {
837
+ startLoop: number;
838
+ endLoop: number;
839
+ sampleRate: number;
840
+ };
841
+ }
842
+
843
+ /**
844
+ * Maximum width in CSS pixels for a single canvas chunk.
845
+ * Canvas elements are split into chunks of this width to enable
846
+ * horizontal virtual scrolling — only visible chunks are mounted.
847
+ */
848
+ export declare const MAX_CANVAS_WIDTH = 1000;
849
+
850
+ /**
+  * Commonly used MIDI Control Change (CC) controller numbers,
+  * as defined by the MIDI 1.0 specification (e.g. CC 7 = channel
+  * volume, CC 64 = sustain pedal, CC 120-123 = channel mode messages).
+  */
+ export declare const MIDI_CC: {
851
+ readonly MODULATION: 1;
852
+ readonly BREATH: 2;
853
+ readonly VOLUME: 7;
854
+ readonly PAN: 10;
855
+ readonly EXPRESSION: 11;
856
+ readonly SUSTAIN: 64;
857
+ readonly PORTAMENTO: 65;
858
+ readonly SOSTENUTO: 66;
859
+ readonly SOFT_PEDAL: 67;
860
+ readonly ALL_SOUND_OFF: 120;
861
+ readonly RESET_ALL: 121;
862
+ readonly ALL_NOTES_OFF: 123;
863
+ };
864
+
865
+ /**
+  * MIDI channel-voice message types — the high nibble of the status
+  * byte per the MIDI 1.0 specification (e.g. 0x9 = Note On,
+  * 0xB = Control Change, 0xE = Pitch Bend).
+  */
+ export declare const MIDI_COMMANDS: {
866
+ readonly NOTE_OFF: 8;
867
+ readonly NOTE_ON: 9;
868
+ readonly POLY_AFTERTOUCH: 10;
869
+ readonly CONTROL_CHANGE: 11;
870
+ readonly PROGRAM_CHANGE: 12;
871
+ readonly CHANNEL_AFTERTOUCH: 13;
872
+ readonly PITCH_BEND: 14;
873
+ };
874
+
875
+ /**
+  * A MIDI clip placed on the playout timeline.
+  * NOTE(review): time fields are presumed to be in seconds (MidiNoteData
+  * times are documented in seconds) — confirm against the playout adapter.
+  */
+ export declare interface MidiClipInfo {
876
+ /** Notes in the clip; note times are relative to clip start */
+ notes: MidiNoteData[];
877
+ /** Clip start position on the timeline */
+ startTime: number;
878
+ /** Clip length */
+ duration: number;
879
+ /** Presumably the trim offset into the clip's source material — confirm */
+ offset: number;
880
+ }
881
+
882
+ /**
883
+ * MIDI Controller Manager - handles connections and message routing
884
+ */
885
+ export declare class MidiController {
886
+ private access;
887
+ private activeInputId;
888
+ private messageCallbacks;
889
+ private stateChangeCallbacks;
890
+ /**
891
+ * Initialize MIDI access
892
+ */
893
+ init(sysex?: boolean): Promise<boolean>;
894
+ /**
895
+ * Check if initialized
896
+ */
897
+ get isInitialized(): boolean;
898
+ /**
899
+ * Get available input devices
900
+ */
901
+ getInputs(): MidiInputDevice[];
902
+ /**
903
+ * Get available output devices
904
+ */
905
+ getOutputs(): MidiOutputDevice[];
906
+ /**
907
+ * Connect to a specific MIDI input
908
+ */
909
+ connectInput(deviceId: string): boolean;
910
+ /**
911
+ * Connect to the first available MIDI input
912
+ */
913
+ connectFirstInput(): boolean;
914
+ /**
915
+ * Disconnect from current MIDI input
916
+ */
917
+ disconnectInput(): void;
918
+ /**
919
+ * Get the currently connected input ID
920
+ */
921
+ get connectedInputId(): string | null;
922
+ /**
923
+ * Subscribe to MIDI messages
924
+ */
925
+ onMessage(callback: MidiMessageCallback): () => void;
926
+ /**
927
+ * Subscribe to device state changes (connect/disconnect)
928
+ */
929
+ onStateChange(callback: () => void): () => void;
930
+ /**
931
+ * Send MIDI message to an output device
932
+ */
933
+ sendMessage(deviceId: string, data: number[]): boolean;
934
+ /**
935
+ * Clean up resources
936
+ */
937
+ dispose(): void;
938
+ }
939
+
940
+ /**
941
+ * A MIDI input device as reported by the Web MIDI API, reduced to the
942
+ * fields the engine uses. Returned by getMidiInputs/MidiController.getInputs.
943
+ *
944
+ * NOTE(review): the previous "Web MIDI API utilities" text was a module
+ * header hoisted onto this symbol by the declaration bundler.
945
+ */
946
+ export declare interface MidiInputDevice {
947
+ id: string;
948
+ name: string;
949
+ manufacturer: string;
950
+ state: 'connected' | 'disconnected';
951
+ }
952
+
953
+ export declare interface MidiMessage {
954
+ /** MIDI command (note on, note off, CC, etc.) */
955
+ command: number;
956
+ /** MIDI channel (0-15) */
957
+ channel: number;
958
+ /** Note number or CC number */
959
+ data1: number;
960
+ /** Velocity or CC value */
961
+ data2: number;
962
+ /** Raw MIDI bytes */
963
+ raw: Uint8Array;
964
+ /** Timestamp */
965
+ timestamp: number;
966
+ }
967
+
968
+ export declare type MidiMessageCallback = (message: MidiMessage) => void;
969
+
970
+ /**
971
+ * MIDI note data for clips that play MIDI instead of audio.
972
+ * When present on an AudioClip, the clip is treated as a MIDI clip
973
+ * by the playout adapter.
974
+ */
975
+ export declare interface MidiNoteData {
976
+ /** MIDI note number (0-127) */
977
+ midi: number;
978
+ /** Note name in scientific pitch notation ("C4", "G#3") */
979
+ name: string;
980
+ /** Start time in seconds, relative to clip start */
981
+ time: number;
982
+ /** Duration in seconds */
983
+ duration: number;
984
+ /** Velocity (0-1 normalized) */
985
+ velocity: number;
986
+ /** MIDI channel (0-indexed). Channel 9 = GM percussion. Enables per-note routing in flattened tracks. */
987
+ channel?: number;
988
+ }
989
+
990
+ /**
991
+ * MIDI note data for clips that play MIDI instead of audio.
992
+ * Matches the MidiNoteData interface from songram-daw core types.
993
+ */
994
+ declare interface MidiNoteData_2 {
995
+ /** MIDI note number (0-127) */
996
+ midi: number;
997
+ /** Note name in scientific pitch notation ("C4", "G#3") */
998
+ name: string;
999
+ /** Start time in seconds, relative to clip start */
1000
+ time: number;
1001
+ /** Duration in seconds */
1002
+ duration: number;
1003
+ /** Velocity (0-1 normalized) */
1004
+ velocity: number;
1005
+ /** MIDI channel (0-indexed). Channel 9 = GM percussion. */
1006
+ channel?: number;
1007
+ }
1008
+
1009
+ /**
1010
+ * Convert MIDI note number to note name with octave
1011
+ * @param midiNote - MIDI note number (0-127)
1012
+ * @returns Note name like "C4", "F#3", "Bb5"
1013
+ */
1014
+ export declare function midiNoteToName(midiNote: number): string;
1015
+
1016
+ /**
+  * A MIDI output device as reported by the Web MIDI API.
+  * Mirrors MidiInputDevice; returned by getMidiOutputs and
+  * MidiController.getOutputs, and addressed by `id` in sendMessage.
+  */
+ export declare interface MidiOutputDevice {
1017
+ id: string;
1018
+ name: string;
1019
+ manufacturer: string;
1020
+ state: 'connected' | 'disconnected';
1021
+ }
1022
+
1023
+ /**
1024
+ * MIDI track that always creates both melodic and percussion synths.
1025
+ * Per-note routing uses the `channel` field on each MidiNoteData:
1026
+ * channel 9 → percussion synths, all others → melodic PolySynth.
1027
+ * This enables flattened tracks (mixed channels) to play correctly.
1028
+ */
1029
+ export declare class MidiToneTrack implements PlayableTrack {
1030
+ private scheduledClips;
1031
+ private synth;
1032
+ private kickSynth;
1033
+ private snareSynth;
1034
+ private cymbalSynth;
1035
+ private tomSynth;
1036
+ private volumeNode;
1037
+ private panNode;
1038
+ private muteGain;
1039
+ private track;
1040
+ private effectsCleanup?;
1041
+ constructor(options: MidiToneTrackOptions);
1042
+ /**
1043
+ * Trigger a note using the appropriate synth.
1044
+ * Routes per-note: channel 9 → percussion synths, others → melodic PolySynth.
1045
+ */
1046
+ private triggerNote;
1047
+ private gainToDb;
1048
+ /**
1049
+ * No-op for MIDI — schedule guard is for AudioBufferSourceNode ghost tick prevention.
1050
+ * Tone.Part handles its own scheduling relative to Transport.
1051
+ */
1052
+ setScheduleGuardOffset(_offset: number): void;
1053
+ /**
1054
+ * For MIDI, mid-clip sources are notes that should already be sounding.
1055
+ * We trigger them with their remaining duration.
1056
+ */
1057
+ startMidClipSources(transportOffset: number, audioContextTime: number): void;
1058
+ /**
1059
+ * Stop all sounding notes and cancel scheduled Part events.
1060
+ */
1061
+ stopAllSources(): void;
1062
+ /**
1063
+ * No-op for MIDI — MIDI uses note velocity, not gain fades.
1064
+ */
1065
+ prepareFades(_when: number, _offset: number): void;
1066
+ /**
1067
+ * No-op for MIDI — no fade automation to cancel.
1068
+ */
1069
+ cancelFades(): void;
1070
+ setVolume(gain: number): void;
1071
+ setPan(pan: number): void;
1072
+ setMute(muted: boolean): void;
1073
+ setSolo(soloed: boolean): void;
1074
+ dispose(): void;
1075
+ get id(): string;
1076
+ get duration(): number;
1077
+ get muted(): boolean;
1078
+ get startTime(): number;
1079
+ }
1080
+
1081
+ /**
+  * Constructor options for MidiToneTrack.
+  */
+ export declare interface MidiToneTrackOptions {
1082
+ /** MIDI clips to schedule on this track */
+ clips: MidiClipInfo[];
1083
+ /** Track model this playable track represents */
+ track: Track;
1084
+ /** Optional effects-chain factory — presumably returns the cleanup stored as effectsCleanup; confirm */
+ effects?: TrackEffectsFunction;
1085
+ /** Optional output node to connect the track chain to */
+ destination?: ToneAudioNode;
1086
+ /** Overrides for the melodic PolySynth voice options */
+ synthOptions?: Partial<SynthOptions>;
1087
+ }
1088
+
1089
+ /**
1090
+ * Convert a normalized value back to dB.
1091
+ *
1092
+ * Maps linearly: 0 → floor, 1 → 0 dB.
1093
+ * Values above 1 map to positive dB (e.g., 1.05 → +5 dB with default floor).
1094
+ *
1095
+ * @param normalized - Normalized value (0 = floor, 1 = 0 dB)
1096
+ * @param floor - Minimum dB value (maps from 0). Must be negative. Default: -100
1097
+ * @returns dB value (floor at 0, 0 dB at 1, positive dB above 1)
1098
+ */
1099
+ export declare function normalizedToDb(normalized: number, floor?: number): number;
1100
+
1101
+ /**
1102
+ * Convert note name to MIDI note number
1103
+ * @param name - Note name like "C4", "F#3", "Bb5"
1104
+ * @returns MIDI note number (0-127)
1105
+ */
1106
+ export declare function noteNameToMidi(name: string): number;
1107
+
1108
+ /**
1109
+ * Complete parsed MIDI file result
1110
+ */
1111
+ export declare interface ParsedMidi {
1112
+ /** Parsed tracks (one per channel or merged if flatten=true) */
1113
+ tracks: ParsedMidiTrack[];
1114
+ /** Total duration in seconds */
1115
+ duration: number;
1116
+ /** Song name from MIDI header */
1117
+ name: string;
1118
+ /** Tempo in BPM (from first tempo event, default 120) */
1119
+ bpm: number;
1120
+ /** Time signature [numerator, denominator] */
1121
+ timeSignature: [number, number];
1122
+ }
1123
+
1124
+ /**
1125
+ * Parsed MIDI track with notes and metadata
1126
+ */
1127
+ export declare interface ParsedMidiTrack {
1128
+ /** Track name from MIDI file or generated */
1129
+ name: string;
1130
+ /** Notes in MidiNoteData format */
1131
+ notes: MidiNoteData_2[];
1132
+ /** Duration in seconds */
1133
+ duration: number;
1134
+ /** MIDI channel (0-indexed, 9 = percussion) */
1135
+ channel: number;
1136
+ /** GM instrument name */
1137
+ instrument: string;
1138
+ /** GM program number (0-127) */
1139
+ programNumber: number;
1140
+ }
1141
+
1142
+ /**
1143
+ * Parse a MIDI file from an ArrayBuffer
1144
+ *
1145
+ * @param data - MIDI file as ArrayBuffer
1146
+ * @param options - Parsing options
1147
+ * @returns Parsed MIDI data with tracks and metadata
1148
+ *
1149
+ * @example
1150
+ * ```typescript
1151
+ * const response = await fetch('/song.mid');
1152
+ * const buffer = await response.arrayBuffer();
1153
+ * const midi = parseMidiFile(buffer);
1154
+ * console.log(midi.tracks.length, 'tracks');
1155
+ * ```
1156
+ */
1157
+ export declare function parseMidiFile(data: ArrayBuffer, options?: ParseMidiOptions): ParsedMidi;
1158
+
1159
+ /**
1160
+ * Parse a raw MIDI message into structured data
1161
+ */
1162
+ export declare function parseMidiMessage(data: Uint8Array, timestamp: number): MidiMessage;
1163
+
1164
+ export declare interface ParseMidiOptions {
1165
+ /** Merge all MIDI tracks into one (default: false) */
1166
+ flatten?: boolean;
1167
+ }
1168
+
1169
+ /**
1170
+ * Fetch and parse a MIDI file from a URL
1171
+ *
1172
+ * @param url - URL to MIDI file
1173
+ * @param options - Parsing options
1174
+ * @param signal - Optional AbortSignal for cancellation
1175
+ * @returns Promise resolving to parsed MIDI data
1176
+ *
1177
+ * @example
1178
+ * ```typescript
1179
+ * const midi = await parseMidiUrl('/songs/demo.mid');
1180
+ * for (const track of midi.tracks) {
1181
+ * console.log(track.name, track.notes.length, 'notes');
1182
+ * }
1183
+ * ```
1184
+ */
1185
+ export declare function parseMidiUrl(url: string, options?: ParseMidiOptions, signal?: AbortSignal): Promise<ParsedMidi>;
1186
+
1187
+ /**
1188
+ * PeakData - result of peak extraction
1189
+ */
1190
+ export declare interface PeakData {
1191
+ /** Number of peak pairs extracted */
1192
+ length: number;
1193
+ /** Array of peak data for each channel (interleaved min/max) */
1194
+ data: Peaks[];
1195
+ /** Bit depth of peak data */
1196
+ bits: Bits;
1197
+ }
1198
+
1199
+ /**
1200
+ * Peaks type - represents a typed array of interleaved min/max peak data
1201
+ */
1202
+ export declare type Peaks = Int8Array | Int16Array;
1203
+
1204
+ /** Convert a pixel distance to a sample count using the samples-per-pixel zoom factor. */
+ export declare function pixelsToSamples(pixels: number, samplesPerPixel: number): number;
1205
+
1206
+ /** Convert a pixel distance to seconds using the zoom factor and sample rate. */
+ export declare function pixelsToSeconds(pixels: number, samplesPerPixel: number, sampleRate: number): number;
1207
+
1208
+ /**
1209
+ * Shared interface for tracks managed by TonePlayout.
1210
+ * Both ToneTrack (audio) and MidiToneTrack (MIDI) implement this,
1211
+ * allowing TonePlayout to manage them uniformly.
1212
+ */
1213
+ export declare interface PlayableTrack {
1214
+ id: string;
1215
+ startTime: number;
1216
+ muted: boolean;
1217
+ duration: number;
1218
+ stopAllSources(): void;
1219
+ startMidClipSources(offset: number, time: number): void;
1220
+ setScheduleGuardOffset(offset: number): void;
1221
+ prepareFades(when: number, offset: number): void;
1222
+ cancelFades(): void;
1223
+ setVolume(gain: number): void;
1224
+ setPan(pan: number): void;
1225
+ setMute(muted: boolean): void;
1226
+ setSolo(soloed: boolean): void;
1227
+ dispose(): void;
1228
+ }
1229
+
1230
+ /**
1231
+ * Input parameters for playback rate calculation.
1232
+ */
1233
+ export declare interface PlaybackRateParams {
1234
+ /** Target MIDI note number (0-127) */
1235
+ midiNote: number;
1236
+ /** OverridingRootKey generator value, or undefined if not set */
1237
+ overrideRootKey: number | undefined;
1238
+ /** sample.header.originalPitch (255 means unpitched) */
1239
+ originalPitch: number;
1240
+ /** CoarseTune generator value in semitones (default 0) */
1241
+ coarseTune: number;
1242
+ /** FineTune generator value in cents (default 0) */
1243
+ fineTune: number;
1244
+ /** sample.header.pitchCorrection in cents (default 0) */
1245
+ pitchCorrection: number;
1246
+ }
1247
+
1248
+ /**
+  * Visual and behavioral configuration for a playlist UI instance
+  * (zoom, waveform colors, control strip, timescale).
+  * NOTE(review): consumed by the rendering layer — confirm field
+  * semantics against the browser package.
+  */
+ export declare interface PlaylistConfig {
1249
+ /** Initial zoom level (samples rendered per CSS pixel) */
+ samplesPerPixel?: number;
1250
+ /** Height of each rendered waveform in pixels */
+ waveHeight?: number;
1251
+ /** DOM element to mount the playlist into */
+ container?: HTMLElement;
1252
+ /** Auto-scroll the viewport to follow the playhead */
+ isAutomaticScroll?: boolean;
1253
+ /** Show the timescale ruler */
+ timescale?: boolean;
1254
+ colors?: {
1255
+ waveOutlineColor?: string;
1256
+ waveFillColor?: string;
1257
+ waveProgressColor?: string;
1258
+ };
1259
+ controls?: {
1260
+ show?: boolean;
1261
+ width?: number;
1262
+ };
1263
+ /** Discrete zoom steps (samples per pixel) used by zoomIn/zoomOut */
+ zoomLevels?: number[];
1264
+ }
1265
+
1266
+ /**
+  * Headless multitrack playlist engine: owns track/clip state, selection,
+  * loop region, zoom, and transport, and delegates actual audio playback
+  * to a pluggable PlayoutAdapter. Emits EngineEvents via on/off.
+  * Call init() before playback; call dispose() when finished.
+  */
+ export declare class PlaylistEngine {
1267
+ private _tracks;
1268
+ private _currentTime;
1269
+ private _playStartPosition;
1270
+ private _isPlaying;
1271
+ private _selectedTrackId;
1272
+ private _sampleRate;
1273
+ private _zoomLevels;
1274
+ private _zoomIndex;
1275
+ private _selectionStart;
1276
+ private _selectionEnd;
1277
+ private _masterVolume;
1278
+ private _loopStart;
1279
+ private _loopEnd;
1280
+ private _isLoopEnabled;
1281
+ private _tracksVersion;
1282
+ private _adapter;
1283
+ private _animFrameId;
1284
+ private _disposed;
1285
+ private _listeners;
1286
+ constructor(options?: PlaylistEngineOptions);
1287
+ /** Immutable snapshot of the current engine state */
+ getState(): EngineState;
1288
+ setTracks(tracks: ClipTrack[]): void;
1289
+ addTrack(track: ClipTrack): void;
1290
+ removeTrack(trackId: string): void;
1291
+ selectTrack(trackId: string | null): void;
1292
+ /** Clip editing — positions/deltas are expressed in samples */
+ moveClip(trackId: string, clipId: string, deltaSamples: number): void;
1293
+ splitClip(trackId: string, clipId: string, atSample: number): void;
1294
+ trimClip(trackId: string, clipId: string, boundary: 'left' | 'right', deltaSamples: number): void;
1295
+ init(): Promise<void>;
1296
+ play(startTime?: number, endTime?: number): void;
1297
+ pause(): void;
1298
+ stop(): void;
1299
+ seek(time: number): void;
1300
+ setMasterVolume(volume: number): void;
1301
+ getCurrentTime(): number;
1302
+ setSelection(start: number, end: number): void;
1303
+ setLoopRegion(start: number, end: number): void;
1304
+ setLoopEnabled(enabled: boolean): void;
1305
+ setTrackVolume(trackId: string, volume: number): void;
1306
+ setTrackMute(trackId: string, muted: boolean): void;
1307
+ setTrackSolo(trackId: string, soloed: boolean): void;
1308
+ setTrackPan(trackId: string, pan: number): void;
1309
+ zoomIn(): void;
1310
+ zoomOut(): void;
1311
+ setZoomLevel(samplesPerPixel: number): void;
1312
+ on<K extends EventName>(event: K, listener: EngineEvents[K]): void;
1313
+ off<K extends EventName>(event: K, listener: EngineEvents[K]): void;
1314
+ dispose(): void;
1315
+ private _emit;
1316
+ /**
1317
+ * Returns whether the current playback position is before loopEnd.
1318
+ * Used by setLoopEnabled/setLoopRegion during playback — if past loopEnd,
1319
+ * Transport loop stays off so playback continues to the end.
1320
+ * Note: play() uses an inline check instead — _isPlaying is still false
1321
+ * when play() runs, and this method returns true unconditionally when
1322
+ * not playing.
1323
+ */
1324
+ private _isBeforeLoopEnd;
1325
+ private _emitStateChange;
1326
+ private _startTimeUpdateLoop;
1327
+ private _stopTimeUpdateLoop;
1328
+ }
1329
+
1330
+ /**
1331
+ * Configuration options for PlaylistEngine constructor.
1332
+ */
1333
+ export declare interface PlaylistEngineOptions {
1334
+ adapter?: PlayoutAdapter;
1335
+ sampleRate?: number;
1336
+ samplesPerPixel?: number;
1337
+ zoomLevels?: number[];
1338
+ }
1339
+
1340
+ /**
1341
+ * Interface for pluggable audio playback adapters.
1342
+ * Implement this to connect PlaylistEngine to any audio backend
1343
+ * (Tone.js, openDAW, HTMLAudioElement, etc.)
1344
+ */
1345
+ export declare interface PlayoutAdapter {
1346
+ init(): Promise<void>;
1347
+ setTracks(tracks: ClipTrack[]): void;
1348
+ /** Incrementally add a single track without rebuilding the entire playout. */
1349
+ addTrack?(track: ClipTrack): void;
1350
+ play(startTime: number, endTime?: number): void;
1351
+ pause(): void;
1352
+ stop(): void;
1353
+ seek(time: number): void;
1354
+ getCurrentTime(): number;
1355
+ isPlaying(): boolean;
1356
+ setMasterVolume(volume: number): void;
1357
+ setTrackVolume(trackId: string, volume: number): void;
1358
+ setTrackMute(trackId: string, muted: boolean): void;
1359
+ setTrackSolo(trackId: string, soloed: boolean): void;
1360
+ setTrackPan(trackId: string, pan: number): void;
1361
+ setLoop(enabled: boolean, start: number, end: number): void;
1362
+ dispose(): void;
1363
+ }
1364
+
1365
+ /**
+  * Snapshot of the playout transport state.
+  */
+ export declare interface PlayoutState {
1366
+ /** True while audio is actively playing */
+ isPlaying: boolean;
1367
+ /** True when playback is paused (position retained) */
+ isPaused: boolean;
1368
+ /** Current playback position — presumably seconds; confirm */
+ cursor: number;
1369
+ /** Total playable duration — presumably seconds; confirm */
+ duration: number;
1370
+ }
1371
+
1372
+ /**
+  * Registry that owns engine plugins: registers them against a shared
+  * SongramPluginContext, tracks their disposers, and tears them all
+  * down on dispose().
+  */
+ export declare class PluginHost {
1373
+ private readonly plugins;
1374
+ private readonly disposers;
1375
+ private readonly context;
1376
+ constructor(context: SongramPluginContext);
1377
+ /** All currently registered plugins */
+ getAll(): EnginePlugin[];
1378
+ /** Register a plugin (async — allows plugin setup to complete) */
+ register(plugin: EnginePlugin): Promise<void>;
1379
+ /** Unregister a plugin by id, running its stored disposer */
+ unregister(pluginId: string): Promise<void>;
1380
+ /** Unregister every plugin and release resources */
+ dispose(): Promise<void>;
1381
+ }
1382
+
1383
+ /** Default PPQN matching Tone.js Transport (192 ticks per quarter note) */
1384
+ export declare const PPQN = 192;
1385
+
1386
+ /**
1387
+ * Manually release a MediaStreamSource
1388
+ *
1389
+ * Normally you don't need to call this - cleanup happens automatically
1390
+ * when the stream ends. Only call this if you need to force cleanup.
1391
+ *
1392
+ * @param stream - The MediaStream to release the source for
1393
+ */
1394
+ export declare function releaseMediaStreamSource(stream: MediaStream): void;
1395
+
1396
+ /**
1397
+ * Props passed to the renderAnnotationItem function for custom rendering.
1398
+ */
1399
+ export declare interface RenderAnnotationItemProps {
1400
+ annotation: AnnotationData;
1401
+ index: number;
1402
+ isActive: boolean;
1403
+ onClick: () => void;
1404
+ formatTime: (seconds: number) => string;
1405
+ }
1406
+
1407
+ /** Render mode for a track's visualization */
1408
+ export declare type RenderMode = 'waveform' | 'spectrogram' | 'both' | 'piano-roll';
1409
+
1410
+ /**
1411
+ * Request Web MIDI API access
1412
+ * @param sysex - Whether to request SysEx access (default: false)
1413
+ */
1414
+ export declare function requestMidiAccess(sysex?: boolean): Promise<MIDIAccess | null>;
1415
+
1416
+ /**
1417
+ * Resume the global AudioContext if it's suspended
1418
+ * Should be called in response to a user gesture (e.g., button click)
1419
+ * @returns Promise that resolves when context is running
1420
+ */
1421
+ export declare function resumeGlobalAudioContext(): Promise<void>;
1422
+
1423
+ /** Convert a sample count to a pixel distance using the samples-per-pixel zoom factor. */
+ export declare function samplesToPixels(samples: number, samplesPerPixel: number): number;
1424
+
1425
+ /** Convert a sample count to seconds at the given sample rate. */
+ export declare function samplesToSeconds(samples: number, sampleRate: number): number;
1426
+
1427
+ /** Convert sample count to PPQN ticks. Inverse of ticksToSamples. */
1428
+ export declare function samplesToTicks(samples: number, bpm: number, sampleRate: number, ppqn?: number): number;
1429
+
1430
+ /** Convert seconds to a pixel distance using the zoom factor and sample rate. */
+ export declare function secondsToPixels(seconds: number, samplesPerPixel: number, sampleRate: number): number;
1431
+
1432
+ /** Convert seconds to a sample count at the given sample rate. */
+ export declare function secondsToSamples(seconds: number, sampleRate: number): number;
1433
+
1434
+ /**
1435
+ * Determine whether a scroll change is large enough to warrant
1436
+ * recalculating the viewport and re-rendering chunks.
1437
+ *
1438
+ * Small scroll movements are ignored to avoid excessive recomputation
1439
+ * during smooth scrolling.
1440
+ *
1441
+ * @param oldScrollLeft - Previous scroll position in pixels
1442
+ * @param newScrollLeft - Current scroll position in pixels
1443
+ * @param threshold - Minimum pixel delta to trigger an update (default 100)
1444
+ * @returns true if the scroll delta meets or exceeds the threshold
1445
+ */
1446
+ export declare function shouldUpdateViewport(oldScrollLeft: number, newScrollLeft: number, threshold?: number): boolean;
1447
+
1448
+ /** Snap a tick position to the nearest grid line (rounds to nearest). */
1449
+ export declare function snapToGrid(ticks: number, gridSizeTicks: number): number;
1450
+
1451
+ /**
+  * Top-level engine facade: wraps a PlaylistEngine with an EventBus,
+  * a PluginHost, and grouped sub-APIs (transport, tracks, state).
+  * Construct with optional adapter/audio/plugin options, then call
+  * init() before use and dispose() when finished.
+  */
+ export declare class SongramEngine {
1452
+ /** Event bus emitting SongramEngineEventMap events */
+ readonly events: EventBus<SongramEngineEventMap>;
1453
+ /** Host managing registered engine plugins */
+ readonly plugins: PluginHost;
1454
+ /** Transport controls, delegated to the underlying PlaylistEngine */
+ readonly transport: {
1455
+ play: (startTime?: number, endTime?: number) => void;
1456
+ pause: () => void;
1457
+ stop: () => void;
1458
+ seek: (time: number) => void;
1459
+ getCurrentTime: () => number;
1460
+ };
1461
+ /** Track management facade */
+ readonly tracks: {
1462
+ set: (tracks: ClipTrack[]) => void;
1463
+ add: (track: ClipTrack) => void;
1464
+ remove: (trackId: string) => void;
1465
+ select: (trackId: string | null) => void;
1466
+ };
1467
+ /** Read-only state access */
+ readonly state: {
1468
+ getSnapshot: () => EngineState;
1469
+ };
1470
+ /** Underlying playlist engine, exposed for advanced use */
+ readonly playlist: PlaylistEngine;
1471
+ private readonly initialPlugins;
1472
+ private readonly handleStateChange;
1473
+ private readonly handleTimeUpdate;
1474
+ private readonly handlePlay;
1475
+ private readonly handlePause;
1476
+ private readonly handleStop;
1477
+ constructor(options?: SongramEngineOptions);
1478
+ init(): Promise<void>;
1479
+ registerPlugin(plugin: EnginePlugin): Promise<void>;
1480
+ unregisterPlugin(pluginId: string): Promise<void>;
1481
+ dispose(): Promise<void>;
1482
+ }
1483
+
1484
+ /**
+  * Events emitted on SongramEngine.events: engine lifecycle
+  * (ready/dispose), transport (play/pause/stop/seek/timeupdate),
+  * track and plugin registry changes, and assistant requests.
+  */
+ export declare interface SongramEngineEventMap {
1485
+ ready: () => void;
1486
+ dispose: () => void;
1487
+ statechange: (state: unknown) => void;
1488
+ timeupdate: (time: number) => void;
1489
+ play: () => void;
1490
+ pause: () => void;
1491
+ stop: () => void;
1492
+ 'transport:seek': (time: number) => void;
1493
+ 'tracks:set': (trackIds: string[]) => void;
1494
+ 'tracks:add': (trackId: string) => void;
1495
+ 'tracks:remove': (trackId: string) => void;
1496
+ 'plugins:registered': (pluginId: string) => void;
1497
+ 'plugins:unregistered': (pluginId: string) => void;
1498
+ 'assistant:request': (payload: AssistantRequestEvent) => void;
1499
+ }
1500
+
1501
+ /**
+  * Constructor options for SongramEngine. Inherits PlaylistEngine
+  * options (minus its adapter slot, re-declared here) and adds
+  * audio-backend and plugin configuration.
+  */
+ export declare interface SongramEngineOptions extends Omit<PlaylistEngineOptions, 'adapter'> {
1502
+ /** Custom playout adapter; presumably a Tone adapter is used when omitted — confirm */
+ adapter?: PlayoutAdapter;
1503
+ /** Options for the built-in Tone.js adapter */
+ audio?: ToneAdapterOptions;
1504
+ /** Plugins registered during init() */
+ plugins?: EnginePlugin[];
1505
+ }
1506
+
1507
+ /**
+  * Context handed to plugins by PluginHost: the engine event bus plus
+  * a hook for routing requests to assistant plugins (surfaced as the
+  * 'assistant:request' engine event).
+  */
+ export declare interface SongramPluginContext {
1508
+ /** Shared engine event bus */
+ events: EventBus<SongramEngineEventMap>;
1509
+ /** Dispatch an action to an assistant plugin by id */
+ requestAssistant: (assistantId: string, action: string, payload?: unknown) => void;
1510
+ }
1511
+
1512
+ /** Category of an engine plugin; 'instrument' corresponds to InstrumentPlugin. */
+ export declare type SongramPluginKind = 'general' | 'effect' | 'instrument' | 'assistant';
1513
+
1514
+ /**
1515
+ * Utility: Sort clips by startSample
1516
+ */
1517
+ export declare function sortClipsByTime(clips: AudioClip[]): AudioClip[];
1518
+
1519
+ /**
1520
+ * Caches parsed SoundFont2 data and AudioBuffers for efficient playback.
1521
+ *
1522
+ * AudioBuffers are created lazily on first access and cached by sample index.
1523
+ * Pitch calculation uses the SF2 generator chain:
1524
+ * OverridingRootKey → sample.header.originalPitch → fallback 60
1525
+ *
1526
+ * Audio graph per note:
1527
+ * AudioBufferSourceNode (playbackRate for pitch) → GainNode (velocity) → track chain
1528
+ */
1529
+ export declare class SoundFontCache {
1530
+ private sf2;
1531
+ private audioBufferCache;
1532
+ private context;
1533
+ /**
1534
+ * @param context Optional AudioContext for createBuffer(). If omitted, uses
1535
+ * an OfflineAudioContext which doesn't require user gesture — safe to
1536
+ * construct before user interaction (avoids Firefox autoplay warnings).
1537
+ */
1538
+ constructor(context?: BaseAudioContext);
1539
+ /**
1540
+ * Load and parse an SF2 file from a URL.
1541
+ */
1542
+ load(url: string, signal?: AbortSignal): Promise<void>;
1543
+ /**
1544
+ * Load from an already-fetched ArrayBuffer.
1545
+ */
1546
+ loadFromBuffer(data: ArrayBuffer): void;
1547
+ get isLoaded(): boolean;
1548
+ /**
1549
+ * Look up a MIDI note and return the AudioBuffer + playbackRate.
1550
+ *
1551
+ * @param midiNote - MIDI note number (0-127)
1552
+ * @param bankNumber - Bank number (0 for melodic, 128 for percussion/drums)
1553
+ * @param presetNumber - GM program number (0-127)
1554
+ * @returns SoundFontSample or null if no sample found for this note
1555
+ */
1556
+ getAudioBuffer(midiNote: number, bankNumber?: number, presetNumber?: number): SoundFontSample | null;
1557
+ /**
1558
+ * Convert Int16Array sample data to an AudioBuffer.
1559
+ * Uses the extracted int16ToFloat32 for the conversion, then copies into an AudioBuffer.
1560
+ */
1561
+ private int16ToAudioBuffer;
1562
+ /**
1563
+ * Clear all cached AudioBuffers and release the parsed SF2.
1564
+ */
1565
+ dispose(): void;
1566
+ }
1567
+
1568
+ /**
1569
+ * Result of looking up a MIDI note in the SoundFont.
1570
+ * Contains the AudioBuffer, playbackRate, loop points, and volume envelope.
1571
+ */
1572
+ export declare interface SoundFontSample {
1573
+ /** Cached AudioBuffer for this sample */
1574
+ buffer: AudioBuffer;
1575
+ /** Playback rate to pitch-shift from originalPitch to target note */
1576
+ playbackRate: number;
1577
+ /** Loop mode: 0=no loop, 1=continuous, 3=sustain loop */
1578
+ loopMode: number;
1579
+ /** Loop start in seconds, relative to AudioBuffer start */
1580
+ loopStart: number;
1581
+ /** Loop end in seconds, relative to AudioBuffer start */
1582
+ loopEnd: number;
1583
+ /** Volume envelope attack time in seconds */
1584
+ attackVolEnv: number;
1585
+ /** Volume envelope hold time in seconds */
1586
+ holdVolEnv: number;
1587
+ /** Volume envelope decay time in seconds */
1588
+ decayVolEnv: number;
1589
+ /** Volume envelope sustain level as linear gain 0-1 */
1590
+ sustainVolEnv: number;
1591
+ /** Volume envelope release time in seconds */
1592
+ releaseVolEnv: number;
1593
+ }
1594
+
1595
+ /**
1596
+ * MIDI track that uses SoundFont samples for playback.
1597
+ *
1598
+ * Instead of PolySynth synthesis, each note triggers the correct instrument
1599
+ * sample from an SF2 file, pitch-shifted via AudioBufferSourceNode.playbackRate.
1600
+ *
1601
+ * Audio graph per note:
1602
+ * AudioBufferSourceNode (native, one-shot, pitch-shifted)
1603
+ * → GainNode (native, per-note velocity)
1604
+ * → Volume.input (Tone.js, shared per-track)
1605
+ * → Panner → muteGain → effects/destination
1606
+ */
1607
+ export declare class SoundFontToneTrack implements PlayableTrack {
1608
+ /** Rate-limit missing sample warnings — one per class lifetime */
1609
+ private static _missingSampleWarned;
1610
+ private scheduledClips;
1611
+ private activeSources;
1612
+ private soundFontCache;
1613
+ private programNumber;
1614
+ private bankNumber;
1615
+ private volumeNode;
1616
+ private panNode;
1617
+ private muteGain;
1618
+ private track;
1619
+ private effectsCleanup?;
1620
+ constructor(options: SoundFontToneTrackOptions);
1621
+ /**
1622
+ * Trigger a note by creating a native AudioBufferSourceNode from the SoundFont cache.
1623
+ *
1624
+ * Per-note routing: channel 9 → bank 128 (drums), others → bank 0 with programNumber.
1625
+ */
1626
+ private triggerNote;
1627
+ private gainToDb;
1628
+ /**
1629
+ * No-op — Tone.Part handles scheduling internally, no ghost tick guard needed.
1630
+ */
1631
+ setScheduleGuardOffset(_offset: number): void;
1632
+ /**
1633
+ * Start notes that should already be sounding at the current transport offset.
1634
+ */
1635
+ startMidClipSources(transportOffset: number, audioContextTime: number): void;
1636
+ /**
1637
+ * Stop all active AudioBufferSourceNodes.
1638
+ */
1639
+ stopAllSources(): void;
1640
+ /** No-op for MIDI — MIDI uses note velocity, not gain fades. */
1641
+ prepareFades(_when: number, _offset: number): void;
1642
+ /** No-op for MIDI — no fade automation to cancel. */
1643
+ cancelFades(): void;
1644
+ setVolume(gain: number): void;
1645
+ setPan(pan: number): void;
1646
+ setMute(muted: boolean): void;
1647
+ setSolo(soloed: boolean): void;
1648
+ dispose(): void;
1649
+ get id(): string;
1650
+ get duration(): number;
1651
+ get muted(): boolean;
1652
+ get startTime(): number;
1653
+ }
1654
+
1655
+ export declare interface SoundFontToneTrackOptions {
1656
+ clips: MidiClipInfo[];
1657
+ track: Track;
1658
+ soundFontCache: SoundFontCache;
1659
+ /** GM program number (0-127) for melodic instruments */
1660
+ programNumber?: number;
1661
+ /** Whether this track uses percussion bank (channel 9) */
1662
+ isPercussion?: boolean;
1663
+ effects?: TrackEffectsFunction;
1664
+ destination?: ToneAudioNode;
1665
+ }
1666
+
1667
+ /** Subset of SpectrogramConfig fields that affect FFT computation (used for cache keys) */
1668
+ export declare type SpectrogramComputeConfig = Pick<SpectrogramConfig, 'fftSize' | 'hopSize' | 'windowFunction' | 'alpha' | 'zeroPaddingFactor'>;
1669
+
1670
/**
 * Configuration for spectrogram computation and rendering.
 *
 * Fields that affect FFT computation are captured by SpectrogramComputeConfig;
 * fields that only affect display are captured by SpectrogramDisplayConfig.
 */
export declare interface SpectrogramConfig {
    /** FFT size: 256–8192, must be power of 2. Default: 2048 */
    fftSize?: FFTSize;
    /** Hop size between frames in samples. Default: fftSize / 4 */
    hopSize?: number;
    /** Window function applied before FFT. Default: 'hann' */
    windowFunction?: 'hann' | 'hamming' | 'blackman' | 'rectangular' | 'bartlett' | 'blackman-harris';
    /** Window function parameter (0-1), used by some window functions */
    alpha?: number;
    /** Frequency axis scale. Default: 'mel' */
    frequencyScale?: 'linear' | 'logarithmic' | 'mel' | 'bark' | 'erb';
    /** Minimum frequency in Hz. Default: 0 */
    minFrequency?: number;
    /** Maximum frequency in Hz. Default: sampleRate / 2 */
    maxFrequency?: number;
    /** Display brightness boost in dB. Default: 20 */
    gainDb?: number;
    /** Signal range in dB. Default: 80 */
    rangeDb?: number;
    /** Zero padding factor: actual FFT length = fftSize * zeroPaddingFactor. Default: 2 */
    zeroPaddingFactor?: number;
    /** Show frequency axis labels. Default: false */
    labels?: boolean;
    /** Label text color */
    labelsColor?: string;
    /** Label background color */
    labelsBackground?: string;
}
1701
+
1702
/**
 * Computed spectrogram data ready for rendering.
 *
 * The dB value for frame f, bin b is at `data[f * frequencyBinCount + b]`
 * (row-major layout, see `data` below).
 */
export declare interface SpectrogramData {
    /** Actual FFT length used for computation (includes zero padding) */
    fftSize: number;
    /** Original analysis window size before zero padding */
    windowSize: number;
    /** Number of frequency bins (fftSize / 2) */
    frequencyBinCount: number;
    /** Sample rate of the source audio */
    sampleRate: number;
    /** Hop size between FFT frames (in samples) */
    hopSize: number;
    /** Number of time frames */
    frameCount: number;
    /** dB values: frameCount * frequencyBinCount Float32Array (row-major, frame × bin) */
    data: Float32Array;
    /** Display brightness boost in dB */
    gainDb: number;
    /** Signal range in dB */
    rangeDb: number;
}
1725
+
1726
/**
 * Subset of SpectrogramConfig fields that only affect display/rendering (not FFT computation).
 * Compute-affecting fields live in SpectrogramComputeConfig.
 */
export declare type SpectrogramDisplayConfig = Pick<SpectrogramConfig, 'frequencyScale' | 'minFrequency' | 'maxFrequency' | 'gainDb' | 'rangeDb' | 'labels' | 'labelsColor' | 'labelsBackground'>;
1728
+
1729
/**
 * Split a clip into two clips at the given sample position.
 *
 * The left clip retains the original fadeIn; the right clip retains the original fadeOut.
 * Both clips share the same waveformData reference, so mutating one clip's
 * waveform data is visible through the other.
 * If the clip has a name, suffixes " (1)" and " (2)" are appended.
 *
 * @param clip - The clip to split
 * @param splitSample - The timeline sample position where the split occurs
 * @returns Object with `left` and `right` AudioClip
 */
export declare function splitClip(clip: AudioClip, splitSample: number): {
    left: AudioClip;
    right: AudioClip;
};
1744
+
1745
/**
 * Number of PPQN ticks per bar for the given time signature.
 *
 * @param timeSignature - [numerator, denominator] pair, e.g. [4, 4]
 * @param ppqn - Pulses (ticks) per quarter note; the library default is used when omitted
 */
export declare function ticksPerBar(timeSignature: [number, number], ppqn?: number): number;
1747
+
1748
/**
 * Number of PPQN ticks per beat for the given time signature.
 *
 * @param timeSignature - [numerator, denominator] pair, e.g. [4, 4]
 * @param ppqn - Pulses (ticks) per quarter note; the library default is used when omitted
 */
export declare function ticksPerBeat(timeSignature: [number, number], ppqn?: number): number;
1750
+
1751
/**
 * Format ticks as a 1-indexed bar.beat label. Beat 1 shows bar number only (e.g., "3" not "3.1").
 *
 * @param ticks - Position in PPQN ticks
 * @param timeSignature - [numerator, denominator] pair, e.g. [4, 4]
 * @param ppqn - Pulses (ticks) per quarter note; the library default is used when omitted
 */
export declare function ticksToBarBeatLabel(ticks: number, timeSignature: [number, number], ppqn?: number): string;
1753
+
1754
/**
 * Convert PPQN ticks to sample count. Uses Math.round for integer sample alignment.
 *
 * @param ticks - Position/duration in PPQN ticks
 * @param bpm - Tempo in beats per minute
 * @param sampleRate - Audio sample rate in Hz
 * @param ppqn - Pulses (ticks) per quarter note; the library default is used when omitted
 */
export declare function ticksToSamples(ticks: number, bpm: number, sampleRate: number, ppqn?: number): number;
1756
+
1757
/**
 * Convert SF2 timecents to seconds.
 * SF2 formula: seconds = 2^(timecents / 1200)
 * Default -12000 timecents ≈ 0.001s (effectively instant).
 *
 * @param tc - Time value in timecents (SoundFont 2 generator units)
 */
export declare function timecentsToSeconds(tc: number): number;
1763
+
1764
/**
 * Represents the entire timeline/project
 */
export declare interface Timeline {
    /** All tracks in the timeline */
    tracks: ClipTrack[];
    /** Total timeline duration in seconds */
    duration: number;
    /** Sample rate for all audio (typically 44100 or 48000) */
    sampleRate: number;
    /** Optional project name */
    name?: string;
    /** Optional tempo (BPM) for grid snapping */
    tempo?: number;
    /** Optional time signature for grid snapping */
    timeSignature?: {
        /** Beats per bar (e.g., 4 in 4/4) */
        numerator: number;
        /** Note value that counts as one beat (e.g., 4 in 4/4) */
        denominator: number;
    };
}
1784
+
1785
/**
 * A contiguous selected range of time.
 * NOTE(review): units (seconds vs samples) are not specified here — confirm
 * against the callers that produce/consume selections.
 */
export declare interface TimeSelection {
    /** Selection start */
    start: number;
    /** Selection end */
    end: number;
}
1789
+
1790
/**
 * Options for the Tone.js playout adapter.
 */
export declare interface ToneAdapterOptions {
    /** Optional effects graph builder */
    effects?: EffectsFunction;
    /** When provided, MIDI clips use SoundFont sample playback instead of PolySynth */
    soundFontCache?: SoundFontCache;
}
1795
+
1796
/**
 * Top-level playout engine backed by Tone.js. Owns the track list, the
 * master volume node, solo/mute bookkeeping, loop state, and transport
 * control (play/pause/stop/seek).
 */
export declare class TonePlayout {
    private tracks;
    private masterVolume;
    private isInitialized;
    private soloedTracks;
    private manualMuteState;
    private effectsCleanup?;
    private onPlaybackCompleteCallback?;
    private _completionEventId;
    private _loopHandler;
    private _loopEnabled;
    private _loopStart;
    private _loopEnd;
    constructor(options?: TonePlayoutOptions);
    private gainToDb;
    private clearCompletionEvent;
    /** Asynchronous engine setup; resolve before starting playback. */
    init(): Promise<void>;
    /** Create and register an audio track. */
    addTrack(trackOptions: ToneTrackOptions): ToneTrack;
    /** Create and register a synth-based MIDI track. */
    addMidiTrack(trackOptions: MidiToneTrackOptions): MidiToneTrack;
    /** Create and register a SoundFont-backed MIDI track. */
    addSoundFontTrack(trackOptions: SoundFontToneTrackOptions): SoundFontToneTrack;
    /**
     * Apply solo muting after all tracks have been added.
     * Call this after adding all tracks to ensure solo logic is applied correctly.
     */
    applyInitialSoloState(): void;
    /** Remove the track with the given id. */
    removeTrack(trackId: string): void;
    /** Look up a registered track by id, if present. */
    getTrack(trackId: string): PlayableTrack | undefined;
    /**
     * Start playback.
     * @param when - Start time (presumably AudioContext time — confirm against implementation)
     * @param offset - Transport offset to begin from
     * @param duration - Optional length of time to play
     */
    play(when?: number, offset?: number, duration?: number): void;
    /** Pause playback, keeping the current position. */
    pause(): void;
    /** Stop playback. */
    stop(): void;
    /** Set the master output gain (linear gain; converted to dB internally via gainToDb). */
    setMasterGain(gain: number): void;
    /** Set a track's solo state; muting of the other tracks is recomputed. */
    setSolo(trackId: string, soloed: boolean): void;
    private updateSoloMuting;
    /** Manually mute or unmute a track. */
    setMute(trackId: string, muted: boolean): void;
    /** Enable or disable looping over the [loopStart, loopEnd] range. */
    setLoop(enabled: boolean, loopStart: number, loopEnd: number): void;
    /** Current playback position. */
    getCurrentTime(): number;
    /** Jump playback to the given time. */
    seekTo(time: number): void;
    /** Tear down all tracks and release audio resources. */
    dispose(): void;
    /** Underlying Tone.js audio context. */
    get context(): BaseContext;
    /** Sample rate of the underlying audio context. */
    get sampleRate(): number;
    /** Register a callback invoked when playback completes. */
    setOnPlaybackComplete(callback: () => void): void;
}
1838
+
1839
/**
 * Construction options for TonePlayout.
 */
export declare interface TonePlayoutOptions {
    /** Tracks to register up front (more can be added later via addTrack) */
    tracks?: ToneTrack[];
    /** Initial master output gain */
    masterGain?: number;
    /** Optional effects graph builder */
    effects?: EffectsFunction;
}
1844
+
1845
/**
 * An audio track played through the Tone.js Transport. Manages per-clip
 * AudioBufferSourceNodes, a volume/pan/mute node chain, and fade automation.
 */
export declare class ToneTrack {
    private scheduledClips;
    private activeSources;
    private volumeNode;
    private panNode;
    private muteGain;
    private track;
    private effectsCleanup?;
    private _scheduleGuardOffset;
    constructor(options: ToneTrackOptions);
    /**
     * Create and start an AudioBufferSourceNode for a clip.
     * Sources are one-shot: each play or loop iteration creates a fresh one.
     */
    private startClipSource;
    /**
     * Set the schedule guard offset. Schedule callbacks for clips before this
     * offset are suppressed (already handled by startMidClipSources).
     * Must be called before transport.start() and in the loop handler.
     */
    setScheduleGuardOffset(offset: number): void;
    /**
     * Start sources for clips that span the given Transport position.
     * Used for mid-playback seeking and loop boundary handling where
     * Transport.schedule() callbacks have already passed.
     *
     * Uses strict < for absClipStart to avoid double-creation with
     * schedule callbacks at exact Transport position (e.g., loopStart).
     */
    startMidClipSources(transportOffset: number, audioContextTime: number): void;
    /**
     * Stop all active AudioBufferSourceNodes and clear the set.
     * Native AudioBufferSourceNodes ignore Transport state changes —
     * they must be explicitly stopped.
     */
    stopAllSources(): void;
    /**
     * Schedule fade envelopes for a clip at the given AudioContext time.
     * Uses native GainNode.gain (AudioParam) directly — no _param workaround needed.
     */
    private scheduleFades;
    /**
     * Prepare fade envelopes for all clips based on Transport offset.
     * Called before Transport.start() to schedule fades at correct AudioContext times.
     */
    prepareFades(when: number, transportOffset: number): void;
    /**
     * Cancel all scheduled fade automation and reset to nominal gain.
     * Called on pause/stop to prevent stale fade envelopes.
     */
    cancelFades(): void;
    private gainToDb;
    /** Set track gain (linear gain; converted to dB internally via gainToDb). */
    setVolume(gain: number): void;
    /** Set the stereo pan position. */
    setPan(pan: number): void;
    /** Mute or unmute the track. */
    setMute(muted: boolean): void;
    /** Set the track's solo state. */
    setSolo(soloed: boolean): void;
    /** Stop sources, run effects cleanup, and release the track's audio nodes. */
    dispose(): void;
    /** Track identifier. */
    get id(): string;
    /** Track duration. */
    get duration(): number;
    /** Underlying decoded audio buffer. */
    get buffer(): AudioBuffer;
    /** Whether the track is currently muted. */
    get muted(): boolean;
    /** Timeline start time of the track. */
    get startTime(): number;
}
1908
+
1909
/**
 * Construction options for an audio ToneTrack.
 */
export declare interface ToneTrackOptions {
    /** Decoded audio buffer for the track (both buffer and clips are optional — confirm which callers supply) */
    buffer?: AudioBuffer;
    /** Audio clips to schedule on this track */
    clips?: ClipInfo[];
    /** Underlying track state (gain, pan, mute/solo, timing) */
    track: Track;
    /** Optional per-track effects chain builder */
    effects?: TrackEffectsFunction;
    /** Optional output node for the track's audio (overrides the default routing) */
    destination?: ToneAudioNode;
}
1916
+
1917
/**
 * Serializable per-track state shared across the engine.
 */
export declare interface Track {
    /** Unique track identifier */
    id: string;
    /** Display name */
    name: string;
    /** Audio source: a URL string or a decoded AudioBuffer */
    src?: string | AudioBuffer_2;
    /** Track gain */
    gain: number;
    /** Whether the track is muted */
    muted: boolean;
    /** Whether the track is soloed */
    soloed: boolean;
    /** Stereo pan position (presumably -1 left … 1 right — confirm) */
    stereoPan: number;
    /** Timeline position where the track starts */
    startTime: number;
    /** Optional timeline position where the track ends */
    endTime?: number;
    /** Optional fade-in envelope */
    fadeIn?: Fade;
    /** Optional fade-out envelope */
    fadeOut?: Fade;
    /** Optional trim-in point within the source */
    cueIn?: number;
    /** Optional trim-out point within the source */
    cueOut?: number;
}
1932
+
1933
/**
 * Builds a per-track effects chain.
 *
 * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)
 * @param masterGainNode - Node to connect the effects output to
 * @param isOffline - Whether rendering offline (for export)
 * @returns Optional cleanup function called when the track is disposed
 */
export declare type TrackEffectsFunction = (graphEnd: Gain, masterGainNode: ToneAudioNode, isOffline: boolean) => void | (() => void);
1934
+
1935
/**
 * Generic effects function type for track-level audio processing.
 *
 * The actual implementation receives Tone.js audio nodes. Using generic types
 * here to avoid circular dependencies with the playout package.
 * (Non-exported variant; see the exported TrackEffectsFunction for the
 * Tone-typed alias.)
 *
 * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)
 * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)
 * @param isOffline - Whether rendering offline (for export)
 * @returns Optional cleanup function called when track is disposed
 *
 * @example
 * ```typescript
 * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {
 *   const reverb = new Tone.Reverb({ decay: 1.5 });
 *   graphEnd.connect(reverb);
 *   reverb.connect(destination);
 *
 *   return () => {
 *     reverb.dispose();
 *   };
 * };
 * ```
 */
declare type TrackEffectsFunction_2 = (graphEnd: unknown, destination: unknown, isOffline: boolean) => void | (() => void);
1960
+
1961
/** Per-track overrides for spectrogram rendering (render mode, config, color map) */
export declare interface TrackSpectrogramOverrides {
    /** Rendering mode to use for this track */
    renderMode: RenderMode;
    /** Optional spectrogram configuration override */
    config?: SpectrogramConfig;
    /** Optional color map override */
    colorMap?: ColorMapValue;
}
1967
+
1968
/**
 * Rendering configuration for waveform display.
 */
export declare interface WaveformConfig {
    /** Audio sample rate in Hz */
    sampleRate: number;
    /** Zoom level: number of audio samples represented by one pixel */
    samplesPerPixel: number;
    /** Rendered waveform height (presumably pixels — confirm) */
    waveHeight?: number;
    /** Color of the waveform outline */
    waveOutlineColor?: string;
    /** Fill color of the waveform body */
    waveFillColor?: string;
    /** Color of the played/progress portion */
    waveProgressColor?: string;
}
1976
+
1977
/**
 * WaveformData object from waveform-data.js library.
 * Supports resample() and slice() for dynamic zoom levels.
 * See: https://github.com/bbc/waveform-data.js
 */
export declare interface WaveformDataObject {
    /** Sample rate of the original audio */
    readonly sample_rate: number;
    /** Number of audio samples per pixel */
    readonly scale: number;
    /** Length of waveform data in pixels */
    readonly length: number;
    /** Bit depth (8 or 16) */
    readonly bits: number;
    /** Duration in seconds */
    readonly duration: number;
    /** Number of channels */
    readonly channels: number;
    /** Get channel data (per-pixel min/max arrays) */
    channel: (index: number) => {
        min_array: () => number[];
        max_array: () => number[];
    };
    /** Resample to different scale */
    resample: (options: {
        scale: number;
    } | {
        width: number;
    }) => WaveformDataObject;
    /** Slice a portion of the waveform */
    slice: (options: {
        startTime: number;
        endTime: number;
    } | {
        startIndex: number;
        endIndex: number;
    }) => WaveformDataObject;
}
2015
+
2016
+ export { }