@waveform-playlist/browser 5.0.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/dist/index.d.ts +1536 -0
- package/dist/index.js +1371 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +24304 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +72 -0
package/dist/index.d.ts
ADDED
@@ -0,0 +1,1536 @@
import { BaseContext } from 'tone';
import { default as default_2 } from 'react';
import { default as default_3 } from 'waveform-data';
import { DragEndEvent } from '@dnd-kit/core';
import { Fade as Fade_2 } from '@waveform-playlist/core';
import { Gain } from 'tone';
import { MutableRefObject } from 'react';
import react__default from 'react';
import { ReactNode } from 'react';
import { RefObject } from 'react';
import { SensorDescriptor } from '@dnd-kit/core';
import { SensorOptions } from '@dnd-kit/core';
import * as Tone from 'tone';
import { ToneAudioNode } from 'tone';
import { Track } from '@waveform-playlist/core';
import { Volume } from 'tone';

export declare interface ActiveEffect {
    instanceId: string;
    effectId: string;
    definition: EffectDefinition;
    params: Record<string, number | string | boolean>;
    bypassed: boolean;
}

declare interface Annotation$1 {
    id: string;
    start: number;
    end: number;
    lines: string[];
    lang?: string;
}

declare interface AnnotationAction {
    class?: string;
    text?: string;
    title: string;
    action: (annotation: AnnotationData, index: number, annotations: AnnotationData[], opts: AnnotationActionOptions) => void;
}

/**
 * Configuration options passed to annotation action handlers
 */
declare interface AnnotationActionOptions {
    /** Whether annotation endpoints are linked (moving one endpoint moves the other) */
    linkEndpoints?: boolean;
    /** Whether to continue playing after an annotation ends */
    continuousPlay?: boolean;
    /** Additional custom properties */
    [key: string]: unknown;
}

declare interface AnnotationData {
    id: string;
    start: number;
    end: number;
    lines: string[];
    language?: string;
}

/**
 * Represents a single audio clip on the timeline
 *
 * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)
 * to avoid floating-point precision errors. Convert to seconds only when
 * needed for playback using: seconds = samples / sampleRate
 */
declare interface AudioClip {
    /** Unique identifier for this clip */
    id: string;
    /** The audio buffer containing the audio data */
    audioBuffer: AudioBuffer;
    /** Position on timeline where this clip starts (in samples at timeline sampleRate) */
    startSample: number;
    /** Duration of this clip (in samples) - how much of the audio buffer to play */
    durationSamples: number;
    /** Offset into the audio buffer where playback starts (in samples) - the "trim start" point */
    offsetSamples: number;
    /** Optional fade in effect */
    fadeIn?: Fade;
    /** Optional fade out effect */
    fadeOut?: Fade;
    /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */
    gain: number;
    /** Optional label/name for this clip */
    name?: string;
    /** Optional color for visual distinction */
    color?: string;
}

/**
 * Audio position display that uses the playlist context.
 * Uses requestAnimationFrame for smooth 60fps updates during playback.
 * Direct DOM manipulation avoids React re-renders.
 */
export declare const AudioPosition: default_2.FC<{
    className?: string;
}>;

/**
 * Configuration for a single audio track to load
 */
export declare interface AudioTrackConfig {
    src: string;
    name?: string;
    muted?: boolean;
    soloed?: boolean;
    volume?: number;
    pan?: number;
    color?: string;
    effects?: TrackEffectsFunction_2;
    startTime?: number;
    duration?: number;
    offset?: number;
    fadeIn?: Fade;
    fadeOut?: Fade;
}

/**
 * Automatic scroll checkbox that uses the playlist context
 * Uses split contexts to avoid re-rendering during animation
 */
export declare const AutomaticScrollCheckbox: default_2.FC<{
    className?: string;
}>;

/**
 * Bits type - number of bits for peak data
 */
declare type Bits = 8 | 16;

declare interface ClipInfo {
    buffer: AudioBuffer;
    startTime: number;
    duration: number;
    offset: number;
    fadeIn?: Fade_2;
    fadeOut?: Fade_2;
    gain: number;
}

declare interface ClipPeaks {
    clipId: string;
    trackName: string;
    peaks: PeakData;
    startSample: number;
    durationSamples: number;
    fadeIn?: Fade;
    fadeOut?: Fade;
}

/**
 * Represents a track containing multiple audio clips
 */
declare interface ClipTrack {
    /** Unique identifier for this track */
    id: string;
    /** Display name for this track */
    name: string;
    /** Array of audio clips on this track */
    clips: AudioClip[];
    /** Whether this track is muted */
    muted: boolean;
    /** Whether this track is soloed */
    soloed: boolean;
    /** Track volume (0.0 to 1.0+) */
    volume: number;
    /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */
    pan: number;
    /** Optional track color for visual distinction */
    color?: string;
    /** Track height in pixels (for UI) */
    height?: number;
    /** Optional effects function for this track */
    effects?: TrackEffectsFunction_2;
}

/**
 * Continuous play checkbox that uses the playlist context
 * Uses split contexts to avoid re-rendering during animation
 */
export declare const ContinuousPlayCheckbox: default_2.FC<{
    className?: string;
}>;

/**
 * Create a chain of effects connected in series
 */
export declare function createEffectChain(effects: EffectInstance[]): {
    input: any;
    output: any;
    dispose: () => void;
};

/**
 * Create an effect instance from a definition with initial parameter values
 */
export declare function createEffectInstance(definition: EffectDefinition, initialParams?: Record<string, number | string | boolean>): EffectInstance;

/**
 * Download annotations button that uses the playlist context
 * Uses split contexts to avoid re-rendering during animation
 */
export declare const DownloadAnnotationsButton: default_2.FC<{
    filename?: string;
    className?: string;
}>;

/**
 * Editable annotations checkbox that uses the playlist context
 * Uses split contexts to avoid re-rendering during animation
 */
export declare const EditableCheckbox: default_2.FC<{
    className?: string;
}>;

export declare const effectCategories: {
    id: EffectDefinition['category'];
    name: string;
}[];

export declare interface EffectDefinition {
    id: string;
    name: string;
    category: 'delay' | 'reverb' | 'modulation' | 'distortion' | 'filter' | 'dynamics' | 'spatial';
    description: string;
    parameters: EffectParameter[];
}

export declare const effectDefinitions: EffectDefinition[];

export declare interface EffectInstance {
    effect: any;
    id: string;
    instanceId: string;
    dispose: () => void;
    setParameter: (name: string, value: number | string | boolean) => void;
    getParameter: (name: string) => number | string | boolean | undefined;
    connect: (destination: any) => void;
    disconnect: () => void;
}

export declare interface EffectParameter {
    name: string;
    label: string;
    type: ParameterType;
    min?: number;
    max?: number;
    step?: number;
    default: number | string | boolean;
    unit?: string;
    options?: {
        value: string | number;
        label: string;
    }[];
}

export declare type EffectsFunction = (masterGainNode: Volume, destination: ToneAudioNode, isOffline: boolean) => void | (() => void);

export declare interface ExportOptions extends WavEncoderOptions {
    /** Filename for download (without extension) */
    filename?: string;
    /** Export mode: 'master' for stereo mix, 'individual' for single track */
    mode?: 'master' | 'individual';
    /** Track index for individual export (only used when mode is 'individual') */
    trackIndex?: number;
    /** Whether to trigger automatic download */
    autoDownload?: boolean;
    /** Whether to apply effects (fades, etc.) - defaults to true */
    applyEffects?: boolean;
    /**
     * Optional Tone.js effects function for master effects. When provided, export will use Tone.Offline
     * to render through the effects chain. The function receives isOffline=true.
     */
    effectsFunction?: EffectsFunction;
    /**
     * Optional function to create offline track effects.
     * Takes a trackId and returns a TrackEffectsFunction for offline rendering.
     * This is used instead of track.effects to avoid AudioContext mismatch issues.
     */
    createOfflineTrackEffects?: (trackId: string) => TrackEffectsFunction_3 | undefined;
    /** Progress callback (0-1) */
    onProgress?: (progress: number) => void;
}

export declare interface ExportResult {
    /** The rendered audio buffer */
    audioBuffer: AudioBuffer;
    /** The WAV file as a Blob */
    blob: Blob;
    /** Duration in seconds */
    duration: number;
}

export declare const ExportWavButton: default_2.FC<ExportWavButtonProps>;

export declare interface ExportWavButtonProps {
    /** Button label */
    label?: string;
    /** Filename for the downloaded file (without extension) */
    filename?: string;
    /** Export mode: 'master' for stereo mix, 'individual' for single track */
    mode?: 'master' | 'individual';
    /** Track index for individual export */
    trackIndex?: number;
    /** Bit depth: 16 or 32 */
    bitDepth?: 16 | 32;
    /** Whether to apply effects (fades, etc.) - defaults to true */
    applyEffects?: boolean;
    /**
     * Optional Tone.js effects function for master effects. When provided, export will use Tone.Offline
     * to render through the effects chain. The function receives isOffline=true.
     */
    effectsFunction?: EffectsFunction;
    /**
     * Optional function to create offline track effects.
     * Takes a trackId and returns a TrackEffectsFunction for offline rendering.
     */
    createOfflineTrackEffects?: (trackId: string) => TrackEffectsFunction_3 | undefined;
    /** CSS class name */
    className?: string;
    /** Callback when export completes */
    onExportComplete?: (blob: Blob) => void;
    /** Callback when export fails */
    onExportError?: (error: Error) => void;
}

/**
 * Simple fade configuration
 */
declare interface Fade {
    /** Duration of the fade in seconds */
    duration: number;
    /** Type of fade curve (default: 'linear') */
    type?: FadeType;
}

declare type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';

export declare const FastForwardButton: default_2.FC<{
    className?: string;
}>;

export declare const getEffectDefinition: (id: string) => EffectDefinition | undefined;

export declare const getEffectsByCategory: (category: EffectDefinition["category"]) => EffectDefinition[];

/**
 * Get a human-readable string representation of a keyboard shortcut
 *
 * @param shortcut - The keyboard shortcut
 * @returns Human-readable string (e.g., "Cmd+Shift+S")
 */
export declare const getShortcutLabel: (shortcut: KeyboardShortcut) => string;

/**
 * Get metadata from waveform data file without converting to peaks
 *
 * @param src - URL to waveform data file
 * @returns Metadata (sample rate, channels, duration, bits, etc.)
 */
export declare function getWaveformDataMetadata(src: string): Promise<{
    sampleRate: number;
    channels: number;
    duration: number;
    samplesPerPixel: number;
    length: number;
    bits: 8 | 16;
}>;

/**
 * Waveform Playlist Theme
 *
 * This file defines the theme interface and default values for the waveform playlist components.
 */
/**
 * Gradient color stop for waveform gradients
 */
declare interface GradientStop {
    offset: number;
    color: string;
}

export declare interface IntegratedRecordingOptions {
    /**
     * Current playback/cursor position in seconds
     * Recording will start from max(currentTime, lastClipEndTime)
     */
    currentTime?: number;
    /**
     * MediaTrackConstraints for audio recording
     * These will override the recording-optimized defaults (echo cancellation off, low latency)
     */
    audioConstraints?: MediaTrackConstraints;
    /**
     * Number of channels to record (1 = mono, 2 = stereo)
     * Default: 1 (mono)
     */
    channelCount?: number;
    /**
     * Samples per pixel for peak generation
     * Default: 1024
     */
    samplesPerPixel?: number;
}

declare interface KeyboardShortcut {
    key: string;
    ctrlKey?: boolean;
    shiftKey?: boolean;
    metaKey?: boolean;
    altKey?: boolean;
    action: () => void;
    description?: string;
    preventDefault?: boolean;
}

/**
 * Link endpoints checkbox that uses the playlist context
 * Uses split contexts to avoid re-rendering during animation
 */
export declare const LinkEndpointsCheckbox: default_2.FC<{
    className?: string;
}>;

/**
 * Load waveform data file and convert to Peaks format in one step
 *
 * @param src - URL to waveform data file (.dat or .json)
 * @param channelIndex - Channel index (default: 0)
 * @returns Peaks data ready for rendering
 */
export declare function loadPeaksFromWaveformData(src: string, channelIndex?: number): Promise<{
    data: Int8Array | Int16Array;
    bits: 8 | 16;
    length: number;
    sampleRate: number;
}>;

/**
 * Load waveform data from a .dat or .json file
 *
 * @param src - URL to waveform data file (.dat or .json)
 * @returns WaveformData instance
 */
export declare function loadWaveformData(src: string): Promise<default_3>;

/**
 * Master volume control that uses the playlist context
 */
export declare const MasterVolumeControl: default_2.FC<{
    className?: string;
}>;

export declare interface MasterVolumeControls {
    masterVolume: number;
    setMasterVolume: (volume: number) => void;
}

declare interface MicrophoneDevice {
    deviceId: string;
    label: string;
    groupId: string;
}

/**
 * Effect definitions for all available Tone.js effects
 * Each effect has parameters with min/max/default values for UI controls
 */
export declare type ParameterType = 'number' | 'select' | 'boolean';

export declare const PauseButton: default_2.FC<{
    className?: string;
}>;

/**
 * PeakData - result of peak extraction
 */
declare interface PeakData {
    /** Number of peaks extracted */
    length: number;
    /** Array of peak data for each channel */
    data: Peaks[];
    /** Bit depth of peak data */
    bits: Bits;
}

/**
 * Peaks type - represents a typed array of peak data
 */
declare type Peaks = Int8Array | Int16Array;

declare interface PlaybackAnimationContextValue {
    isPlaying: boolean;
    currentTime: number;
    currentTimeRef: default_2.RefObject<number>;
    playbackStartTimeRef: default_2.RefObject<number>;
    audioStartPositionRef: default_2.RefObject<number>;
}

export declare const PlayButton: default_2.FC<{
    className?: string;
}>;

/**
 * Props passed to the default playhead component or custom render function.
 */
declare interface PlayheadProps {
    /** Position in pixels from left edge (only valid when not playing) */
    position: number;
    /** Playhead color (default: #ff0000) */
    color?: string;
    /** Whether audio is currently playing */
    isPlaying: boolean;
    /** Ref to current time in seconds - use for smooth animation during playback */
    currentTimeRef: react__default.RefObject<number>;
    /** Audio context start time when playback began - for calculating elapsed time */
    playbackStartTimeRef: react__default.RefObject<number>;
    /** Audio position when playback started - for calculating current position */
    audioStartPositionRef: react__default.RefObject<number>;
    /** Samples per pixel - for converting time to pixels */
    samplesPerPixel: number;
    /** Sample rate - for converting time to pixels */
    sampleRate: number;
    /** Controls offset in pixels */
    controlsOffset: number;
    /** Function to get current audio context time - required for smooth animation */
    getAudioContextTime?: () => number;
}

declare interface PlaylistControlsContextValue {
    play: (startTime?: number, playDuration?: number) => Promise<void>;
    pause: () => void;
    stop: () => void;
    seekTo: (time: number) => void;
    setCurrentTime: (time: number) => void;
    setTrackMute: (trackIndex: number, muted: boolean) => void;
    setTrackSolo: (trackIndex: number, soloed: boolean) => void;
    setTrackVolume: (trackIndex: number, volume: number) => void;
    setTrackPan: (trackIndex: number, pan: number) => void;
    setSelection: (start: number, end: number) => void;
    setSelectedTrackId: (trackId: string | null) => void;
    setTimeFormat: (format: TimeFormat) => void;
    formatTime: (seconds: number) => string;
    zoomIn: () => void;
    zoomOut: () => void;
    setMasterVolume: (volume: number) => void;
    setAutomaticScroll: (enabled: boolean) => void;
    setScrollContainer: (element: HTMLDivElement | null) => void;
    scrollContainerRef: default_2.RefObject<HTMLDivElement | null>;
    setContinuousPlay: (enabled: boolean) => void;
    setLinkEndpoints: (enabled: boolean) => void;
    setAnnotationsEditable: (enabled: boolean) => void;
    setAnnotations: (annotations: AnnotationData[]) => void;
    setActiveAnnotationId: (id: string | null) => void;
}

declare interface PlaylistDataContextValue {
    duration: number;
    audioBuffers: AudioBuffer[];
    peaksDataArray: TrackClipPeaks[];
    trackStates: TrackState[];
    tracks: ClipTrack[];
    sampleRate: number;
    waveHeight: number;
    timeScaleHeight: number;
    minimumPlaylistHeight: number;
    controls: {
        show: boolean;
        width: number;
    };
    playoutRef: default_2.RefObject<TonePlayout | null>;
    samplesPerPixel: number;
    timeFormat: string;
    masterVolume: number;
    canZoomIn: boolean;
    canZoomOut: boolean;
    barWidth: number;
    barGap: number;
    /** Width in pixels of progress bars. Defaults to barWidth + barGap (fills gaps). */
    progressBarWidth: number;
}

declare interface PlaylistStateContextValue {
    continuousPlay: boolean;
    linkEndpoints: boolean;
    annotationsEditable: boolean;
    isAutomaticScroll: boolean;
    annotations: AnnotationData[];
    activeAnnotationId: string | null;
    selectionStart: number;
    selectionEnd: number;
    selectedTrackId: string | null;
}

/**
 * Type for custom playhead render functions.
 * Receives position, color, and animation refs for smooth 60fps animation.
 * Custom playheads should use requestAnimationFrame with the refs during playback.
 */
declare type RenderPlayheadFunction = (props: PlayheadProps) => react__default.ReactNode;

export declare const RewindButton: default_2.FC<{
    className?: string;
}>;

/**
 * Selection time inputs that use the playlist context
 */
export declare const SelectionTimeInputs: default_2.FC<{
    className?: string;
}>;

export declare const SkipBackwardButton: default_2.FC<{
    skipAmount?: number;
    className?: string;
}>;

export declare const SkipForwardButton: default_2.FC<{
    skipAmount?: number;
    className?: string;
}>;

export declare const StopButton: default_2.FC<{
    className?: string;
}>;

/**
 * Time format utilities for displaying and parsing audio timestamps
 */
export declare type TimeFormat = 'seconds' | 'thousandths' | 'hh:mm:ss' | 'hh:mm:ss.u' | 'hh:mm:ss.uu' | 'hh:mm:ss.uuu';

export declare interface TimeFormatControls {
    timeFormat: TimeFormat;
    setTimeFormat: (format: TimeFormat) => void;
    formatTime: (seconds: number) => string;
    parseTime: (timeString: string) => number;
}

/**
 * Time format selector that uses the playlist context
 */
export declare const TimeFormatSelect: default_2.FC<{
    className?: string;
}>;

export { Tone }

declare class TonePlayout {
    private tracks;
    private masterVolume;
    private isInitialized;
    private soloedTracks;
    private manualMuteState;
    private effectsCleanup?;
    private onPlaybackCompleteCallback?;
    private activeTracks;
    private playbackSessionId;
    constructor(options?: TonePlayoutOptions);
    private gainToDb;
    init(): Promise<void>;
    addTrack(trackOptions: ToneTrackOptions): ToneTrack;
    removeTrack(trackId: string): void;
    getTrack(trackId: string): ToneTrack | undefined;
    play(when?: number, offset?: number, duration?: number): void;
    pause(): void;
    stop(): void;
    setMasterGain(gain: number): void;
    setSolo(trackId: string, soloed: boolean): void;
    private updateSoloMuting;
    setMute(trackId: string, muted: boolean): void;
    getCurrentTime(): number;
    seekTo(time: number): void;
    dispose(): void;
    get context(): BaseContext;
    get sampleRate(): number;
    setOnPlaybackComplete(callback: () => void): void;
}

declare interface TonePlayoutOptions {
    tracks?: ToneTrack[];
    masterGain?: number;
    effects?: EffectsFunction;
}

declare class ToneTrack {
    private clips;
    private volumeNode;
    private panNode;
    private muteGain;
    private track;
    private effectsCleanup?;
    private onStopCallback?;
    private activePlayers;
    constructor(options: ToneTrackOptions);
    /**
     * Schedule fade envelopes for a clip at the given start time
     */
    private scheduleFades;
    private gainToDb;
    setVolume(gain: number): void;
    setPan(pan: number): void;
    setMute(muted: boolean): void;
    setSolo(soloed: boolean): void;
    play(when?: number, offset?: number, duration?: number): void;
    pause(): void;
    stop(when?: number): void;
    dispose(): void;
    get id(): string;
    get duration(): number;
    get buffer(): AudioBuffer;
    get isPlaying(): boolean;
    get muted(): boolean;
    get startTime(): number;
    setOnStopCallback(callback: () => void): void;
}

declare interface ToneTrackOptions {
    buffer?: AudioBuffer;
    clips?: ClipInfo[];
    track: Track;
    effects?: TrackEffectsFunction;
    destination?: ToneAudioNode;
}

export declare interface TrackActiveEffect {
    instanceId: string;
    effectId: string;
    definition: EffectDefinition;
    params: Record<string, number | string | boolean>;
    bypassed: boolean;
}

declare type TrackClipPeaks = ClipPeaks[];

export declare type TrackEffectsFunction = (graphEnd: Gain, masterGainNode: ToneAudioNode, isOffline: boolean) => void | (() => void);

/**
 * Clip-Based Model Types
 *
 * These types support a professional multi-track editing model where:
 * - Each track can contain multiple audio clips
 * - Clips can be positioned anywhere on the timeline
 * - Clips have independent trim points (offset/duration)
 * - Gaps between clips are silent
 * - Clips can overlap (for crossfades)
 */

/**
 * Generic effects function type for track-level audio processing.
 *
 * The actual implementation receives Tone.js audio nodes. Using generic types
 * here to avoid circular dependencies with the playout package.
 *
 * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)
 * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)
 * @param isOffline - Whether rendering offline (for export)
 * @returns Optional cleanup function called when track is disposed
 *
 * @example
 * ```typescript
 * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {
 *   const reverb = new Tone.Reverb({ decay: 1.5 });
 *   graphEnd.connect(reverb);
 *   reverb.connect(destination);
 *
 *   return () => {
 *     reverb.dispose();
 *   };
 * };
 * ```
 */
declare type TrackEffectsFunction_2 = (graphEnd: unknown, destination: unknown, isOffline: boolean) => void | (() => void);

/** Function type for per-track effects (same as in @waveform-playlist/core) */
declare type TrackEffectsFunction_3 = (graphEnd: unknown, destination: unknown, isOffline: boolean) => void | (() => void);

export declare interface TrackEffectsState {
    trackId: string;
    activeEffects: TrackActiveEffect[];
}

export declare interface TrackState {
    name: string;
    muted: boolean;
    soloed: boolean;
    volume: number;
    pan: number;
}

declare interface TrackState_2 {
    muted: boolean;
    soloed: boolean;
    volume: number;
    pan: number;
}

/**
 * Custom hook for handling annotation drag operations (boundary trimming)
 *
 * Provides drag handlers for use with @dnd-kit/core DndContext.
 * Handles annotation boundary resizing with linked endpoints support.
 *
 * @example
 * ```tsx
 * const { onDragStart, onDragMove, onDragEnd } = useAnnotationDragHandlers({
 *   annotations,
 *   onAnnotationsChange: setAnnotations,
 *   samplesPerPixel,
 *   sampleRate,
 *   duration,
 *   linkEndpoints,
 * });
 *
 * return (
 *   <DndContext
 *     onDragStart={onDragStart}
 *     onDragMove={onDragMove}
 *     onDragEnd={onDragEnd}
 *     modifiers={[restrictToHorizontalAxis]}
 *   >
 *     {renderAnnotations()}
 *   </DndContext>
 * );
 * ```
 */
export declare function useAnnotationDragHandlers({ annotations, onAnnotationsChange, samplesPerPixel, sampleRate, duration, linkEndpoints, }: UseAnnotationDragHandlersOptions): {
    onDragStart: (event: {
        active: any;
    }) => void;
    onDragMove: (event: {
        active: any;
        delta: {
            x: number;
            y: number;
        };
    }) => void;
    onDragEnd: () => void;
};

declare interface UseAnnotationDragHandlersOptions {
    annotations: Annotation$1[];
    onAnnotationsChange: (annotations: Annotation$1[]) => void;
    samplesPerPixel: number;
    sampleRate: number;
    duration: number;
    linkEndpoints: boolean;
}

/**
 * Hook for keyboard-based annotation navigation and boundary editing
 *
 * Navigation Shortcuts:
 * - ArrowUp / ArrowLeft = Select previous annotation
 * - ArrowDown / ArrowRight = Select next annotation
 * - Home = Select first annotation
 * - End = Select last annotation
 * - Escape = Deselect annotation
 * - Enter = Play selected annotation
 *
 * Boundary Editing Shortcuts (requires active annotation):
 * - [ = Move start boundary earlier (left)
 * - ] = Move start boundary later (right)
 * - Shift+[ = Move end boundary earlier (left)
 * - Shift+] = Move end boundary later (right)
 *
 * Respects linkEndpoints and continuousPlay settings.
 *
 * @example
 * ```tsx
 * useAnnotationKeyboardControls({
 *   annotations,
 *   activeAnnotationId,
 *   onAnnotationsChange: setAnnotations,
 *   onActiveAnnotationChange: setActiveAnnotationId,
 *   duration,
 *   linkEndpoints,
 * });
 * ```
 */
export declare function useAnnotationKeyboardControls({ annotations, activeAnnotationId, onAnnotationsChange, onActiveAnnotationChange, duration, linkEndpoints, continuousPlay, enabled, scrollContainerRef, samplesPerPixel, sampleRate, controlsWidth, onPlay, }: UseAnnotationKeyboardControlsOptions): {
    moveStartBoundary: (delta: number) => void;
    moveEndBoundary: (delta: number) => void;
    selectPrevious: () => void;
    selectNext: () => void;
    selectFirst: () => void;
    selectLast: () => void;
    clearSelection: () => void;
    scrollToAnnotation: (annotationId: string) => void;
    playActiveAnnotation: () => void;
};

declare interface UseAnnotationKeyboardControlsOptions {
    annotations: Annotation$1[];
    activeAnnotationId: string | null;
    onAnnotationsChange: (annotations: Annotation$1[]) => void;
    /** Callback to set the active annotation ID for selection */
    onActiveAnnotationChange?: (id: string | null) => void;
    duration: number;
    linkEndpoints: boolean;
    /** Whether continuous play is enabled (affects playback duration) */
    continuousPlay?: boolean;
    enabled?: boolean;
    /** Optional: scroll container ref for auto-scrolling to annotation */
    scrollContainerRef?: React.RefObject<HTMLDivElement | null>;
    /** Optional: samples per pixel for scroll position calculation */
    samplesPerPixel?: number;
    /** Optional: sample rate for scroll position calculation */
    sampleRate?: number;
    /** Optional: controls width offset for scroll position calculation */
    controlsWidth?: number;
    /** Optional: callback to start playback at a time with optional duration */
    onPlay?: (startTime: number, duration?: number) => void;
}

/**
 * Hook to load audio from URLs and convert to ClipTrack format
 *
 * This hook fetches audio files, decodes them, and creates ClipTrack objects
 * with a single clip per track. Supports custom positioning for multi-clip arrangements.
 *
 * @param configs - Array of audio track configurations
 * @returns Object with tracks array and loading state
 *
 * @example
 * ```typescript
 * // Basic usage (clips positioned at start)
 * const { tracks, loading, error } = useAudioTracks([
 *   { src: 'audio/vocals.mp3', name: 'Vocals' },
 *   { src: 'audio/drums.mp3', name: 'Drums' },
 * ]);
 *
 * // Multi-clip positioning (clips at different times with gaps)
 * const { tracks, loading, error } = useAudioTracks([
 *   { src: 'audio/guitar.mp3', name: 'Guitar Clip 1', startTime: 0, duration: 3 },
 *   { src: 'audio/guitar.mp3', name: 'Guitar Clip 2', startTime: 5, duration: 3, offset: 5 },
 *   { src: 'audio/vocals.mp3', name: 'Vocals', startTime: 2, duration: 4 },
 * ]);
 *
 * if (loading) return <div>Loading...</div>;
 * if (error) return <div>Error: {error}</div>;
 *
 * return <WaveformPlaylistProvider tracks={tracks}>...</WaveformPlaylistProvider>;
 * ```
 */
export declare function useAudioTracks(configs: AudioTrackConfig[]): {
    tracks: ClipTrack[];
    loading: boolean;
    error: string | null;
};

/**
 * Custom hook for handling clip drag operations (movement and trimming)
 *
 * Provides drag handlers and collision modifier for use with @dnd-kit/core DndContext.
 * Handles both clip movement (dragging entire clips) and boundary trimming (adjusting clip edges).
 *
 * @example
 * ```tsx
 * const { onDragStart, onDragMove, onDragEnd, collisionModifier } = useClipDragHandlers({
 *   tracks,
 *   onTracksChange: setTracks,
 *   samplesPerPixel,
 *   sampleRate,
 * });
 *
 * return (
 *   <DndContext
 *     onDragStart={onDragStart}
 *     onDragMove={onDragMove}
 *     onDragEnd={onDragEnd}
 *     modifiers={[restrictToHorizontalAxis, collisionModifier]}
 *   >
 *     <Waveform showClipHeaders={true} />
 *   </DndContext>
 * );
 * ```
 */
export declare function useClipDragHandlers({ tracks, onTracksChange, samplesPerPixel, sampleRate, }: UseClipDragHandlersOptions): {
    onDragStart: (event: {
        active: any;
    }) => void;
    onDragMove: (event: {
        active: any;
        delta: {
            x: number;
            y: number;
        };
    }) => void;
    onDragEnd: (event: DragEndEvent) => void;
    collisionModifier: (args: {
        transform: {
            x: number;
            y: number;
        };
        active: any;
    }) => {
        scaleX: number;
        scaleY: number;
        x: number;
        y: number;
    };
};

declare interface UseClipDragHandlersOptions {
    tracks: ClipTrack[];
    onTracksChange: (tracks: ClipTrack[]) => void;
    samplesPerPixel: number;
    sampleRate: number;
}

/**
 * Hook for splitting clips at the playhead or at a specific time
 *
 * @param options - Configuration options
 * @returns Object with split functions
 *
 * @example
 * ```tsx
 * const { splitClipAtPlayhead } = useClipSplitting({
 *   tracks,
 *   onTracksChange: setTracks,
 *   currentTime,
 * });
 *
 * // In keyboard handler
 * const handleKeyPress = (e: KeyboardEvent) => {
 *   if (e.key === 's' || e.key === 'S') {
 *     splitClipAtPlayhead();
 *   }
 * };
 * ```
 */
export declare const useClipSplitting: (options: UseClipSplittingOptions) => UseClipSplittingResult;

declare interface UseClipSplittingOptions {
    tracks: ClipTrack[];
    onTracksChange: (tracks: ClipTrack[]) => void;
    sampleRate: number;
    samplesPerPixel: number;
}

declare interface UseClipSplittingResult {
    splitClipAtPlayhead: () => boolean;
    splitClipAt: (trackIndex: number, clipIndex: number, splitTime: number) => boolean;
}

/**
 * Hook for configuring @dnd-kit sensors for clip dragging
 *
 * Provides consistent drag activation behavior across all examples
 */
/**
 * Returns configured sensors for @dnd-kit drag operations
 *
 * @returns Configured sensors with 1px activation distance for immediate feedback
 */
export declare function useDragSensors(): SensorDescriptor<SensorOptions>[];

/**
 * Hook for managing a dynamic chain of audio effects with real-time parameter updates
 */
export declare function useDynamicEffects(fftSize?: number): UseDynamicEffectsReturn;

export declare interface UseDynamicEffectsReturn {
    activeEffects: ActiveEffect[];
    availableEffects: EffectDefinition[];
    addEffect: (effectId: string) => void;
    removeEffect: (instanceId: string) => void;
    updateParameter: (instanceId: string, paramName: string, value: number | string | boolean) => void;
    toggleBypass: (instanceId: string) => void;
    reorderEffects: (fromIndex: number, toIndex: number) => void;
    clearAllEffects: () => void;
    masterEffects: EffectsFunction;
    /**
     * Creates a fresh effects function for offline rendering.
     * This creates new effect instances that work in the offline AudioContext.
     */
    createOfflineEffectsFunction: () => EffectsFunction | undefined;
    analyserRef: React.RefObject<any>;
}

/**
 * Hook for exporting the waveform playlist to WAV format
 * Uses OfflineAudioContext for fast, non-real-time rendering
 */
export declare function useExportWav(): UseExportWavReturn;

export declare interface UseExportWavReturn {
    /** Export the playlist to WAV */
    exportWav: (tracks: ClipTrack[], trackStates: TrackState_2[], options?: ExportOptions) => Promise<ExportResult>;
    /** Whether export is in progress */
    isExporting: boolean;
    /** Export progress (0-1) */
    progress: number;
    /** Error message if export failed */
    error: string | null;
}

export declare function useIntegratedRecording(tracks: ClipTrack[], setTracks: (tracks: ClipTrack[]) => void, selectedTrackId: string | null, options?: IntegratedRecordingOptions): UseIntegratedRecordingReturn;

export declare interface UseIntegratedRecordingReturn {
    isRecording: boolean;
    isPaused: boolean;
    duration: number;
    level: number;
    peakLevel: number;
    error: Error | null;
    stream: MediaStream | null;
    devices: MicrophoneDevice[];
    hasPermission: boolean;
    selectedDevice: string | null;
    startRecording: () => void;
    stopRecording: () => void;
    pauseRecording: () => void;
    resumeRecording: () => void;
    requestMicAccess: () => Promise<void>;
    changeDevice: (deviceId: string) => Promise<void>;
    recordingPeaks: Int8Array | Int16Array;
}

/**
 * Hook for managing keyboard shortcuts
 *
 * @param options - Configuration options
 *
 * @example
 * ```tsx
 * const { splitClipAtPlayhead } = useClipSplitting({ ... });
 *
 * useKeyboardShortcuts({
 *   shortcuts: [
 *     {
 *       key: 's',
 *       action: splitClipAtPlayhead,
 *       description: 'Split clip at playhead',
 *       preventDefault: true,
 *     },
 *     {
 *       key: 'S',
 *       shiftKey: true,
 *       action: () => splitAtSelection(),
 *       description: 'Split at selection boundaries',
 *       preventDefault: true,
 *     },
 *   ],
 * });
 * ```
 */
export declare const useKeyboardShortcuts: (options: UseKeyboardShortcutsOptions) => void;

declare interface UseKeyboardShortcutsOptions {
    shortcuts: KeyboardShortcut[];
    enabled?: boolean;
}

/**
 * Hook for master effects with frequency analyzer
 * Returns the analyser ref and the effects function to pass to WaveformPlaylistProvider
 *
 * For more advanced effects (reverb, delay, filters, etc.), use useDynamicEffects instead.
 */
export declare const useMasterAnalyser: (fftSize?: number) => {
    analyserRef: MutableRefObject<any>;
    masterEffects: EffectsFunction;
};

/**
 * Hook for managing master volume control
 *
 * @example
 * ```tsx
 * const { masterVolume, setMasterVolume } = useMasterVolume({
 *   playoutRef,
 *   initialVolume: 1.0,
 * });
 *
 * <MasterVolumeControl
 *   volume={masterVolume}
 *   onChange={setMasterVolume}
 * />
 * ```
 */
export declare function useMasterVolume({ playoutRef, initialVolume, onVolumeChange, }: UseMasterVolumeProps): MasterVolumeControls;

declare interface UseMasterVolumeProps {
    playoutRef: RefObject<TonePlayout | null>;
    initialVolume?: number;
    onVolumeChange?: (volume: number) => void;
}

export declare const usePlaybackAnimation: () => PlaybackAnimationContextValue;

/**
 * Hook that provides common playback keyboard shortcuts for the playlist.
 *
 * Default shortcuts:
 * - `Space` - Toggle play/pause
 * - `Escape` - Stop playback
 * - `0` - Rewind to start (seek to time 0)
 *
 * @example
 * ```tsx
 * // Basic usage - enables default shortcuts
 * usePlaybackShortcuts();
 *
 * // With additional custom shortcuts
 * usePlaybackShortcuts({
 *   additionalShortcuts: [
 *     { key: 's', action: splitClipAtPlayhead, description: 'Split clip' },
 *   ],
 * });
 *
 * // Completely override shortcuts
 * usePlaybackShortcuts({
 *   shortcuts: [
 *     { key: 'Home', action: rewindToStart, description: 'Go to start' },
 *   ],
 * });
 * ```
 */
export declare const usePlaybackShortcuts: (options?: UsePlaybackShortcutsOptions) => UsePlaybackShortcutsReturn;

export declare interface UsePlaybackShortcutsOptions {
    /**
     * Enable the shortcuts. Defaults to true.
     */
    enabled?: boolean;
    /**
     * Additional shortcuts to include alongside the default playback shortcuts.
     */
    additionalShortcuts?: KeyboardShortcut[];
    /**
     * Override default shortcuts. If provided, only these shortcuts will be used.
     */
    shortcuts?: KeyboardShortcut[];
}

export declare interface UsePlaybackShortcutsReturn {
    /** Rewind to the beginning (time = 0) */
    rewindToStart: () => void;
    /** Toggle play/pause */
    togglePlayPause: () => void;
    /** Stop playback and return to start position */
    stopPlayback: () => void;
    /** The list of active keyboard shortcuts */
    shortcuts: KeyboardShortcut[];
}

export declare const usePlaylistControls: () => PlaylistControlsContextValue;

export declare const usePlaylistData: () => PlaylistDataContextValue;

export declare const usePlaylistState: () => PlaylistStateContextValue;

/**
 * Hook to manage time format state
 *
 * @example
 * ```tsx
 * const { timeFormat, setTimeFormat, formatTime, parseTime } = useTimeFormat();
 *
 * <TimeFormatSelect
 *   value={timeFormat}
 *   onChange={setTimeFormat}
 * />
 * <span>{formatTime(currentTime)}</span>
 * <input onChange={(e) => seekTo(parseTime(e.target.value))} />
 * ```
 */
export declare function useTimeFormat(): TimeFormatControls;

/**
 * Hook for managing dynamic effects per track with real-time parameter updates
 */
export declare function useTrackDynamicEffects(): UseTrackDynamicEffectsReturn;

export declare interface UseTrackDynamicEffectsReturn {
    trackEffectsState: Map<string, TrackActiveEffect[]>;
    addEffectToTrack: (trackId: string, effectId: string) => void;
    removeEffectFromTrack: (trackId: string, instanceId: string) => void;
    updateTrackEffectParameter: (trackId: string, instanceId: string, paramName: string, value: number | string | boolean) => void;
    toggleBypass: (trackId: string, instanceId: string) => void;
    clearTrackEffects: (trackId: string) => void;
    getTrackEffectsFunction: (trackId: string) => TrackEffectsFunction | undefined;
    /**
     * Creates a fresh effects function for a track for offline rendering.
     * This creates new effect instances that work in the offline AudioContext.
     */
    createOfflineTrackEffectsFunction: (trackId: string) => TrackEffectsFunction | undefined;
    availableEffects: EffectDefinition[];
}

export declare const useWaveformPlaylist: () => WaveformPlaylistContextValue;

export declare function useZoomControls({ initialSamplesPerPixel, zoomLevels, }: UseZoomControlsProps): ZoomControls;

declare interface UseZoomControlsProps {
    initialSamplesPerPixel: number;
    zoomLevels?: number[];
}

/**
 * Waveform visualization component that uses the playlist context
 */
export declare const Waveform: default_2.FC<WaveformProps>;

/**
 * Waveform color can be a simple string or a gradient configuration
 */
declare type WaveformColor = string | WaveformGradient;

/**
 * Convert WaveformData to our internal Peaks format
 *
 * @param waveformData - WaveformData instance from waveform-data.js
 * @param channelIndex - Channel index (0 for mono/left, 1 for right)
 * @returns Peaks data with alternating min/max values, preserving original bit depth
 */
export declare function waveformDataToPeaks(waveformData: default_3, channelIndex?: number): {
    data: Int8Array | Int16Array;
    bits: 8 | 16;
    length: number;
    sampleRate: number;
};

/**
 * Waveform drawing mode determines how colors are applied:
 * - 'inverted': Canvas draws waveOutlineColor in areas WITHOUT audio (current default).
 *   waveFillColor shows through where audio peaks are. Good for gradient bars.
 * - 'normal': Canvas draws waveFillColor bars where audio peaks ARE.
 *   waveOutlineColor is used as background. Good for gradient backgrounds.
 */
declare type WaveformDrawMode = 'inverted' | 'normal';

/**
 * Gradient configuration for waveforms
 * Can be applied vertically (top to bottom) or horizontally (left to right)
 */
declare interface WaveformGradient {
    type: 'linear';
    direction: 'vertical' | 'horizontal';
    stops: GradientStop[];
}

export declare interface WaveformPlaylistContextValue {
    isPlaying: boolean;
    currentTime: number;
    duration: number;
    audioBuffers: AudioBuffer[];
    peaksDataArray: TrackClipPeaks[];
    trackStates: TrackState[];
    annotations: AnnotationData[];
    activeAnnotationId: string | null;
    selectionStart: number;
    selectionEnd: number;
    isAutomaticScroll: boolean;
    continuousPlay: boolean;
    linkEndpoints: boolean;
    annotationsEditable: boolean;
    play: (startTime?: number, playDuration?: number) => Promise<void>;
    pause: () => void;
    stop: () => void;
    setCurrentTime: (time: number) => void;
    setTrackMute: (trackIndex: number, muted: boolean) => void;
    setTrackSolo: (trackIndex: number, soloed: boolean) => void;
    setTrackVolume: (trackIndex: number, volume: number) => void;
    setTrackPan: (trackIndex: number, pan: number) => void;
    setSelection: (start: number, end: number) => void;
    timeFormat: string;
    setTimeFormat: (format: TimeFormat) => void;
    formatTime: (seconds: number) => string;
    samplesPerPixel: number;
    zoomIn: () => void;
    zoomOut: () => void;
    canZoomIn: boolean;
    canZoomOut: boolean;
    masterVolume: number;
    setMasterVolume: (volume: number) => void;
    setAutomaticScroll: (enabled: boolean) => void;
    setScrollContainer: (element: HTMLDivElement | null) => void;
    setContinuousPlay: (enabled: boolean) => void;
    setLinkEndpoints: (enabled: boolean) => void;
    setAnnotationsEditable: (enabled: boolean) => void;
    setAnnotations: (annotations: AnnotationData[]) => void;
    setActiveAnnotationId: (id: string | null) => void;
    playoutRef: default_2.RefObject<TonePlayout | null>;
    currentTimeRef: default_2.RefObject<number>;
    sampleRate: number;
    waveHeight: number;
    timeScaleHeight: number;
    minimumPlaylistHeight: number;
    controls: {
        show: boolean;
        width: number;
    };
}

export declare const WaveformPlaylistProvider: default_2.FC<WaveformPlaylistProviderProps>;

declare interface WaveformPlaylistProviderProps {
    tracks: ClipTrack[];
    timescale?: boolean;
    mono?: boolean;
    waveHeight?: number;
    samplesPerPixel?: number;
    zoomLevels?: number[];
    automaticScroll?: boolean;
    theme?: Partial<WaveformPlaylistTheme>;
    controls?: {
        show: boolean;
        width: number;
    };
    annotationList?: {
        annotations?: any[];
        editable?: boolean;
        isContinuousPlay?: boolean;
        linkEndpoints?: boolean;
        controls?: any[];
    };
    effects?: EffectsFunction;
    onReady?: () => void;
    onAnnotationUpdate?: (annotations: AnnotationData[]) => void;
    /** Width in pixels of waveform bars. Default: 1 */
    barWidth?: number;
    /** Spacing in pixels between waveform bars. Default: 0 */
    barGap?: number;
    /** Width in pixels of progress bars. Default: barWidth + barGap (fills gaps). */
    progressBarWidth?: number;
    children: ReactNode;
}

declare interface WaveformPlaylistTheme {
    waveformDrawMode?: WaveformDrawMode;
    waveOutlineColor: WaveformColor;
    waveFillColor: WaveformColor;
    waveProgressColor: string;
    selectedWaveOutlineColor: WaveformColor;
    selectedWaveFillColor: WaveformColor;
    selectedTrackControlsBackground: string;
    timeColor: string;
    timescaleBackgroundColor: string;
    playheadColor: string;
    selectionColor: string;
    clipHeaderBackgroundColor: string;
    clipHeaderBorderColor: string;
    clipHeaderTextColor: string;
    clipHeaderFontFamily: string;
    selectedClipHeaderBackgroundColor: string;
    fadeOverlayColor: string;
    backgroundColor: string;
    surfaceColor: string;
    borderColor: string;
    textColor: string;
    textColorMuted: string;
    inputBackground: string;
    inputBorder: string;
    inputText: string;
    inputPlaceholder: string;
    inputFocusBorder: string;
    buttonBackground: string;
    buttonText: string;
    buttonBorder: string;
    buttonHoverBackground: string;
    sliderTrackColor: string;
    sliderThumbColor: string;
    annotationBoxBackground: string;
    annotationBoxActiveBackground: string;
    annotationBoxHoverBackground: string;
    annotationBoxBorder: string;
    annotationBoxActiveBorder: string;
    annotationLabelColor: string;
    annotationResizeHandleColor: string;
    annotationResizeHandleActiveColor: string;
    annotationTextItemHoverBackground: string;
    borderRadius: string;
    fontFamily: string;
    fontSize: string;
    fontSizeSmall: string;
}

export declare interface WaveformProps {
    renderTrackControls?: (trackIndex: number) => ReactNode;
    renderTimestamp?: (timeMs: number, pixelPosition: number) => ReactNode;
    /** Custom playhead render function. Receives position (pixels) and color from theme. */
    renderPlayhead?: RenderPlayheadFunction;
    annotationControls?: AnnotationAction[];
    annotationListConfig?: AnnotationActionOptions;
    annotationTextHeight?: number;
    className?: string;
    showClipHeaders?: boolean;
    interactiveClips?: boolean;
    showFades?: boolean;
    recordingState?: {
        isRecording: boolean;
        trackId: string;
        startSample: number;
        durationSamples: number;
        peaks: Int8Array | Int16Array;
    };
}

export declare interface WaveformTrack {
    src: string | AudioBuffer;
    name?: string;
    effects?: TrackEffectsFunction;
}

/**
 * WAV file encoder
 * Converts AudioBuffer to WAV format Blob
 */
declare interface WavEncoderOptions {
    /** Bit depth: 16 or 32. Default: 16 */
    bitDepth?: 16 | 32;
}

export declare interface ZoomControls {
    samplesPerPixel: number;
    zoomIn: () => void;
    zoomOut: () => void;
    canZoomIn: boolean;
    canZoomOut: boolean;
}

export declare const ZoomInButton: default_2.FC<{
    className?: string;
    disabled?: boolean;
}>;

export declare const ZoomOutButton: default_2.FC<{
    className?: string;
    disabled?: boolean;
}>;

export { }
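The declarations above only describe the API surface of this alpha release. As a rough orientation, a minimal consumer might wire the exports together like the sketch below. It is assembled purely from the declared types and the JSDoc examples embedded in them; the asset paths and the `controls` sizing are illustrative assumptions, not values taken from the package, and runtime behavior is not verified here.

```tsx
// Minimal usage sketch for @waveform-playlist/browser 5.0.0-alpha.0,
// based only on the exported types in dist/index.d.ts.
import React from 'react';
import {
  AudioPosition,
  PauseButton,
  PlayButton,
  StopButton,
  Waveform,
  WaveformPlaylistProvider,
  useAudioTracks,
} from '@waveform-playlist/browser';

export function Editor() {
  // Decode two audio files into ClipTrack objects (one clip per track),
  // mirroring the useAudioTracks JSDoc example. Paths are hypothetical.
  const { tracks, loading, error } = useAudioTracks([
    { src: 'audio/vocals.mp3', name: 'Vocals' },
    { src: 'audio/drums.mp3', name: 'Drums' },
  ]);

  if (loading) return <div>Loading...</div>;
  if (error) return <div>Error: {error}</div>;

  return (
    // The provider owns playback state; the transport buttons, position
    // readout, and waveform all read from its contexts.
    <WaveformPlaylistProvider tracks={tracks} controls={{ show: true, width: 200 }}>
      <PlayButton />
      <PauseButton />
      <StopButton />
      <AudioPosition />
      <Waveform />
    </WaveformPlaylistProvider>
  );
}
```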