@waveform-playlist/core 5.0.0-alpha.1 → 5.0.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -9,6 +9,44 @@
  * - Clips can overlap (for crossfades)
  */
 
+ /**
+ * WaveformData object from waveform-data.js library.
+ * Supports resample() and slice() for dynamic zoom levels.
+ * See: https://github.com/bbc/waveform-data.js
+ */
+ interface WaveformDataObject {
+ /** Sample rate of the original audio */
+ readonly sample_rate: number;
+ /** Number of audio samples per pixel */
+ readonly scale: number;
+ /** Length of waveform data in pixels */
+ readonly length: number;
+ /** Bit depth (8 or 16) */
+ readonly bits: number;
+ /** Duration in seconds */
+ readonly duration: number;
+ /** Number of channels */
+ readonly channels: number;
+ /** Get channel data */
+ channel: (index: number) => {
+ min_array: () => number[];
+ max_array: () => number[];
+ };
+ /** Resample to different scale */
+ resample: (options: {
+ scale: number;
+ } | {
+ width: number;
+ }) => WaveformDataObject;
+ /** Slice a portion of the waveform */
+ slice: (options: {
+ startTime: number;
+ endTime: number;
+ } | {
+ startIndex: number;
+ endIndex: number;
+ }) => WaveformDataObject;
+ }
  /**
  * Generic effects function type for track-level audio processing.
  *
@@ -40,18 +78,37 @@ type TrackEffectsFunction = (graphEnd: unknown, destination: unknown, isOffline:
  * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)
  * to avoid floating-point precision errors. Convert to seconds only when
  * needed for playback using: seconds = samples / sampleRate
+ *
+ * Clips can be created with just waveformData (for instant visual rendering)
+ * and have audioBuffer added later when audio finishes loading.
  */
  interface AudioClip {
  /** Unique identifier for this clip */
  id: string;
- /** The audio buffer containing the audio data */
- audioBuffer: AudioBuffer;
+ /**
+ * The audio buffer containing the audio data.
+ * Optional for peaks-first rendering - can be added later.
+ * Required for playback and editing operations.
+ */
+ audioBuffer?: AudioBuffer;
  /** Position on timeline where this clip starts (in samples at timeline sampleRate) */
  startSample: number;
  /** Duration of this clip (in samples) - how much of the audio buffer to play */
  durationSamples: number;
  /** Offset into the audio buffer where playback starts (in samples) - the "trim start" point */
  offsetSamples: number;
+ /**
+ * Sample rate for this clip's audio.
+ * Required when audioBuffer is not provided (for peaks-first rendering).
+ * When audioBuffer is present, this should match audioBuffer.sampleRate.
+ */
+ sampleRate: number;
+ /**
+ * Total duration of the source audio in samples.
+ * Required when audioBuffer is not provided (for trim bounds calculation).
+ * When audioBuffer is present, this should equal audioBuffer.length.
+ */
+ sourceDurationSamples: number;
  /** Optional fade in effect */
  fadeIn?: Fade;
  /** Optional fade out effect */
@@ -62,6 +119,13 @@ interface AudioClip {
  name?: string;
  /** Optional color for visual distinction */
  color?: string;
+ /**
+ * Pre-computed waveform data from waveform-data.js library.
+ * When provided, the library will use this instead of computing peaks from the audioBuffer.
+ * Supports resampling to different zoom levels and slicing for clip trimming.
+ * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`
+ */
+ waveformData?: WaveformDataObject;
  }
  /**
  * Represents a track containing multiple audio clips
@@ -110,9 +174,13 @@ interface Timeline {
  }
  /**
  * Options for creating a new audio clip (using sample counts)
+ *
+ * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.
+ * Providing waveformData without audioBuffer enables peaks-first rendering.
  */
  interface CreateClipOptions {
- audioBuffer: AudioBuffer;
+ /** Audio buffer - optional for peaks-first rendering */
+ audioBuffer?: AudioBuffer;
  startSample: number;
  durationSamples?: number;
  offsetSamples?: number;
@@ -121,12 +189,22 @@ interface CreateClipOptions {
  color?: string;
  fadeIn?: Fade;
  fadeOut?: Fade;
+ /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */
+ waveformData?: WaveformDataObject;
+ /** Sample rate - required if audioBuffer not provided */
+ sampleRate?: number;
+ /** Total source audio duration in samples - required if audioBuffer not provided */
+ sourceDurationSamples?: number;
  }
  /**
  * Options for creating a new audio clip (using seconds for convenience)
+ *
+ * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.
+ * Providing waveformData without audioBuffer enables peaks-first rendering.
  */
  interface CreateClipOptionsSeconds {
- audioBuffer: AudioBuffer;
+ /** Audio buffer - optional for peaks-first rendering */
+ audioBuffer?: AudioBuffer;
  startTime: number;
  duration?: number;
  offset?: number;
@@ -135,6 +213,12 @@ interface CreateClipOptionsSeconds {
  color?: string;
  fadeIn?: Fade;
  fadeOut?: Fade;
+ /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */
+ waveformData?: WaveformDataObject;
+ /** Sample rate - required if audioBuffer not provided */
+ sampleRate?: number;
+ /** Total source audio duration in seconds - required if audioBuffer not provided */
+ sourceDuration?: number;
  }
  /**
  * Options for creating a new track
@@ -151,11 +235,19 @@ interface CreateTrackOptions {
  }
  /**
  * Creates a new AudioClip with sensible defaults (using sample counts)
+ *
+ * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:
+ * - Provided explicitly via options
+ * - Derived from waveformData (sample_rate and duration properties)
  */
  declare function createClip(options: CreateClipOptions): AudioClip;
  /**
  * Creates a new AudioClip from time-based values (convenience function)
- * Converts seconds to samples using the audioBuffer's sampleRate
+ * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate
+ *
+ * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:
+ * - Provided explicitly via options
+ * - Derived from waveformData (sample_rate and duration properties)
  */
  declare function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip;
  /**
@@ -281,4 +373,4 @@ declare function pixelsToSamples(pixels: number, samplesPerPixel: number): numbe
  declare function pixelsToSeconds(pixels: number, samplesPerPixel: number, sampleRate: number): number;
  declare function secondsToPixels(seconds: number, samplesPerPixel: number, sampleRate: number): number;
 
- export { type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type WaveformConfig, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
+ export { type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type WaveformConfig, type WaveformDataObject, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
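Taken together, the .d.mts changes make audioBuffer optional and add waveformData, sampleRate, and sourceDurationSamples for peaks-first rendering. A minimal sketch of how a consumer might use the new options (the peaks URL and track name are made up for illustration, and the peaks are parsed with WaveformData.create from bbc/waveform-data.js rather than a helper from this package):

```typescript
import WaveformData from 'waveform-data';
import { createClip, createTrack, type AudioClip } from '@waveform-playlist/core';

// Fetch pre-computed peaks (e.g. produced by BBC audiowaveform).
// '/peaks/vocals.dat' is a hypothetical URL used only for illustration.
const response = await fetch('/peaks/vocals.dat');
const waveformData = WaveformData.create(await response.arrayBuffer());

// Peaks-first clip: no audioBuffer yet, so the sample rate and source length
// come from the waveform data (sample_rate / duration).
const clip: AudioClip = createClip({
  startSample: 0,
  waveformData,
  sampleRate: waveformData.sample_rate,
  sourceDurationSamples: Math.ceil(waveformData.duration * waveformData.sample_rate),
});

// The clip can be drawn immediately; an AudioBuffer can be attached later
// to enable playback and editing.
const track = createTrack({ name: 'Vocals', clips: [clip] });
console.log(track.clips[0].durationSamples); // defaults to the full source duration
```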
package/dist/index.d.ts CHANGED
@@ -9,6 +9,44 @@
  * - Clips can overlap (for crossfades)
  */
 
+ /**
+ * WaveformData object from waveform-data.js library.
+ * Supports resample() and slice() for dynamic zoom levels.
+ * See: https://github.com/bbc/waveform-data.js
+ */
+ interface WaveformDataObject {
+ /** Sample rate of the original audio */
+ readonly sample_rate: number;
+ /** Number of audio samples per pixel */
+ readonly scale: number;
+ /** Length of waveform data in pixels */
+ readonly length: number;
+ /** Bit depth (8 or 16) */
+ readonly bits: number;
+ /** Duration in seconds */
+ readonly duration: number;
+ /** Number of channels */
+ readonly channels: number;
+ /** Get channel data */
+ channel: (index: number) => {
+ min_array: () => number[];
+ max_array: () => number[];
+ };
+ /** Resample to different scale */
+ resample: (options: {
+ scale: number;
+ } | {
+ width: number;
+ }) => WaveformDataObject;
+ /** Slice a portion of the waveform */
+ slice: (options: {
+ startTime: number;
+ endTime: number;
+ } | {
+ startIndex: number;
+ endIndex: number;
+ }) => WaveformDataObject;
+ }
  /**
  * Generic effects function type for track-level audio processing.
  *
@@ -40,18 +78,37 @@ type TrackEffectsFunction = (graphEnd: unknown, destination: unknown, isOffline:
  * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)
  * to avoid floating-point precision errors. Convert to seconds only when
  * needed for playback using: seconds = samples / sampleRate
+ *
+ * Clips can be created with just waveformData (for instant visual rendering)
+ * and have audioBuffer added later when audio finishes loading.
  */
  interface AudioClip {
  /** Unique identifier for this clip */
  id: string;
- /** The audio buffer containing the audio data */
- audioBuffer: AudioBuffer;
+ /**
+ * The audio buffer containing the audio data.
+ * Optional for peaks-first rendering - can be added later.
+ * Required for playback and editing operations.
+ */
+ audioBuffer?: AudioBuffer;
  /** Position on timeline where this clip starts (in samples at timeline sampleRate) */
  startSample: number;
  /** Duration of this clip (in samples) - how much of the audio buffer to play */
  durationSamples: number;
  /** Offset into the audio buffer where playback starts (in samples) - the "trim start" point */
  offsetSamples: number;
+ /**
+ * Sample rate for this clip's audio.
+ * Required when audioBuffer is not provided (for peaks-first rendering).
+ * When audioBuffer is present, this should match audioBuffer.sampleRate.
+ */
+ sampleRate: number;
+ /**
+ * Total duration of the source audio in samples.
+ * Required when audioBuffer is not provided (for trim bounds calculation).
+ * When audioBuffer is present, this should equal audioBuffer.length.
+ */
+ sourceDurationSamples: number;
  /** Optional fade in effect */
  fadeIn?: Fade;
  /** Optional fade out effect */
@@ -62,6 +119,13 @@ interface AudioClip {
  name?: string;
  /** Optional color for visual distinction */
  color?: string;
+ /**
+ * Pre-computed waveform data from waveform-data.js library.
+ * When provided, the library will use this instead of computing peaks from the audioBuffer.
+ * Supports resampling to different zoom levels and slicing for clip trimming.
+ * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`
+ */
+ waveformData?: WaveformDataObject;
  }
  /**
  * Represents a track containing multiple audio clips
@@ -110,9 +174,13 @@ interface Timeline {
  }
  /**
  * Options for creating a new audio clip (using sample counts)
+ *
+ * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.
+ * Providing waveformData without audioBuffer enables peaks-first rendering.
  */
  interface CreateClipOptions {
- audioBuffer: AudioBuffer;
+ /** Audio buffer - optional for peaks-first rendering */
+ audioBuffer?: AudioBuffer;
  startSample: number;
  durationSamples?: number;
  offsetSamples?: number;
@@ -121,12 +189,22 @@ interface CreateClipOptions {
  color?: string;
  fadeIn?: Fade;
  fadeOut?: Fade;
+ /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */
+ waveformData?: WaveformDataObject;
+ /** Sample rate - required if audioBuffer not provided */
+ sampleRate?: number;
+ /** Total source audio duration in samples - required if audioBuffer not provided */
+ sourceDurationSamples?: number;
  }
  /**
  * Options for creating a new audio clip (using seconds for convenience)
+ *
+ * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.
+ * Providing waveformData without audioBuffer enables peaks-first rendering.
  */
  interface CreateClipOptionsSeconds {
- audioBuffer: AudioBuffer;
+ /** Audio buffer - optional for peaks-first rendering */
+ audioBuffer?: AudioBuffer;
  startTime: number;
  duration?: number;
  offset?: number;
@@ -135,6 +213,12 @@ interface CreateClipOptionsSeconds {
  color?: string;
  fadeIn?: Fade;
  fadeOut?: Fade;
+ /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */
+ waveformData?: WaveformDataObject;
+ /** Sample rate - required if audioBuffer not provided */
+ sampleRate?: number;
+ /** Total source audio duration in seconds - required if audioBuffer not provided */
+ sourceDuration?: number;
  }
  /**
  * Options for creating a new track
@@ -151,11 +235,19 @@ interface CreateTrackOptions {
  }
  /**
  * Creates a new AudioClip with sensible defaults (using sample counts)
+ *
+ * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:
+ * - Provided explicitly via options
+ * - Derived from waveformData (sample_rate and duration properties)
  */
  declare function createClip(options: CreateClipOptions): AudioClip;
  /**
  * Creates a new AudioClip from time-based values (convenience function)
- * Converts seconds to samples using the audioBuffer's sampleRate
+ * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate
+ *
+ * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:
+ * - Provided explicitly via options
+ * - Derived from waveformData (sample_rate and duration properties)
  */
  declare function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip;
  /**
@@ -281,4 +373,4 @@ declare function pixelsToSamples(pixels: number, samplesPerPixel: number): numbe
  declare function pixelsToSeconds(pixels: number, samplesPerPixel: number, sampleRate: number): number;
  declare function secondsToPixels(seconds: number, samplesPerPixel: number, sampleRate: number): number;
 
- export { type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type WaveformConfig, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
+ export { type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type WaveformConfig, type WaveformDataObject, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
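package/dist/index.d.ts carries the same declaration changes for the second build output. The comments describe a deferred-decode flow: render from waveformData first, attach the decoded buffer later. A sketch of that flow under assumed URLs (attaching the buffer by mutating the clip is an illustration for this sketch, not an API documented in this diff):

```typescript
import WaveformData from 'waveform-data';
import { createClipFromSeconds } from '@waveform-playlist/core';

// Parse pre-computed peaks so the waveform can be drawn right away.
// '/peaks/guitar.dat' and '/audio/guitar.wav' are hypothetical URLs.
const peaksBuffer = await (await fetch('/peaks/guitar.dat')).arrayBuffer();
const peaks = WaveformData.create(peaksBuffer);

// sampleRate and sourceDuration fall back to peaks.sample_rate / peaks.duration,
// so only the timeline position is strictly required here.
const clip = createClipFromSeconds({
  startTime: 12.5,
  waveformData: peaks,
  name: 'Guitar',
});

// Once the audio itself has been fetched and decoded, attach the buffer so the
// clip becomes playable and editable. An application might instead rebuild the
// clip via createClip with the decoded buffer.
const ctx = new AudioContext();
const audioData = await (await fetch('/audio/guitar.wav')).arrayBuffer();
clip.audioBuffer = await ctx.decodeAudioData(audioData);
```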
package/dist/index.js CHANGED
@@ -44,51 +44,83 @@ function createClip(options) {
  const {
  audioBuffer,
  startSample,
- durationSamples = audioBuffer.length,
- // Full buffer by default
  offsetSamples = 0,
  gain = 1,
  name,
  color,
  fadeIn,
- fadeOut
+ fadeOut,
+ waveformData
  } = options;
+ const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;
+ const sourceDurationSamples = audioBuffer?.length ?? options.sourceDurationSamples ?? (waveformData && sampleRate ? Math.ceil(waveformData.duration * sampleRate) : void 0);
+ if (sampleRate === void 0) {
+ throw new Error("createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)");
+ }
+ if (sourceDurationSamples === void 0) {
+ throw new Error("createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)");
+ }
+ if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {
+ console.warn(
+ `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). Using audioBuffer sample rate. Waveform visualization may be slightly off.`
+ );
+ }
+ const durationSamples = options.durationSamples ?? sourceDurationSamples;
  return {
  id: generateId(),
  audioBuffer,
  startSample,
  durationSamples,
  offsetSamples,
+ sampleRate,
+ sourceDurationSamples,
  gain,
  name,
  color,
  fadeIn,
- fadeOut
+ fadeOut,
+ waveformData
  };
  }
  function createClipFromSeconds(options) {
  const {
  audioBuffer,
  startTime,
- duration = audioBuffer.duration,
  offset = 0,
  gain = 1,
  name,
  color,
  fadeIn,
- fadeOut
+ fadeOut,
+ waveformData
  } = options;
- const sampleRate = audioBuffer.sampleRate;
+ const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;
+ if (sampleRate === void 0) {
+ throw new Error("createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)");
+ }
+ const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;
+ if (sourceDuration === void 0) {
+ throw new Error("createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)");
+ }
+ if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {
+ console.warn(
+ `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). Using audioBuffer sample rate. Waveform visualization may be slightly off.`
+ );
+ }
+ const duration = options.duration ?? sourceDuration;
  return createClip({
  audioBuffer,
  startSample: Math.round(startTime * sampleRate),
  durationSamples: Math.round(duration * sampleRate),
  offsetSamples: Math.round(offset * sampleRate),
+ sampleRate,
+ sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),
  gain,
  name,
  color,
  fadeIn,
- fadeOut
+ fadeOut,
+ waveformData
  });
  }
  function createTrack(options) {
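In the compiled createClip/createClipFromSeconds above, sampleRate and the source duration resolve in a fixed order (audioBuffer, then the explicit option, then waveformData) before validation, and the clip duration defaults to the full source. A small worked sketch of that fallback arithmetic with made-up numbers (44.1 kHz, 30 s of peaks):

```typescript
import { createClipFromSeconds, type WaveformDataObject } from '@waveform-playlist/core';

// Stand-in peaks object with only the fields the fallback logic reads;
// a real WaveformDataObject also carries scale, bits, channel(), resample(), slice().
const peaks = { sample_rate: 44100, duration: 30 } as unknown as WaveformDataObject;

const clip = createClipFromSeconds({
  startTime: 2.5,      // startSample = Math.round(2.5 * 44100) = 110250
  waveformData: peaks, // supplies sampleRate (44100) and sourceDuration (30 s)
});

// duration defaulted to the full source, so both values land on 30 s worth of samples:
// durationSamples       = Math.round(30 * 44100) = 1323000
// sourceDurationSamples = Math.ceil(30 * 44100)  = 1323000
console.log(clip.startSample, clip.durationSamples, clip.sourceDurationSamples);
```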
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["export * from './types';\nexport * from './utils';\n","/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /** The audio buffer containing the audio data */\n audioBuffer: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: 
ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n */\nexport interface CreateClipOptions {\n audioBuffer: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n */\nexport interface CreateClipOptionsSeconds {\n audioBuffer: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n durationSamples = audioBuffer.length, // Full buffer by default\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n } = options;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n duration = audioBuffer.duration,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n } = options;\n\n const sampleRate = audioBuffer.sampleRate;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = 
track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n 
samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / samplesPerPixel);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC8LO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,kBAAkB,YAAY;AAAA;AAAA,IAC9B,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAMO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,WAAW,YAAY;AAAA,IACvB,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,QAAM,aAAa,YAAY;AAE/B,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAA
M,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AC1TO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;AAC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
+ {"version":3,"sources":["../src/index.ts","../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["export * from './types';\nexport * from './utils';\n","/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\n\n/**\n * WaveformData object from waveform-data.js library.\n * Supports resample() and slice() for dynamic zoom levels.\n * See: https://github.com/bbc/waveform-data.js\n */\nexport interface WaveformDataObject {\n /** Sample rate of the original audio */\n readonly sample_rate: number;\n /** Number of audio samples per pixel */\n readonly scale: number;\n /** Length of waveform data in pixels */\n readonly length: number;\n /** Bit depth (8 or 16) */\n readonly bits: number;\n /** Duration in seconds */\n readonly duration: number;\n /** Number of channels */\n readonly channels: number;\n /** Get channel data */\n channel: (index: number) => {\n min_array: () => number[];\n max_array: () => number[];\n };\n /** Resample to different scale */\n resample: (options: { scale: number } | { width: number }) => WaveformDataObject;\n /** Slice a portion of the waveform */\n slice: (options: { startTime: number; endTime: number } | { startIndex: number; endIndex: number }) => WaveformDataObject;\n}\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. 
Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n *\n * Clips can be created with just waveformData (for instant visual rendering)\n * and have audioBuffer added later when audio finishes loading.\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /**\n * The audio buffer containing the audio data.\n * Optional for peaks-first rendering - can be added later.\n * Required for playback and editing operations.\n */\n audioBuffer?: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /**\n * Sample rate for this clip's audio.\n * Required when audioBuffer is not provided (for peaks-first rendering).\n * When audioBuffer is present, this should match audioBuffer.sampleRate.\n */\n sampleRate: number;\n\n /**\n * Total duration of the source audio in samples.\n * Required when audioBuffer is not provided (for trim bounds calculation).\n * When audioBuffer is present, this should equal audioBuffer.length.\n */\n sourceDurationSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n\n /**\n * Pre-computed waveform data from waveform-data.js library.\n * When provided, the library will use this instead of computing peaks from the audioBuffer.\n * Supports resampling to different zoom levels and slicing for clip trimming.\n * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`\n */\n waveformData?: WaveformDataObject;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n *\n * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables 
peaks-first rendering.\n */\nexport interface CreateClipOptions {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer/source duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in samples - required if audioBuffer not provided */\n sourceDurationSamples?: number;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n *\n * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptionsSeconds {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer/source duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in seconds - required if audioBuffer not provided */\n sourceDuration?: number;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n\n // Determine source duration: audioBuffer > explicit option > waveformData (converted to samples)\n const sourceDurationSamples = audioBuffer?.length\n ?? options.sourceDurationSamples\n ?? (waveformData && sampleRate ? 
Math.ceil(waveformData.duration * sampleRate) : undefined);\n\n if (sampleRate === undefined) {\n throw new Error('createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n if (sourceDurationSamples === undefined) {\n throw new Error('createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default duration to full source duration\n const durationSamples = options.durationSamples ?? sourceDurationSamples;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n sampleRate,\n sourceDurationSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n if (sampleRate === undefined) {\n throw new Error('createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n\n // Determine source duration: audioBuffer > explicit option > waveformData\n const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;\n if (sourceDuration === undefined) {\n throw new Error('createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match (could cause visual/audio sync issues)\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default clip duration to full source duration\n const duration = options.duration ?? 
sourceDuration;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n sampleRate,\n sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n 
gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / 
samplesPerPixel);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACgRO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAGlF,QAAM,wBAAwB,aAAa,UACtC,QAAQ,0BACP,gBAAgB,aAAa,KAAK,KAAK,aAAa,WAAW,UAAU,IAAI;AAEnF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AACA,MAAI,0BAA0B,QAAW;AACvC,UAAM,IAAI,MAAM,gHAAgH;AAAA,EAClI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,mBAAmB;AAEnD,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAClF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,mHAAmH;AAAA,EACrI;AAGA,QAAM,iBAAiB,aAAa,YAAY,QAAQ,kBAAkB,cAAc;AACxF,MAAI,mBAAmB,QAAW;AAChC,UAAM,IAAI,MAAM,oHAAoH;AAAA,EACtI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA,uBAAuB,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;ACrcO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SA
AS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;AAC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
package/dist/index.mjs CHANGED
@@ -3,51 +3,83 @@ function createClip(options) {
   const {
     audioBuffer,
     startSample,
-    durationSamples = audioBuffer.length,
-    // Full buffer by default
     offsetSamples = 0,
     gain = 1,
     name,
     color,
     fadeIn,
-    fadeOut
+    fadeOut,
+    waveformData
   } = options;
+  const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;
+  const sourceDurationSamples = audioBuffer?.length ?? options.sourceDurationSamples ?? (waveformData && sampleRate ? Math.ceil(waveformData.duration * sampleRate) : void 0);
+  if (sampleRate === void 0) {
+    throw new Error("createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)");
+  }
+  if (sourceDurationSamples === void 0) {
+    throw new Error("createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)");
+  }
+  if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {
+    console.warn(
+      `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). Using audioBuffer sample rate. Waveform visualization may be slightly off.`
+    );
+  }
+  const durationSamples = options.durationSamples ?? sourceDurationSamples;
   return {
     id: generateId(),
     audioBuffer,
     startSample,
     durationSamples,
     offsetSamples,
+    sampleRate,
+    sourceDurationSamples,
     gain,
     name,
     color,
     fadeIn,
-    fadeOut
+    fadeOut,
+    waveformData
   };
 }
 function createClipFromSeconds(options) {
   const {
     audioBuffer,
     startTime,
-    duration = audioBuffer.duration,
     offset = 0,
     gain = 1,
     name,
     color,
     fadeIn,
-    fadeOut
+    fadeOut,
+    waveformData
   } = options;
-  const sampleRate = audioBuffer.sampleRate;
+  const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;
+  if (sampleRate === void 0) {
+    throw new Error("createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)");
+  }
+  const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;
+  if (sourceDuration === void 0) {
+    throw new Error("createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)");
+  }
+  if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {
+    console.warn(
+      `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). Using audioBuffer sample rate. Waveform visualization may be slightly off.`
+    );
+  }
+  const duration = options.duration ?? sourceDuration;
   return createClip({
     audioBuffer,
     startSample: Math.round(startTime * sampleRate),
     durationSamples: Math.round(duration * sampleRate),
     offsetSamples: Math.round(offset * sampleRate),
+    sampleRate,
+    sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),
     gain,
     name,
     color,
     fadeIn,
-    fadeOut
+    fadeOut,
+    waveformData
   });
 }
 function createTrack(options) {
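The hunk above shows that createClip() and createClipFromSeconds() now accept clips without an audioBuffer, deriving sampleRate and source duration from an explicit option or from waveformData. A minimal sketch of the peaks-first path, assuming the peaks file is decoded with waveform-data.js (`WaveformData.create()` from the `waveform-data` npm package, which is not part of this package) and that attaching the buffer later is done by the caller:

```typescript
// Sketch only: create a renderable clip from pre-computed peaks before audio is decoded.
// WaveformData.create() is waveform-data.js API; the peaks URL and clip name are made up.
import WaveformData from 'waveform-data';
import { createClipFromSeconds } from '@waveform-playlist/core';

async function loadPeaksFirstClip(peaksUrl: string, startTime: number) {
  const response = await fetch(peaksUrl);
  const waveformData = WaveformData.create(await response.arrayBuffer());

  // No audioBuffer yet: sampleRate comes from waveformData.sample_rate and the source
  // duration from waveformData.duration, so the clip can be drawn immediately.
  return createClipFromSeconds({
    startTime,
    waveformData,
    name: 'Vocals',
  });
}

// Later, once decoding finishes, the buffer can be attached for playback/editing,
// since audioBuffer is optional on AudioClip in this release:
// clip.audioBuffer = decodedBuffer;
```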
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /** The audio buffer containing the audio data */\n audioBuffer: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n 
/** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n */\nexport interface CreateClipOptions {\n audioBuffer: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n */\nexport interface CreateClipOptionsSeconds {\n audioBuffer: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n durationSamples = audioBuffer.length, // Full buffer by default\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n } = options;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n duration = audioBuffer.duration,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n } = options;\n\n const sampleRate = audioBuffer.sampleRate;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n 
}, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: 
boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / samplesPerPixel);\n}\n"],"mappings":";AA8LO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,kBAAkB,YAAY;AAAA;AAAA,IAC9B,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAMO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,WAAW,YAAY;AAAA,IACvB,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,QAAM,aAAa,YAAY;AAE/B,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAE
tC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AC1TO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;AAC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
+ {"version":3,"sources":["../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\n\n/**\n * WaveformData object from waveform-data.js library.\n * Supports resample() and slice() for dynamic zoom levels.\n * See: https://github.com/bbc/waveform-data.js\n */\nexport interface WaveformDataObject {\n /** Sample rate of the original audio */\n readonly sample_rate: number;\n /** Number of audio samples per pixel */\n readonly scale: number;\n /** Length of waveform data in pixels */\n readonly length: number;\n /** Bit depth (8 or 16) */\n readonly bits: number;\n /** Duration in seconds */\n readonly duration: number;\n /** Number of channels */\n readonly channels: number;\n /** Get channel data */\n channel: (index: number) => {\n min_array: () => number[];\n max_array: () => number[];\n };\n /** Resample to different scale */\n resample: (options: { scale: number } | { width: number }) => WaveformDataObject;\n /** Slice a portion of the waveform */\n slice: (options: { startTime: number; endTime: number } | { startIndex: number; endIndex: number }) => WaveformDataObject;\n}\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. 
Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n *\n * Clips can be created with just waveformData (for instant visual rendering)\n * and have audioBuffer added later when audio finishes loading.\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /**\n * The audio buffer containing the audio data.\n * Optional for peaks-first rendering - can be added later.\n * Required for playback and editing operations.\n */\n audioBuffer?: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /**\n * Sample rate for this clip's audio.\n * Required when audioBuffer is not provided (for peaks-first rendering).\n * When audioBuffer is present, this should match audioBuffer.sampleRate.\n */\n sampleRate: number;\n\n /**\n * Total duration of the source audio in samples.\n * Required when audioBuffer is not provided (for trim bounds calculation).\n * When audioBuffer is present, this should equal audioBuffer.length.\n */\n sourceDurationSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n\n /**\n * Pre-computed waveform data from waveform-data.js library.\n * When provided, the library will use this instead of computing peaks from the audioBuffer.\n * Supports resampling to different zoom levels and slicing for clip trimming.\n * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`\n */\n waveformData?: WaveformDataObject;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n *\n * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables 
peaks-first rendering.\n */\nexport interface CreateClipOptions {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer/source duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in samples - required if audioBuffer not provided */\n sourceDurationSamples?: number;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n *\n * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptionsSeconds {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer/source duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in seconds - required if audioBuffer not provided */\n sourceDuration?: number;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n\n // Determine source duration: audioBuffer > explicit option > waveformData (converted to samples)\n const sourceDurationSamples = audioBuffer?.length\n ?? options.sourceDurationSamples\n ?? (waveformData && sampleRate ? 
Math.ceil(waveformData.duration * sampleRate) : undefined);\n\n if (sampleRate === undefined) {\n throw new Error('createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n if (sourceDurationSamples === undefined) {\n throw new Error('createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default duration to full source duration\n const durationSamples = options.durationSamples ?? sourceDurationSamples;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n sampleRate,\n sourceDurationSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n if (sampleRate === undefined) {\n throw new Error('createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n\n // Determine source duration: audioBuffer > explicit option > waveformData\n const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;\n if (sourceDuration === undefined) {\n throw new Error('createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match (could cause visual/audio sync issues)\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default clip duration to full source duration\n const duration = options.duration ?? 
sourceDuration;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n sampleRate,\n sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n 
gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / 
samplesPerPixel);\n}\n"],"mappings":";AAgRO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAGlF,QAAM,wBAAwB,aAAa,UACtC,QAAQ,0BACP,gBAAgB,aAAa,KAAK,KAAK,aAAa,WAAW,UAAU,IAAI;AAEnF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AACA,MAAI,0BAA0B,QAAW;AACvC,UAAM,IAAI,MAAM,gHAAgH;AAAA,EAClI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,mBAAmB;AAEnD,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAClF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,mHAAmH;AAAA,EACrI;AAGA,QAAM,iBAAiB,aAAa,YAAY,QAAQ,kBAAkB,cAAc;AACxF,MAAI,mBAAmB,QAAW;AAChC,UAAM,IAAI,MAAM,oHAAoH;AAAA,EACtI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA,uBAAuB,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;ACrcO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;A
AC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@waveform-playlist/core",
-  "version": "5.0.0-alpha.1",
+  "version": "5.0.0-alpha.10",
   "description": "Core types, interfaces and utilities for waveform-playlist",
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",