@waveform-playlist/core 5.3.2 → 6.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -377,6 +377,76 @@ interface Gap {
  }
  declare function findGaps(track: ClipTrack): Gap[];

+ /**
+ * Shared annotation types used across waveform-playlist packages
+ */
+ /**
+ * Base annotation data structure
+ */
+ interface AnnotationData {
+ id: string;
+ start: number;
+ end: number;
+ lines: string[];
+ language?: string;
+ }
+ /**
+ * Annotation format definition for parsing/serializing
+ */
+ interface AnnotationFormat {
+ name: string;
+ parse: (data: unknown) => AnnotationData[];
+ serialize: (annotations: AnnotationData[]) => unknown;
+ }
+ /**
+ * Options for annotation list behavior
+ */
+ interface AnnotationListOptions {
+ editable?: boolean;
+ linkEndpoints?: boolean;
+ isContinuousPlay?: boolean;
+ }
+ /**
+ * Event handlers for annotation operations
+ */
+ interface AnnotationEventMap {
+ 'annotation-select': (annotation: AnnotationData) => void;
+ 'annotation-update': (annotation: AnnotationData) => void;
+ 'annotation-delete': (id: string) => void;
+ 'annotation-create': (annotation: AnnotationData) => void;
+ }
+ /**
+ * Configuration options passed to annotation action handlers.
+ * Used by both browser and annotations packages.
+ */
+ interface AnnotationActionOptions {
+ /** Whether annotation endpoints are linked (moving one endpoint moves the other) */
+ linkEndpoints?: boolean;
+ /** Whether to continue playing after an annotation ends */
+ continuousPlay?: boolean;
+ /** Additional custom properties */
+ [key: string]: unknown;
+ }
+ /**
+ * An action control shown on annotation items (e.g., delete, split).
+ */
+ interface AnnotationAction {
+ class?: string;
+ text?: string;
+ title: string;
+ action: (annotation: AnnotationData, index: number, annotations: AnnotationData[], opts: AnnotationActionOptions) => void;
+ }
+ /**
+ * Props passed to the renderAnnotationItem function for custom rendering.
+ */
+ interface RenderAnnotationItemProps {
+ annotation: AnnotationData;
+ index: number;
+ isActive: boolean;
+ onClick: () => void;
+ formatTime: (seconds: number) => string;
+ }
+
  interface WaveformConfig {
  sampleRate: number;
  samplesPerPixel: number;
@@ -459,4 +529,4 @@ declare function pixelsToSamples(pixels: number, samplesPerPixel: number): numbe
  declare function pixelsToSeconds(pixels: number, samplesPerPixel: number, sampleRate: number): number;
  declare function secondsToPixels(seconds: number, samplesPerPixel: number, sampleRate: number): number;

- export { type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type ColorMapEntry, type ColorMapName, type ColorMapValue, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type FFTSize, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type RenderMode, type SpectrogramComputeConfig, type SpectrogramConfig, type SpectrogramData, type SpectrogramDisplayConfig, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type TrackSpectrogramOverrides, type WaveformConfig, type WaveformDataObject, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
+ export { type AnnotationAction, type AnnotationActionOptions, type AnnotationData, type AnnotationEventMap, type AnnotationFormat, type AnnotationListOptions, type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type ColorMapEntry, type ColorMapName, type ColorMapValue, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type FFTSize, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type RenderAnnotationItemProps, type RenderMode, type SpectrogramComputeConfig, type SpectrogramConfig, type SpectrogramData, type SpectrogramDisplayConfig, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type TrackSpectrogramOverrides, type WaveformConfig, type WaveformDataObject, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
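
The hunks above add a set of shared annotation types to the core declarations. A minimal sketch of how they compose; the annotation values, icon class, and handler bodies below are illustrative assumptions, not code from the package:

```typescript
import type {
  AnnotationData,
  AnnotationAction,
  AnnotationActionOptions,
  AnnotationEventMap,
} from '@waveform-playlist/core';

// Two annotations covering regions of the timeline (start/end in seconds).
const annotations: AnnotationData[] = [
  { id: 'a1', start: 0, end: 2.5, lines: ['Intro'], language: 'en' },
  { id: 'a2', start: 2.5, end: 7, lines: ['Verse 1'] },
];

// An action control rendered on each annotation item. The handler receives
// the clicked annotation, its index, the full list, and the shared options.
const deleteAction: AnnotationAction = {
  class: 'fas.fa-trash', // icon class is an illustrative assumption
  title: 'Delete annotation',
  action: (annotation, index, all, opts: AnnotationActionOptions) => {
    console.log('deleting', annotation.id);
    all.splice(index, 1);
    // opts.continuousPlay / opts.linkEndpoints describe how the host playlist
    // is configured; extra keys are allowed via the index signature.
    console.log('continuous play?', opts.continuousPlay === true);
  },
};

// Handlers keyed by the AnnotationEventMap interface.
const handlers: Partial<AnnotationEventMap> = {
  'annotation-select': (annotation) => console.log('selected', annotation.id),
  'annotation-delete': (id) => console.log('deleted', id),
};
```

Because `AnnotationActionOptions` carries an index signature, action handlers can receive host-specific settings without widening the core types.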
package/dist/index.d.ts CHANGED
@@ -377,6 +377,76 @@ interface Gap {
  }
  declare function findGaps(track: ClipTrack): Gap[];

+ /**
+ * Shared annotation types used across waveform-playlist packages
+ */
+ /**
+ * Base annotation data structure
+ */
+ interface AnnotationData {
+ id: string;
+ start: number;
+ end: number;
+ lines: string[];
+ language?: string;
+ }
+ /**
+ * Annotation format definition for parsing/serializing
+ */
+ interface AnnotationFormat {
+ name: string;
+ parse: (data: unknown) => AnnotationData[];
+ serialize: (annotations: AnnotationData[]) => unknown;
+ }
+ /**
+ * Options for annotation list behavior
+ */
+ interface AnnotationListOptions {
+ editable?: boolean;
+ linkEndpoints?: boolean;
+ isContinuousPlay?: boolean;
+ }
+ /**
+ * Event handlers for annotation operations
+ */
+ interface AnnotationEventMap {
+ 'annotation-select': (annotation: AnnotationData) => void;
+ 'annotation-update': (annotation: AnnotationData) => void;
+ 'annotation-delete': (id: string) => void;
+ 'annotation-create': (annotation: AnnotationData) => void;
+ }
+ /**
+ * Configuration options passed to annotation action handlers.
+ * Used by both browser and annotations packages.
+ */
+ interface AnnotationActionOptions {
+ /** Whether annotation endpoints are linked (moving one endpoint moves the other) */
+ linkEndpoints?: boolean;
+ /** Whether to continue playing after an annotation ends */
+ continuousPlay?: boolean;
+ /** Additional custom properties */
+ [key: string]: unknown;
+ }
+ /**
+ * An action control shown on annotation items (e.g., delete, split).
+ */
+ interface AnnotationAction {
+ class?: string;
+ text?: string;
+ title: string;
+ action: (annotation: AnnotationData, index: number, annotations: AnnotationData[], opts: AnnotationActionOptions) => void;
+ }
+ /**
+ * Props passed to the renderAnnotationItem function for custom rendering.
+ */
+ interface RenderAnnotationItemProps {
+ annotation: AnnotationData;
+ index: number;
+ isActive: boolean;
+ onClick: () => void;
+ formatTime: (seconds: number) => string;
+ }
+
  interface WaveformConfig {
  sampleRate: number;
  samplesPerPixel: number;
@@ -459,4 +529,4 @@ declare function pixelsToSamples(pixels: number, samplesPerPixel: number): numbe
  declare function pixelsToSeconds(pixels: number, samplesPerPixel: number, sampleRate: number): number;
  declare function secondsToPixels(seconds: number, samplesPerPixel: number, sampleRate: number): number;

- export { type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type ColorMapEntry, type ColorMapName, type ColorMapValue, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type FFTSize, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type RenderMode, type SpectrogramComputeConfig, type SpectrogramConfig, type SpectrogramData, type SpectrogramDisplayConfig, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type TrackSpectrogramOverrides, type WaveformConfig, type WaveformDataObject, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
+ export { type AnnotationAction, type AnnotationActionOptions, type AnnotationData, type AnnotationEventMap, type AnnotationFormat, type AnnotationListOptions, type AudioBuffer$1 as AudioBuffer, type AudioClip, type ClipTrack, type ColorMapEntry, type ColorMapName, type ColorMapValue, type CreateClipOptions, type CreateClipOptionsSeconds, type CreateTrackOptions, type FFTSize, type Fade, type FadeType, type Gap, InteractionState, type PlaylistConfig, type PlayoutState, type RenderAnnotationItemProps, type RenderMode, type SpectrogramComputeConfig, type SpectrogramConfig, type SpectrogramData, type SpectrogramDisplayConfig, type TimeSelection, type Timeline, type Track, type TrackEffectsFunction, type TrackSpectrogramOverrides, type WaveformConfig, type WaveformDataObject, clipsOverlap, createClip, createClipFromSeconds, createTimeline, createTrack, findGaps, getClipsAtSample, getClipsInRange, pixelsToSamples, pixelsToSeconds, samplesToPixels, samplesToSeconds, secondsToPixels, secondsToSamples, sortClipsByTime };
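
The index.d.ts declarations mirror the .mts file above, so the same annotation types are available from both entry points. A sketch of a custom `AnnotationFormat` against these types, assuming a plain JSON array as input; the field names and coercions below are illustrative:

```typescript
import type { AnnotationData, AnnotationFormat } from '@waveform-playlist/core';

// Round-trips a plain JSON array of { id, start, end, lines, language } records.
// The input shape is an assumption; a real format (subtitles, label files, ...)
// would validate and convert its own fields.
const jsonFormat: AnnotationFormat = {
  name: 'json',
  parse: (data: unknown): AnnotationData[] => {
    if (!Array.isArray(data)) return [];
    return data.map((item, i) => ({
      id: String(item?.id ?? i),
      start: Number(item?.start ?? 0),
      end: Number(item?.end ?? 0),
      lines: Array.isArray(item?.lines) ? item.lines.map(String) : [],
      language: typeof item?.language === 'string' ? item.language : undefined,
    }));
  },
  serialize: (annotations: AnnotationData[]): unknown =>
    annotations.map(({ id, start, end, lines, language }) => ({ id, start, end, lines, language })),
};
```

With a `parse`/`serialize` pair like this, a host can plug in an external annotation format while the rest of the API deals only in `AnnotationData`.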
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["export * from './types';\nexport * from './utils';\n","/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\nimport type { RenderMode, SpectrogramConfig, ColorMapValue } from './spectrogram';\n\n/**\n * WaveformData object from waveform-data.js library.\n * Supports resample() and slice() for dynamic zoom levels.\n * See: https://github.com/bbc/waveform-data.js\n */\nexport interface WaveformDataObject {\n /** Sample rate of the original audio */\n readonly sample_rate: number;\n /** Number of audio samples per pixel */\n readonly scale: number;\n /** Length of waveform data in pixels */\n readonly length: number;\n /** Bit depth (8 or 16) */\n readonly bits: number;\n /** Duration in seconds */\n readonly duration: number;\n /** Number of channels */\n readonly channels: number;\n /** Get channel data */\n channel: (index: number) => {\n min_array: () => number[];\n max_array: () => number[];\n };\n /** Resample to different scale */\n resample: (options: { scale: number } | { width: number }) => WaveformDataObject;\n /** Slice a portion of the waveform */\n slice: (options: { startTime: number; endTime: number } | { startIndex: number; endIndex: number }) => WaveformDataObject;\n}\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. 
Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n *\n * Clips can be created with just waveformData (for instant visual rendering)\n * and have audioBuffer added later when audio finishes loading.\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /**\n * The audio buffer containing the audio data.\n * Optional for peaks-first rendering - can be added later.\n * Required for playback and editing operations.\n */\n audioBuffer?: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /**\n * Sample rate for this clip's audio.\n * Required when audioBuffer is not provided (for peaks-first rendering).\n * When audioBuffer is present, this should match audioBuffer.sampleRate.\n */\n sampleRate: number;\n\n /**\n * Total duration of the source audio in samples.\n * Required when audioBuffer is not provided (for trim bounds calculation).\n * When audioBuffer is present, this should equal audioBuffer.length.\n */\n sourceDurationSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n\n /**\n * Pre-computed waveform data from waveform-data.js library.\n * When provided, the library will use this instead of computing peaks from the audioBuffer.\n * Supports resampling to different zoom levels and slicing for clip trimming.\n * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`\n */\n waveformData?: WaveformDataObject;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n\n /** Visualization render mode. Default: 'waveform' */\n renderMode?: RenderMode;\n\n /** Per-track spectrogram configuration (FFT size, window, frequency scale, etc.) 
*/\n spectrogramConfig?: SpectrogramConfig;\n\n /** Per-track spectrogram color map name or custom color array */\n spectrogramColorMap?: ColorMapValue;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n *\n * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptions {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer/source duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in samples - required if audioBuffer not provided */\n sourceDurationSamples?: number;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n *\n * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptionsSeconds {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer/source duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in seconds - required if audioBuffer not provided */\n sourceDuration?: number;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = 
audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n\n // Determine source duration: audioBuffer > explicit option > waveformData (converted to samples)\n const sourceDurationSamples = audioBuffer?.length\n ?? options.sourceDurationSamples\n ?? (waveformData && sampleRate ? Math.ceil(waveformData.duration * sampleRate) : undefined);\n\n if (sampleRate === undefined) {\n throw new Error('createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n if (sourceDurationSamples === undefined) {\n throw new Error('createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default duration to full source duration\n const durationSamples = options.durationSamples ?? sourceDurationSamples;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n sampleRate,\n sourceDurationSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n if (sampleRate === undefined) {\n throw new Error('createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n\n // Determine source duration: audioBuffer > explicit option > waveformData\n const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;\n if (sourceDuration === undefined) {\n throw new Error('createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match (could cause visual/audio sync issues)\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default clip duration to full source duration\n const duration = options.duration ?? 
sourceDuration;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n sampleRate,\n sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n 
gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n\n// Export spectrogram types\nexport * from './spectrogram';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / 
samplesPerPixel);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC0RO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAGlF,QAAM,wBAAwB,aAAa,UACtC,QAAQ,0BACP,gBAAgB,aAAa,KAAK,KAAK,aAAa,WAAW,UAAU,IAAI;AAEnF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AACA,MAAI,0BAA0B,QAAW;AACvC,UAAM,IAAI,MAAM,gHAAgH;AAAA,EAClI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,mBAAmB;AAEnD,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAClF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,mHAAmH;AAAA,EACrI;AAGA,QAAM,iBAAiB,aAAa,YAAY,QAAQ,kBAAkB,cAAc;AACxF,MAAI,mBAAmB,QAAW;AAChC,UAAM,IAAI,MAAM,oHAAoH;AAAA,EACtI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA,uBAAuB,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AC/cO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SA
AS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;AAC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
+ {"version":3,"sources":["../src/index.ts","../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["export * from './types';\nexport * from './utils';\n","/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\nimport type { RenderMode, SpectrogramConfig, ColorMapValue } from './spectrogram';\n\n/**\n * WaveformData object from waveform-data.js library.\n * Supports resample() and slice() for dynamic zoom levels.\n * See: https://github.com/bbc/waveform-data.js\n */\nexport interface WaveformDataObject {\n /** Sample rate of the original audio */\n readonly sample_rate: number;\n /** Number of audio samples per pixel */\n readonly scale: number;\n /** Length of waveform data in pixels */\n readonly length: number;\n /** Bit depth (8 or 16) */\n readonly bits: number;\n /** Duration in seconds */\n readonly duration: number;\n /** Number of channels */\n readonly channels: number;\n /** Get channel data */\n channel: (index: number) => {\n min_array: () => number[];\n max_array: () => number[];\n };\n /** Resample to different scale */\n resample: (options: { scale: number } | { width: number }) => WaveformDataObject;\n /** Slice a portion of the waveform */\n slice: (options: { startTime: number; endTime: number } | { startIndex: number; endIndex: number }) => WaveformDataObject;\n}\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. 
Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n *\n * Clips can be created with just waveformData (for instant visual rendering)\n * and have audioBuffer added later when audio finishes loading.\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /**\n * The audio buffer containing the audio data.\n * Optional for peaks-first rendering - can be added later.\n * Required for playback and editing operations.\n */\n audioBuffer?: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /**\n * Sample rate for this clip's audio.\n * Required when audioBuffer is not provided (for peaks-first rendering).\n * When audioBuffer is present, this should match audioBuffer.sampleRate.\n */\n sampleRate: number;\n\n /**\n * Total duration of the source audio in samples.\n * Required when audioBuffer is not provided (for trim bounds calculation).\n * When audioBuffer is present, this should equal audioBuffer.length.\n */\n sourceDurationSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n\n /**\n * Pre-computed waveform data from waveform-data.js library.\n * When provided, the library will use this instead of computing peaks from the audioBuffer.\n * Supports resampling to different zoom levels and slicing for clip trimming.\n * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`\n */\n waveformData?: WaveformDataObject;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n\n /** Visualization render mode. Default: 'waveform' */\n renderMode?: RenderMode;\n\n /** Per-track spectrogram configuration (FFT size, window, frequency scale, etc.) 
*/\n spectrogramConfig?: SpectrogramConfig;\n\n /** Per-track spectrogram color map name or custom color array */\n spectrogramColorMap?: ColorMapValue;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n *\n * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptions {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer/source duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in samples - required if audioBuffer not provided */\n sourceDurationSamples?: number;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n *\n * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptionsSeconds {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer/source duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in seconds - required if audioBuffer not provided */\n sourceDuration?: number;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = 
audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n\n // Determine source duration: audioBuffer > explicit option > waveformData (converted to samples)\n const sourceDurationSamples = audioBuffer?.length\n ?? options.sourceDurationSamples\n ?? (waveformData && sampleRate ? Math.ceil(waveformData.duration * sampleRate) : undefined);\n\n if (sampleRate === undefined) {\n throw new Error('createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n if (sourceDurationSamples === undefined) {\n throw new Error('createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default duration to full source duration\n const durationSamples = options.durationSamples ?? sourceDurationSamples;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n sampleRate,\n sourceDurationSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n if (sampleRate === undefined) {\n throw new Error('createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n\n // Determine source duration: audioBuffer > explicit option > waveformData\n const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;\n if (sourceDuration === undefined) {\n throw new Error('createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match (could cause visual/audio sync issues)\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default clip duration to full source duration\n const duration = options.duration ?? 
sourceDuration;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n sampleRate,\n sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n 
gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n\n// Export spectrogram types\nexport * from './spectrogram';\n\n// Export annotation types\nexport * from './annotations';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / 
samplesPerPixel);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC0RO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAGlF,QAAM,wBAAwB,aAAa,UACtC,QAAQ,0BACP,gBAAgB,aAAa,KAAK,KAAK,aAAa,WAAW,UAAU,IAAI;AAEnF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AACA,MAAI,0BAA0B,QAAW;AACvC,UAAM,IAAI,MAAM,gHAAgH;AAAA,EAClI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,mBAAmB;AAEnD,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAClF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,mHAAmH;AAAA,EACrI;AAGA,QAAM,iBAAiB,aAAa,YAAY,QAAQ,kBAAkB,cAAc;AACxF,MAAI,mBAAmB,QAAW;AAChC,UAAM,IAAI,MAAM,oHAAoH;AAAA,EACtI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA,uBAAuB,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AC/cO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SA
AS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;AAC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
@@ -1 +1 @@
- {"version":3,"sources":["../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\nimport type { RenderMode, SpectrogramConfig, ColorMapValue } from './spectrogram';\n\n/**\n * WaveformData object from waveform-data.js library.\n * Supports resample() and slice() for dynamic zoom levels.\n * See: https://github.com/bbc/waveform-data.js\n */\nexport interface WaveformDataObject {\n /** Sample rate of the original audio */\n readonly sample_rate: number;\n /** Number of audio samples per pixel */\n readonly scale: number;\n /** Length of waveform data in pixels */\n readonly length: number;\n /** Bit depth (8 or 16) */\n readonly bits: number;\n /** Duration in seconds */\n readonly duration: number;\n /** Number of channels */\n readonly channels: number;\n /** Get channel data */\n channel: (index: number) => {\n min_array: () => number[];\n max_array: () => number[];\n };\n /** Resample to different scale */\n resample: (options: { scale: number } | { width: number }) => WaveformDataObject;\n /** Slice a portion of the waveform */\n slice: (options: { startTime: number; endTime: number } | { startIndex: number; endIndex: number }) => WaveformDataObject;\n}\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. 
Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n *\n * Clips can be created with just waveformData (for instant visual rendering)\n * and have audioBuffer added later when audio finishes loading.\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /**\n * The audio buffer containing the audio data.\n * Optional for peaks-first rendering - can be added later.\n * Required for playback and editing operations.\n */\n audioBuffer?: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /**\n * Sample rate for this clip's audio.\n * Required when audioBuffer is not provided (for peaks-first rendering).\n * When audioBuffer is present, this should match audioBuffer.sampleRate.\n */\n sampleRate: number;\n\n /**\n * Total duration of the source audio in samples.\n * Required when audioBuffer is not provided (for trim bounds calculation).\n * When audioBuffer is present, this should equal audioBuffer.length.\n */\n sourceDurationSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n\n /**\n * Pre-computed waveform data from waveform-data.js library.\n * When provided, the library will use this instead of computing peaks from the audioBuffer.\n * Supports resampling to different zoom levels and slicing for clip trimming.\n * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`\n */\n waveformData?: WaveformDataObject;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n\n /** Visualization render mode. Default: 'waveform' */\n renderMode?: RenderMode;\n\n /** Per-track spectrogram configuration (FFT size, window, frequency scale, etc.) 
*/\n spectrogramConfig?: SpectrogramConfig;\n\n /** Per-track spectrogram color map name or custom color array */\n spectrogramColorMap?: ColorMapValue;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n *\n * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptions {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer/source duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in samples - required if audioBuffer not provided */\n sourceDurationSamples?: number;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n *\n * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptionsSeconds {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer/source duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in seconds - required if audioBuffer not provided */\n sourceDuration?: number;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = 
audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n\n // Determine source duration: audioBuffer > explicit option > waveformData (converted to samples)\n const sourceDurationSamples = audioBuffer?.length\n ?? options.sourceDurationSamples\n ?? (waveformData && sampleRate ? Math.ceil(waveformData.duration * sampleRate) : undefined);\n\n if (sampleRate === undefined) {\n throw new Error('createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n if (sourceDurationSamples === undefined) {\n throw new Error('createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default duration to full source duration\n const durationSamples = options.durationSamples ?? sourceDurationSamples;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n sampleRate,\n sourceDurationSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n if (sampleRate === undefined) {\n throw new Error('createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n\n // Determine source duration: audioBuffer > explicit option > waveformData\n const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;\n if (sourceDuration === undefined) {\n throw new Error('createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match (could cause visual/audio sync issues)\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default clip duration to full source duration\n const duration = options.duration ?? 
sourceDuration;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n sampleRate,\n sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n 
gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n\n// Export spectrogram types\nexport * from './spectrogram';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / 
samplesPerPixel);\n}\n"],"mappings":";AA0RO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAGlF,QAAM,wBAAwB,aAAa,UACtC,QAAQ,0BACP,gBAAgB,aAAa,KAAK,KAAK,aAAa,WAAW,UAAU,IAAI;AAEnF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AACA,MAAI,0BAA0B,QAAW;AACvC,UAAM,IAAI,MAAM,gHAAgH;AAAA,EAClI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,mBAAmB;AAEnD,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAClF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,mHAAmH;AAAA,EACrI;AAGA,QAAM,iBAAiB,aAAa,YAAY,QAAQ,kBAAkB,cAAc;AACxF,MAAI,mBAAmB,QAAW;AAChC,UAAM,IAAI,MAAM,oHAAoH;AAAA,EACtI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA,uBAAuB,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AC/cO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;A
AC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
1
+ {"version":3,"sources":["../src/types/clip.ts","../src/types/index.ts","../src/utils/conversions.ts"],"sourcesContent":["/**\n * Clip-Based Model Types\n *\n * These types support a professional multi-track editing model where:\n * - Each track can contain multiple audio clips\n * - Clips can be positioned anywhere on the timeline\n * - Clips have independent trim points (offset/duration)\n * - Gaps between clips are silent\n * - Clips can overlap (for crossfades)\n */\n\nimport { Fade } from './index';\nimport type { RenderMode, SpectrogramConfig, ColorMapValue } from './spectrogram';\n\n/**\n * WaveformData object from waveform-data.js library.\n * Supports resample() and slice() for dynamic zoom levels.\n * See: https://github.com/bbc/waveform-data.js\n */\nexport interface WaveformDataObject {\n /** Sample rate of the original audio */\n readonly sample_rate: number;\n /** Number of audio samples per pixel */\n readonly scale: number;\n /** Length of waveform data in pixels */\n readonly length: number;\n /** Bit depth (8 or 16) */\n readonly bits: number;\n /** Duration in seconds */\n readonly duration: number;\n /** Number of channels */\n readonly channels: number;\n /** Get channel data */\n channel: (index: number) => {\n min_array: () => number[];\n max_array: () => number[];\n };\n /** Resample to different scale */\n resample: (options: { scale: number } | { width: number }) => WaveformDataObject;\n /** Slice a portion of the waveform */\n slice: (options: { startTime: number; endTime: number } | { startIndex: number; endIndex: number }) => WaveformDataObject;\n}\n\n/**\n * Generic effects function type for track-level audio processing.\n *\n * The actual implementation receives Tone.js audio nodes. Using generic types\n * here to avoid circular dependencies with the playout package.\n *\n * @param graphEnd - The end of the track's audio graph (Tone.js Gain node)\n * @param destination - Where to connect the effects output (Tone.js ToneAudioNode)\n * @param isOffline - Whether rendering offline (for export)\n * @returns Optional cleanup function called when track is disposed\n *\n * @example\n * ```typescript\n * const trackEffects: TrackEffectsFunction = (graphEnd, destination, isOffline) => {\n * const reverb = new Tone.Reverb({ decay: 1.5 });\n * graphEnd.connect(reverb);\n * reverb.connect(destination);\n *\n * return () => {\n * reverb.dispose();\n * };\n * };\n * ```\n */\nexport type TrackEffectsFunction = (\n graphEnd: unknown,\n destination: unknown,\n isOffline: boolean\n) => void | (() => void);\n\n/**\n * Represents a single audio clip on the timeline\n *\n * IMPORTANT: All positions/durations are stored as SAMPLE COUNTS (integers)\n * to avoid floating-point precision errors. 
Convert to seconds only when\n * needed for playback using: seconds = samples / sampleRate\n *\n * Clips can be created with just waveformData (for instant visual rendering)\n * and have audioBuffer added later when audio finishes loading.\n */\nexport interface AudioClip {\n /** Unique identifier for this clip */\n id: string;\n\n /**\n * The audio buffer containing the audio data.\n * Optional for peaks-first rendering - can be added later.\n * Required for playback and editing operations.\n */\n audioBuffer?: AudioBuffer;\n\n /** Position on timeline where this clip starts (in samples at timeline sampleRate) */\n startSample: number;\n\n /** Duration of this clip (in samples) - how much of the audio buffer to play */\n durationSamples: number;\n\n /** Offset into the audio buffer where playback starts (in samples) - the \"trim start\" point */\n offsetSamples: number;\n\n /**\n * Sample rate for this clip's audio.\n * Required when audioBuffer is not provided (for peaks-first rendering).\n * When audioBuffer is present, this should match audioBuffer.sampleRate.\n */\n sampleRate: number;\n\n /**\n * Total duration of the source audio in samples.\n * Required when audioBuffer is not provided (for trim bounds calculation).\n * When audioBuffer is present, this should equal audioBuffer.length.\n */\n sourceDurationSamples: number;\n\n /** Optional fade in effect */\n fadeIn?: Fade;\n\n /** Optional fade out effect */\n fadeOut?: Fade;\n\n /** Clip-specific gain/volume multiplier (0.0 to 1.0+) */\n gain: number;\n\n /** Optional label/name for this clip */\n name?: string;\n\n /** Optional color for visual distinction */\n color?: string;\n\n /**\n * Pre-computed waveform data from waveform-data.js library.\n * When provided, the library will use this instead of computing peaks from the audioBuffer.\n * Supports resampling to different zoom levels and slicing for clip trimming.\n * Load with: `const waveformData = await loadWaveformData('/path/to/peaks.dat')`\n */\n waveformData?: WaveformDataObject;\n}\n\n/**\n * Represents a track containing multiple audio clips\n */\nexport interface ClipTrack {\n /** Unique identifier for this track */\n id: string;\n\n /** Display name for this track */\n name: string;\n\n /** Array of audio clips on this track */\n clips: AudioClip[];\n\n /** Whether this track is muted */\n muted: boolean;\n\n /** Whether this track is soloed */\n soloed: boolean;\n\n /** Track volume (0.0 to 1.0+) */\n volume: number;\n\n /** Stereo pan (-1.0 = left, 0 = center, 1.0 = right) */\n pan: number;\n\n /** Optional track color for visual distinction */\n color?: string;\n\n /** Track height in pixels (for UI) */\n height?: number;\n\n /** Optional effects function for this track */\n effects?: TrackEffectsFunction;\n\n /** Visualization render mode. Default: 'waveform' */\n renderMode?: RenderMode;\n\n /** Per-track spectrogram configuration (FFT size, window, frequency scale, etc.) 
*/\n spectrogramConfig?: SpectrogramConfig;\n\n /** Per-track spectrogram color map name or custom color array */\n spectrogramColorMap?: ColorMapValue;\n}\n\n/**\n * Represents the entire timeline/project\n */\nexport interface Timeline {\n /** All tracks in the timeline */\n tracks: ClipTrack[];\n\n /** Total timeline duration in seconds */\n duration: number;\n\n /** Sample rate for all audio (typically 44100 or 48000) */\n sampleRate: number;\n\n /** Optional project name */\n name?: string;\n\n /** Optional tempo (BPM) for grid snapping */\n tempo?: number;\n\n /** Optional time signature for grid snapping */\n timeSignature?: {\n numerator: number;\n denominator: number;\n };\n}\n\n/**\n * Options for creating a new audio clip (using sample counts)\n *\n * Either audioBuffer OR (sampleRate + sourceDurationSamples + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptions {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startSample: number; // Position on timeline (in samples)\n durationSamples?: number; // Defaults to full buffer/source duration (in samples)\n offsetSamples?: number; // Defaults to 0\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in samples - required if audioBuffer not provided */\n sourceDurationSamples?: number;\n}\n\n/**\n * Options for creating a new audio clip (using seconds for convenience)\n *\n * Either audioBuffer OR (sampleRate + sourceDuration + waveformData) must be provided.\n * Providing waveformData without audioBuffer enables peaks-first rendering.\n */\nexport interface CreateClipOptionsSeconds {\n /** Audio buffer - optional for peaks-first rendering */\n audioBuffer?: AudioBuffer;\n startTime: number; // Position on timeline (in seconds)\n duration?: number; // Defaults to full buffer/source duration (in seconds)\n offset?: number; // Defaults to 0 (in seconds)\n gain?: number; // Defaults to 1.0\n name?: string;\n color?: string;\n fadeIn?: Fade;\n fadeOut?: Fade;\n /** Pre-computed waveform data from waveform-data.js (e.g., from BBC audiowaveform) */\n waveformData?: WaveformDataObject;\n /** Sample rate - required if audioBuffer not provided */\n sampleRate?: number;\n /** Total source audio duration in seconds - required if audioBuffer not provided */\n sourceDuration?: number;\n}\n\n/**\n * Options for creating a new track\n */\nexport interface CreateTrackOptions {\n name: string;\n clips?: AudioClip[];\n muted?: boolean;\n soloed?: boolean;\n volume?: number;\n pan?: number;\n color?: string;\n height?: number;\n}\n\n/**\n * Creates a new AudioClip with sensible defaults (using sample counts)\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDurationSamples can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClip(options: CreateClipOptions): AudioClip {\n const {\n audioBuffer,\n startSample,\n offsetSamples = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = 
audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n\n // Determine source duration: audioBuffer > explicit option > waveformData (converted to samples)\n const sourceDurationSamples = audioBuffer?.length\n ?? options.sourceDurationSamples\n ?? (waveformData && sampleRate ? Math.ceil(waveformData.duration * sampleRate) : undefined);\n\n if (sampleRate === undefined) {\n throw new Error('createClip: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n if (sourceDurationSamples === undefined) {\n throw new Error('createClip: sourceDurationSamples is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default duration to full source duration\n const durationSamples = options.durationSamples ?? sourceDurationSamples;\n\n return {\n id: generateId(),\n audioBuffer,\n startSample,\n durationSamples,\n offsetSamples,\n sampleRate,\n sourceDurationSamples,\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n };\n}\n\n/**\n * Creates a new AudioClip from time-based values (convenience function)\n * Converts seconds to samples using the audioBuffer's sampleRate or explicit sampleRate\n *\n * For peaks-first rendering (no audioBuffer), sampleRate and sourceDuration can be:\n * - Provided explicitly via options\n * - Derived from waveformData (sample_rate and duration properties)\n */\nexport function createClipFromSeconds(options: CreateClipOptionsSeconds): AudioClip {\n const {\n audioBuffer,\n startTime,\n offset = 0,\n gain = 1.0,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n } = options;\n\n // Determine sample rate: audioBuffer > explicit option > waveformData\n const sampleRate = audioBuffer?.sampleRate ?? options.sampleRate ?? waveformData?.sample_rate;\n if (sampleRate === undefined) {\n throw new Error('createClipFromSeconds: sampleRate is required when audioBuffer is not provided (can use waveformData.sample_rate)');\n }\n\n // Determine source duration: audioBuffer > explicit option > waveformData\n const sourceDuration = audioBuffer?.duration ?? options.sourceDuration ?? waveformData?.duration;\n if (sourceDuration === undefined) {\n throw new Error('createClipFromSeconds: sourceDuration is required when audioBuffer is not provided (can use waveformData.duration)');\n }\n\n // Warn if sample rates don't match (could cause visual/audio sync issues)\n if (audioBuffer && waveformData && audioBuffer.sampleRate !== waveformData.sample_rate) {\n console.warn(\n `Sample rate mismatch: audioBuffer (${audioBuffer.sampleRate}) vs waveformData (${waveformData.sample_rate}). ` +\n `Using audioBuffer sample rate. Waveform visualization may be slightly off.`\n );\n }\n\n // Default clip duration to full source duration\n const duration = options.duration ?? 
sourceDuration;\n\n return createClip({\n audioBuffer,\n startSample: Math.round(startTime * sampleRate),\n durationSamples: Math.round(duration * sampleRate),\n offsetSamples: Math.round(offset * sampleRate),\n sampleRate,\n sourceDurationSamples: Math.ceil(sourceDuration * sampleRate),\n gain,\n name,\n color,\n fadeIn,\n fadeOut,\n waveformData,\n });\n}\n\n/**\n * Creates a new ClipTrack with sensible defaults\n */\nexport function createTrack(options: CreateTrackOptions): ClipTrack {\n const {\n name,\n clips = [],\n muted = false,\n soloed = false,\n volume = 1.0,\n pan = 0,\n color,\n height,\n } = options;\n\n return {\n id: generateId(),\n name,\n clips,\n muted,\n soloed,\n volume,\n pan,\n color,\n height,\n };\n}\n\n/**\n * Creates a new Timeline with sensible defaults\n */\nexport function createTimeline(\n tracks: ClipTrack[],\n sampleRate: number = 44100,\n options?: {\n name?: string;\n tempo?: number;\n timeSignature?: { numerator: number; denominator: number };\n }\n): Timeline {\n // Calculate total duration from all clips across all tracks (in seconds)\n const durationSamples = tracks.reduce((maxSamples, track) => {\n const trackSamples = track.clips.reduce((max, clip) => {\n return Math.max(max, clip.startSample + clip.durationSamples);\n }, 0);\n return Math.max(maxSamples, trackSamples);\n }, 0);\n\n const duration = durationSamples / sampleRate;\n\n return {\n tracks,\n duration,\n sampleRate,\n name: options?.name,\n tempo: options?.tempo,\n timeSignature: options?.timeSignature,\n };\n}\n\n/**\n * Generates a unique ID for clips and tracks\n */\nfunction generateId(): string {\n return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n}\n\n/**\n * Utility: Get all clips within a sample range\n */\nexport function getClipsInRange(\n track: ClipTrack,\n startSample: number,\n endSample: number\n): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n // Clip overlaps with range if:\n // - Clip starts before range ends AND\n // - Clip ends after range starts\n return clip.startSample < endSample && clipEnd > startSample;\n });\n}\n\n/**\n * Utility: Get all clips at a specific sample position\n */\nexport function getClipsAtSample(track: ClipTrack, sample: number): AudioClip[] {\n return track.clips.filter((clip) => {\n const clipEnd = clip.startSample + clip.durationSamples;\n return sample >= clip.startSample && sample < clipEnd;\n });\n}\n\n/**\n * Utility: Check if two clips overlap\n */\nexport function clipsOverlap(clip1: AudioClip, clip2: AudioClip): boolean {\n const clip1End = clip1.startSample + clip1.durationSamples;\n const clip2End = clip2.startSample + clip2.durationSamples;\n\n return clip1.startSample < clip2End && clip1End > clip2.startSample;\n}\n\n/**\n * Utility: Sort clips by startSample\n */\nexport function sortClipsByTime(clips: AudioClip[]): AudioClip[] {\n return [...clips].sort((a, b) => a.startSample - b.startSample);\n}\n\n/**\n * Utility: Find gaps between clips (silent regions)\n */\nexport interface Gap {\n startSample: number;\n endSample: number;\n durationSamples: number;\n}\n\nexport function findGaps(track: ClipTrack): Gap[] {\n if (track.clips.length === 0) return [];\n\n const sorted = sortClipsByTime(track.clips);\n const gaps: Gap[] = [];\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const currentClipEnd = sorted[i].startSample + sorted[i].durationSamples;\n const nextClipStart = sorted[i + 1].startSample;\n\n if (nextClipStart > currentClipEnd) {\n 
gaps.push({\n startSample: currentClipEnd,\n endSample: nextClipStart,\n durationSamples: nextClipStart - currentClipEnd,\n });\n }\n }\n\n return gaps;\n}\n","export interface WaveformConfig {\n sampleRate: number;\n samplesPerPixel: number;\n waveHeight?: number;\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n}\n\nexport interface AudioBuffer {\n length: number;\n duration: number;\n numberOfChannels: number;\n sampleRate: number;\n getChannelData(channel: number): Float32Array;\n}\n\nexport interface Track {\n id: string;\n name: string;\n src?: string | AudioBuffer; // Support both URL strings and AudioBuffer objects\n gain: number;\n muted: boolean;\n soloed: boolean;\n stereoPan: number;\n startTime: number;\n endTime?: number;\n fadeIn?: Fade;\n fadeOut?: Fade;\n cueIn?: number;\n cueOut?: number;\n}\n\n/**\n * Simple fade configuration\n */\nexport interface Fade {\n /** Duration of the fade in seconds */\n duration: number;\n /** Type of fade curve (default: 'linear') */\n type?: FadeType;\n}\n\nexport type FadeType = 'logarithmic' | 'linear' | 'sCurve' | 'exponential';\n\nexport interface PlaylistConfig {\n samplesPerPixel?: number;\n waveHeight?: number;\n container?: HTMLElement;\n isAutomaticScroll?: boolean;\n timescale?: boolean;\n colors?: {\n waveOutlineColor?: string;\n waveFillColor?: string;\n waveProgressColor?: string;\n };\n controls?: {\n show?: boolean;\n width?: number;\n };\n zoomLevels?: number[];\n}\n\nexport interface PlayoutState {\n isPlaying: boolean;\n isPaused: boolean;\n cursor: number;\n duration: number;\n}\n\nexport interface TimeSelection {\n start: number;\n end: number;\n}\n\nexport enum InteractionState {\n Cursor = 'cursor',\n Select = 'select',\n Shift = 'shift',\n FadeIn = 'fadein',\n FadeOut = 'fadeout',\n}\n\n// Export clip-based model types\nexport * from './clip';\n\n// Export spectrogram types\nexport * from './spectrogram';\n\n// Export annotation types\nexport * from './annotations';\n","export function samplesToSeconds(samples: number, sampleRate: number): number {\n return samples / sampleRate;\n}\n\nexport function secondsToSamples(seconds: number, sampleRate: number): number {\n return Math.ceil(seconds * sampleRate);\n}\n\nexport function samplesToPixels(samples: number, samplesPerPixel: number): number {\n return Math.floor(samples / samplesPerPixel);\n}\n\nexport function pixelsToSamples(pixels: number, samplesPerPixel: number): number {\n return Math.floor(pixels * samplesPerPixel);\n}\n\nexport function pixelsToSeconds(\n pixels: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return (pixels * samplesPerPixel) / sampleRate;\n}\n\nexport function secondsToPixels(\n seconds: number,\n samplesPerPixel: number,\n sampleRate: number\n): number {\n return Math.ceil((seconds * sampleRate) / 
samplesPerPixel);\n}\n"],"mappings":";AA0RO,SAAS,WAAW,SAAuC;AAChE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,IAChB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAGlF,QAAM,wBAAwB,aAAa,UACtC,QAAQ,0BACP,gBAAgB,aAAa,KAAK,KAAK,aAAa,WAAW,UAAU,IAAI;AAEnF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AACA,MAAI,0BAA0B,QAAW;AACvC,UAAM,IAAI,MAAM,gHAAgH;AAAA,EAClI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,mBAAmB;AAEnD,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,sBAAsB,SAA8C;AAClF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,aAAa,aAAa,cAAc,QAAQ,cAAc,cAAc;AAClF,MAAI,eAAe,QAAW;AAC5B,UAAM,IAAI,MAAM,mHAAmH;AAAA,EACrI;AAGA,QAAM,iBAAiB,aAAa,YAAY,QAAQ,kBAAkB,cAAc;AACxF,MAAI,mBAAmB,QAAW;AAChC,UAAM,IAAI,MAAM,oHAAoH;AAAA,EACtI;AAGA,MAAI,eAAe,gBAAgB,YAAY,eAAe,aAAa,aAAa;AACtF,YAAQ;AAAA,MACN,sCAAsC,YAAY,UAAU,sBAAsB,aAAa,WAAW;AAAA,IAE5G;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,WAAW;AAAA,IAChB;AAAA,IACA,aAAa,KAAK,MAAM,YAAY,UAAU;AAAA,IAC9C,iBAAiB,KAAK,MAAM,WAAW,UAAU;AAAA,IACjD,eAAe,KAAK,MAAM,SAAS,UAAU;AAAA,IAC7C;AAAA,IACA,uBAAuB,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YAAY,SAAwC;AAClE,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ,CAAC;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKO,SAAS,eACd,QACA,aAAqB,OACrB,SAKU;AAEV,QAAM,kBAAkB,OAAO,OAAO,CAAC,YAAY,UAAU;AAC3D,UAAM,eAAe,MAAM,MAAM,OAAO,CAAC,KAAK,SAAS;AACrD,aAAO,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,eAAe;AAAA,IAC9D,GAAG,CAAC;AACJ,WAAO,KAAK,IAAI,YAAY,YAAY;AAAA,EAC1C,GAAG,CAAC;AAEJ,QAAM,WAAW,kBAAkB;AAEnC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,SAAS;AAAA,IACf,OAAO,SAAS;AAAA,IAChB,eAAe,SAAS;AAAA,EAC1B;AACF;AAKA,SAAS,aAAqB;AAC5B,SAAO,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AACjE;AAKO,SAAS,gBACd,OACA,aACA,WACa;AACb,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AAIxC,WAAO,KAAK,cAAc,aAAa,UAAU;AAAA,EACnD,CAAC;AACH;AAKO,SAAS,iBAAiB,OAAkB,QAA6B;AAC9E,SAAO,MAAM,MAAM,OAAO,CAAC,SAAS;AAClC,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,WAAO,UAAU,KAAK,eAAe,SAAS;AAAA,EAChD,CAAC;AACH;AAKO,SAAS,aAAa,OAAkB,OAA2B;AACxE,QAAM,WAAW,MAAM,cAAc,MAAM;AAC3C,QAAM,WAAW,MAAM,cAAc,MAAM;AAE3C,SAAO,MAAM,cAAc,YAAY,WAAW,MAAM;AAC1D;AAKO,SAAS,gBAAgB,OAAiC;AAC/D,SAAO,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAChE;AAWO,SAAS,SAAS,OAAyB;AAChD,MAAI,MAAM,MAAM,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,SAAS,gBAAgB,MAAM,KAAK;AAC1C,QAAM,OAAc,CAAC;AAErB,WAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,UAAM,iBAAiB,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,EAAE;AACzD,UAAM,gBAAgB,OAAO,IAAI,CAAC,EAAE;AAEpC,QAAI,gBAAgB,gBAAgB;AAClC,WAAK,KAAK;AAAA,QACR,aAAa;AAAA,QACb,WAAW;AAAA,QACX,iBAAiB,gBAAgB;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AC/cO,IAAK,mBAAL,kBAAKA,sBAAL;AACL,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,WAAQ;AACR,EAAAA,kBAAA,YAAS;AACT,EAAAA,kBAAA,aAAU;AALA,SAAAA;AAAA,GAAA;;;AC3EL,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,UAAU;AACnB;AAEO,SAAS,iBAAiB,SAAiB,YAA4B;AAC5E,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAEO,SAAS,gBAAgB,SAAiB,iBAAiC;AAChF,SAAO,KAAK,MAAM,UAAU,eAAe;A
AC7C;AAEO,SAAS,gBAAgB,QAAgB,iBAAiC;AAC/E,SAAO,KAAK,MAAM,SAAS,eAAe;AAC5C;AAEO,SAAS,gBACd,QACA,iBACA,YACQ;AACR,SAAQ,SAAS,kBAAmB;AACtC;AAEO,SAAS,gBACd,SACA,iBACA,YACQ;AACR,SAAO,KAAK,KAAM,UAAU,aAAc,eAAe;AAC3D;","names":["InteractionState"]}
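The embedded source above documents a peaks-first workflow: a clip can be created from pre-computed waveform data alone, with the sample rate and source duration derived from `waveformData`, and the `audioBuffer` attached later once decoding finishes. A minimal sketch of that flow, assuming the clip/track/timeline factories are re-exported from the package entry point and that `waveformData` was loaded elsewhere (e.g. from a BBC audiowaveform peaks file):

```typescript
// Sketch only: assumes these factories are re-exported from the package root
// and that `waveformData` is a WaveformDataObject loaded separately.
import {
  createClipFromSeconds,
  createTrack,
  createTimeline,
  type WaveformDataObject,
} from '@waveform-playlist/core';

declare const waveformData: WaveformDataObject; // hypothetical, loaded elsewhere

// No audioBuffer yet: sampleRate and sourceDuration fall back to
// waveformData.sample_rate / waveformData.duration, so the clip can be
// rendered before the audio itself has loaded.
const clip = createClipFromSeconds({
  startTime: 2.5,   // timeline position in seconds
  offset: 0.25,     // trim 250 ms off the start of the source
  gain: 0.8,
  name: 'Vocal take 1',
  waveformData,
});

const track = createTrack({ name: 'Vocals', clips: [clip] });

// sampleRate should match the project's audio; 44100 is only illustrative.
const timeline = createTimeline([track], 44100, { name: 'Demo project' });
```

Per the source, the clip duration defaults to the full source duration, and all seconds values are converted to integer sample counts internally (`Math.round` for start/offset/duration, `Math.ceil` for the source length).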
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@waveform-playlist/core",
3
- "version": "5.3.2",
3
+ "version": "6.0.0",
4
4
  "description": "Core types, interfaces and utilities for waveform-playlist",
5
5
  "main": "./dist/index.js",
6
6
  "module": "./dist/index.mjs",