@dawcore/components 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -39,6 +39,7 @@ var __decorateClass = (decorators, target, key, kind) => {
39
39
  var index_exports = {};
40
40
  __export(index_exports, {
41
41
  AudioResumeController: () => AudioResumeController,
42
+ ClipPointerHandler: () => ClipPointerHandler,
42
43
  DawClipElement: () => DawClipElement,
43
44
  DawEditorElement: () => DawEditorElement,
44
45
  DawPauseButtonElement: () => DawPauseButtonElement,
@@ -53,7 +54,8 @@ __export(index_exports, {
53
54
  DawTransportButton: () => DawTransportButton,
54
55
  DawTransportElement: () => DawTransportElement,
55
56
  DawWaveformElement: () => DawWaveformElement,
56
- RecordingController: () => RecordingController
57
+ RecordingController: () => RecordingController,
58
+ splitAtPlayhead: () => splitAtPlayhead
57
59
  });
58
60
  module.exports = __toCommonJS(index_exports);
59
61
 
@@ -629,9 +631,40 @@ DawTransportButton.styles = import_lit6.css`
629
631
 
630
632
  // src/elements/daw-play-button.ts
631
633
  var DawPlayButtonElement = class extends DawTransportButton {
634
+ constructor() {
635
+ super(...arguments);
636
+ this._isRecording = false;
637
+ this._targetRef = null;
638
+ this._onRecStart = () => {
639
+ this._isRecording = true;
640
+ };
641
+ this._onRecEnd = () => {
642
+ this._isRecording = false;
643
+ };
644
+ }
645
+ connectedCallback() {
646
+ super.connectedCallback();
647
+ requestAnimationFrame(() => {
648
+ const target = this.target;
649
+ if (!target) return;
650
+ this._targetRef = target;
651
+ target.addEventListener("daw-recording-start", this._onRecStart);
652
+ target.addEventListener("daw-recording-complete", this._onRecEnd);
653
+ target.addEventListener("daw-recording-error", this._onRecEnd);
654
+ });
655
+ }
656
+ disconnectedCallback() {
657
+ super.disconnectedCallback();
658
+ if (this._targetRef) {
659
+ this._targetRef.removeEventListener("daw-recording-start", this._onRecStart);
660
+ this._targetRef.removeEventListener("daw-recording-complete", this._onRecEnd);
661
+ this._targetRef.removeEventListener("daw-recording-error", this._onRecEnd);
662
+ this._targetRef = null;
663
+ }
664
+ }
632
665
  render() {
633
666
  return import_lit7.html`
634
- <button part="button" @click=${this._onClick}>
667
+ <button part="button" ?disabled=${this._isRecording} @click=${this._onClick}>
635
668
  <slot>Play</slot>
636
669
  </button>
637
670
  `;
@@ -647,6 +680,9 @@ var DawPlayButtonElement = class extends DawTransportButton {
647
680
  target.play();
648
681
  }
649
682
  };
683
+ __decorateClass([
684
+ (0, import_decorators6.state)()
685
+ ], DawPlayButtonElement.prototype, "_isRecording", 2);
650
686
  DawPlayButtonElement = __decorateClass([
651
687
  (0, import_decorators6.customElement)("daw-play-button")
652
688
  ], DawPlayButtonElement);
@@ -655,9 +691,42 @@ DawPlayButtonElement = __decorateClass([
655
691
  var import_lit8 = require("lit");
656
692
  var import_decorators7 = require("lit/decorators.js");
657
693
  var DawPauseButtonElement = class extends DawTransportButton {
694
+ constructor() {
695
+ super(...arguments);
696
+ this._isPaused = false;
697
+ this._isRecording = false;
698
+ this._targetRef = null;
699
+ this._onRecStart = () => {
700
+ this._isRecording = true;
701
+ };
702
+ this._onRecEnd = () => {
703
+ this._isRecording = false;
704
+ this._isPaused = false;
705
+ };
706
+ }
707
+ connectedCallback() {
708
+ super.connectedCallback();
709
+ requestAnimationFrame(() => {
710
+ const target = this.target;
711
+ if (!target) return;
712
+ this._targetRef = target;
713
+ target.addEventListener("daw-recording-start", this._onRecStart);
714
+ target.addEventListener("daw-recording-complete", this._onRecEnd);
715
+ target.addEventListener("daw-recording-error", this._onRecEnd);
716
+ });
717
+ }
718
+ disconnectedCallback() {
719
+ super.disconnectedCallback();
720
+ if (this._targetRef) {
721
+ this._targetRef.removeEventListener("daw-recording-start", this._onRecStart);
722
+ this._targetRef.removeEventListener("daw-recording-complete", this._onRecEnd);
723
+ this._targetRef.removeEventListener("daw-recording-error", this._onRecEnd);
724
+ this._targetRef = null;
725
+ }
726
+ }
658
727
  render() {
659
728
  return import_lit8.html`
660
- <button part="button" @click=${this._onClick}>
729
+ <button part="button" ?data-paused=${this._isPaused} @click=${this._onClick}>
661
730
  <slot>Pause</slot>
662
731
  </button>
663
732
  `;
@@ -670,9 +739,36 @@ var DawPauseButtonElement = class extends DawTransportButton {
670
739
  );
671
740
  return;
672
741
  }
673
- target.pause();
742
+ if (this._isRecording) {
743
+ if (this._isPaused) {
744
+ target.resumeRecording();
745
+ target.play(target.currentTime);
746
+ this._isPaused = false;
747
+ } else {
748
+ target.pauseRecording();
749
+ target.pause();
750
+ this._isPaused = true;
751
+ }
752
+ } else {
753
+ target.pause();
754
+ }
674
755
  }
675
756
  };
757
+ DawPauseButtonElement.styles = [
758
+ DawTransportButton.styles,
759
+ import_lit8.css`
760
+ button[data-paused] {
761
+ background: rgba(255, 255, 255, 0.1);
762
+ border-color: var(--daw-controls-text, #e0d4c8);
763
+ }
764
+ `
765
+ ];
766
+ __decorateClass([
767
+ (0, import_decorators7.state)()
768
+ ], DawPauseButtonElement.prototype, "_isPaused", 2);
769
+ __decorateClass([
770
+ (0, import_decorators7.state)()
771
+ ], DawPauseButtonElement.prototype, "_isRecording", 2);
676
772
  DawPauseButtonElement = __decorateClass([
677
773
  (0, import_decorators7.customElement)("daw-pause-button")
678
774
  ], DawPauseButtonElement);
@@ -696,6 +792,9 @@ var DawStopButtonElement = class extends DawTransportButton {
696
792
  );
697
793
  return;
698
794
  }
795
+ if (target.isRecording) {
796
+ target.stopRecording();
797
+ }
699
798
  target.stop();
700
799
  }
701
800
  };
@@ -1027,20 +1126,22 @@ function extractPeaks(waveformData, samplesPerPixel, isMono, offsetSamples, dura
1027
1126
 
1028
1127
  // src/workers/peakPipeline.ts
1029
1128
  var PeakPipeline = class {
1030
- constructor() {
1129
+ constructor(baseScale = 128, bits = 16) {
1031
1130
  this._worker = null;
1032
1131
  this._cache = /* @__PURE__ */ new WeakMap();
1033
1132
  this._inflight = /* @__PURE__ */ new WeakMap();
1133
+ this._baseScale = baseScale;
1134
+ this._bits = bits;
1034
1135
  }
1035
1136
  /**
1036
1137
  * Generate PeakData for a clip from its AudioBuffer.
1037
1138
  * Uses cached WaveformData when available; otherwise generates via worker.
1038
- * The worker generates at `scale` (= samplesPerPixel) for exact rendering.
1139
+ * Worker generates at baseScale (default 128); extractPeaks resamples to the requested zoom.
1039
1140
  */
1040
- async generatePeaks(audioBuffer, samplesPerPixel, isMono) {
1041
- const waveformData = await this._getWaveformData(audioBuffer, samplesPerPixel);
1141
+ async generatePeaks(audioBuffer, samplesPerPixel, isMono, offsetSamples, durationSamples) {
1142
+ const waveformData = await this._getWaveformData(audioBuffer);
1042
1143
  try {
1043
- return extractPeaks(waveformData, samplesPerPixel, isMono);
1144
+ return extractPeaks(waveformData, samplesPerPixel, isMono, offsetSamples, durationSamples);
1044
1145
  } catch (err) {
1045
1146
  console.warn("[dawcore] extractPeaks failed: " + String(err));
1046
1147
  throw err;
@@ -1052,14 +1153,24 @@ var PeakPipeline = class {
1052
1153
  * Returns a new Map of clipId → PeakData. Clips without cached data or where
1053
1154
  * the target scale is finer than the cached base are skipped.
1054
1155
  */
1055
- reextractPeaks(clipBuffers, samplesPerPixel, isMono) {
1156
+ reextractPeaks(clipBuffers, samplesPerPixel, isMono, clipOffsets) {
1056
1157
  const result = /* @__PURE__ */ new Map();
1057
1158
  for (const [clipId, audioBuffer] of clipBuffers) {
1058
1159
  const cached = this._cache.get(audioBuffer);
1059
1160
  if (cached) {
1060
1161
  if (samplesPerPixel < cached.scale) continue;
1061
1162
  try {
1062
- result.set(clipId, extractPeaks(cached, samplesPerPixel, isMono));
1163
+ const offsets = clipOffsets?.get(clipId);
1164
+ result.set(
1165
+ clipId,
1166
+ extractPeaks(
1167
+ cached,
1168
+ samplesPerPixel,
1169
+ isMono,
1170
+ offsets?.offsetSamples,
1171
+ offsets?.durationSamples
1172
+ )
1173
+ );
1063
1174
  } catch (err) {
1064
1175
  console.warn("[dawcore] reextractPeaks failed for clip " + clipId + ": " + String(err));
1065
1176
  }
@@ -1071,9 +1182,9 @@ var PeakPipeline = class {
1071
1182
  this._worker?.terminate();
1072
1183
  this._worker = null;
1073
1184
  }
1074
- async _getWaveformData(audioBuffer, samplesPerPixel) {
1185
+ async _getWaveformData(audioBuffer) {
1075
1186
  const cached = this._cache.get(audioBuffer);
1076
- if (cached && cached.scale <= samplesPerPixel) return cached;
1187
+ if (cached) return cached;
1077
1188
  const inflight = this._inflight.get(audioBuffer);
1078
1189
  if (inflight) return inflight;
1079
1190
  if (!this._worker) {
@@ -1087,8 +1198,8 @@ var PeakPipeline = class {
1087
1198
  channels,
1088
1199
  length: audioBuffer.length,
1089
1200
  sampleRate: audioBuffer.sampleRate,
1090
- scale: samplesPerPixel,
1091
- bits: 16,
1201
+ scale: this._baseScale,
1202
+ bits: this._bits,
1092
1203
  splitChannels: true
1093
1204
  }).then((waveformData) => {
1094
1205
  this._cache.set(audioBuffer, waveformData);
@@ -1209,7 +1320,7 @@ DawTrackControlsElement.styles = import_lit10.css`
1209
1320
  :host {
1210
1321
  display: flex;
1211
1322
  flex-direction: column;
1212
- justify-content: center;
1323
+ justify-content: flex-start;
1213
1324
  box-sizing: border-box;
1214
1325
  padding: 6px 8px;
1215
1326
  background: var(--daw-controls-background, #0f0f1a);
@@ -1217,13 +1328,14 @@ DawTrackControlsElement.styles = import_lit10.css`
1217
1328
  border-bottom: 1px solid rgba(255, 255, 255, 0.05);
1218
1329
  font-family: system-ui, sans-serif;
1219
1330
  font-size: 11px;
1331
+ overflow: hidden;
1220
1332
  }
1221
1333
  .header {
1222
1334
  display: flex;
1223
1335
  align-items: center;
1224
1336
  justify-content: space-between;
1225
1337
  gap: 4px;
1226
- margin-bottom: 6px;
1338
+ margin-bottom: 3px;
1227
1339
  }
1228
1340
  .name {
1229
1341
  flex: 1;
@@ -1250,7 +1362,7 @@ DawTrackControlsElement.styles = import_lit10.css`
1250
1362
  .buttons {
1251
1363
  display: flex;
1252
1364
  gap: 3px;
1253
- margin-bottom: 6px;
1365
+ margin-bottom: 3px;
1254
1366
  }
1255
1367
  .btn {
1256
1368
  background: rgba(255, 255, 255, 0.06);
@@ -1280,7 +1392,7 @@ DawTrackControlsElement.styles = import_lit10.css`
1280
1392
  display: flex;
1281
1393
  align-items: center;
1282
1394
  gap: 4px;
1283
- height: 20px;
1395
+ height: 16px;
1284
1396
  }
1285
1397
  .slider-label {
1286
1398
  width: 50px;
@@ -1377,6 +1489,77 @@ var hostStyles = import_lit11.css`
1377
1489
  --daw-clip-header-text: #e0d4c8;
1378
1490
  }
1379
1491
  `;
1492
+ var clipStyles = import_lit11.css`
1493
+ .clip-container {
1494
+ position: absolute;
1495
+ overflow: hidden;
1496
+ }
1497
+ .clip-header {
1498
+ position: relative;
1499
+ z-index: 1;
1500
+ height: 20px;
1501
+ background: var(--daw-clip-header-background, rgba(0, 0, 0, 0.4));
1502
+ border-bottom: 1px solid rgba(255, 255, 255, 0.08);
1503
+ display: flex;
1504
+ align-items: center;
1505
+ padding: 0 6px;
1506
+ user-select: none;
1507
+ -webkit-user-drag: none;
1508
+ }
1509
+ .clip-header span {
1510
+ font-size: 10px;
1511
+ font-weight: 500;
1512
+ letter-spacing: 0.02em;
1513
+ font-family: system-ui, sans-serif;
1514
+ color: var(--daw-clip-header-text, #e0d4c8);
1515
+ white-space: nowrap;
1516
+ overflow: hidden;
1517
+ text-overflow: ellipsis;
1518
+ opacity: 0.8;
1519
+ }
1520
+ .clip-boundary {
1521
+ position: absolute;
1522
+ top: 0;
1523
+ width: 8px;
1524
+ height: 100%;
1525
+ z-index: 2;
1526
+ cursor: col-resize;
1527
+ background: transparent;
1528
+ border: none;
1529
+ touch-action: none;
1530
+ user-select: none;
1531
+ -webkit-user-drag: none;
1532
+ transition: background 0.1s, border-color 0.1s;
1533
+ }
1534
+ .clip-boundary[data-boundary-edge='left'] {
1535
+ left: 0;
1536
+ }
1537
+ .clip-boundary[data-boundary-edge='right'] {
1538
+ right: 0;
1539
+ }
1540
+ .clip-boundary[data-boundary-edge='left']:hover {
1541
+ background: rgba(255, 255, 255, 0.2);
1542
+ border-left: 2px solid rgba(255, 255, 255, 0.5);
1543
+ }
1544
+ .clip-boundary[data-boundary-edge='right']:hover {
1545
+ background: rgba(255, 255, 255, 0.2);
1546
+ border-right: 2px solid rgba(255, 255, 255, 0.5);
1547
+ }
1548
+ .clip-boundary[data-boundary-edge='left'].dragging {
1549
+ background: rgba(255, 255, 255, 0.4);
1550
+ border-left: 2px solid rgba(255, 255, 255, 0.8);
1551
+ }
1552
+ .clip-boundary[data-boundary-edge='right'].dragging {
1553
+ background: rgba(255, 255, 255, 0.4);
1554
+ border-right: 2px solid rgba(255, 255, 255, 0.8);
1555
+ }
1556
+ .clip-header[data-interactive] {
1557
+ cursor: grab;
1558
+ }
1559
+ .clip-header[data-interactive]:active {
1560
+ cursor: grabbing;
1561
+ }
1562
+ `;
1380
1563
 
1381
1564
  // src/controllers/viewport-controller.ts
1382
1565
  var OVERSCAN_MULTIPLIER = 1.5;
@@ -1559,6 +1742,9 @@ var RecordingController = class {
1559
1742
  }
1560
1743
  const channelCount = stream.getAudioTracks()[0]?.getSettings()?.channelCount ?? 1;
1561
1744
  const startSample = options.startSample ?? Math.floor(this._host._currentTime * this._host.effectiveSampleRate);
1745
+ const outputLatency = rawCtx.outputLatency ?? 0;
1746
+ const lookAhead = context.lookAhead ?? 0;
1747
+ const latencySamples = Math.floor((outputLatency + lookAhead) * rawCtx.sampleRate);
1562
1748
  const source = context.createMediaStreamSource(stream);
1563
1749
  const workletNode = context.createAudioWorkletNode("recording-processor", {
1564
1750
  channelCount,
@@ -1585,6 +1771,8 @@ var RecordingController = class {
1585
1771
  channelCount,
1586
1772
  bits,
1587
1773
  isFirstMessage: true,
1774
+ latencySamples,
1775
+ wasOverdub: options.overdub ?? false,
1588
1776
  _onTrackEnded: onTrackEnded,
1589
1777
  _audioTrack: audioTrack
1590
1778
  };
@@ -1605,6 +1793,9 @@ var RecordingController = class {
1605
1793
  })
1606
1794
  );
1607
1795
  this._host.requestUpdate();
1796
+ if (options.overdub && typeof this._host.play === "function") {
1797
+ await this._host.play(this._host._currentTime);
1798
+ }
1608
1799
  } catch (err) {
1609
1800
  this._cleanupSession(trackId);
1610
1801
  console.warn("[dawcore] RecordingController: Failed to start recording: " + String(err));
@@ -1617,11 +1808,28 @@ var RecordingController = class {
1617
1808
  );
1618
1809
  }
1619
1810
  }
1811
+ pauseRecording(trackId) {
1812
+ const id = trackId ?? [...this._sessions.keys()][0];
1813
+ if (!id) return;
1814
+ const session = this._sessions.get(id);
1815
+ if (!session) return;
1816
+ session.workletNode.port.postMessage({ command: "pause" });
1817
+ }
1818
+ resumeRecording(trackId) {
1819
+ const id = trackId ?? [...this._sessions.keys()][0];
1820
+ if (!id) return;
1821
+ const session = this._sessions.get(id);
1822
+ if (!session) return;
1823
+ session.workletNode.port.postMessage({ command: "resume" });
1824
+ }
1620
1825
  stopRecording(trackId) {
1621
1826
  const id = trackId ?? [...this._sessions.keys()][0];
1622
1827
  if (!id) return;
1623
1828
  const session = this._sessions.get(id);
1624
1829
  if (!session) return;
1830
+ if (session.wasOverdub && typeof this._host.stop === "function") {
1831
+ this._host.stop();
1832
+ }
1625
1833
  session.workletNode.port.postMessage({ command: "stop" });
1626
1834
  session.source.disconnect();
1627
1835
  session.workletNode.disconnect();
@@ -1639,7 +1847,8 @@ var RecordingController = class {
1639
1847
  );
1640
1848
  return;
1641
1849
  }
1642
- const stopCtx = (0, import_playout2.getGlobalContext)().rawContext;
1850
+ const context = (0, import_playout2.getGlobalContext)();
1851
+ const stopCtx = context.rawContext;
1643
1852
  const channelData = session.chunks.map((chunkArr) => (0, import_recording.concatenateAudioData)(chunkArr));
1644
1853
  const audioBuffer = (0, import_recording.createAudioBuffer)(
1645
1854
  stopCtx,
@@ -1647,7 +1856,21 @@ var RecordingController = class {
1647
1856
  this._host.effectiveSampleRate,
1648
1857
  session.channelCount
1649
1858
  );
1650
- const durationSamples = audioBuffer.length;
1859
+ const latencyOffsetSamples = session.latencySamples;
1860
+ const effectiveDuration = Math.max(0, audioBuffer.length - latencyOffsetSamples);
1861
+ if (effectiveDuration === 0) {
1862
+ console.warn("[dawcore] RecordingController: Recording too short for latency compensation");
1863
+ this._sessions.delete(id);
1864
+ this._host.requestUpdate();
1865
+ this._host.dispatchEvent(
1866
+ new CustomEvent("daw-recording-error", {
1867
+ bubbles: true,
1868
+ composed: true,
1869
+ detail: { trackId: id, error: new Error("Recording too short to save") }
1870
+ })
1871
+ );
1872
+ return;
1873
+ }
1651
1874
  const event = new CustomEvent("daw-recording-complete", {
1652
1875
  bubbles: true,
1653
1876
  composed: true,
@@ -1656,14 +1879,21 @@ var RecordingController = class {
1656
1879
  trackId: id,
1657
1880
  audioBuffer,
1658
1881
  startSample: session.startSample,
1659
- durationSamples
1882
+ durationSamples: effectiveDuration,
1883
+ offsetSamples: latencyOffsetSamples
1660
1884
  }
1661
1885
  });
1662
1886
  const notPrevented = this._host.dispatchEvent(event);
1663
1887
  this._sessions.delete(id);
1664
1888
  this._host.requestUpdate();
1665
1889
  if (notPrevented) {
1666
- this._createClipFromRecording(id, audioBuffer, session.startSample, durationSamples);
1890
+ this._createClipFromRecording(
1891
+ id,
1892
+ audioBuffer,
1893
+ session.startSample,
1894
+ effectiveDuration,
1895
+ latencyOffsetSamples
1896
+ );
1667
1897
  }
1668
1898
  }
1669
1899
  // Session fields are mutated in place on the hot path (~60fps worklet messages).
@@ -1694,7 +1924,9 @@ var RecordingController = class {
1694
1924
  );
1695
1925
  const newPeakCount = Math.floor(session.peaks[ch].length / 2);
1696
1926
  const waveformSelector = `daw-waveform[data-recording-track="${trackId}"][data-recording-channel="${ch}"]`;
1697
- const waveformEl = this._host.shadowRoot?.querySelector(waveformSelector);
1927
+ const waveformEl = this._host.shadowRoot?.querySelector(
1928
+ waveformSelector
1929
+ );
1698
1930
  if (waveformEl) {
1699
1931
  if (session.isFirstMessage) {
1700
1932
  waveformEl.peaks = session.peaks[ch];
@@ -1713,9 +1945,15 @@ var RecordingController = class {
1713
1945
  this._host.requestUpdate();
1714
1946
  }
1715
1947
  }
1716
- _createClipFromRecording(trackId, audioBuffer, startSample, durationSamples) {
1948
+ _createClipFromRecording(trackId, audioBuffer, startSample, durationSamples, offsetSamples = 0) {
1717
1949
  if (typeof this._host._addRecordedClip === "function") {
1718
- this._host._addRecordedClip(trackId, audioBuffer, startSample, durationSamples);
1950
+ this._host._addRecordedClip(
1951
+ trackId,
1952
+ audioBuffer,
1953
+ startSample,
1954
+ durationSamples,
1955
+ offsetSamples
1956
+ );
1719
1957
  } else {
1720
1958
  console.warn(
1721
1959
  '[dawcore] RecordingController: host does not implement _addRecordedClip \u2014 clip not created for track "' + trackId + '"'
@@ -1746,6 +1984,11 @@ var RecordingController = class {
1746
1984
 
1747
1985
  // src/interactions/pointer-handler.ts
1748
1986
  var import_core = require("@waveform-playlist/core");
1987
+
1988
+ // src/interactions/constants.ts
1989
+ var DRAG_THRESHOLD = 3;
1990
+
1991
+ // src/interactions/pointer-handler.ts
1749
1992
  var PointerHandler = class {
1750
1993
  constructor(host) {
1751
1994
  this._isDragging = false;
@@ -1754,6 +1997,34 @@ var PointerHandler = class {
1754
1997
  // Cached from onPointerDown to avoid forced layout reflows at 60fps during drag
1755
1998
  this._timelineRect = null;
1756
1999
  this.onPointerDown = (e) => {
2000
+ const clipHandler = this._host._clipHandler;
2001
+ if (clipHandler) {
2002
+ const target = e.composedPath()[0];
2003
+ if (target && clipHandler.tryHandle(target, e)) {
2004
+ e.preventDefault();
2005
+ this._timeline = this._host.shadowRoot?.querySelector(".timeline");
2006
+ if (this._timeline) {
2007
+ this._timeline.setPointerCapture(e.pointerId);
2008
+ const onMove = (me) => clipHandler.onPointerMove(me);
2009
+ const onUp = (ue) => {
2010
+ clipHandler.onPointerUp(ue);
2011
+ this._timeline?.removeEventListener("pointermove", onMove);
2012
+ this._timeline?.removeEventListener("pointerup", onUp);
2013
+ try {
2014
+ this._timeline?.releasePointerCapture(ue.pointerId);
2015
+ } catch (err) {
2016
+ console.warn(
2017
+ "[dawcore] releasePointerCapture failed (may already be released): " + String(err)
2018
+ );
2019
+ }
2020
+ this._timeline = null;
2021
+ };
2022
+ this._timeline.addEventListener("pointermove", onMove);
2023
+ this._timeline.addEventListener("pointerup", onUp);
2024
+ }
2025
+ return;
2026
+ }
2027
+ }
1757
2028
  this._timeline = this._host.shadowRoot?.querySelector(".timeline");
1758
2029
  if (!this._timeline) return;
1759
2030
  this._timelineRect = this._timeline.getBoundingClientRect();
@@ -1766,7 +2037,7 @@ var PointerHandler = class {
1766
2037
  this._onPointerMove = (e) => {
1767
2038
  if (!this._timeline) return;
1768
2039
  const currentPx = this._pxFromPointer(e);
1769
- if (!this._isDragging && Math.abs(currentPx - this._dragStartPx) > 3) {
2040
+ if (!this._isDragging && Math.abs(currentPx - this._dragStartPx) > DRAG_THRESHOLD) {
1770
2041
  this._isDragging = true;
1771
2042
  }
1772
2043
  if (this._isDragging) {
@@ -1901,6 +2172,245 @@ var PointerHandler = class {
1901
2172
  }
1902
2173
  };
1903
2174
 
2175
+ // src/interactions/clip-pointer-handler.ts
2176
+ var ClipPointerHandler = class {
2177
+ constructor(host) {
2178
+ this._mode = null;
2179
+ this._clipId = "";
2180
+ this._trackId = "";
2181
+ this._startPx = 0;
2182
+ this._isDragging = false;
2183
+ this._lastDeltaPx = 0;
2184
+ this._cumulativeDeltaSamples = 0;
2185
+ // Trim visual feedback: snapshot of original clip state
2186
+ this._clipContainer = null;
2187
+ this._boundaryEl = null;
2188
+ this._originalLeft = 0;
2189
+ this._originalWidth = 0;
2190
+ this._originalOffsetSamples = 0;
2191
+ this._originalDurationSamples = 0;
2192
+ this._host = host;
2193
+ }
2194
+ /** Returns true if a drag interaction is currently in progress. */
2195
+ get isActive() {
2196
+ return this._mode !== null;
2197
+ }
2198
+ /**
2199
+ * Attempts to handle a pointerdown event on the given target element.
2200
+ * Returns true if the target is a recognized clip interaction element.
2201
+ */
2202
+ tryHandle(target, e) {
2203
+ if (!this._host.interactiveClips) return false;
2204
+ const boundary = target.closest?.(".clip-boundary");
2205
+ const header = target.closest?.(".clip-header");
2206
+ if (boundary && boundary.dataset.boundaryEdge !== void 0) {
2207
+ const clipId = boundary.dataset.clipId;
2208
+ const trackId = boundary.dataset.trackId;
2209
+ const edge = boundary.dataset.boundaryEdge;
2210
+ if (!clipId || !trackId || edge !== "left" && edge !== "right") return false;
2211
+ this._beginDrag(edge === "left" ? "trim-left" : "trim-right", clipId, trackId, e);
2212
+ this._boundaryEl = boundary;
2213
+ return true;
2214
+ }
2215
+ if (header && header.dataset.interactive !== void 0) {
2216
+ const clipId = header.dataset.clipId;
2217
+ const trackId = header.dataset.trackId;
2218
+ if (!clipId || !trackId) return false;
2219
+ this._beginDrag("move", clipId, trackId, e);
2220
+ return true;
2221
+ }
2222
+ return false;
2223
+ }
2224
+ _beginDrag(mode, clipId, trackId, e) {
2225
+ this._mode = mode;
2226
+ this._clipId = clipId;
2227
+ this._trackId = trackId;
2228
+ this._startPx = e.clientX;
2229
+ this._isDragging = false;
2230
+ this._lastDeltaPx = 0;
2231
+ this._cumulativeDeltaSamples = 0;
2232
+ if (mode === "trim-left" || mode === "trim-right") {
2233
+ const container = this._host.shadowRoot?.querySelector(
2234
+ `.clip-container[data-clip-id="${clipId}"]`
2235
+ );
2236
+ if (container) {
2237
+ this._clipContainer = container;
2238
+ this._originalLeft = parseFloat(container.style.left) || 0;
2239
+ this._originalWidth = parseFloat(container.style.width) || 0;
2240
+ } else {
2241
+ console.warn("[dawcore] clip container not found for trim visual feedback: " + clipId);
2242
+ }
2243
+ const engine = this._host.engine;
2244
+ if (engine) {
2245
+ const bounds = engine.getClipBounds(trackId, clipId);
2246
+ if (bounds) {
2247
+ this._originalOffsetSamples = bounds.offsetSamples;
2248
+ this._originalDurationSamples = bounds.durationSamples;
2249
+ }
2250
+ }
2251
+ }
2252
+ }
2253
+ /** Processes pointermove events during an active drag. */
2254
+ onPointerMove(e) {
2255
+ if (this._mode === null) return;
2256
+ const totalDeltaPx = e.clientX - this._startPx;
2257
+ if (!this._isDragging && Math.abs(totalDeltaPx) > DRAG_THRESHOLD) {
2258
+ this._isDragging = true;
2259
+ if (this._boundaryEl) {
2260
+ this._boundaryEl.classList.add("dragging");
2261
+ }
2262
+ }
2263
+ if (!this._isDragging) return;
2264
+ const engine = this._host.engine;
2265
+ if (!engine) return;
2266
+ if (this._mode === "move") {
2267
+ const incrementalDeltaPx = totalDeltaPx - this._lastDeltaPx;
2268
+ this._lastDeltaPx = totalDeltaPx;
2269
+ const incrementalDeltaSamples = Math.round(incrementalDeltaPx * this._host.samplesPerPixel);
2270
+ this._cumulativeDeltaSamples += incrementalDeltaSamples;
2271
+ engine.moveClip(this._trackId, this._clipId, incrementalDeltaSamples, true);
2272
+ } else {
2273
+ const boundary = this._mode === "trim-left" ? "left" : "right";
2274
+ const rawDeltaSamples = Math.round(totalDeltaPx * this._host.samplesPerPixel);
2275
+ const deltaSamples = engine.constrainTrimDelta(
2276
+ this._trackId,
2277
+ this._clipId,
2278
+ boundary,
2279
+ rawDeltaSamples
2280
+ );
2281
+ const deltaPx = Math.round(deltaSamples / this._host.samplesPerPixel);
2282
+ this._cumulativeDeltaSamples = deltaSamples;
2283
+ if (this._clipContainer) {
2284
+ if (this._mode === "trim-left") {
2285
+ const newLeft = this._originalLeft + deltaPx;
2286
+ const newWidth = this._originalWidth - deltaPx;
2287
+ if (newWidth > 0) {
2288
+ this._clipContainer.style.left = newLeft + "px";
2289
+ this._clipContainer.style.width = newWidth + "px";
2290
+ const newOffset = this._originalOffsetSamples + deltaSamples;
2291
+ const newDuration = this._originalDurationSamples - deltaSamples;
2292
+ if (this._updateWaveformPeaks(newOffset, newDuration)) {
2293
+ const waveforms = this._clipContainer.querySelectorAll("daw-waveform");
2294
+ for (const wf of waveforms) {
2295
+ wf.style.left = "0px";
2296
+ }
2297
+ } else {
2298
+ const waveforms = this._clipContainer.querySelectorAll("daw-waveform");
2299
+ for (const wf of waveforms) {
2300
+ wf.style.left = -deltaPx + "px";
2301
+ }
2302
+ }
2303
+ }
2304
+ } else {
2305
+ const newWidth = this._originalWidth + deltaPx;
2306
+ if (newWidth > 0) {
2307
+ this._clipContainer.style.width = newWidth + "px";
2308
+ const newDuration = this._originalDurationSamples + deltaSamples;
2309
+ this._updateWaveformPeaks(this._originalOffsetSamples, newDuration);
2310
+ }
2311
+ }
2312
+ }
2313
+ }
2314
+ }
2315
+ /** Processes pointerup events to finalize and dispatch result events. */
2316
+ onPointerUp(_e) {
2317
+ if (this._mode === null) return;
2318
+ try {
2319
+ if (!this._isDragging || this._cumulativeDeltaSamples === 0) {
2320
+ this._restoreTrimVisual();
2321
+ return;
2322
+ }
2323
+ const engine = this._host.engine;
2324
+ if (this._mode === "move") {
2325
+ if (engine) {
2326
+ engine.updateTrack(this._trackId);
2327
+ this._host.dispatchEvent(
2328
+ new CustomEvent("daw-clip-move", {
2329
+ bubbles: true,
2330
+ composed: true,
2331
+ detail: {
2332
+ trackId: this._trackId,
2333
+ clipId: this._clipId,
2334
+ deltaSamples: this._cumulativeDeltaSamples
2335
+ }
2336
+ })
2337
+ );
2338
+ } else {
2339
+ console.warn(
2340
+ "[dawcore] engine unavailable at move drop \u2014 audio may be out of sync for track " + this._trackId
2341
+ );
2342
+ }
2343
+ } else {
2344
+ this._restoreTrimVisual();
2345
+ const boundary = this._mode === "trim-left" ? "left" : "right";
2346
+ if (engine) {
2347
+ engine.trimClip(this._trackId, this._clipId, boundary, this._cumulativeDeltaSamples);
2348
+ this._host.dispatchEvent(
2349
+ new CustomEvent("daw-clip-trim", {
2350
+ bubbles: true,
2351
+ composed: true,
2352
+ detail: {
2353
+ trackId: this._trackId,
2354
+ clipId: this._clipId,
2355
+ boundary,
2356
+ deltaSamples: this._cumulativeDeltaSamples
2357
+ }
2358
+ })
2359
+ );
2360
+ }
2361
+ }
2362
+ } finally {
2363
+ this._reset();
2364
+ }
2365
+ }
2366
+ /** Re-extract peaks from cache and set on waveform elements during trim drag.
2367
+ * Returns true if peaks were successfully updated. */
2368
+ _updateWaveformPeaks(offsetSamples, durationSamples) {
2369
+ if (!this._clipContainer || durationSamples <= 0) return false;
2370
+ const peakSlice = this._host.reextractClipPeaks(this._clipId, offsetSamples, durationSamples);
2371
+ if (!peakSlice) return false;
2372
+ const waveforms = this._clipContainer.querySelectorAll("daw-waveform");
2373
+ for (let i = 0; i < waveforms.length; i++) {
2374
+ const wf = waveforms[i];
2375
+ const channelPeaks = peakSlice.data[i];
2376
+ if (channelPeaks) {
2377
+ wf.peaks = channelPeaks;
2378
+ wf.length = peakSlice.length;
2379
+ }
2380
+ }
2381
+ return true;
2382
+ }
2383
+ /** Restore clip container CSS to original values after trim visual preview. */
2384
+ _restoreTrimVisual() {
2385
+ if (this._clipContainer) {
2386
+ this._clipContainer.style.left = this._originalLeft + "px";
2387
+ this._clipContainer.style.width = this._originalWidth + "px";
2388
+ const waveforms = this._clipContainer.querySelectorAll("daw-waveform");
2389
+ for (const wf of waveforms) {
2390
+ wf.style.left = "0px";
2391
+ }
2392
+ }
2393
+ }
2394
+ _reset() {
2395
+ if (this._boundaryEl) {
2396
+ this._boundaryEl.classList.remove("dragging");
2397
+ this._boundaryEl = null;
2398
+ }
2399
+ this._mode = null;
2400
+ this._clipId = "";
2401
+ this._trackId = "";
2402
+ this._startPx = 0;
2403
+ this._isDragging = false;
2404
+ this._lastDeltaPx = 0;
2405
+ this._cumulativeDeltaSamples = 0;
2406
+ this._clipContainer = null;
2407
+ this._originalLeft = 0;
2408
+ this._originalWidth = 0;
2409
+ this._originalOffsetSamples = 0;
2410
+ this._originalDurationSamples = 0;
2411
+ }
2412
+ };
2413
+
1904
2414
  // src/interactions/file-loader.ts
1905
2415
  var import_core2 = require("@waveform-playlist/core");
1906
2416
  async function loadFiles(host, files) {
@@ -1935,10 +2445,16 @@ async function loadFiles(host, files) {
1935
2445
  sourceDuration: audioBuffer.duration
1936
2446
  });
1937
2447
  host._clipBuffers = new Map(host._clipBuffers).set(clip.id, audioBuffer);
2448
+ host._clipOffsets.set(clip.id, {
2449
+ offsetSamples: clip.offsetSamples,
2450
+ durationSamples: clip.durationSamples
2451
+ });
1938
2452
  const peakData = await host._peakPipeline.generatePeaks(
1939
2453
  audioBuffer,
1940
2454
  host.samplesPerPixel,
1941
- host.mono
2455
+ host.mono,
2456
+ clip.offsetSamples,
2457
+ clip.durationSamples
1942
2458
  );
1943
2459
  host._peaksData = new Map(host._peaksData).set(clip.id, peakData);
1944
2460
  const trackId = crypto.randomUUID();
@@ -1997,17 +2513,31 @@ async function loadFiles(host, files) {
1997
2513
 
1998
2514
  // src/interactions/recording-clip.ts
1999
2515
  var import_core3 = require("@waveform-playlist/core");
2000
- function addRecordedClip(host, trackId, buf, startSample, durSamples) {
2516
+ function addRecordedClip(host, trackId, buf, startSample, durSamples, offsetSamples = 0) {
2517
+ let trimmedBuf = buf;
2518
+ if (offsetSamples > 0 && offsetSamples < buf.length) {
2519
+ const trimmed = new AudioBuffer({
2520
+ numberOfChannels: buf.numberOfChannels,
2521
+ length: durSamples,
2522
+ sampleRate: buf.sampleRate
2523
+ });
2524
+ for (let ch = 0; ch < buf.numberOfChannels; ch++) {
2525
+ const source = buf.getChannelData(ch);
2526
+ trimmed.copyToChannel(source.subarray(offsetSamples, offsetSamples + durSamples), ch);
2527
+ }
2528
+ trimmedBuf = trimmed;
2529
+ }
2001
2530
  const clip = (0, import_core3.createClip)({
2002
- audioBuffer: buf,
2531
+ audioBuffer: trimmedBuf,
2003
2532
  startSample,
2004
2533
  durationSamples: durSamples,
2005
2534
  offsetSamples: 0,
2535
+ // offset already applied by slicing
2006
2536
  gain: 1,
2007
2537
  name: "Recording"
2008
2538
  });
2009
- host._clipBuffers = new Map(host._clipBuffers).set(clip.id, buf);
2010
- host._peakPipeline.generatePeaks(buf, host.samplesPerPixel, host.mono).then((pd) => {
2539
+ host._clipBuffers = new Map(host._clipBuffers).set(clip.id, trimmedBuf);
2540
+ host._peakPipeline.generatePeaks(trimmedBuf, host.samplesPerPixel, host.mono).then((pd) => {
2011
2541
  host._peaksData = new Map(host._peaksData).set(clip.id, pd);
2012
2542
  const t = host._engineTracks.get(trackId);
2013
2543
  if (!t) {
@@ -2040,7 +2570,12 @@ function addRecordedClip(host, trackId, buf, startSample, durSamples) {
2040
2570
  });
2041
2571
  }
2042
2572
  host._recomputeDuration();
2043
- host._engine?.setTracks([...host._engineTracks.values()]);
2573
+ const updatedTrack = host._engineTracks.get(trackId);
2574
+ if (host._engine?.updateTrack && updatedTrack) {
2575
+ host._engine.updateTrack(trackId, updatedTrack);
2576
+ } else {
2577
+ host._engine?.setTracks([...host._engineTracks.values()]);
2578
+ }
2044
2579
  }).catch((err) => {
2045
2580
  console.warn("[dawcore] Failed to generate peaks for recorded clip: " + String(err));
2046
2581
  const next = new Map(host._clipBuffers);
@@ -2058,6 +2593,140 @@ function addRecordedClip(host, trackId, buf, startSample, durSamples) {
2058
2593
  });
2059
2594
  }
2060
2595
 
2596
// src/interactions/split-handler.ts
/**
 * Split the clip sitting under the playhead on the host's selected track.
 *
 * Reads the engine state before and after `engine.splitClip(...)` and diffs
 * the track's clip-id sets to discover the two halves produced by the split,
 * then announces them via a bubbling `daw-clip-split` CustomEvent.
 *
 * @param {Object} host - Editor facade exposing `engine`, `currentTime`,
 *   `effectiveSampleRate`, and `dispatchEvent`.
 * @returns {boolean} true when a split happened and the event was dispatched;
 *   false when there is no engine, no selected track, no clip under the
 *   playhead, or the engine did not produce exactly two new clips.
 */
function splitAtPlayhead(host) {
  const engine = host.engine;
  if (!engine) return false;

  const { selectedTrackId, tracks } = engine.getState();
  if (!selectedTrackId) return false;

  const selectedTrack = tracks.find((t) => t.id === selectedTrackId);
  if (!selectedTrack) return false;

  // Convert the playhead time to a sample position in engine coordinates.
  const playheadSample = Math.round(host.currentTime * host.effectiveSampleRate);
  const hitClip = findClipAtSample(selectedTrack.clips, playheadSample);
  if (!hitClip) return false;

  const originalClipId = hitClip.id;
  // Snapshot the ids present before the split so the new halves can be
  // identified afterwards by set difference.
  const preSplitIds = new Set(selectedTrack.clips.map((c) => c.id));
  engine.splitClip(selectedTrackId, originalClipId, playheadSample);

  const trackAfter = engine.getState().tracks.find((t) => t.id === selectedTrackId);
  if (!trackAfter) {
    console.warn(
      `[dawcore] splitAtPlayhead: track "${selectedTrackId}" disappeared after split`
    );
    return false;
  }

  const addedClips = trackAfter.clips.filter((c) => !preSplitIds.has(c.id));
  if (addedClips.length !== 2) {
    // A silent no-op from the engine (0 new clips) is not worth warning about;
    // any other unexpected count is.
    if (addedClips.length > 0) {
      console.warn(
        `[dawcore] splitAtPlayhead: expected 2 new clips after split but got ${addedClips.length}`
      );
    }
    return false;
  }

  const [leftClip, rightClip] = [...addedClips].sort(
    (a, b) => a.startSample - b.startSample
  );
  host.dispatchEvent(
    new CustomEvent("daw-clip-split", {
      bubbles: true,
      composed: true,
      detail: {
        trackId: selectedTrackId,
        originalClipId,
        leftClipId: leftClip.id,
        rightClipId: rightClip.id
      }
    })
  );
  return true;
}
2645
/**
 * Locate the clip whose interior contains the given sample position.
 * Both edges are exclusive (`>` / `<`) so a playhead sitting exactly on a
 * clip boundary matches nothing — splitting there would yield an empty half.
 *
 * @param {Array<{startSample: number, durationSamples: number}>} clips
 * @param {number} atSample - Absolute sample position to test.
 * @returns {Object|undefined} the first matching clip, or undefined.
 */
function findClipAtSample(clips, atSample) {
  for (const clip of clips) {
    const start = clip.startSample;
    const end = start + clip.durationSamples;
    if (atSample > start && atSample < end) {
      return clip;
    }
  }
  return undefined;
}
2650
+
2651
// src/interactions/clip-peak-sync.ts
/**
 * Regenerate waveform peaks for any clip whose cached offset/duration no
 * longer matches the engine state (e.g. after a split or trim), and drop
 * cached data for clips that no longer exist on any track.
 *
 * Peak generation is fire-and-forget: each `generatePeaks` promise updates
 * `host._peaksData` on success and only warns on failure, so one bad clip
 * cannot block the others.
 *
 * @param {Object} host - Editor host holding `_clipBuffers`, `_clipOffsets`,
 *   `_peaksData` Maps plus `_peakPipeline`, `samplesPerPixel`, and `mono`.
 * @param {Array} tracks - Engine track list; each track has a `clips` array.
 */
function syncPeaksForChangedClips(host, tracks) {
  const currentClipIds = /* @__PURE__ */ new Set();
  // Copy `_clipBuffers` at most once per sync instead of once per clip
  // (the previous copy-on-every-set pattern was O(n^2) in clip count).
  // Reassigning the property still triggers the host's reactive update.
  let nextBuffers = null;
  for (const track of tracks) {
    for (const clip of track.clips) {
      currentClipIds.add(clip.id);
      const cached = host._clipOffsets.get(clip.id);
      // Peaks are stale when missing entirely or when the clip's window into
      // its source buffer (offset/duration) has changed.
      const needsPeaks = !host._peaksData.has(clip.id) || !cached || cached.offsetSamples !== clip.offsetSamples || cached.durationSamples !== clip.durationSamples;
      if (!needsPeaks) continue;
      // Resolution order: buffer on the clip itself, then our cache, then a
      // sibling clip's buffer (split halves share the same source buffer).
      const audioBuffer = clip.audioBuffer ?? host._clipBuffers.get(clip.id) ?? findAudioBufferForClip(host, clip, track);
      if (!audioBuffer) {
        console.warn(
          "[dawcore] syncPeaksForChangedClips: no AudioBuffer for clip " + clip.id + " \u2014 waveform will be blank"
        );
        continue;
      }
      if (!nextBuffers) nextBuffers = new Map(host._clipBuffers);
      nextBuffers.set(clip.id, audioBuffer);
      host._clipBuffers = nextBuffers;
      host._clipOffsets.set(clip.id, {
        offsetSamples: clip.offsetSamples,
        durationSamples: clip.durationSamples
      });
      host._peakPipeline.generatePeaks(
        audioBuffer,
        host.samplesPerPixel,
        host.mono,
        clip.offsetSamples,
        clip.durationSamples
      ).then((peakData) => {
        // Fresh Map so the reactive property comparison sees a change.
        host._peaksData = new Map(host._peaksData).set(clip.id, peakData);
      }).catch((err) => {
        console.warn(
          "[dawcore] Failed to generate peaks for clip " + clip.id + ": " + String(err)
        );
      });
    }
  }
  cleanupOrphanedClipData(host, currentClipIds);
}
2689
/**
 * Drop cached AudioBuffers, offset records, and peak data for clip ids that
 * are no longer present in the engine's tracks. Each Map that actually lost
 * entries is reassigned to a fresh copy so the host's reactive property
 * comparison registers the change.
 *
 * @param {Object} host - Holder of `_clipBuffers`, `_clipOffsets`, `_peaksData` Maps.
 * @param {Set<string>} currentClipIds - Ids of clips that still exist.
 */
function cleanupOrphanedClipData(host, currentClipIds) {
  // Remove every key not in currentClipIds; report whether anything changed.
  const pruneOrphans = (map) => {
    const orphans = [...map.keys()].filter((id) => !currentClipIds.has(id));
    for (const id of orphans) {
      map.delete(id);
    }
    return orphans.length > 0;
  };
  if (pruneOrphans(host._clipBuffers)) {
    host._clipBuffers = new Map(host._clipBuffers);
  }
  if (pruneOrphans(host._clipOffsets)) {
    host._clipOffsets = new Map(host._clipOffsets);
  }
  if (pruneOrphans(host._peaksData)) {
    host._peaksData = new Map(host._peaksData);
  }
}
2721
/**
 * Fallback AudioBuffer lookup for a clip with no cached buffer of its own:
 * borrow the first sibling clip's cached buffer on the same track. After a
 * split, the new halves reference the same source audio as their siblings.
 *
 * NOTE(review): with multiple unrelated clips on one track this picks
 * whichever sibling buffer is cached first — presumably acceptable for the
 * split workflow; verify against callers.
 *
 * @param {Object} host - Holder of the `_clipBuffers` Map.
 * @param {{id: string}} clip - Clip missing a buffer.
 * @param {{clips: Array<{id: string}>}} track - Track containing the clip.
 * @returns {AudioBuffer|null} a sibling's cached buffer, or null.
 */
function findAudioBufferForClip(host, clip, track) {
  const donor = track.clips.find(
    (sibling) => sibling.id !== clip.id && Boolean(host._clipBuffers.get(sibling.id))
  );
  return donor ? host._clipBuffers.get(donor.id) : null;
}
2729
+
2061
2730
  // src/elements/daw-editor.ts
2062
2731
  var DawEditorElement = class extends import_lit12.LitElement {
2063
2732
  constructor() {
@@ -2069,6 +2738,9 @@ var DawEditorElement = class extends import_lit12.LitElement {
2069
2738
  this.barWidth = 1;
2070
2739
  this.barGap = 0;
2071
2740
  this.fileDrop = false;
2741
+ this.clipHeaders = false;
2742
+ this.clipHeaderHeight = 20;
2743
+ this.interactiveClips = false;
2072
2744
  this.sampleRate = 48e3;
2073
2745
  /** Resolved sample rate — falls back to sampleRate property until first audio decode. */
2074
2746
  this._resolvedSampleRate = null;
@@ -2085,14 +2757,15 @@ var DawEditorElement = class extends import_lit12.LitElement {
2085
2757
  this._currentTime = 0;
2086
2758
  this._engine = null;
2087
2759
  this._enginePromise = null;
2088
- this._audioInitialized = false;
2089
2760
  this._audioCache = /* @__PURE__ */ new Map();
2090
2761
  this._clipBuffers = /* @__PURE__ */ new Map();
2762
+ this._clipOffsets = /* @__PURE__ */ new Map();
2091
2763
  this._peakPipeline = new PeakPipeline();
2092
2764
  this._trackElements = /* @__PURE__ */ new Map();
2093
2765
  this._childObserver = null;
2094
2766
  this._audioResume = new AudioResumeController(this);
2095
2767
  this._recordingController = new RecordingController(this);
2768
+ this._clipPointer = new ClipPointerHandler(this);
2096
2769
  this._pointer = new PointerHandler(this);
2097
2770
  this._viewport = (() => {
2098
2771
  const v = new ViewportController(this);
@@ -2193,9 +2866,42 @@ var DawEditorElement = class extends import_lit12.LitElement {
2193
2866
  );
2194
2867
  }
2195
2868
  };
2869
+ this._onKeyDown = (e) => {
2870
+ if (!this.interactiveClips) return;
2871
+ if (e.key === "s" || e.key === "S") {
2872
+ if (e.ctrlKey || e.metaKey || e.altKey) return;
2873
+ const tag = e.target?.tagName;
2874
+ if (tag === "INPUT" || tag === "TEXTAREA") return;
2875
+ if (e.target?.isContentEditable) return;
2876
+ e.preventDefault();
2877
+ this.splitAtPlayhead();
2878
+ }
2879
+ };
2196
2880
  // --- Recording ---
2197
2881
  this.recordingStream = null;
2198
2882
  }
2883
+ get _clipHandler() {
2884
+ return this.interactiveClips ? this._clipPointer : null;
2885
+ }
2886
+ get engine() {
2887
+ return this._engine;
2888
+ }
2889
+ /** Re-extract peaks for a clip at new offset/duration from cached WaveformData. */
2890
+ reextractClipPeaks(clipId, offsetSamples, durationSamples) {
2891
+ const buf = this._clipBuffers.get(clipId);
2892
+ if (!buf) return null;
2893
+ const singleClipBuffers = /* @__PURE__ */ new Map([[clipId, buf]]);
2894
+ const singleClipOffsets = /* @__PURE__ */ new Map([[clipId, { offsetSamples, durationSamples }]]);
2895
+ const result = this._peakPipeline.reextractPeaks(
2896
+ singleClipBuffers,
2897
+ this.samplesPerPixel,
2898
+ this.mono,
2899
+ singleClipOffsets
2900
+ );
2901
+ const peakData = result.get(clipId);
2902
+ if (!peakData) return null;
2903
+ return { data: peakData.data, length: peakData.length };
2904
+ }
2199
2905
  get effectiveSampleRate() {
2200
2906
  return this._resolvedSampleRate ?? this.sampleRate;
2201
2907
  }
@@ -2236,6 +2942,10 @@ var DawEditorElement = class extends import_lit12.LitElement {
2236
2942
  // --- Lifecycle ---
2237
2943
  connectedCallback() {
2238
2944
  super.connectedCallback();
2945
+ if (!this.hasAttribute("tabindex")) {
2946
+ this.setAttribute("tabindex", "0");
2947
+ }
2948
+ this.addEventListener("keydown", this._onKeyDown);
2239
2949
  this.addEventListener("daw-track-connected", this._onTrackConnected);
2240
2950
  this.addEventListener("daw-track-update", this._onTrackUpdate);
2241
2951
  this.addEventListener("daw-track-control", this._onTrackControl);
@@ -2261,6 +2971,7 @@ var DawEditorElement = class extends import_lit12.LitElement {
2261
2971
  }
2262
2972
  disconnectedCallback() {
2263
2973
  super.disconnectedCallback();
2974
+ this.removeEventListener("keydown", this._onKeyDown);
2264
2975
  this.removeEventListener("daw-track-connected", this._onTrackConnected);
2265
2976
  this.removeEventListener("daw-track-update", this._onTrackUpdate);
2266
2977
  this.removeEventListener("daw-track-control", this._onTrackControl);
@@ -2270,6 +2981,7 @@ var DawEditorElement = class extends import_lit12.LitElement {
2270
2981
  this._trackElements.clear();
2271
2982
  this._audioCache.clear();
2272
2983
  this._clipBuffers.clear();
2984
+ this._clipOffsets.clear();
2273
2985
  this._peakPipeline.terminate();
2274
2986
  try {
2275
2987
  this._disposeEngine();
@@ -2281,17 +2993,19 @@ var DawEditorElement = class extends import_lit12.LitElement {
2281
2993
  if (changedProperties.has("eagerResume")) {
2282
2994
  this._audioResume.target = this.eagerResume;
2283
2995
  }
2996
+ if (changedProperties.has("samplesPerPixel") && this._isPlaying) {
2997
+ this._startPlayhead();
2998
+ }
2284
2999
  if (changedProperties.has("samplesPerPixel") && this._clipBuffers.size > 0) {
2285
- const reextracted = this._peakPipeline.reextractPeaks(
3000
+ const re = this._peakPipeline.reextractPeaks(
2286
3001
  this._clipBuffers,
2287
3002
  this.samplesPerPixel,
2288
- this.mono
3003
+ this.mono,
3004
+ this._clipOffsets
2289
3005
  );
2290
- if (reextracted.size > 0) {
3006
+ if (re.size > 0) {
2291
3007
  const next = new Map(this._peaksData);
2292
- for (const [clipId, peakData] of reextracted) {
2293
- next.set(clipId, peakData);
2294
- }
3008
+ for (const [id, pd] of re) next.set(id, pd);
2295
3009
  this._peaksData = next;
2296
3010
  }
2297
3011
  }
@@ -2303,6 +3017,7 @@ var DawEditorElement = class extends import_lit12.LitElement {
2303
3017
  const nextPeaks = new Map(this._peaksData);
2304
3018
  for (const clip of removedTrack.clips) {
2305
3019
  this._clipBuffers.delete(clip.id);
3020
+ this._clipOffsets.delete(clip.id);
2306
3021
  nextPeaks.delete(clip.id);
2307
3022
  }
2308
3023
  this._peaksData = nextPeaks;
@@ -2381,10 +3096,16 @@ var DawEditorElement = class extends import_lit12.LitElement {
2381
3096
  sourceDuration: audioBuffer.duration
2382
3097
  });
2383
3098
  this._clipBuffers = new Map(this._clipBuffers).set(clip.id, audioBuffer);
3099
+ this._clipOffsets.set(clip.id, {
3100
+ offsetSamples: clip.offsetSamples,
3101
+ durationSamples: clip.durationSamples
3102
+ });
2384
3103
  const peakData = await this._peakPipeline.generatePeaks(
2385
3104
  audioBuffer,
2386
3105
  this.samplesPerPixel,
2387
- this.mono
3106
+ this.mono,
3107
+ clip.offsetSamples,
3108
+ clip.durationSamples
2388
3109
  );
2389
3110
  this._peaksData = new Map(this._peaksData).set(clip.id, peakData);
2390
3111
  clips.push(clip);
@@ -2476,10 +3197,20 @@ var DawEditorElement = class extends import_lit12.LitElement {
2476
3197
  samplesPerPixel: this.samplesPerPixel,
2477
3198
  zoomLevels: [256, 512, 1024, 2048, 4096, 8192, this.samplesPerPixel].filter((v, i, a) => a.indexOf(v) === i).sort((a, b) => a - b)
2478
3199
  });
3200
+ let lastTracksVersion = -1;
2479
3201
  engine.on("statechange", (engineState) => {
2480
3202
  this._isPlaying = engineState.isPlaying;
2481
3203
  this._duration = engineState.duration;
2482
3204
  this._selectedTrackId = engineState.selectedTrackId;
3205
+ if (engineState.tracksVersion !== lastTracksVersion) {
3206
+ lastTracksVersion = engineState.tracksVersion;
3207
+ const nextTracks = /* @__PURE__ */ new Map();
3208
+ for (const track of engineState.tracks) {
3209
+ nextTracks.set(track.id, track);
3210
+ }
3211
+ this._engineTracks = nextTracks;
3212
+ syncPeaksForChangedClips(this, engineState.tracks);
3213
+ }
2483
3214
  });
2484
3215
  engine.on("timeupdate", (time) => {
2485
3216
  this._currentTime = time;
@@ -2502,14 +3233,11 @@ var DawEditorElement = class extends import_lit12.LitElement {
2502
3233
  return loadFiles(this, files);
2503
3234
  }
2504
3235
  // --- Playback ---
2505
- async play() {
3236
+ async play(startTime) {
2506
3237
  try {
2507
3238
  const engine = await this._ensureEngine();
2508
- if (!this._audioInitialized) {
2509
- await engine.init();
2510
- this._audioInitialized = true;
2511
- }
2512
- engine.play();
3239
+ await engine.init();
3240
+ engine.play(startTime);
2513
3241
  this._startPlayhead();
2514
3242
  this.dispatchEvent(new CustomEvent("daw-play", { bubbles: true, composed: true }));
2515
3243
  } catch (err) {
@@ -2540,14 +3268,32 @@ var DawEditorElement = class extends import_lit12.LitElement {
2540
3268
  this._engine.seek(time);
2541
3269
  this._currentTime = time;
2542
3270
  }
3271
+ /** Split the clip under the playhead on the selected track. */
3272
+ splitAtPlayhead() {
3273
+ return splitAtPlayhead({
3274
+ effectiveSampleRate: this.effectiveSampleRate,
3275
+ currentTime: this._currentTime,
3276
+ engine: this._engine,
3277
+ dispatchEvent: (e) => this.dispatchEvent(e)
3278
+ });
3279
+ }
3280
+ get currentTime() {
3281
+ return this._currentTime;
3282
+ }
2543
3283
  get isRecording() {
2544
3284
  return this._recordingController.isRecording;
2545
3285
  }
3286
+ pauseRecording() {
3287
+ this._recordingController.pauseRecording();
3288
+ }
3289
+ resumeRecording() {
3290
+ this._recordingController.resumeRecording();
3291
+ }
2546
3292
  stopRecording() {
2547
3293
  this._recordingController.stopRecording();
2548
3294
  }
2549
- _addRecordedClip(trackId, buf, startSample, durSamples) {
2550
- addRecordedClip(this, trackId, buf, startSample, durSamples);
3295
+ _addRecordedClip(trackId, buf, startSample, durSamples, offsetSamples = 0) {
3296
+ addRecordedClip(this, trackId, buf, startSample, durSamples, offsetSamples);
2551
3297
  }
2552
3298
  async startRecording(stream, options) {
2553
3299
  const s = stream ?? this.recordingStream;
@@ -2560,15 +3306,19 @@ var DawEditorElement = class extends import_lit12.LitElement {
2560
3306
  _renderRecordingPreview(trackId, chH) {
2561
3307
  const rs = this._recordingController.getSession(trackId);
2562
3308
  if (!rs) return "";
3309
+ const audibleSamples = Math.max(0, rs.totalSamples - rs.latencySamples);
3310
+ if (audibleSamples === 0) return "";
3311
+ const latencyPixels = Math.floor(rs.latencySamples / this.samplesPerPixel);
2563
3312
  const left = Math.floor(rs.startSample / this.samplesPerPixel);
2564
- const w = Math.floor(rs.totalSamples / this.samplesPerPixel);
2565
- return rs.peaks.map(
2566
- (chPeaks, ch) => import_lit12.html`
3313
+ const w = Math.floor(audibleSamples / this.samplesPerPixel);
3314
+ return rs.peaks.map((chPeaks, ch) => {
3315
+ const slicedPeaks = latencyPixels > 0 ? chPeaks.slice(latencyPixels * 2) : chPeaks;
3316
+ return import_lit12.html`
2567
3317
  <daw-waveform
2568
3318
  data-recording-track=${trackId}
2569
3319
  data-recording-channel=${ch}
2570
3320
  style="position:absolute;left:${left}px;top:${ch * chH}px;"
2571
- .peaks=${chPeaks}
3321
+ .peaks=${slicedPeaks}
2572
3322
  .length=${w}
2573
3323
  .waveHeight=${chH}
2574
3324
  .barWidth=${this.barWidth}
@@ -2577,8 +3327,8 @@ var DawEditorElement = class extends import_lit12.LitElement {
2577
3327
  .visibleEnd=${this._viewport.visibleEnd}
2578
3328
  .originX=${left}
2579
3329
  ></daw-waveform>
2580
- `
2581
- );
3330
+ `;
3331
+ });
2582
3332
  }
2583
3333
  // --- Playhead ---
2584
3334
  _startPlayhead() {
@@ -2620,13 +3370,14 @@ var DawEditorElement = class extends import_lit12.LitElement {
2620
3370
  const orderedTracks = this._getOrderedTracks().map(([trackId, track]) => {
2621
3371
  const descriptor = this._tracks.get(trackId);
2622
3372
  const firstPeaks = track.clips.map((c) => this._peaksData.get(c.id)).find((p) => p && p.data.length > 0);
2623
- const numChannels = firstPeaks ? firstPeaks.data.length : 1;
3373
+ const recSession = this._recordingController.getSession(trackId);
3374
+ const numChannels = firstPeaks ? firstPeaks.data.length : recSession ? recSession.channelCount : 1;
2624
3375
  return {
2625
3376
  trackId,
2626
3377
  track,
2627
3378
  descriptor,
2628
3379
  numChannels,
2629
- trackHeight: this.waveHeight * numChannels
3380
+ trackHeight: this.waveHeight * numChannels + (this.clipHeaders ? this.clipHeaderHeight : 0)
2630
3381
  };
2631
3382
  });
2632
3383
  return import_lit12.html`
@@ -2680,21 +3431,47 @@ var DawEditorElement = class extends import_lit12.LitElement {
2680
3431
  );
2681
3432
  const clipLeft = Math.floor(clip.startSample / this.samplesPerPixel);
2682
3433
  const channels = peakData?.data ?? [new Int16Array(0)];
2683
- return channels.map(
2684
- (channelPeaks, chIdx) => import_lit12.html`
2685
- <daw-waveform
2686
- style="position: absolute; left: ${clipLeft}px; top: ${chIdx * channelHeight}px;"
2687
- .peaks=${channelPeaks}
2688
- .length=${peakData?.length ?? width}
2689
- .waveHeight=${channelHeight}
2690
- .barWidth=${this.barWidth}
2691
- .barGap=${this.barGap}
2692
- .visibleStart=${this._viewport.visibleStart}
2693
- .visibleEnd=${this._viewport.visibleEnd}
2694
- .originX=${clipLeft}
2695
- ></daw-waveform>
2696
- `
2697
- );
3434
+ const hdrH = this.clipHeaders ? this.clipHeaderHeight : 0;
3435
+ const chH = this.waveHeight;
3436
+ return import_lit12.html` <div
3437
+ class="clip-container"
3438
+ style="left:${clipLeft}px;top:0;width:${width}px;height:${t.trackHeight}px;"
3439
+ data-clip-id=${clip.id}
3440
+ >
3441
+ ${hdrH > 0 ? import_lit12.html`<div
3442
+ class="clip-header"
3443
+ data-clip-id=${clip.id}
3444
+ data-track-id=${t.trackId}
3445
+ ?data-interactive=${this.interactiveClips}
3446
+ >
3447
+ <span>${clip.name || t.descriptor?.name || ""}</span>
3448
+ </div>` : ""}
3449
+ ${channels.map(
3450
+ (chPeaks, chIdx) => import_lit12.html` <daw-waveform
3451
+ style="position:absolute;left:0;top:${hdrH + chIdx * chH}px;"
3452
+ .peaks=${chPeaks}
3453
+ .length=${peakData?.length ?? width}
3454
+ .waveHeight=${chH}
3455
+ .barWidth=${this.barWidth}
3456
+ .barGap=${this.barGap}
3457
+ .visibleStart=${this._viewport.visibleStart}
3458
+ .visibleEnd=${this._viewport.visibleEnd}
3459
+ .originX=${clipLeft}
3460
+ ></daw-waveform>`
3461
+ )}
3462
+ ${this.interactiveClips ? import_lit12.html` <div
3463
+ class="clip-boundary"
3464
+ data-boundary-edge="left"
3465
+ data-clip-id=${clip.id}
3466
+ data-track-id=${t.trackId}
3467
+ ></div>
3468
+ <div
3469
+ class="clip-boundary"
3470
+ data-boundary-edge="right"
3471
+ data-clip-id=${clip.id}
3472
+ data-track-id=${t.trackId}
3473
+ ></div>` : ""}
3474
+ </div>`;
2698
3475
  })}
2699
3476
  ${this._renderRecordingPreview(t.trackId, channelHeight)}
2700
3477
  </div>
@@ -2742,7 +3519,8 @@ DawEditorElement.styles = [
2742
3519
  outline: 2px dashed var(--daw-selection-color, rgba(99, 199, 95, 0.3));
2743
3520
  outline-offset: -2px;
2744
3521
  }
2745
- `
3522
+ `,
3523
+ clipStyles
2746
3524
  ];
2747
3525
  DawEditorElement._CONTROL_PROPS = /* @__PURE__ */ new Set(["volume", "pan", "muted", "soloed"]);
2748
3526
  __decorateClass([
@@ -2766,6 +3544,15 @@ __decorateClass([
2766
3544
  __decorateClass([
2767
3545
  (0, import_decorators10.property)({ type: Boolean, attribute: "file-drop" })
2768
3546
  ], DawEditorElement.prototype, "fileDrop", 2);
3547
+ __decorateClass([
3548
+ (0, import_decorators10.property)({ type: Boolean, attribute: "clip-headers" })
3549
+ ], DawEditorElement.prototype, "clipHeaders", 2);
3550
+ __decorateClass([
3551
+ (0, import_decorators10.property)({ type: Number, attribute: "clip-header-height" })
3552
+ ], DawEditorElement.prototype, "clipHeaderHeight", 2);
3553
+ __decorateClass([
3554
+ (0, import_decorators10.property)({ type: Boolean, attribute: "interactive-clips" })
3555
+ ], DawEditorElement.prototype, "interactiveClips", 2);
2769
3556
  __decorateClass([
2770
3557
  (0, import_decorators10.property)({ type: Number, attribute: "sample-rate" })
2771
3558
  ], DawEditorElement.prototype, "sampleRate", 2);
@@ -3030,7 +3817,7 @@ var DawRecordButtonElement = class extends DawTransportButton {
3030
3817
  }
3031
3818
  connectedCallback() {
3032
3819
  super.connectedCallback();
3033
- this._listenToTarget();
3820
+ requestAnimationFrame(() => this._listenToTarget());
3034
3821
  }
3035
3822
  disconnectedCallback() {
3036
3823
  super.disconnectedCallback();
@@ -3055,11 +3842,12 @@ var DawRecordButtonElement = class extends DawTransportButton {
3055
3842
  render() {
3056
3843
  return import_lit15.html`
3057
3844
  <button part="button" ?data-recording=${this._isRecording} @click=${this._onClick}>
3058
- <slot>${this._isRecording ? "Stop Rec" : "Record"}</slot>
3845
+ <slot>Record</slot>
3059
3846
  </button>
3060
3847
  `;
3061
3848
  }
3062
3849
  _onClick() {
3850
+ if (this._isRecording) return;
3063
3851
  const target = this.target;
3064
3852
  if (!target) {
3065
3853
  console.warn(
@@ -3067,11 +3855,7 @@ var DawRecordButtonElement = class extends DawTransportButton {
3067
3855
  );
3068
3856
  return;
3069
3857
  }
3070
- if (this._isRecording) {
3071
- target.stopRecording();
3072
- } else {
3073
- target.startRecording(target.recordingStream);
3074
- }
3858
+ target.startRecording(target.recordingStream);
3075
3859
  }
3076
3860
  };
3077
3861
  DawRecordButtonElement.styles = [
@@ -3080,6 +3864,7 @@ DawRecordButtonElement.styles = [
3080
3864
  button[data-recording] {
3081
3865
  color: #d08070;
3082
3866
  border-color: #d08070;
3867
+ background: rgba(208, 128, 112, 0.15);
3083
3868
  }
3084
3869
  `
3085
3870
  ];
@@ -3092,6 +3877,7 @@ DawRecordButtonElement = __decorateClass([
3092
3877
  // Annotate the CommonJS export names for ESM import in node:
3093
3878
  0 && (module.exports = {
3094
3879
  AudioResumeController,
3880
+ ClipPointerHandler,
3095
3881
  DawClipElement,
3096
3882
  DawEditorElement,
3097
3883
  DawPauseButtonElement,
@@ -3106,6 +3892,7 @@ DawRecordButtonElement = __decorateClass([
3106
3892
  DawTransportButton,
3107
3893
  DawTransportElement,
3108
3894
  DawWaveformElement,
3109
- RecordingController
3895
+ RecordingController,
3896
+ splitAtPlayhead
3110
3897
  });
3111
3898
  //# sourceMappingURL=index.js.map