@waveform-playlist/playout 10.3.0 → 11.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +38 -0
- package/dist/index.d.ts +38 -0
- package/dist/index.js +206 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +206 -4
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
|
@@ -153,6 +153,112 @@ var ToneTrack = class {
|
|
|
153
153
|
}
|
|
154
154
|
}
|
|
155
155
|
}
|
|
156
|
+
/**
|
|
157
|
+
* Add a clip to this track at runtime. Creates a Transport.schedule event
|
|
158
|
+
* and fadeGainNode. If playing, starts the source mid-clip if needed.
|
|
159
|
+
*/
|
|
160
|
+
addClip(clipInfo) {
|
|
161
|
+
const transport = getTransport();
|
|
162
|
+
const rawContext = getContext().rawContext;
|
|
163
|
+
const volumeNativeInput = this.volumeNode.input.input;
|
|
164
|
+
const fadeGainNode = rawContext.createGain();
|
|
165
|
+
fadeGainNode.gain.value = clipInfo.gain;
|
|
166
|
+
fadeGainNode.connect(volumeNativeInput);
|
|
167
|
+
const absTransportTime = this.track.startTime + clipInfo.startTime;
|
|
168
|
+
const scheduleId = transport.schedule((audioContextTime) => {
|
|
169
|
+
if (absTransportTime < this._scheduleGuardOffset) return;
|
|
170
|
+
this.startClipSource(clipInfo, fadeGainNode, audioContextTime);
|
|
171
|
+
}, absTransportTime);
|
|
172
|
+
const scheduled = { clipInfo, fadeGainNode, scheduleId };
|
|
173
|
+
this.scheduledClips.push(scheduled);
|
|
174
|
+
return scheduled;
|
|
175
|
+
}
|
|
176
|
+
/**
|
|
177
|
+
* Remove a scheduled clip by index. Clears the Transport event and
|
|
178
|
+
* disconnects the fadeGainNode.
|
|
179
|
+
*/
|
|
180
|
+
removeScheduledClip(index) {
|
|
181
|
+
const scheduled = this.scheduledClips[index];
|
|
182
|
+
if (!scheduled) return;
|
|
183
|
+
const transport = getTransport();
|
|
184
|
+
try {
|
|
185
|
+
transport.clear(scheduled.scheduleId);
|
|
186
|
+
} catch {
|
|
187
|
+
}
|
|
188
|
+
try {
|
|
189
|
+
scheduled.fadeGainNode.disconnect();
|
|
190
|
+
} catch {
|
|
191
|
+
}
|
|
192
|
+
this.scheduledClips.splice(index, 1);
|
|
193
|
+
}
|
|
194
|
+
/**
 * Replace clips on this track. Diffs old vs new by buffer + timing —
 * unchanged clips keep their active sources playing (no audible interruption).
 * Changed/added/removed clips are rescheduled. Disconnecting a removed clip's
 * fadeGainNode silences its source immediately (audio path broken) without
 * needing to explicitly stop it.
 *
 * @param {Array} newClips - clip descriptors to install on this track
 * @param {number} [newStartTime] - if given, new absolute track start time
 */
replaceClips(newClips, newStartTime) {
  if (newStartTime !== void 0) {
    this.track.startTime = newStartTime;
  }
  const tp = getTransport();
  const kept = [];
  const toAdd = [];
  // Indices into scheduledClips already claimed by a matching new clip,
  // so two identical new clips cannot both match the same old entry.
  const matched = /* @__PURE__ */ new Set();
  for (const clipInfo of newClips) {
    const idx = this.scheduledClips.findIndex(
      (s, i) => !matched.has(i) && this._clipsEqual(s.clipInfo, clipInfo)
    );
    if (idx !== -1) {
      // Unchanged clip: keep its existing schedule + fade node untouched.
      kept.push(this.scheduledClips[idx]);
      matched.add(idx);
    } else {
      toAdd.push(clipInfo);
    }
  }
  // Tear down every old entry that found no match: clear its Transport
  // event and disconnect its fade gain (silencing any active source).
  for (let i = 0; i < this.scheduledClips.length; i++) {
    if (!matched.has(i)) {
      const scheduled = this.scheduledClips[i];
      try {
        tp.clear(scheduled.scheduleId);
      } catch {
        // Best-effort cleanup; ignore failures.
      }
      try {
        scheduled.fadeGainNode.disconnect();
      } catch {
        // Best-effort cleanup; ignore failures.
      }
    }
  }
  this.scheduledClips = kept;
  const isPlaying = tp.state === "started";
  for (const clipInfo of toAdd) {
    const scheduled = this.addClip(clipInfo);
    if (isPlaying) {
      const context = getContext();
      const transportOffset = tp.seconds;
      const audioContextTime = context.currentTime;
      const lookAhead = context.lookAhead ?? 0;
      // Audible position = transport position minus scheduling look-ahead.
      const audibleOffset = Math.max(0, transportOffset - lookAhead);
      const absClipStart = this.track.startTime + clipInfo.startTime;
      const absClipEnd = absClipStart + clipInfo.duration;
      // Clip spans the current position: start its source mid-clip now,
      // advancing the buffer offset by the already-elapsed portion.
      if (absClipStart < transportOffset && absClipEnd > audibleOffset) {
        const elapsed = audibleOffset - absClipStart;
        this.startClipSource(
          clipInfo,
          scheduled.fadeGainNode,
          audioContextTime,
          clipInfo.offset + Math.max(0, elapsed),
          clipInfo.duration - Math.max(0, elapsed)
        );
      }
    }
  }
}
|
|
258
|
+
/** Compare two clips by reference (buffer), timing, and fade properties */
|
|
259
|
+
_clipsEqual(a, b) {
|
|
260
|
+
return a.buffer === b.buffer && a.startTime === b.startTime && a.duration === b.duration && a.offset === b.offset && a.gain === b.gain && a.fadeIn?.duration === b.fadeIn?.duration && a.fadeIn?.type === b.fadeIn?.type && a.fadeOut?.duration === b.fadeOut?.duration && a.fadeOut?.type === b.fadeOut?.type;
|
|
261
|
+
}
|
|
156
262
|
/**
|
|
157
263
|
* Stop all active AudioBufferSourceNodes and clear the set.
|
|
158
264
|
* Native AudioBufferSourceNodes ignore Transport state changes —
|
|
@@ -1017,6 +1123,34 @@ var TonePlayout = class {
|
|
|
1017
1123
|
getTrack(trackId) {
|
|
1018
1124
|
return this.tracks.get(trackId);
|
|
1019
1125
|
}
|
|
1126
|
+
getTrackIds() {
|
|
1127
|
+
return [...this.tracks.keys()];
|
|
1128
|
+
}
|
|
1129
|
+
/**
|
|
1130
|
+
* Replace clips on a track, preserving the track's audio graph.
|
|
1131
|
+
* Only works for ToneTrack (audio clips), not MidiToneTrack.
|
|
1132
|
+
*/
|
|
1133
|
+
replaceTrackClips(trackId, newClips, newStartTime) {
|
|
1134
|
+
const track = this.tracks.get(trackId);
|
|
1135
|
+
if (!track || !("replaceClips" in track)) return false;
|
|
1136
|
+
track.replaceClips(newClips, newStartTime);
|
|
1137
|
+
return true;
|
|
1138
|
+
}
|
|
1139
|
+
/**
|
|
1140
|
+
* Start mid-clip sources for a specific track at the current Transport position.
|
|
1141
|
+
* Call after adding/updating a track during active playback so clips that span
|
|
1142
|
+
* the current position produce audio immediately.
|
|
1143
|
+
*/
|
|
1144
|
+
resumeTrackMidPlayback(trackId) {
|
|
1145
|
+
const track = this.tracks.get(trackId);
|
|
1146
|
+
if (!track) return;
|
|
1147
|
+
const transport = getTransport2();
|
|
1148
|
+
if (transport.state !== "started") return;
|
|
1149
|
+
const context = getContext4();
|
|
1150
|
+
const lookAhead = context.lookAhead ?? 0;
|
|
1151
|
+
const audibleOffset = Math.max(0, transport.seconds - lookAhead);
|
|
1152
|
+
track.startMidClipSources(audibleOffset, context.currentTime);
|
|
1153
|
+
}
|
|
1020
1154
|
play(when, offset, duration) {
|
|
1021
1155
|
if (!this.isInitialized) {
|
|
1022
1156
|
throw new Error("[waveform-playlist] TonePlayout not initialized. Call init() first.");
|
|
@@ -1459,6 +1593,7 @@ function createToneAdapter(options) {
|
|
|
1459
1593
|
let _loopStart = 0;
|
|
1460
1594
|
let _loopEnd = 0;
|
|
1461
1595
|
let _audioInitialized = false;
|
|
1596
|
+
let _pendingInit = null;
|
|
1462
1597
|
function addTrackToPlayout(p, track) {
|
|
1463
1598
|
const audioClips = track.clips.filter((c) => c.audioBuffer && !c.midiNotes);
|
|
1464
1599
|
const midiClips = track.clips.filter((c) => c.midiNotes && c.midiNotes.length > 0);
|
|
@@ -1542,7 +1677,7 @@ function createToneAdapter(options) {
|
|
|
1542
1677
|
try {
|
|
1543
1678
|
playout.dispose();
|
|
1544
1679
|
} catch (err) {
|
|
1545
|
-
console.warn("[waveform-playlist] Error disposing previous playout
|
|
1680
|
+
console.warn("[waveform-playlist] Error disposing previous playout:", err);
|
|
1546
1681
|
}
|
|
1547
1682
|
playout = null;
|
|
1548
1683
|
}
|
|
@@ -1552,9 +1687,9 @@ function createToneAdapter(options) {
|
|
|
1552
1687
|
effects: options?.effects
|
|
1553
1688
|
});
|
|
1554
1689
|
if (_audioInitialized) {
|
|
1555
|
-
playout.init().catch((err) => {
|
|
1690
|
+
_pendingInit = playout.init().catch((err) => {
|
|
1556
1691
|
console.warn(
|
|
1557
|
-
"[waveform-playlist] Failed to
|
|
1692
|
+
"[waveform-playlist] Failed to initialize playout. Audio playback will require another user gesture.",
|
|
1558
1693
|
err
|
|
1559
1694
|
);
|
|
1560
1695
|
_audioInitialized = false;
|
|
@@ -1573,13 +1708,80 @@ function createToneAdapter(options) {
|
|
|
1573
1708
|
}
|
|
1574
1709
|
return {
|
|
1575
1710
|
async init() {
|
|
1711
|
+
if (_pendingInit) {
|
|
1712
|
+
await _pendingInit;
|
|
1713
|
+
_pendingInit = null;
|
|
1714
|
+
return;
|
|
1715
|
+
}
|
|
1576
1716
|
if (playout) {
|
|
1577
1717
|
await playout.init();
|
|
1578
1718
|
_audioInitialized = true;
|
|
1579
1719
|
}
|
|
1580
1720
|
},
|
|
1581
1721
|
setTracks(tracks) {
|
|
1582
|
-
|
|
1722
|
+
if (!playout) {
|
|
1723
|
+
buildPlayout(tracks);
|
|
1724
|
+
return;
|
|
1725
|
+
}
|
|
1726
|
+
const newTrackIds = new Set(tracks.map((t) => t.id));
|
|
1727
|
+
const oldTrackIds = new Set(playout.getTrackIds());
|
|
1728
|
+
for (const id of oldTrackIds) {
|
|
1729
|
+
if (!newTrackIds.has(id)) {
|
|
1730
|
+
playout.removeTrack(id);
|
|
1731
|
+
}
|
|
1732
|
+
}
|
|
1733
|
+
for (const track of tracks) {
|
|
1734
|
+
if (oldTrackIds.has(track.id)) {
|
|
1735
|
+
playout.removeTrack(track.id);
|
|
1736
|
+
playout.removeTrack(track.id + ":midi");
|
|
1737
|
+
}
|
|
1738
|
+
addTrackToPlayout(playout, track);
|
|
1739
|
+
}
|
|
1740
|
+
playout.applyInitialSoloState();
|
|
1741
|
+
if (_isPlaying) {
|
|
1742
|
+
for (const track of tracks) {
|
|
1743
|
+
playout.resumeTrackMidPlayback(track.id);
|
|
1744
|
+
playout.resumeTrackMidPlayback(track.id + ":midi");
|
|
1745
|
+
}
|
|
1746
|
+
}
|
|
1747
|
+
},
|
|
1748
|
+
/**
 * Update a single track in place. Prefers a clip-level replace on the
 * existing audio track (preserving its audio graph); falls back to a full
 * remove + re-add of the track when that is not possible. MIDI clips are
 * always rebuilt via their ":midi" companion track.
 *
 * @param {string} trackId - id of the track to update
 * @param {Object} track - track descriptor whose `clips` drive the update
 */
updateTrack(trackId, track) {
  if (!playout) return;
  // Audio clips: have a buffer and are not MIDI.
  const audioClips = track.clips.filter((c) => c.audioBuffer && !c.midiNotes);
  if (audioClips.length > 0) {
    // Track start = earliest clip start; clip startTimes become relative to it.
    const startTime = Math.min(...audioClips.map(clipStartTime));
    const clipInfos = audioClips.map((clip) => ({
      buffer: clip.audioBuffer,
      startTime: clipStartTime(clip) - startTime,
      duration: clipDurationTime(clip),
      offset: clipOffsetTime(clip),
      fadeIn: clip.fadeIn,
      fadeOut: clip.fadeOut,
      gain: clip.gain
    }));
    // In-place clip replacement; false if the track can't take it
    // (e.g. missing or not an audio track).
    const audioUpdated = playout.replaceTrackClips(trackId, clipInfos, startTime);
    const midiClips = track.clips.filter((c) => c.midiNotes && c.midiNotes.length > 0);
    if (midiClips.length > 0) {
      // MIDI side is always rebuilt: remove and re-add the ":midi" track.
      const midiTrackId = trackId + ":midi";
      playout.removeTrack(midiTrackId);
      addTrackToPlayout(playout, track);
      if (_isPlaying) {
        playout.resumeTrackMidPlayback(midiTrackId);
      }
    }
    if (audioUpdated) {
      playout.applyInitialSoloState();
      return;
    }
  }
  // Fallback: full rebuild of the track (and its ":midi" companion).
  playout.removeTrack(trackId);
  playout.removeTrack(trackId + ":midi");
  addTrackToPlayout(playout, track);
  playout.applyInitialSoloState();
  if (_isPlaying) {
    playout.resumeTrackMidPlayback(trackId);
    playout.resumeTrackMidPlayback(trackId + ":midi");
  }
},
|
|
1584
1786
|
addTrack(track) {
|
|
1585
1787
|
if (!playout) {
|