eprec 1.11.0 → 1.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/app/assets/styles.css +287 -0
- package/app/client/app.tsx +11 -2
- package/app/client/editing-workspace.tsx +283 -78
- package/app/client/trim-points.tsx +1287 -0
- package/app/config/routes.ts +1 -0
- package/app/router.tsx +6 -0
- package/app/routes/index.tsx +3 -0
- package/app/routes/trim-points.tsx +51 -0
- package/app/trim-api.ts +261 -0
- package/app/trim-commands.ts +154 -0
- package/package.json +1 -1
- package/server/processing-queue.ts +441 -0
- package/src/app-server.ts +8 -0
- package/src/cli.ts +8 -11
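
The largest addition is the trim-points editor (`package/app/client/trim-points.tsx`, shown in full below). It posts the trim job (input path, output path, duration, and normalized ranges) to `/api/trim` and renders a newline-delimited JSON progress stream. As a rough sketch of the payload shapes the client parses, inferred from the `runTrimCommand` handler in the hunk below rather than from the server code, whose hunk is not shown here:

```ts
// Assumed NDJSON events from POST /api/trim, one JSON object per line.
// Field names come from the client-side parser in trim-points.tsx; the
// actual emitter (package/server/processing-queue.ts) may send more fields.
type TrimStreamEvent =
  | { type: 'log'; message: string } // appended to the on-screen ffmpeg log
  | { type: 'progress'; progress: number } // clamped to [0, 1] for the progress bar
  | { type: 'done'; success: true } // marks the run as complete
  | { type: 'done'; success: false; error?: string } // surfaces the failure message
```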
package/app/client/trim-points.tsx
@@ -0,0 +1,1287 @@
+import type { Handle } from 'remix/component'
+import {
+  buildFfmpegCommandPreview,
+  computeOutputDuration,
+  normalizeTrimRanges,
+  type TrimRange,
+} from '../trim-commands.ts'
+
+type AppConfig = {
+  initialVideoPath?: string
+}
+
+declare global {
+  interface Window {
+    __EPREC_APP__?: AppConfig
+  }
+}
+
+type TrimRangeWithId = TrimRange & { id: string }
+
+const DEFAULT_TRIM_LENGTH = 2.5
+const MIN_TRIM_LENGTH = 0.1
+const PLAYHEAD_STEP = 0.1
+const KEYBOARD_STEP = 0.1
+const SHIFT_STEP = 1
+const DEMO_VIDEO_PATH = 'fixtures/e2e-test.mp4'
+const WAVEFORM_SAMPLES = 240
+
+function readInitialVideoPath() {
+  if (typeof window === 'undefined') return ''
+  const raw = window.__EPREC_APP__?.initialVideoPath
+  if (typeof raw !== 'string') return ''
+  return raw.trim()
+}
+
+function buildVideoPreviewUrl(value: string) {
+  return `/api/video?path=${encodeURIComponent(value)}`
+}
+
+function buildOutputPath(value: string) {
+  const trimmed = value.trim()
+  if (!trimmed) return ''
+  const extensionMatch = trimmed.match(/(\.[^./\\]+)$/)
+  if (extensionMatch) {
+    return trimmed.replace(/(\.[^./\\]+)$/, '.trimmed$1')
+  }
+  return `${trimmed}.trimmed.mp4`
+}
+
+function clamp(value: number, min: number, max: number) {
+  return Math.min(Math.max(value, min), max)
+}
+
+function sortRanges(ranges: TrimRangeWithId[]) {
+  return ranges.slice().sort((a, b) => a.start - b.start)
+}
+
+function formatTimestamp(value: number) {
+  const clamped = Math.max(value, 0)
+  const totalSeconds = Math.floor(clamped)
+  const minutes = Math.floor(totalSeconds / 60)
+  const seconds = totalSeconds % 60
+  const hundredths = Math.floor((clamped - totalSeconds) * 100)
+  return `${String(minutes).padStart(2, '0')}:${String(seconds).padStart(2, '0')}.${String(hundredths).padStart(2, '0')}`
+}
+
+function parseTimestampInput(value: string) {
+  const trimmed = value.trim()
+  if (!trimmed) return null
+  if (/^\d+(\.\d+)?$/.test(trimmed)) {
+    const seconds = Number.parseFloat(trimmed)
+    return Number.isFinite(seconds) ? seconds : null
+  }
+  const parts = trimmed.split(':').map((part) => part.trim())
+  if (parts.length !== 2 && parts.length !== 3) return null
+  const secondsPart = Number.parseFloat(parts[parts.length - 1] ?? '')
+  const minutesPart = Number.parseFloat(parts[parts.length - 2] ?? '')
+  const hoursPart =
+    parts.length === 3 ? Number.parseFloat(parts[0] ?? '') : 0
+  if (
+    !Number.isFinite(secondsPart) ||
+    !Number.isFinite(minutesPart) ||
+    !Number.isFinite(hoursPart)
+  ) {
+    return null
+  }
+  if (secondsPart < 0 || minutesPart < 0 || hoursPart < 0) return null
+  return hoursPart * 3600 + minutesPart * 60 + secondsPart
+}
+
+function formatSeconds(value: number) {
+  return `${value.toFixed(1)}s`
+}
+
+function classNames(...values: Array<string | false | null | undefined>) {
+  return values.filter(Boolean).join(' ')
+}
+
+export function TrimPoints(handle: Handle) {
+  const initialVideoPath = readInitialVideoPath()
+  let videoPathInput = initialVideoPath
+  let outputPathInput = initialVideoPath ? buildOutputPath(initialVideoPath) : ''
+  let pathStatus: 'idle' | 'loading' | 'ready' | 'error' = initialVideoPath
+    ? 'loading'
+    : 'idle'
+  let pathError = ''
+  let previewUrl = ''
+  let previewError = ''
+  let previewDuration = 0
+  let previewReady = false
+  let previewNode: HTMLVideoElement | null = null
+  let trackNode: HTMLDivElement | null = null
+  let playhead = 0
+  let previewPlaying = false
+  let timeInputValue = formatTimestamp(playhead)
+  let isTimeEditing = false
+  let trimRanges: TrimRangeWithId[] = []
+  let selectedRangeId: string | null = null
+  let rangeCounter = 1
+  let activeDrag:
+    | { rangeId: string; edge: 'start' | 'end'; pointerId: number }
+    | null = null
+  let runStatus: 'idle' | 'running' | 'success' | 'error' = 'idle'
+  let runProgress = 0
+  let runError = ''
+  let runLogs: string[] = []
+  let runController: AbortController | null = null
+  let initialLoadTriggered = false
+  let waveformSamples: number[] = []
+  let waveformStatus: 'idle' | 'loading' | 'ready' | 'error' = 'idle'
+  let waveformError = ''
+  let waveformSource = ''
+  let waveformNode: HTMLCanvasElement | null = null
+
+  // Cleanup ffmpeg operation on unmount
+  handle.signal.addEventListener('abort', () => {
+    if (runController) {
+      runController.abort()
+    }
+  })
+
+  const updateVideoPathInput = (value: string) => {
+    videoPathInput = value
+    if (pathError) pathError = ''
+    if (pathStatus === 'error') pathStatus = 'idle'
+    handle.update()
+  }
+
+  const updateOutputPathInput = (value: string) => {
+    outputPathInput = value
+    handle.update()
+  }
+
+  const resetPreviewState = () => {
+    previewReady = false
+    previewError = ''
+    previewDuration = 0
+  }
+
+  const syncVideoToTime = (
+    value: number,
+    options: { skipVideo?: boolean; updateInput?: boolean } = {},
+  ) => {
+    const maxDuration = previewDuration > 0 ? previewDuration : value
+    const nextTime = clamp(value, 0, Math.max(maxDuration, 0))
+    playhead = nextTime
+    if (!isTimeEditing || options.updateInput) {
+      timeInputValue = formatTimestamp(nextTime)
+    }
+    if (
+      previewNode &&
+      previewReady &&
+      !options.skipVideo &&
+      Math.abs(previewNode.currentTime - nextTime) > 0.02
+    ) {
+      previewNode.currentTime = nextTime
+    }
+    handle.update()
+  }
+
+  const updateTimeInput = (value: string) => {
+    timeInputValue = value
+    isTimeEditing = true
+    handle.update()
+  }
+
+  const commitTimeInput = () => {
+    const parsed = parseTimestampInput(timeInputValue)
+    isTimeEditing = false
+    if (parsed === null) {
+      timeInputValue = formatTimestamp(playhead)
+      handle.update()
+      return
+    }
+    syncVideoToTime(parsed, { updateInput: true })
+  }
+
+  const drawWaveform = () => {
+    if (!waveformNode) return
+    const ctx = waveformNode.getContext('2d')
+    if (!ctx) return
+    const width = waveformNode.clientWidth
+    const height = waveformNode.clientHeight
+    if (width <= 0 || height <= 0) return
+    const dpr =
+      typeof window !== 'undefined' ? window.devicePixelRatio || 1 : 1
+    waveformNode.width = Math.floor(width * dpr)
+    waveformNode.height = Math.floor(height * dpr)
+    ctx.setTransform(dpr, 0, 0, dpr, 0, 0)
+    ctx.clearRect(0, 0, width, height)
+    const color =
+      typeof window !== 'undefined'
+        ? window.getComputedStyle(waveformNode).color
+        : '#94a3b8'
+    ctx.strokeStyle = color
+    ctx.lineWidth = 1
+    if (waveformSamples.length === 0) {
+      ctx.beginPath()
+      ctx.moveTo(0, height / 2)
+      ctx.lineTo(width, height / 2)
+      ctx.stroke()
+      return
+    }
+    const mid = height / 2
+    const step = width / waveformSamples.length
+    ctx.beginPath()
+    waveformSamples.forEach((sample, index) => {
+      const x = index * step
+      const amplitude = sample * (mid - 2)
+      ctx.moveTo(x, mid - amplitude)
+      ctx.lineTo(x, mid + amplitude)
+    })
+    ctx.stroke()
+  }
+
+  const loadWaveform = async (url: string) => {
+    if (!url || waveformStatus === 'loading') return
+    if (waveformSource === url && waveformStatus === 'ready') return
+    waveformSource = url
+    waveformStatus = 'loading'
+    waveformError = ''
+    waveformSamples = []
+    drawWaveform()
+    handle.update()
+    const fetchedUrl = url
+    try {
+      if (typeof window === 'undefined' || !('AudioContext' in window)) {
+        throw new Error('AudioContext unavailable in this browser.')
+      }
+      const response = await fetch(url, {
+        cache: 'no-store',
+        signal: handle.signal,
+      })
+      if (!response.ok) {
+        throw new Error(`Waveform load failed (status ${response.status}).`)
+      }
+      const buffer = await response.arrayBuffer()
+      if (handle.signal.aborted) return
+      const audioContext = new AudioContext()
+      let audioBuffer: AudioBuffer
+      try {
+        audioBuffer = await audioContext.decodeAudioData(buffer.slice(0))
+      } finally {
+        void audioContext.close()
+      }
+      if (audioBuffer.numberOfChannels === 0) {
+        throw new Error('No audio track found in the video.')
+      }
+      const channelCount = audioBuffer.numberOfChannels
+      const channels = Array.from({ length: channelCount }, (_, index) =>
+        audioBuffer.getChannelData(index),
+      )
+      const totalSamples = audioBuffer.length
+      const sampleCount = Math.max(
+        1,
+        Math.min(WAVEFORM_SAMPLES, totalSamples),
+      )
+      const blockSize = Math.max(1, Math.floor(totalSamples / sampleCount))
+      const samples = new Array(sampleCount).fill(0)
+      let maxValue = 0
+      for (let i = 0; i < sampleCount; i++) {
+        const start = i * blockSize
+        const end =
+          i === sampleCount - 1 ? totalSamples : start + blockSize
+        let peak = 0
+        for (let j = start; j < end; j++) {
+          let sum = 0
+          for (const channel of channels) {
+            sum += Math.abs(channel[j] ?? 0)
+          }
+          const avg = sum / channelCount
+          if (avg > peak) peak = avg
+        }
+        samples[i] = peak
+        if (peak > maxValue) maxValue = peak
+      }
+      const normalizedSamples =
+        maxValue > 0 ? samples.map((sample) => sample / maxValue) : samples
+      if (waveformSource !== fetchedUrl) return
+      waveformSamples = normalizedSamples
+      waveformStatus = 'ready'
+      handle.update()
+      drawWaveform()
+    } catch (error) {
+      if (handle.signal.aborted) return
+      if (waveformSource !== fetchedUrl) return
+      waveformStatus = 'error'
+      waveformError =
+        error instanceof Error
+          ? error.message
+          : 'Unable to render waveform.'
+      handle.update()
+    }
+  }
+
+  const applyPreviewSource = (url: string) => {
+    previewUrl = url
+    resetPreviewState()
+    handle.update()
+  }
+
+  const loadVideoFromPath = async (override?: string) => {
+    const candidate = (override ?? videoPathInput).trim()
+    if (!candidate) {
+      pathError = 'Enter a video file path to load.'
+      pathStatus = 'error'
+      handle.update()
+      return
+    }
+    videoPathInput = candidate
+    pathStatus = 'loading'
+    pathError = ''
+    previewError = ''
+    handle.update()
+    const preview = buildVideoPreviewUrl(candidate)
+    try {
+      const response = await fetch(preview, {
+        method: 'HEAD',
+        cache: 'no-store',
+        signal: handle.signal,
+      })
+      if (!response.ok) {
+        const message =
+          response.status === 404
+            ? 'Video file not found. Check the path.'
+            : `Unable to load the video (status ${response.status}).`
+        throw new Error(message)
+      }
+      if (handle.signal.aborted) return
+      pathStatus = 'ready'
+      outputPathInput = buildOutputPath(candidate)
+      applyPreviewSource(preview)
+      void loadWaveform(preview)
+    } catch (error) {
+      if (handle.signal.aborted) return
+      pathStatus = 'error'
+      pathError =
+        error instanceof Error ? error.message : 'Unable to load the video.'
+      handle.update()
+    }
+  }
+
+  const loadDemoVideo = () => {
+    videoPathInput = DEMO_VIDEO_PATH
+    outputPathInput = buildOutputPath(DEMO_VIDEO_PATH)
+    void loadVideoFromPath(DEMO_VIDEO_PATH)
+  }
+
+  if (initialVideoPath && !initialLoadTriggered) {
+    initialLoadTriggered = true
+    void loadVideoFromPath(initialVideoPath)
+  }
+
+  const setPlayhead = (value: number) => {
+    if (!previewReady || previewDuration <= 0) return
+    syncVideoToTime(value, { updateInput: true })
+  }
+
+  const addTrimRange = () => {
+    if (!previewReady || previewDuration <= MIN_TRIM_LENGTH) {
+      pathError = 'Load a video before adding trim ranges.'
+      pathStatus = 'error'
+      handle.update()
+      return
+    }
+    const start = clamp(playhead, 0, previewDuration - MIN_TRIM_LENGTH)
+    const end = clamp(
+      start + DEFAULT_TRIM_LENGTH,
+      start + MIN_TRIM_LENGTH,
+      previewDuration,
+    )
+    const newRange: TrimRangeWithId = {
+      id: `trim-${rangeCounter++}`,
+      start,
+      end,
+    }
+    trimRanges = sortRanges([...trimRanges, newRange])
+    selectedRangeId = newRange.id
+    syncVideoToTime(start, { updateInput: true })
+  }
+
+  const removeTrimRange = (rangeId: string) => {
+    trimRanges = trimRanges.filter((range) => range.id !== rangeId)
+    if (selectedRangeId === rangeId) {
+      selectedRangeId = trimRanges[0]?.id ?? null
+    }
+    handle.update()
+  }
+
+  const updateTrimRange = (
+    rangeId: string,
+    patch: Partial<TrimRange>,
+    edge?: 'start' | 'end',
+  ) => {
+    trimRanges = sortRanges(
+      trimRanges.map((range) => {
+        if (range.id !== rangeId) return range
+        let nextStart = Number.isFinite(patch.start)
+          ? patch.start
+          : range.start
+        let nextEnd = Number.isFinite(patch.end) ? patch.end : range.end
+        if (edge === 'start') {
+          nextStart = clamp(
+            nextStart,
+            0,
+            Math.max(previewDuration - MIN_TRIM_LENGTH, 0),
+          )
+          nextEnd = clamp(
+            nextEnd,
+            nextStart + MIN_TRIM_LENGTH,
+            previewDuration,
+          )
+        } else if (edge === 'end') {
+          nextEnd = clamp(nextEnd, MIN_TRIM_LENGTH, previewDuration)
+          nextStart = clamp(nextStart, 0, nextEnd - MIN_TRIM_LENGTH)
+        } else {
+          const minStart = clamp(
+            nextStart,
+            0,
+            Math.max(previewDuration - MIN_TRIM_LENGTH, 0),
+          )
+          const minEnd = clamp(
+            nextEnd,
+            minStart + MIN_TRIM_LENGTH,
+            previewDuration,
+          )
+          nextStart = minStart
+          nextEnd = minEnd
+        }
+        return { ...range, start: nextStart, end: nextEnd }
+      }),
+    )
+    selectedRangeId = rangeId
+    handle.update()
+  }
+
+  const selectRange = (rangeId: string) => {
+    selectedRangeId = rangeId
+    const range = trimRanges.find((entry) => entry.id === rangeId)
+    if (range) {
+      syncVideoToTime(range.start, { updateInput: true })
+      return
+    }
+    handle.update()
+  }
+
+  const getTimeFromClientX = (clientX: number) => {
+    if (!trackNode || previewDuration <= 0) return 0
+    const rect = trackNode.getBoundingClientRect()
+    const ratio = clamp((clientX - rect.left) / rect.width, 0, 1)
+    return ratio * previewDuration
+  }
+
+  const startDrag = (
+    event: PointerEvent,
+    rangeId: string,
+    edge: 'start' | 'end',
+  ) => {
+    if (!trackNode || previewDuration <= 0) return
+    activeDrag = { rangeId, edge, pointerId: event.pointerId }
+    const target = event.currentTarget as HTMLElement
+    target.setPointerCapture(event.pointerId)
+    const nextTime = getTimeFromClientX(event.clientX)
+    updateTrimRange(rangeId, { [edge]: nextTime }, edge)
+    syncVideoToTime(nextTime, { updateInput: true })
+  }
+
+  const moveDrag = (event: PointerEvent) => {
+    if (!activeDrag || activeDrag.pointerId !== event.pointerId) return
+    const nextTime = getTimeFromClientX(event.clientX)
+    updateTrimRange(
+      activeDrag.rangeId,
+      { [activeDrag.edge]: nextTime },
+      activeDrag.edge,
+    )
+    syncVideoToTime(nextTime, { updateInput: true })
+  }
+
+  const endDrag = (event: PointerEvent) => {
+    if (!activeDrag || activeDrag.pointerId !== event.pointerId) return
+    activeDrag = null
+  }
+
+  const handleRangeKey = (
+    event: KeyboardEvent,
+    range: TrimRangeWithId,
+    edge: 'start' | 'end',
+  ) => {
+    const isForward =
+      event.key === 'ArrowUp' || event.key === 'ArrowRight'
+    const isBackward =
+      event.key === 'ArrowDown' || event.key === 'ArrowLeft'
+    if (!isForward && !isBackward) return
+    event.preventDefault()
+    const step = event.shiftKey ? SHIFT_STEP : KEYBOARD_STEP
+    const delta = isForward ? step : -step
+    const nextValue = edge === 'start' ? range.start + delta : range.end + delta
+    updateTrimRange(
+      range.id,
+      {
+        [edge]: nextValue,
+      },
+      edge,
+    )
+    syncVideoToTime(nextValue, { updateInput: true })
+  }
+
+  const handleNumberKey = (
+    event: KeyboardEvent,
+    range: TrimRangeWithId,
+    edge: 'start' | 'end',
+  ) => {
+    if (event.key !== 'ArrowUp' && event.key !== 'ArrowDown') return
+    event.preventDefault()
+    const step = event.shiftKey ? SHIFT_STEP : KEYBOARD_STEP
+    const delta = event.key === 'ArrowUp' ? step : -step
+    const nextValue = edge === 'start' ? range.start + delta : range.end + delta
+    updateTrimRange(
+      range.id,
+      {
+        [edge]: nextValue,
+      },
+      edge,
+    )
+    syncVideoToTime(nextValue, { updateInput: true })
+  }
+
+  const runTrimCommand = async () => {
+    if (runStatus === 'running') return
+    const normalized = normalizeTrimRanges(
+      trimRanges,
+      previewDuration,
+      MIN_TRIM_LENGTH,
+    )
+    if (!videoPathInput.trim()) {
+      runStatus = 'error'
+      runError = 'Provide a video file path before running ffmpeg.'
+      handle.update()
+      return
+    }
+    if (!outputPathInput.trim()) {
+      runStatus = 'error'
+      runError = 'Provide an output path before running ffmpeg.'
+      handle.update()
+      return
+    }
+    if (!previewReady || previewDuration <= 0) {
+      runStatus = 'error'
+      runError = 'Load the video preview before running ffmpeg.'
+      handle.update()
+      return
+    }
+    if (normalized.length === 0) {
+      runStatus = 'error'
+      runError = 'Add at least one trim range to run ffmpeg.'
+      handle.update()
+      return
+    }
+    runStatus = 'running'
+    runProgress = 0
+    runError = ''
+    runLogs = []
+    runController = new AbortController()
+    handle.update()
+
+    try {
+      const response = await fetch('/api/trim', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({
+          inputPath: videoPathInput.trim(),
+          outputPath: outputPathInput.trim(),
+          duration: previewDuration,
+          ranges: normalized,
+        }),
+        signal: runController.signal,
+      })
+      if (!response.ok) {
+        runStatus = 'error'
+        runError = await response.text()
+        handle.update()
+        return
+      }
+      const reader = response.body
+        ?.pipeThrough(new TextDecoderStream())
+        .getReader()
+      if (!reader) {
+        runStatus = 'error'
+        runError = 'Streaming response not available.'
+        handle.update()
+        return
+      }
+      let buffer = ''
+      while (true) {
+        const { value, done } = await reader.read()
+        if (done) break
+        buffer += value
+        const lines = buffer.split('\n')
+        buffer = lines.pop() ?? ''
+        for (const line of lines) {
+          if (!line.trim()) continue
+          let payload: any = null
+          try {
+            payload = JSON.parse(line)
+          } catch {
+            runLogs = [...runLogs, line.trim()]
+            continue
+          }
+          if (payload?.type === 'log' && payload.message) {
+            runLogs = [...runLogs, payload.message]
+          }
+          if (payload?.type === 'progress') {
+            const nextProgress =
+              typeof payload.progress === 'number' ? payload.progress : 0
+            runProgress = clamp(nextProgress, 0, 1)
+          }
+          if (payload?.type === 'done') {
+            if (payload.success) {
+              runStatus = 'success'
+              runProgress = 1
+            } else {
+              runStatus = 'error'
+              runError = payload.error ?? 'ffmpeg failed.'
+            }
+          }
+          handle.update()
+        }
+      }
+      if (runStatus === 'running') {
+        runStatus = 'error'
+        runError = 'ffmpeg stream ended unexpectedly.'
+        handle.update()
+      }
+    } catch (error) {
+      if (runController === null) {
+        // Cancellation already set the error message, don't overwrite it
+      } else {
+        runStatus = 'error'
+        runError =
+          error instanceof Error ? error.message : 'Unable to run ffmpeg.'
+      }
+      handle.update()
+    } finally {
+      runController = null
+    }
+  }
+
+  const cancelRun = () => {
+    if (runController) {
+      runController.abort()
+      runController = null
+      runStatus = 'error'
+      runError = 'Run canceled.'
+      handle.update()
+    }
+  }
+
+  return () => {
+    const duration = previewDuration
+    const sortedRanges = sortRanges(trimRanges)
+    const normalizedRanges = normalizeTrimRanges(
+      trimRanges,
+      duration,
+      MIN_TRIM_LENGTH,
+    )
+    const totalRemoved = normalizedRanges.reduce(
+      (total, range) => total + (range.end - range.start),
+      0,
+    )
+    const outputDuration = computeOutputDuration(
+      duration,
+      trimRanges,
+      MIN_TRIM_LENGTH,
+    )
+    const commandPreview =
+      videoPathInput.trim() && outputPathInput.trim() && normalizedRanges.length > 0
+        ? buildFfmpegCommandPreview({
+            inputPath: videoPathInput.trim(),
+            outputPath: outputPathInput.trim(),
+            ranges: normalizedRanges,
+            includeProgress: true,
+          })
+        : ''
+    const progressLabel =
+      runStatus === 'running'
+        ? `${Math.round(runProgress * 100)}%`
+        : runStatus === 'success'
+          ? 'Complete'
+          : runStatus === 'error'
+            ? 'Error'
+            : 'Idle'
+    const hintId = 'trim-keyboard-hint'
+    return (
+      <main class="app-shell trim-shell">
+        <header class="app-header">
+          <span class="app-kicker">Eprec Studio</span>
+          <h1 class="app-title">Trim points</h1>
+          <p class="app-subtitle">
+            Define ranges to remove, preview their timestamps on the timeline,
+            and run ffmpeg with live progress.
+          </p>
+          <nav class="app-nav">
+            <a class="app-link" href="/">
+              Editing workspace
+            </a>
+          </nav>
+        </header>
+
+        <section class="app-card app-card--full source-card">
+          <div class="source-header">
+            <div>
+              <h2>Video source</h2>
+              <p class="app-muted">
+                Load a local video file to calculate the trim timeline and output
+                command.
+              </p>
+            </div>
+            <span
+              class={classNames(
+                'status-pill',
+                pathStatus === 'ready' && 'status-pill--success',
+                pathStatus === 'loading' && 'status-pill--warning',
+                pathStatus === 'error' && 'status-pill--danger',
+                pathStatus === 'idle' && 'status-pill--info',
+              )}
+            >
+              {pathStatus}
+            </span>
+          </div>
+          <div class="source-grid">
+            <div class="source-fields">
+              <label class="input-label">
+                Video file path
+                <input
+                  class="text-input"
+                  type="text"
+                  placeholder="/path/to/video.mp4"
+                  value={videoPathInput}
+                  on={{
+                    input: (event) => {
+                      const target = event.currentTarget as HTMLInputElement
+                      updateVideoPathInput(target.value)
+                    },
+                  }}
+                />
+              </label>
+              <label class="input-label">
+                Output file path
+                <input
+                  class="text-input"
+                  type="text"
+                  placeholder="/path/to/video.trimmed.mp4"
+                  value={outputPathInput}
+                  on={{
+                    input: (event) => {
+                      const target = event.currentTarget as HTMLInputElement
+                      updateOutputPathInput(target.value)
+                    },
+                  }}
+                />
+              </label>
+              <div class="source-actions">
+                <button
+                  class="button button--primary"
+                  type="button"
+                  disabled={pathStatus === 'loading'}
+                  on={{ click: () => void loadVideoFromPath() }}
+                >
+                  {pathStatus === 'loading' ? 'Checking...' : 'Load video'}
+                </button>
+                <button
+                  class="button button--ghost"
+                  type="button"
+                  on={{ click: loadDemoVideo }}
+                >
+                  Use demo video
+                </button>
+              </div>
+              {pathStatus === 'error' && pathError ? (
+                <p class="status-note status-note--danger">{pathError}</p>
+              ) : null}
+            </div>
+            <div class="trim-preview">
+              <div class="panel-header">
+                <h3>Preview</h3>
+                <span class="summary-subtext">
+                  {previewReady
+                    ? `Duration ${formatTimestamp(previewDuration)}`
+                    : 'Load a video to preview'}
+                </span>
+              </div>
+              <video
+                class="timeline-video-player"
+                src={previewUrl}
+                controls
+                preload="metadata"
+                connect={(node: HTMLVideoElement, signal) => {
+                  previewNode = node
+                  const handleLoaded = () => {
+                    const nextDuration = Number(node.duration)
+                    previewDuration = Number.isFinite(nextDuration)
+                      ? nextDuration
+                      : 0
+                    previewReady = previewDuration > 0
+                    previewError = ''
+                    playhead = clamp(playhead, 0, previewDuration)
+                    if (!isTimeEditing) {
+                      timeInputValue = formatTimestamp(playhead)
+                    }
+                    if (
+                      Math.abs(node.currentTime - playhead) > 0.02 &&
+                      previewReady
+                    ) {
+                      node.currentTime = playhead
+                    }
+                    void loadWaveform(previewUrl)
+                    handle.update()
+                  }
+                  const handleTimeUpdate = () => {
+                    if (!previewReady || previewDuration <= 0) return
+                    playhead = clamp(node.currentTime, 0, previewDuration)
+                    if (!isTimeEditing) {
+                      timeInputValue = formatTimestamp(playhead)
+                    }
+                    handle.update()
+                  }
+                  const handlePlay = () => {
+                    previewPlaying = true
+                    handle.update()
+                  }
+                  const handlePause = () => {
+                    previewPlaying = false
+                    handle.update()
+                  }
+                  const handleError = () => {
+                    previewError = 'Unable to load the preview video.'
+                    previewReady = false
+                    handle.update()
+                  }
+                  node.addEventListener('loadedmetadata', handleLoaded)
+                  node.addEventListener('timeupdate', handleTimeUpdate)
+                  node.addEventListener('play', handlePlay)
+                  node.addEventListener('pause', handlePause)
+                  node.addEventListener('error', handleError)
+                  signal.addEventListener('abort', () => {
+                    node.removeEventListener('loadedmetadata', handleLoaded)
+                    node.removeEventListener('timeupdate', handleTimeUpdate)
+                    node.removeEventListener('play', handlePlay)
+                    node.removeEventListener('pause', handlePause)
+                    node.removeEventListener('error', handleError)
+                    if (previewNode === node) {
+                      previewNode = null
+                    }
+                  })
+                }}
+              />
+              {previewError ? (
+                <p class="status-note status-note--danger">{previewError}</p>
+              ) : null}
+              <div class="trim-time-row">
+                <label class="input-label">
+                  Video time
+                  <input
+                    class="text-input text-input--compact"
+                    type="text"
+                    placeholder="00:00.00"
+                    value={timeInputValue}
+                    disabled={!previewReady}
+                    on={{
+                      focus: () => {
+                        isTimeEditing = true
+                        handle.update()
+                      },
+                      input: (event) => {
+                        const target = event.currentTarget as HTMLInputElement
+                        updateTimeInput(target.value)
+                      },
+                      blur: () => commitTimeInput(),
+                      keydown: (event) => {
+                        if (event.key === 'Enter') {
+                          event.preventDefault()
+                          commitTimeInput()
+                        }
+                        if (event.key === 'Escape') {
+                          event.preventDefault()
+                          isTimeEditing = false
+                          timeInputValue = formatTimestamp(playhead)
+                          handle.update()
+                        }
+                      },
+                    }}
+                  />
+                </label>
+                <span class="summary-subtext">
+                  {previewPlaying ? 'Playing' : 'Paused'}
+                </span>
+              </div>
+            </div>
+          </div>
+        </section>
+
+        <section class="app-card app-card--full timeline-card">
+          <div class="timeline-header">
+            <div>
+              <h2>Trim timeline</h2>
+              <p class="app-muted">
+                Drag the trim handles or use arrow keys to fine-tune start and
+                end timestamps.
+              </p>
+            </div>
+            <button
+              class="button button--primary"
+              type="button"
+              disabled={!previewReady}
+              on={{ click: addTrimRange }}
+            >
+              Add trim range
+            </button>
+          </div>
+          <p class="app-muted trim-hint" id={hintId}>
+            Use arrow keys to nudge by {KEYBOARD_STEP}s. Hold Shift for {SHIFT_STEP}
+            s.
+          </p>
+          <div
+            class={classNames(
+              'trim-track',
+              !previewReady && 'trim-track--disabled',
+            )}
+            connect={(node: HTMLDivElement) => {
+              trackNode = node
+            }}
+            style={`--playhead:${duration > 0 ? (playhead / duration) * 100 : 0}%`}
+          >
+            <canvas
+              class="trim-waveform"
+              connect={(node: HTMLCanvasElement, signal) => {
+                waveformNode = node
+                drawWaveform()
+                if (typeof ResizeObserver === 'undefined') return
+                const observer = new ResizeObserver(() => drawWaveform())
+                observer.observe(node)
+                signal.addEventListener('abort', () => {
+                  observer.disconnect()
+                  if (waveformNode === node) {
+                    waveformNode = null
+                  }
+                })
+              }}
+            />
+            {sortedRanges.map((range) => (
+              <div
+                class={classNames(
+                  'trim-range',
+                  range.id === selectedRangeId && 'is-selected',
+                )}
+                style={`--range-left:${duration > 0 ? (range.start / duration) * 100 : 0}%; --range-width:${duration > 0 ? ((range.end - range.start) / duration) * 100 : 0}%`}
+                on={{ click: () => selectRange(range.id) }}
+                role="group"
+                aria-label={`Trim range ${formatTimestamp(range.start)} to ${formatTimestamp(range.end)}`}
+              >
+                <span class="trim-range-label">
+                  Remove {formatTimestamp(range.start)} -{' '}
+                  {formatTimestamp(range.end)}
+                </span>
+                <span class="trim-handle-label trim-handle-label--start">
+                  {formatTimestamp(range.start)}
+                </span>
+                <button
+                  type="button"
+                  class="trim-handle trim-handle--start"
+                  role="slider"
+                  aria-label="Trim start"
+                  aria-valuemin={0}
+                  aria-valuemax={duration}
+                  aria-valuenow={range.start}
+                  aria-valuetext={formatTimestamp(range.start)}
+                  aria-describedby={hintId}
+                  on={{
+                    focus: () =>
+                      syncVideoToTime(range.start, { updateInput: true }),
+                    pointerdown: (event) =>
+                      startDrag(event, range.id, 'start'),
+                    pointermove: moveDrag,
+                    pointerup: endDrag,
+                    pointercancel: endDrag,
+                    keydown: (event) => handleRangeKey(event, range, 'start'),
+                  }}
+                />
+                <span class="trim-handle-label trim-handle-label--end">
+                  {formatTimestamp(range.end)}
+                </span>
+                <button
+                  type="button"
+                  class="trim-handle trim-handle--end"
+                  role="slider"
+                  aria-label="Trim end"
+                  aria-valuemin={0}
+                  aria-valuemax={duration}
+                  aria-valuenow={range.end}
+                  aria-valuetext={formatTimestamp(range.end)}
+                  aria-describedby={hintId}
+                  on={{
+                    focus: () =>
+                      syncVideoToTime(range.end, { updateInput: true }),
+                    pointerdown: (event) => startDrag(event, range.id, 'end'),
+                    pointermove: moveDrag,
+                    pointerup: endDrag,
+                    pointercancel: endDrag,
+                    keydown: (event) => handleRangeKey(event, range, 'end'),
+                  }}
+                />
+              </div>
+            ))}
+            <span class="trim-playhead" />
+          </div>
+          <div class="trim-waveform-meta">
+            {waveformStatus === 'loading' ? (
+              <span class="summary-subtext">Rendering waveform...</span>
+            ) : waveformStatus === 'error' ? (
+              <span class="summary-subtext">{waveformError}</span>
+            ) : (
+              <span class="summary-subtext">
+                Waveform {waveformSamples.length > 0 ? 'ready' : 'idle'}
+              </span>
+            )}
+          </div>
+          <div class="timeline-controls">
+            <label class="control-label">
+              Playhead
+              <span class="control-value">{formatTimestamp(playhead)}</span>
+            </label>
+            <input
+              class="timeline-slider"
+              type="range"
+              min="0"
+              max={duration || 1}
+              step={PLAYHEAD_STEP}
+              value={playhead}
+              disabled={!previewReady}
+              on={{
+                input: (event) => {
+                  const target = event.currentTarget as HTMLInputElement
+                  setPlayhead(Number(target.value))
+                },
+              }}
+            />
+            <button
+              class="button button--ghost"
+              type="button"
+              disabled={!previewReady || sortedRanges.length === 0}
+              on={{
+                click: () => {
+                  const next = sortedRanges.find(
+                    (range) => range.start > playhead,
+                  )
+                  if (next) setPlayhead(next.start)
+                },
+              }}
+            >
+              Next trim
+            </button>
+          </div>
+        </section>

+        <div class="app-grid app-grid--two trim-grid">
+          <section class="app-card">
+            <div class="panel-header">
+              <h2>Trim ranges</h2>
+              <span class="summary-subtext">
+                {sortedRanges.length} total
+              </span>
+            </div>
+            {sortedRanges.length === 0 ? (
+              <p class="app-muted">
+                Add a trim range to start removing segments.
+              </p>
+            ) : (
+              <ul class="stacked-list trim-range-list">
+                {sortedRanges.map((range) => (
+                  <li
+                    class={classNames(
+                      'stacked-item',
+                      'trim-range-row',
+                      range.id === selectedRangeId && 'is-selected',
+                    )}
+                  >
+                    <button
+                      class="trim-range-summary"
+                      type="button"
+                      on={{ click: () => selectRange(range.id) }}
+                    >
+                      <span class="trim-range-time">
+                        {formatTimestamp(range.start)} -{' '}
+                        {formatTimestamp(range.end)}
+                      </span>
+                      <span class="summary-subtext">
+                        Remove {formatSeconds(range.end - range.start)}
+                      </span>
+                    </button>
+                    <div class="trim-range-fields">
+                      <label class="input-label">
+                        Start
+                        <input
+                          class="text-input text-input--compact"
+                          type="number"
+                          min="0"
+                          max={duration}
+                          step={KEYBOARD_STEP}
+                          value={range.start.toFixed(2)}
+                          on={{
+                            focus: () =>
+                              syncVideoToTime(range.start, {
+                                updateInput: true,
+                              }),
+                            input: (event) => {
+                              const target =
+                                event.currentTarget as HTMLInputElement
+                              const nextValue = Number(target.value)
+                              if (!Number.isFinite(nextValue)) return
+                              updateTrimRange(
+                                range.id,
+                                { start: nextValue },
+                                'start',
+                              )
+                              syncVideoToTime(nextValue, {
+                                updateInput: true,
+                              })
+                            },
+                            keydown: (event) =>
+                              handleNumberKey(event, range, 'start'),
+                          }}
+                        />
+                      </label>
+                      <label class="input-label">
+                        End
+                        <input
+                          class="text-input text-input--compact"
+                          type="number"
+                          min="0"
+                          max={duration}
+                          step={KEYBOARD_STEP}
+                          value={range.end.toFixed(2)}
+                          on={{
+                            focus: () =>
+                              syncVideoToTime(range.end, {
+                                updateInput: true,
+                              }),
+                            input: (event) => {
+                              const target =
+                                event.currentTarget as HTMLInputElement
+                              const nextValue = Number(target.value)
+                              if (!Number.isFinite(nextValue)) return
+                              updateTrimRange(
+                                range.id,
+                                { end: nextValue },
+                                'end',
+                              )
+                              syncVideoToTime(nextValue, {
+                                updateInput: true,
+                              })
+                            },
+                            keydown: (event) =>
+                              handleNumberKey(event, range, 'end'),
+                          }}
+                        />
+                      </label>
+                      <button
+                        class="button button--ghost"
+                        type="button"
+                        on={{ click: () => removeTrimRange(range.id) }}
+                      >
+                        Remove
+                      </button>
+                    </div>
+                  </li>
+                ))}
+              </ul>
+            )}
+          </section>

+          <section class="app-card">
+            <h2>Output summary</h2>
+            <div class="summary-grid">
+              <div class="summary-item">
+                <span class="summary-label">Removed</span>
+                <span class="summary-value">{formatSeconds(totalRemoved)}</span>
+                <span class="summary-subtext">
+                  {normalizedRanges.length} normalized ranges
+                </span>
+              </div>
+              <div class="summary-item">
+                <span class="summary-label">Output length</span>
+                <span class="summary-value">
+                  {previewReady
+                    ? formatTimestamp(outputDuration)
+                    : '--:--.--'}
+                </span>
+                <span class="summary-subtext">
+                  {previewReady && duration > 0
+                    ? `${Math.round((outputDuration / duration) * 100)}% kept`
+                    : 'Load a video to calculate'}
+                </span>
+              </div>
+              <div class="summary-item">
+                <span class="summary-label">Command status</span>
+                <span class="summary-value">{progressLabel}</span>
+                <span class="summary-subtext">
+                  {runStatus === 'running'
+                    ? 'ffmpeg in progress'
+                    : 'Ready to run'}
+                </span>
+              </div>
+            </div>
+          </section>
+        </div>

+        <section class="app-card app-card--full trim-command-card">
+          <div class="panel-header">
+            <h2>ffmpeg command</h2>
+            <div class="trim-command-actions">
+              <button
+                class="button button--primary"
+                type="button"
+                disabled={runStatus === 'running' || !commandPreview}
+                on={{ click: runTrimCommand }}
+              >
+                {runStatus === 'running' ? 'Running...' : 'Run ffmpeg'}
+              </button>
+              <button
+                class="button button--ghost"
+                type="button"
+                disabled={runStatus !== 'running'}
+                on={{ click: cancelRun }}
+              >
+                Cancel
+              </button>
+            </div>
+          </div>
+          <p class="app-muted">
+            Use this command in your terminal, or run it here to watch progress
+            stream back into the UI.
+          </p>
+          {commandPreview ? (
+            <pre class="command-preview">{commandPreview}</pre>
+          ) : (
+            <p class="status-note status-note--warning">
+              Load a video and add at least one trim range to generate the
+              command.
+            </p>
+          )}
+          <div class="trim-progress">
+            <progress max="1" value={runProgress} />
+            <span class="summary-subtext">{progressLabel}</span>
+          </div>
+          {runError ? (
+            <p class="status-note status-note--danger">{runError}</p>
+          ) : null}
+          <pre class="command-preview trim-output">
+            {runLogs.length > 0
+              ? runLogs.slice(-200).join('\n')
+              : 'ffmpeg output will appear here.'}
+          </pre>
+        </section>
+      </main>
+    )
+  }
+}
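
The component above leans on helpers from `package/app/trim-commands.ts` (+154 lines), whose hunk is not shown here. Based only on the call sites in trim-points.tsx, their shapes are roughly as follows; treat the exact signatures and behavior as assumptions until you read that file:

```ts
// Hypothetical declarations reconstructed from the call sites in trim-points.tsx.
export type TrimRange = { start: number; end: number }

// Returns ranges clamped into [0, duration] and at least minLength seconds long;
// the precise sorting/merging rules live in trim-commands.ts.
export declare function normalizeTrimRanges(
  ranges: TrimRange[],
  duration: number,
  minLength: number,
): TrimRange[]

// Duration of the output once the normalized ranges are removed.
export declare function computeOutputDuration(
  duration: number,
  ranges: TrimRange[],
  minLength: number,
): number

// Shell-ready ffmpeg invocation shown in the "ffmpeg command" card.
export declare function buildFfmpegCommandPreview(options: {
  inputPath: string
  outputPath: string
  ranges: TrimRange[]
  includeProgress?: boolean
}): string
```

The page also reads an optional bootstrap config from the window global declared at the top of the file; presumably `src/app-server.ts` (also touched in this release) injects it, though that hunk is not shown here either. Something like:

```ts
// Assumed bootstrap injected into the page before the client bundle runs.
window.__EPREC_APP__ = { initialVideoPath: '/path/to/video.mp4' }
```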