straight_to_video 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +16 -0
- data/LICENSE.txt +7 -0
- data/README.md +156 -0
- data/Rakefile +4 -0
- data/app/assets/javascripts/mediabunny.min.mjs +110 -0
- data/app/assets/javascripts/straight-to-video.js +360 -0
- data/assets/img/backdrop.webp +0 -0
- data/assets/img/logo.webp +0 -0
- data/config/importmap.rb +2 -0
- data/index.html +247 -0
- data/index.js +359 -0
- data/lib/straight_to_video/engine.rb +22 -0
- data/lib/straight_to_video/version.rb +3 -0
- data/lib/straight_to_video.rb +2 -0
- data/package-lock.json +146 -0
- data/package.json +47 -0
- data/playwright.config.mjs +33 -0
- data/script/release +75 -0
- data/script/test +14 -0
- data/script/upgrade +9 -0
- data/script/vendor +52 -0
- metadata +83 -0
data/app/assets/javascripts/straight-to-video.js
ADDED
@@ -0,0 +1,360 @@
// straight-to-video@0.0.3 vendored by the straight_to_video gem
// straight-to-video - https://github.com/searlsco/straight-to-video

// ----- External imports -----
import {
  Input, ALL_FORMATS, BlobSource, AudioBufferSink,
  Output, Mp4OutputFormat, BufferTarget,
  AudioSampleSource, AudioSample, EncodedVideoPacketSource, EncodedPacket
} from 'mediabunny'

// ----- Constants -----
const MAX_LONG_SIDE = 1920
const TARGET_AUDIO_BITRATE = 96_000
const TARGET_AUDIO_SR = 48_000
const TARGET_AUDIO_CHANNELS = 2

// ----- Video metadata probe -----
async function probeVideo (file) {
  return new Promise((resolve, reject) => {
    const url = URL.createObjectURL(file)
    const v = document.createElement('video')
    v.preload = 'metadata'
    v.muted = true
    v.src = url
    v.onloadedmetadata = () => {
      const width = v.videoWidth
      const height = v.videoHeight
      const duration = v.duration
      URL.revokeObjectURL(url)
      resolve({ width, height, duration })
    }
    v.onerror = () => { URL.revokeObjectURL(url); reject(v.error || new Error('failed to load metadata')) }
  })
}

// ----- Audio helpers -----
async function decodeAudioPCM (file, { duration }) {
  const totalFrames = Math.max(1, Math.ceil(Number(duration) * TARGET_AUDIO_SR))
  const tracks = await (async () => {
    try {
      const input = new Input({ source: new BlobSource(file), formats: ALL_FORMATS })
      return await input.getTracks()
    } catch (_) {
      return []
    }
  })()
  const audio = tracks.find(t => typeof t.isAudioTrack === 'function' && t.isAudioTrack())
  if (!audio) return new AudioBuffer({ length: totalFrames, sampleRate: TARGET_AUDIO_SR, numberOfChannels: TARGET_AUDIO_CHANNELS })

  const ctx = new OfflineAudioContext({ numberOfChannels: TARGET_AUDIO_CHANNELS, length: totalFrames, sampleRate: TARGET_AUDIO_SR })
  const sink = new AudioBufferSink(audio)
  for await (const { buffer, timestamp } of sink.buffers(0, Number(duration))) {
    const src = ctx.createBufferSource()
    src.buffer = buffer
    src.connect(ctx.destination)
    src.start(Math.max(0, Number(timestamp)))
  }
  return await ctx.startRendering()
}

async function renderStereo48kExact (buffer, exactFrames) {
  const frames = Math.max(1024, Number(exactFrames))
  const ctx = new OfflineAudioContext({ numberOfChannels: TARGET_AUDIO_CHANNELS, length: frames, sampleRate: TARGET_AUDIO_SR })
  const src = ctx.createBufferSource()
  src.buffer = buffer
  src.connect(ctx.destination)
  src.start(0)
  return await ctx.startRendering()
}

function interleaveStereoF32 (buffer) {
  const len = buffer.length
  const out = new Float32Array(len * TARGET_AUDIO_CHANNELS)
  const ch0 = buffer.getChannelData(0)
  const ch1 = buffer.getChannelData(1)
  for (let i = 0, j = 0; i < len; i++, j += 2) {
    out[j] = ch0[i]
    out[j + 1] = ch1[i]
  }
  return out
}

// ----- Video pipeline -----
async function canOptimizeVideo (file) {
  if (!(file instanceof File)) return { ok: false, reason: 'not-a-file', message: 'Argument provided is not a File.' }
  const env = typeof window !== 'undefined'
    && 'VideoEncoder' in window
    && 'OfflineAudioContext' in window
    && typeof document?.createElement === 'function'
  if (!env) return { ok: false, reason: 'unsupported-environment', message: 'Browser does not support WebCodecs or OfflineAudioContext.' }
  try {
    const { width, height, duration } = await probeVideo(file)
    const long = Math.max(width, height)
    const scale = Math.min(1, MAX_LONG_SIDE / Math.max(2, long))
    const targetWidth = Math.max(2, Math.round(width * scale))
    const targetHeight = Math.max(2, Math.round(height * scale))
    const fps = Math.max(width, height) <= 1920 ? 30 : 60
    const sup = await selectVideoEncoderConfig({ width: targetWidth, height: targetHeight, fps }).then(() => true).catch(() => false)
    if (!sup) return { ok: false, reason: 'unsupported-video-config', message: 'No supported encoder configuration for this resolution on this device.' }

    // Header sniffing when file.type is empty/incorrect
    const type = String(file.type || '').toLowerCase()
    if (!type) {
      const blob = file.slice(0, 4096)
      const buf = new Uint8Array(await blob.arrayBuffer())
      const asAscii = (u8) => String.fromCharCode(...u8)
      // MP4/MOV ftyp signature typically at offset 4..
      const ascii = asAscii(buf)
      const hasFtyp = ascii.includes('ftyp')
      // WebM/Matroska: EBML header 1A 45 DF A3
      const hasEbml = buf.length >= 4 && buf[0] === 0x1A && buf[1] === 0x45 && buf[2] === 0xDF && buf[3] === 0xA3
      if (!(hasFtyp || hasEbml)) return { ok: false, reason: 'unknown-container', message: 'Unrecognized container; expected MP4/MOV or WebM.' }
    }
    return { ok: true, reason: 'ok', message: 'ok' }
  } catch (e) {
    return { ok: false, reason: 'probe-failed', message: String(e?.message || e) }
  }
}

async function optimizeVideo (file, { onProgress } = {}) {
  if (!(file instanceof File)) return { changed: false, file }
  const type = file.type || ''
  if (!/^video\//i.test(type)) return { changed: false, file }
  if (typeof window === 'undefined' || !('VideoEncoder' in window)) return { changed: false, file }
  const feas = await canOptimizeVideo(file)
  if (!feas.ok) return { changed: false, file }

  const srcMeta = await probeVideo(file)
  const newFile = await encodeVideo({ file, srcMeta: { w: srcMeta.width, h: srcMeta.height, duration: srcMeta.duration }, onProgress })
  return { changed: true, file: newFile }
}

async function selectVideoEncoderConfig ({ width, height, fps }) {
  const hevc = { codec: 'hvc1.1.4.L123.B0', width, height, framerate: fps, hardwareAcceleration: 'prefer-hardware', hevc: { format: 'hevc' } }
  const supH = await VideoEncoder.isConfigSupported(hevc).catch(() => ({ supported: false }))
  if (supH.supported) return { codecId: 'hevc', config: supH.config }

  const avc = { codec: 'avc1.64002A', width, height, framerate: fps, hardwareAcceleration: 'prefer-hardware', avc: { format: 'avc' } }
  const supA = await VideoEncoder.isConfigSupported(avc)
  return { codecId: 'avc', config: supA.config }
}

async function waitForFrameReady (video, budgetMs) {
  if (typeof video.requestVideoFrameCallback !== 'function') return false
  return await new Promise((resolve) => {
    let settled = false
    const to = setTimeout(() => { if (!settled) { settled = true; resolve(false) } }, Math.max(1, budgetMs || 17))
    video.requestVideoFrameCallback(() => { if (!settled) { settled = true; clearTimeout(to); resolve(true) } })
  })
}

async function encodeVideo ({ file, srcMeta, onProgress }) {
  const w = srcMeta.w
  const h = srcMeta.h
  const durationCfr = Number(srcMeta.duration)
  const long = Math.max(w, h)
  const scale = Math.min(1, MAX_LONG_SIDE / Math.max(2, long))
  const targetWidth = Math.max(2, Math.round(w * scale))
  const targetHeight = Math.max(2, Math.round(h * scale))

  const targetFps = Math.max(w, h) <= 1920 ? 30 : 60
  const step = 1 / Math.max(1, targetFps)
  const frames = Math.max(1, Math.floor(durationCfr / step))

  const output = new Output({ format: new Mp4OutputFormat({ fastStart: 'in-memory' }), target: new BufferTarget() })
  const { codecId, config: usedCfg } = await selectVideoEncoderConfig({ width: targetWidth, height: targetHeight, fps: targetFps })
  const videoTrack = new EncodedVideoPacketSource(codecId)
  output.addVideoTrack(videoTrack, { frameRate: targetFps })

  const _warn = console.warn
  console.warn = (...args) => {
    const m = args && args[0]
    if (typeof m === 'string' && m.includes('Unsupported audio codec') && m.includes('apac')) return
    _warn.apply(console, args)
  }
  const audioBuffer = await decodeAudioPCM(file, { duration: durationCfr })
  console.warn = _warn

  const audioSource = new AudioSampleSource({
    codec: 'aac',
    bitrate: TARGET_AUDIO_BITRATE,
    bitrateMode: 'constant',
    numberOfChannels: TARGET_AUDIO_CHANNELS,
    sampleRate: TARGET_AUDIO_SR,
    onEncodedPacket: (_packet, meta) => {
      const aot = 2; const idx = 3; const b0 = (aot << 3) | (idx >> 1); const b1 = ((idx & 1) << 7) | (TARGET_AUDIO_CHANNELS << 3)
      meta.decoderConfig = { codec: 'mp4a.40.2', numberOfChannels: TARGET_AUDIO_CHANNELS, sampleRate: TARGET_AUDIO_SR, description: new Uint8Array([b0, b1]) }
    }
  })
  output.addAudioTrack(audioSource)

  await output.start()

  let codecDesc = null
  const pendingPackets = []
  const ve = new VideoEncoder({
    output: (chunk, meta) => {
      if (!codecDesc && meta?.decoderConfig?.description) codecDesc = meta.decoderConfig.description
      pendingPackets.push({ chunk })
    },
    error: () => {}
  })
  ve.configure(usedCfg)

  const url = URL.createObjectURL(file)
  const v = document.createElement('video')
  v.muted = true; v.preload = 'auto'; v.playsInline = true
  v.src = url
  await new Promise((resolve, reject) => { v.onloadedmetadata = resolve; v.onerror = () => reject(new Error('video load failed')) })
  const canvas = document.createElement('canvas'); canvas.width = targetWidth; canvas.height = targetHeight
  const ctx = canvas.getContext('2d', { alpha: false })

  for (let i = 0; i < frames; i++) {
    const t = i * step
    const targetTime = Math.min(Math.max(0, t), Math.max(0.000001, durationCfr - 0.000001))
    const drawTime = i === 0
      ? Math.min(Math.max(0, t + (step * 0.5)), Math.max(0.000001, durationCfr - 0.000001))
      : targetTime

    await new Promise((resolve) => { v.currentTime = drawTime; v.onseeked = () => resolve() })
    const budgetMs = Math.min(34, Math.max(17, Math.round(step * 1000)))
    const presented = await waitForFrameReady(v, budgetMs)
    if (!presented && i === 0) {
      const nudge = Math.min(step * 0.25, 0.004)
      await new Promise((resolve) => { v.currentTime = Math.min(drawTime + nudge, Math.max(0.000001, durationCfr - 0.000001)); v.onseeked = () => resolve() })
    }

    ctx.drawImage(v, 0, 0, canvas.width, canvas.height)
    const vf = new VideoFrame(canvas, { timestamp: Math.round(t * 1e6), duration: Math.round(step * 1e6) })
    ve.encode(vf, { keyFrame: i === 0 })
    vf.close()

    if (typeof onProgress === 'function') {
      try { onProgress(Math.min(1, (i + 1) / frames)) } catch (_) {}
    }
  }
  await ve.flush()
  URL.revokeObjectURL(url)

  const muxCount = Math.min(frames, pendingPackets.length)

  for (let i = 0; i < muxCount; i++) {
    const { chunk } = pendingPackets[i]
    const data = new Uint8Array(chunk.byteLength); chunk.copyTo(data)
    const ts = i * step; const dur = step
    const pkt = new EncodedPacket(data, chunk.type === 'key' ? 'key' : 'delta', ts, dur)
    await videoTrack.add(pkt, { decoderConfig: { codec: usedCfg.codec, codedWidth: targetWidth, codedHeight: targetHeight, description: codecDesc } })
  }

  const samplesPerVideoFrame = TARGET_AUDIO_SR / targetFps
  const totalVideoSamples = muxCount * samplesPerVideoFrame
  const targetSamples = Math.max(1024, Math.floor(totalVideoSamples / 1024) * 1024 - 2048)
  const audioExact = await renderStereo48kExact(audioBuffer, targetSamples)
  const interleaved = interleaveStereoF32(audioExact)
  const sample = new AudioSample({ format: 'f32', sampleRate: TARGET_AUDIO_SR, numberOfChannels: TARGET_AUDIO_CHANNELS, timestamp: 0, data: interleaved })
  await audioSource.add(sample)
  audioSource.close()

  await output.finalize()
  const { buffer } = output.target
  const payload = new Uint8Array(buffer)
  const nm = file.name; const dot = nm.lastIndexOf('.')
  const newName = `${nm.substring(0, dot)}-optimized.mp4`
  return new File([payload], newName, { type: 'video/mp4', lastModified: Date.now() })
}

// ----- Controller registration (optional) -----
function registerStraightToVideoController (app, opts = {}) {
  const { Controller, name = 'straight-to-video' } = opts || {}
  if (!Controller) {
    throw new Error('registerStraightToVideoController requires a Controller class from @hotwired/stimulus. Call as registerStraightToVideoController(app, { Controller, name? }).')
  }

  class StraightToVideoController extends Controller {
    static get targets () { return ['fileInput'] }
    static get values () { return { submitting: Boolean } }

    connect () {
      this._onWindowSubmitCapture = (e) => this._onWindowSubmitCaptureHandler(e)
      window.addEventListener('submit', this._onWindowSubmitCapture, { capture: true })
    }

    disconnect () {
      if (this._onWindowSubmitCapture) window.removeEventListener('submit', this._onWindowSubmitCapture, { capture: true })
    }

    async change (e) {
      const fileInput = e.target
      if (!fileInput?.files?.length || this.submittingValue || this._hasFlag(fileInput, 'processing')) return
      this._unmarkFlag(fileInput, 'processed')
      delete fileInput.dataset.summary
      await this._processFileInput(fileInput)
    }

    async _onWindowSubmitCaptureHandler (e) {
      if (e.target !== this.element) return
      const toProcess = this.fileInputTargets.filter((fi) => fi?.files?.length && !this._hasFlag(fi, 'processed'))
      if (toProcess.length === 0) return

      e.preventDefault()
      e.stopPropagation()
      if (typeof e.stopImmediatePropagation === 'function') e.stopImmediatePropagation()

      this.submittingValue = true
      await Promise.allSettled(toProcess.map((fi) => this._processFileInput(fi)))
      this.submittingValue = false
      this._resubmit(e.submitter)
    }

    _swapFile (input, newFile) {
      const dt = new DataTransfer()
      dt.items.add(newFile)
      input.files = dt.files
    }

    _hasFlag (input, flag) { return input.dataset[flag] === '1' }
    _markFlag (input, flag) { input.dataset[flag] = '1' }
    _unmarkFlag (input, flag) { delete input.dataset[flag] }

    submittingValueChanged () {
      const controls = this.element.querySelectorAll('input, select, textarea, button')
      controls.forEach(el => { el.disabled = this.submittingValue })
    }

    async _processFileInput (fileInput) {
      this._markFlag(fileInput, 'processing')
      fileInput.disabled = true
      try {
        const original = fileInput.files[0]
        const { changed, file } = await optimizeVideo(original, {
          onProgress: (ratio) => this._fire(fileInput, 'progress', { progress: Math.round(ratio * 100) })
        })
        if (changed) this._swapFile(fileInput, file)
        this._markFlag(fileInput, 'processed')
        this._fire(fileInput, 'done', { changed })
      } catch (err) {
        console.error(err)
        this._markFlag(fileInput, 'processed')
        this._fire(fileInput, 'error', { error: err })
      } finally {
        fileInput.disabled = false
        this._unmarkFlag(fileInput, 'processing')
      }
    }

    _fire (el, name, detail = {}) {
      el.dispatchEvent(new CustomEvent(`straight-to-video:${name}`, { bubbles: true, cancelable: true, detail }))
    }

    _resubmit (submitter) {
      setTimeout(() => { submitter ? this.element.requestSubmit(submitter) : this.element.requestSubmit() }, 0)
    }
  }

  app.register(name, StraightToVideoController)
  return StraightToVideoController
}

// Public API
export { canOptimizeVideo, optimizeVideo, registerStraightToVideoController }
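
For orientation, here is a minimal standalone usage sketch of that public API. It is not part of the package: the file input wiring and the "video" element id are hypothetical, while the signatures and return shapes of canOptimizeVideo and optimizeVideo come from the code above, and the 'straight-to-video' specifier matches the importmap used by the demo page later in this diff.

// Sketch only: assumes an <input type="file" id="video"> exists on the page.
import { canOptimizeVideo, optimizeVideo } from 'straight-to-video'

document.getElementById('video').addEventListener('change', async (event) => {
  const original = event.target.files?.[0]
  if (!original) return

  // Feasibility probe returns { ok, reason, message }
  const feasibility = await canOptimizeVideo(original)
  if (!feasibility.ok) {
    console.log(`Uploading as-is (${feasibility.reason}): ${feasibility.message}`)
    return
  }

  // Re-encodes in the browser; resolves to { changed, file }
  const { changed, file } = await optimizeVideo(original, {
    onProgress: (ratio) => console.log(`Encoding ${Math.round(ratio * 100)}%`)
  })
  console.log(changed ? `Optimized: ${file.name}, ${file.size} bytes` : 'File left unchanged')
})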
data/assets/img/backdrop.webp
Binary file
data/assets/img/logo.webp
Binary file
data/config/importmap.rb
ADDED
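The two added lines of data/config/importmap.rb are not rendered in this diff. As a hypothetical sketch only, assuming the conventional importmap-rails pin DSL and the vendored asset filenames listed above, the engine's pins could look something like:

# Hypothetical reconstruction; the actual contents of data/config/importmap.rb are not shown here.
pin "straight-to-video", to: "straight-to-video.js"
pin "mediabunny", to: "mediabunny.min.mjs"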
data/index.html
ADDED
@@ -0,0 +1,247 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <title>Straight‑to‑Video — Save bandwidth and server costs</title>
  <meta name="description" content="Browser-based, hardware-accelerated video upload optimization" />
  <meta property="og:image" content="https://searlsco.github.io/straight-to-video/assets/img/backdrop.webp">
  <link rel="preconnect" href="https://cdn.jsdelivr.net" />
  <script type="importmap">
  {
    "imports": {
      "straight-to-video": "./index.js",
      "@hotwired/stimulus": "https://cdn.jsdelivr.net/npm/@hotwired/stimulus/+esm",
      "mediabunny": "https://cdn.jsdelivr.net/npm/mediabunny/+esm"
    }
  }
  </script>
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/prismjs@1.29.0/themes/prism-tomorrow.min.css">
  <style>
    :root {
      --bg: #0a0b14;
      --bg2: #090a12;
      --fg: #eaf4ff;
      --muted: #b5c0d0cc;
      --accent: #7afcff; /* neon cyan */
      --accent2: #ff5ef1; /* neon magenta */
      --accent3: #ffd36e; /* warm neon */
      --grid: #ffffff0d;
      --section-mt: 48px;
      color-scheme: dark;
    }
    body {
      margin: 0; color: var(--fg); background:
        radial-gradient(1200px 600px at 10% 10%, #101225, transparent),
        radial-gradient(900px 500px at 90% 10%, #170f2a, transparent),
        linear-gradient(180deg, var(--bg), var(--bg2));
      font-family: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Inter, sans-serif;
      line-height: 1.35;
    }
    a { color: var(--accent); text-decoration: none; }
    a:hover { text-decoration: underline; }
    .wrap { max-width: 1100px; margin: 0 auto; padding: 24px; }

    .hero { margin: 0; display: grid; place-items: center; text-align: center; position: relative; }
    .hero::after { content: ''; position: absolute; left: 50%; transform: translateX(-50%); bottom: -48px; width: min(980px, 96%); height: 160px; background: radial-gradient(520px 100px at 50% 0, #ffffff0a, transparent 75%); pointer-events: none; filter: blur(12px); }
    .hero-logo { max-width: min(780px, 90%); width: auto; height: auto; max-height: 40dvh; filter: drop-shadow(0 6px 28px #0008); margin-top: var(--section-mt); }
    .hero-tag { margin-top: 12px; font-size: clamp(44px, 7.5vw, 93px); font-weight: 900; letter-spacing: .3px; }
    .hero-sub { margin-top: 12px; font-size: clamp(20px, 2.8vw, 26px); font-weight: 700; color: var(--fg); max-width: 860px; }
    .btn { padding: 14px 18px; border-radius: 14px; border: 1px solid #ffffff22; background: #101421; color: var(--fg); cursor: pointer; font-weight: 700; letter-spacing: .2px; }
    .btn.primary { background: linear-gradient(135deg, #1b1f33, #0f1a26); border-color: #ffffff33; box-shadow: 0 0 0 1px #ffffff0a inset, 0 0 24px #7afcff20; }
    .btn:hover { transform: translateY(-1px); box-shadow: 0 6px 18px #0008, 0 0 24px #7afcff33; }
    .btn.ghost { background: var(--bg); border-color: #ffffff33; box-shadow: none; }
    .btn.ghost:hover { transform: translateY(-1px); box-shadow: 0 6px 18px #0006; border-color: #ffffff55; }
    .btn.readme { color: #ff8a50; border-color: #ff6a4a; }
    .btn.readme:hover { box-shadow: 0 6px 18px #0006, 0 0 16px 2px rgba(255, 72, 32, 0.25); border-color: #ff8a50; color: #ff9b6a; }

    /* Try It (single‑button) */
    .try { margin: var(--section-mt) 0 0; padding: 0; border: 0; background: none; backdrop-filter: none; display: grid; gap: 0; justify-items: center; position: relative; }
    .try::before { content: ''; position: absolute; top: -24px; left: 50%; transform: translateX(-50%); width: min(980px, 96%); height: 80px; background: radial-gradient(520px 80px at 50% 0, #ffffff06, transparent 75%); pointer-events: none; filter: blur(10px); }
    .pill { padding: 6px 10px; border-radius: 999px; border: 1px solid #ffffff1f; font-size: 12px; color: var(--muted); }
    progress { width: min(420px, 90%); height: 12px; margin-top: var(--section-mt); }
    .hidden { display: none; }
    .mono { font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-size: 13px; margin: 0; }
    .small { font-size: 13px; color: var(--muted); }
    .sizes { font-size: clamp(18px, 3vw, 28px); color: var(--fg); }
    .sizes strong { font-weight: 900; }
    .sizes { margin-top: 8px; }

    #tryBtn {
      font-size: clamp(18px, 2.8vw, 26px);
      padding: 18px 32px;
      border-radius: 16px;
      border: 1px solid #ffffff33;
      background: linear-gradient(135deg, #ff3b3b, #ff8a50);
      color: #fff;
      text-shadow: 0 1px 6px #000c;
      box-shadow: 0 12px 28px #0008, 0 0 64px 12px rgba(255, 66, 48, 0.58);
    }
    #tryBtn:hover { transform: translateY(-1px); box-shadow: 0 18px 36px #000a, 0 0 86px 14px rgba(255, 66, 48, 0.65); }
    #tryBtn:active { transform: translateY(0); }

    /* Features */
    .features { display: grid; grid-template-columns: repeat(3, 1fr); gap: 16px; margin: var(--section-mt) 0 0; }
    .card { padding: 16px; border-radius: 14px; border: 1px solid #ffffff1f; background: #0c0f1b; }
    .card h3 { margin: 0 0 6px; font-size: 16px; }
    .card p { margin: 0; color: var(--muted); font-size: 14px; }

    /* Code */
    pre[class*="language-"] {
      border-radius: 12px;
      background: var(--bg);
      border: 1px solid #ffffff26;
      box-shadow: none;
      position: relative;
      padding: 12px 14px;
      line-height: 1.4;
      margin: 0;
    }
    pre[class*="language-"] code { line-height: 1.4; }
    /* bash code blocks with a non-selectable prompt */
    pre.language-bash { padding-left: 32px; }
    pre.language-bash::before {
      content: '$';
      position: absolute; left: 12px; top: 12px; color: var(--muted);
      font-family: inherit; font-size: inherit; line-height: inherit;
      user-select: none; -webkit-user-select: none;
      pointer-events: none;
    }
    /* Install section layout */
    #install { max-width: 520px; margin: var(--section-mt) auto 0; }
    #install pre + pre { margin-top: 28px; }
    #install p { text-align: center; margin-top: var(--section-mt); }

    /* Footer */
    footer { display: none; }

    /* CRT mode removed */

    /* Responsive */
    @media (max-width: 860px) {
      .features { grid-template-columns: 1fr; }
    }
    @media (max-width: 400px) {
      .wrap { padding-left: 8px; padding-right: 8px; }
      pre[class*="language-"] { padding: 10px 12px; }
      pre[class*="language-"] code { font-size: 12px; }
      .mono { font-size: 12px; }
      #install { padding: 8px 0; }
    }
  </style>
</head>
<body>

  <main class="wrap">
    <section class="hero">
      <img src="assets/img/logo.webp" alt="Straight‑to‑Video logo" class="hero-logo" />
      <div class="hero-tag">Be wise, optimize!</div>
      <div class="hero-sub">Skip the server and encode from the comfort of your browser</div>
    </section>

    <form id="try" class="try" data-controller="straight-to-video" novalidate>
      <div>
        <button id="tryBtn" type="button" class="btn primary">Optimize Video</button>
      </div>
      <input id="file" class="hidden" aria-label="Video file" data-straight-to-video-target="fileInput" type="file" accept="video/*" />
      <progress id="p" class="hidden" max="100" value="0" aria-label="Progress"></progress>
      <div id="sizes" class="sizes hidden" aria-live="polite"></div>
      <div>
        <a id="download" class="hidden">Download</a>
      </div>
      <pre id="out" class="mono"></pre>
    </form>

    <section class="features">
      <div class="card">
        <h3>Faster user experience</h3>
        <p>Modern browsers can <a href="https://developer.mozilla.org/en-US/docs/Web/API/WebCodecs_API" target="_blank" rel="noopener">encode video</a> faster than ancient ISPs can upload large files.</p>
      </div>
      <div class="card">
        <h3>Ready to share</h3>
        <p>Videos are optimized to <a href="https://developers.facebook.com/docs/instagram-platform/instagram-graph-api/reference/ig-user/media#video-specifications" target="_blank" rel="noopener">Instagram's specifications</a>—no server-side transcoding!</p>
      </div>
      <div class="card">
        <h3>Stimulus support</h3>
        <p>The bundled <a href="https://stimulus.hotwired.dev/" target="_blank" rel="noopener">Stimulus</a> controller can be wired up in two lines of JavaScript.</p>
      </div>
    </section>

    <section id="install">
      <h2>Install</h2>
      <pre class="language-bash"><code>npm install straight-to-video</code></pre>
      <pre class="language-bash"><code>bundle add straight_to_video</code></pre>
      <p>
        <a class="btn ghost readme" href="https://github.com/searlsco/straight-to-video" target="_blank" rel="noopener">View README</a>
      </p>
    </section>
  </main>

  <script type="module">
    import { Application, Controller } from '@hotwired/stimulus'
    import { registerStraightToVideoController } from 'straight-to-video'

    const app = Application.start()
    registerStraightToVideoController(app, { Controller })

    const form = document.getElementById('try')
    const fileEl = document.getElementById('file')
    const tryBtn = document.getElementById('tryBtn')
    const p = document.getElementById('p')
    const pct = document.getElementById('pct')
    const out = document.getElementById('out')
    const dl = document.getElementById('download')
    const sizes = document.getElementById('sizes')

    // Removed environment badge

    // Single‑button flow: open picker then auto‑submit
    tryBtn.addEventListener('click', () => {
      if (fileEl.showPicker) { try { fileEl.showPicker(); return } catch (_) {} }
      fileEl.click()
    })

    let beforeSize = 0
    fileEl.addEventListener('change', () => {
      if (!(fileEl.files && fileEl.files.length)) return
      beforeSize = fileEl.files[0].size || 0
      sizes.classList.remove('hidden')
      p.classList.remove('hidden')
      pct && (pct.textContent = '0%')
      try { form.requestSubmit() } catch (_) {}
    })

    // Mirror controller events into the UI
    fileEl.addEventListener('straight-to-video:progress', (e) => {
      const v = Math.max(0, Math.min(100, e.detail?.progress ?? 0))
      p.value = v; if (pct) pct.textContent = `${v}%`
    })
    fileEl.addEventListener('straight-to-video:error', (e) => {
      out.textContent = `Error: ${e.detail?.error?.message || e.detail?.error}`
    })
    fileEl.addEventListener('straight-to-video:done', () => {
      const f = fileEl.files?.[0]
      if (!f) return
      const url = URL.createObjectURL(f)
      dl.href = url
      dl.download = f.name || 'video-optimized.mp4'
      dl.textContent = `Download ${dl.download}`
      dl.classList.remove('hidden')
      const afterSize = f.size || 0
      const saved = Math.max(0, beforeSize - afterSize)
      const pctSaved = beforeSize > 0 ? Math.round((saved / beforeSize) * 100) : 0
      const fmt = (b) => (b / (1024 * 1024)).toFixed(2) + ' MB'
      sizes.innerHTML = `Before: <strong>${fmt(beforeSize)}</strong> • After: <strong>${fmt(afterSize)}</strong> • Saved: <strong>${pctSaved}%</strong>`
      out.textContent = ''
      setTimeout(() => dl.click(), 150)
    })

    // Prevent navigation; controller will optimize on submit and we auto-download above
    form.addEventListener('submit', (e) => e.preventDefault())
  </script>
  <script src="https://cdn.jsdelivr.net/npm/prismjs@1.29.0/prism.min.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/prism-json.min.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/prism-javascript.min.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/prism-markup.min.js"></script>
</body>
</html>