@omnimedia/omnitool 1.1.0-4 → 1.1.0-40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +8 -4
- package/s/context.ts +0 -7
- package/s/demo/demo.bundle.ts +35 -5
- package/s/demo/demo.css +5 -0
- package/s/demo/routines/transcode-test.ts +8 -4
- package/s/demo/routines/transitions-test.ts +43 -0
- package/s/demo/routines/waveform-test.ts +3 -2
- package/s/driver/driver.ts +19 -11
- package/s/driver/fns/schematic.ts +46 -23
- package/s/driver/fns/work.ts +114 -102
- package/s/features/transition/parts/fragment.ts +24 -0
- package/s/features/transition/parts/types.ts +94 -0
- package/s/features/transition/parts/uniforms.ts +29 -0
- package/s/features/transition/parts/vertex.ts +31 -0
- package/s/features/transition/transition.ts +60 -0
- package/s/index.html.ts +6 -1
- package/s/timeline/index.ts +1 -0
- package/s/timeline/parts/basics.ts +1 -1
- package/s/timeline/parts/compositor/export.ts +77 -0
- package/s/timeline/parts/compositor/parts/html-tree.ts +37 -0
- package/s/timeline/parts/compositor/parts/schedulers.ts +95 -0
- package/s/timeline/parts/compositor/parts/tree-builder.ts +196 -0
- package/s/timeline/parts/compositor/parts/webcodecs-tree.ts +30 -0
- package/s/timeline/parts/compositor/playback.ts +94 -0
- package/s/timeline/parts/compositor/samplers/html.ts +115 -0
- package/s/timeline/parts/compositor/samplers/webcodecs.ts +61 -0
- package/s/timeline/parts/filmstrip.ts +42 -15
- package/s/timeline/parts/item.ts +48 -6
- package/s/timeline/parts/media.ts +21 -0
- package/s/timeline/parts/waveform.ts +3 -4
- package/s/timeline/sugar/builders.ts +102 -0
- package/s/timeline/sugar/o.ts +163 -38
- package/s/timeline/sugar/omni-test.ts +5 -3
- package/s/timeline/sugar/omni.ts +26 -11
- package/s/timeline/types.ts +29 -0
- package/s/timeline/utils/audio-stream.ts +15 -0
- package/s/timeline/utils/matrix.ts +33 -0
- package/s/timeline/utils/video-cursor.ts +40 -0
- package/x/context.d.ts +1 -4
- package/x/context.js +1 -5
- package/x/context.js.map +1 -1
- package/x/demo/demo.bundle.js +23 -6
- package/x/demo/demo.bundle.js.map +1 -1
- package/x/demo/demo.bundle.min.js +606 -36
- package/x/demo/demo.bundle.min.js.map +4 -4
- package/x/demo/demo.css +5 -0
- package/x/demo/routines/transcode-test.js +8 -4
- package/x/demo/routines/transcode-test.js.map +1 -1
- package/x/demo/routines/transitions-test.d.ts +5 -0
- package/x/demo/routines/transitions-test.js +35 -0
- package/x/demo/routines/transitions-test.js.map +1 -0
- package/x/demo/routines/waveform-test.d.ts +2 -1
- package/x/demo/routines/waveform-test.js +2 -2
- package/x/demo/routines/waveform-test.js.map +1 -1
- package/x/driver/driver.d.ts +4 -6
- package/x/driver/driver.js +17 -10
- package/x/driver/driver.js.map +1 -1
- package/x/driver/driver.worker.bundle.min.js +2537 -148
- package/x/driver/driver.worker.bundle.min.js.map +4 -4
- package/x/driver/fns/host.d.ts +9 -2
- package/x/driver/fns/schematic.d.ts +40 -22
- package/x/driver/fns/work.d.ts +11 -4
- package/x/driver/fns/work.js +107 -101
- package/x/driver/fns/work.js.map +1 -1
- package/x/features/speech/transcribe/worker.bundle.min.js +542 -542
- package/x/features/speech/transcribe/worker.bundle.min.js.map +4 -4
- package/x/features/transition/parts/fragment.d.ts +1 -0
- package/x/features/transition/parts/fragment.js +25 -0
- package/x/features/transition/parts/fragment.js.map +1 -0
- package/x/features/transition/parts/types.d.ts +23 -0
- package/x/features/transition/parts/types.js.map +1 -0
- package/x/features/transition/parts/uniforms.d.ts +31 -0
- package/x/features/transition/parts/uniforms.js +27 -0
- package/x/features/transition/parts/uniforms.js.map +1 -0
- package/x/features/transition/parts/vertex.d.ts +1 -0
- package/x/features/transition/parts/vertex.js +32 -0
- package/x/features/transition/parts/vertex.js.map +1 -0
- package/x/features/transition/transition.d.ts +5 -0
- package/x/features/transition/transition.js +50 -0
- package/x/features/transition/transition.js.map +1 -0
- package/x/index.html +13 -3
- package/x/index.html.js +6 -1
- package/x/index.html.js.map +1 -1
- package/x/timeline/index.d.ts +1 -0
- package/x/timeline/index.js +1 -0
- package/x/timeline/index.js.map +1 -1
- package/x/timeline/parts/basics.d.ts +1 -1
- package/x/timeline/parts/compositor/export.d.ts +11 -0
- package/x/timeline/parts/compositor/export.js +64 -0
- package/x/timeline/parts/compositor/export.js.map +1 -0
- package/x/timeline/parts/compositor/parts/html-tree.d.ts +3 -0
- package/x/timeline/parts/compositor/parts/html-tree.js +40 -0
- package/x/timeline/parts/compositor/parts/html-tree.js.map +1 -0
- package/x/timeline/parts/compositor/parts/schedulers.d.ts +15 -0
- package/x/timeline/parts/compositor/parts/schedulers.js +70 -0
- package/x/timeline/parts/compositor/parts/schedulers.js.map +1 -0
- package/x/timeline/parts/compositor/parts/tree-builder.d.ts +37 -0
- package/x/timeline/parts/compositor/parts/tree-builder.js +160 -0
- package/x/timeline/parts/compositor/parts/tree-builder.js.map +1 -0
- package/x/timeline/parts/compositor/parts/webcodecs-tree.d.ts +3 -0
- package/x/timeline/parts/compositor/parts/webcodecs-tree.js +28 -0
- package/x/timeline/parts/compositor/parts/webcodecs-tree.js.map +1 -0
- package/x/timeline/parts/compositor/playback.d.ts +26 -0
- package/x/timeline/parts/compositor/playback.js +79 -0
- package/x/timeline/parts/compositor/playback.js.map +1 -0
- package/x/timeline/parts/compositor/samplers/html.d.ts +3 -0
- package/x/timeline/parts/compositor/samplers/html.js +106 -0
- package/x/timeline/parts/compositor/samplers/html.js.map +1 -0
- package/x/timeline/parts/compositor/samplers/webcodecs.d.ts +3 -0
- package/x/timeline/parts/compositor/samplers/webcodecs.js +52 -0
- package/x/timeline/parts/compositor/samplers/webcodecs.js.map +1 -0
- package/x/timeline/parts/filmstrip.d.ts +2 -1
- package/x/timeline/parts/filmstrip.js +29 -10
- package/x/timeline/parts/filmstrip.js.map +1 -1
- package/x/timeline/parts/item.d.ts +42 -8
- package/x/timeline/parts/item.js +7 -3
- package/x/timeline/parts/item.js.map +1 -1
- package/x/timeline/parts/media.d.ts +3 -0
- package/x/timeline/parts/media.js +17 -0
- package/x/timeline/parts/media.js.map +1 -1
- package/x/timeline/parts/waveform.d.ts +2 -1
- package/x/timeline/parts/waveform.js +2 -4
- package/x/timeline/parts/waveform.js.map +1 -1
- package/x/timeline/sugar/builders.js +104 -0
- package/x/timeline/sugar/builders.js.map +1 -0
- package/x/timeline/sugar/o.d.ts +27 -5
- package/x/timeline/sugar/o.js +137 -38
- package/x/timeline/sugar/o.js.map +1 -1
- package/x/timeline/sugar/omni-test.js +4 -2
- package/x/timeline/sugar/omni-test.js.map +1 -1
- package/x/timeline/sugar/omni.d.ts +8 -2
- package/x/timeline/sugar/omni.js +22 -9
- package/x/timeline/sugar/omni.js.map +1 -1
- package/x/timeline/types.d.ts +24 -0
- package/x/timeline/types.js +2 -0
- package/x/timeline/types.js.map +1 -0
- package/x/timeline/utils/audio-stream.d.ts +6 -0
- package/x/timeline/utils/audio-stream.js +17 -0
- package/x/timeline/utils/audio-stream.js.map +1 -0
- package/x/timeline/utils/matrix.d.ts +8 -0
- package/x/timeline/utils/matrix.js +26 -0
- package/x/timeline/utils/matrix.js.map +1 -0
- package/x/timeline/utils/video-cursor.d.ts +10 -0
- package/x/timeline/utils/video-cursor.js +36 -0
- package/x/timeline/utils/video-cursor.js.map +1 -0
- package/s/tools/common/loader.ts +0 -26
- package/s/tools/common/transformer-pipeline.ts +0 -26
- package/s/tools/speech-recognition/common/model.ts +0 -26
- package/s/tools/speech-recognition/whisper/fns/host.ts +0 -25
- package/s/tools/speech-recognition/whisper/fns/schematic.ts +0 -23
- package/s/tools/speech-recognition/whisper/fns/work.ts +0 -91
- package/s/tools/speech-recognition/whisper/parts/types.ts +0 -38
- package/s/tools/speech-recognition/whisper/parts/worker.bundle.ts +0 -7
- package/s/tools/speech-recognition/whisper/tool.ts +0 -70
- package/x/tools/common/loader.d.ts +0 -19
- package/x/tools/common/loader.js +0 -18
- package/x/tools/common/loader.js.map +0 -1
- package/x/tools/common/transformer-pipeline.d.ts +0 -8
- package/x/tools/common/transformer-pipeline.js +0 -24
- package/x/tools/common/transformer-pipeline.js.map +0 -1
- package/x/tools/speech-recognition/common/model.d.ts +0 -14
- package/x/tools/speech-recognition/common/model.js +0 -16
- package/x/tools/speech-recognition/common/model.js.map +0 -1
- package/x/tools/speech-recognition/whisper/fns/host.d.ts +0 -13
- package/x/tools/speech-recognition/whisper/fns/host.js +0 -19
- package/x/tools/speech-recognition/whisper/fns/host.js.map +0 -1
- package/x/tools/speech-recognition/whisper/fns/schematic.d.ts +0 -19
- package/x/tools/speech-recognition/whisper/fns/schematic.js +0 -2
- package/x/tools/speech-recognition/whisper/fns/schematic.js.map +0 -1
- package/x/tools/speech-recognition/whisper/fns/work.d.ts +0 -12
- package/x/tools/speech-recognition/whisper/fns/work.js +0 -74
- package/x/tools/speech-recognition/whisper/fns/work.js.map +0 -1
- package/x/tools/speech-recognition/whisper/parts/types.d.ts +0 -31
- package/x/tools/speech-recognition/whisper/parts/types.js.map +0 -1
- package/x/tools/speech-recognition/whisper/parts/worker.bundle.js +0 -4
- package/x/tools/speech-recognition/whisper/parts/worker.bundle.js.map +0 -1
- package/x/tools/speech-recognition/whisper/parts/worker.bundle.min.js +0 -8
- package/x/tools/speech-recognition/whisper/parts/worker.bundle.min.js.map +0 -7
- package/x/tools/speech-recognition/whisper/tool.d.ts +0 -12
- package/x/tools/speech-recognition/whisper/tool.js +0 -63
- package/x/tools/speech-recognition/whisper/tool.js.map +0 -1
- /package/x/{tools/speech-recognition/whisper → features/transition}/parts/types.js +0 -0
- /package/x/{tools/speech-recognition/whisper/parts/worker.bundle.d.ts → timeline/sugar/builders.d.ts} +0 -0
package/s/driver/fns/work.ts
CHANGED

@@ -1,88 +1,86 @@
 import {Comrade} from "@e280/comrade"
-import {autoDetectRenderer, Container, Renderer, Sprite, Text, Texture, DOMAdapter, WebWorkerAdapter} from "pixi.js"
+import {autoDetectRenderer, Container, Renderer, Sprite, Text, Texture, DOMAdapter, WebWorkerAdapter, Matrix} from "pixi.js"
 import {Input, ALL_FORMATS, VideoSampleSink, Output, Mp4OutputFormat, VideoSampleSource, VideoSample, AudioSampleSink, AudioSampleSource, AudioSample, StreamTarget, BlobSource, UrlSource} from "mediabunny"
 
-import {
+import {Mat6, mat6ToMatrix} from "../../timeline/utils/matrix.js"
+import {makeTransition} from "../../features/transition/transition.js"
+import {Composition, DecoderSource, DriverSchematic, Layer} from "./schematic.js"
 
 DOMAdapter.set(WebWorkerAdapter)
 
+const loadSource = async (source: DecoderSource) => {
+  if(source instanceof Blob) {
+    return new BlobSource(source)
+  } else {
+    return new UrlSource(source)
+  }
+}
+
 export const setupDriverWork = (
   Comrade.work<DriverSchematic>(shell => ({
     async hello() {
       await shell.host.world()
     },
 
-    async
-      const
-
-
-
-
+    async decodeAudio({source, audio, start, end}) {
+      const input = new Input({
+        source: await loadSource(source),
+        formats: ALL_FORMATS
+      })
+
+      const audioTrack = await input.getPrimaryAudioTrack()
+      const audioDecodable = await audioTrack?.canDecode()
+      const audioWriter = audio.getWriter()
+
+      if (audioDecodable && audioTrack) {
+        const sink = new AudioSampleSink(audioTrack)
+        for await (const sample of sink.samples(start, end)) {
+          const frame = sample.toAudioData()
+          await audioWriter.write(frame)
+          sample.close()
+          frame.close()
         }
+        await audioWriter.close()
       }
+    },
+
+    async decodeVideo({source, video, start, end}) {
       const input = new Input({
-        source: await loadSource(),
+        source: await loadSource(source),
         formats: ALL_FORMATS
       })
 
-      const
-        input.getPrimaryVideoTrack(),
-        input.getPrimaryAudioTrack()
-      ])
-
+      const videoTrack = await input.getPrimaryVideoTrack()
       const videoDecodable = await videoTrack?.canDecode()
-      const audioDecodable = await audioTrack?.canDecode()
-
       const videoWriter = video.getWriter()
-      const audioWriter = audio.getWriter()
 
-
-
-
-
-
-
-
-
-
-
-          await videoWriter.close()
-        }
-      })(),
-      (async () => {
-        if (audioDecodable && audioTrack) {
-          const sink = new AudioSampleSink(audioTrack)
-          for await (const sample of sink.samples()) {
-            const frame = sample.toAudioData()
-            await audioWriter.write(frame)
-            sample.close()
-            frame.close()
-          }
-          await audioWriter.close()
-        }
-      })()
-      ])
+      if (videoDecodable && videoTrack) {
+        const sink = new VideoSampleSink(videoTrack)
+        for await (const sample of sink.samples(start, end)) {
+          const frame = sample.toVideoFrame()
+          await videoWriter.write(frame)
+          sample.close()
+          frame.close()
+        }
+        await videoWriter.close()
+      }
     },
 
-    async encode({
+    async encode({video, audio, config, bridge}) {
       const output = new Output({
        format: new Mp4OutputFormat(),
        target: new StreamTarget(bridge, {chunked: true})
       })
-      const videoSource = new VideoSampleSource(config.video)
-      output.addVideoTrack(videoSource)
       // since AudioSample is not transferable it fails to transfer encoder bitrate config
       // so it needs to be hardcoded not set through constants eg QUALITY_LOW
-      const audioSource = new AudioSampleSource(config.audio)
-      output.addAudioTrack(audioSource)
 
-
+      const promises = []
 
-
-
-
-
-      (async () => {
+      if(video) {
+        const videoSource = new VideoSampleSource(config.video)
+        output.addVideoTrack(videoSource)
+        const videoReader = video.getReader()
+        promises.push((async () => {
          while (true) {
            const {done, value} = await videoReader.read()
            if (done) break
@@ -90,8 +88,14 @@ export const setupDriverWork = (
            await videoSource.add(sample)
            sample.close()
          }
-      })()
-
+        })())
+      }
+
+      if(audio) {
+        const audioSource = new AudioSampleSource(config.audio)
+        output.addAudioTrack(audioSource)
+        const audioReader = audio.getReader()
+        promises.push((async () => {
          while (true) {
            const {done, value} = await audioReader.read()
            if (done) break
@@ -100,9 +104,11 @@ export const setupDriverWork = (
            sample.close()
            value.close()
          }
-      })()
-
+        })())
+      }
 
+      await output.start()
+      await Promise.all(promises)
       await output.finalize()
     },
 
@@ -110,22 +116,18 @@
       const {stage, renderer} = await renderPIXI(1920, 1080)
       stage.removeChildren()
 
-      const {
+      const {dispose} = await renderLayer(composition, stage)
       renderer.render(stage)
 
       // make sure browser support webgl/webgpu otherwise it might take much longer to construct frame
       // if its very slow on eg edge try chrome
       const frame = new VideoFrame(renderer.canvas, {
-        timestamp:
-        duration:
+        timestamp: 0,
+        duration: 0,
       })
 
-      baseFrame?.close()
       renderer.clear()
-
-      for (const disposable of disposables) {
-        disposable.destroy(true)
-      }
+      dispose()
 
       shell.transfer = [frame]
       return frame
@@ -157,78 +159,88 @@ async function renderPIXI(width: number, height: number) {
   return pixi
 }
 
+const transitions: Map<string, ReturnType<typeof makeTransition>> = new Map()
+
 type RenderableObject = Sprite | Text | Texture
 
 async function renderLayer(
   layer: Layer | Composition,
   parent: Container,
-  disposables: RenderableObject[] = []
 ) {
   if (Array.isArray(layer)) {
-
+    layer.reverse()
+    const disposers: (() => void)[] = []
    for (const child of layer) {
-      const result = await renderLayer(child, parent
-
+      const result = await renderLayer(child, parent)
+      disposers.push(result.dispose)
    }
-    return {
-  }
-
-  if (!isRenderableLayer(layer)) {
-    console.warn('Invalid layer', layer)
-    return {disposables}
+    return {dispose: () => disposers.forEach(d => d())}
   }
 
   switch (layer.kind) {
    case 'text':
-      return renderTextLayer(layer, parent
+      return renderTextLayer(layer, parent)
    case 'image':
-      return renderImageLayer(layer, parent
+      return renderImageLayer(layer, parent)
+    case 'transition':
+      return renderTransitionLayer(layer, parent)
+    case 'gap': {
+      pixi?.renderer.clear()
+      return {dispose: () => {}}
+    }
    default:
      console.warn('Unknown layer kind', (layer as any).kind)
-      return {
+      return {dispose: () => {}}
   }
 }
 
-function isRenderableLayer(layer: any): layer is Layer {
-  return !!layer && typeof layer === 'object' && typeof layer.kind === 'string'
-}
-
 function renderTextLayer(
   layer: Extract<Layer, {kind: 'text'}>,
   parent: Container,
-  disposables: RenderableObject[]
 ) {
   const text = new Text({
    text: layer.content,
-    style:
-      fontFamily: 'sans-serif',
-      fontSize: layer.fontSize ?? 48,
-      fill: layer.color ?? 'white'
-    }
+    style: layer.style
   })
-  applyTransform(text, layer)
+  applyTransform(text, layer.matrix)
   parent.addChild(text)
-
-  return {disposables}
+  return {dispose: () => text.destroy(true)}
 }
 
 function renderImageLayer(
   layer: Extract<Layer, {kind: 'image'}>,
   parent: Container,
-  disposables: RenderableObject[]
 ) {
   const texture = Texture.from(layer.frame)
   const sprite = new Sprite(texture)
-  applyTransform(sprite, layer)
+  applyTransform(sprite, layer.matrix)
+  parent.addChild(sprite)
+  return {dispose: () => {
+    sprite.destroy(true)
+    texture.destroy(true)
+    layer.frame.close()
+  }}
+}
+
+function renderTransitionLayer(
+  {from, to, progress, name}: Extract<Layer, {kind: 'transition'}>,
+  parent: Container,
+) {
+  const transition = transitions.get(name) ??
+    (transitions.set(name, makeTransition({
+      name: "circle",
+      renderer: pixi!.renderer
+    })),
+    transitions.get(name)!
+  )
+  const texture = transition.render({from, to, progress, width: from.displayWidth, height: from.displayHeight})
+  const sprite = new Sprite(texture)
   parent.addChild(sprite)
-
-  return {baseFrame: layer.frame, disposables}
+  return {dispose: () => sprite.destroy(false)}
 }
 
-function applyTransform(target: Sprite | Text,
-
-
-
-  if(t.opacity) target.alpha = t.opacity
-  if(t.anchor && 'anchor' in target) target.anchor.set(t.anchor)
+function applyTransform(target: Sprite | Text, worldMatrix?: Mat6) {
+  if (!worldMatrix) return
+  const mx = mat6ToMatrix(worldMatrix)
+  target.setFromMatrix(mx)
 }
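
For orientation: the reworked decode handlers above take the writable side of a stream from the host and push decoded frames into it. A minimal host-side sketch of that pattern, assuming a `driver` proxy that forwards `decodeVideo` to this worker (the proxy name and shape are assumptions for illustration, not part of this diff):

// minimal host-side sketch; `driver` is an assumed proxy to the worker's decodeVideo
declare const driver: {
  decodeVideo(input: {
    source: Blob | string
    video: WritableStream<VideoFrame>
    start?: number
    end?: number
  }): Promise<void>
}

async function grabFrames(source: Blob, start: number, end: number) {
  const {readable, writable} = new TransformStream<VideoFrame, VideoFrame>()
  const decoding = driver.decodeVideo({source, video: writable, start, end})
  const reader = readable.getReader()
  while (true) {
    const {done, value: frame} = await reader.read()
    if (done || !frame) break
    // ...use the frame (draw, composite, thumbnail)...
    frame.close() // VideoFrames hold decoder/GPU memory, close them promptly
  }
  await decoding
}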
package/s/features/transition/parts/fragment.ts
ADDED

@@ -0,0 +1,24 @@
+export const fragment = (glsl: string) => `
+precision highp float;
+varying vec2 vTextureCoord;
+varying vec2 _uv;
+uniform sampler2D from, to;
+uniform float progress, ratio, _fromR, _toR;
+uniform float customUniform;
+
+vec4 getFromColor(vec2 uv){
+  return texture2D(from, .5+(uv-.5)*vec2(max(ratio/_fromR,1.), max(_fromR/ratio,1.)));
+}
+vec4 getToColor(vec2 uv){
+  return texture2D(to, .5+(uv-.5)*vec2(max(ratio/_toR,1.), max(_toR/ratio,1.)));
+}
+
+// gl-transition code here
+${glsl}
+// gl-transition code end
+
+void main(){
+  vec2 uv = vTextureCoord.xy;
+  gl_FragColor = transition(vTextureCoord);
+}
+`
package/s/features/transition/parts/types.ts
ADDED

@@ -0,0 +1,94 @@
+import {Renderer} from "pixi.js"
+
+export interface TransitionOptions {
+  name: Transition
+  renderer: Renderer
+}
+
+export interface TransitionRendererOptions {
+  from: VideoFrame
+  to: VideoFrame
+  progress: number
+  width: number
+  height: number
+}
+
+export interface GLTransition {
+  author: string
+  createdAt: string
+  glsl: string
+  license: string
+  name: Transition
+  updatedAt: string
+  defaultParams: any
+  paramsTypes: any
+}
+
+export type Transition =
+  | "Bounce"
+  | "BowTieHorizontal"
+  | "BowTieVertical"
+  | "ButterflyWaveScrawler"
+  | "CircleCrop"
+  | "ColourDistance"
+  | "CrazyParametricFun"
+  | "CrossZoom"
+  | "Directional"
+  | "DoomScreenTransition"
+  | "Dreamy"
+  | "DreamyZoom"
+  | "GlitchDisplace"
+  | "GlitchMemories"
+  | "GridFlip"
+  | "InvertedPageCurl"
+  | "LinearBlur"
+  | "Mosaic"
+  | "PolkaDotsCurtain"
+  | "Radial"
+  | "SimpleZoom"
+  | "StereoViewer"
+  | "Swirl"
+  | "WaterDrop"
+  | "ZoomInCircles"
+  | "angular"
+  | "burn"
+  | "cannabisleaf"
+  | "circle"
+  | "circleopen"
+  | "colorphase"
+  | "crosshatch"
+  | "crosswarp"
+  | "cube"
+  | "directionalwarp"
+  | "directionalwipe"
+  | "displacement"
+  | "doorway"
+  | "fade"
+  | "fadecolor"
+  | "fadegrayscale"
+  | "flyeye"
+  | "heart"
+  | "hexagonalize"
+  | "kaleidoscope"
+  | "luma"
+  | "luminance_melt"
+  | "morph"
+  | "multiply_blend"
+  | "perlin"
+  | "pinwheel"
+  | "pixelize"
+  | "polar_function"
+  | "randomsquares"
+  | "ripple"
+  | "rotate_scale_fade"
+  | "squareswire"
+  | "squeeze"
+  | "swap"
+  | "undulatingBurnOut"
+  | "wind"
+  | "windowblinds"
+  | "windowslice"
+  | "wipeDown"
+  | "wipeLeft"
+  | "wipeRight"
+  | "wipeUp"
package/s/features/transition/parts/uniforms.ts
ADDED

@@ -0,0 +1,29 @@
+import {GLTransition} from "./types.js"
+
+export const uniforms = {
+  custom: (transition: GLTransition) => Object.fromEntries(
+    Object.entries(transition.defaultParams).map(([name, value]) => [
+      name,
+      {
+        value,
+        type: getUniformType(transition.paramsTypes[name])
+      }
+    ])
+  ),
+  basics: {
+    _fromR: {value: 1, type: "f32"},
+    _toR: {value: 1, type: "f32"},
+    ratio: {value: 1, type: "f32"},
+    progress: {value: 0, type: "f32"},
+    customUniform: {value: 0, type: "f32"},
+  }
+}
+
+const getUniformType = (type: string) => {
+  if(type === "f32" || type === "i32") {
+    return type
+  } else if(type === "float") {
+    return "f32"
+  }
+  else return `${type}<f32>`
+}
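
To trace `getUniformType` above: gl-transitions declares parameters with GLSL-style type strings, which this module maps onto pixi/WGSL-style uniform types. A small illustrative trace using made-up parameter data (not taken from any real gl-transitions entry; the import specifier is an assumption based on the compiled x/ tree in the file list):

// illustrative only: fabricated params to show what uniforms.custom() produces
import {uniforms} from "@omnimedia/omnitool/x/features/transition/parts/uniforms.js" // specifier assumed

const fake = {
  defaultParams: {smoothness: 0.5, direction: [1, 0]},
  paramsTypes: {smoothness: "float", direction: "vec2"},
} as any

console.log(uniforms.custom(fake))
// -> {
//      smoothness: {value: 0.5, type: "f32"},          // "float" maps to "f32"
//      direction: {value: [1, 0], type: "vec2<f32>"},  // other types get "<f32>" appended
//    }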
package/s/features/transition/parts/vertex.ts
ADDED

@@ -0,0 +1,31 @@
+export const vertex = `
+in vec2 aPosition;
+varying vec2 _uv; // gl-transition
+uniform mat3 projectionMatrix;
+uniform vec4 uInputSize;
+uniform vec4 uOutputFrame;
+out vec2 vTextureCoord;
+uniform vec4 uOutputTexture;
+
+vec4 filterVertexPosition( void )
+{
+  vec2 position = aPosition * uOutputFrame.zw + uOutputFrame.xy;
+
+  position.x = position.x * (2.0 / uOutputTexture.x) - 1.0;
+  position.y = position.y * (2.0*uOutputTexture.z / uOutputTexture.y) - uOutputTexture.z;
+
+  return vec4(position, 0.0, 1.0);
+}
+
+vec2 filterTextureCoord( void )
+{
+  return aPosition * (uOutputFrame.zw * uInputSize.zw);
+}
+
+void main(void)
+{
+  gl_Position = filterVertexPosition();
+  vTextureCoord = filterTextureCoord();
+  _uv = vec2(0.5, 0.5) * (aPosition +vec2(1.0, 1.0)); // gl-transition
+}
+`
package/s/features/transition/transition.ts
ADDED

@@ -0,0 +1,60 @@
+//@ts-ignore
+import transitions from "gl-transitions"
+import {Filter, GlProgram, Sprite, Texture, ImageSource} from "pixi.js"
+
+import {vertex} from "./parts/vertex.js"
+import {uniforms} from "./parts/uniforms.js"
+import {fragment} from "./parts/fragment.js"
+import {GLTransition, TransitionOptions, TransitionRendererOptions} from "./parts/types.js"
+
+export function makeTransition({name, renderer}: TransitionOptions) {
+  const transition = transitions.find((t: GLTransition) => t.name === name) as GLTransition
+  const transitionSprite = new Sprite()
+  const transitionTexture = new Texture()
+  const sourceFrom = new ImageSource({})
+  const sourceTo = new ImageSource({})
+
+  const filter = new Filter({
+    glProgram: new GlProgram({
+      vertex,
+      fragment: fragment(transition.glsl),
+    }),
+    resources: {
+      from: sourceFrom,
+      to: sourceTo,
+      uniforms: {
+        ...uniforms.basics,
+        ...uniforms.custom(transition)
+      }
+    }
+  })
+
+  transitionSprite.filters = [filter]
+
+  return {
+    render({width, height, from, to, progress}: TransitionRendererOptions) {
+      if(transitionSprite.width !== width || transitionSprite.height !== height) {
+        transitionSprite.setSize({width, height})
+        transitionTexture.source.resize(width, height)
+      }
+
+      sourceFrom.resource = from
+      sourceTo.resource = to
+      sourceFrom.update()
+      sourceTo.update()
+
+      filter.resources.uniforms.uniforms.progress = progress
+
+      renderer.render({
+        container: transitionSprite,
+        target: transitionTexture,
+        clear: false,
+        width,
+        height
+      })
+
+      return transitionTexture
+    }
+  }
+}
+
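
Putting the pieces together, `makeTransition` compiles a gl-transitions fragment into a pixi `Filter` and returns a `render()` that blends two `VideoFrame`s into a reusable texture. A rough usage sketch, assuming a pixi `Renderer` and two decoded frames come from elsewhere (the import specifier is an assumption based on the compiled x/ tree in the file list):

import {Renderer} from "pixi.js"
import {makeTransition} from "@omnimedia/omnitool/x/features/transition/transition.js" // specifier assumed

declare const renderer: Renderer   // assumed: an already-created pixi renderer
declare const frameA: VideoFrame   // assumed: decoded frames to blend
declare const frameB: VideoFrame

const fade = makeTransition({name: "fade", renderer})
const texture = fade.render({
  from: frameA,
  to: frameB,
  progress: 0.5, // 0 = all frameA, 1 = all frameB
  width: frameA.displayWidth,
  height: frameA.displayHeight,
})
// the returned pixi texture can be wrapped in a Sprite and composited,
// which is what renderTransitionLayer does in driver/fns/work.ts above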
package/s/index.html.ts
CHANGED

@@ -30,7 +30,7 @@ export default ssg.page(import.meta.url, async orb => ({
     <section>
       <h1>Omnitool <small>v${orb.packageVersion()}</small></h1>
       <button class=fetch>fetch</button>
-      <
+      <input type="file" class="file-input">
       <div class=results></div>
       <div class=filmstrip-demo>
         <label for="viewable-range">viewable range:</label>
@@ -47,6 +47,11 @@ export default ssg.page(import.meta.url, async orb => ({
         <label for="width">width:</label>
         <input class="width" id="width" name="width" type="range" min="100" max="1000000" value="1000" />
       </div>
+      <div class=player>
+        <input class="seek" type="number" min="0">
+        <button class=play>play</button>
+        <button class=stop>stop</button>
+      </div>
     </section>
   `,
 }))
package/s/timeline/index.ts
CHANGED

@@ -5,6 +5,7 @@ export * from "./parts/media.js"
 export * from "./parts/resource-pool.js"
 export * from "./parts/resource.js"
 export * from "./parts/filmstrip.js"
+export * from "./parts/compositor/playback.js"
 
 export * from "./sugar/o.js"
 export * from "./sugar/omni.js"
package/s/timeline/parts/compositor/export.ts
ADDED

@@ -0,0 +1,77 @@
+import {TimelineFile} from "../basics.js"
+import {fixedStep} from "./parts/schedulers.js"
+import {Driver} from "../../../driver/driver.js"
+import {makeWebCodecsSampler} from "./samplers/webcodecs.js"
+import {DecoderSource} from "../../../driver/fns/schematic.js"
+import {buildWebCodecsNodeTree} from "./parts/webcodecs-tree.js"
+
+export class Export {
+  #sampler
+  constructor(
+    private driver: Driver,
+    private framerate = 30,
+    private resolveMedia: (hash: string) => DecoderSource = _hash => "/assets/temp/gl.mp4"
+  ) {
+    this.#sampler = makeWebCodecsSampler(this.driver, this.resolveMedia)
+  }
+
+  async #build(timeline: TimelineFile) {
+    const rootItem = new Map(timeline.items.map(i => [i.id, i])).get(timeline.rootId)!
+    const items = new Map(timeline.items.map(i => [i.id, i]))
+    return await buildWebCodecsNodeTree(rootItem, items, this.#sampler)
+  }
+
+  async render(timeline: TimelineFile) {
+    const root = await this.#build(timeline)
+
+    const videoStream = new TransformStream<VideoFrame, VideoFrame>()
+    const audioStream = new TransformStream<AudioData, AudioData>()
+
+    const encodePromise = this.driver.encode({
+      video: videoStream.readable,
+      audio: audioStream.readable,
+      config: {
+        audio: {codec: "opus", bitrate: 128000},
+        video: {codec: "vp9", bitrate: 1000000},
+      },
+    })
+
+    const videoWriter = videoStream.writable.getWriter()
+    const audioWriter = audioStream.writable.getWriter()
+
+    const audioPromise = (async () => {
+      if (root.audio) {
+        for await (const chunk of root.audio.getStream()) {
+          await audioWriter.write(chunk)
+        }
+      }
+      await audioWriter.close()
+    })()
+
+    const videoPromise = (async () => {
+      let i = 0
+      const dt = 1 / this.framerate
+
+      await fixedStep(
+        {fps: this.framerate, duration: root.duration ?? 0},
+        async t => {
+          const layers = await root.visuals?.sampleAt(t) ?? []
+          const composed = await this.driver.composite(layers)
+          const vf = new VideoFrame(composed, {
+            timestamp: Math.round(i * dt * 1_000_000),
+            duration: Math.round(dt * 1_000_000),
+          })
+          await videoWriter.write(vf)
+          composed.close()
+          i++
+        }
+      )
+      await videoWriter.close()
+    })()
+
+    await audioPromise
+    await videoPromise
+    await encodePromise
+    // this.#sampler.dispose()
+  }
+}
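
Finally, `Export` drives the whole compositor pipeline: it builds a WebCodecs node tree from a `TimelineFile`, samples visuals at a fixed framerate, composites each frame via the driver, and feeds both streams into `driver.encode()`. A rough usage sketch, assuming an already-initialized `Driver` and a timeline from elsewhere (the import specifiers and the `mediaByHash` lookup are assumptions, not part of this diff):

// all import specifiers below are assumptions based on the compiled x/ tree in the file list
import {Export} from "@omnimedia/omnitool/x/timeline/parts/compositor/export.js"
import type {Driver} from "@omnimedia/omnitool/x/driver/driver.js"
import type {TimelineFile} from "@omnimedia/omnitool/x/timeline/parts/basics.js"

declare const driver: Driver                  // assumed: an already-initialized driver
declare const timeline: TimelineFile          // assumed: a timeline built or loaded elsewhere
declare const mediaByHash: Map<string, Blob>  // hypothetical hash -> media lookup

// 30 fps export; unknown hashes fall back to the same default path the class uses
const exporter = new Export(driver, 30, hash =>
  mediaByHash.get(hash) ?? "/assets/temp/gl.mp4")

await exporter.render(timeline)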