@omnimedia/omnitool 1.1.0-5 → 1.1.0-7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -2
- package/s/demo/demo.css +5 -0
- package/s/demo/routines/transcode-test.ts +4 -2
- package/s/demo/routines/transitions-test.ts +2 -2
- package/s/driver/driver.ts +17 -9
- package/s/driver/fns/schematic.ts +44 -21
- package/s/driver/fns/work.ts +112 -97
- package/s/index.html.ts +6 -1
- package/s/timeline/parts/basics.ts +1 -1
- package/s/timeline/parts/compositor/export.ts +77 -0
- package/s/timeline/parts/compositor/parts/html-tree.ts +37 -0
- package/s/timeline/parts/compositor/parts/schedulers.ts +85 -0
- package/s/timeline/parts/compositor/parts/tree-builder.ts +184 -0
- package/s/timeline/parts/compositor/parts/webcodecs-tree.ts +30 -0
- package/s/timeline/parts/compositor/playback.ts +81 -0
- package/s/timeline/parts/compositor/samplers/html.ts +115 -0
- package/s/timeline/parts/compositor/samplers/webcodecs.ts +60 -0
- package/s/timeline/parts/item.ts +38 -6
- package/s/timeline/parts/media.ts +21 -0
- package/s/timeline/parts/waveform.ts +1 -1
- package/s/timeline/sugar/builders.ts +102 -0
- package/s/timeline/sugar/o.ts +75 -16
- package/s/timeline/sugar/omni-test.ts +2 -2
- package/s/timeline/sugar/omni.ts +14 -11
- package/s/timeline/timeline.ts +22 -0
- package/s/timeline/types.ts +29 -0
- package/s/timeline/utils/audio-stream.ts +15 -0
- package/s/timeline/utils/matrix.ts +33 -0
- package/s/timeline/utils/video-cursor.ts +40 -0
- package/x/demo/demo.bundle.min.js +39 -37
- package/x/demo/demo.bundle.min.js.map +4 -4
- package/x/demo/demo.css +5 -0
- package/x/demo/routines/transcode-test.js +4 -2
- package/x/demo/routines/transcode-test.js.map +1 -1
- package/x/demo/routines/transitions-test.js +2 -2
- package/x/demo/routines/transitions-test.js.map +1 -1
- package/x/driver/driver.d.ts +3 -5
- package/x/driver/driver.js +16 -9
- package/x/driver/driver.js.map +1 -1
- package/x/driver/driver.worker.bundle.min.js +2537 -148
- package/x/driver/driver.worker.bundle.min.js.map +4 -4
- package/x/driver/fns/host.d.ts +9 -2
- package/x/driver/fns/schematic.d.ts +38 -20
- package/x/driver/fns/work.d.ts +11 -4
- package/x/driver/fns/work.js +105 -96
- package/x/driver/fns/work.js.map +1 -1
- package/x/features/speech/transcribe/worker.bundle.min.js +541 -541
- package/x/features/speech/transcribe/worker.bundle.min.js.map +4 -4
- package/x/index.html +13 -3
- package/x/index.html.js +6 -1
- package/x/index.html.js.map +1 -1
- package/x/timeline/parts/basics.d.ts +1 -1
- package/x/timeline/parts/compositor/export.d.ts +9 -0
- package/x/timeline/parts/compositor/export.js +64 -0
- package/x/timeline/parts/compositor/export.js.map +1 -0
- package/x/timeline/parts/compositor/parts/html-tree.d.ts +3 -0
- package/x/timeline/parts/compositor/parts/html-tree.js +40 -0
- package/x/timeline/parts/compositor/parts/html-tree.js.map +1 -0
- package/x/timeline/parts/compositor/parts/schedulers.d.ts +15 -0
- package/x/timeline/parts/compositor/parts/schedulers.js +64 -0
- package/x/timeline/parts/compositor/parts/schedulers.js.map +1 -0
- package/x/timeline/parts/compositor/parts/tree-builder.d.ts +37 -0
- package/x/timeline/parts/compositor/parts/tree-builder.js +147 -0
- package/x/timeline/parts/compositor/parts/tree-builder.js.map +1 -0
- package/x/timeline/parts/compositor/parts/webcodecs-tree.d.ts +3 -0
- package/x/timeline/parts/compositor/parts/webcodecs-tree.js +28 -0
- package/x/timeline/parts/compositor/parts/webcodecs-tree.js.map +1 -0
- package/x/timeline/parts/compositor/playback.d.ts +19 -0
- package/x/timeline/parts/compositor/playback.js +71 -0
- package/x/timeline/parts/compositor/playback.js.map +1 -0
- package/x/timeline/parts/compositor/samplers/html.d.ts +3 -0
- package/x/timeline/parts/compositor/samplers/html.js +106 -0
- package/x/timeline/parts/compositor/samplers/html.js.map +1 -0
- package/x/timeline/parts/compositor/samplers/webcodecs.d.ts +2 -0
- package/x/timeline/parts/compositor/samplers/webcodecs.js +55 -0
- package/x/timeline/parts/compositor/samplers/webcodecs.js.map +1 -0
- package/x/timeline/parts/item.d.ts +34 -8
- package/x/timeline/parts/item.js +6 -3
- package/x/timeline/parts/item.js.map +1 -1
- package/x/timeline/parts/media.d.ts +3 -0
- package/x/timeline/parts/media.js +17 -0
- package/x/timeline/parts/media.js.map +1 -1
- package/x/timeline/parts/waveform.js +1 -1
- package/x/timeline/parts/waveform.js.map +1 -1
- package/x/timeline/sugar/builders.d.ts +96 -0
- package/x/timeline/sugar/builders.js +108 -0
- package/x/timeline/sugar/builders.js.map +1 -0
- package/x/timeline/sugar/o.d.ts +21 -8
- package/x/timeline/sugar/o.js +63 -14
- package/x/timeline/sugar/o.js.map +1 -1
- package/x/timeline/sugar/omni-test.js +1 -1
- package/x/timeline/sugar/omni-test.js.map +1 -1
- package/x/timeline/sugar/omni.d.ts +7 -3
- package/x/timeline/sugar/omni.js +9 -8
- package/x/timeline/sugar/omni.js.map +1 -1
- package/x/timeline/timeline.d.ts +9 -0
- package/x/timeline/timeline.js +22 -0
- package/x/timeline/timeline.js.map +1 -0
- package/x/timeline/types.d.ts +24 -0
- package/x/timeline/types.js +2 -0
- package/x/timeline/types.js.map +1 -0
- package/x/timeline/utils/audio-stream.d.ts +6 -0
- package/x/timeline/utils/audio-stream.js +17 -0
- package/x/timeline/utils/audio-stream.js.map +1 -0
- package/x/timeline/utils/matrix.d.ts +8 -0
- package/x/timeline/utils/matrix.js +26 -0
- package/x/timeline/utils/matrix.js.map +1 -0
- package/x/timeline/utils/video-cursor.d.ts +10 -0
- package/x/timeline/utils/video-cursor.js +36 -0
- package/x/timeline/utils/video-cursor.js.map +1 -0
- package/x/tools/speech-recognition/whisper/parts/worker.bundle.min.js +6 -6
- package/x/tools/speech-recognition/whisper/parts/worker.bundle.min.js.map +4 -4

package/s/timeline/parts/compositor/parts/html-tree.ts
ADDED
@@ -0,0 +1,37 @@
+import {Item} from "../../item.js"
+import {AudioPlaybackComponent, HTMLSampler, Node, TreeBuilder} from "./tree-builder.js"
+
+class HTMLNodeBuilder extends TreeBuilder<AudioPlaybackComponent> {
+	constructor(protected items: Map<number, Item.Any>, protected sampler: HTMLSampler) {
+		super(items, sampler)
+	}
+
+	composeAudio_Stack(children: Node<AudioPlaybackComponent>[]) {
+		return {
+			onTimeUpdate: (time: number) => {
+				for (const child of children) {
+					if (child.audio) child.audio.onTimeUpdate(time)
+				}
+			}
+		}
+	}
+	composeAudio_Sequence(children: Node<AudioPlaybackComponent>[]) {
+		return {
+			onTimeUpdate: (time: number) => {
+				let localTime = time
+				for (const child of children) {
+					if (localTime < child.duration) {
+						if (child.audio) child.audio.onTimeUpdate(localTime)
+						break
+					}
+					localTime -= child.duration
+				}
+			}
+		}
+	}
+}
+
+export function buildHTMLNodeTree(root: Item.Any, items: Map<number, Item.Any>, sampler: HTMLSampler) {
+	const builder = new HTMLNodeBuilder(items, sampler)
+	return builder.build(root)
+}
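
Note: the two hooks above differ only in how they route time — composeAudio_Stack broadcasts the same timeline time to every child, while composeAudio_Sequence translates it into the active child's local time. A minimal sketch of that local-time walk; the helper name and numbers are illustrative, not part of the package:

// Same walk as composeAudio_Sequence, extracted for illustration.
function locateInSequence(durations: number[], time: number) {
	let localTime = time
	for (let index = 0; index < durations.length; index++) {
		if (localTime < durations[index])
			return {index, localTime}
		localTime -= durations[index]
	}
	return undefined // past the end of the sequence
}

locateInSequence([2, 3], 3.5) // => {index: 1, localTime: 1.5}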

package/s/timeline/parts/compositor/parts/schedulers.ts
ADDED
@@ -0,0 +1,85 @@
+export type RealtimeController = {
+	play(): void
+	pause(): void
+	seek(t: number): void
+	dispose(): void
+	setFPS(v: number): void
+	isPlaying(): boolean
+}
+
+export const realtime = (onTick: (t: number) => void): RealtimeController => {
+	let playing = false
+	let rafId: number | null = null
+	let fps = 60
+
+	let frameDuration = 1000 / fps
+	let currentTimeS = 0
+	let lastTime = 0
+	let accumulator = 0
+
+	const tick = (now: number) => {
+		if (!playing) return
+
+		const deltaTime = now - lastTime
+		lastTime = now
+
+		accumulator += deltaTime
+
+		while (accumulator >= frameDuration) {
+			onTick(currentTimeS)
+			currentTimeS += frameDuration / 1000
+			accumulator -= frameDuration
+		}
+
+		rafId = requestAnimationFrame(tick)
+	}
+
+	return {
+		play() {
+			if (playing) return
+			playing = true
+			lastTime = performance.now()
+			rafId = requestAnimationFrame(tick)
+		},
+		pause() {
+			if (!playing) return
+			playing = false
+			if (rafId !== null) cancelAnimationFrame(rafId)
+			rafId = null
+		},
+		seek(t) {
+			currentTimeS = Math.max(0, t)
+			accumulator = 0
+		},
+		dispose() {
+			this.pause()
+		},
+		isPlaying() {
+			return playing
+		},
+		setFPS(v) {
+			fps = v
+			frameDuration = 1000 / fps
+		}
+	}
+}
+
+export type FixedStepOptions = {
+	fps: number
+	duration: number
+	abort?: AbortSignal
+}
+
+export const fixedStep = async (
+	opts: FixedStepOptions,
+	onFrame: (t: number, index: number) => Promise<void> | void
+) => {
+	const dt = 1 / opts.fps
+	const total = Math.ceil(opts.duration * opts.fps)
+
+	for (let i = 0; i < total; i++) {
+		if (opts.abort?.aborted) break
+		const t = i * dt
+		await onFrame(t, i)
+	}
+}
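
Note: schedulers.ts covers both playback modes — realtime drives preview from requestAnimationFrame with an accumulator so onTick fires at most fps times per second, while fixedStep walks every frame of a known duration deterministically (useful for export). A minimal usage sketch; the import path assumes the compositor directory and renderFrame is a hypothetical callback:

import {realtime, fixedStep} from "./parts/schedulers.js"

// Hypothetical stand-in for whatever renders a frame at time t (seconds).
declare function renderFrame(t: number): Promise<void>

// Preview: tick while playing, at most 30 times per second.
const controller = realtime(t => void renderFrame(t))
controller.setFPS(30)
controller.play()

// Export: visit every frame of a 10-second composition exactly once.
const abort = new AbortController()
await fixedStep({fps: 30, duration: 10, abort: abort.signal}, async (t, index) => {
	await renderFrame(t)
	console.log(`rendered frame ${index} at ${t.toFixed(3)}s`)
})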

package/s/timeline/parts/compositor/parts/tree-builder.ts
ADDED
@@ -0,0 +1,184 @@
+import {Item, Kind} from "../../item.js"
+import {ImageLayer, Layer} from "../../../../driver/fns/schematic.js"
+import {I6, Mat6, mul6, transformToMat6} from "../../../utils/matrix.js"
+
+export type AudioStreamComponent = {
+	getStream: () => AsyncGenerator<AudioData>
+}
+export type AudioPlaybackComponent = {
+	onTimeUpdate: (time: number) => void
+}
+export type VisualComponent = {
+	sampleAt: (time: number) => Promise<Layer[]>
+}
+
+export type Node<T> = {
+	duration: number
+	visuals?: VisualComponent
+	audio?: T
+}
+
+interface Sampler<T> {
+	video(item: Item.Video, parentMatrix: Mat6): Promise<Node<T>>
+	audio(item: Item.Audio): Promise<Node<T>>
+	dispose(): Promise<void>
+}
+
+const requireItem = (items: Map<number, Item.Any>, id: number) => items.get(id)!
+export const getWorldMat6 = (
+	items: Map<number, Item.Any>,
+	item: Item.Text | Item.Sequence | Item.Stack | Item.Video,
+	parent?: Mat6
+): Mat6 => {
+	let world = parent ?? I6
+	if (item.spatialId) {
+		const spatial = requireItem(items, item.spatialId) as Item.Spatial
+		const local = transformToMat6(spatial.transform)
+		world = mul6(local, world)
+	}
+	return world
+}
+
+export type WebcodecsSampler = Sampler<AudioStreamComponent>
+export interface HTMLSampler extends Sampler<AudioPlaybackComponent> {
+	setPaused(v: boolean): void
+}
+
+export abstract class TreeBuilder<T> {
+	constructor(protected items: Map<number, Item.Any>, protected sampler: Sampler<T>) {}
+
+	async build(root: Item.Any, parentMatrix?: Mat6): Promise<Node<T>> {
+		switch (root.kind) {
+			case Kind.Video: return this.sampler.video(root, getWorldMat6(this.items, root, parentMatrix))
+			case Kind.Audio: return this.sampler.audio(root)
+			case Kind.Text: {
+				const matrix = getWorldMat6(this.items, root, parentMatrix)
+				return {
+					duration: Infinity,
+					visuals: {
+						sampleAt: async () => [{kind: "text", content: root.content, color: "white", fontSize: 48, matrix}]
+					}
+				}
+			}
+			case Kind.Gap: return {
+				duration: root.duration,
+				visuals: {
+					sampleAt: async () => []
+				}
+			}
+			case Kind.Stack: {
+				const matrix = getWorldMat6(this.items, root, parentMatrix)
+				const children = await Promise.all(root.childrenIds.map(id => this.build(requireItem(this.items, id), matrix)))
+				return this.#composeStack(children)
+			}
+			case Kind.Sequence: {
+				const matrix = getWorldMat6(this.items, root, parentMatrix)
+				return this.#composeSequence(root, matrix)
+			}
+			default: return {duration: 0}
+		}
+	}
+
+	abstract composeAudio_Stack(children: Node<T>[]): T | undefined
+	abstract composeAudio_Sequence(children: Node<T>[]): T | undefined
+
+	// Visual composition is the same for both builders, so it lives here.
+	#composeVisuals_Stack(children: Node<T>[]): VisualComponent {
+		return {
+			sampleAt: async (time) => {
+				const layers = await Promise.all(children.map(c => c.visuals ? c.visuals.sampleAt(time) : Promise.resolve([])))
+				return layers.flat()
+			}
+		}
+	}
+
+	#composeVisuals_Sequence(children: Node<T>[]): VisualComponent {
+		return {
+			sampleAt: async (time) => {
+				let localTime = time
+				for (const child of children) {
+					if (localTime < child.duration) return child.visuals ? child.visuals.sampleAt(localTime) : []
+					localTime -= child.duration
+				}
+				return []
+			}
+		}
+	}
+
+	#composeStack(children: Node<T>[]): Node<T> {
+		const duration = Math.max(0, ...children.map(k => (Number.isFinite(k.duration) ? k.duration : 0)))
+		return {
+			duration,
+			visuals: this.#composeVisuals_Stack(children),
+			audio: this.composeAudio_Stack(children),
+		}
+	}
+
+	async #composeSequence(sequence: Item.Sequence, parentMatrix?: Mat6): Promise<Node<T>> {
+		const childItems = sequence.childrenIds.map(id => requireItem(this.items, id))
+		const children = await this.#processChildren(childItems, parentMatrix)
+		const duration = children.reduce((a, k) => a + k.duration, 0)
+		return {
+			duration,
+			visuals: this.#composeVisuals_Sequence(children),
+			audio: this.composeAudio_Sequence(children),
+		}
+	}
+
+	async #processChildren(childItems: Item.Any[], parentMatrix?: Mat6): Promise<Node<T>[]> {
+		const processedNodes: Node<T>[] = []
+		for (let i = 0; i < childItems.length; i++) {
+			const item = childItems[i]
+
+			if (item.kind !== Kind.Transition) {
+				processedNodes.push(await this.build(item, parentMatrix))
+				continue
+			}
+
+			const outgoingNode = processedNodes.pop()
+			const incomingItem = childItems[i + 1]
+
+			if (!outgoingNode || !incomingItem || incomingItem.kind === Kind.Transition) {
+				if (outgoingNode) processedNodes.push(outgoingNode)
+				continue
+			}
+
+			const incomingNode = await this.build(incomingItem, parentMatrix)
+			const transitionNode = await this.#createTransitionNode(item, outgoingNode, incomingNode)
+			processedNodes.push(transitionNode)
+			i++
+		}
+		return processedNodes
+	}
+
+	async #createTransitionNode(transitionItem: Item.Transition, outgoingNode: Node<T>, incomingNode: Node<T>): Promise<Node<T>> {
+		const overlap = Math.max(0, Math.min(transitionItem.duration, outgoingNode.duration, incomingNode.duration))
+		const start = Math.max(0, outgoingNode.duration - overlap)
+		const combinedDuration = outgoingNode.duration + incomingNode.duration - overlap
+		return {
+			duration: combinedDuration,
+			visuals: {
+				sampleAt: async (t) => {
+					if (!outgoingNode.visuals || !incomingNode.visuals) return []
+					if (t < start) return outgoingNode.visuals.sampleAt(t)
+					if (t < outgoingNode.duration) {
+						const localTime = t - start
+						const progress = overlap > 0 ? (localTime / overlap) : 1
+						const from = await outgoingNode.visuals.sampleAt(t) as ImageLayer[]
+						const to = await incomingNode.visuals.sampleAt(localTime) as ImageLayer[]
+						if(!from[0]?.frame || !to[0]?.frame) return []
+						return [{
+							kind: "transition",
+							name: "circle",
+							progress,
+							from: from[0].frame,
+							to: to[0].frame,
+						}]
+					}
+					return incomingNode.visuals.sampleAt(t - outgoingNode.duration + overlap)
+				}
+			},
+			audio: this.composeAudio_Sequence([outgoingNode, incomingNode])
+		}
+	}
+}
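
Note: for reference, the arithmetic in #createTransitionNode with concrete numbers — a 2s outgoing clip, a 3s incoming clip and a 1s transition give the values below.

// Worked example (made-up durations, in seconds):
const outgoing = 2, incoming = 3, transition = 1
const overlap = Math.max(0, Math.min(transition, outgoing, incoming)) // 1
const start = Math.max(0, outgoing - overlap)                         // 1
const combinedDuration = outgoing + incoming - overlap                // 4
// t < 1       -> only the outgoing clip is sampled
// 1 <= t < 2  -> both are sampled and blended, progress = (t - start) / overlap
// t >= 2      -> the incoming clip is sampled at t - outgoing + overlap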

package/s/timeline/parts/compositor/parts/webcodecs-tree.ts
ADDED
@@ -0,0 +1,30 @@
+import {Item} from "../../item.js"
+import {AudioStreamComponent, Node, WebcodecsSampler, TreeBuilder} from "./tree-builder.js"
+
+class WebCodecsNodeBuilder extends TreeBuilder<AudioStreamComponent> {
+	composeAudio_Stack(children: Node<AudioStreamComponent>[]) {
+		return {
+			getStream: async function*() {
+				for (const child of children) {
+					if (child.audio)
+						yield* child.audio.getStream()
+				}
+			}
+		}
+	}
+	composeAudio_Sequence(children: Node<AudioStreamComponent>[]) {
+		return {
+			getStream: async function*() {
+				for (const child of children) {
+					if (child.audio)
+						yield* child.audio.getStream()
+				}
+			}
+		}
+	}
+}
+
+export function buildWebCodecsNodeTree(root: Item.Any, items: Map<number, Item.Any>, sampler: WebcodecsSampler) {
+	const builder = new WebCodecsNodeBuilder(items, sampler)
+	return builder.build(root)
+}
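
Note: both hooks above concatenate the children's audio streams in document order. A sketch of draining such a composed stream, assuming root came from buildWebCodecsNodeTree and that each AudioData chunk should be closed once consumed:

if (root.audio) {
	for await (const audioData of root.audio.getStream()) {
		// ...encode or mix the chunk here...
		audioData.close()
	}
}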

package/s/timeline/parts/compositor/playback.ts
ADDED
@@ -0,0 +1,81 @@
+import {TimelineFile} from "../basics.js"
+import {context} from "../../../context.js"
+import {realtime} from "./parts/schedulers.js"
+import {makeHtmlSampler} from "./samplers/html.js"
+import {buildHTMLNodeTree} from "./parts/html-tree.js"
+import {DecoderSource} from "../../../driver/fns/schematic.js"
+import {AudioPlaybackComponent, HTMLSampler, Node} from "./parts/tree-builder.js"
+
+type ResolveMedia = (hash: string) => DecoderSource
+
+export class VideoPlayer {
+	#controller = realtime(t => this.#tick(t))
+
+	constructor(
+		public canvas: HTMLCanvasElement,
+		private root: Node<AudioPlaybackComponent>,
+		private sampler: HTMLSampler,
+		private resolveMedia: ResolveMedia = _hash => "/assets/temp/gl.mp4"
+	) {
+		this.#controller.setFPS(30)
+	}
+
+	get context() {
+		return this.canvas.getContext("2d")!
+	}
+
+	static async create(timeline: TimelineFile) {
+		const rootItem = new Map(timeline.items.map(i => [i.id, i])).get(timeline.rootId)!
+		const items = new Map(timeline.items.map(i => [i.id, i]))
+		const sampler = makeHtmlSampler(() => "/assets/temp/gl.mp4")
+		const root = await buildHTMLNodeTree(rootItem, items, sampler)
+		const canvas = document.createElement("canvas")
+		canvas.width = 1920
+		canvas.height = 1080
+		return new this(canvas, root, sampler)
+	}
+
+	async #tick(t: number) {
+		const driver = await context.driver
+		const dur = this.root.duration
+		const tt = t > dur ? dur : t
+		this.root.audio?.onTimeUpdate(tt)
+		for (const layer of await this.root.visuals?.sampleAt(tt) ?? []) {
+			const frame = await driver.composite(layer)
+			this.context.drawImage(frame, 0, 0)
+			frame.close()
+		}
+		if (t >= dur) this.pause()
+	}
+
+	async play() {
+		if (!this.#controller.isPlaying()) {
+			this.sampler.setPaused!(false)
+			this.#controller.play()
+		}
+	}
+
+	pause() {
+		if(this.#controller.isPlaying()) {
+			this.#controller.pause()
+			this.sampler.setPaused!(true)
+		}
+	}
+
+	async seek(time: number) {
+		const driver = await context.driver
+		this.pause()
+		this.#controller.seek(time)
+		this.root.audio?.onTimeUpdate(time)
+		for (const draw of await this.root.visuals?.sampleAt(time) ?? []) {
+			const frame = await driver.composite(draw)
+			this.context.drawImage(frame, 0, 0)
+			frame.close()
+		}
+	}
+
+	setFPS(value: number) {
+		this.#controller.setFPS(value)
+	}
+}
+
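
Note: a minimal usage sketch of the player, assuming timeline is a loaded TimelineFile and that the caller attaches the canvas itself:

const player = await VideoPlayer.create(timeline)
document.body.appendChild(player.canvas)

await player.play()    // starts the realtime controller and unpauses the HTML sampler
await player.seek(2.5) // pauses, then renders the frame at 2.5s
player.setFPS(60)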

package/s/timeline/parts/compositor/samplers/html.ts
ADDED
@@ -0,0 +1,115 @@
+import {Item} from "../../item.js"
+import {HTMLSampler} from "../parts/tree-builder.js"
+import {DecoderSource} from "../../../../driver/fns/schematic.js"
+
+const toUrl = (src: DecoderSource) => (src instanceof Blob ? URL.createObjectURL(src) : String(src))
+
+export function makeHtmlSampler(resolveMedia: (hash: string) => DecoderSource): HTMLSampler {
+	const videoElements = new Map<number, HTMLVideoElement>()
+	const audioElements = new Map<number, HTMLAudioElement>()
+
+	function getOrCreateVideoElement(clip: Item.Video) {
+		let video = videoElements.get(clip.id)
+		if (!video) {
+			video = document.createElement("video")
+			video.playsInline = true
+			video.muted = true
+			video.preload = "auto"
+			video.crossOrigin = "anonymous"
+			video.src = toUrl(resolveMedia(clip.mediaHash))
+			videoElements.set(clip.id, video)
+		}
+		return video
+	}
+
+	function getOrCreateAudioElement(clip: Item.Audio) {
+		let audio = audioElements.get(clip.id)
+		if (!audio) {
+			audio = document.createElement("audio")
+			audio.preload = "auto"
+			audio.crossOrigin = "anonymous"
+			audio.src = toUrl(resolveMedia(clip.mediaHash))
+			audio.volume = 0.2
+			audioElements.set(clip.id, audio)
+		}
+		return audio
+	}
+
+	let paused = true
+
+	return {
+		async video(item, matrix) {
+			const video = getOrCreateVideoElement(item)
+			return {
+				duration: item.duration,
+				// if paused seek otherwise play
+				visuals: {
+					sampleAt: async (t) => {
+						if (t < 0 || t >= item.duration)
+							return []
+
+						if(video.paused && paused) {
+							await seek(video, t)
+						}
+
+						if(video.paused && !paused) {
+							await video.play()
+						}
+
+						const frame = new VideoFrame(video)
+						return frame ? [{kind: "image", frame, matrix}] : []
+					}
+				}
+			}
+		},
+		async audio(item) {
+			const audio = getOrCreateAudioElement(item)
+			return {
+				duration: item.duration,
+				audio: {
+					onTimeUpdate: async (time) => {
+						const localTime = item.start + time
+						if(audio.paused && paused) {
+							await seek(audio, localTime)
+						}
+						if(audio.paused && !paused) {
+							await audio.play()
+						}
+						return []
+					}
+				}
+			}
+		},
+		async dispose() {
+			const elements = [...videoElements.values(), ...audioElements.values()]
+			for (const element of elements) {
+				element.pause()
+				if (element.src.startsWith("blob:"))
+					URL.revokeObjectURL(element.src)
+				element.remove()
+			}
+			videoElements.clear()
+			audioElements.clear()
+		},
+		async setPaused(p) {
+			paused = p
+			const elements = [...videoElements.values(), ...audioElements.values()]
+			for(const element of elements) {
+				if(p) element.pause()
+			}
+		},
+	}
+}
+
+function seek(media: HTMLVideoElement | HTMLAudioElement, time: number): Promise<void> {
+	return new Promise((resolve) => {
+		const onSeeked = () => {
+			media.removeEventListener("seeked", onSeeked)
+			resolve()
+		}
+		media.addEventListener("seeked", onSeeked)
+		if(media.fastSeek) {
+			media.fastSeek(time)
+		} else media.currentTime = time
+	})
+}
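
Note: makeHtmlSampler takes a resolver from a media hash to a DecoderSource (a URL string or a Blob; Blobs become object URLs and are revoked on dispose). A sketch of a custom resolver, where the mediaFiles map is hypothetical and the fallback is the placeholder asset used by playback.ts:

const mediaFiles = new Map<string, Blob>()

const sampler = makeHtmlSampler(
	hash => mediaFiles.get(hash) ?? "/assets/temp/gl.mp4"
)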

package/s/timeline/parts/compositor/samplers/webcodecs.ts
ADDED
@@ -0,0 +1,60 @@
+import {Item} from "../../item.js"
+import {context} from "../../../../context.js"
+import {WebcodecsSampler} from "../parts/tree-builder.js"
+import {VideoCursor} from "../../../utils/video-cursor.js"
+import {AudioStream} from "../../../utils/audio-stream.js"
+
+const toUs = (seconds: number) => Math.round(seconds * 1_000_000)
+
+export function makeWebCodecsSampler(resolveMedia: (hash: string) => any): WebcodecsSampler {
+	const videoCursors = new Map<number, VideoCursor>()
+
+	async function getCursorForVideo(videoItem: Item.Video): Promise<VideoCursor> {
+		const existing = videoCursors.get(videoItem.id)
+		if (existing) return existing
+		const driver = await context.driver
+		const source = resolveMedia(videoItem.mediaHash)
+		const video = driver.decodeVideo({source})
+		const cursor = new VideoCursor(video.getReader())
+		videoCursors.set(videoItem.id, cursor)
+		return cursor
+	}
+
+	return {
+		async video(item, matrix) {
+			const cursor = await getCursorForVideo(item)
+			const baseUs = toUs(item.start ?? 0)
+			return {
+				duration: item.duration,
+				visuals: {
+					sampleAt: async (time: number) => {
+						const frame = await cursor.atOrNear(baseUs + toUs(time))
+						return frame ? [{kind: "image", frame, matrix}] : []
+					}
+				}
+			}
+		},
+		async audio(item) {
+			return {
+				duration: item.duration,
+				audio: {
+					getStream: async function*() {
+						const driver = await context.driver
+						const source = resolveMedia(item.mediaHash)
+						const startUs = item.start
+						const endUs = (item.start + item.duration)
+						const audio = driver.decodeAudio({source, start: startUs, end: endUs})
+						const audioStream = new AudioStream(audio.getReader())
+						yield* audioStream.stream()
+					},
+				}
+			}
+		},
+		async dispose() {
+			const tasks = Array.from([...videoCursors.values()], c => c.cancel())
+			videoCursors.clear()
+			await Promise.all(tasks)
+		}
+	}
+}
+
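
Note: the WebCodecs sampler is the offline counterpart of the HTML sampler. A sketch of an export-style walk that mirrors the compositing loop in playback.ts; it assumes the snippet lives in the compositor directory (import paths) and that the caller supplies a 2d context to receive frames:

import {Item} from "../item.js"
import {context} from "../../../context.js"
import {fixedStep} from "./parts/schedulers.js"
import {makeWebCodecsSampler} from "./samplers/webcodecs.js"
import {buildWebCodecsNodeTree} from "./parts/webcodecs-tree.js"

async function renderOffline(rootItem: Item.Any, items: Map<number, Item.Any>, ctx: CanvasRenderingContext2D) {
	const driver = await context.driver
	const sampler = makeWebCodecsSampler(() => "/assets/temp/gl.mp4")
	const root = await buildWebCodecsNodeTree(rootItem, items, sampler)

	await fixedStep({fps: 30, duration: root.duration}, async time => {
		for (const layer of await root.visuals?.sampleAt(time) ?? []) {
			const frame = await driver.composite(layer)
			ctx.drawImage(frame, 0, 0) // or hand the frame to an encoder here
			frame.close()
		}
	})

	await sampler.dispose()
}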
package/s/timeline/parts/item.ts
CHANGED
@@ -1,11 +1,15 @@
 
 import {Id, Hash} from "./basics.js"
+import {Transform} from "../types.js"
 
 export enum Kind {
 	Sequence,
 	Stack,
-
+	Video,
+	Audio,
 	Text,
+	Gap,
+	Spatial,
 	Transition,
 }
 
@@ -14,21 +18,44 @@ export enum Effect {
 }
 
 export namespace Item {
+	export type Spatial = {
+		id: Id
+		kind: Kind.Spatial
+		transform: Transform
+	}
+
+	export type Gap = {
+		id: Id
+		kind: Kind.Gap
+		duration: number
+	}
+
 	export type Sequence = {
 		id: Id
 		kind: Kind.Sequence
-
+		childrenIds: Id[]
+		spatialId?: Id
 	}
 
 	export type Stack = {
 		id: Id
 		kind: Kind.Stack
-
+		childrenIds: Id[]
+		spatialId?: Id
+	}
+
+	export type Video = {
+		id: Id
+		kind: Kind.Video
+		mediaHash: Hash
+		start: number
+		duration: number
+		spatialId?: Id
 	}
 
-	export type
+	export type Audio = {
 		id: Id
-	kind: Kind.
+		kind: Kind.Audio
 		mediaHash: Hash
 		start: number
 		duration: number
@@ -38,6 +65,8 @@ export namespace Item {
 		id: Id
 		kind: Kind.Text
 		content: string
+		spatialId?: Id
+		color: string
 	}
 
 	export type Transition = {
@@ -50,9 +79,12 @@ export namespace Item {
 	export type Any = (
 		| Sequence
 		| Stack
-
+		| Video
+		| Audio
 		| Text
+		| Gap
 		| Transition
+		| Spatial
 	)
 }
 
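
Note: the item.ts change introduces Video, Audio, Gap and Spatial kinds plus optional spatialId links from containers and clips to a Spatial item carrying a Transform. A sketch of the new shapes; ids and the media hash are made-up values, Id is treated as numeric and Hash as a string, matching how the compositor code above uses them, and the import path assumes a sibling module of item.ts:

import {Item, Kind} from "./item.js"

const clip: Item.Video = {
	id: 1,
	kind: Kind.Video,
	mediaHash: "abc123",
	start: 0,
	duration: 5,
}

const gap: Item.Gap = {
	id: 2,
	kind: Kind.Gap,
	duration: 1,
}

const track: Item.Sequence = {
	id: 3,
	kind: Kind.Sequence,
	childrenIds: [1, 2],
}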
|