@editframe/elements 0.12.0-beta.2 → 0.12.0-beta.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assets/src/MP4File.js +68 -17
- package/dist/elements/EFMedia.d.ts +0 -1
- package/dist/elements/src/elements/EFMedia.js +70 -13
- package/dist/elements/src/gui/ContextMixin.js +2 -2
- package/dist/elements/src/gui/EFPreview.js +3 -1
- package/package.json +2 -2
- package/src/elements/EFMedia.browsertest.ts +2 -0
- package/src/elements/EFMedia.ts +86 -14
- package/src/gui/ContextMixin.ts +3 -2
- package/src/gui/EFPreview.ts +4 -2
- package/dist/elements/src/elements/util.js +0 -11
package/dist/assets/src/MP4File.js
CHANGED

@@ -40,13 +40,13 @@ class MP4File extends MP4Box.ISOFile {
     await this.readyPromise;
     const trackInfo = {};
     for (const videoTrack of this.getInfo().videoTracks) {
-      trackInfo[videoTrack.id] = { index: 0, complete: false };
+      trackInfo[videoTrack.id] = { index: 0 };
       this.setSegmentOptions(videoTrack.id, null, {
        rapAlignement: true
       });
     }
     for (const audioTrack of this.getInfo().audioTracks) {
-      trackInfo[audioTrack.id] = { index: 0, complete: false };
+      trackInfo[audioTrack.id] = { index: 0 };
       const sampleRate = audioTrack.audio.sample_rate;
       const probablePacketSize = 1024;
       const probableFourSecondsOfSamples = Math.ceil(
@@ -61,21 +61,12 @@ class MP4File extends MP4Box.ISOFile {
       yield {
         track: initSegment.id,
         segment: "init",
-        data: initSegment.buffer,
-        complete: false
+        data: initSegment.buffer
       };
     }
     const fragmentStartSamples = {};
     let finishedReading = false;
-    const allTracksFinished = () => {
-      for (const fragmentedTrack of this.fragmentedTracks) {
-        if (!trackInfo[fragmentedTrack.id]?.complete) {
-          return false;
-        }
-      }
-      return true;
-    };
-    while (!(finishedReading && allTracksFinished())) {
+    do {
      for (const fragTrak of this.fragmentedTracks) {
        const trak = fragTrak.trak;
        if (trak.nextSample === void 0) {
@@ -84,6 +75,8 @@ class MP4File extends MP4Box.ISOFile {
        if (trak.samples === void 0) {
          throw new Error("trak.samples is undefined");
        }
+        log("trak.nextSample", fragTrak.id, trak.nextSample);
+        log("trak.samples.length", fragTrak.id, trak.samples.length);
        while (trak.nextSample < trak.samples.length) {
          let result = void 0;
          const fragTrakNextSample = trak.samples[trak.nextSample];
@@ -120,9 +113,6 @@ class MP4File extends MP4Box.ISOFile {
            if (!trackInfoForFrag) {
              throw new Error("trackInfoForFrag is undefined");
            }
-            if (trak.nextSample >= trak.samples.length) {
-              trackInfoForFrag.complete = true;
-            }
            const startSample = fragmentStartSamples[fragTrak.id];
            const endSample = trak.samples[trak.nextSample - 1];
            if (!startSample || !endSample) {
@@ -137,7 +127,6 @@ class MP4File extends MP4Box.ISOFile {
              track: fragTrak.id,
              segment: trackInfoForFrag.index,
              data: fragTrak.segmentStream.buffer,
-              complete: trackInfoForFrag.complete,
              cts: startSample.cts,
              dts: startSample.dts,
              duration: endSample.cts - startSample.cts + endSample.duration
@@ -149,6 +138,68 @@ class MP4File extends MP4Box.ISOFile {
        }
      }
      finishedReading = await this.waitForMoreSamples();
+    } while (!finishedReading);
+    for (const fragTrak of this.fragmentedTracks) {
+      const trak = fragTrak.trak;
+      if (trak.nextSample === void 0) {
+        throw new Error("trak.nextSample is undefined");
+      }
+      if (trak.samples === void 0) {
+        throw new Error("trak.samples is undefined");
+      }
+      while (trak.nextSample < trak.samples.length) {
+        let result = void 0;
+        try {
+          result = this.createFragment(
+            fragTrak.id,
+            trak.nextSample,
+            fragTrak.segmentStream
+          );
+        } catch (error) {
+          console.error("Failed to createFragment", error);
+        }
+        if (result) {
+          fragTrak.segmentStream = result;
+          trak.nextSample++;
+        } else {
+          finishedReading = await this.waitForMoreSamples();
+          break;
+        }
+        const nextSample = trak.samples[trak.nextSample];
+        const emitSegment = (
+          // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+          fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+          !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // if we have more samples than the number of samples requested, we emit the fragment
+          trak.nextSample >= trak.samples.length
+        );
+        if (emitSegment) {
+          const trackInfoForFrag = trackInfo[fragTrak.id];
+          if (!trackInfoForFrag) {
+            throw new Error("trackInfoForFrag is undefined");
+          }
+          const startSample = fragmentStartSamples[fragTrak.id];
+          const endSample = trak.samples[trak.nextSample - 1];
+          if (!startSample || !endSample) {
+            throw new Error("startSample or endSample is undefined");
+          }
+          log(
+            `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`,
+            `startTime=${startSample.cts}`,
+            `endTime=${endSample.cts + endSample.duration}`
+          );
+          yield {
+            track: fragTrak.id,
+            segment: trackInfoForFrag.index,
+            data: fragTrak.segmentStream.buffer,
+            cts: startSample.cts,
+            dts: startSample.dts,
+            duration: endSample.cts - startSample.cts + endSample.duration
+          };
+          trackInfoForFrag.index += 1;
+          fragTrak.segmentStream = null;
+          delete fragmentStartSamples[fragTrak.id];
+        }
+      }
+    }
   }
   waitForMoreSamples() {
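The refactor above drops the per-track `complete` flags (and the `allTracksFinished` helper) in favor of a two-phase loop: a `do … while` that pumps fragments for as long as `waitForMoreSamples()` reports more data on the way, then a flush pass that drains whatever samples remain once reading has finished, so short tail fragments are not lost at end of file. A stripped-down sketch of that shape, with invented types — the real generator also handles init segments, RAP alignment, and per-track bookkeeping:

// Illustrative sketch only; SampleSource and pumpSegments are not part of the package.
interface SampleSource {
  // Resolves true once the underlying file has been read to its end.
  waitForMoreSamples(): Promise<boolean>;
  // Returns whatever segments are ready to emit right now.
  takeReadySegments(): Uint8Array[];
}

async function* pumpSegments(source: SampleSource): AsyncGenerator<Uint8Array> {
  let finishedReading = false;
  // Phase 1: emit segments as samples stream in.
  do {
    yield* source.takeReadySegments();
    finishedReading = await source.waitForMoreSamples();
  } while (!finishedReading);
  // Phase 2: flush anything buffered but not yet emitted.
  yield* source.takeReadySegments();
}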
package/dist/elements/EFMedia.d.ts
CHANGED

@@ -47,7 +47,6 @@ export declare class EFMedia extends EFMedia_base {
     protected updated(changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>): void;
     get hasOwnDuration(): boolean;
     get durationMs(): number;
-    get startTimeMs(): number;
     audioBufferTask: Task<readonly [Record<string, File> | undefined, Record<string, {
         segment: TrackSegment;
         track: MP4Box.TrackInfo;
package/dist/elements/src/elements/EFMedia.js
CHANGED

@@ -10,9 +10,8 @@ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
 import { EF_RENDERING } from "../EF_RENDERING.js";
 import { apiHostContext } from "../gui/apiHostContext.js";
 import { EFSourceMixin } from "./EFSourceMixin.js";
-import { EFTemporal } from "./EFTemporal.js";
+import { EFTemporal, isEFTemporal } from "./EFTemporal.js";
 import { FetchMixin } from "./FetchMixin.js";
-import { getStartTimeMs } from "./util.js";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
@@ -62,10 +61,10 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     return await Promise.all(
       Object.entries(fragmentIndex).map(async ([trackId, track]) => {
         const start = track.initSegment.offset;
-        const end = track.initSegment.offset + track.initSegment.size
+        const end = track.initSegment.offset + track.initSegment.size;
         const response = await fetch(this.fragmentTrackPath(trackId), {
           signal,
-          headers: { Range: `bytes=${start}-${end}` }
+          headers: { Range: `bytes=${start}-${end - 1}` }
         });
         const buffer = await response.arrayBuffer();
         buffer.fileStart = 0;
@@ -131,14 +130,14 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     const end = segment.offset + segment.size;
     const response = await fetch(this.fragmentTrackPath(trackId), {
       signal,
-      headers: { Range: `bytes=${start}-${end}` }
+      headers: { Range: `bytes=${start}-${end - 1}` }
     });
     if (nextSegment) {
       const nextStart = nextSegment.offset;
       const nextEnd = nextSegment.offset + nextSegment.size;
       fetch(this.fragmentTrackPath(trackId), {
         signal,
-        headers: { Range: `bytes=${nextStart}-${nextEnd}` }
+        headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` }
       }).then(() => {
         log("Prefetched next segment");
       }).catch((error) => {
@@ -270,6 +269,67 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     if (changedProperties.has("ownCurrentTimeMs")) {
       this.executeSeek(this.trimAdjustedOwnCurrentTimeMs);
     }
+    if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
+      const timelineTimeMs = (this.rootTimegroup ?? this).currentTimeMs;
+      if (this.startTimeMs > timelineTimeMs || this.endTimeMs < timelineTimeMs) {
+        this.style.display = "none";
+        return;
+      }
+      this.style.display = "";
+      const animations = this.getAnimations({ subtree: true });
+      this.style.setProperty("--ef-duration", `${this.durationMs}ms`);
+      this.style.setProperty(
+        "--ef-transition--duration",
+        `${this.parentTimegroup?.overlapMs ?? 0}ms`
+      );
+      this.style.setProperty(
+        "--ef-transition-out-start",
+        `${this.durationMs - (this.parentTimegroup?.overlapMs ?? 0)}ms`
+      );
+      for (const animation of animations) {
+        if (animation.playState === "running") {
+          animation.pause();
+        }
+        const effect = animation.effect;
+        if (!(effect && effect instanceof KeyframeEffect)) {
+          return;
+        }
+        const target = effect.target;
+        if (!target) {
+          return;
+        }
+        if (target.closest("ef-video, ef-audio") !== this) {
+          return;
+        }
+        if (isEFTemporal(target)) {
+          const timing = effect.getTiming();
+          const duration = Number(timing.duration) ?? 0;
+          const delay = Number(timing.delay);
+          const newTime = Math.floor(
+            Math.min(target.ownCurrentTimeMs, duration - 1 + delay)
+          );
+          if (Number.isNaN(newTime)) {
+            return;
+          }
+          animation.currentTime = newTime;
+        } else if (target) {
+          const nearestTimegroup = target.closest("ef-timegroup");
+          if (!nearestTimegroup) {
+            return;
+          }
+          const timing = effect.getTiming();
+          const duration = Number(timing.duration) ?? 0;
+          const delay = Number(timing.delay);
+          const newTime = Math.floor(
+            Math.min(nearestTimegroup.ownCurrentTimeMs, duration - 1 + delay)
+          );
+          if (Number.isNaN(newTime)) {
+            return;
+          }
+          animation.currentTime = newTime;
+        }
+      }
+    }
   }
   get hasOwnDuration() {
     return true;
@@ -300,9 +360,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     }
     return Math.max(...durations) - this.trimStartMs - this.trimEndMs;
   }
-  get startTimeMs() {
-    return getStartTimeMs(this);
-  }
   #audioContext;
   async fetchAudioSpanningTime(fromMs, toMs) {
     if (this.sourceInMs) {
@@ -325,11 +382,11 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       return;
     }
     const start = audioTrackIndex.initSegment.offset;
-    const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size
+    const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
     const audioInitFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${start}-${end}` }
+        headers: { Range: `bytes=${start}-${end - 1}` }
       }
     );
     const fragments = Object.values(audioTrackIndex.segments).filter(
@@ -350,11 +407,11 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       return;
     }
     const fragmentStart = firstFragment.offset;
-    const fragmentEnd = lastFragment.offset + lastFragment.size
+    const fragmentEnd = lastFragment.offset + lastFragment.size;
     const audioFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd}` }
+        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` }
      }
     );
     const initResponse = await audioInitFragmentRequest;
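The recurring `end - 1` edits in the hunks above fix an off-by-one in the HTTP Range requests: `Range: bytes=first-last` is inclusive on both ends (RFC 9110), so `bytes=${offset}-${offset + size}` asked for one byte past the segment and overlapped the start of the next one. The corrected arithmetic as a standalone sketch — the helper name is ours, not the package's:

// Hypothetical helper; not exported by @editframe/elements.
// A segment of `size` bytes starting at `offset` occupies the inclusive
// byte positions offset .. offset + size - 1.
function segmentRangeHeader(offset: number, size: number): Record<string, string> {
  return { Range: `bytes=${offset}-${offset + size - 1}` };
}

// e.g. fetch(url, { headers: segmentRangeHeader(segment.offset, segment.size) })
// now returns exactly `size` bytes.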
package/dist/elements/src/gui/ContextMixin.js
CHANGED

@@ -83,14 +83,14 @@ function ContextMixin(superClass) {
       const scale = stageHeight / canvasHeight;
       if (this.stageScale !== scale) {
         canvasElement.style.transform = `scale(${scale})`;
-        canvasElement.style.transformOrigin = "
+        canvasElement.style.transformOrigin = "center left";
       }
       this.stageScale = scale;
     } else {
       const scale = stageWidth / canvasWidth;
       if (this.stageScale !== scale) {
         canvasElement.style.transform = `scale(${scale})`;
-        canvasElement.style.transformOrigin = "
+        canvasElement.style.transformOrigin = "center left";
       }
       this.stageScale = scale;
     }
package/dist/elements/src/gui/EFPreview.js
CHANGED

@@ -1,8 +1,8 @@
 import { html, css, LitElement } from "lit";
 import { customElement } from "lit/decorators.js";
 import { ref } from "lit/directives/ref.js";
-import { TWMixin } from "./TWMixin.js";
 import { ContextMixin } from "./ContextMixin.js";
+import { TWMixin } from "./TWMixin.js";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
@@ -23,7 +23,9 @@ let EFPreview = class extends ContextMixin(TWMixin(LitElement)) {
       <slot
         ${ref(this.canvasRef)}
         class="inline-block"
+        name="canvas"
       ></slot>
+      <slot name="controls"></slot>
     </div>
   `;
 }
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@editframe/elements",
-  "version": "0.12.0-beta.2",
+  "version": "0.12.0-beta.6",
   "description": "",
   "exports": {
     ".": {
@@ -20,7 +20,7 @@
   "author": "",
   "license": "UNLICENSED",
   "dependencies": {
-    "@editframe/assets": "0.12.0-beta.2",
+    "@editframe/assets": "0.12.0-beta.6",
     "@lit/context": "^1.1.2",
     "@lit/task": "^1.0.1",
     "d3": "^7.9.0",
package/src/elements/EFMedia.browsertest.ts
CHANGED

@@ -4,6 +4,7 @@ import { afterEach, beforeEach, describe, expect, test } from "vitest";
 import { EFMedia } from "./EFMedia.ts";
 import "../gui/EFWorkbench.ts";
 import "../gui/EFPreview.ts";
+import "./EFTimegroup.ts";
 import { createTestFragmentIndex } from "TEST/createTestFragmentIndex.ts";
 import { useMockWorker } from "TEST/useMockWorker.ts";
 import { http, HttpResponse } from "msw";
@@ -291,6 +292,7 @@ describe("EFMedia", () => {
     expect(element.durationMs).toBe(4_000);
     expect(timegroup.durationMs).toBe(4_000);
   });
+
   test("Computes duration from track fragment index sourceout and sourcein", async () => {
     // Mock the request for the track fragment index, responds with a 10 second duration
     worker.use(
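The new bare `import "./EFTimegroup.ts"` is a side-effect import: evaluating the module registers the `ef-timegroup` custom element before the tests build DOM that contains it. The registration happens at module scope, roughly like this (class body elided; only the decorator mechanics matter here):

// Sketch of why a bare import suffices - Lit's @customElement decorator
// calls customElements.define("ef-timegroup", ...) when the module runs.
import { LitElement } from "lit";
import { customElement } from "lit/decorators.js";

@customElement("ef-timegroup")
export class EFTimegroup extends LitElement {
  // ...the package's timing logic lives here
}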
package/src/elements/EFMedia.ts
CHANGED

@@ -14,9 +14,8 @@ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.ts";
 import { EF_RENDERING } from "../EF_RENDERING.ts";
 import { apiHostContext } from "../gui/apiHostContext.ts";
 import { EFSourceMixin } from "./EFSourceMixin.ts";
-import { EFTemporal } from "./EFTemporal.ts";
+import { EFTemporal, isEFTemporal } from "./EFTemporal.ts";
 import { FetchMixin } from "./FetchMixin.ts";
-import { getStartTimeMs } from "./util.ts";
 
 const log = debug("ef:elements:EFMedia");
 
@@ -110,10 +109,10 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     return await Promise.all(
       Object.entries(fragmentIndex).map(async ([trackId, track]) => {
         const start = track.initSegment.offset;
-        const end = track.initSegment.offset + track.initSegment.size
+        const end = track.initSegment.offset + track.initSegment.size;
         const response = await fetch(this.fragmentTrackPath(trackId), {
           signal,
-          headers: { Range: `bytes=${start}-${end}` },
+          headers: { Range: `bytes=${start}-${end - 1}` },
         });
         const buffer =
           (await response.arrayBuffer()) as MP4Box.MP4ArrayBuffer;
@@ -220,7 +219,7 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
 
     const response = await fetch(this.fragmentTrackPath(trackId), {
       signal,
-      headers: { Range: `bytes=${start}-${end}` },
+      headers: { Range: `bytes=${start}-${end - 1}` },
     });
 
     if (nextSegment) {
@@ -228,7 +227,7 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       const nextEnd = nextSegment.offset + nextSegment.size;
       fetch(this.fragmentTrackPath(trackId), {
         signal,
-        headers: { Range: `bytes=${nextStart}-${nextEnd}` },
+        headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` },
       })
         .then(() => {
           log("Prefetched next segment");
@@ -297,6 +296,83 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     if (changedProperties.has("ownCurrentTimeMs")) {
       this.executeSeek(this.trimAdjustedOwnCurrentTimeMs);
     }
+    // TODO: this is copied straight from EFTimegroup.ts
+    // and should be refactored to be shared/reduce bad duplication of
+    // critical logic.
+    if (
+      changedProperties.has("currentTime") ||
+      changedProperties.has("ownCurrentTimeMs")
+    ) {
+      const timelineTimeMs = (this.rootTimegroup ?? this).currentTimeMs;
+      if (
+        this.startTimeMs > timelineTimeMs ||
+        this.endTimeMs < timelineTimeMs
+      ) {
+        this.style.display = "none";
+        return;
+      }
+      this.style.display = "";
+
+      const animations = this.getAnimations({ subtree: true });
+      this.style.setProperty("--ef-duration", `${this.durationMs}ms`);
+      this.style.setProperty(
+        "--ef-transition--duration",
+        `${this.parentTimegroup?.overlapMs ?? 0}ms`,
+      );
+      this.style.setProperty(
+        "--ef-transition-out-start",
+        `${this.durationMs - (this.parentTimegroup?.overlapMs ?? 0)}ms`,
+      );
+
+      for (const animation of animations) {
+        if (animation.playState === "running") {
+          animation.pause();
+        }
+        const effect = animation.effect;
+        if (!(effect && effect instanceof KeyframeEffect)) {
+          return;
+        }
+        const target = effect.target;
+        // TODO: better generalize work avoidance for temporal elements
+        if (!target) {
+          return;
+        }
+        if (target.closest("ef-video, ef-audio") !== this) {
+          return;
+        }
+
+        // Important to avoid going to the end of the animation
+        // or it will reset awkwardly.
+        if (isEFTemporal(target)) {
+          const timing = effect.getTiming();
+          const duration = Number(timing.duration) ?? 0;
+          const delay = Number(timing.delay);
+          const newTime = Math.floor(
+            Math.min(target.ownCurrentTimeMs, duration - 1 + delay),
+          );
+          if (Number.isNaN(newTime)) {
+            return;
+          }
+          animation.currentTime = newTime;
+        } else if (target) {
+          const nearestTimegroup = target.closest("ef-timegroup");
+          if (!nearestTimegroup) {
+            return;
+          }
+          const timing = effect.getTiming();
+          const duration = Number(timing.duration) ?? 0;
+          const delay = Number(timing.delay);
+          const newTime = Math.floor(
+            Math.min(nearestTimegroup.ownCurrentTimeMs, duration - 1 + delay),
+          );
+
+          if (Number.isNaN(newTime)) {
+            return;
+          }
+          animation.currentTime = newTime;
+        }
+      }
+    }
   }
 
   get hasOwnDuration() {
@@ -351,10 +427,6 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     return Math.max(...durations) - this.trimStartMs - this.trimEndMs;
   }
 
-  get startTimeMs() {
-    return getStartTimeMs(this);
-  }
-
   #audioContext = new OfflineAudioContext(2, 48000 / 30, 48000);
 
   audioBufferTask = new Task(this, {
@@ -413,11 +485,11 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
 
     const start = audioTrackIndex.initSegment.offset;
     const end =
-      audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size
+      audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
     const audioInitFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${start}-${end}` },
+        headers: { Range: `bytes=${start}-${end - 1}` },
       },
     );
 
@@ -443,12 +515,12 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       return;
     }
     const fragmentStart = firstFragment.offset;
-    const fragmentEnd = lastFragment.offset + lastFragment.size
+    const fragmentEnd = lastFragment.offset + lastFragment.size;
 
     const audioFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd}` },
+        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` },
       },
     );
 
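Two details of the added block are worth noting. The clamp `Math.min(..., duration - 1 + delay)` parks a paused animation one millisecond short of its end so that seeking to the boundary doesn't let a forward-filled animation snap back, per the inline comment. And `Number(timing.duration) ?? 0` is effectively just `Number(timing.duration)`: `timing.duration` may be the string "auto", and `Number("auto")` is `NaN` rather than `null`/`undefined`, so the `?? 0` never fires — it is the later `Number.isNaN(newTime)` check that actually bails out. A restatement of the clamp in isolation (the helper name is ours, not an exported API):

// Illustrative only; mirrors the clamping above.
function clampToAnimationWindow(
  targetTimeMs: number,
  timing: EffectTiming,
): number | undefined {
  // Number("auto") and Number(undefined) are both NaN, so `?? 0` cannot
  // rescue them; the NaN check below is the real guard.
  const duration = Number(timing.duration) ?? 0;
  const delay = Number(timing.delay);
  // Stay 1ms inside the effect so a forward-filled animation does not
  // visually reset when currentTime lands exactly on its end.
  const clamped = Math.floor(Math.min(targetTimeMs, duration - 1 + delay));
  return Number.isNaN(clamped) ? undefined : clamped;
}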
package/src/gui/ContextMixin.ts
CHANGED

@@ -136,18 +136,19 @@ export function ContextMixin<T extends Constructor<LitElement>>(superClass: T) {
       const canvasHeight = canvasElement.clientHeight;
       const stageRatio = stageWidth / stageHeight;
       const canvasRatio = canvasWidth / canvasHeight;
+
       if (stageRatio > canvasRatio) {
         const scale = stageHeight / canvasHeight;
         if (this.stageScale !== scale) {
           canvasElement.style.transform = `scale(${scale})`;
-          canvasElement.style.transformOrigin = "
+          canvasElement.style.transformOrigin = "center left";
         }
         this.stageScale = scale;
       } else {
         const scale = stageWidth / canvasWidth;
         if (this.stageScale !== scale) {
           canvasElement.style.transform = `scale(${scale})`;
-          canvasElement.style.transformOrigin = "
+          canvasElement.style.transformOrigin = "center left";
         }
         this.stageScale = scale;
       }
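The ratio comparison implements a "contain"-style fit: when the stage is proportionally wider than the canvas, height is the limiting axis and sets the scale; otherwise width does. The same math as a standalone function (illustrative, not the mixin's API):

// Restatement of the fit logic above for clarity; not exported anywhere.
function containScale(
  stageWidth: number,
  stageHeight: number,
  canvasWidth: number,
  canvasHeight: number,
): number {
  const stageRatio = stageWidth / stageHeight;
  const canvasRatio = canvasWidth / canvasHeight;
  // A relatively wider stage means the canvas hits the top/bottom first.
  return stageRatio > canvasRatio
    ? stageHeight / canvasHeight
    : stageWidth / canvasWidth;
}

// e.g. a 1920x1080 canvas in a 640x480 stage: stageRatio 1.33 < canvasRatio 1.78,
// so the scale is width-limited: 640 / 1920 ≈ 0.33.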
package/src/gui/EFPreview.ts
CHANGED

@@ -1,9 +1,9 @@
-import { LitElement,
+import { LitElement, css, html } from "lit";
 import { customElement } from "lit/decorators.js";
 import { ref } from "lit/directives/ref.js";
 
-import { TWMixin } from "./TWMixin.ts";
 import { ContextMixin } from "./ContextMixin.ts";
+import { TWMixin } from "./TWMixin.ts";
 
 @customElement("ef-preview")
 export class EFPreview extends ContextMixin(TWMixin(LitElement)) {
@@ -26,7 +26,9 @@ export class EFPreview extends ContextMixin(TWMixin(LitElement)) {
       <slot
         ${ref(this.canvasRef)}
         class="inline-block"
+        name="canvas"
       ></slot>
+      <slot name="controls"></slot>
     </div>
   `;
 }
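Because the previously unnamed (default) slot is now the named "canvas" slot, with a separate "controls" slot beside it, light-DOM children of `ef-preview` must carry explicit `slot` attributes to be projected; unslotted children no longer land in the canvas area. A plausible usage sketch — the element names come from this package, but the child markup is invented:

// Hypothetical host markup rendered with lit-html.
import { html } from "lit";

const preview = html`
  <ef-preview>
    <ef-timegroup slot="canvas">
      <ef-video src="clip.mp4"></ef-video>
    </ef-timegroup>
    <div slot="controls">
      <button>Play</button>
    </div>
  </ef-preview>
`;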
package/dist/elements/src/elements/util.js
REMOVED

@@ -1,11 +0,0 @@
-import { EFTimegroup } from "./EFTimegroup.js";
-const getStartTimeMs = (element) => {
-  const nearestTimeGroup = element.closest("ef-timegroup");
-  if (!(nearestTimeGroup instanceof EFTimegroup)) {
-    return 0;
-  }
-  return nearestTimeGroup.startTimeMs;
-};
-export {
-  getStartTimeMs
-};