@editframe/elements 0.12.0-beta.1 → 0.12.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -40,13 +40,13 @@ class MP4File extends MP4Box.ISOFile {
     await this.readyPromise;
     const trackInfo = {};
     for (const videoTrack of this.getInfo().videoTracks) {
-      trackInfo[videoTrack.id] = { index: 0
+      trackInfo[videoTrack.id] = { index: 0 };
       this.setSegmentOptions(videoTrack.id, null, {
         rapAlignement: true
       });
     }
     for (const audioTrack of this.getInfo().audioTracks) {
-      trackInfo[audioTrack.id] = { index: 0
+      trackInfo[audioTrack.id] = { index: 0 };
       const sampleRate = audioTrack.audio.sample_rate;
       const probablePacketSize = 1024;
       const probableFourSecondsOfSamples = Math.ceil(
@@ -61,21 +61,12 @@ class MP4File extends MP4Box.ISOFile {
       yield {
         track: initSegment.id,
         segment: "init",
-        data: initSegment.buffer
-        complete: false
+        data: initSegment.buffer
       };
     }
     const fragmentStartSamples = {};
     let finishedReading = false;
-    const allTracksFinished = () => {
-      for (const fragmentedTrack of this.fragmentedTracks) {
-        if (!trackInfo[fragmentedTrack.id]?.complete) {
-          return false;
-        }
-      }
-      return true;
-    };
-    while (!(finishedReading && allTracksFinished())) {
+    do {
       for (const fragTrak of this.fragmentedTracks) {
         const trak = fragTrak.trak;
         if (trak.nextSample === void 0) {
@@ -84,6 +75,8 @@ class MP4File extends MP4Box.ISOFile {
         if (trak.samples === void 0) {
           throw new Error("trak.samples is undefined");
         }
+        log("trak.nextSample", fragTrak.id, trak.nextSample);
+        log("trak.samples.length", fragTrak.id, trak.samples.length);
         while (trak.nextSample < trak.samples.length) {
           let result = void 0;
           const fragTrakNextSample = trak.samples[trak.nextSample];
@@ -120,22 +113,20 @@ class MP4File extends MP4Box.ISOFile {
           if (!trackInfoForFrag) {
             throw new Error("trackInfoForFrag is undefined");
           }
-          if (trak.nextSample >= trak.samples.length) {
-            trackInfoForFrag.complete = true;
-          }
-          log(
-            `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`
-          );
           const startSample = fragmentStartSamples[fragTrak.id];
           const endSample = trak.samples[trak.nextSample - 1];
           if (!startSample || !endSample) {
             throw new Error("startSample or endSample is undefined");
           }
+          log(
+            `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`,
+            `startTime=${startSample.cts}`,
+            `endTime=${endSample.cts + endSample.duration}`
+          );
           yield {
             track: fragTrak.id,
             segment: trackInfoForFrag.index,
             data: fragTrak.segmentStream.buffer,
-            complete: trackInfoForFrag.complete,
             cts: startSample.cts,
             dts: startSample.dts,
             duration: endSample.cts - startSample.cts + endSample.duration
@@ -147,6 +138,68 @@ class MP4File extends MP4Box.ISOFile {
         }
       }
       finishedReading = await this.waitForMoreSamples();
+    } while (!finishedReading);
+    for (const fragTrak of this.fragmentedTracks) {
+      const trak = fragTrak.trak;
+      if (trak.nextSample === void 0) {
+        throw new Error("trak.nextSample is undefined");
+      }
+      if (trak.samples === void 0) {
+        throw new Error("trak.samples is undefined");
+      }
+      while (trak.nextSample < trak.samples.length) {
+        let result = void 0;
+        try {
+          result = this.createFragment(
+            fragTrak.id,
+            trak.nextSample,
+            fragTrak.segmentStream
+          );
+        } catch (error) {
+          console.error("Failed to createFragment", error);
+        }
+        if (result) {
+          fragTrak.segmentStream = result;
+          trak.nextSample++;
+        } else {
+          finishedReading = await this.waitForMoreSamples();
+          break;
+        }
+        const nextSample = trak.samples[trak.nextSample];
+        const emitSegment = (
+          // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+          fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+          !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // if we have more samples than the number of samples requested, we emit the fragment
+          trak.nextSample >= trak.samples.length
+        );
+        if (emitSegment) {
+          const trackInfoForFrag = trackInfo[fragTrak.id];
+          if (!trackInfoForFrag) {
+            throw new Error("trackInfoForFrag is undefined");
+          }
+          const startSample = fragmentStartSamples[fragTrak.id];
+          const endSample = trak.samples[trak.nextSample - 1];
+          if (!startSample || !endSample) {
+            throw new Error("startSample or endSample is undefined");
+          }
+          log(
+            `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`,
+            `startTime=${startSample.cts}`,
+            `endTime=${endSample.cts + endSample.duration}`
+          );
+          yield {
+            track: fragTrak.id,
+            segment: trackInfoForFrag.index,
+            data: fragTrak.segmentStream.buffer,
+            cts: startSample.cts,
+            dts: startSample.dts,
+            duration: endSample.cts - startSample.cts + endSample.duration
+          };
+          trackInfoForFrag.index += 1;
+          fragTrak.segmentStream = null;
+          delete fragmentStartSamples[fragTrak.id];
+        }
+      }
+    }
   }
 }
 waitForMoreSamples() {
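Taken together, the MP4File hunks above drop the per-track `complete` flag (and the removed `allTracksFinished()` helper) and restructure the read loop: a `do { ... } while (!finishedReading)` pass fragments samples while `waitForMoreSamples()` still reports more data, and a second pass over `this.fragmentedTracks` drains whatever samples remain so each track's final, shorter fragment is still emitted. A minimal sketch of that two-phase shape, using a hypothetical in-memory sample source rather than the MP4Box types from the diff (`pullSamples` and `FRAGMENT_SIZE` are illustrative stand-ins):

type Sample = { cts: number; dts: number; duration: number };

// Sketch only: not part of @editframe/elements or mp4box.js.
const FRAGMENT_SIZE = 4;

async function* emitFragments(
  pullSamples: () => Promise<Sample[] | null>, // resolves null once the source is exhausted
): AsyncGenerator<Sample[]> {
  const buffered: Sample[] = [];
  let finishedReading = false;
  // Phase 1: keep pulling, emitting full fragments as enough samples arrive
  // (the new do/while loop).
  do {
    const batch = await pullSamples();
    if (batch === null) {
      finishedReading = true; // analogous to waitForMoreSamples() reporting the end of input
    } else {
      buffered.push(...batch);
      while (buffered.length >= FRAGMENT_SIZE) {
        yield buffered.splice(0, FRAGMENT_SIZE);
      }
    }
  } while (!finishedReading);
  // Phase 2: drain the leftovers so the final partial fragment is not lost
  // (the job of the second for-loop over this.fragmentedTracks).
  if (buffered.length > 0) {
    yield buffered.splice(0);
  }
}

Since yielded segments no longer carry a `complete` field, consumers presumably detect the last fragment by the generator finishing rather than by inspecting the segment itself.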
@@ -62,10 +62,10 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     return await Promise.all(
       Object.entries(fragmentIndex).map(async ([trackId, track]) => {
         const start = track.initSegment.offset;
-        const end = track.initSegment.offset + track.initSegment.size
+        const end = track.initSegment.offset + track.initSegment.size;
         const response = await fetch(this.fragmentTrackPath(trackId), {
           signal,
-          headers: { Range: `bytes=${start}-${end}` }
+          headers: { Range: `bytes=${start}-${end - 1}` }
         });
         const buffer = await response.arrayBuffer();
         buffer.fileStart = 0;
@@ -131,14 +131,14 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     const end = segment.offset + segment.size;
     const response = await fetch(this.fragmentTrackPath(trackId), {
       signal,
-      headers: { Range: `bytes=${start}-${end}` }
+      headers: { Range: `bytes=${start}-${end - 1}` }
     });
     if (nextSegment) {
       const nextStart = nextSegment.offset;
       const nextEnd = nextSegment.offset + nextSegment.size;
       fetch(this.fragmentTrackPath(trackId), {
         signal,
-        headers: { Range: `bytes=${nextStart}-${nextEnd}` }
+        headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` }
       }).then(() => {
         log("Prefetched next segment");
       }).catch((error) => {
@@ -325,11 +325,11 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       return;
     }
     const start = audioTrackIndex.initSegment.offset;
-    const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size
+    const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
     const audioInitFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${start}-${end}` }
+        headers: { Range: `bytes=${start}-${end - 1}` }
       }
     );
     const fragments = Object.values(audioTrackIndex.segments).filter(
@@ -350,11 +350,11 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       return;
     }
     const fragmentStart = firstFragment.offset;
-    const fragmentEnd = lastFragment.offset + lastFragment.size
+    const fragmentEnd = lastFragment.offset + lastFragment.size;
     const audioFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd}` }
+        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` }
       }
     );
     const initResponse = await audioInitFragmentRequest;
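The recurring change in the EFMedia hunks above is the `Range` header. HTTP byte ranges are inclusive on both ends (RFC 9110, Range Requests), so `bytes=${start}-${end}` with `end = offset + size` asked for one byte more than the segment and overlapped the first byte of the following one; subtracting 1 makes each request cover exactly `size` bytes. A small illustration with made-up numbers:

// Example values only; not taken from the package.
const offset = 1000;
const size = 512;

// Before: "bytes=1000-1512" requests 513 bytes (ranges are inclusive).
const before = `bytes=${offset}-${offset + size}`;
// After: "bytes=1000-1511" requests exactly the 512 bytes of the segment.
const after = `bytes=${offset}-${offset + size - 1}`;

console.log(before, after);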
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@editframe/elements",
-  "version": "0.12.0-beta.1",
+  "version": "0.12.0-beta.3",
   "description": "",
   "exports": {
     ".": {
@@ -20,7 +20,7 @@
   "author": "",
   "license": "UNLICENSED",
   "dependencies": {
-    "@editframe/assets": "0.12.0-beta.
+    "@editframe/assets": "0.12.0-beta.3",
     "@lit/context": "^1.1.2",
     "@lit/task": "^1.0.1",
     "d3": "^7.9.0",
package/src/elements/EFMedia.ts
CHANGED

@@ -110,10 +110,10 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     return await Promise.all(
       Object.entries(fragmentIndex).map(async ([trackId, track]) => {
         const start = track.initSegment.offset;
-        const end = track.initSegment.offset + track.initSegment.size
+        const end = track.initSegment.offset + track.initSegment.size;
         const response = await fetch(this.fragmentTrackPath(trackId), {
           signal,
-          headers: { Range: `bytes=${start}-${end}` },
+          headers: { Range: `bytes=${start}-${end - 1}` },
         });
         const buffer =
           (await response.arrayBuffer()) as MP4Box.MP4ArrayBuffer;
@@ -220,7 +220,7 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {

     const response = await fetch(this.fragmentTrackPath(trackId), {
       signal,
-      headers: { Range: `bytes=${start}-${end}` },
+      headers: { Range: `bytes=${start}-${end - 1}` },
     });

     if (nextSegment) {
@@ -228,7 +228,7 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       const nextEnd = nextSegment.offset + nextSegment.size;
       fetch(this.fragmentTrackPath(trackId), {
         signal,
-        headers: { Range: `bytes=${nextStart}-${nextEnd}` },
+        headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` },
       })
         .then(() => {
           log("Prefetched next segment");
@@ -413,11 +413,11 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {

     const start = audioTrackIndex.initSegment.offset;
     const end =
-      audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size
+      audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
     const audioInitFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${start}-${end}` },
+        headers: { Range: `bytes=${start}-${end - 1}` },
       },
     );

@@ -443,12 +443,12 @@ export class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       return;
     }
     const fragmentStart = firstFragment.offset;
-    const fragmentEnd = lastFragment.offset + lastFragment.size
+    const fragmentEnd = lastFragment.offset + lastFragment.size;

     const audioFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {
-        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd}` },
+        headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` },
       },
     );

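The same inclusive-range fix is mirrored here in the TypeScript source, alongside a few missing statement-terminating semicolons. To verify that a ranged fetch like these returns exactly one segment, a check along the following lines works; `url`, `offset`, and `size` are placeholders, not names from the package:

// Sketch: a server that honours the Range header answers 206 Partial Content
// with exactly `size` bytes for an inclusive range of that length.
async function fetchSegment(url: string, offset: number, size: number): Promise<ArrayBuffer> {
  const response = await fetch(url, {
    headers: { Range: `bytes=${offset}-${offset + size - 1}` },
  });
  if (response.status !== 206) {
    throw new Error(`Expected 206 Partial Content, got ${response.status}`);
  }
  const buffer = await response.arrayBuffer();
  if (buffer.byteLength !== size) {
    throw new Error(`Expected ${size} bytes, got ${buffer.byteLength}`);
  }
  return buffer;
}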