@remotion/media-parser 4.0.350 → 4.0.351
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.d.ts +6 -0
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +131 -0
- package/dist/containers/iso-base-media/mdat/mdat.js +1 -1
- package/dist/containers/iso-base-media/mvhd.d.ts +30 -0
- package/dist/containers/iso-base-media/mvhd.js +65 -0
- package/dist/esm/index.mjs +3 -3
- package/dist/esm/universal.mjs +1 -1
- package/dist/esm/web.mjs +1 -1
- package/dist/esm/worker-server-entry.mjs +2 -2
- package/dist/esm/worker-web-entry.mjs +2 -2
- package/dist/readers/from-fetch.js +1 -0
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -4
|
import type { MinimalFlatSampleForTesting } from '../../../state/iso-base-media/cached-sample-positions';
/**
 * A seek instruction for reading an `mdat` box non-sequentially: after
 * emitting the sample located at `afterSampleWithOffset`, the reader should
 * jump to `jumpToOffset` instead of continuing sequentially.
 */
export type JumpMark = {
    afterSampleWithOffset: number;
    jumpToOffset: number;
};
/**
 * Computes the jump marks needed to interleave samples of multiple tracks
 * whose samples are laid out non-interleaved in the file (e.g. all video
 * first, then all audio).
 *
 * @param samplePositionTracks - one array of flat sample positions per track.
 * @param endOfMdat - byte offset of the end of the `mdat` box; used as the
 *   final jump target once every sample has been visited.
 * @returns the jump marks in the order they should be applied.
 */
export declare const calculateJumpMarks: (samplePositionTracks: MinimalFlatSampleForTesting[][], endOfMdat: number) => JumpMark[];
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/*
|
|
3
|
+
https://discord.com/channels/809501355504959528/1001500302375125055/1364798934832119870
|
|
4
|
+
Android 13 produces MP4 videos where first, all video samples are at the beginning,
|
|
5
|
+
then all audio samples are at the end.
|
|
6
|
+
|
|
7
|
+
This causes issues with our video decoder: "Waited too long for VideoDecoder" because
|
|
8
|
+
the overall progress is stuck.
|
|
9
|
+
*/
|
|
10
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
11
|
+
exports.calculateJumpMarks = void 0;
|
|
12
|
+
// In WebCodecs, we require the tracks to deviate by at most 10 seconds
|
|
13
|
+
// Therefore, we need to emit them to be less than 10 seconds apart
|
|
14
|
+
const MAX_SPREAD_IN_SECONDS = 8;
|
|
15
|
+
const getKey = (samplePositionTrack) => {
|
|
16
|
+
return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}`;
|
|
17
|
+
};
|
|
18
|
+
const findBestJump = ({ allSamplesSortedByOffset, visited, progresses, }) => {
|
|
19
|
+
var _a;
|
|
20
|
+
const minProgress = Math.min(...Object.values(progresses));
|
|
21
|
+
const trackNumberWithLowestProgress = (_a = Object.entries(progresses).find(([, progress]) => progress === minProgress)) === null || _a === void 0 ? void 0 : _a[0];
|
|
22
|
+
const firstSampleAboveMinProgress = allSamplesSortedByOffset.findIndex((sample) => sample.track.trackId === Number(trackNumberWithLowestProgress) &&
|
|
23
|
+
!visited.has(getKey(sample)));
|
|
24
|
+
return firstSampleAboveMinProgress;
|
|
25
|
+
};
|
|
26
|
+
const calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
|
|
27
|
+
const progresses = {};
|
|
28
|
+
for (const track of samplePositionTracks) {
|
|
29
|
+
progresses[track[0].track.trackId] = 0;
|
|
30
|
+
}
|
|
31
|
+
const jumpMarks = [];
|
|
32
|
+
const allSamplesSortedByOffset = samplePositionTracks
|
|
33
|
+
.flat(1)
|
|
34
|
+
.sort((a, b) => a.samplePosition.offset - b.samplePosition.offset);
|
|
35
|
+
let indexToVisit = 0;
|
|
36
|
+
const visited = new Set();
|
|
37
|
+
let rollOverToProcess = false;
|
|
38
|
+
const increaseIndex = () => {
|
|
39
|
+
indexToVisit++;
|
|
40
|
+
if (indexToVisit >= allSamplesSortedByOffset.length) {
|
|
41
|
+
rollOverToProcess = true;
|
|
42
|
+
indexToVisit = 0;
|
|
43
|
+
}
|
|
44
|
+
};
|
|
45
|
+
let lastVisitedSample = null;
|
|
46
|
+
const addJumpMark = ({ firstSampleAboveMinProgress, }) => {
|
|
47
|
+
if (!lastVisitedSample) {
|
|
48
|
+
throw new Error('no last visited sample');
|
|
49
|
+
}
|
|
50
|
+
const jumpMark = {
|
|
51
|
+
afterSampleWithOffset: lastVisitedSample.samplePosition.offset,
|
|
52
|
+
jumpToOffset: allSamplesSortedByOffset[firstSampleAboveMinProgress].samplePosition
|
|
53
|
+
.offset,
|
|
54
|
+
};
|
|
55
|
+
indexToVisit = firstSampleAboveMinProgress;
|
|
56
|
+
jumpMarks.push(jumpMark);
|
|
57
|
+
};
|
|
58
|
+
const addFinalJumpIfNecessary = () => {
|
|
59
|
+
if (indexToVisit === allSamplesSortedByOffset.length - 1) {
|
|
60
|
+
return;
|
|
61
|
+
}
|
|
62
|
+
jumpMarks.push({
|
|
63
|
+
afterSampleWithOffset: allSamplesSortedByOffset[indexToVisit].samplePosition.offset,
|
|
64
|
+
jumpToOffset: endOfMdat,
|
|
65
|
+
});
|
|
66
|
+
};
|
|
67
|
+
const considerJump = () => {
|
|
68
|
+
const firstSampleAboveMinProgress = findBestJump({
|
|
69
|
+
allSamplesSortedByOffset,
|
|
70
|
+
visited,
|
|
71
|
+
progresses,
|
|
72
|
+
});
|
|
73
|
+
if (firstSampleAboveMinProgress > -1 &&
|
|
74
|
+
firstSampleAboveMinProgress !== indexToVisit + 1) {
|
|
75
|
+
addJumpMark({ firstSampleAboveMinProgress });
|
|
76
|
+
indexToVisit = firstSampleAboveMinProgress;
|
|
77
|
+
}
|
|
78
|
+
else {
|
|
79
|
+
while (true) {
|
|
80
|
+
increaseIndex();
|
|
81
|
+
if (!visited.has(getKey(allSamplesSortedByOffset[indexToVisit]))) {
|
|
82
|
+
break;
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
};
|
|
87
|
+
while (true) {
|
|
88
|
+
const currentSamplePosition = allSamplesSortedByOffset[indexToVisit];
|
|
89
|
+
const sampleKey = getKey(currentSamplePosition);
|
|
90
|
+
if (visited.has(sampleKey)) {
|
|
91
|
+
considerJump();
|
|
92
|
+
continue;
|
|
93
|
+
}
|
|
94
|
+
visited.add(sampleKey);
|
|
95
|
+
if (rollOverToProcess) {
|
|
96
|
+
if (!lastVisitedSample) {
|
|
97
|
+
throw new Error('no last visited sample');
|
|
98
|
+
}
|
|
99
|
+
jumpMarks.push({
|
|
100
|
+
afterSampleWithOffset: lastVisitedSample.samplePosition.offset,
|
|
101
|
+
jumpToOffset: currentSamplePosition.samplePosition.offset,
|
|
102
|
+
});
|
|
103
|
+
rollOverToProcess = false;
|
|
104
|
+
}
|
|
105
|
+
lastVisitedSample = currentSamplePosition;
|
|
106
|
+
if (visited.size === allSamplesSortedByOffset.length) {
|
|
107
|
+
addFinalJumpIfNecessary();
|
|
108
|
+
break;
|
|
109
|
+
}
|
|
110
|
+
const timestamp = currentSamplePosition.samplePosition.decodingTimestamp /
|
|
111
|
+
currentSamplePosition.track.originalTimescale;
|
|
112
|
+
progresses[currentSamplePosition.track.trackId] = timestamp;
|
|
113
|
+
const progressValues = Object.values(progresses);
|
|
114
|
+
const maxProgress = Math.max(...progressValues);
|
|
115
|
+
const minProgress = Math.min(...progressValues);
|
|
116
|
+
const spread = maxProgress - minProgress;
|
|
117
|
+
if (visited.size === allSamplesSortedByOffset.length) {
|
|
118
|
+
addFinalJumpIfNecessary();
|
|
119
|
+
break;
|
|
120
|
+
}
|
|
121
|
+
// Also don't allow audio progress to go more
|
|
122
|
+
if (spread > MAX_SPREAD_IN_SECONDS) {
|
|
123
|
+
considerJump();
|
|
124
|
+
}
|
|
125
|
+
else {
|
|
126
|
+
increaseIndex();
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
return jumpMarks;
|
|
130
|
+
};
|
|
131
|
+
exports.calculateJumpMarks = calculateJumpMarks;
|
|
@@ -92,7 +92,7 @@ const parseMdatSection = async (state) => {
|
|
|
92
92
|
return (0, skip_1.makeSkip)(endOfMdat);
|
|
93
93
|
}
|
|
94
94
|
const exactMatch = nextSampleArray.find((s) => s.samplePosition.offset === state.iterator.counter.getOffset());
|
|
95
|
-
const nextSample = exactMatch !== null && exactMatch !== void 0 ? exactMatch : nextSampleArray[0];
|
|
95
|
+
const nextSample = exactMatch !== null && exactMatch !== void 0 ? exactMatch : nextSampleArray.sort((a, b) => a.samplePosition.offset - b.samplePosition.offset)[0];
|
|
96
96
|
if (nextSample.samplePosition.offset !== state.iterator.counter.getOffset()) {
|
|
97
97
|
return (0, skip_1.makeSkip)(nextSample.samplePosition.offset);
|
|
98
98
|
}
|
|
import type { BufferIterator } from '../../iterator/buffer-iterator';
import type { BaseBox } from './base-type';
/**
 * The 3x3 transformation matrix stored in the `mvhd` box, flattened into
 * 9 numbers (rows of two 16.16 fixed-point values followed by one 2.30
 * fixed-point value, per the ISO base media file format).
 */
export type ThreeDMatrix = [
    number,
    number,
    number,
    number,
    number,
    number,
    number,
    number,
    number
];
/**
 * Parsed representation of an ISO base media `mvhd` (movie header) box.
 */
export interface MvhdBox extends BaseBox {
    durationInUnits: number;
    durationInSeconds: number;
    creationTime: number | null;
    modificationTime: number | null;
    timeScale: number;
    rate: number;
    volume: number;
    matrix: ThreeDMatrix;
    nextTrackId: number;
    type: 'mvhd-box';
}
/**
 * Parses an `mvhd` box starting at the current iterator position.
 *
 * @param iterator - positioned just past the box header.
 * @param offset - byte offset of the box within the file.
 * @param size - total box size in bytes; used to validate full consumption.
 */
export declare const parseMvhd: ({ iterator, offset, size, }: {
    iterator: BufferIterator;
    offset: number;
    size: number;
}) => MvhdBox;
|
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseMvhd = void 0;
const buffer_iterator_1 = require("../../iterator/buffer-iterator");
const to_date_1 = require("./to-date");
/**
 * Parses an ISO base media `mvhd` (movie header) box.
 *
 * @param iterator - positioned just past the box header.
 * @param offset - byte offset of the box within the file.
 * @param size - total box size in bytes; the parser throws if it does not
 *   consume exactly this many bytes.
 * @returns the parsed MvhdBox.
 */
const parseMvhd = ({ iterator, offset, size, }) => {
    const version = iterator.getUint8();
    // Flags, we discard them
    iterator.discard(3);
    // Version 1 boxes store 64-bit times/durations, version 0 stores 32-bit.
    const creationTime = version === 1 ? iterator.getUint64() : iterator.getUint32();
    const modificationTime = version === 1 ? iterator.getUint64() : iterator.getUint32();
    const timeScale = iterator.getUint32();
    const durationInUnits = version === 1 ? iterator.getUint64() : iterator.getUint32();
    const durationInSeconds = Number(durationInUnits) / timeScale;
    const rateArray = iterator.getSlice(4);
    const rateView = (0, buffer_iterator_1.getArrayBufferIterator)(rateArray, rateArray.length);
    // NOTE(review): reads the 4-byte rate field byte-by-byte as decimal
    // digits - presumably intentional; confirm against the 16.16 fixed-point
    // encoding in ISO/IEC 14496-12.
    const rate = rateView.getInt8() * 10 +
        rateView.getInt8() +
        rateView.getInt8() * 0.1 +
        rateView.getInt8() * 0.01;
    const volumeArray = iterator.getSlice(2);
    const volumeView = (0, buffer_iterator_1.getArrayBufferIterator)(volumeArray, volumeArray.length);
    const volume = volumeView.getInt8() + volumeView.getInt8() * 0.1;
    // reserved 16bit
    iterator.discard(2);
    // reserved 32bit x2
    iterator.discard(4);
    iterator.discard(4);
    // matrix
    const matrix = [
        iterator.getFixedPointSigned1616Number(),
        iterator.getFixedPointSigned1616Number(),
        iterator.getFixedPointSigned230Number(),
        iterator.getFixedPointSigned1616Number(),
        iterator.getFixedPointSigned1616Number(),
        iterator.getFixedPointSigned230Number(),
        iterator.getFixedPointSigned1616Number(),
        iterator.getFixedPointSigned1616Number(),
        iterator.getFixedPointSigned230Number(),
    ];
    // pre-defined
    iterator.discard(4 * 6);
    // next track id
    const nextTrackId = iterator.getUint32();
    // Fix: rateView was previously leaked - only volumeView was destroyed.
    rateView.destroy();
    volumeView.destroy();
    // Validate that exactly `size` bytes were consumed.
    const bytesRemaining = size - (iterator.counter.getOffset() - offset);
    if (bytesRemaining !== 0) {
        throw new Error('expected 0 bytes ' + bytesRemaining);
    }
    return {
        creationTime: (0, to_date_1.toUnixTimestamp)(Number(creationTime)),
        modificationTime: (0, to_date_1.toUnixTimestamp)(Number(modificationTime)),
        timeScale,
        durationInUnits: Number(durationInUnits),
        durationInSeconds,
        rate,
        volume,
        matrix: matrix,
        nextTrackId,
        type: 'mvhd-box',
        boxSize: size,
        offset,
    };
};
exports.parseMvhd = parseMvhd;
|
package/dist/esm/index.mjs
CHANGED
|
@@ -11899,7 +11899,7 @@ var parseMdatSection = async (state) => {
|
|
|
11899
11899
|
return makeSkip(endOfMdat);
|
|
11900
11900
|
}
|
|
11901
11901
|
const exactMatch = nextSampleArray.find((s) => s.samplePosition.offset === state.iterator.counter.getOffset());
|
|
11902
|
-
const nextSample = exactMatch ?? nextSampleArray[0];
|
|
11902
|
+
const nextSample = exactMatch ?? nextSampleArray.sort((a, b) => a.samplePosition.offset - b.samplePosition.offset)[0];
|
|
11903
11903
|
if (nextSample.samplePosition.offset !== state.iterator.counter.getOffset()) {
|
|
11904
11904
|
return makeSkip(nextSample.samplePosition.offset);
|
|
11905
11905
|
}
|
|
@@ -12503,7 +12503,7 @@ var makeFetchRequest = async ({
|
|
|
12503
12503
|
}) => {
|
|
12504
12504
|
const resolvedUrl = resolveUrl(src);
|
|
12505
12505
|
const resolvedUrlString = resolvedUrl.toString();
|
|
12506
|
-
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("http://")) {
|
|
12506
|
+
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("data:") && !resolvedUrlString.startsWith("http://")) {
|
|
12507
12507
|
return Promise.reject(new Error(`${resolvedUrlString} is not a URL - needs to start with http:// or https:// or blob:. If you want to read a local file, pass \`reader: nodeReader\` to parseMedia().`));
|
|
12508
12508
|
}
|
|
12509
12509
|
const ownController = new AbortController;
|
|
@@ -18187,7 +18187,7 @@ var downloadAndParseMedia = async (options) => {
|
|
|
18187
18187
|
return returnValue;
|
|
18188
18188
|
};
|
|
18189
18189
|
// src/version.ts
|
|
18190
|
-
var VERSION = "4.0.350";
|
|
18190
|
+
var VERSION = "4.0.351";
|
|
18191
18191
|
|
|
18192
18192
|
// src/index.ts
|
|
18193
18193
|
var MediaParserInternals = {
|
package/dist/esm/universal.mjs
CHANGED
|
@@ -153,7 +153,7 @@ var makeFetchRequest = async ({
|
|
|
153
153
|
}) => {
|
|
154
154
|
const resolvedUrl = resolveUrl(src);
|
|
155
155
|
const resolvedUrlString = resolvedUrl.toString();
|
|
156
|
-
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("http://")) {
|
|
156
|
+
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("data:") && !resolvedUrlString.startsWith("http://")) {
|
|
157
157
|
return Promise.reject(new Error(`${resolvedUrlString} is not a URL - needs to start with http:// or https:// or blob:. If you want to read a local file, pass \`reader: nodeReader\` to parseMedia().`));
|
|
158
158
|
}
|
|
159
159
|
const ownController = new AbortController;
|
package/dist/esm/web.mjs
CHANGED
|
@@ -153,7 +153,7 @@ var makeFetchRequest = async ({
|
|
|
153
153
|
}) => {
|
|
154
154
|
const resolvedUrl = resolveUrl(src);
|
|
155
155
|
const resolvedUrlString = resolvedUrl.toString();
|
|
156
|
-
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("http://")) {
|
|
156
|
+
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("data:") && !resolvedUrlString.startsWith("http://")) {
|
|
157
157
|
return Promise.reject(new Error(`${resolvedUrlString} is not a URL - needs to start with http:// or https:// or blob:. If you want to read a local file, pass \`reader: nodeReader\` to parseMedia().`));
|
|
158
158
|
}
|
|
159
159
|
const ownController = new AbortController;
|
|
@@ -222,7 +222,7 @@ var makeFetchRequest = async ({
|
|
|
222
222
|
}) => {
|
|
223
223
|
const resolvedUrl = resolveUrl(src);
|
|
224
224
|
const resolvedUrlString = resolvedUrl.toString();
|
|
225
|
-
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("http://")) {
|
|
225
|
+
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("data:") && !resolvedUrlString.startsWith("http://")) {
|
|
226
226
|
return Promise.reject(new Error(`${resolvedUrlString} is not a URL - needs to start with http:// or https:// or blob:. If you want to read a local file, pass \`reader: nodeReader\` to parseMedia().`));
|
|
227
227
|
}
|
|
228
228
|
const ownController = new AbortController;
|
|
@@ -11984,7 +11984,7 @@ var parseMdatSection = async (state) => {
|
|
|
11984
11984
|
return makeSkip(endOfMdat);
|
|
11985
11985
|
}
|
|
11986
11986
|
const exactMatch = nextSampleArray.find((s) => s.samplePosition.offset === state.iterator.counter.getOffset());
|
|
11987
|
-
const nextSample = exactMatch ?? nextSampleArray[0];
|
|
11987
|
+
const nextSample = exactMatch ?? nextSampleArray.sort((a, b) => a.samplePosition.offset - b.samplePosition.offset)[0];
|
|
11988
11988
|
if (nextSample.samplePosition.offset !== state.iterator.counter.getOffset()) {
|
|
11989
11989
|
return makeSkip(nextSample.samplePosition.offset);
|
|
11990
11990
|
}
|
|
@@ -222,7 +222,7 @@ var makeFetchRequest = async ({
|
|
|
222
222
|
}) => {
|
|
223
223
|
const resolvedUrl = resolveUrl(src);
|
|
224
224
|
const resolvedUrlString = resolvedUrl.toString();
|
|
225
|
-
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("http://")) {
|
|
225
|
+
if (!resolvedUrlString.startsWith("https://") && !resolvedUrlString.startsWith("blob:") && !resolvedUrlString.startsWith("data:") && !resolvedUrlString.startsWith("http://")) {
|
|
226
226
|
return Promise.reject(new Error(`${resolvedUrlString} is not a URL - needs to start with http:// or https:// or blob:. If you want to read a local file, pass \`reader: nodeReader\` to parseMedia().`));
|
|
227
227
|
}
|
|
228
228
|
const ownController = new AbortController;
|
|
@@ -11881,7 +11881,7 @@ var parseMdatSection = async (state) => {
|
|
|
11881
11881
|
return makeSkip(endOfMdat);
|
|
11882
11882
|
}
|
|
11883
11883
|
const exactMatch = nextSampleArray.find((s) => s.samplePosition.offset === state.iterator.counter.getOffset());
|
|
11884
|
-
const nextSample = exactMatch ?? nextSampleArray[0];
|
|
11884
|
+
const nextSample = exactMatch ?? nextSampleArray.sort((a, b) => a.samplePosition.offset - b.samplePosition.offset)[0];
|
|
11885
11885
|
if (nextSample.samplePosition.offset !== state.iterator.counter.getOffset()) {
|
|
11886
11886
|
return makeSkip(nextSample.samplePosition.offset);
|
|
11887
11887
|
}
|
|
@@ -51,6 +51,7 @@ const makeFetchRequest = async ({ range, src, controller, }) => {
|
|
|
51
51
|
const resolvedUrlString = resolvedUrl.toString();
|
|
52
52
|
if (!resolvedUrlString.startsWith('https://') &&
|
|
53
53
|
!resolvedUrlString.startsWith('blob:') &&
|
|
54
|
+
!resolvedUrlString.startsWith('data:') &&
|
|
54
55
|
!resolvedUrlString.startsWith('http://')) {
|
|
55
56
|
return Promise.reject(new Error(`${resolvedUrlString} is not a URL - needs to start with http:// or https:// or blob:. If you want to read a local file, pass \`reader: nodeReader\` to parseMedia().`));
|
|
56
57
|
}
|
package/dist/version.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export declare const VERSION = "4.0.350";
|
|
1
|
+
export declare const VERSION = "4.0.351";
|
package/dist/version.js
CHANGED
package/package.json
CHANGED
|
@@ -3,16 +3,16 @@
|
|
|
3
3
|
"url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-parser"
|
|
4
4
|
},
|
|
5
5
|
"name": "@remotion/media-parser",
|
|
6
|
-
"version": "4.0.350",
|
|
6
|
+
"version": "4.0.351",
|
|
7
7
|
"main": "dist/index.js",
|
|
8
8
|
"sideEffects": false,
|
|
9
9
|
"devDependencies": {
|
|
10
10
|
"@types/wicg-file-system-access": "2023.10.5",
|
|
11
11
|
"eslint": "9.19.0",
|
|
12
|
-
"mediabunny": "1.
|
|
12
|
+
"mediabunny": "1.17.0",
|
|
13
13
|
"@types/bun": "1.2.8",
|
|
14
|
-
"@remotion/example-videos": "4.0.350",
|
|
15
|
-
"@remotion/eslint-config-internal": "4.0.350"
|
|
14
|
+
"@remotion/example-videos": "4.0.351",
|
|
15
|
+
"@remotion/eslint-config-internal": "4.0.351"
|
|
16
16
|
},
|
|
17
17
|
"publishConfig": {
|
|
18
18
|
"access": "public"
|