@remotion/serverless-client 4.0.314 → 4.0.316
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-make.log +2 -2
- package/dist/compress-props.d.ts +4 -2
- package/dist/compress-props.js +5 -2
- package/dist/esm/index.mjs +170 -227
- package/dist/get-or-create-bucket.d.ts +2 -0
- package/dist/get-or-create-bucket.js +4 -0
- package/dist/get-overall-progress-from-storage.d.ts +2 -1
- package/dist/get-overall-progress-from-storage.js +2 -1
- package/dist/progress.d.ts +2 -1
- package/dist/progress.js +2 -1
- package/dist/provider-implementation.d.ts +10 -1
- package/dist/types.d.ts +3 -1
- package/package.json +5 -5
- package/src/compress-props.ts +7 -0
- package/src/get-or-create-bucket.ts +6 -0
- package/src/get-overall-progress-from-storage.ts +3 -0
- package/src/progress.ts +3 -0
- package/src/provider-implementation.ts +10 -0
- package/src/test/expected-out-name.test.ts +1 -0
- package/src/types.ts +3 -0
- package/tsconfig.tsbuildinfo +1 -1
- package/.turbo/turbo-test.log +0 -35
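
The change that repeats through the diffs below is a new `requestHandler` option: every public entry point of `@remotion/serverless-client` now accepts it and forwards it, unmodified, down to the `providerSpecifics` storage primitives (`getBuckets`, `createBucket`, `applyLifeCycle`, `writeFile`, `readFile`). A minimal sketch of that threading pattern — all type names, the function name, and the key below are illustrative stand-ins, not the package's real API:

```ts
// Illustrative stand-in for the package's CloudProvider shape.
type ProviderSketch = {
  region: string;
  requestHandler: unknown;
};

// A storage primitive in the style of providerSpecifics.readFile.
type ReadFile<P extends ProviderSketch> = (params: {
  bucketName: string;
  key: string;
  region: P['region'];
  forcePathStyle: boolean;
  requestHandler: P['requestHandler'] | null;
}) => Promise<string>;

// An entry point in the style of getOverallProgressFromStorage: it never
// interprets `requestHandler`, it only forwards it to the storage layer,
// which can hand it to its HTTP client (e.g. an AWS SDK request handler).
const readProgressSketch = async <P extends ProviderSketch>({
  readFile,
  bucketName,
  key,
  region,
  forcePathStyle,
  requestHandler,
}: {
  readFile: ReadFile<P>;
  bucketName: string;
  key: string;
  region: P['region'];
  forcePathStyle: boolean;
  requestHandler: P['requestHandler'] | null;
}): Promise<unknown> => {
  const body = await readFile({
    bucketName,
    key,
    region,
    forcePathStyle,
    requestHandler, // forwarded as-is
  });
  return JSON.parse(body);
};
```
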
package/.turbo/turbo-make.log
CHANGED
@@ -1,6 +1,6 @@
 
 
-> @remotion/serverless-client@4.0.
+> @remotion/serverless-client@4.0.316 make /Users/jonathanburger/remotion/packages/serverless-client
 > tsc -d && bun --env-file=../.env.bundle bundle.ts
 
-[0m[2m[[
+[0m[2m[[1m8.17ms[0m[2m][0m Generated.

package/dist/compress-props.d.ts
CHANGED
@@ -8,7 +8,7 @@ export declare const getNeedsToUpload: <Provider extends CloudProvider>({ type,
     sizes: number[];
     providerSpecifics: ProviderSpecifics<Provider>;
 }) => boolean;
-export declare const compressInputProps: <Provider extends CloudProvider>({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, }: {
+export declare const compressInputProps: <Provider extends CloudProvider>({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, requestHandler, }: {
     stringifiedInputProps: string;
     region: Provider["region"];
     userSpecifiedBucketName: string | null;
@@ -17,8 +17,9 @@ export declare const compressInputProps: <Provider extends CloudProvider>({ stri
     providerSpecifics: ProviderSpecifics<Provider>;
     forcePathStyle: boolean;
     skipPutAcl: boolean;
+    requestHandler: Provider["requestHandler"] | undefined;
 }) => Promise<SerializedInputProps>;
-export declare const decompressInputProps: <Provider extends CloudProvider>({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, forcePathStyle, }: {
+export declare const decompressInputProps: <Provider extends CloudProvider>({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, forcePathStyle, requestHandler, }: {
     serialized: SerializedInputProps;
     region: Provider["region"];
     bucketName: string;
@@ -26,5 +27,6 @@ export declare const decompressInputProps: <Provider extends CloudProvider>({ se
     propsType: PropsType;
     providerSpecifics: ProviderSpecifics<Provider>;
     forcePathStyle: boolean;
+    requestHandler: Provider["requestHandler"] | null;
 }) => Promise<string>;
 export {};

package/dist/compress-props.js
CHANGED
@@ -40,7 +40,7 @@ const getNeedsToUpload = ({ type, sizes, providerSpecifics, }) => {
     return false;
 };
 exports.getNeedsToUpload = getNeedsToUpload;
-const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, }) => {
+const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, requestHandler, }) => {
     const hash = providerSpecifics.randomHash();
     if (needsToUpload) {
         const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, get_or_create_bucket_1.internalGetOrCreateBucket)({
@@ -50,6 +50,7 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
            providerSpecifics,
            forcePathStyle,
            skipPutAcl,
+            requestHandler,
        })).bucketName;
        await providerSpecifics.writeFile({
            body: stringifiedInputProps,
@@ -62,6 +63,7 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
            privacy: 'private',
            forcePathStyle,
            storageClass: null,
+            requestHandler,
        });
        return {
            type: 'bucket-url',
@@ -75,7 +77,7 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
    };
 };
 exports.compressInputProps = compressInputProps;
-const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, forcePathStyle, }) => {
+const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, forcePathStyle, requestHandler, }) => {
     if (serialized.type === 'payload') {
         return serialized.payload;
     }
@@ -86,6 +88,7 @@ const decompressInputProps = async ({ serialized, region, bucketName, expectedBu
        key: makeKey(propsType, serialized.hash),
        region,
        forcePathStyle,
+        requestHandler,
    });
    const body = await (0, stream_to_string_1.streamToString)(response);
    const payload = body;

package/dist/esm/index.mjs
CHANGED
@@ -1,205 +1,3 @@
-var __create = Object.create;
-var __getProtoOf = Object.getPrototypeOf;
-var __defProp = Object.defineProperty;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __toESM = (mod, isNodeMode, target) => {
-  target = mod != null ? __create(__getProtoOf(mod)) : {};
-  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
-  for (let key of __getOwnPropNames(mod))
-    if (!__hasOwnProp.call(to, key))
-      __defProp(to, key, {
-        get: () => mod[key],
-        enumerable: true
-      });
-  return to;
-};
-var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
-
-// ../streaming/dist/make-stream-payload-message.js
-var require_make_stream_payload_message = __commonJS((exports) => {
-  Object.defineProperty(exports, "__esModule", { value: true });
-  exports.makeStreamPayloadMessage = exports.magicWordStr = undefined;
-  exports.magicWordStr = "remotion_buffer:";
-  var makeStreamPayloadMessage = ({ status, body, nonce }) => {
-    const nonceArr = new TextEncoder().encode(nonce);
-    const magicWordArr = new TextEncoder().encode(exports.magicWordStr);
-    const separatorArr = new TextEncoder().encode(":");
-    const bodyLengthArr = new TextEncoder().encode(body.length.toString());
-    const statusArr = new TextEncoder().encode(String(status));
-    const totalLength = nonceArr.length + magicWordArr.length + separatorArr.length * 3 + bodyLengthArr.length + statusArr.length + body.length;
-    const concat = new Uint8Array(totalLength);
-    let offset = 0;
-    const appendToConcat = (data) => {
-      concat.set(data, offset);
-      offset += data.length;
-    };
-    appendToConcat(magicWordArr);
-    appendToConcat(nonceArr);
-    appendToConcat(separatorArr);
-    appendToConcat(bodyLengthArr);
-    appendToConcat(separatorArr);
-    appendToConcat(statusArr);
-    appendToConcat(separatorArr);
-    appendToConcat(body);
-    return concat;
-  };
-  exports.makeStreamPayloadMessage = makeStreamPayloadMessage;
-});
-
-// ../streaming/dist/make-streamer.js
-var require_make_streamer = __commonJS((exports) => {
-  Object.defineProperty(exports, "__esModule", { value: true });
-  exports.makeStreamPayloadMessage = exports.makeStreamer = exports.streamingKey = undefined;
-  exports.streamingKey = "remotion_buffer:";
-  var makeStreamer = (onMessage) => {
-    const separator = new Uint8Array(exports.streamingKey.length);
-    for (let i = 0;i < exports.streamingKey.length; i++) {
-      separator[i] = exports.streamingKey.charCodeAt(i);
-    }
-    let unprocessedBuffers = [];
-    let outputBuffer = new Uint8Array(0);
-    let missingData = null;
-    const findSeparatorIndex = () => {
-      let searchIndex = 0;
-      while (true) {
-        const separatorIndex = outputBuffer.indexOf(separator[0], searchIndex);
-        if (separatorIndex === -1) {
-          return -1;
-        }
-        if (outputBuffer.subarray(separatorIndex, separatorIndex + separator.length).toString() !== separator.toString()) {
-          searchIndex = separatorIndex + 1;
-          continue;
-        }
-        return separatorIndex;
-      }
-    };
-    const processInput = () => {
-      let separatorIndex = findSeparatorIndex();
-      if (separatorIndex === -1) {
-        return;
-      }
-      separatorIndex += separator.length;
-      let nonceString = "";
-      let lengthString = "";
-      let statusString = "";
-      while (true) {
-        if (separatorIndex > outputBuffer.length - 1) {
-          return;
-        }
-        const nextDigit = outputBuffer[separatorIndex];
-        separatorIndex++;
-        if (nextDigit === 58) {
-          break;
-        }
-        nonceString += String.fromCharCode(nextDigit);
-      }
-      while (true) {
-        if (separatorIndex > outputBuffer.length - 1) {
-          return;
-        }
-        const nextDigit = outputBuffer[separatorIndex];
-        separatorIndex++;
-        if (nextDigit === 58) {
-          break;
-        }
-        lengthString += String.fromCharCode(nextDigit);
-      }
-      while (true) {
-        if (separatorIndex > outputBuffer.length - 1) {
-          return;
-        }
-        const nextDigit = outputBuffer[separatorIndex];
-        if (nextDigit === 58) {
-          break;
-        }
-        separatorIndex++;
-        statusString += String.fromCharCode(nextDigit);
-      }
-      const length = Number(lengthString);
-      const status = Number(statusString);
-      const dataLength = outputBuffer.length - separatorIndex - 1;
-      if (dataLength < length) {
-        missingData = {
-          dataMissing: length - dataLength
-        };
-        return;
-      }
-      const data = outputBuffer.subarray(separatorIndex + 1, separatorIndex + 1 + Number(lengthString));
-      onMessage(status === 1 ? "error" : "success", nonceString, data);
-      missingData = null;
-      outputBuffer = outputBuffer.subarray(separatorIndex + Number(lengthString) + 1);
-      processInput();
-    };
-    const onData = (data) => {
-      unprocessedBuffers.push(data);
-      if (missingData) {
-        missingData.dataMissing -= data.length;
-      }
-      if (missingData && missingData.dataMissing > 0) {
-        return;
-      }
-      const newBuffer = new Uint8Array(outputBuffer.length + unprocessedBuffers.reduce((acc, val) => acc + val.length, 0));
-      newBuffer.set(outputBuffer, 0);
-      let offset = outputBuffer.length;
-      for (const buf of unprocessedBuffers) {
-        newBuffer.set(buf, offset);
-        offset += buf.length;
-      }
-      outputBuffer = newBuffer;
-      unprocessedBuffers = [];
-      processInput();
-    };
-    return {
-      onData,
-      getOutputBuffer: () => outputBuffer,
-      clear: () => {
-        unprocessedBuffers = [];
-        outputBuffer = new Uint8Array(0);
-      }
-    };
-  };
-  exports.makeStreamer = makeStreamer;
-  var makeStreamPayloadMessage = ({ status, body, nonce }) => {
-    const nonceArr = new TextEncoder().encode(nonce);
-    const magicWordArr = new TextEncoder().encode(exports.streamingKey);
-    const separatorArr = new TextEncoder().encode(":");
-    const bodyLengthArr = new TextEncoder().encode(body.length.toString());
-    const statusArr = new TextEncoder().encode(String(status));
-    const totalLength = nonceArr.length + magicWordArr.length + separatorArr.length * 3 + bodyLengthArr.length + statusArr.length + body.length;
-    const concat = new Uint8Array(totalLength);
-    let offset = 0;
-    const appendToConcat = (data) => {
-      concat.set(data, offset);
-      offset += data.length;
-    };
-    appendToConcat(magicWordArr);
-    appendToConcat(nonceArr);
-    appendToConcat(separatorArr);
-    appendToConcat(bodyLengthArr);
-    appendToConcat(separatorArr);
-    appendToConcat(statusArr);
-    appendToConcat(separatorArr);
-    appendToConcat(body);
-    return concat;
-  };
-  exports.makeStreamPayloadMessage = makeStreamPayloadMessage;
-});
-
-// ../streaming/dist/index.js
-var require_dist = __commonJS((exports) => {
-  Object.defineProperty(exports, "__esModule", { value: true });
-  exports.makeStreamer = exports.makeStreamPayloadMessage = undefined;
-  var make_stream_payload_message_1 = require_make_stream_payload_message();
-  Object.defineProperty(exports, "makeStreamPayloadMessage", { enumerable: true, get: function() {
-    return make_stream_payload_message_1.makeStreamPayloadMessage;
-  } });
-  var make_streamer_1 = require_make_streamer();
-  Object.defineProperty(exports, "makeStreamer", { enumerable: true, get: function() {
-    return make_streamer_1.makeStreamer;
-  } });
-});
-
 // ../core/dist/esm/no-react.mjs
 function interpolateFunction(input, inputRange, outputRange, options) {
   const { extrapolateLeft, extrapolateRight, easing } = options;
@@ -785,6 +583,9 @@ var getAbsoluteSrc = (relativeSrc) => {
   if (typeof window === "undefined") {
     return relativeSrc;
   }
+  if (relativeSrc.startsWith("http://") || relativeSrc.startsWith("https://") || relativeSrc.startsWith("file://") || relativeSrc.startsWith("blob:") || relativeSrc.startsWith("data:")) {
+    return relativeSrc;
+  }
   return new URL(relativeSrc, window.origin).href;
 };
 var getOffthreadVideoSource = ({
@@ -971,7 +772,7 @@ var validateFramesPerFunction = ({
 import * as tty from "tty";
 
 // ../core/dist/esm/version.mjs
-var VERSION = "4.0.
+var VERSION = "4.0.316";
 
 // ../renderer/dist/esm/error-handling.mjs
 var isColorSupported = () => {
@@ -1303,10 +1104,143 @@ var wrapWithErrorHandling = (fn) => {
     }
   };
 };
-
-
-var
-
+// ../streaming/dist/esm/index.mjs
+var magicWordStr = "remotion_buffer:";
+var makeStreamPayloadMessage = ({
+  status,
+  body,
+  nonce
+}) => {
+  const nonceArr = new TextEncoder().encode(nonce);
+  const magicWordArr = new TextEncoder().encode(magicWordStr);
+  const separatorArr = new TextEncoder().encode(":");
+  const bodyLengthArr = new TextEncoder().encode(body.length.toString());
+  const statusArr = new TextEncoder().encode(String(status));
+  const totalLength = nonceArr.length + magicWordArr.length + separatorArr.length * 3 + bodyLengthArr.length + statusArr.length + body.length;
+  const concat = new Uint8Array(totalLength);
+  let offset = 0;
+  const appendToConcat = (data) => {
+    concat.set(data, offset);
+    offset += data.length;
+  };
+  appendToConcat(magicWordArr);
+  appendToConcat(nonceArr);
+  appendToConcat(separatorArr);
+  appendToConcat(bodyLengthArr);
+  appendToConcat(separatorArr);
+  appendToConcat(statusArr);
+  appendToConcat(separatorArr);
+  appendToConcat(body);
+  return concat;
+};
+var streamingKey = "remotion_buffer:";
+var makeStreamer = (onMessage) => {
+  const separator = new Uint8Array(streamingKey.length);
+  for (let i = 0;i < streamingKey.length; i++) {
+    separator[i] = streamingKey.charCodeAt(i);
+  }
+  let unprocessedBuffers = [];
+  let outputBuffer = new Uint8Array(0);
+  let missingData = null;
+  const findSeparatorIndex = () => {
+    let searchIndex = 0;
+    while (true) {
+      const separatorIndex = outputBuffer.indexOf(separator[0], searchIndex);
+      if (separatorIndex === -1) {
+        return -1;
+      }
+      if (outputBuffer.subarray(separatorIndex, separatorIndex + separator.length).toString() !== separator.toString()) {
+        searchIndex = separatorIndex + 1;
+        continue;
+      }
+      return separatorIndex;
+    }
+  };
+  const processInput = () => {
+    let separatorIndex = findSeparatorIndex();
+    if (separatorIndex === -1) {
+      return;
+    }
+    separatorIndex += separator.length;
+    let nonceString = "";
+    let lengthString = "";
+    let statusString = "";
+    while (true) {
+      if (separatorIndex > outputBuffer.length - 1) {
+        return;
+      }
+      const nextDigit = outputBuffer[separatorIndex];
+      separatorIndex++;
+      if (nextDigit === 58) {
+        break;
+      }
+      nonceString += String.fromCharCode(nextDigit);
+    }
+    while (true) {
+      if (separatorIndex > outputBuffer.length - 1) {
+        return;
+      }
+      const nextDigit = outputBuffer[separatorIndex];
+      separatorIndex++;
+      if (nextDigit === 58) {
+        break;
+      }
+      lengthString += String.fromCharCode(nextDigit);
+    }
+    while (true) {
+      if (separatorIndex > outputBuffer.length - 1) {
+        return;
+      }
+      const nextDigit = outputBuffer[separatorIndex];
+      if (nextDigit === 58) {
+        break;
+      }
+      separatorIndex++;
+      statusString += String.fromCharCode(nextDigit);
+    }
+    const length = Number(lengthString);
+    const status = Number(statusString);
+    const dataLength = outputBuffer.length - separatorIndex - 1;
+    if (dataLength < length) {
+      missingData = {
+        dataMissing: length - dataLength
+      };
+      return;
+    }
+    const data = outputBuffer.subarray(separatorIndex + 1, separatorIndex + 1 + Number(lengthString));
+    onMessage(status === 1 ? "error" : "success", nonceString, data);
+    missingData = null;
+    outputBuffer = outputBuffer.subarray(separatorIndex + Number(lengthString) + 1);
+    processInput();
+  };
+  const onData = (data) => {
+    unprocessedBuffers.push(data);
+    if (missingData) {
+      missingData.dataMissing -= data.length;
+    }
+    if (missingData && missingData.dataMissing > 0) {
+      return;
+    }
+    const newBuffer = new Uint8Array(outputBuffer.length + unprocessedBuffers.reduce((acc, val) => acc + val.length, 0));
+    newBuffer.set(outputBuffer, 0);
+    let offset = outputBuffer.length;
+    for (const buf of unprocessedBuffers) {
+      newBuffer.set(buf, offset);
+      offset += buf.length;
+    }
+    outputBuffer = newBuffer;
+    unprocessedBuffers = [];
+    processInput();
+  };
+  return {
+    onData,
+    getOutputBuffer: () => outputBuffer,
+    clear: () => {
+      unprocessedBuffers = [];
+      outputBuffer = new Uint8Array(0);
+    }
+  };
+};
 // src/min-max.ts
 var min = (arr) => {
   if (arr.length === 0) {
@@ -1371,7 +1305,8 @@ var internalGetOrCreateBucket = async (params) => {
   const remotionBuckets = await params.providerSpecifics.getBuckets({
     region: params.region,
     forceBucketName: null,
-    forcePathStyle: params.forcePathStyle
+    forcePathStyle: params.forcePathStyle,
+    requestHandler: params.requestHandler
   });
   if (remotionBuckets.length > 1) {
     throw new Error(`You have multiple buckets (${remotionBuckets.map((b) => b.name)}) in your S3 region (${params.region}) starting with "${params.providerSpecifics.getBucketPrefix()}". Please see https://remotion.dev/docs/lambda/multiple-buckets.`);
@@ -1384,7 +1319,8 @@ var internalGetOrCreateBucket = async (params) => {
      bucketName: existingBucketName,
      region,
      customCredentials: params.customCredentials,
-      forcePathStyle: params.forcePathStyle
+      forcePathStyle: params.forcePathStyle,
+      requestHandler: params.requestHandler
    });
    return { bucketName: remotionBuckets[0].name, alreadyExisted: true };
  }
@@ -1393,14 +1329,16 @@ var internalGetOrCreateBucket = async (params) => {
    bucketName,
    region: params.region,
    forcePathStyle: params.forcePathStyle,
-    skipPutAcl: params.skipPutAcl
+    skipPutAcl: params.skipPutAcl,
+    requestHandler: params.requestHandler
  });
  await params.providerSpecifics.applyLifeCycle({
    enableFolderExpiry: enableFolderExpiry ?? null,
    bucketName,
    region,
    customCredentials: params.customCredentials,
-    forcePathStyle: params.forcePathStyle
+    forcePathStyle: params.forcePathStyle,
+    requestHandler: params.requestHandler
  });
  return { bucketName, alreadyExisted: false };
 };
@@ -1481,7 +1419,8 @@ var compressInputProps = async ({
   needsToUpload,
   providerSpecifics,
   forcePathStyle,
-  skipPutAcl
+  skipPutAcl,
+  requestHandler
 }) => {
   const hash = providerSpecifics.randomHash();
   if (needsToUpload) {
@@ -1491,7 +1430,8 @@ var compressInputProps = async ({
      customCredentials: null,
      providerSpecifics,
      forcePathStyle,
-      skipPutAcl
+      skipPutAcl,
+      requestHandler
    })).bucketName;
    await providerSpecifics.writeFile({
      body: stringifiedInputProps,
@@ -1503,7 +1443,8 @@ var compressInputProps = async ({
      key: makeKey(propsType, hash),
      privacy: "private",
      forcePathStyle,
-      storageClass: null
+      storageClass: null,
+      requestHandler
    });
    return {
      type: "bucket-url",
@@ -1523,7 +1464,8 @@ var decompressInputProps = async ({
   expectedBucketOwner,
   propsType,
   providerSpecifics,
-  forcePathStyle
+  forcePathStyle,
+  requestHandler
 }) => {
   if (serialized.type === "payload") {
     return serialized.payload;
@@ -1534,7 +1476,8 @@ var decompressInputProps = async ({
    expectedBucketOwner,
    key: makeKey(propsType, serialized.hash),
    region,
-    forcePathStyle
+    forcePathStyle,
+    requestHandler
  });
  const body = await streamToString(response);
  const payload = body;
@@ -2254,7 +2197,8 @@ var getOverallProgressFromStorage = async ({
   expectedBucketOwner,
   region,
   providerSpecifics,
-  forcePathStyle
+  forcePathStyle,
+  requestHandler
 }) => {
   try {
     const Body = await providerSpecifics.readFile({
@@ -2262,7 +2206,8 @@ var getOverallProgressFromStorage = async ({
      key: overallProgressKey(renderId),
      expectedBucketOwner,
      region,
-      forcePathStyle
+      forcePathStyle,
+      requestHandler
    });
    const str = await streamToString(Body);
    return JSON.parse(str);
@@ -2507,7 +2452,8 @@ var getProgress = async ({
   customCredentials,
   providerSpecifics,
   forcePathStyle,
-  functionName
+  functionName,
+  requestHandler
 }) => {
   const overallProgress = await getOverallProgressFromStorage({
     renderId,
@@ -2515,7 +2461,8 @@ var getProgress = async ({
    expectedBucketOwner,
    region,
    providerSpecifics,
-    forcePathStyle
+    forcePathStyle,
+    requestHandler
  });
  if (overallProgress.postRenderData) {
    if (!overallProgress.renderMetadata) {
@@ -2727,7 +2674,6 @@ var getProgress = async ({
   };
 };
 // src/streaming/streaming.ts
-var import_streaming = __toESM(require_dist(), 1);
 var framesRendered = "frames-rendered";
 var errorOccurred = "error-occurred";
 var renderIdDetermined = "render-id-determined";
@@ -2777,7 +2723,7 @@ var makeStreamPayload = ({
   message
 }) => {
   const body = formatMap[message.type] === "json" ? new TextEncoder().encode(JSON.stringify(message.payload)) : message.payload;
-  return
+  return makeStreamPayloadMessage({
     body,
     nonce: messageTypeToMessageId(message.type),
     status: 0
@@ -2803,9 +2749,6 @@ var {
   validateDimension: validateDimension2,
   validateDurationInFrames: validateDurationInFrames2
 } = NoReactInternals;
-var export_makeStreamer = import_streaming2.makeStreamer;
-var export_makeStreamPayloadMessage = import_streaming2.makeStreamPayloadMessage;
-
 export {
   wrapWithErrorHandling,
   validateWebhook,
@@ -2830,8 +2773,8 @@ export {
   outStillName,
   outName,
   messageTypeIdToMessageType,
-
-
+  makeStreamer,
+  makeStreamPayloadMessage,
   makeStreamPayload,
   makeBucketName,
   isErrInsufficientResourcesErr,
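
The bundle now inlines `../streaming/dist/esm/index.mjs` as plain module-level functions and exports `makeStreamer` and `makeStreamPayloadMessage` directly, where it previously re-exported them through the deleted `__commonJS`/`__toESM` shim. A round trip of the `remotion_buffer:` framing the two functions implement — the import specifier is an assumption (the package root, as the new export list suggests); the rest follows the inlined code above:

```ts
import {makeStreamer, makeStreamPayloadMessage} from '@remotion/serverless-client';

// Frame layout: "remotion_buffer:" + nonce + ":" + bodyLength + ":" + status + ":" + body
const frame = makeStreamPayloadMessage({
  nonce: '42', // message id; any string without ":"
  status: 0, // 0 = success, 1 = error
  body: new TextEncoder().encode('{"renderId":"abc123"}'),
});

const streamer = makeStreamer((status, nonce, data) => {
  console.log(status, nonce, new TextDecoder().decode(data));
});

// Chunk boundaries are arbitrary; the parser buffers until the full frame has arrived.
streamer.onData(frame.subarray(0, 10));
streamer.onData(frame.subarray(10));
// -> success 42 {"renderId":"abc123"}
```
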
package/dist/get-or-create-bucket.d.ts
CHANGED
@@ -8,12 +8,14 @@ type GetOrCreateBucketInputInner<Provider extends CloudProvider> = {
     providerSpecifics: ProviderSpecifics<Provider>;
     forcePathStyle: boolean;
     skipPutAcl: boolean;
+    requestHandler: Provider['requestHandler'] | null;
 };
 export type GetOrCreateBucketInput<Provider extends CloudProvider> = {
     region: Provider['region'];
     enableFolderExpiry?: boolean;
     customCredentials?: CustomCredentials<Provider>;
     forcePathStyle?: boolean;
+    requestHandler?: Provider['requestHandler'];
 };
 export type GetOrCreateBucketOutput = {
     bucketName: string;
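
Note the asymmetry this hunk introduces: the public `GetOrCreateBucketInput` takes `requestHandler` as optional, while the inner input requires it as a nullable field, so the public wrapper has to normalize `undefined` to `null` before calling inward. A self-contained sketch of that boundary, with local stand-in types instead of the real `CloudProvider`/`ProviderSpecifics` machinery:

```ts
type ProviderSketch = {
  region: string;
  requestHandler: unknown;
};

type PublicInput<P extends ProviderSketch> = {
  region: P['region'];
  forcePathStyle?: boolean;
  requestHandler?: P['requestHandler']; // optional at the public boundary
};

type InnerInput<P extends ProviderSketch> = {
  region: P['region'];
  forcePathStyle: boolean;
  requestHandler: P['requestHandler'] | null; // required (nullable) internally
};

// Normalize once at the boundary so inner code never sees `undefined`.
const toInnerInput = <P extends ProviderSketch>(
  input: PublicInput<P>,
): InnerInput<P> => ({
  region: input.region,
  forcePathStyle: input.forcePathStyle ?? false,
  requestHandler: input.requestHandler ?? null,
});
```
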
package/dist/get-or-create-bucket.js
CHANGED
@@ -7,6 +7,7 @@ const internalGetOrCreateBucket = async (params) => {
        region: params.region,
        forceBucketName: null,
        forcePathStyle: params.forcePathStyle,
+        requestHandler: params.requestHandler,
    });
    if (remotionBuckets.length > 1) {
        throw new Error(`You have multiple buckets (${remotionBuckets.map((b) => b.name)}) in your S3 region (${params.region}) starting with "${params.providerSpecifics.getBucketPrefix()}". Please see https://remotion.dev/docs/lambda/multiple-buckets.`);
@@ -21,6 +22,7 @@ const internalGetOrCreateBucket = async (params) => {
            region,
            customCredentials: params.customCredentials,
            forcePathStyle: params.forcePathStyle,
+            requestHandler: params.requestHandler,
        });
        return { bucketName: remotionBuckets[0].name, alreadyExisted: true };
    }
@@ -30,6 +32,7 @@ const internalGetOrCreateBucket = async (params) => {
        region: params.region,
        forcePathStyle: params.forcePathStyle,
        skipPutAcl: params.skipPutAcl,
+        requestHandler: params.requestHandler,
    });
    // apply to newly created bucket
    await params.providerSpecifics.applyLifeCycle({
@@ -38,6 +41,7 @@ const internalGetOrCreateBucket = async (params) => {
        region,
        customCredentials: params.customCredentials,
        forcePathStyle: params.forcePathStyle,
+        requestHandler: params.requestHandler,
    });
    return { bucketName, alreadyExisted: false };
 };
package/dist/get-overall-progress-from-storage.d.ts
CHANGED
@@ -1,11 +1,12 @@
 import type { OverallRenderProgress } from './overall-render-progress';
 import type { ProviderSpecifics } from './provider-implementation';
 import type { CloudProvider } from './types';
-export declare const getOverallProgressFromStorage: <Provider extends CloudProvider>({ renderId, bucketName, expectedBucketOwner, region, providerSpecifics, forcePathStyle, }: {
+export declare const getOverallProgressFromStorage: <Provider extends CloudProvider>({ renderId, bucketName, expectedBucketOwner, region, providerSpecifics, forcePathStyle, requestHandler, }: {
     renderId: string;
     expectedBucketOwner: string | null;
     bucketName: string;
     region: Provider["region"];
     providerSpecifics: ProviderSpecifics<Provider>;
     forcePathStyle: boolean;
+    requestHandler: Provider["requestHandler"] | null;
 }) => Promise<OverallRenderProgress<Provider>>;
package/dist/get-overall-progress-from-storage.js
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.getOverallProgressFromStorage = void 0;
 const constants_1 = require("./constants");
 const stream_to_string_1 = require("./stream-to-string");
-const getOverallProgressFromStorage = async ({ renderId, bucketName, expectedBucketOwner, region, providerSpecifics, forcePathStyle, }) => {
+const getOverallProgressFromStorage = async ({ renderId, bucketName, expectedBucketOwner, region, providerSpecifics, forcePathStyle, requestHandler, }) => {
     try {
         const Body = await providerSpecifics.readFile({
             bucketName,
@@ -11,6 +11,7 @@ const getOverallProgressFromStorage = async ({ renderId, bucketName, expectedBuc
            expectedBucketOwner,
            region,
            forcePathStyle,
+            requestHandler,
        });
        const str = await (0, stream_to_string_1.streamToString)(Body);
        return JSON.parse(str);