@mux/ai 0.1.2 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +10 -18
- package/README.md +493 -171
- package/dist/index-BNnz9P_5.d.mts +144 -0
- package/dist/index-Bnv7tv90.d.ts +477 -0
- package/dist/index-DyTSka2R.d.ts +144 -0
- package/dist/index-vJ5r2FNm.d.mts +477 -0
- package/dist/index.d.mts +13 -0
- package/dist/index.d.ts +10 -161
- package/dist/index.js +1784 -1304
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2205 -0
- package/dist/index.mjs.map +1 -0
- package/dist/primitives/index.d.mts +3 -0
- package/dist/primitives/index.d.ts +3 -0
- package/dist/primitives/index.js +409 -0
- package/dist/primitives/index.js.map +1 -0
- package/dist/primitives/index.mjs +358 -0
- package/dist/primitives/index.mjs.map +1 -0
- package/dist/types-ktXDZ93V.d.mts +137 -0
- package/dist/types-ktXDZ93V.d.ts +137 -0
- package/dist/workflows/index.d.mts +8 -0
- package/dist/workflows/index.d.ts +8 -0
- package/dist/workflows/index.js +2217 -0
- package/dist/workflows/index.js.map +1 -0
- package/dist/workflows/index.mjs +2168 -0
- package/dist/workflows/index.mjs.map +1 -0
- package/package.json +103 -30
- package/dist/index.cjs +0 -1773
- package/dist/index.cjs.map +0 -1
- package/dist/index.d.cts +0 -164
|
@@ -0,0 +1,358 @@
|
|
|
1
|
+
// src/lib/url-signing.ts
|
|
2
|
+
import Mux from "@mux/mux-node";
|
|
3
|
+
|
|
4
|
+
// src/env.ts
|
|
5
|
+
import path from "path";
|
|
6
|
+
import { config } from "dotenv";
|
|
7
|
+
import { expand } from "dotenv-expand";
|
|
8
|
+
import { z } from "zod";
|
|
9
|
+
// Load environment variables at module load, before EnvSchema validation runs.
// Picks .env.test when NODE_ENV === "test", otherwise .env, resolved from the
// current working directory; dotenv-expand resolves ${VAR} references in values.
expand(config({
  path: path.resolve(
    process.cwd(),
    process.env.NODE_ENV === "test" ? ".env.test" : ".env"
  )
}));
|
|
15
|
+
/**
 * Builds an optional zod string schema where blank input counts as absent.
 *
 * @param description - Human-readable description attached to the schema.
 * @param message - Validation message for non-blank strings that trim to empty.
 * @returns Zod schema: whitespace-only strings preprocess to undefined.
 */
function optionalString(description, message) {
  // Whitespace-only strings are normalized to undefined so `.optional()` accepts them.
  const blankToUndefined = (value) => {
    if (typeof value === "string" && value.trim().length === 0) {
      return void 0;
    }
    return value;
  };
  const schema = z.string().trim().min(1, message).optional();
  return z.preprocess(blankToUndefined, schema).describe(description);
}
|
|
21
|
+
/**
 * Builds a required zod string schema with trimming.
 * Blank strings are normalized to undefined so they fail the required check
 * with the supplied message instead of passing as empty strings.
 *
 * @param description - Human-readable description attached to the schema.
 * @param message - Validation message shown when the value is missing/blank.
 */
function requiredString(description, message) {
  const normalize = (value) => {
    if (typeof value !== "string") {
      return value;
    }
    const trimmed = value.trim();
    // Empty-after-trim becomes undefined → triggers the min(1) failure.
    return trimmed.length > 0 ? trimmed : void 0;
  };
  return z.preprocess(normalize, z.string().trim().min(1, message)).describe(description);
}
|
|
27
|
+
// Zod schema for every environment variable this bundle reads.
// requiredString/optionalString (defined above) normalize blank strings:
// blanks on required keys fail validation; blanks on optional keys become undefined.
var EnvSchema = z.object({
  NODE_ENV: z.string().default("development").describe("Runtime environment."),
  // Mux API credentials — mandatory for all API access.
  MUX_TOKEN_ID: requiredString("Mux access token ID.", "Required to access Mux APIs"),
  MUX_TOKEN_SECRET: requiredString("Mux access token secret.", "Required to access Mux APIs"),
  // URL-signing key pair — only needed when generating signed playback URLs.
  MUX_SIGNING_KEY: optionalString("Mux signing key ID for signed playback URLs.", "Used to sign playback URLs"),
  MUX_PRIVATE_KEY: optionalString("Mux signing private key for signed playback URLs.", "Used to sign playback URLs"),
  // Optional AI-provider credentials.
  OPENAI_API_KEY: optionalString("OpenAI API key for OpenAI-backed workflows.", "OpenAI API key"),
  ANTHROPIC_API_KEY: optionalString("Anthropic API key for Claude-backed workflows.", "Anthropic API key"),
  GOOGLE_GENERATIVE_AI_API_KEY: optionalString("Google Generative AI API key for Gemini-backed workflows.", "Google Generative AI API key"),
  ELEVENLABS_API_KEY: optionalString("ElevenLabs API key for audio translation.", "ElevenLabs API key"),
  HIVE_API_KEY: optionalString("Hive Visual Moderation API key.", "Hive API key"),
  // Optional S3-compatible storage configuration.
  S3_ENDPOINT: optionalString("S3-compatible endpoint for uploads.", "S3 endpoint"),
  S3_REGION: optionalString("S3 region (defaults to 'auto' when omitted)."),
  S3_BUCKET: optionalString("Bucket used for caption and audio uploads.", "S3 bucket"),
  S3_ACCESS_KEY_ID: optionalString("Access key ID for S3-compatible uploads.", "S3 access key id"),
  S3_SECRET_ACCESS_KEY: optionalString("Secret access key for S3-compatible uploads.", "S3 secret access key")
});
|
|
44
|
+
/**
 * Validates process.env against EnvSchema.
 * On failure, prints the per-field errors and terminates the process with
 * exit code 1 — this never returns an invalid environment.
 *
 * @returns The parsed, validated environment object.
 */
function parseEnv() {
  const result = EnvSchema.safeParse(process.env);
  if (result.success) {
    return result.data;
  }
  console.error("\u274C Invalid env:");
  const fieldErrors = result.error.flatten().fieldErrors;
  console.error(JSON.stringify(fieldErrors, null, 2));
  process.exit(1);
}
|
|
53
|
+
// Validate the environment once at module load; if required Mux credentials
// are missing, parseEnv exits the process here.
var env = parseEnv();
var env_default = env;
|
|
55
|
+
|
|
56
|
+
// src/lib/url-signing.ts
|
|
57
|
+
/**
 * Creates a Mux SDK client configured for JWT signing.
 * Only the client's jwt helpers are used by callers; API credentials fall
 * back to empty strings because the SDK constructor requires them.
 *
 * @param context - { keyId, keySecret } signing credentials.
 * @returns A Mux client instance usable for client.jwt.* operations.
 */
function createSigningClient(context) {
  return new Mux({
    // These are not needed for signing, but the SDK requires them
    // Using empty strings as we only need the jwt functionality
    tokenId: env_default.MUX_TOKEN_ID || "",
    tokenSecret: env_default.MUX_TOKEN_SECRET || "",
    jwtSigningKey: context.keyId,
    jwtPrivateKey: context.keySecret
  });
}
|
|
67
|
+
/**
 * Generates a signed JWT token for a Mux playback ID.
 *
 * @param playbackId - The Mux playback ID to sign.
 * @param context - Signing context ({ keyId, keySecret, expiration? }).
 * @param type - Token type ("video" | "thumbnail" | "storyboard" | ...); defaults to "video".
 * @param params - Optional extra token params; values are stringified for the SDK.
 * @returns Promise resolving to the signed JWT.
 */
async function signPlaybackId(playbackId, context, type = "video", params) {
  const client = createSigningClient(context);
  // The SDK expects string values, so coerce every param value.
  let stringParams;
  if (params) {
    stringParams = {};
    for (const [key, value] of Object.entries(params)) {
      stringParams[key] = String(value);
    }
  }
  return client.jwt.signPlaybackId(playbackId, {
    type,
    expiration: context.expiration || "1h",
    params: stringParams
  });
}
|
|
78
|
+
/**
 * Appends a signed token to a Mux URL as a `token` query parameter.
 *
 * @param url - Base Mux URL (may already carry a query string).
 * @param playbackId - The Mux playback ID.
 * @param context - Signing context with key credentials.
 * @param type - Token type for the URL; defaults to "video".
 * @param params - Extra token parameters forwarded to signPlaybackId.
 * @returns The URL with `token=<jwt>` appended.
 */
async function signUrl(url, playbackId, context, type = "video", params) {
  const token = await signPlaybackId(playbackId, context, type, params);
  // Use "&" when a query string already exists, "?" otherwise.
  if (url.includes("?")) {
    return `${url}&token=${token}`;
  }
  return `${url}?token=${token}`;
}
|
|
83
|
+
|
|
84
|
+
// src/primitives/storyboards.ts
|
|
85
|
+
var DEFAULT_STORYBOARD_WIDTH = 640;

/**
 * Builds a storyboard image URL for a playback ID.
 * When a signing context is supplied, the URL is signed (width travels in the
 * token); otherwise width is appended as a plain query parameter.
 *
 * @param playbackId - The Mux playback ID.
 * @param width - Storyboard width in pixels (default 640).
 * @param signingContext - Optional signing context for signed playback IDs.
 * @returns Promise resolving to the storyboard URL.
 */
async function getStoryboardUrl(playbackId, width = DEFAULT_STORYBOARD_WIDTH, signingContext) {
  const baseUrl = `https://image.mux.com/${playbackId}/storyboard.png`;
  if (!signingContext) {
    return `${baseUrl}?width=${width}`;
  }
  return signUrl(baseUrl, playbackId, signingContext, "storyboard", { width });
}
|
|
93
|
+
|
|
94
|
+
// src/primitives/text-chunking.ts
|
|
95
|
+
/**
 * Approximates a token count from word count (1 token ≈ 0.75 words).
 * Fix: empty or whitespace-only input now returns 0. Previously
 * "".trim().split(/\s+/) yielded [""] (length 1), so empty text was
 * reported as 2 tokens.
 *
 * @param text - Text to estimate.
 * @returns Approximate token count (0 for blank input).
 */
function estimateTokenCount(text) {
  const trimmed = text.trim();
  if (!trimmed) {
    return 0;
  }
  const words = trimmed.split(/\s+/).length;
  return Math.ceil(words / 0.75);
}
|
|
99
|
+
/**
 * Splits text into (optionally overlapping) chunks by approximate token count.
 * Token budgets are converted to word counts using 1 token ≈ 0.75 words.
 *
 * @param text - The text to chunk.
 * @param maxTokens - Maximum tokens per chunk.
 * @param overlapTokens - Tokens shared between consecutive chunks (default 0).
 * @returns Array of { id, text, tokenCount } chunks; [] for blank input.
 */
function chunkByTokens(text, maxTokens, overlapTokens = 0) {
  if (!text.trim()) {
    return [];
  }
  const chunks = [];
  const words = text.trim().split(/\s+/);
  // Convert token budgets into approximate word counts.
  const wordsPerChunk = Math.floor(maxTokens * 0.75);
  const overlapWords = Math.floor(overlapTokens * 0.75);
  let chunkIndex = 0;
  let currentPosition = 0;
  while (currentPosition < words.length) {
    const chunkWords = words.slice(
      currentPosition,
      currentPosition + wordsPerChunk
    );
    const chunkText2 = chunkWords.join(" ");
    const tokenCount = estimateTokenCount(chunkText2);
    chunks.push({
      id: `chunk-${chunkIndex}`,
      text: chunkText2,
      tokenCount
    });
    // Advance by chunk size minus the overlap carried into the next chunk.
    currentPosition += wordsPerChunk - overlapWords;
    chunkIndex++;
    // Infinite-loop guard: currentPosition always equals
    // chunkIndex * (wordsPerChunk - overlapWords), so this condition fires
    // exactly when the step is <= 0 (overlap >= chunk size).
    if (currentPosition <= (chunkIndex - 1) * (wordsPerChunk - overlapWords)) {
      break;
    }
  }
  return chunks;
}
|
|
129
|
+
/**
 * Builds a text chunk from a run of VTT cues, preserving the timing span.
 *
 * @param cues - Non-empty array of { startTime, endTime, text } cues.
 * @param index - Position of the chunk, used to build its id.
 * @returns Chunk object with joined text, token estimate, and start/end times.
 */
function createChunkFromCues(cues, index) {
  const combinedText = cues.map((cue) => cue.text).join(" ");
  const firstCue = cues[0];
  const lastCue = cues[cues.length - 1];
  return {
    id: `chunk-${index}`,
    text: combinedText,
    tokenCount: estimateTokenCount(combinedText),
    startTime: firstCue.startTime,
    endTime: lastCue.endTime
  };
}
|
|
139
|
+
/**
 * Groups VTT cues into chunks that respect cue boundaries, so each chunk
 * keeps accurate start/end timestamps.
 *
 * @param cues - Parsed VTT cues ({ startTime, endTime, text }).
 * @param maxTokens - Approximate token budget per chunk.
 * @param overlapCues - Trailing cues carried into the next chunk (default 2).
 * @returns Array of chunks; [] when no cues are given.
 */
function chunkVTTCues(cues, maxTokens, overlapCues = 2) {
  if (cues.length === 0)
    return [];
  const chunks = [];
  let currentCues = [];
  let currentTokens = 0;
  let chunkIndex = 0;
  for (let i = 0; i < cues.length; i++) {
    const cue = cues[i];
    const cueTokens = estimateTokenCount(cue.text);
    // Adding this cue would exceed the budget: finalize the current chunk first.
    if (currentTokens + cueTokens > maxTokens && currentCues.length > 0) {
      chunks.push(createChunkFromCues(currentCues, chunkIndex));
      chunkIndex++;
      // Seed the next chunk with the last `overlapCues` cues for context,
      // then recompute the running token total for that seed.
      const overlapStart = Math.max(0, currentCues.length - overlapCues);
      currentCues = currentCues.slice(overlapStart);
      currentTokens = currentCues.reduce(
        (sum, c) => sum + estimateTokenCount(c.text),
        0
      );
    }
    currentCues.push(cue);
    currentTokens += cueTokens;
  }
  // Flush the trailing partial chunk.
  if (currentCues.length > 0) {
    chunks.push(createChunkFromCues(currentCues, chunkIndex));
  }
  return chunks;
}
|
|
167
|
+
/**
 * Chunks text according to the supplied strategy.
 *
 * @param text - Text to chunk.
 * @param strategy - Chunking strategy; currently only { type: "token", maxTokens, overlap? }.
 * @returns Array of text chunks.
 * @throws Error for any unrecognized strategy type.
 */
function chunkText(text, strategy) {
  if (strategy.type === "token") {
    return chunkByTokens(text, strategy.maxTokens, strategy.overlap ?? 0);
  }
  // Exhaustiveness fallback compiled down from the TypeScript `never` check.
  const exhaustiveCheck = strategy;
  throw new Error(`Unsupported chunking strategy: ${exhaustiveCheck}`);
}
|
|
178
|
+
|
|
179
|
+
// src/primitives/thumbnails.ts
|
|
180
|
+
/**
 * Generates thumbnail URLs across a video's duration.
 * Videos of 50s or less get five evenly spaced thumbnails; longer videos get
 * one every `interval` seconds starting at 0. URLs are signed when a signing
 * context is provided.
 *
 * @param playbackId - The Mux playback ID.
 * @param duration - Video duration in seconds.
 * @param options - { interval = 10, width = 640, signingContext? }.
 * @returns Promise resolving to an array of thumbnail URLs.
 */
async function getThumbnailUrls(playbackId, duration, options = {}) {
  const { interval = 10, width = 640, signingContext } = options;
  const timestamps = [];
  if (duration <= 50) {
    // Short video: five rounded, evenly spaced sample points.
    const spacing = duration / 6;
    for (let i = 1; i <= 5; i++) {
      timestamps.push(Math.round(i * spacing));
    }
  } else {
    for (let t = 0; t < duration; t += interval) {
      timestamps.push(t);
    }
  }
  const baseUrl = `https://image.mux.com/${playbackId}/thumbnail.png`;
  const buildUrl = async (time) => {
    if (signingContext) {
      return signUrl(baseUrl, playbackId, signingContext, "thumbnail", { time, width });
    }
    return `${baseUrl}?time=${time}&width=${width}`;
  };
  return Promise.all(timestamps.map(buildUrl));
}
|
|
202
|
+
|
|
203
|
+
// src/primitives/transcripts.ts
|
|
204
|
+
/**
 * Returns the asset's text tracks whose status is "ready".
 *
 * @param asset - Mux asset object (tracks may be missing).
 * @returns Array of ready text tracks; [] when the asset has no tracks.
 */
function getReadyTextTracks(asset) {
  const allTracks = asset.tracks || [];
  const isReadyText = (track) => track.type === "text" && track.status === "ready";
  return allTracks.filter(isReadyText);
}
|
|
209
|
+
/**
 * Finds a ready caption track on an asset.
 * With no language code, the first ready text track wins; with one, only a
 * "subtitles" track in that exact language matches.
 *
 * @param asset - Mux asset object.
 * @param languageCode - Optional BCP-47-style language code to match.
 * @returns The matching track, or undefined.
 */
function findCaptionTrack(asset, languageCode) {
  const readyTracks = getReadyTextTracks(asset);
  if (readyTracks.length === 0) {
    return void 0;
  }
  if (!languageCode) {
    return readyTracks[0];
  }
  const matchesLanguage = (track) =>
    track.text_type === "subtitles" && track.language_code === languageCode;
  return readyTracks.find(matchesLanguage);
}
|
|
220
|
+
/**
 * Strips VTT structure from raw caption content and returns the plain text,
 * whitespace-normalized, with HTML-like tags removed.
 *
 * Note: lines matching /^[\w-]+$/ with no space are skipped as cue
 * identifiers — this also drops single-word caption lines.
 *
 * @param vttContent - Raw VTT file content.
 * @returns Space-joined caption text; "" for blank input.
 */
function extractTextFromVTT(vttContent) {
  if (!vttContent.trim()) {
    return "";
  }
  // True for structural VTT lines that carry no caption text.
  const isStructural = (line) =>
    line === "WEBVTT" ||
    line.startsWith("NOTE ") ||
    line.includes("-->") ||
    (/^[\w-]+$/.test(line) && !line.includes(" ")) ||
    line.startsWith("STYLE") ||
    line.startsWith("REGION");
  const collected = [];
  for (const rawLine of vttContent.split("\n")) {
    const line = rawLine.trim();
    if (!line || isStructural(line)) {
      continue;
    }
    const cleanLine = line.replace(/<[^>]*>/g, "").trim();
    if (cleanLine) {
      collected.push(cleanLine);
    }
  }
  return collected.join(" ").replace(/\s+/g, " ").trim();
}
|
|
247
|
+
/**
 * Converts a WebVTT timestamp to seconds.
 * Fix: the WebVTT spec allows the hours component to be omitted
 * ("MM:SS.mmm"); the previous implementation returned 0 for that form
 * because it only accepted three colon-separated parts.
 *
 * @param timestamp - "HH:MM:SS.mmm" or "MM:SS.mmm".
 * @returns Time in seconds; 0 for any unparsable input.
 */
function vttTimestampToSeconds(timestamp) {
  const parts = timestamp.split(":");
  if (parts.length === 2) {
    // Short form without hours.
    const minutes = Number.parseInt(parts[0], 10) || 0;
    const seconds = Number.parseFloat(parts[1]) || 0;
    return minutes * 60 + seconds;
  }
  if (parts.length !== 3) {
    return 0;
  }
  const hours = Number.parseInt(parts[0], 10) || 0;
  const minutes = Number.parseInt(parts[1], 10) || 0;
  const seconds = Number.parseFloat(parts[2]) || 0;
  return hours * 3600 + minutes * 60 + seconds;
}
|
|
256
|
+
/**
 * Produces a "[Ns] text" transcript, one line per cue, from raw VTT content.
 * Only the first non-blank payload line after each timing line is used.
 *
 * @param vttContent - Raw VTT file content.
 * @returns Newline-joined "[<floor seconds>s] <text>" lines; "" for blank input.
 */
function extractTimestampedTranscript(vttContent) {
  if (!vttContent.trim()) {
    return "";
  }
  const lines = vttContent.split("\n");
  const formatted = [];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();
    if (!line.includes("-->")) {
      continue;
    }
    const startStamp = line.split(" --> ")[0].trim();
    const timeInSeconds = vttTimestampToSeconds(startStamp);
    // Advance to the first non-blank line after the timing line.
    let j = i + 1;
    while (j < lines.length && !lines[j].trim()) {
      j++;
    }
    if (j >= lines.length) {
      continue;
    }
    const text = lines[j].trim().replace(/<[^>]*>/g, "");
    if (text) {
      formatted.push(`[${Math.floor(timeInSeconds)}s] ${text}`);
    }
  }
  return formatted.join("\n");
}
|
|
281
|
+
/**
 * Parses raw WebVTT content into structured cues with timing.
 *
 * @param vttContent - Raw VTT file content.
 * @returns Array of { startTime, endTime, text } cues (times in seconds).
 */
function parseVTTCues(vttContent) {
  if (!vttContent.trim())
    return [];
  const lines = vttContent.split("\n");
  const cues = [];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();
    // Timing lines contain "-->"; all other lines are handled via the inner scan.
    if (line.includes("-->")) {
      const [startStr, endStr] = line.split(" --> ").map((s) => s.trim());
      const startTime = vttTimestampToSeconds(startStr);
      // endStr may carry cue settings after a space (e.g. "00:00:04.000 align:start").
      const endTime = vttTimestampToSeconds(endStr.split(" ")[0]);
      // Collect payload lines until a blank line or the next timing line.
      const textLines = [];
      let j = i + 1;
      while (j < lines.length && lines[j].trim() && !lines[j].includes("-->")) {
        const cleanLine = lines[j].trim().replace(/<[^>]*>/g, "");
        if (cleanLine)
          textLines.push(cleanLine);
        j++;
      }
      // Cues whose payload is empty after tag-stripping are dropped.
      if (textLines.length > 0) {
        cues.push({
          startTime,
          endTime,
          text: textLines.join(" ")
        });
      }
    }
  }
  return cues;
}
|
|
311
|
+
/**
 * Builds the VTT transcript URL for a playback ID + text track, signing it
 * when a signing context is provided.
 *
 * @param playbackId - The Mux playback ID.
 * @param trackId - The text track ID.
 * @param signingContext - Optional signing context for signed playback IDs.
 * @returns Promise resolving to the transcript URL.
 */
async function buildTranscriptUrl(playbackId, trackId, signingContext) {
  const baseUrl = `https://stream.mux.com/${playbackId}/text/${trackId}.vtt`;
  return signingContext
    ? signUrl(baseUrl, playbackId, signingContext, "video")
    : baseUrl;
}
|
|
318
|
+
/**
 * Fetches the transcript for a Mux asset's caption track.
 * Best-effort: every failure mode (no track, no track id, HTTP error,
 * network error) degrades to an empty transcriptText rather than throwing.
 *
 * @param asset - Mux asset whose tracks are searched.
 * @param playbackId - Playback ID used to build the transcript URL.
 * @param options - { languageCode?, cleanTranscript = true, signingContext? }.
 * @returns { transcriptText, transcriptUrl?, track? }.
 */
async function fetchTranscriptForAsset(asset, playbackId, options = {}) {
  const { languageCode, cleanTranscript = true, signingContext } = options;
  const track = findCaptionTrack(asset, languageCode);
  // No ready caption track at all: empty transcript, no track info.
  if (!track) {
    return { transcriptText: "" };
  }
  // A track without an id cannot be fetched.
  if (!track.id) {
    return { transcriptText: "", track };
  }
  const transcriptUrl = await buildTranscriptUrl(playbackId, track.id, signingContext);
  try {
    const response = await fetch(transcriptUrl);
    if (!response.ok) {
      return { transcriptText: "", transcriptUrl, track };
    }
    const rawVtt = await response.text();
    // cleanTranscript strips VTT structure down to plain prose.
    const transcriptText = cleanTranscript ? extractTextFromVTT(rawVtt) : rawVtt;
    return { transcriptText, transcriptUrl, track };
  } catch (error) {
    // Network failures are logged and swallowed by design.
    console.warn("Failed to fetch transcript:", error);
    return { transcriptText: "", transcriptUrl, track };
  }
}
|
|
341
|
+
// Public surface of the primitives bundle (ESM build).
export {
  DEFAULT_STORYBOARD_WIDTH,
  buildTranscriptUrl,
  chunkByTokens,
  chunkText,
  chunkVTTCues,
  estimateTokenCount,
  extractTextFromVTT,
  extractTimestampedTranscript,
  fetchTranscriptForAsset,
  findCaptionTrack,
  getReadyTextTracks,
  getStoryboardUrl,
  getThumbnailUrls,
  parseVTTCues,
  vttTimestampToSeconds
};
|
|
358
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/lib/url-signing.ts","../../src/env.ts","../../src/primitives/storyboards.ts","../../src/primitives/text-chunking.ts","../../src/primitives/thumbnails.ts","../../src/primitives/transcripts.ts"],"sourcesContent":["import Mux from \"@mux/mux-node\";\n\nimport env from \"../env\";\nimport type { MuxAIConfig } from \"../types\";\n\n/**\n * Context required to sign URLs for signed playback IDs.\n */\nexport interface SigningContext {\n /** The signing key ID from Mux dashboard. */\n keyId: string;\n /** The base64-encoded private key from Mux dashboard. */\n keySecret: string;\n /** Token expiration time (e.g. '1h', '1d'). Defaults to '1h'. */\n expiration?: string;\n}\n\n/**\n * Token type determines which Mux service the token is valid for.\n */\nexport type TokenType = \"video\" | \"thumbnail\" | \"storyboard\" | \"gif\";\n\n/**\n * Resolves signing context from config or environment variables.\n * Returns undefined if signing keys are not configured.\n */\nexport function resolveSigningContext(config: MuxAIConfig): SigningContext | undefined {\n const keyId = config.muxSigningKey ?? env.MUX_SIGNING_KEY;\n const keySecret = config.muxPrivateKey ?? 
env.MUX_PRIVATE_KEY;\n\n if (!keyId || !keySecret) {\n return undefined;\n }\n\n return { keyId, keySecret };\n}\n\n/**\n * Creates a Mux client configured for JWT signing.\n * This client is used internally for signing operations.\n */\nfunction createSigningClient(context: SigningContext): Mux {\n return new Mux({\n // These are not needed for signing, but the SDK requires them\n // Using empty strings as we only need the jwt functionality\n tokenId: env.MUX_TOKEN_ID || \"\",\n tokenSecret: env.MUX_TOKEN_SECRET || \"\",\n jwtSigningKey: context.keyId,\n jwtPrivateKey: context.keySecret,\n });\n}\n\n/**\n * Generates a signed token for a playback ID using the Mux SDK.\n *\n * @param playbackId - The Mux playback ID to sign\n * @param context - Signing context with key credentials\n * @param type - Token type (video, thumbnail, storyboard, gif)\n * @param params - Additional parameters for thumbnail/storyboard tokens (values will be stringified)\n * @returns Signed JWT token\n */\nexport async function signPlaybackId(\n playbackId: string,\n context: SigningContext,\n type: TokenType = \"video\",\n params?: Record<string, string | number>,\n): Promise<string> {\n const client = createSigningClient(context);\n\n // Convert params to Record<string, string> as required by the SDK\n const stringParams = params ?\n Object.fromEntries(\n Object.entries(params).map(([key, value]) => [key, String(value)]),\n ) :\n undefined;\n\n return client.jwt.signPlaybackId(playbackId, {\n type,\n expiration: context.expiration || \"1h\",\n params: stringParams,\n });\n}\n\n/**\n * Appends a signed token to a Mux URL.\n *\n * @param url - The base Mux URL (e.g. 
https://image.mux.com/{playbackId}/thumbnail.png)\n * @param playbackId - The Mux playback ID\n * @param context - Signing context with key credentials\n * @param type - Token type for the URL\n * @param params - Additional parameters for the token\n * @returns URL with token query parameter appended\n */\nexport async function signUrl(\n url: string,\n playbackId: string,\n context: SigningContext,\n type: TokenType = \"video\",\n params?: Record<string, string | number>,\n): Promise<string> {\n const token = await signPlaybackId(playbackId, context, type, params);\n const separator = url.includes(\"?\") ? \"&\" : \"?\";\n return `${url}${separator}token=${token}`;\n}\n","/* eslint-disable node/no-process-env */\nimport path from \"node:path\";\n\nimport { config } from \"dotenv\";\nimport { expand } from \"dotenv-expand\";\nimport { z } from \"zod\";\n\nexpand(config({\n path: path.resolve(\n process.cwd(),\n process.env.NODE_ENV === \"test\" ? \".env.test\" : \".env\",\n ),\n}));\n\nfunction optionalString(description: string, message?: string) {\n return z.preprocess(\n value => typeof value === \"string\" && value.trim().length === 0 ? undefined : value,\n z.string().trim().min(1, message).optional(),\n ).describe(description);\n}\n\nfunction requiredString(description: string, message?: string) {\n return z.preprocess(\n value => typeof value === \"string\" ? value.trim().length > 0 ? 
value.trim() : undefined : value,\n z.string().trim().min(1, message),\n ).describe(description);\n}\n\nconst EnvSchema = z.object({\n NODE_ENV: z.string().default(\"development\").describe(\"Runtime environment.\"),\n\n MUX_TOKEN_ID: requiredString(\"Mux access token ID.\", \"Required to access Mux APIs\"),\n MUX_TOKEN_SECRET: requiredString(\"Mux access token secret.\", \"Required to access Mux APIs\"),\n\n MUX_SIGNING_KEY: optionalString(\"Mux signing key ID for signed playback URLs.\", \"Used to sign playback URLs\"),\n MUX_PRIVATE_KEY: optionalString(\"Mux signing private key for signed playback URLs.\", \"Used to sign playback URLs\"),\n\n OPENAI_API_KEY: optionalString(\"OpenAI API key for OpenAI-backed workflows.\", \"OpenAI API key\"),\n ANTHROPIC_API_KEY: optionalString(\"Anthropic API key for Claude-backed workflows.\", \"Anthropic API key\"),\n GOOGLE_GENERATIVE_AI_API_KEY: optionalString(\"Google Generative AI API key for Gemini-backed workflows.\", \"Google Generative AI API key\"),\n\n ELEVENLABS_API_KEY: optionalString(\"ElevenLabs API key for audio translation.\", \"ElevenLabs API key\"),\n HIVE_API_KEY: optionalString(\"Hive Visual Moderation API key.\", \"Hive API key\"),\n\n S3_ENDPOINT: optionalString(\"S3-compatible endpoint for uploads.\", \"S3 endpoint\"),\n S3_REGION: optionalString(\"S3 region (defaults to 'auto' when omitted).\"),\n S3_BUCKET: optionalString(\"Bucket used for caption and audio uploads.\", \"S3 bucket\"),\n S3_ACCESS_KEY_ID: optionalString(\"Access key ID for S3-compatible uploads.\", \"S3 access key id\"),\n S3_SECRET_ACCESS_KEY: optionalString(\"Secret access key for S3-compatible uploads.\", \"S3 secret access key\"),\n});\n\nexport type Env = z.infer<typeof EnvSchema>;\n\nfunction parseEnv(): Env {\n const parsedEnv = EnvSchema.safeParse(process.env);\n\n if (!parsedEnv.success) {\n console.error(\"❌ Invalid env:\");\n console.error(JSON.stringify(parsedEnv.error.flatten().fieldErrors, null, 2));\n process.exit(1);\n 
}\n\n return parsedEnv.data;\n}\n\nconst env: Env = parseEnv();\n\nexport function reloadEnv(): Env {\n const parsed = parseEnv();\n Object.assign(env, parsed);\n return env;\n}\n\nexport { env };\nexport default env;\n","import type { SigningContext } from \"../lib/url-signing\";\nimport { signUrl } from \"../lib/url-signing\";\n\nexport const DEFAULT_STORYBOARD_WIDTH = 640;\n\n/**\n * Generates a storyboard URL for the given playback ID.\n * If a signing context is provided, the URL will be signed with a token.\n *\n * @param playbackId - The Mux playback ID\n * @param width - Width of the storyboard in pixels (default: 640)\n * @param signingContext - Optional signing context for signed playback IDs\n * @returns Storyboard URL (signed if context provided)\n */\nexport async function getStoryboardUrl(\n playbackId: string,\n width: number = DEFAULT_STORYBOARD_WIDTH,\n signingContext?: SigningContext,\n): Promise<string> {\n const baseUrl = `https://image.mux.com/${playbackId}/storyboard.png`;\n\n if (signingContext) {\n return signUrl(baseUrl, playbackId, signingContext, \"storyboard\", { width });\n }\n\n return `${baseUrl}?width=${width}`;\n}\n","import type { ChunkingStrategy, TextChunk } from \"../types\";\n\nimport type { VTTCue } from \"./transcripts\";\n\n/**\n * Simple token counter that approximates tokens by word count.\n * For production use with OpenAI, consider using a proper tokenizer like tiktoken.\n * This approximation is generally close enough for chunking purposes (1 token ≈ 0.75 words).\n */\nexport function estimateTokenCount(text: string): number {\n const words = text.trim().split(/\\s+/).length;\n return Math.ceil(words / 0.75);\n}\n\n/**\n * Chunks text into overlapping segments based on token count.\n *\n * @param text - The text to chunk\n * @param maxTokens - Maximum tokens per chunk\n * @param overlapTokens - Number of tokens to overlap between chunks\n * @returns Array of text chunks with metadata\n */\nexport function 
chunkByTokens(\n text: string,\n maxTokens: number,\n overlapTokens: number = 0,\n): TextChunk[] {\n if (!text.trim()) {\n return [];\n }\n\n const chunks: TextChunk[] = [];\n const words = text.trim().split(/\\s+/);\n\n // Convert tokens to approximate word count\n const wordsPerChunk = Math.floor(maxTokens * 0.75);\n const overlapWords = Math.floor(overlapTokens * 0.75);\n\n let chunkIndex = 0;\n let currentPosition = 0;\n\n while (currentPosition < words.length) {\n const chunkWords = words.slice(\n currentPosition,\n currentPosition + wordsPerChunk,\n );\n const chunkText = chunkWords.join(\" \");\n const tokenCount = estimateTokenCount(chunkText);\n\n chunks.push({\n id: `chunk-${chunkIndex}`,\n text: chunkText,\n tokenCount,\n });\n\n // Move forward by chunk size minus overlap\n currentPosition += wordsPerChunk - overlapWords;\n chunkIndex++;\n\n // Prevent infinite loop if overlap is too large\n if (currentPosition <= (chunkIndex - 1) * (wordsPerChunk - overlapWords)) {\n break;\n }\n }\n\n return chunks;\n}\n\n/**\n * Creates a TextChunk from a group of VTT cues.\n */\nfunction createChunkFromCues(cues: VTTCue[], index: number): TextChunk {\n const text = cues.map(c => c.text).join(\" \");\n return {\n id: `chunk-${index}`,\n text,\n tokenCount: estimateTokenCount(text),\n startTime: cues[0].startTime,\n endTime: cues[cues.length - 1].endTime,\n };\n}\n\n/**\n * Chunks VTT cues into groups that respect natural cue boundaries.\n * Splits at cue boundaries rather than mid-sentence, preserving accurate timestamps.\n *\n * @param cues - Array of VTT cues to chunk\n * @param maxTokens - Maximum tokens per chunk\n * @param overlapCues - Number of cues to overlap between chunks (default: 2)\n * @returns Array of text chunks with accurate start/end times\n */\nexport function chunkVTTCues(\n cues: VTTCue[],\n maxTokens: number,\n overlapCues: number = 2,\n): TextChunk[] {\n if (cues.length === 0)\n return [];\n\n const chunks: TextChunk[] = [];\n let currentCues: 
VTTCue[] = [];\n let currentTokens = 0;\n let chunkIndex = 0;\n\n for (let i = 0; i < cues.length; i++) {\n const cue = cues[i];\n const cueTokens = estimateTokenCount(cue.text);\n\n // If adding this cue would exceed limit, finalize current chunk\n if (currentTokens + cueTokens > maxTokens && currentCues.length > 0) {\n chunks.push(createChunkFromCues(currentCues, chunkIndex));\n chunkIndex++;\n\n // Start new chunk with overlap from end of previous\n const overlapStart = Math.max(0, currentCues.length - overlapCues);\n currentCues = currentCues.slice(overlapStart);\n currentTokens = currentCues.reduce(\n (sum, c) => sum + estimateTokenCount(c.text),\n 0,\n );\n }\n\n currentCues.push(cue);\n currentTokens += cueTokens;\n }\n\n // Don't forget the last chunk\n if (currentCues.length > 0) {\n chunks.push(createChunkFromCues(currentCues, chunkIndex));\n }\n\n return chunks;\n}\n\n/**\n * Chunks text according to the specified strategy.\n *\n * @param text - The text to chunk\n * @param strategy - The chunking strategy to use\n * @returns Array of text chunks\n */\nexport function chunkText(text: string, strategy: ChunkingStrategy): TextChunk[] {\n switch (strategy.type) {\n case \"token\": {\n return chunkByTokens(text, strategy.maxTokens, strategy.overlap ?? 
0);\n }\n default: {\n const exhaustiveCheck: never = strategy as never;\n throw new Error(`Unsupported chunking strategy: ${exhaustiveCheck}`);\n }\n }\n}\n","import type { SigningContext } from \"../lib/url-signing\";\nimport { signUrl } from \"../lib/url-signing\";\n\nexport interface ThumbnailOptions {\n /** Interval between thumbnails in seconds (default: 10) */\n interval?: number;\n /** Width of the thumbnail in pixels (default: 640) */\n width?: number;\n /** Optional signing context for signed playback IDs */\n signingContext?: SigningContext;\n}\n\n/**\n * Generates thumbnail URLs at regular intervals based on video duration.\n * If a signing context is provided, the URLs will be signed with tokens.\n *\n * @param playbackId - The Mux playback ID\n * @param duration - Video duration in seconds\n * @param options - Thumbnail generation options\n * @returns Array of thumbnail URLs (signed if context provided)\n */\nexport async function getThumbnailUrls(\n playbackId: string,\n duration: number,\n options: ThumbnailOptions = {},\n): Promise<string[]> {\n const { interval = 10, width = 640, signingContext } = options;\n const timestamps: number[] = [];\n\n if (duration <= 50) {\n const spacing = duration / 6;\n for (let i = 1; i <= 5; i++) {\n timestamps.push(Math.round(i * spacing));\n }\n } else {\n for (let time = 0; time < duration; time += interval) {\n timestamps.push(time);\n }\n }\n\n const baseUrl = `https://image.mux.com/${playbackId}/thumbnail.png`;\n\n const urlPromises = timestamps.map(async (time) => {\n if (signingContext) {\n return signUrl(baseUrl, playbackId, signingContext, \"thumbnail\", { time, width });\n }\n\n return `${baseUrl}?time=${time}&width=${width}`;\n });\n\n return Promise.all(urlPromises);\n}\n","import type { SigningContext } from \"../lib/url-signing\";\nimport { signUrl } from \"../lib/url-signing\";\nimport type { AssetTextTrack, MuxAsset } from \"../types\";\n\n/** A single cue from a VTT file with timing info. 
*/\nexport interface VTTCue {\n startTime: number;\n endTime: number;\n text: string;\n}\n\nexport interface TranscriptFetchOptions {\n languageCode?: string;\n cleanTranscript?: boolean;\n /** Optional signing context for signed playback IDs */\n signingContext?: SigningContext;\n}\n\nexport interface TranscriptResult {\n transcriptText: string;\n transcriptUrl?: string;\n track?: AssetTextTrack;\n}\n\nexport function getReadyTextTracks(asset: MuxAsset): AssetTextTrack[] {\n return (asset.tracks || []).filter(\n track => track.type === \"text\" && track.status === \"ready\",\n );\n}\n\nexport function findCaptionTrack(asset: MuxAsset, languageCode?: string): AssetTextTrack | undefined {\n const tracks = getReadyTextTracks(asset);\n if (!tracks.length)\n return undefined;\n\n if (!languageCode) {\n return tracks[0];\n }\n\n return tracks.find(\n track =>\n track.text_type === \"subtitles\" &&\n track.language_code === languageCode,\n );\n}\n\nexport function extractTextFromVTT(vttContent: string): string {\n if (!vttContent.trim()) {\n return \"\";\n }\n\n const lines = vttContent.split(\"\\n\");\n const textLines: string[] = [];\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i].trim();\n\n if (!line)\n continue;\n if (line === \"WEBVTT\")\n continue;\n if (line.startsWith(\"NOTE \"))\n continue;\n if (line.includes(\"-->\"))\n continue;\n if (/^[\\w-]+$/.test(line) && !line.includes(\" \"))\n continue;\n if (line.startsWith(\"STYLE\") || line.startsWith(\"REGION\"))\n continue;\n\n const cleanLine = line.replace(/<[^>]*>/g, \"\").trim();\n\n if (cleanLine) {\n textLines.push(cleanLine);\n }\n }\n\n return textLines.join(\" \").replace(/\\s+/g, \" \").trim();\n}\n\nexport function vttTimestampToSeconds(timestamp: string): number {\n const parts = timestamp.split(\":\");\n if (parts.length !== 3)\n return 0;\n\n const hours = Number.parseInt(parts[0], 10) || 0;\n const minutes = Number.parseInt(parts[1], 10) || 0;\n const seconds = 
Number.parseFloat(parts[2]) || 0;\n\n return hours * 3600 + minutes * 60 + seconds;\n}\n\nexport function extractTimestampedTranscript(vttContent: string): string {\n if (!vttContent.trim()) {\n return \"\";\n }\n\n const lines = vttContent.split(\"\\n\");\n const segments: Array<{ time: number; text: string }> = [];\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i].trim();\n\n if (line.includes(\"-->\")) {\n const startTime = line.split(\" --> \")[0].trim();\n const timeInSeconds = vttTimestampToSeconds(startTime);\n\n let j = i + 1;\n while (j < lines.length && !lines[j].trim()) {\n j++;\n }\n\n if (j < lines.length) {\n const text = lines[j].trim().replace(/<[^>]*>/g, \"\");\n if (text) {\n segments.push({ time: timeInSeconds, text });\n }\n }\n }\n }\n\n return segments\n .map(segment => `[${Math.floor(segment.time)}s] ${segment.text}`)\n .join(\"\\n\");\n}\n\n/**\n * Parses VTT content into structured cues with timing.\n *\n * @param vttContent - Raw VTT file content\n * @returns Array of VTT cues with start/end times and text\n */\nexport function parseVTTCues(vttContent: string): VTTCue[] {\n if (!vttContent.trim())\n return [];\n\n const lines = vttContent.split(\"\\n\");\n const cues: VTTCue[] = [];\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i].trim();\n\n if (line.includes(\"-->\")) {\n const [startStr, endStr] = line.split(\" --> \").map(s => s.trim());\n const startTime = vttTimestampToSeconds(startStr);\n const endTime = vttTimestampToSeconds(endStr.split(\" \")[0]); // Handle cue settings\n\n // Collect text lines until empty line or next timestamp\n const textLines: string[] = [];\n let j = i + 1;\n while (j < lines.length && lines[j].trim() && !lines[j].includes(\"-->\")) {\n const cleanLine = lines[j].trim().replace(/<[^>]*>/g, \"\");\n if (cleanLine)\n textLines.push(cleanLine);\n j++;\n }\n\n if (textLines.length > 0) {\n cues.push({\n startTime,\n endTime,\n text: textLines.join(\" \"),\n });\n }\n }\n 
}\n\n return cues;\n}\n\n/**\n * Builds a transcript URL for the given playback ID and track ID.\n * If a signing context is provided, the URL will be signed with a token.\n *\n * @param playbackId - The Mux playback ID\n * @param trackId - The text track ID\n * @param signingContext - Optional signing context for signed playback IDs\n * @returns Transcript URL (signed if context provided)\n */\nexport async function buildTranscriptUrl(\n playbackId: string,\n trackId: string,\n signingContext?: SigningContext,\n): Promise<string> {\n const baseUrl = `https://stream.mux.com/${playbackId}/text/${trackId}.vtt`;\n\n if (signingContext) {\n return signUrl(baseUrl, playbackId, signingContext, \"video\");\n }\n\n return baseUrl;\n}\n\nexport async function fetchTranscriptForAsset(\n asset: MuxAsset,\n playbackId: string,\n options: TranscriptFetchOptions = {},\n): Promise<TranscriptResult> {\n const { languageCode, cleanTranscript = true, signingContext } = options;\n const track = findCaptionTrack(asset, languageCode);\n\n if (!track) {\n return { transcriptText: \"\" };\n }\n\n if (!track.id) {\n return { transcriptText: \"\", track };\n }\n\n const transcriptUrl = await buildTranscriptUrl(playbackId, track.id, signingContext);\n\n try {\n const response = await fetch(transcriptUrl);\n if (!response.ok) {\n return { transcriptText: \"\", transcriptUrl, track };\n }\n\n const rawVtt = await response.text();\n const transcriptText = cleanTranscript ? 
extractTextFromVTT(rawVtt) : rawVtt;\n\n return { transcriptText, transcriptUrl, track };\n } catch (error) {\n console.warn(\"Failed to fetch transcript:\", error);\n return { transcriptText: \"\", transcriptUrl, track };\n }\n}\n"],"mappings":";AAAA,OAAO,SAAS;;;ACChB,OAAO,UAAU;AAEjB,SAAS,cAAc;AACvB,SAAS,cAAc;AACvB,SAAS,SAAS;AAElB,OAAO,OAAO;AAAA,EACZ,MAAM,KAAK;AAAA,IACT,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI,aAAa,SAAS,cAAc;AAAA,EAClD;AACF,CAAC,CAAC;AAEF,SAAS,eAAe,aAAqB,SAAkB;AAC7D,SAAO,EAAE;AAAA,IACP,WAAS,OAAO,UAAU,YAAY,MAAM,KAAK,EAAE,WAAW,IAAI,SAAY;AAAA,IAC9E,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,GAAG,OAAO,EAAE,SAAS;AAAA,EAC7C,EAAE,SAAS,WAAW;AACxB;AAEA,SAAS,eAAe,aAAqB,SAAkB;AAC7D,SAAO,EAAE;AAAA,IACP,WAAS,OAAO,UAAU,WAAW,MAAM,KAAK,EAAE,SAAS,IAAI,MAAM,KAAK,IAAI,SAAY;AAAA,IAC1F,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,GAAG,OAAO;AAAA,EAClC,EAAE,SAAS,WAAW;AACxB;AAEA,IAAM,YAAY,EAAE,OAAO;AAAA,EACzB,UAAU,EAAE,OAAO,EAAE,QAAQ,aAAa,EAAE,SAAS,sBAAsB;AAAA,EAE3E,cAAc,eAAe,wBAAwB,6BAA6B;AAAA,EAClF,kBAAkB,eAAe,4BAA4B,6BAA6B;AAAA,EAE1F,iBAAiB,eAAe,gDAAgD,4BAA4B;AAAA,EAC5G,iBAAiB,eAAe,qDAAqD,4BAA4B;AAAA,EAEjH,gBAAgB,eAAe,+CAA+C,gBAAgB;AAAA,EAC9F,mBAAmB,eAAe,kDAAkD,mBAAmB;AAAA,EACvG,8BAA8B,eAAe,6DAA6D,8BAA8B;AAAA,EAExI,oBAAoB,eAAe,6CAA6C,oBAAoB;AAAA,EACpG,cAAc,eAAe,mCAAmC,cAAc;AAAA,EAE9E,aAAa,eAAe,uCAAuC,aAAa;AAAA,EAChF,WAAW,eAAe,8CAA8C;AAAA,EACxE,WAAW,eAAe,8CAA8C,WAAW;AAAA,EACnF,kBAAkB,eAAe,4CAA4C,kBAAkB;AAAA,EAC/F,sBAAsB,eAAe,gDAAgD,sBAAsB;AAC7G,CAAC;AAID,SAAS,WAAgB;AACvB,QAAM,YAAY,UAAU,UAAU,QAAQ,GAAG;AAEjD,MAAI,CAAC,UAAU,SAAS;AACtB,YAAQ,MAAM,qBAAgB;AAC9B,YAAQ,MAAM,KAAK,UAAU,UAAU,MAAM,QAAQ,EAAE,aAAa,MAAM,CAAC,CAAC;AAC5E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO,UAAU;AACnB;AAEA,IAAM,MAAW,SAAS;AAS1B,IAAO,cAAQ;;;ADjCf,SAAS,oBAAoB,SAA8B;AACzD,SAAO,IAAI,IAAI;AAAA;AAAA;AAAA,IAGb,SAAS,YAAI,gBAAgB;AAAA,IAC7B,aAAa,YAAI,oBAAoB;AAAA,IACrC,eAAe,QAAQ;AAAA,IACvB,eAAe,QAAQ;AAAA,EACzB,CAAC;AACH;AAWA,eAAsB,eACpB,YACA,SACA,OAAkB,SAClB,QACiB;AACjB,QAAM,SAAS,oBAAoB,OAAO;AAG1C,QAAM,eAAe,SACjB,OAAO;AAAA,IACL,OAAO,QAAQ,MAAM,EAAE,IAAI,CAA
C,CAAC,KAAK,KAAK,MAAM,CAAC,KAAK,OAAO,KAAK,CAAC,CAAC;AAAA,EACnE,IACF;AAEF,SAAO,OAAO,IAAI,eAAe,YAAY;AAAA,IAC3C;AAAA,IACA,YAAY,QAAQ,cAAc;AAAA,IAClC,QAAQ;AAAA,EACV,CAAC;AACH;AAYA,eAAsB,QACpB,KACA,YACA,SACA,OAAkB,SAClB,QACiB;AACjB,QAAM,QAAQ,MAAM,eAAe,YAAY,SAAS,MAAM,MAAM;AACpE,QAAM,YAAY,IAAI,SAAS,GAAG,IAAI,MAAM;AAC5C,SAAO,GAAG,GAAG,GAAG,SAAS,SAAS,KAAK;AACzC;;;AEpGO,IAAM,2BAA2B;AAWxC,eAAsB,iBACpB,YACA,QAAgB,0BAChB,gBACiB;AACjB,QAAM,UAAU,yBAAyB,UAAU;AAEnD,MAAI,gBAAgB;AAClB,WAAO,QAAQ,SAAS,YAAY,gBAAgB,cAAc,EAAE,MAAM,CAAC;AAAA,EAC7E;AAEA,SAAO,GAAG,OAAO,UAAU,KAAK;AAClC;;;ACjBO,SAAS,mBAAmB,MAAsB;AACvD,QAAM,QAAQ,KAAK,KAAK,EAAE,MAAM,KAAK,EAAE;AACvC,SAAO,KAAK,KAAK,QAAQ,IAAI;AAC/B;AAUO,SAAS,cACd,MACA,WACA,gBAAwB,GACX;AACb,MAAI,CAAC,KAAK,KAAK,GAAG;AAChB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAsB,CAAC;AAC7B,QAAM,QAAQ,KAAK,KAAK,EAAE,MAAM,KAAK;AAGrC,QAAM,gBAAgB,KAAK,MAAM,YAAY,IAAI;AACjD,QAAM,eAAe,KAAK,MAAM,gBAAgB,IAAI;AAEpD,MAAI,aAAa;AACjB,MAAI,kBAAkB;AAEtB,SAAO,kBAAkB,MAAM,QAAQ;AACrC,UAAM,aAAa,MAAM;AAAA,MACvB;AAAA,MACA,kBAAkB;AAAA,IACpB;AACA,UAAMA,aAAY,WAAW,KAAK,GAAG;AACrC,UAAM,aAAa,mBAAmBA,UAAS;AAE/C,WAAO,KAAK;AAAA,MACV,IAAI,SAAS,UAAU;AAAA,MACvB,MAAMA;AAAA,MACN;AAAA,IACF,CAAC;AAGD,uBAAmB,gBAAgB;AACnC;AAGA,QAAI,oBAAoB,aAAa,MAAM,gBAAgB,eAAe;AACxE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,oBAAoB,MAAgB,OAA0B;AACrE,QAAM,OAAO,KAAK,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,GAAG;AAC3C,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,IAClB;AAAA,IACA,YAAY,mBAAmB,IAAI;AAAA,IACnC,WAAW,KAAK,CAAC,EAAE;AAAA,IACnB,SAAS,KAAK,KAAK,SAAS,CAAC,EAAE;AAAA,EACjC;AACF;AAWO,SAAS,aACd,MACA,WACA,cAAsB,GACT;AACb,MAAI,KAAK,WAAW;AAClB,WAAO,CAAC;AAEV,QAAM,SAAsB,CAAC;AAC7B,MAAI,cAAwB,CAAC;AAC7B,MAAI,gBAAgB;AACpB,MAAI,aAAa;AAEjB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,MAAM,KAAK,CAAC;AAClB,UAAM,YAAY,mBAAmB,IAAI,IAAI;AAG7C,QAAI,gBAAgB,YAAY,aAAa,YAAY,SAAS,GAAG;AACnE,aAAO,KAAK,oBAAoB,aAAa,UAAU,CAAC;AACxD;AAGA,YAAM,eAAe,KAAK,IAAI,GAAG,YAAY,SAAS,WAAW;AACjE,oBAAc,YAAY,MAAM,YAAY;AAC5C,sBAAgB,YAAY;AAAA,QAC1B,CAAC,KAAK,MAAM,MAAM,mBAAmB,EAAE,IAAI;AAAA,
QAC3C;AAAA,MACF;AAAA,IACF;AAEA,gBAAY,KAAK,GAAG;AACpB,qBAAiB;AAAA,EACnB;AAGA,MAAI,YAAY,SAAS,GAAG;AAC1B,WAAO,KAAK,oBAAoB,aAAa,UAAU,CAAC;AAAA,EAC1D;AAEA,SAAO;AACT;AASO,SAAS,UAAU,MAAc,UAAyC;AAC/E,UAAQ,SAAS,MAAM;AAAA,IACrB,KAAK,SAAS;AACZ,aAAO,cAAc,MAAM,SAAS,WAAW,SAAS,WAAW,CAAC;AAAA,IACtE;AAAA,IACA,SAAS;AACP,YAAM,kBAAyB;AAC/B,YAAM,IAAI,MAAM,kCAAkC,eAAe,EAAE;AAAA,IACrE;AAAA,EACF;AACF;;;AClIA,eAAsB,iBACpB,YACA,UACA,UAA4B,CAAC,GACV;AACnB,QAAM,EAAE,WAAW,IAAI,QAAQ,KAAK,eAAe,IAAI;AACvD,QAAM,aAAuB,CAAC;AAE9B,MAAI,YAAY,IAAI;AAClB,UAAM,UAAU,WAAW;AAC3B,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK;AAC3B,iBAAW,KAAK,KAAK,MAAM,IAAI,OAAO,CAAC;AAAA,IACzC;AAAA,EACF,OAAO;AACL,aAAS,OAAO,GAAG,OAAO,UAAU,QAAQ,UAAU;AACpD,iBAAW,KAAK,IAAI;AAAA,IACtB;AAAA,EACF;AAEA,QAAM,UAAU,yBAAyB,UAAU;AAEnD,QAAM,cAAc,WAAW,IAAI,OAAO,SAAS;AACjD,QAAI,gBAAgB;AAClB,aAAO,QAAQ,SAAS,YAAY,gBAAgB,aAAa,EAAE,MAAM,MAAM,CAAC;AAAA,IAClF;AAEA,WAAO,GAAG,OAAO,SAAS,IAAI,UAAU,KAAK;AAAA,EAC/C,CAAC;AAED,SAAO,QAAQ,IAAI,WAAW;AAChC;;;AC3BO,SAAS,mBAAmB,OAAmC;AACpE,UAAQ,MAAM,UAAU,CAAC,GAAG;AAAA,IAC1B,WAAS,MAAM,SAAS,UAAU,MAAM,WAAW;AAAA,EACrD;AACF;AAEO,SAAS,iBAAiB,OAAiB,cAAmD;AACnG,QAAM,SAAS,mBAAmB,KAAK;AACvC,MAAI,CAAC,OAAO;AACV,WAAO;AAET,MAAI,CAAC,cAAc;AACjB,WAAO,OAAO,CAAC;AAAA,EACjB;AAEA,SAAO,OAAO;AAAA,IACZ,WACE,MAAM,cAAc,eACpB,MAAM,kBAAkB;AAAA,EAC5B;AACF;AAEO,SAAS,mBAAmB,YAA4B;AAC7D,MAAI,CAAC,WAAW,KAAK,GAAG;AACtB,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,WAAW,MAAM,IAAI;AACnC,QAAM,YAAsB,CAAC;AAE7B,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC,EAAE,KAAK;AAE3B,QAAI,CAAC;AACH;AACF,QAAI,SAAS;AACX;AACF,QAAI,KAAK,WAAW,OAAO;AACzB;AACF,QAAI,KAAK,SAAS,KAAK;AACrB;AACF,QAAI,WAAW,KAAK,IAAI,KAAK,CAAC,KAAK,SAAS,GAAG;AAC7C;AACF,QAAI,KAAK,WAAW,OAAO,KAAK,KAAK,WAAW,QAAQ;AACtD;AAEF,UAAM,YAAY,KAAK,QAAQ,YAAY,EAAE,EAAE,KAAK;AAEpD,QAAI,WAAW;AACb,gBAAU,KAAK,SAAS;AAAA,IAC1B;AAAA,EACF;AAEA,SAAO,UAAU,KAAK,GAAG,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AACvD;AAEO,SAAS,sBAAsB,WAA2B;AAC/D,QAAM,QAAQ,UAAU,MAAM,GAAG;AACjC,MAAI,MAAM,WAAW;AACnB,WAAO;AAET,QAAM,QAAQ,OAAO,SAAS,MAAM,CAAC,GAAG,EAAE,KA
AK;AAC/C,QAAM,UAAU,OAAO,SAAS,MAAM,CAAC,GAAG,EAAE,KAAK;AACjD,QAAM,UAAU,OAAO,WAAW,MAAM,CAAC,CAAC,KAAK;AAE/C,SAAO,QAAQ,OAAO,UAAU,KAAK;AACvC;AAEO,SAAS,6BAA6B,YAA4B;AACvE,MAAI,CAAC,WAAW,KAAK,GAAG;AACtB,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,WAAW,MAAM,IAAI;AACnC,QAAM,WAAkD,CAAC;AAEzD,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC,EAAE,KAAK;AAE3B,QAAI,KAAK,SAAS,KAAK,GAAG;AACxB,YAAM,YAAY,KAAK,MAAM,OAAO,EAAE,CAAC,EAAE,KAAK;AAC9C,YAAM,gBAAgB,sBAAsB,SAAS;AAErD,UAAI,IAAI,IAAI;AACZ,aAAO,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,EAAE,KAAK,GAAG;AAC3C;AAAA,MACF;AAEA,UAAI,IAAI,MAAM,QAAQ;AACpB,cAAM,OAAO,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,YAAY,EAAE;AACnD,YAAI,MAAM;AACR,mBAAS,KAAK,EAAE,MAAM,eAAe,KAAK,CAAC;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,SACJ,IAAI,aAAW,IAAI,KAAK,MAAM,QAAQ,IAAI,CAAC,MAAM,QAAQ,IAAI,EAAE,EAC/D,KAAK,IAAI;AACd;AAQO,SAAS,aAAa,YAA8B;AACzD,MAAI,CAAC,WAAW,KAAK;AACnB,WAAO,CAAC;AAEV,QAAM,QAAQ,WAAW,MAAM,IAAI;AACnC,QAAM,OAAiB,CAAC;AAExB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC,EAAE,KAAK;AAE3B,QAAI,KAAK,SAAS,KAAK,GAAG;AACxB,YAAM,CAAC,UAAU,MAAM,IAAI,KAAK,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAChE,YAAM,YAAY,sBAAsB,QAAQ;AAChD,YAAM,UAAU,sBAAsB,OAAO,MAAM,GAAG,EAAE,CAAC,CAAC;AAG1D,YAAM,YAAsB,CAAC;AAC7B,UAAI,IAAI,IAAI;AACZ,aAAO,IAAI,MAAM,UAAU,MAAM,CAAC,EAAE,KAAK,KAAK,CAAC,MAAM,CAAC,EAAE,SAAS,KAAK,GAAG;AACvE,cAAM,YAAY,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,YAAY,EAAE;AACxD,YAAI;AACF,oBAAU,KAAK,SAAS;AAC1B;AAAA,MACF;AAEA,UAAI,UAAU,SAAS,GAAG;AACxB,aAAK,KAAK;AAAA,UACR;AAAA,UACA;AAAA,UACA,MAAM,UAAU,KAAK,GAAG;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAWA,eAAsB,mBACpB,YACA,SACA,gBACiB;AACjB,QAAM,UAAU,0BAA0B,UAAU,SAAS,OAAO;AAEpE,MAAI,gBAAgB;AAClB,WAAO,QAAQ,SAAS,YAAY,gBAAgB,OAAO;AAAA,EAC7D;AAEA,SAAO;AACT;AAEA,eAAsB,wBACpB,OACA,YACA,UAAkC,CAAC,GACR;AAC3B,QAAM,EAAE,cAAc,kBAAkB,MAAM,eAAe,IAAI;AACjE,QAAM,QAAQ,iBAAiB,OAAO,YAAY;AAElD,MAAI,CAAC,OAAO;AACV,WAAO,EAAE,gBAAgB,GAAG;AAAA,EAC9B;AAEA,MAAI,CAAC,MAAM,IAAI;AACb,WAAO,EAAE,gBAAgB,IAAI,MAAM;AAAA,EACrC;AAEA,QAAM,gBAA
gB,MAAM,mBAAmB,YAAY,MAAM,IAAI,cAAc;AAEnF,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,QAAI,CAAC,SAAS,IAAI;AAChB,aAAO,EAAE,gBAAgB,IAAI,eAAe,MAAM;AAAA,IACpD;AAEA,UAAM,SAAS,MAAM,SAAS,KAAK;AACnC,UAAM,iBAAiB,kBAAkB,mBAAmB,MAAM,IAAI;AAEtE,WAAO,EAAE,gBAAgB,eAAe,MAAM;AAAA,EAChD,SAAS,OAAO;AACd,YAAQ,KAAK,+BAA+B,KAAK;AACjD,WAAO,EAAE,gBAAgB,IAAI,eAAe,MAAM;AAAA,EACpD;AACF;","names":["chunkText"]}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import Mux from '@mux/mux-node';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Shared credential bag for every workflow. Each property falls back to the
|
|
5
|
+
* corresponding environment variable when omitted.
|
|
6
|
+
*/
|
|
7
|
+
interface MuxAIConfig {
|
|
8
|
+
/** Override for the MUX_TOKEN_ID environment variable. */
|
|
9
|
+
muxTokenId?: string;
|
|
10
|
+
/** Override for the MUX_TOKEN_SECRET environment variable. */
|
|
11
|
+
muxTokenSecret?: string;
|
|
12
|
+
/** Mux signing key ID for signed playback IDs (defaults to the MUX_SIGNING_KEY environment variable). */
|
|
13
|
+
muxSigningKey?: string;
|
|
14
|
+
/** Mux signing key private key for signed playback IDs (defaults to the MUX_PRIVATE_KEY environment variable). */
|
|
15
|
+
muxPrivateKey?: string;
|
|
16
|
+
/** OpenAI API key (defaults to the OPENAI_API_KEY environment variable). */
|
|
17
|
+
openaiApiKey?: string;
|
|
18
|
+
/** Anthropic API key (defaults to the ANTHROPIC_API_KEY environment variable). */
|
|
19
|
+
anthropicApiKey?: string;
|
|
20
|
+
/** Google Generative AI API key (defaults to the GOOGLE_GENERATIVE_AI_API_KEY environment variable). */
|
|
21
|
+
googleApiKey?: string;
|
|
22
|
+
/** Hive Visual Moderation API key (defaults to the HIVE_API_KEY environment variable). */
|
|
23
|
+
hiveApiKey?: string;
|
|
24
|
+
}
|
|
25
|
+
/**
|
|
26
|
+
* Base options mixed into every higher-level workflow configuration.
|
|
27
|
+
*/
|
|
28
|
+
interface MuxAIOptions extends MuxAIConfig {
|
|
29
|
+
/** Optional timeout (ms) for helper utilities that support request limits. */
|
|
30
|
+
timeout?: number;
|
|
31
|
+
/**
|
|
32
|
+
* Optional cancellation signal passed through to underlying AI SDK calls.
|
|
33
|
+
* When aborted, in-flight model requests will be
|
|
34
|
+
* cancelled where supported.
|
|
35
|
+
*/
|
|
36
|
+
abortSignal?: AbortSignal;
|
|
37
|
+
}
|
|
38
|
+
/** Tone controls for the summarization helper. */
|
|
39
|
+
type ToneType = "normal" | "sassy" | "professional";
|
|
40
|
+
/** Common transport for image-based workflows. */
|
|
41
|
+
type ImageSubmissionMode = "url" | "base64";
|
|
42
|
+
/** Result of calling mux-node's asset retrieval helper. */
|
|
43
|
+
type MuxAsset = Awaited<ReturnType<Mux["video"]["assets"]["retrieve"]>>;
|
|
44
|
+
/** Single ready track extracted from a Mux asset. */
|
|
45
|
+
type AssetTextTrack = NonNullable<MuxAsset["tracks"]>[number];
|
|
46
|
+
/** Playback policy type for Mux assets. */
|
|
47
|
+
type PlaybackPolicy = "public" | "signed";
|
|
48
|
+
/** Convenience bundle returned by `getPlaybackIdForAsset`. */
|
|
49
|
+
interface PlaybackAsset {
|
|
50
|
+
asset: MuxAsset;
|
|
51
|
+
playbackId: string;
|
|
52
|
+
/** The policy type of the playback ID ('public' or 'signed'). */
|
|
53
|
+
policy: PlaybackPolicy;
|
|
54
|
+
}
|
|
55
|
+
/** Configuration for token-based chunking. */
|
|
56
|
+
interface TokenChunkingConfig {
|
|
57
|
+
type: "token";
|
|
58
|
+
/** Maximum tokens per chunk. */
|
|
59
|
+
maxTokens: number;
|
|
60
|
+
/** Number of overlapping tokens between chunks. */
|
|
61
|
+
overlap?: number;
|
|
62
|
+
}
|
|
63
|
+
/** Configuration for VTT-aware chunking that respects cue boundaries. */
|
|
64
|
+
interface VTTChunkingConfig {
|
|
65
|
+
type: "vtt";
|
|
66
|
+
/** Maximum tokens per chunk. */
|
|
67
|
+
maxTokens: number;
|
|
68
|
+
/** Number of cues to overlap between chunks (default: 2). */
|
|
69
|
+
overlapCues?: number;
|
|
70
|
+
}
|
|
71
|
+
/** Union type for all chunking strategy configurations. */
|
|
72
|
+
type ChunkingStrategy = TokenChunkingConfig | VTTChunkingConfig;
|
|
73
|
+
/** A single chunk of text extracted from a transcript. */
|
|
74
|
+
interface TextChunk {
|
|
75
|
+
/** Unique identifier for this chunk. */
|
|
76
|
+
id: string;
|
|
77
|
+
/** The text content of the chunk. */
|
|
78
|
+
text: string;
|
|
79
|
+
/** Number of tokens in this chunk. */
|
|
80
|
+
tokenCount: number;
|
|
81
|
+
/** Start time in seconds (if available from timestamped transcript). */
|
|
82
|
+
startTime?: number;
|
|
83
|
+
/** End time in seconds (if available from timestamped transcript). */
|
|
84
|
+
endTime?: number;
|
|
85
|
+
}
|
|
86
|
+
/** A chunk with its embedding vector. */
|
|
87
|
+
interface ChunkEmbedding {
|
|
88
|
+
/** Reference to the chunk ID. */
|
|
89
|
+
chunkId: string;
|
|
90
|
+
/** The embedding vector. */
|
|
91
|
+
embedding: number[];
|
|
92
|
+
/** Optional metadata for this chunk. */
|
|
93
|
+
metadata: {
|
|
94
|
+
startTime?: number;
|
|
95
|
+
endTime?: number;
|
|
96
|
+
tokenCount: number;
|
|
97
|
+
};
|
|
98
|
+
}
|
|
99
|
+
/** Result of generating embeddings for a video asset. */
|
|
100
|
+
interface VideoEmbeddingsResult {
|
|
101
|
+
/** The Mux asset ID. */
|
|
102
|
+
assetId: string;
|
|
103
|
+
/** Individual chunk embeddings. */
|
|
104
|
+
chunks: ChunkEmbedding[];
|
|
105
|
+
/** Averaged embedding across all chunks. */
|
|
106
|
+
averagedEmbedding: number[];
|
|
107
|
+
/** AI provider used. */
|
|
108
|
+
provider: string;
|
|
109
|
+
/** Model used for embedding generation. */
|
|
110
|
+
model: string;
|
|
111
|
+
/** Additional metadata about the generation. */
|
|
112
|
+
metadata: {
|
|
113
|
+
totalChunks: number;
|
|
114
|
+
totalTokens: number;
|
|
115
|
+
chunkingStrategy: string;
|
|
116
|
+
embeddingDimensions: number;
|
|
117
|
+
generatedAt: string;
|
|
118
|
+
};
|
|
119
|
+
}
|
|
120
|
+
/**
|
|
121
|
+
* Token usage breakdown returned by AI SDK providers.
|
|
122
|
+
* Used for efficiency and cost analysis.
|
|
123
|
+
*/
|
|
124
|
+
interface TokenUsage {
|
|
125
|
+
/** Number of tokens in the input prompt (text + image). */
|
|
126
|
+
inputTokens?: number;
|
|
127
|
+
/** Number of tokens generated in the output. */
|
|
128
|
+
outputTokens?: number;
|
|
129
|
+
/** Total tokens consumed (input + output). */
|
|
130
|
+
totalTokens?: number;
|
|
131
|
+
/** Tokens used for chain-of-thought reasoning (if applicable). */
|
|
132
|
+
reasoningTokens?: number;
|
|
133
|
+
/** Input tokens served from cache (reduces cost). */
|
|
134
|
+
cachedInputTokens?: number;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
export type { AssetTextTrack as A, ChunkingStrategy as C, ImageSubmissionMode as I, MuxAIConfig as M, PlaybackPolicy as P, ToneType as T, VTTChunkingConfig as V, MuxAIOptions as a, MuxAsset as b, PlaybackAsset as c, TokenChunkingConfig as d, TextChunk as e, ChunkEmbedding as f, VideoEmbeddingsResult as g, TokenUsage as h };
|