@mux/ai 0.5.2 → 0.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -86,6 +86,7 @@ S3_SECRET_ACCESS_KEY=your-secret-key
86
86
  | [`getSummaryAndTags`](./docs/WORKFLOWS.md#video-summarization)<br/>[API](./docs/API.md#getsummaryandtagsassetid-options) · [Source](./src/workflows/summarization.ts) | Generate titles, descriptions, and tags for an asset | OpenAI, Anthropic, Google | `gpt-5.1` (OpenAI), `claude-sonnet-4-5` (Anthropic), `gemini-3-flash-preview` (Google) | Video (required), Captions (optional) | None |
87
87
  | [`getModerationScores`](./docs/WORKFLOWS.md#content-moderation)<br/>[API](./docs/API.md#getmoderationscoresassetid-options) · [Source](./src/workflows/moderation.ts) | Detect inappropriate (sexual or violent) content in an asset | OpenAI, Hive | `omni-moderation-latest` (OpenAI) or Hive visual moderation task | Video (required) | None |
88
88
  | [`hasBurnedInCaptions`](./docs/WORKFLOWS.md#burned-in-caption-detection)<br/>[API](./docs/API.md#hasburnedincaptionsassetid-options) · [Source](./src/workflows/burned-in-captions.ts) | Detect burned-in captions (hardcoded subtitles) in an asset | OpenAI, Anthropic, Google | `gpt-5.1` (OpenAI), `claude-sonnet-4-5` (Anthropic), `gemini-3-flash-preview` (Google) | Video (required) | None |
89
+ | [`askQuestions`](./docs/WORKFLOWS.md#ask-questions)<br/>[API](./docs/API.md#askquestionsassetid-questions-options) · [Source](./src/workflows/ask-questions.ts) | Answer yes/no questions about an asset's content | OpenAI, Anthropic, Google | `gpt-5.1` (OpenAI), `claude-sonnet-4-5` (Anthropic), `gemini-3-flash-preview` (Google) | Video (required), Captions (optional) | None |
89
90
  | [`generateChapters`](./docs/WORKFLOWS.md#chapter-generation)<br/>[API](./docs/API.md#generatechaptersassetid-languagecode-options) · [Source](./src/workflows/chapters.ts) | Generate chapter markers for an asset using the transcript | OpenAI, Anthropic, Google | `gpt-5.1` (OpenAI), `claude-sonnet-4-5` (Anthropic), `gemini-3-flash-preview` (Google) | Video or audio-only, Captions/Transcripts (required) | None |
90
91
  | [`generateEmbeddings`](./docs/WORKFLOWS.md#embeddings)<br/>[API](./docs/API.md#generateembeddingsassetid-options) · [Source](./src/workflows/embeddings.ts) | Generate vector embeddings for an asset's transcript chunks | OpenAI, Google | `text-embedding-3-small` (OpenAI), `gemini-embedding-001` (Google) | Video or audio-only, Captions/Transcripts (required) | None |
91
92
  | [`translateCaptions`](./docs/WORKFLOWS.md#caption-translation)<br/>[API](./docs/API.md#translatecaptionsassetid-fromlanguagecode-tolanguagecode-options) · [Source](./src/workflows/translate-captions.ts) | Translate an asset's captions into different languages | OpenAI, Anthropic, Google | `gpt-5.1` (OpenAI), `claude-sonnet-4-5` (Anthropic), `gemini-3-flash-preview` (Google) | Video or audio-only, Captions/Transcripts (required) | AWS S3 (if `uploadToMux=true`) |
@@ -95,6 +96,9 @@ S3_SECRET_ACCESS_KEY=your-secret-key
95
96
 
96
97
  All workflows are compatible with [Workflow DevKit](https://useworkflow.dev). The workflows in this SDK are exported with `"use workflow"` directives and `"use step"` directives in the code.
97
98
 
99
+ Workflow DevKit serializes workflow inputs/outputs for observability. To avoid sending plaintext secrets through `start(...)`, encrypt credentials in the trigger host and decrypt them in workflow steps.
100
+ See the dedicated [Workflow Encryption guide](./docs/WORKFLOW-ENCRYPTION.md) for full setup and patterns.
101
+
98
102
  If you are using Workflow DevKit in your project, then you must call workflow functions like this:
99
103
 
100
104
  ```ts
@@ -108,18 +112,23 @@ const run = await start(getSummaryAndTags, [assetId]);
108
112
  // const result = await run.returnValue
109
113
  ```
110
114
 
111
- ### Multi-tenant credentials with Workflow Dev Kit (interim encryption pattern)
115
+ ### Multi-tenant credentials with Workflow DevKit
116
+
117
+ Set a shared workflow secret key (base64-encoded 32-byte value) in your environment:
118
+
119
+ ```bash
120
+ MUX_AI_WORKFLOW_SECRET_KEY=your_base64_32_byte_key
121
+ ```
112
122
 
113
- Workflow Dev Kit serializes workflow inputs and step I/O. Do not pass plaintext secrets through
114
- `start()`. Instead, encrypt credentials in userland and pass ciphertext only.
115
- Set `MUX_AI_WORKFLOW_SECRET_KEY` to a base64-encoded 32-byte key on the workflow execution host.
123
+ Then encrypt credentials before calling `start()`:
116
124
 
117
125
  ```ts
118
126
  import { start } from "workflow/api";
119
- import { encryptForWorkflow, getSummaryAndTags } from "@mux/ai/workflows";
127
+ import { encryptForWorkflow } from "@mux/ai";
128
+ import { getSummaryAndTags } from "@mux/ai/workflows";
120
129
 
121
130
  const workflowKey = process.env.MUX_AI_WORKFLOW_SECRET_KEY!;
122
- const encryptedCredentials = encryptForWorkflow(
131
+ const encryptedCredentials = await encryptForWorkflow(
123
132
  {
124
133
  muxTokenId: "mux-token-id",
125
134
  muxTokenSecret: "mux-token-secret",
@@ -130,33 +139,27 @@ const encryptedCredentials = encryptForWorkflow(
130
139
 
131
140
  const run = await start(getSummaryAndTags, [
132
141
  "your-asset-id",
133
- { provider: "openai", credentials: encryptedCredentials },
142
+ {
143
+ provider: "openai",
144
+ credentials: encryptedCredentials,
145
+ },
134
146
  ]);
135
147
  ```
136
148
 
137
- If you build custom steps, decrypt inside the step using the same workflow key:
138
-
139
- ```ts
140
- import { decryptFromWorkflow } from "@mux/ai/workflows";
141
-
142
- async function resolveCredentials(encrypted: unknown) {
143
- "use step";
144
- return decryptFromWorkflow(
145
- encrypted as any,
146
- process.env.MUX_AI_WORKFLOW_SECRET_KEY!,
147
- );
148
- }
149
- ```
149
+ For Mux tokens specifically, `setWorkflowCredentialsProvider(...)` (or environment variables) is still recommended so raw Mux secrets are never embedded in workflow input payloads.
150
150
 
151
- You can also register a credential provider on the execution host to resolve secrets inside steps:
151
+ You can also register a credential provider on the execution host to resolve secrets inside steps.
152
+ This is useful for dynamic key resolution, e.g. rotating keys or per-tenant secrets:
152
153
 
153
154
  ```ts
154
- import { setWorkflowCredentialsProvider } from "@mux/ai/workflows";
155
+ import {
156
+ setWorkflowCredentialsProvider,
157
+ } from "@mux/ai";
155
158
 
156
159
  setWorkflowCredentialsProvider(async () => ({
157
160
  muxTokenId: "mux-token-id",
158
161
  muxTokenSecret: "mux-token-secret",
159
- openaiApiKey: "openai-api-key",
162
+ openaiApiKey: await getOpenAIKeyForTenant(),
160
163
  }));
161
164
  ```
162
165
 
@@ -466,6 +469,8 @@ S3_SECRET_ACCESS_KEY=your-r2-secret-key
466
469
 
467
470
  - **[Workflows Guide](./docs/WORKFLOWS.md)** - Detailed guide to each pre-built workflow with examples
468
471
  - **[API Reference](./docs/API.md)** - Complete API documentation for all functions, parameters, and return types
472
+ - **[Workflow Encryption](./docs/WORKFLOW-ENCRYPTION.md)** - Encrypting credentials across Workflow DevKit boundaries
473
+ - **[Storage Adapters](./docs/STORAGE-ADAPTERS.md)** - Using custom storage SDKs (AWS, Cloudflare R2, MinIO)
469
474
  - **[Primitives Guide](./docs/PRIMITIVES.md)** - Low-level building blocks for custom workflows
470
475
  - **[Examples](./docs/EXAMPLES.md)** - Running examples from the repository
471
476
 
@@ -2,7 +2,7 @@ import { z } from 'zod';
2
2
  import { createAnthropic } from '@ai-sdk/anthropic';
3
3
  import { createGoogleGenerativeAI } from '@ai-sdk/google';
4
4
  import { createOpenAI } from '@ai-sdk/openai';
5
- import { k as TokenUsage, M as MuxAIOptions, I as ImageSubmissionMode, C as ChunkingStrategy, j as VideoEmbeddingsResult, T as ToneType } from './types-BhVuLeSp.js';
5
+ import { M as MuxAIOptions, I as ImageSubmissionMode, h as TokenUsage, b as ChunkingStrategy, j as VideoEmbeddingsResult, i as ToneType, f as StorageAdapter } from './types-BQVi_wnh.js';
6
6
 
7
7
  interface ImageDownloadOptions {
8
8
  /** Request timeout in milliseconds (default: 10000) */
@@ -17,6 +17,155 @@ interface ImageDownloadOptions {
17
17
  exponentialBackoff?: boolean;
18
18
  }
19
19
 
20
+ type SupportedProvider = "openai" | "anthropic" | "google";
21
+ type SupportedEmbeddingProvider = "openai" | "google";
22
+ type OpenAIModelId = Parameters<ReturnType<typeof createOpenAI>["chat"]>[0];
23
+ type AnthropicModelId = Parameters<ReturnType<typeof createAnthropic>["chat"]>[0];
24
+ type GoogleModelId = Parameters<ReturnType<typeof createGoogleGenerativeAI>["chat"]>[0];
25
+ type OpenAIEmbeddingModelId = Parameters<ReturnType<typeof createOpenAI>["embedding"]>[0];
26
+ type GoogleEmbeddingModelId = Parameters<ReturnType<typeof createGoogleGenerativeAI>["textEmbeddingModel"]>[0];
27
+ interface ModelIdByProvider {
28
+ openai: OpenAIModelId;
29
+ anthropic: AnthropicModelId;
30
+ google: GoogleModelId;
31
+ }
32
+ interface EmbeddingModelIdByProvider {
33
+ openai: OpenAIEmbeddingModelId;
34
+ google: GoogleEmbeddingModelId;
35
+ }
36
+
37
+ /** A single yes/no question to be answered about video content. */
38
+ interface Question {
39
+ /** The question text */
40
+ question: string;
41
+ }
42
+ /** A single answer to a question. */
43
+ interface QuestionAnswer {
44
+ /** The original question */
45
+ question: string;
46
+ /** Answer selected from the allowed options */
47
+ answer: string;
48
+ /** Confidence score between 0 and 1 */
49
+ confidence: number;
50
+ /** Reasoning explaining the answer based on observable evidence */
51
+ reasoning: string;
52
+ }
53
+ /** Configuration options for askQuestions workflow. */
54
+ interface AskQuestionsOptions extends MuxAIOptions {
55
+ /** AI provider to run (defaults to 'openai'). */
56
+ provider?: SupportedProvider;
57
+ /** Provider-specific chat model identifier. */
58
+ model?: ModelIdByProvider[SupportedProvider];
59
+ /** Allowed answers for each question (defaults to ["yes", "no"]). */
60
+ answerOptions?: string[];
61
+ /** Fetch transcript alongside storyboard (defaults to true). */
62
+ includeTranscript?: boolean;
63
+ /** Strip timestamps/markup from transcripts (defaults to true). */
64
+ cleanTranscript?: boolean;
65
+ /** How storyboard should be delivered to the provider (defaults to 'url'). */
66
+ imageSubmissionMode?: ImageSubmissionMode;
67
+ /** Fine-tune storyboard downloads when imageSubmissionMode === 'base64'. */
68
+ imageDownloadOptions?: ImageDownloadOptions;
69
+ /** Storyboard width in pixels (defaults to 640). */
70
+ storyboardWidth?: number;
71
+ }
72
+ /** Structured return payload for askQuestions workflow. */
73
+ interface AskQuestionsResult {
74
+ /** Asset ID passed into the workflow. */
75
+ assetId: string;
76
+ /** Array of answers for each question. */
77
+ answers: QuestionAnswer[];
78
+ /** Storyboard image URL that was analyzed. */
79
+ storyboardUrl: string;
80
+ /** Token usage from the AI provider (for efficiency/cost analysis). */
81
+ usage?: TokenUsage;
82
+ /** Raw transcript text used for analysis (when includeTranscript is true). */
83
+ transcriptText?: string;
84
+ }
85
+ /** Zod schema for a single answer. */
86
+ declare const questionAnswerSchema: z.ZodObject<{
87
+ question: z.ZodString;
88
+ answer: z.ZodString;
89
+ confidence: z.ZodNumber;
90
+ reasoning: z.ZodString;
91
+ }, "strip", z.ZodTypeAny, {
92
+ question: string;
93
+ answer: string;
94
+ confidence: number;
95
+ reasoning: string;
96
+ }, {
97
+ question: string;
98
+ answer: string;
99
+ confidence: number;
100
+ reasoning: string;
101
+ }>;
102
+ type QuestionAnswerType = z.infer<typeof questionAnswerSchema>;
103
+ declare function createAskQuestionsSchema(allowedAnswers: [string, ...string[]]): z.ZodObject<{
104
+ answers: z.ZodArray<z.ZodObject<{
105
+ question: z.ZodString;
106
+ confidence: z.ZodNumber;
107
+ reasoning: z.ZodString;
108
+ } & {
109
+ answer: z.ZodEnum<[string, ...string[]]>;
110
+ }, "strip", z.ZodTypeAny, {
111
+ question: string;
112
+ answer: string;
113
+ confidence: number;
114
+ reasoning: string;
115
+ }, {
116
+ question: string;
117
+ answer: string;
118
+ confidence: number;
119
+ reasoning: string;
120
+ }>, "many">;
121
+ }, "strip", z.ZodTypeAny, {
122
+ answers: {
123
+ question: string;
124
+ answer: string;
125
+ confidence: number;
126
+ reasoning: string;
127
+ }[];
128
+ }, {
129
+ answers: {
130
+ question: string;
131
+ answer: string;
132
+ confidence: number;
133
+ reasoning: string;
134
+ }[];
135
+ }>;
136
+ type AskQuestionsSchema = ReturnType<typeof createAskQuestionsSchema>;
137
+ type AskQuestionsType = z.infer<AskQuestionsSchema>;
138
+ /**
139
+ * Answer questions about a Mux video asset by analyzing storyboard frames and transcript.
140
+ * Defaults to yes/no answers unless `answerOptions` are provided.
141
+ *
142
+ * This workflow takes a list of questions and returns structured answers with confidence
143
+ * scores and reasoning for each question. All questions are processed in a single LLM call for
144
+ * efficiency.
145
+ *
146
+ * @param assetId - The Mux asset ID to analyze
147
+ * @param questions - Array of questions to answer (each must have a 'question' field)
148
+ * @param options - Configuration options for the workflow
149
+ * @returns Structured answers with confidence scores and reasoning
150
+ *
151
+ * @example
152
+ * ```typescript
153
+ * const result = await askQuestions("abc123", [
154
+ * { question: "Does this video contain cooking?" },
155
+ * { question: "Are there people visible in the video?" },
156
+ * ]);
157
+ *
158
+ * console.log(result.answers[0]);
159
+ * // {
160
+ * // question: "Does this video contain cooking?",
161
+ * // answer: "yes",
162
+ * // confidence: 0.95,
163
+ * // reasoning: "A chef prepares ingredients and cooks in a kitchen throughout the video."
164
+ * // }
165
+ * ```
166
+ */
167
+ declare function askQuestions(assetId: string, questions: Question[], options?: AskQuestionsOptions): Promise<AskQuestionsResult>;
168
+
20
169
  /**
21
170
  * A single section of a prompt, rendered as an XML-like tag.
22
171
  */
@@ -39,23 +188,6 @@ type SectionOverride = string | PromptSection | undefined;
39
188
  */
40
189
  type PromptOverrides<TSections extends string> = Partial<Record<TSections, SectionOverride>>;
41
190
 
42
- type SupportedProvider = "openai" | "anthropic" | "google";
43
- type SupportedEmbeddingProvider = "openai" | "google";
44
- type OpenAIModelId = Parameters<ReturnType<typeof createOpenAI>["chat"]>[0];
45
- type AnthropicModelId = Parameters<ReturnType<typeof createAnthropic>["chat"]>[0];
46
- type GoogleModelId = Parameters<ReturnType<typeof createGoogleGenerativeAI>["chat"]>[0];
47
- type OpenAIEmbeddingModelId = Parameters<ReturnType<typeof createOpenAI>["embedding"]>[0];
48
- type GoogleEmbeddingModelId = Parameters<ReturnType<typeof createGoogleGenerativeAI>["textEmbeddingModel"]>[0];
49
- interface ModelIdByProvider {
50
- openai: OpenAIModelId;
51
- anthropic: AnthropicModelId;
52
- google: GoogleModelId;
53
- }
54
- interface EmbeddingModelIdByProvider {
55
- openai: OpenAIEmbeddingModelId;
56
- google: GoogleEmbeddingModelId;
57
- }
58
-
59
191
  /** Structured payload returned from `hasBurnedInCaptions`. */
60
192
  interface BurnedInCaptionsResult {
61
193
  assetId: string;
@@ -108,12 +240,12 @@ declare const burnedInCaptionsSchema: z.ZodObject<{
108
240
  confidence: z.ZodNumber;
109
241
  detectedLanguage: z.ZodNullable<z.ZodString>;
110
242
  }, "strip", z.ZodTypeAny, {
111
- hasBurnedInCaptions: boolean;
112
243
  confidence: number;
244
+ hasBurnedInCaptions: boolean;
113
245
  detectedLanguage: string | null;
114
246
  }, {
115
- hasBurnedInCaptions: boolean;
116
247
  confidence: number;
248
+ hasBurnedInCaptions: boolean;
117
249
  detectedLanguage: string | null;
118
250
  }>;
119
251
  /** Inferred shape returned from the burned-in captions schema. */
@@ -237,6 +369,7 @@ interface ThumbnailModerationScore {
237
369
  sexual: number;
238
370
  violence: number;
239
371
  error: boolean;
372
+ errorMessage?: string;
240
373
  }
241
374
  /** Aggregated moderation payload returned from `getModerationScores`. */
242
375
  interface ModerationResult {
@@ -246,6 +379,8 @@ interface ModerationResult {
246
379
  /** Convenience flag so callers can understand why `thumbnailScores` may contain a transcript entry. */
247
380
  isAudioOnly: boolean;
248
381
  thumbnailScores: ThumbnailModerationScore[];
382
+ /** Workflow usage metadata (asset duration, thumbnails, etc.). */
383
+ usage?: TokenUsage;
249
384
  maxScores: {
250
385
  sexual: number;
251
386
  violence: number;
@@ -318,7 +453,7 @@ declare const summarySchema: z.ZodObject<{
318
453
  keywords: z.ZodArray<z.ZodString, "many">;
319
454
  title: z.ZodString;
320
455
  description: z.ZodString;
321
- }, "strip", z.ZodTypeAny, {
456
+ }, "strict", z.ZodTypeAny, {
322
457
  title: string;
323
458
  description: string;
324
459
  keywords: string[];
@@ -497,6 +632,8 @@ interface AudioTranslationResult {
497
632
  dubbingId: string;
498
633
  uploadedTrackId?: string;
499
634
  presignedUrl?: string;
635
+ /** Workflow usage metadata (asset duration, thumbnails, etc.). */
636
+ usage?: TokenUsage;
500
637
  }
501
638
  /** Configuration accepted by `translateAudio`. */
502
639
  interface AudioTranslationOptions extends MuxAIOptions {
@@ -515,8 +652,8 @@ interface AudioTranslationOptions extends MuxAIOptions {
515
652
  * bucket and attached to the Mux asset.
516
653
  */
517
654
  uploadToMux?: boolean;
518
- /** Override for env.ELEVENLABS_API_KEY. */
519
- elevenLabsApiKey?: string;
655
+ /** Optional storage adapter override for upload + presign operations. */
656
+ storageAdapter?: StorageAdapter;
520
657
  }
521
658
  declare function translateAudio(assetId: string, toLanguageCode: string, options?: AudioTranslationOptions): Promise<AudioTranslationResult>;
522
659
 
@@ -561,6 +698,8 @@ interface TranslationOptions<P extends SupportedProvider = SupportedProvider> ex
561
698
  * bucket and attached to the Mux asset.
562
699
  */
563
700
  uploadToMux?: boolean;
701
+ /** Optional storage adapter override for upload + presign operations. */
702
+ storageAdapter?: StorageAdapter;
564
703
  }
565
704
  /** Schema used when requesting caption translation from a language model. */
566
705
  declare const translationSchema: z.ZodObject<{
@@ -574,6 +713,9 @@ declare const translationSchema: z.ZodObject<{
574
713
  type TranslationPayload = z.infer<typeof translationSchema>;
575
714
  declare function translateCaptions<P extends SupportedProvider = SupportedProvider>(assetId: string, fromLanguageCode: string, toLanguageCode: string, options: TranslationOptions<P>): Promise<TranslationResult>;
576
715
 
716
+ type index_AskQuestionsOptions = AskQuestionsOptions;
717
+ type index_AskQuestionsResult = AskQuestionsResult;
718
+ type index_AskQuestionsType = AskQuestionsType;
577
719
  type index_AudioTranslationOptions = AudioTranslationOptions;
578
720
  type index_AudioTranslationResult = AudioTranslationResult;
579
721
  type index_BurnedInCaptionsAnalysis = BurnedInCaptionsAnalysis;
@@ -595,6 +737,9 @@ type index_HiveModerationSource = HiveModerationSource;
595
737
  type index_ModerationOptions = ModerationOptions;
596
738
  type index_ModerationProvider = ModerationProvider;
597
739
  type index_ModerationResult = ModerationResult;
740
+ type index_Question = Question;
741
+ type index_QuestionAnswer = QuestionAnswer;
742
+ type index_QuestionAnswerType = QuestionAnswerType;
598
743
  declare const index_SUMMARY_KEYWORD_LIMIT: typeof SUMMARY_KEYWORD_LIMIT;
599
744
  type index_SummarizationOptions = SummarizationOptions;
600
745
  type index_SummarizationPromptOverrides = SummarizationPromptOverrides;
@@ -605,6 +750,7 @@ type index_ThumbnailModerationScore = ThumbnailModerationScore;
605
750
  type index_TranslationOptions<P extends SupportedProvider = SupportedProvider> = TranslationOptions<P>;
606
751
  type index_TranslationPayload = TranslationPayload;
607
752
  type index_TranslationResult = TranslationResult;
753
+ declare const index_askQuestions: typeof askQuestions;
608
754
  declare const index_burnedInCaptionsSchema: typeof burnedInCaptionsSchema;
609
755
  declare const index_chapterSchema: typeof chapterSchema;
610
756
  declare const index_chaptersSchema: typeof chaptersSchema;
@@ -614,12 +760,13 @@ declare const index_generateVideoEmbeddings: typeof generateVideoEmbeddings;
614
760
  declare const index_getModerationScores: typeof getModerationScores;
615
761
  declare const index_getSummaryAndTags: typeof getSummaryAndTags;
616
762
  declare const index_hasBurnedInCaptions: typeof hasBurnedInCaptions;
763
+ declare const index_questionAnswerSchema: typeof questionAnswerSchema;
617
764
  declare const index_summarySchema: typeof summarySchema;
618
765
  declare const index_translateAudio: typeof translateAudio;
619
766
  declare const index_translateCaptions: typeof translateCaptions;
620
767
  declare const index_translationSchema: typeof translationSchema;
621
768
  declare namespace index {
622
- export { type index_AudioTranslationOptions as AudioTranslationOptions, type index_AudioTranslationResult as AudioTranslationResult, type index_BurnedInCaptionsAnalysis as BurnedInCaptionsAnalysis, type index_BurnedInCaptionsOptions as BurnedInCaptionsOptions, type index_BurnedInCaptionsPromptOverrides as BurnedInCaptionsPromptOverrides, type index_BurnedInCaptionsPromptSections as BurnedInCaptionsPromptSections, type index_BurnedInCaptionsResult as BurnedInCaptionsResult, type index_Chapter as Chapter, type index_ChapterSystemPromptSections as ChapterSystemPromptSections, type index_ChaptersOptions as ChaptersOptions, type index_ChaptersPromptOverrides as ChaptersPromptOverrides, type index_ChaptersPromptSections as ChaptersPromptSections, type index_ChaptersResult as ChaptersResult, type index_ChaptersType as ChaptersType, type index_EmbeddingsOptions as EmbeddingsOptions, type index_EmbeddingsResult as EmbeddingsResult, type index_HiveModerationOutput as HiveModerationOutput, type index_HiveModerationSource as HiveModerationSource, type index_ModerationOptions as ModerationOptions, type index_ModerationProvider as ModerationProvider, type index_ModerationResult as ModerationResult, index_SUMMARY_KEYWORD_LIMIT as SUMMARY_KEYWORD_LIMIT, type index_SummarizationOptions as SummarizationOptions, type index_SummarizationPromptOverrides as SummarizationPromptOverrides, type index_SummarizationPromptSections as SummarizationPromptSections, type index_SummaryAndTagsResult as SummaryAndTagsResult, type index_SummaryType as SummaryType, type index_ThumbnailModerationScore as ThumbnailModerationScore, type index_TranslationOptions as TranslationOptions, type index_TranslationPayload as TranslationPayload, type index_TranslationResult as TranslationResult, index_burnedInCaptionsSchema as burnedInCaptionsSchema, index_chapterSchema as chapterSchema, index_chaptersSchema as chaptersSchema, index_generateChapters as generateChapters, index_generateEmbeddings as 
generateEmbeddings, index_generateVideoEmbeddings as generateVideoEmbeddings, index_getModerationScores as getModerationScores, index_getSummaryAndTags as getSummaryAndTags, index_hasBurnedInCaptions as hasBurnedInCaptions, index_summarySchema as summarySchema, index_translateAudio as translateAudio, index_translateCaptions as translateCaptions, index_translationSchema as translationSchema };
769
+ export { type index_AskQuestionsOptions as AskQuestionsOptions, type index_AskQuestionsResult as AskQuestionsResult, type index_AskQuestionsType as AskQuestionsType, type index_AudioTranslationOptions as AudioTranslationOptions, type index_AudioTranslationResult as AudioTranslationResult, type index_BurnedInCaptionsAnalysis as BurnedInCaptionsAnalysis, type index_BurnedInCaptionsOptions as BurnedInCaptionsOptions, type index_BurnedInCaptionsPromptOverrides as BurnedInCaptionsPromptOverrides, type index_BurnedInCaptionsPromptSections as BurnedInCaptionsPromptSections, type index_BurnedInCaptionsResult as BurnedInCaptionsResult, type index_Chapter as Chapter, type index_ChapterSystemPromptSections as ChapterSystemPromptSections, type index_ChaptersOptions as ChaptersOptions, type index_ChaptersPromptOverrides as ChaptersPromptOverrides, type index_ChaptersPromptSections as ChaptersPromptSections, type index_ChaptersResult as ChaptersResult, type index_ChaptersType as ChaptersType, type index_EmbeddingsOptions as EmbeddingsOptions, type index_EmbeddingsResult as EmbeddingsResult, type index_HiveModerationOutput as HiveModerationOutput, type index_HiveModerationSource as HiveModerationSource, type index_ModerationOptions as ModerationOptions, type index_ModerationProvider as ModerationProvider, type index_ModerationResult as ModerationResult, type index_Question as Question, type index_QuestionAnswer as QuestionAnswer, type index_QuestionAnswerType as QuestionAnswerType, index_SUMMARY_KEYWORD_LIMIT as SUMMARY_KEYWORD_LIMIT, type index_SummarizationOptions as SummarizationOptions, type index_SummarizationPromptOverrides as SummarizationPromptOverrides, type index_SummarizationPromptSections as SummarizationPromptSections, type index_SummaryAndTagsResult as SummaryAndTagsResult, type index_SummaryType as SummaryType, type index_ThumbnailModerationScore as ThumbnailModerationScore, type index_TranslationOptions as TranslationOptions, type index_TranslationPayload as 
TranslationPayload, type index_TranslationResult as TranslationResult, index_askQuestions as askQuestions, index_burnedInCaptionsSchema as burnedInCaptionsSchema, index_chapterSchema as chapterSchema, index_chaptersSchema as chaptersSchema, index_generateChapters as generateChapters, index_generateEmbeddings as generateEmbeddings, index_generateVideoEmbeddings as generateVideoEmbeddings, index_getModerationScores as getModerationScores, index_getSummaryAndTags as getSummaryAndTags, index_hasBurnedInCaptions as hasBurnedInCaptions, index_questionAnswerSchema as questionAnswerSchema, index_summarySchema as summarySchema, index_translateAudio as translateAudio, index_translateCaptions as translateCaptions, index_translationSchema as translationSchema };
623
770
  }
624
771
 
625
- export { type SummarizationPromptSections as A, type BurnedInCaptionsResult as B, type Chapter as C, type SummarizationPromptOverrides as D, type EmbeddingsOptions as E, type SummarizationOptions as F, getSummaryAndTags as G, type HiveModerationSource as H, type AudioTranslationResult as I, type AudioTranslationOptions as J, translateAudio as K, type TranslationResult as L, type ModerationResult as M, type TranslationOptions as N, translationSchema as O, type TranslationPayload as P, translateCaptions as Q, SUMMARY_KEYWORD_LIMIT as S, type ThumbnailModerationScore as T, type BurnedInCaptionsPromptSections as a, type BurnedInCaptionsPromptOverrides as b, type BurnedInCaptionsOptions as c, burnedInCaptionsSchema as d, type BurnedInCaptionsAnalysis as e, chapterSchema as f, chaptersSchema as g, hasBurnedInCaptions as h, index as i, type ChaptersType as j, type ChaptersResult as k, type ChaptersPromptSections as l, type ChaptersPromptOverrides as m, type ChaptersOptions as n, type ChapterSystemPromptSections as o, generateChapters as p, type EmbeddingsResult as q, generateEmbeddings as r, generateVideoEmbeddings as s, type ModerationProvider as t, type HiveModerationOutput as u, type ModerationOptions as v, getModerationScores as w, summarySchema as x, type SummaryType as y, type SummaryAndTagsResult as z };
772
+ export { type AskQuestionsOptions as A, type BurnedInCaptionsAnalysis as B, type Chapter as C, type TranslationOptions as D, type EmbeddingsOptions as E, type TranslationPayload as F, type TranslationResult as G, type HiveModerationOutput as H, askQuestions as I, burnedInCaptionsSchema as J, chapterSchema as K, chaptersSchema as L, type ModerationOptions as M, generateChapters as N, generateEmbeddings as O, generateVideoEmbeddings as P, type Question as Q, getModerationScores as R, SUMMARY_KEYWORD_LIMIT as S, type ThumbnailModerationScore as T, getSummaryAndTags as U, hasBurnedInCaptions as V, questionAnswerSchema as W, summarySchema as X, translateAudio as Y, translateCaptions as Z, translationSchema as _, type AskQuestionsResult as a, type AskQuestionsType as b, type AudioTranslationOptions as c, type AudioTranslationResult as d, type BurnedInCaptionsOptions as e, type BurnedInCaptionsPromptOverrides as f, type BurnedInCaptionsPromptSections as g, type BurnedInCaptionsResult as h, index as i, type ChapterSystemPromptSections as j, type ChaptersOptions as k, type ChaptersPromptOverrides as l, type ChaptersPromptSections as m, type ChaptersResult as n, type ChaptersType as o, type EmbeddingsResult as p, type HiveModerationSource as q, type ModerationProvider as r, type ModerationResult as s, type QuestionAnswer as t, type QuestionAnswerType as u, type SummarizationOptions as v, type SummarizationPromptOverrides as w, type SummarizationPromptSections as x, type SummaryAndTagsResult as y, type SummaryType as z };
@@ -1,4 +1,103 @@
1
- import { b as WorkflowCredentialsInput, A as AssetTextTrack, c as MuxAsset, h as TextChunk, C as ChunkingStrategy } from './types-BhVuLeSp.js';
1
+ import { k as WorkflowCredentialsInput, A as AssetTextTrack, d as MuxAsset, T as TextChunk, b as ChunkingStrategy } from './types-BQVi_wnh.js';
2
+
3
+ interface HeatmapOptions {
4
+ /** Time window for results, e.g., '7:days' (default: '7:days') */
5
+ timeframe?: string;
6
+ /** Optional workflow credentials */
7
+ credentials?: WorkflowCredentialsInput;
8
+ }
9
+ interface HeatmapResponse {
10
+ assetId?: string;
11
+ videoId?: string;
12
+ playbackId?: string;
13
+ /** Array of 100 values representing engagement for each 1/100th of the video */
14
+ heatmap: number[];
15
+ timeframe: [number, number];
16
+ }
17
+ /**
18
+ * Fetches engagement heatmap for a Mux asset.
19
+ * Returns a length 100 array where each value represents how many times
20
+ * that 1/100th of the video was watched.
21
+ *
22
+ * @param assetId - The Mux asset ID
23
+ * @param options - Heatmap query options
24
+ * @returns Heatmap data with 100 engagement values
25
+ */
26
+ declare function getHeatmapForAsset(assetId: string, options?: HeatmapOptions): Promise<HeatmapResponse>;
27
+ /**
28
+ * Fetches engagement heatmap for a Mux video ID.
29
+ * Returns a length 100 array where each value represents how many times
30
+ * that 1/100th of the video was watched.
31
+ *
32
+ * @param videoId - The Mux video ID
33
+ * @param options - Heatmap query options
34
+ * @returns Heatmap data with 100 engagement values
35
+ */
36
+ declare function getHeatmapForVideo(videoId: string, options?: HeatmapOptions): Promise<HeatmapResponse>;
37
+ /**
38
+ * Fetches engagement heatmap for a Mux playback ID.
39
+ * Returns a length 100 array where each value represents how many times
40
+ * that 1/100th of the video was watched.
41
+ *
42
+ * @param playbackId - The Mux playback ID
43
+ * @param options - Heatmap query options
44
+ * @returns Heatmap data with 100 engagement values
45
+ */
46
+ declare function getHeatmapForPlaybackId(playbackId: string, options?: HeatmapOptions): Promise<HeatmapResponse>;
47
+
48
+ interface Hotspot {
49
+ /** Inclusive start time in milliseconds */
50
+ startMs: number;
51
+ /** Exclusive end time in milliseconds */
52
+ endMs: number;
53
+ /** Hotspot score using distribution-based normalization (0-1) */
54
+ score: number;
55
+ }
56
+ interface HotspotOptions {
57
+ /** Maximum number of hotspots to return (default: 5) */
58
+ limit?: number;
59
+ /** Sort order: 'asc' or 'desc' (default: 'desc') */
60
+ orderDirection?: "asc" | "desc";
61
+ /** Order by field (default: 'score') */
62
+ orderBy?: "score";
63
+ /** Time window for results, e.g., '7:days' (default: '7:days') */
64
+ timeframe?: string;
65
+ /** Optional workflow credentials */
66
+ credentials?: WorkflowCredentialsInput;
67
+ }
68
+ interface HotspotResponse {
69
+ assetId?: string;
70
+ videoId?: string;
71
+ playbackId?: string;
72
+ hotspots: Hotspot[];
73
+ }
74
+ /**
75
+ * Fetches engagement hotspots for a Mux asset.
76
+ * Returns the top N "hot" time ranges based on engagement data.
77
+ *
78
+ * @param assetId - The Mux asset ID
79
+ * @param options - Hotspot query options
80
+ * @returns Array of hotspots with time ranges and scores
81
+ */
82
+ declare function getHotspotsForAsset(assetId: string, options?: HotspotOptions): Promise<Hotspot[]>;
83
+ /**
84
+ * Fetches engagement hotspots for a Mux video ID.
85
+ * Returns the top N "hot" time ranges based on engagement data.
86
+ *
87
+ * @param videoId - The Mux video ID
88
+ * @param options - Hotspot query options
89
+ * @returns Array of hotspots with time ranges and scores
90
+ */
91
+ declare function getHotspotsForVideo(videoId: string, options?: HotspotOptions): Promise<Hotspot[]>;
92
+ /**
93
+ * Fetches engagement hotspots for a Mux playback ID.
94
+ * Returns the top N "hot" time ranges based on engagement data.
95
+ *
96
+ * @param playbackId - The Mux playback ID
97
+ * @param options - Hotspot query options
98
+ * @returns Array of hotspots with time ranges and scores
99
+ */
100
+ declare function getHotspotsForPlaybackId(playbackId: string, options?: HotspotOptions): Promise<Hotspot[]>;
2
101
 
3
102
  declare const DEFAULT_STORYBOARD_WIDTH = 640;
4
103
  /**
@@ -127,6 +226,11 @@ interface ThumbnailOptions {
127
226
  declare function getThumbnailUrls(playbackId: string, duration: number, options?: ThumbnailOptions): Promise<string[]>;
128
227
 
129
228
  declare const index_DEFAULT_STORYBOARD_WIDTH: typeof DEFAULT_STORYBOARD_WIDTH;
229
+ type index_HeatmapOptions = HeatmapOptions;
230
+ type index_HeatmapResponse = HeatmapResponse;
231
+ type index_Hotspot = Hotspot;
232
+ type index_HotspotOptions = HotspotOptions;
233
+ type index_HotspotResponse = HotspotResponse;
130
234
  type index_ThumbnailOptions = ThumbnailOptions;
131
235
  type index_TranscriptFetchOptions = TranscriptFetchOptions;
132
236
  type index_TranscriptResult = TranscriptResult;
@@ -140,6 +244,12 @@ declare const index_extractTextFromVTT: typeof extractTextFromVTT;
140
244
  declare const index_extractTimestampedTranscript: typeof extractTimestampedTranscript;
141
245
  declare const index_fetchTranscriptForAsset: typeof fetchTranscriptForAsset;
142
246
  declare const index_findCaptionTrack: typeof findCaptionTrack;
247
+ declare const index_getHeatmapForAsset: typeof getHeatmapForAsset;
248
+ declare const index_getHeatmapForPlaybackId: typeof getHeatmapForPlaybackId;
249
+ declare const index_getHeatmapForVideo: typeof getHeatmapForVideo;
250
+ declare const index_getHotspotsForAsset: typeof getHotspotsForAsset;
251
+ declare const index_getHotspotsForPlaybackId: typeof getHotspotsForPlaybackId;
252
+ declare const index_getHotspotsForVideo: typeof getHotspotsForVideo;
143
253
  declare const index_getReadyTextTracks: typeof getReadyTextTracks;
144
254
  declare const index_getStoryboardUrl: typeof getStoryboardUrl;
145
255
  declare const index_getThumbnailUrls: typeof getThumbnailUrls;
@@ -147,7 +257,7 @@ declare const index_parseVTTCues: typeof parseVTTCues;
147
257
  declare const index_secondsToTimestamp: typeof secondsToTimestamp;
148
258
  declare const index_vttTimestampToSeconds: typeof vttTimestampToSeconds;
149
259
  declare namespace index {
150
- export { index_DEFAULT_STORYBOARD_WIDTH as DEFAULT_STORYBOARD_WIDTH, type index_ThumbnailOptions as ThumbnailOptions, type index_TranscriptFetchOptions as TranscriptFetchOptions, type index_TranscriptResult as TranscriptResult, type index_VTTCue as VTTCue, index_buildTranscriptUrl as buildTranscriptUrl, index_chunkByTokens as chunkByTokens, index_chunkText as chunkText, index_chunkVTTCues as chunkVTTCues, index_estimateTokenCount as estimateTokenCount, index_extractTextFromVTT as extractTextFromVTT, index_extractTimestampedTranscript as extractTimestampedTranscript, index_fetchTranscriptForAsset as fetchTranscriptForAsset, index_findCaptionTrack as findCaptionTrack, index_getReadyTextTracks as getReadyTextTracks, index_getStoryboardUrl as getStoryboardUrl, index_getThumbnailUrls as getThumbnailUrls, index_parseVTTCues as parseVTTCues, index_secondsToTimestamp as secondsToTimestamp, index_vttTimestampToSeconds as vttTimestampToSeconds };
260
+ export { index_DEFAULT_STORYBOARD_WIDTH as DEFAULT_STORYBOARD_WIDTH, type index_HeatmapOptions as HeatmapOptions, type index_HeatmapResponse as HeatmapResponse, type index_Hotspot as Hotspot, type index_HotspotOptions as HotspotOptions, type index_HotspotResponse as HotspotResponse, type index_ThumbnailOptions as ThumbnailOptions, type index_TranscriptFetchOptions as TranscriptFetchOptions, type index_TranscriptResult as TranscriptResult, type index_VTTCue as VTTCue, index_buildTranscriptUrl as buildTranscriptUrl, index_chunkByTokens as chunkByTokens, index_chunkText as chunkText, index_chunkVTTCues as chunkVTTCues, index_estimateTokenCount as estimateTokenCount, index_extractTextFromVTT as extractTextFromVTT, index_extractTimestampedTranscript as extractTimestampedTranscript, index_fetchTranscriptForAsset as fetchTranscriptForAsset, index_findCaptionTrack as findCaptionTrack, index_getHeatmapForAsset as getHeatmapForAsset, index_getHeatmapForPlaybackId as getHeatmapForPlaybackId, index_getHeatmapForVideo as getHeatmapForVideo, index_getHotspotsForAsset as getHotspotsForAsset, index_getHotspotsForPlaybackId as getHotspotsForPlaybackId, index_getHotspotsForVideo as getHotspotsForVideo, index_getReadyTextTracks as getReadyTextTracks, index_getStoryboardUrl as getStoryboardUrl, index_getThumbnailUrls as getThumbnailUrls, index_parseVTTCues as parseVTTCues, index_secondsToTimestamp as secondsToTimestamp, index_vttTimestampToSeconds as vttTimestampToSeconds };
151
261
  }
152
262
 
153
- export { DEFAULT_STORYBOARD_WIDTH as D, type ThumbnailOptions as T, type VTTCue as V, chunkVTTCues as a, chunkText as b, chunkByTokens as c, getThumbnailUrls as d, estimateTokenCount as e, type TranscriptFetchOptions as f, getStoryboardUrl as g, type TranscriptResult as h, index as i, getReadyTextTracks as j, findCaptionTrack as k, extractTextFromVTT as l, extractTimestampedTranscript as m, buildTranscriptUrl as n, fetchTranscriptForAsset as o, parseVTTCues as p, secondsToTimestamp as s, vttTimestampToSeconds as v };
263
+ export { secondsToTimestamp as A, vttTimestampToSeconds as B, DEFAULT_STORYBOARD_WIDTH as D, type HeatmapOptions as H, type ThumbnailOptions as T, type VTTCue as V, type HeatmapResponse as a, type Hotspot as b, type HotspotOptions as c, type HotspotResponse as d, type TranscriptFetchOptions as e, type TranscriptResult as f, buildTranscriptUrl as g, chunkByTokens as h, index as i, chunkText as j, chunkVTTCues as k, estimateTokenCount as l, extractTextFromVTT as m, extractTimestampedTranscript as n, fetchTranscriptForAsset as o, findCaptionTrack as p, getHeatmapForAsset as q, getHeatmapForPlaybackId as r, getHeatmapForVideo as s, getHotspotsForAsset as t, getHotspotsForPlaybackId as u, getHotspotsForVideo as v, getReadyTextTracks as w, getStoryboardUrl as x, getThumbnailUrls as y, parseVTTCues as z };
package/dist/index.d.ts CHANGED
@@ -1,14 +1,15 @@
1
- import { W as WorkflowCredentials } from './types-BhVuLeSp.js';
2
- export { A as AssetTextTrack, i as ChunkEmbedding, C as ChunkingStrategy, E as Encrypted, a as EncryptedPayload, I as ImageSubmissionMode, M as MuxAIOptions, c as MuxAsset, f as PlaybackAsset, P as PlaybackPolicy, h as TextChunk, g as TokenChunkingConfig, k as TokenUsage, T as ToneType, V as VTTChunkingConfig, j as VideoEmbeddingsResult, b as WorkflowCredentialsInput, d as decryptFromWorkflow, e as encryptForWorkflow } from './types-BhVuLeSp.js';
3
- export { i as primitives } from './index-2oUwgWsE.js';
4
- export { i as workflows } from './index-DgCfxP4T.js';
1
+ import { W as WorkflowCredentials, S as StoragePutObjectInput, a as StoragePresignGetObjectInput } from './types-BQVi_wnh.js';
2
+ export { A as AssetTextTrack, C as ChunkEmbedding, b as ChunkingStrategy, E as Encrypted, c as EncryptedPayload, I as ImageSubmissionMode, M as MuxAIOptions, d as MuxAsset, P as PlaybackAsset, e as PlaybackPolicy, f as StorageAdapter, T as TextChunk, g as TokenChunkingConfig, h as TokenUsage, i as ToneType, U as UsageMetadata, V as VTTChunkingConfig, j as VideoEmbeddingsResult, k as WorkflowCredentialsInput, l as WorkflowMuxClient, m as decryptFromWorkflow, n as encryptForWorkflow } from './types-BQVi_wnh.js';
3
+ import { WORKFLOW_SERIALIZE, WORKFLOW_DESERIALIZE } from '@workflow/serde';
4
+ export { i as primitives } from './index-DZlygsvb.js';
5
+ export { i as workflows } from './index-BMqnP1RV.js';
5
6
  import '@mux/mux-node';
6
7
  import 'zod';
7
8
  import '@ai-sdk/anthropic';
8
9
  import '@ai-sdk/google';
9
10
  import '@ai-sdk/openai';
10
11
 
11
- var version = "0.5.2";
12
+ var version = "0.7.2";
12
13
 
13
14
  /**
14
15
  * A function that returns workflow credentials, either synchronously or asynchronously.
@@ -21,4 +22,27 @@ type WorkflowCredentialsProvider = () => Promise<WorkflowCredentials | undefined
21
22
  */
22
23
  declare function setWorkflowCredentialsProvider(provider?: WorkflowCredentialsProvider): void;
23
24
 
24
- export { WorkflowCredentials, type WorkflowCredentialsProvider, setWorkflowCredentialsProvider, version };
25
+ interface WorkflowStorageClientOptions {
26
+ accessKeyId?: string;
27
+ secretAccessKey?: string;
28
+ }
29
+ /**
30
+ * Serializable storage client wrapper for workflow boundaries.
31
+ *
32
+ * By default, this uses the internal SigV4 implementation to keep object
33
+ * operations compatible across edge/ESM and Node runtimes.
34
+ */
35
+ declare class WorkflowStorageClient {
36
+ static classId: string;
37
+ private readonly accessKeyId?;
38
+ private readonly secretAccessKey?;
39
+ constructor(options?: WorkflowStorageClientOptions);
40
+ private resolveCredentials;
41
+ putObject(input: StoragePutObjectInput): Promise<void>;
42
+ createPresignedGetUrl(input: StoragePresignGetObjectInput): Promise<string>;
43
+ static [WORKFLOW_SERIALIZE](instance: WorkflowStorageClient): WorkflowStorageClientOptions;
44
+ static [WORKFLOW_DESERIALIZE](this: typeof WorkflowStorageClient, value: WorkflowStorageClientOptions): WorkflowStorageClient;
45
+ }
46
+ declare function createWorkflowStorageClient(options?: WorkflowStorageClientOptions): WorkflowStorageClient;
47
+
48
+ export { StoragePresignGetObjectInput, StoragePutObjectInput, WorkflowCredentials, type WorkflowCredentialsProvider, WorkflowStorageClient, createWorkflowStorageClient, setWorkflowCredentialsProvider, version };