@mulmocast/slide 0.1.7 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. package/.claude/skills/extend/SKILL.md +120 -0
  2. package/.claude/skills/extend/references/extended-script-schema.md +153 -0
  3. package/.claude/skills/narrate/SKILL.md +145 -0
  4. package/.claude/skills/narrate/references/extended-script-schema.md +153 -0
  5. package/README.md +154 -0
  6. package/lib/actions/bundle.js +14 -20
  7. package/lib/actions/bundle.js.map +1 -1
  8. package/lib/actions/common.d.ts +1 -1
  9. package/lib/actions/common.d.ts.map +1 -1
  10. package/lib/actions/common.js +26 -66
  11. package/lib/actions/common.js.map +1 -1
  12. package/lib/actions/extend-init.d.ts +2 -0
  13. package/lib/actions/extend-init.d.ts.map +1 -0
  14. package/lib/actions/extend-init.js +49 -0
  15. package/lib/actions/extend-init.js.map +1 -0
  16. package/lib/actions/extend-validate.d.ts +2 -0
  17. package/lib/actions/extend-validate.d.ts.map +1 -0
  18. package/lib/actions/extend-validate.js +56 -0
  19. package/lib/actions/extend-validate.js.map +1 -0
  20. package/lib/actions/movie.js +14 -20
  21. package/lib/actions/movie.js.map +1 -1
  22. package/lib/actions/preview.d.ts.map +1 -1
  23. package/lib/actions/preview.js +19 -55
  24. package/lib/actions/preview.js.map +1 -1
  25. package/lib/actions/upload.js +5 -44
  26. package/lib/actions/upload.js.map +1 -1
  27. package/lib/cli.js +60 -73
  28. package/lib/cli.js.map +1 -1
  29. package/lib/convert/markdown-plugins/directive.d.ts +1 -1
  30. package/lib/convert/markdown-plugins/directive.d.ts.map +1 -1
  31. package/lib/convert/markdown-plugins/directive.js +2 -5
  32. package/lib/convert/markdown-plugins/directive.js.map +1 -1
  33. package/lib/convert/markdown-plugins/index.d.ts +2 -2
  34. package/lib/convert/markdown-plugins/index.d.ts.map +1 -1
  35. package/lib/convert/markdown-plugins/index.js +23 -30
  36. package/lib/convert/markdown-plugins/index.js.map +1 -1
  37. package/lib/convert/markdown-plugins/layout.d.ts +91 -0
  38. package/lib/convert/markdown-plugins/layout.d.ts.map +1 -0
  39. package/lib/convert/markdown-plugins/layout.js +251 -0
  40. package/lib/convert/markdown-plugins/layout.js.map +1 -0
  41. package/lib/convert/markdown-plugins/mermaid.d.ts +1 -1
  42. package/lib/convert/markdown-plugins/mermaid.d.ts.map +1 -1
  43. package/lib/convert/markdown-plugins/mermaid.js +2 -5
  44. package/lib/convert/markdown-plugins/mermaid.js.map +1 -1
  45. package/lib/convert/markdown-plugins/types.d.ts +2 -0
  46. package/lib/convert/markdown-plugins/types.d.ts.map +1 -1
  47. package/lib/convert/markdown-plugins/types.js +1 -2
  48. package/lib/convert/markdown-plugins/types.js.map +1 -1
  49. package/lib/convert/markdown-transform.d.ts +42 -0
  50. package/lib/convert/markdown-transform.d.ts.map +1 -0
  51. package/lib/convert/markdown-transform.js +58 -0
  52. package/lib/convert/markdown-transform.js.map +1 -0
  53. package/lib/convert/markdown-utils-common.d.ts +21 -0
  54. package/lib/convert/markdown-utils-common.d.ts.map +1 -0
  55. package/lib/convert/markdown-utils-common.js +51 -0
  56. package/lib/convert/markdown-utils-common.js.map +1 -0
  57. package/lib/convert/markdown-utils.d.ts +4 -17
  58. package/lib/convert/markdown-utils.d.ts.map +1 -1
  59. package/lib/convert/markdown-utils.js +10 -91
  60. package/lib/convert/markdown-utils.js.map +1 -1
  61. package/lib/convert/markdown.d.ts +1 -1
  62. package/lib/convert/markdown.d.ts.map +1 -1
  63. package/lib/convert/markdown.js +36 -88
  64. package/lib/convert/markdown.js.map +1 -1
  65. package/lib/convert/marp.d.ts +4 -11
  66. package/lib/convert/marp.d.ts.map +1 -1
  67. package/lib/convert/marp.js +25 -119
  68. package/lib/convert/marp.js.map +1 -1
  69. package/lib/convert/movie.d.ts +1 -1
  70. package/lib/convert/movie.d.ts.map +1 -1
  71. package/lib/convert/movie.js +26 -64
  72. package/lib/convert/movie.js.map +1 -1
  73. package/lib/convert/movie_bundle.js +5 -44
  74. package/lib/convert/movie_bundle.js.map +1 -1
  75. package/lib/convert/pdf.d.ts +2 -1
  76. package/lib/convert/pdf.d.ts.map +1 -1
  77. package/lib/convert/pdf.js +27 -56
  78. package/lib/convert/pdf.js.map +1 -1
  79. package/lib/convert/pptx.d.ts +1 -1
  80. package/lib/convert/pptx.d.ts.map +1 -1
  81. package/lib/convert/pptx.js +31 -68
  82. package/lib/convert/pptx.js.map +1 -1
  83. package/lib/index.browser.d.ts +13 -0
  84. package/lib/index.browser.d.ts.map +1 -0
  85. package/lib/index.browser.js +13 -0
  86. package/lib/index.browser.js.map +1 -0
  87. package/lib/index.common.d.ts +17 -0
  88. package/lib/index.common.d.ts.map +1 -0
  89. package/lib/index.common.js +18 -0
  90. package/lib/index.common.js.map +1 -0
  91. package/lib/index.node.d.ts +11 -0
  92. package/lib/index.node.d.ts.map +1 -0
  93. package/lib/index.node.js +13 -0
  94. package/lib/index.node.js.map +1 -0
  95. package/lib/utils/audio-save.js +8 -49
  96. package/lib/utils/audio-save.js.map +1 -1
  97. package/lib/utils/bundle-server.js +6 -45
  98. package/lib/utils/bundle-server.js.map +1 -1
  99. package/lib/utils/dependencies.js +6 -43
  100. package/lib/utils/dependencies.js.map +1 -1
  101. package/lib/utils/lang-common.d.ts +10 -0
  102. package/lib/utils/lang-common.d.ts.map +1 -0
  103. package/lib/utils/lang-common.js +11 -0
  104. package/lib/utils/lang-common.js.map +1 -0
  105. package/lib/utils/lang.d.ts +3 -4
  106. package/lib/utils/lang.d.ts.map +1 -1
  107. package/lib/utils/lang.js +11 -21
  108. package/lib/utils/lang.js.map +1 -1
  109. package/lib/utils/llm.d.ts +1 -1
  110. package/lib/utils/llm.d.ts.map +1 -1
  111. package/lib/utils/llm.js +6 -46
  112. package/lib/utils/llm.js.map +1 -1
  113. package/lib/utils/pdf.d.ts +1 -1
  114. package/lib/utils/pdf.d.ts.map +1 -1
  115. package/lib/utils/pdf.js +13 -52
  116. package/lib/utils/pdf.js.map +1 -1
  117. package/lib/vue/main.js +4 -9
  118. package/lib/vue/main.js.map +1 -1
  119. package/package.json +22 -9
package/.claude/skills/extend/SKILL.md
@@ -0,0 +1,120 @@
+ # /extend - MulmoScript to ExtendedScript Conversion
+
+ Convert a MulmoScript JSON into an ExtendedScript by adding `scriptMeta` and `beats[].meta` metadata fields. The metadata is used by mulmocast-preprocessor's AI features (summarize, query).
+
+ ## Invocation
+
+ ```
+ /extend <mulmo_script.json path> [--source <source file path>]
+ ```
+
+ ## Instructions
+
+ ### Step 1: Read Inputs
+
+ 1. Read the MulmoScript JSON at the specified path
+ 2. Read the ExtendedScript schema: `.claude/skills/extend/references/extended-script-schema.md`
+ 3. Check for extracted texts file:
+    - Look for `extracted_texts.json` in the same directory as the MulmoScript
+    - This file is generated by `mulmo-slide pdf` and contains per-page text extracted from the source PDF
+    - If found, read it — this provides the raw text content for each beat
+ 4. Locate the source file:
+    - If `--source` is specified, use that file
+    - Otherwise, infer it from the MulmoScript path (a sketch of this inference follows this step):
+      - `scripts/{basename}/mulmo_script.json` -> search for `samples/{basename}.*` (try `.md`, `.pptx`, `.pdf`, `.key`)
+    - If no source file is found, work from the MulmoScript content and extracted texts alone
+ 5. If a source file is found, read it
+
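A minimal TypeScript sketch of the inference in item 4, assuming the `scripts/{basename}/` and `samples/` layout described above. The function name and the use of Node's `fs`/`path` modules are illustrative, not part of the skill itself.

```typescript
import { existsSync } from "node:fs";
import { basename, dirname, join } from "node:path";

// Given scripts/{basename}/mulmo_script.json, probe samples/{basename}.md,
// .pptx, .pdf, and .key in order; return the first match, if any.
function inferSourceFile(mulmoScriptPath: string): string | undefined {
  const name = basename(dirname(mulmoScriptPath)); // e.g. "simple_text"
  for (const ext of [".md", ".pptx", ".pdf", ".key"]) {
    const candidate = join("samples", `${name}${ext}`);
    if (existsSync(candidate)) {
      return candidate;
    }
  }
  return undefined; // fall back to the MulmoScript content and extracted texts
}
```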
+ ### Step 2: Analyze Content
+
+ Analyze the MulmoScript beats and source file (if available) to understand:
+
+ - The overall theme and purpose of the presentation
+ - The target audience
+ - The logical structure (sections, flow)
+ - Content types in each slide (text, code, diagrams, tables, data)
+ - URLs and external references mentioned
+ - Key terminology and concepts
+
+ For Markdown sources, also examine:
+ - Header hierarchy and structure
+ - Speaker notes (after `---` or in HTML comments)
+ - Code blocks and their languages
+ - Mermaid diagrams
+ - Links and references
+
+ ### Step 3: Generate Metadata
+
+ Based on the analysis, generate:
+
+ **`scriptMeta`** (script-level):
+ - `background`: 1-2 sentence overview of the presentation's theme
+ - `audience`: Who this presentation is for
+ - `goals`: 2-4 learning objectives or presentation goals
+ - `keywords`: 5-10 main keywords for search/discovery
+ - `references`: Extract URLs from slides, categorize as web/code/document/video
+ - `author`: If identifiable from content
+ - `faq`: 2-4 likely questions with answers based on the content
+
+ **`beats[].meta`** (per-beat):
+ - `section`: Logical section name (e.g., "opening", "introduction", "main-topic-1", "demo", "closing")
+   - Use consistent naming: consecutive beats in the same logical section share the same section value
+ - `tags`: Content type and topic tags. Use tags from this vocabulary when applicable:
+   - Content type: `intro`, `overview`, `definition`, `example`, `code`, `diagram`, `data`, `table`, `demo`, `comparison`, `summary`, `conclusion`, `q-and-a`
+   - Add topic-specific tags as needed
+ - `keywords`: 2-5 beat-specific important terms
+ - `notes`: If `extracted_texts.json` exists, put the corresponding extracted text for that beat here. This preserves the raw source content as metadata.
+ - `context`: Background information that would help an AI answer questions about this beat. Add supplementary information: related concepts, technical details, connections to other topics. This is the most valuable field, so be substantive.
+ - `expectedQuestions`: 1-3 questions the audience might ask about this specific beat's content
+
+ **`beats[].text`** (narration):
+ - If the beat's `text` field is empty and `extracted_texts.json` is available, generate a concise narration text based on the extracted text. The narration should be a natural spoken summary of the slide content, NOT a verbatim copy of the extracted text.
+ - If the beat already has `text`, preserve it as-is.
+
+ ### Step 4: Build ExtendedScript
+
+ 1. Start with the original MulmoScript (preserve ALL existing fields exactly)
+ 2. Add `scriptMeta` at the top level
+ 3. Add `meta` to each beat
+ 4. Add `outputProfiles: {}` (empty, for user to configure later)
+ 5. If beats don't have `id` fields, add them (e.g., `"beat-1"`, `"beat-2"`, ...)
+
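The build in Step 4 amounts to a non-destructive merge over the original script. A minimal TypeScript sketch, assuming the `ScriptMeta` and `BeatMeta` shapes from the schema reference; the simplified types and the function name are illustrative only, and the metadata values themselves come from the Step 3 analysis.

```typescript
// Simplified stand-ins for the shapes defined in the schema reference.
type ScriptMeta = { background?: string; audience?: string; goals?: string[]; keywords?: string[] };
type BeatMeta = {
  section?: string;
  tags?: string[];
  keywords?: string[];
  notes?: string;
  context?: string;
  expectedQuestions?: string[];
};
type Script = { beats: Array<{ id?: string } & Record<string, unknown>> } & Record<string, unknown>;

function buildExtendedScript(script: Script, scriptMeta: ScriptMeta, beatMetas: BeatMeta[]) {
  return {
    ...script,                          // 1. preserve ALL existing fields exactly
    scriptMeta,                         // 2. script-level metadata
    outputProfiles: {},                 // 4. empty, for the user to configure later
    beats: script.beats.map((beat, i) => ({
      ...beat,
      id: beat.id ?? `beat-${i + 1}`,   // 5. add ids only where missing
      meta: beatMetas[i],               // 3. per-beat metadata
    })),
  };
}
```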
+ ### Step 5: Write and Validate Output
+
+ 1. Determine output path:
+    - Same directory as input: replace `mulmo_script.json` with `extended_script.json`
+    - Example: `scripts/simple_text/mulmo_script.json` -> `scripts/simple_text/extended_script.json`
+ 2. Write the JSON with 2-space indentation
+ 3. Run `mulmo-slide extend validate <output_path>` (or `yarn cli extend validate <output_path>`) to validate against the schema
+ 4. If validation fails, fix the errors and re-write the file, then validate again
+ 5. Generate MulmoScript from ExtendedScript using the preprocessor:
+    ```bash
+    npx mulmocast-preprocessor <extended_script.json> -o <mulmo_script.json>
+    ```
+    Note: The preprocessor may not fully strip `scriptMeta`. If `mulmo movie` fails with "Unrecognized key" errors, manually strip remaining fields with a node one-liner:
+    ```bash
+    node -e "const fs=require('fs');const d=JSON.parse(fs.readFileSync('<mulmo_script.json>','utf8'));delete d.scriptMeta;delete d.outputProfiles;d.beats.forEach(b=>{delete b.meta;delete b.variants});fs.writeFileSync('<mulmo_script.json>',JSON.stringify(d,null,2))"
+    ```
+ 6. Display a summary to the user:
+    - Number of beats processed
+    - Sections identified
+    - Key topics/keywords
+    - Output file path
+
+ ### Step 6: Offer Adjustments
+
+ Ask the user if they want to adjust any of the generated metadata. Common adjustments:
+ - Refine audience description
+ - Add/remove keywords
+ - Adjust FAQ entries
+ - Modify section boundaries
+ - Enhance context fields
+
+ ## Quality Guidelines
+
+ - **context field**: This is the most important field for AI features. Don't just restate the slide text. Add supplementary information: related concepts, technical background, real-world examples, historical context, common misconceptions.
+ - **section naming**: Use lowercase-kebab-case. Keep section names meaningful and consistent across beats.
+ - **tags**: Be specific but not excessive. 2-4 tags per beat is typical.
+ - **expectedQuestions**: Write natural questions a real audience member would ask, not generic ones.
+ - **keywords**: Prefer specific technical terms over generic words.
+ - **Preserve original content**: Never modify existing MulmoScript fields (text, image, speaker, etc.).
package/.claude/skills/extend/references/extended-script-schema.md
@@ -0,0 +1,153 @@
+ # ExtendedScript Schema Reference
+
+ This document defines the ExtendedScript format used by `mulmocast-preprocessor`.
+ Canonical source: `@mulmocast/extended-types` npm package (`mulmocast-plus/packages/mulmocast-extended-types/src/index.ts`)
+
+ ## Type Definitions
+
+ ### BeatMeta
+
+ Metadata for filtering and context, attached to each beat.
+
+ ```typescript
+ {
+   tags?: string[]; // Content type/topic tags (e.g., "intro", "code", "diagram", "data", "demo", "conclusion")
+   section?: string; // Logical section identifier (e.g., "opening", "chapter1", "closing")
+   context?: string; // Background info not in the slide text (for AI summarize/query)
+   notes?: string; // Raw extracted text from source document (e.g., PDF text)
+   keywords?: string[]; // Beat-specific search keywords
+   expectedQuestions?: string[]; // Questions the audience might ask about this beat
+ }
+ ```
+
+ ### BeatVariant
+
+ Profile-specific content overrides (not generated by `/extend`).
+
+ ```typescript
+ {
+   text?: string; // Override text for this profile
+   skip?: boolean; // Skip this beat in this profile
+   image?: MulmoImageAsset; // Override image
+   imagePrompt?: string; // Override image prompt
+ }
+ ```
+
+ ### ExtendedBeat
+
+ A beat with optional `variants` and `meta` fields (extends MulmoBeat).
+
+ ```typescript
+ {
+   // ... all MulmoBeat fields preserved ...
+   variants?: Record<string, BeatVariant>; // Profile-keyed overrides
+   meta?: BeatMeta; // Metadata for this beat
+ }
+ ```
+
+ ### Reference
+
+ External resource reference.
+
+ ```typescript
+ {
+   type?: "web" | "code" | "document" | "video";
+   url: string;
+   title?: string;
+   description?: string;
+ }
+ ```
+
+ ### FAQ
+
+ Frequently asked question for quick Q&A matching.
+
+ ```typescript
+ {
+   question: string;
+   answer: string;
+   relatedBeats?: string[]; // Beat IDs related to this FAQ
+ }
+ ```
+
+ ### ScriptMeta
+
+ Script-level metadata for AI features.
+
+ ```typescript
+ {
+   audience?: string; // Target audience description
+   prerequisites?: string[]; // Required knowledge
+   goals?: string[]; // Learning goals / presentation objectives
+   background?: string; // Overview / theme of the presentation
+   faq?: FAQ[]; // Frequently asked questions
+   keywords?: string[]; // Script-wide search keywords
+   references?: Reference[]; // External resource links
+   author?: string; // Author name
+   version?: string; // Version string
+ }
+ ```
+
+ ### OutputProfile
+
+ Profile display information (not generated by `/extend`).
+
+ ```typescript
+ {
+   name: string;
+   description?: string;
+ }
+ ```
+
+ ### ExtendedScript
+
+ The top-level type (extends MulmoScript).
+
+ ```typescript
+ {
+   // ... all MulmoScript fields preserved ...
+   beats: ExtendedBeat[]; // Beats with meta
+   outputProfiles?: Record<string, OutputProfile>; // Profile definitions
+   scriptMeta?: ScriptMeta; // Script-level metadata
+ }
+ ```
+
+ ## Example
+
+ ```json
+ {
+   "$mulmocast": { "version": "1.1" },
+   "title": "Example Presentation",
+   "lang": "en",
+   "speechParams": { "speakers": { "Presenter": { "voiceId": "alloy" } } },
+   "scriptMeta": {
+     "audience": "Software engineers interested in AI",
+     "goals": ["Understand LLM agent patterns", "Learn practical implementation approaches"],
+     "background": "LLM agents combine language models with tools and memory for autonomous task execution.",
+     "keywords": ["LLM", "agents", "AI"],
+     "references": [
+       {
+         "type": "document",
+         "url": "https://arxiv.org/abs/2210.03629",
+         "title": "ReAct Paper"
+       }
+     ],
+     "author": "AI Team"
+   },
+   "beats": [
+     {
+       "id": "intro-1",
+       "speaker": "Presenter",
+       "text": "Welcome to this presentation about LLM agents.",
+       "meta": {
+         "section": "opening",
+         "tags": ["intro"],
+         "keywords": ["LLM agents"],
+         "context": "This is the opening slide that sets the stage for the presentation.",
+         "expectedQuestions": ["What are LLM agents?"]
+       }
+     }
+   ],
+   "outputProfiles": {}
+ }
+ ```
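Because ExtendedScript only adds optional fields on top of MulmoScript, removing `scriptMeta`, `outputProfiles`, `beats[].meta`, and `beats[].variants` recovers a script the base tooling accepts. A minimal TypeScript sketch of that cleanup; the loose types and the function name are illustrative and are not exports of `@mulmocast/extended-types`.

```typescript
// Illustrative, loosely-typed stand-in for ExtendedScript.
type Extended = {
  scriptMeta?: unknown;
  outputProfiles?: unknown;
  beats: Array<{ meta?: unknown; variants?: unknown } & Record<string, unknown>>;
} & Record<string, unknown>;

// Drop the extension fields to get back a plain MulmoScript
// (the same cleanup the /extend skill describes before running `mulmo movie`).
function stripExtensions({ scriptMeta: _s, outputProfiles: _o, beats, ...rest }: Extended) {
  return {
    ...rest,
    beats: beats.map(({ meta: _m, variants: _v, ...beat }) => beat),
  };
}
```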
package/.claude/skills/narrate/SKILL.md
@@ -0,0 +1,145 @@
+ # /narrate - Source File to Narrated ExtendedScript
+
+ Convert any supported source file (PDF, PPTX, Markdown, Keynote) into a validated ExtendedScript with AI-generated narration and metadata. This is the main entry point for the full pipeline.
+
+ ## Invocation
+
+ ```
+ /narrate <source file path>
+ ```
+
+ Supported formats: `.pdf`, `.pptx`, `.md`, `.key`
+
+ ## Instructions
+
+ ### Step 1: Convert Source to MulmoScript
+
+ Detect the file format and run the appropriate converter.
+
+ **PDF:**
+ ```bash
+ yarn cli pdf <file>
+ ```
+
+ **PPTX:**
+ ```bash
+ yarn cli pptx <file>
+ ```
+
+ **Markdown:**
+ ```bash
+ yarn cli markdown <file>
+ ```
+
+ **Keynote (.key):**
+ ```bash
+ yarn cli keynote <file>
+ ```
+
+ This produces `scripts/{basename}/mulmo_script.json` and (for PDF) `scripts/{basename}/extracted_texts.json`.
+
+ Confirm the output:
+ - How many slides/beats were generated
+ - Whether `extracted_texts.json` exists (PDF only)
+
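A compact sketch of the format dispatch in this step, assuming the four `yarn cli` subcommands listed above and Node's standard `child_process` module; the helper name is illustrative.

```typescript
import { spawnSync } from "node:child_process";
import { extname } from "node:path";

// Map a source file extension to the converter subcommand listed above.
const converters: Record<string, string> = {
  ".pdf": "pdf",
  ".pptx": "pptx",
  ".md": "markdown",
  ".key": "keynote",
};

function convertSource(file: string): void {
  const subcommand = converters[extname(file).toLowerCase()];
  if (!subcommand) throw new Error(`Unsupported source format: ${file}`);
  // Equivalent to running: yarn cli <subcommand> <file>
  spawnSync("yarn", ["cli", subcommand, file], { stdio: "inherit" });
}
```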
+ ### Step 2: Read Inputs
+
+ 1. Read the generated `scripts/{basename}/mulmo_script.json`
+ 2. Read the ExtendedScript schema: `.claude/skills/narrate/references/extended-script-schema.md`
+ 3. Check for `scripts/{basename}/extracted_texts.json` (generated by PDF converter)
+    - If found, read it — this provides raw text content for each beat
+ 4. If the source is Markdown, also read the original `.md` file for speaker notes and structure
+
+ ### Step 3: Analyze Content
+
+ Analyze the MulmoScript beats, extracted texts, and source file to understand:
+
+ - The overall theme and purpose
+ - The target audience
+ - The logical structure (sections, flow)
+ - Content types in each slide (text, code, diagrams, tables, data)
+ - URLs and external references
+ - Key terminology and concepts
+
+ For slides with images, read the slide images to understand visual content that may not be in extracted text.
+
+ ### Step 4: Generate Narration and Metadata
+
+ Based on the analysis, generate:
+
+ **`scriptMeta`** (script-level):
+ - `background`: 1-2 sentence overview of the theme
+ - `audience`: Who this is for
+ - `goals`: 2-4 learning objectives or goals
+ - `keywords`: 5-10 main keywords
+ - `references`: Extract URLs, categorize as web/code/document/video
+ - `author`: If identifiable from content
+ - `faq`: 2-4 likely questions with answers
+
+ **`beats[].text`** (narration):
+ - If the beat's `text` is empty, generate a concise narration based on the slide content and extracted text
+ - The narration should be natural spoken language, NOT a verbatim copy of source text
+ - Match the language specified in the `lang` field of the MulmoScript
+ - If the beat already has `text`, preserve it as-is
+
+ **`beats[].meta`** (per-beat):
+ - `section`: Logical section name (lowercase-kebab-case, e.g., "introduction", "main-topic-1", "conclusion")
+ - `tags`: Content type tags from: `intro`, `overview`, `definition`, `example`, `code`, `diagram`, `data`, `table`, `demo`, `comparison`, `summary`, `conclusion`, `q-and-a`
+ - `keywords`: 2-5 beat-specific terms
+ - `notes`: If `extracted_texts.json` exists, put the raw extracted text here
+ - `context`: Background info for AI query/summarize. Be substantive — don't just restate the slide
+ - `expectedQuestions`: 1-3 natural audience questions
+
+ ### Step 5: Build and Write ExtendedScript
+
+ 1. Start with the original MulmoScript (preserve ALL existing fields)
+ 2. Add `scriptMeta` at the top level
+ 3. Add `meta` to each beat, set `text` for narration
+ 4. Add `outputProfiles: {}` (empty)
+ 5. If beats don't have `id` fields, add them (`"beat-1"`, `"beat-2"`, ...)
+ 6. Write to `scripts/{basename}/extended_script.json` with 2-space indentation
+
+ ### Step 6: Validate
+
+ Run validation:
+ ```bash
+ yarn cli extend validate scripts/{basename}/extended_script.json
+ ```
+
+ If validation fails, fix the errors and re-write the file. Repeat until validation passes.
+
+ ### Step 7: Present Results and Next Steps
+
+ Display a summary:
+ - Number of beats processed
+ - Sections identified
+ - Key topics/keywords
+ - Output file path
+
+ Then show the user the next steps they can take:
+
+ ```
+ ExtendedScript is ready! Here's what you can do next:
+
+ ## Query the content interactively
+ npx mulmocast-preprocessor query scripts/{basename}/extended_script.json -i
+
+ ## Generate a summary
+ npx mulmocast-preprocessor summarize scripts/{basename}/extended_script.json
+
+ ## Generate a narrated video
+ npx mulmocast-preprocessor scripts/{basename}/extended_script.json -o scripts/{basename}/mulmo_script.json
+ npx mulmo movie scripts/{basename}/mulmo_script.json
+ ```
+
+ Ask the user if they want to adjust any narration or metadata before proceeding.
+
+ ## Quality Guidelines
+
+ - **Narration**: Write as if presenting to an audience. Use clear, spoken language. Avoid reading raw data verbatim — summarize and explain instead.
+ - **context field**: Most important for AI features. Add supplementary info: related concepts, technical background, real-world examples, historical context.
+ - **section naming**: Use lowercase-kebab-case. Consecutive beats in the same logical section share the same section value.
+ - **tags**: 2-4 tags per beat. Be specific but not excessive.
+ - **expectedQuestions**: Natural questions a real audience member would ask.
+ - **keywords**: Prefer specific technical terms over generic words.
+ - **Preserve original content**: Never modify existing MulmoScript fields (image, speaker, etc.) except `text` when generating narration.
package/.claude/skills/narrate/references/extended-script-schema.md
@@ -0,0 +1,153 @@
+ # ExtendedScript Schema Reference
+
+ This document defines the ExtendedScript format used by `mulmocast-preprocessor`.
+ Canonical source: `@mulmocast/extended-types` npm package (`mulmocast-plus/packages/mulmocast-extended-types/src/index.ts`)
+
+ ## Type Definitions
+
+ ### BeatMeta
+
+ Metadata for filtering and context, attached to each beat.
+
+ ```typescript
+ {
+   tags?: string[]; // Content type/topic tags (e.g., "intro", "code", "diagram", "data", "demo", "conclusion")
+   section?: string; // Logical section identifier (e.g., "opening", "chapter1", "closing")
+   context?: string; // Background info not in the slide text (for AI summarize/query)
+   notes?: string; // Raw extracted text from source document (e.g., PDF text)
+   keywords?: string[]; // Beat-specific search keywords
+   expectedQuestions?: string[]; // Questions the audience might ask about this beat
+ }
+ ```
+
+ ### BeatVariant
+
+ Profile-specific content overrides (not generated by `/extend`).
+
+ ```typescript
+ {
+   text?: string; // Override text for this profile
+   skip?: boolean; // Skip this beat in this profile
+   image?: MulmoImageAsset; // Override image
+   imagePrompt?: string; // Override image prompt
+ }
+ ```
+
+ ### ExtendedBeat
+
+ A beat with optional `variants` and `meta` fields (extends MulmoBeat).
+
+ ```typescript
+ {
+   // ... all MulmoBeat fields preserved ...
+   variants?: Record<string, BeatVariant>; // Profile-keyed overrides
+   meta?: BeatMeta; // Metadata for this beat
+ }
+ ```
+
+ ### Reference
+
+ External resource reference.
+
+ ```typescript
+ {
+   type?: "web" | "code" | "document" | "video";
+   url: string;
+   title?: string;
+   description?: string;
+ }
+ ```
+
+ ### FAQ
+
+ Frequently asked question for quick Q&A matching.
+
+ ```typescript
+ {
+   question: string;
+   answer: string;
+   relatedBeats?: string[]; // Beat IDs related to this FAQ
+ }
+ ```
+
+ ### ScriptMeta
+
+ Script-level metadata for AI features.
+
+ ```typescript
+ {
+   audience?: string; // Target audience description
+   prerequisites?: string[]; // Required knowledge
+   goals?: string[]; // Learning goals / presentation objectives
+   background?: string; // Overview / theme of the presentation
+   faq?: FAQ[]; // Frequently asked questions
+   keywords?: string[]; // Script-wide search keywords
+   references?: Reference[]; // External resource links
+   author?: string; // Author name
+   version?: string; // Version string
+ }
+ ```
+
+ ### OutputProfile
+
+ Profile display information (not generated by `/extend`).
+
+ ```typescript
+ {
+   name: string;
+   description?: string;
+ }
+ ```
+
+ ### ExtendedScript
+
+ The top-level type (extends MulmoScript).
+
+ ```typescript
+ {
+   // ... all MulmoScript fields preserved ...
+   beats: ExtendedBeat[]; // Beats with meta
+   outputProfiles?: Record<string, OutputProfile>; // Profile definitions
+   scriptMeta?: ScriptMeta; // Script-level metadata
+ }
+ ```
+
+ ## Example
+
+ ```json
+ {
+   "$mulmocast": { "version": "1.1" },
+   "title": "Example Presentation",
+   "lang": "en",
+   "speechParams": { "speakers": { "Presenter": { "voiceId": "alloy" } } },
+   "scriptMeta": {
+     "audience": "Software engineers interested in AI",
+     "goals": ["Understand LLM agent patterns", "Learn practical implementation approaches"],
+     "background": "LLM agents combine language models with tools and memory for autonomous task execution.",
+     "keywords": ["LLM", "agents", "AI"],
+     "references": [
+       {
+         "type": "document",
+         "url": "https://arxiv.org/abs/2210.03629",
+         "title": "ReAct Paper"
+       }
+     ],
+     "author": "AI Team"
+   },
+   "beats": [
+     {
+       "id": "intro-1",
+       "speaker": "Presenter",
+       "text": "Welcome to this presentation about LLM agents.",
+       "meta": {
+         "section": "opening",
+         "tags": ["intro"],
+         "keywords": ["LLM agents"],
+         "context": "This is the opening slide that sets the stage for the presentation.",
+         "expectedQuestions": ["What are LLM agents?"]
+       }
+     }
+   ],
+   "outputProfiles": {}
+ }
+ ```