eprec 1.1.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -27,11 +27,9 @@ export function Layout({
  </head>
  <body>
  <div id="root">${children ?? ''}</div>
- ${
- entryScript
- ? html`<script type="module" src="${entryScript}"></script>`
- : ''
- }
+ ${entryScript
+ ? html`<script type="module" src="${entryScript}"></script>`
+ : ''}
  </body>
  </html>`
  }
package/app/router.tsx CHANGED
@@ -76,6 +76,7 @@ const cacheControl =
  export function createAppRouter(rootDir: string) {
  const router = createRouter({
  middleware: [
+ bunStaticFiles(path.join(rootDir, 'fixtures'), { cacheControl }),
  bunStaticFiles(path.join(rootDir, 'public'), { cacheControl }),
  bunStaticFiles(path.join(rootDir, 'app'), {
  filter: (pathname) => pathname.startsWith('assets/'),
@@ -88,15 +89,18 @@ export function createAppRouter(rootDir: string) {
  title: 'Not Found',
  entryScript: false,
  children: html`<main class="app-shell">
- <h1 class="app-title">404 - Not Found</h1>
- </main>`,
+ <h1 class="app-title">404 - Not Found</h1>
+ </main>`,
  }),
  { status: 404 },
  )
  },
  })

- router.map(routes.index, indexHandlers)
+ router.map(routes.index, {
+ middleware: indexHandlers.middleware,
+ action: indexHandlers.loader,
+ })

  return router
  }
@@ -13,34 +13,37 @@ const indexHandler = {
  <span class="app-kicker">Eprec Studio</span>
  <h1 class="app-title">Editing workspace</h1>
  <p class="app-subtitle">
- Prepare edits with the CLI, then review them here.
+ Review transcript-based edits, refine cut ranges, and prepare
+ exports.
  </p>
  </header>
- <div class="app-grid">
- <section class="app-card">
- <h2>Workflow</h2>
- <ol class="app-list">
- <li>Run a CLI edit command.</li>
- <li>Open the workspace UI.</li>
- <li>Review and refine the cut list.</li>
- </ol>
- </section>
+ <section class="app-card app-card--full">
+ <h2>Timeline editor</h2>
+ <p class="app-muted">
+ Loading preview video, timeline controls, and cut ranges.
+ </p>
+ <div class="timeline-track timeline-track--skeleton"></div>
+ </section>
+ <div class="app-grid app-grid--two">
  <section class="app-card">
- <h2>UI status</h2>
- <p class="status-pill">UI booted</p>
+ <h2>Chapter plan</h2>
  <p class="app-muted">
- Client-side components load after the first paint.
+ Output names and skip flags appear after the client boots.
  </p>
  </section>
  <section class="app-card">
- <h2>Interaction check</h2>
- <p class="app-muted">Client bundle loads after this page.</p>
- <button class="counter-button" type="button" disabled>
- <span>Click count</span>
- <span class="counter-value">0</span>
- </button>
+ <h2>Command windows</h2>
+ <p class="app-muted">
+ Jarvis command detection will populate this panel.
+ </p>
  </section>
  </div>
+ <section class="app-card app-card--full">
+ <h2>Transcript search</h2>
+ <p class="app-muted">
+ Search and jump controls will load in the interactive UI.
+ </p>
+ </section>
  </main>`,
  }),
  )
package/app-server.ts CHANGED
@@ -34,7 +34,9 @@ async function getServerPort(nodeEnv: string, desiredPort: number) {
  }
  const port = await getPort({ port: desiredPort })
  if (port !== desiredPort) {
- console.warn(`⚠️ Port ${desiredPort} was taken, using port ${port} instead`)
+ console.warn(
+ `⚠️ Port ${desiredPort} was taken, using port ${port} instead`,
+ )
  }
  return port
  }
package/cli.ts CHANGED
@@ -1,11 +1,13 @@
  #!/usr/bin/env bun
  import path from 'node:path'
- import type { CommandBuilder, CommandHandler } from 'yargs'
+ import type { Arguments, CommandBuilder, CommandHandler } from 'yargs'
  import yargs from 'yargs/yargs'
  import { hideBin } from 'yargs/helpers'
  import { startAppServer } from './app-server'
+ import { setLogHooks } from './process-course/logging'
  import { ensureFfmpegAvailable } from './process-course/ffmpeg'
  import {
+ VIDEO_EXTENSIONS,
  normalizeProcessArgs,
  configureProcessCommand,
  } from './process-course/cli'
@@ -13,46 +15,86 @@ import { runProcessCourse } from './process-course-video'
  import {
  configureEditVideoCommand,
  configureCombineVideosCommand,
- handleCombineVideosCommand,
- handleEditVideoCommand,
+ createCombineVideosHandler,
+ createEditVideoHandler,
  } from './process-course/edits/cli'
  import { detectSpeechSegmentsForFile } from './speech-detection'
  import {
  getDefaultWhisperModelPath,
  transcribeAudio,
  } from './whispercpp-transcribe'
+ import {
+ PromptCancelled,
+ createInquirerPrompter,
+ createPathPicker,
+ isInteractive,
+ pauseActiveSpinner,
+ resumeActiveSpinner,
+ resolveOptionalString,
+ type PathPicker,
+ type Prompter,
+ withSpinner,
+ } from './cli-ux'

- function resolveOptionalString(value: unknown) {
- if (typeof value !== 'string') {
- return undefined
- }
- const trimmed = value.trim()
- return trimmed.length > 0 ? trimmed : undefined
+ type CliUxContext = {
+ interactive: boolean
+ prompter?: Prompter
+ pathPicker?: PathPicker
  }

- async function main() {
- const parser = yargs(hideBin(process.argv))
+ async function main(rawArgs = hideBin(process.argv)) {
+ const context = createCliUxContext()
+ let args = rawArgs
+
+ if (context.interactive && args.length === 0 && context.prompter) {
+ const selection = await promptForCommand(context.prompter)
+ if (!selection) {
+ return
+ }
+ args = selection
+ }
+
+ const handlerOptions = {
+ interactive: context.interactive,
+ pathPicker: context.pathPicker,
+ }
+
+ const parser = yargs(args)
  .scriptName('eprec')
  .command(
- 'process <input...>',
+ 'process [input...]',
  'Process chapters into separate files',
  configureProcessCommand,
  async (argv) => {
- const args = normalizeProcessArgs(argv)
- await runProcessCourse(args)
+ const processArgs = await resolveProcessArgs(argv, context)
+ await withSpinner(
+ 'Processing course',
+ async () => {
+ setLogHooks({
+ beforeLog: pauseActiveSpinner,
+ afterLog: resumeActiveSpinner,
+ })
+ try {
+ await runProcessCourse(processArgs)
+ } finally {
+ setLogHooks({})
+ }
+ },
+ { successText: 'Processing complete', enabled: context.interactive },
+ )
  },
  )
  .command(
  'edit',
  'Edit a single video using transcript text edits',
  configureEditVideoCommand as CommandBuilder,
- handleEditVideoCommand as CommandHandler,
+ createEditVideoHandler(handlerOptions) as CommandHandler,
  )
  .command(
  'combine',
  'Combine two videos with speech-aligned padding',
  configureCombineVideosCommand as CommandBuilder,
- handleCombineVideosCommand as CommandHandler,
+ createCombineVideosHandler(handlerOptions) as CommandHandler,
  )
  .command(
  'app start',
@@ -77,7 +119,7 @@ async function main() {
  },
  )
  .command(
- 'transcribe <input>',
+ 'transcribe [input]',
  'Transcribe a single audio/video file',
  (command) =>
  command
@@ -108,31 +150,33 @@ async function main() {
  describe: 'Output base path (without extension)',
  }),
  async (argv) => {
- const inputPath = path.resolve(String(argv.input))
- const outputBasePath =
- resolveOptionalString(argv['output-base']) ??
- path.join(
- path.dirname(inputPath),
- `${path.parse(inputPath).name}-transcript`,
- )
- const threads =
- typeof argv.threads === 'number' && Number.isFinite(argv.threads)
- ? argv.threads
- : undefined
- const result = await transcribeAudio(inputPath, {
- modelPath: resolveOptionalString(argv['model-path']),
- language: resolveOptionalString(argv.language),
- threads,
- binaryPath: resolveOptionalString(argv['binary-path']),
- outputBasePath,
- })
- console.log(`Transcript written to ${outputBasePath}.txt`)
- console.log(`Segments written to ${outputBasePath}.json`)
- console.log(result.text)
+ const transcribeArgs = await resolveTranscribeArgs(argv, context)
+ let resultText = ''
+ await withSpinner(
+ 'Transcribing audio',
+ async () => {
+ const result = await transcribeAudio(transcribeArgs.inputPath, {
+ modelPath: transcribeArgs.modelPath,
+ language: transcribeArgs.language,
+ threads: transcribeArgs.threads,
+ binaryPath: transcribeArgs.binaryPath,
+ outputBasePath: transcribeArgs.outputBasePath,
+ })
+ resultText = result.text
+ },
+ { successText: 'Transcription complete', enabled: context.interactive },
+ )
+ console.log(
+ `Transcript written to ${transcribeArgs.outputBasePath}.txt`,
+ )
+ console.log(
+ `Segments written to ${transcribeArgs.outputBasePath}.json`,
+ )
+ console.log(resultText)
  },
  )
  .command(
- 'detect-speech <input>',
+ 'detect-speech [input]',
  'Show detected speech segments for a file',
  (command) =>
  command
@@ -149,12 +193,23 @@ async function main() {
  describe: 'End time in seconds',
  }),
  async (argv) => {
- await ensureFfmpegAvailable()
- const segments = await detectSpeechSegmentsForFile({
- inputPath: String(argv.input),
- start: typeof argv.start === 'number' ? argv.start : undefined,
- end: typeof argv.end === 'number' ? argv.end : undefined,
- })
+ const { inputPath, start, end } = await resolveDetectSpeechArgs(
+ argv,
+ context,
+ )
+ let segments: unknown = []
+ await withSpinner(
+ 'Detecting speech',
+ async () => {
+ await ensureFfmpegAvailable()
+ segments = await detectSpeechSegmentsForFile({
+ inputPath,
+ start,
+ end,
+ })
+ },
+ { successText: 'Speech detection complete', enabled: context.interactive },
+ )
  console.log(JSON.stringify(segments, null, 2))
  },
  )
@@ -165,7 +220,185 @@ async function main() {
  await parser.parseAsync()
  }

+ function createCliUxContext(): CliUxContext {
+ const interactive = isInteractive()
+ if (!interactive) {
+ return { interactive }
+ }
+ const prompter = createInquirerPrompter()
+ const pathPicker = createPathPicker(prompter)
+ return { interactive, prompter, pathPicker }
+ }
+
+ async function promptForCommand(
+ prompter: Prompter,
+ ): Promise<string[] | null> {
+ const selection = await prompter.select('Choose a command', [
+ {
+ name: 'Process chapters into separate files',
+ value: 'process',
+ },
+ {
+ name: 'Edit a single video using transcript text edits',
+ value: 'edit',
+ },
+ {
+ name: 'Combine two videos with speech-aligned padding',
+ value: 'combine',
+ },
+ {
+ name: 'Start the web UI server',
+ value: 'app-start',
+ },
+ {
+ name: 'Transcribe a single audio/video file',
+ value: 'transcribe',
+ },
+ {
+ name: 'Show detected speech segments for a file',
+ value: 'detect-speech',
+ },
+ { name: 'Show help', value: 'help' },
+ { name: 'Exit', value: 'exit' },
+ ])
+ switch (selection) {
+ case 'exit':
+ return null
+ case 'help':
+ return ['--help']
+ case 'app-start':
+ return ['app', 'start']
+ default:
+ return [selection]
+ }
+ }
+
+ async function resolveProcessArgs(argv: Arguments, context: CliUxContext) {
+ let inputPaths = collectStringArray(argv.input)
+ if (inputPaths.length === 0) {
+ if (!context.interactive || !context.pathPicker || !context.prompter) {
+ throw new Error('At least one input file is required.')
+ }
+ inputPaths = await promptForInputFiles(context)
+ }
+
+ let outputDir = resolveOptionalString(argv['output-dir'])
+ if (!outputDir && context.interactive && context.prompter && context.pathPicker) {
+ const chooseOutput = await context.prompter.confirm(
+ 'Choose a custom output directory?',
+ { defaultValue: false },
+ )
+ if (chooseOutput) {
+ outputDir = await context.pathPicker.pickExistingDirectory({
+ message: 'Select output directory',
+ })
+ }
+ }
+
+ const updatedArgs = {
+ ...argv,
+ input: inputPaths,
+ 'output-dir': outputDir ?? argv['output-dir'],
+ } as Arguments
+ return normalizeProcessArgs(updatedArgs)
+ }
+
+ async function promptForInputFiles(context: CliUxContext) {
+ if (!context.prompter || !context.pathPicker) {
+ throw new Error('Interactive prompts are not available.')
+ }
+ const inputPaths: string[] = []
+ let addAnother = true
+ while (addAnother) {
+ const inputPath = await context.pathPicker.pickExistingFile({
+ message:
+ inputPaths.length === 0
+ ? 'Select input video file'
+ : 'Select another input video file',
+ extensions: VIDEO_EXTENSIONS,
+ })
+ inputPaths.push(inputPath)
+ addAnother = await context.prompter.confirm('Add another input file?', {
+ defaultValue: false,
+ })
+ }
+ return inputPaths
+ }
+
+ async function resolveTranscribeArgs(argv: Arguments, context: CliUxContext) {
+ let input = resolveOptionalString(argv.input)
+ if (!input) {
+ if (!context.interactive || !context.pathPicker) {
+ throw new Error('Input audio/video file is required.')
+ }
+ input = await context.pathPicker.pickExistingFile({
+ message: 'Select input audio/video file',
+ })
+ }
+ const inputPath = path.resolve(input)
+ const outputBasePath =
+ resolveOptionalString(argv['output-base']) ??
+ buildTranscribeOutputBase(inputPath)
+ const threads = resolveOptionalNumber(argv.threads)
+ return {
+ inputPath,
+ outputBasePath,
+ threads,
+ modelPath: resolveOptionalString(argv['model-path']),
+ language: resolveOptionalString(argv.language),
+ binaryPath: resolveOptionalString(argv['binary-path']),
+ }
+ }
+
+ async function resolveDetectSpeechArgs(argv: Arguments, context: CliUxContext) {
+ let input = resolveOptionalString(argv.input)
+ if (!input) {
+ if (!context.interactive || !context.pathPicker) {
+ throw new Error('Input audio/video file is required.')
+ }
+ input = await context.pathPicker.pickExistingFile({
+ message: 'Select input audio/video file',
+ })
+ }
+ return {
+ inputPath: String(input),
+ start: resolveOptionalNumber(argv.start),
+ end: resolveOptionalNumber(argv.end),
+ }
+ }
+
+ function buildTranscribeOutputBase(inputPath: string) {
+ return path.join(
+ path.dirname(inputPath),
+ `${path.parse(inputPath).name}-transcript`,
+ )
+ }
+
+ function collectStringArray(value: unknown) {
+ if (Array.isArray(value)) {
+ return value.filter(
+ (entry): entry is string =>
+ typeof entry === 'string' && entry.trim().length > 0,
+ )
+ }
+ if (typeof value === 'string' && value.trim().length > 0) {
+ return [value]
+ }
+ return []
+ }
+
+ function resolveOptionalNumber(value: unknown) {
+ if (typeof value !== 'number' || !Number.isFinite(value)) {
+ return undefined
+ }
+ return value
+ }
+
  main().catch((error) => {
+ if (error instanceof PromptCancelled) {
+ console.log('[info] Cancelled.')
+ return
+ }
  console.error(
  `[error] ${error instanceof Error ? error.message : String(error)}`,
  )
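
Note: the ./cli-ux module imported above is not included in this diff, so withSpinner, pauseActiveSpinner, and resumeActiveSpinner are only visible through their call sites. A minimal sketch of what the spinner helper could look like, inferred from those call sites and the new ora dependency (hypothetical, not the published implementation):

// Hypothetical sketch of cli-ux's spinner helper, inferred from call sites in cli.ts.
import ora, { type Ora } from 'ora'

let activeSpinner: Ora | null = null

// Wired up via setLogHooks({ beforeLog: pauseActiveSpinner, ... }) so log lines
// are not overwritten by the spinner frame while a task is running.
export function pauseActiveSpinner() {
  activeSpinner?.stop()
}

export function resumeActiveSpinner() {
  activeSpinner?.start()
}

export async function withSpinner(
  text: string,
  task: () => Promise<void>,
  options: { successText?: string; enabled?: boolean } = {},
) {
  // cli.ts passes enabled: context.interactive, so non-interactive runs skip the spinner.
  if (options.enabled === false) {
    await task()
    return
  }
  const spinner = ora(text).start()
  activeSpinner = spinner
  try {
    await task()
    spinner.succeed(options.successText ?? text)
  } catch (error) {
    spinner.fail(text)
    throw error
  } finally {
    activeSpinner = null
  }
}
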
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "eprec",
  "type": "module",
- "version": "1.1.0",
+ "version": "1.3.0",
  "license": "MIT",
  "repository": {
  "type": "git",
@@ -11,8 +11,9 @@
  "app:start": "bun ./app-server.ts",
  "format": "prettier --write .",
  "test": "bun test process-course utils.test.ts",
- "test:e2e": "bun test e2e",
- "test:all": "bun test",
+ "test:e2e": "bun test ./e2e",
+ "test:smoke": "bunx playwright test -c playwright-smoke-config.ts",
+ "test:all": "bun test '**/*.test.ts'",
  "validate": "bun run test"
  },
  "bin": {
@@ -33,6 +34,7 @@
  "prettier": "@epic-web/config/prettier",
  "devDependencies": {
  "@epic-web/config": "^1.21.3",
+ "@playwright/test": "^1.58.0",
  "@types/bun": "latest",
  "@types/yargs": "^17.0.35",
  "prettier": "^3.8.1"
@@ -42,7 +44,9 @@
  },
  "dependencies": {
  "get-port": "^7.1.0",
+ "inquirer": "^13.2.1",
  "onnxruntime-node": "^1.23.2",
+ "ora": "^9.1.0",
  "remix": "3.0.0-alpha.0",
  "yargs": "^18.0.0"
  }
@@ -8,6 +8,16 @@ import { normalizeSkipPhrases } from './utils/transcript'
  import { parseChapterSelection } from './utils/chapter-selection'
  import type { ChapterSelection } from './types'

+ export const VIDEO_EXTENSIONS = [
+ '.mp4',
+ '.mkv',
+ '.avi',
+ '.mov',
+ '.webm',
+ '.flv',
+ '.m4v',
+ ]
+
  export interface CliArgs {
  inputPaths: string[]
  outputDir: string | null
@@ -118,16 +128,7 @@ export function normalizeProcessArgs(
  if (!outputDir && inputPaths.length > 0) {
  const outputCandidate = inputPaths.at(-1)
  if (outputCandidate !== undefined) {
- const videoExtensions = [
- '.mp4',
- '.mkv',
- '.avi',
- '.mov',
- '.webm',
- '.flv',
- '.m4v',
- ]
- const hasVideoExtension = videoExtensions.some((ext) =>
+ const hasVideoExtension = VIDEO_EXTENSIONS.some((ext) =>
  outputCandidate.toLowerCase().endsWith(ext),
  )
@@ -0,0 +1,108 @@
+ import { test, expect } from 'bun:test'
+ import type { Arguments } from 'yargs'
+ import {
+ resolveEditVideoArgs,
+ resolveCombineVideosArgs,
+ buildCombinedOutputPath,
+ } from './cli'
+ import { buildEditedOutputPath } from './video-editor'
+ import type { PathPicker } from '../../cli-ux'
+
+ function createArgs(values: Record<string, unknown>): Arguments {
+ return values as Arguments
+ }
+
+ function createPathPicker(options?: {
+ files?: string[]
+ outputs?: string[]
+ directories?: string[]
+ }): PathPicker {
+ const fileResponses = options?.files ?? []
+ const outputResponses = options?.outputs ?? []
+ const directoryResponses = options?.directories ?? []
+ let fileIndex = 0
+ let outputIndex = 0
+ let directoryIndex = 0
+
+ return {
+ async pickExistingFile() {
+ const response = fileResponses[fileIndex]
+ fileIndex += 1
+ if (!response) {
+ throw new Error('Missing file response')
+ }
+ return response
+ },
+ async pickExistingDirectory() {
+ const response = directoryResponses[directoryIndex]
+ directoryIndex += 1
+ if (!response) {
+ throw new Error('Missing directory response')
+ }
+ return response
+ },
+ async pickOutputPath({ defaultPath }) {
+ const response = outputResponses[outputIndex] ?? defaultPath
+ outputIndex += 1
+ if (!response) {
+ throw new Error('Missing output response')
+ }
+ return response
+ },
+ }
+ }
+
+ test('resolveEditVideoArgs prompts for missing required paths', async () => {
+ const args = createArgs({})
+ const pathPicker = createPathPicker({
+ files: ['input.mp4', 'transcript.json', 'edited.txt'],
+ })
+ const result = await resolveEditVideoArgs(args, {
+ interactive: true,
+ pathPicker,
+ })
+
+ expect(result).toEqual({
+ input: 'input.mp4',
+ transcript: 'transcript.json',
+ edited: 'edited.txt',
+ output: buildEditedOutputPath('input.mp4'),
+ 'padding-ms': undefined,
+ })
+ })
+
+ test('resolveCombineVideosArgs prompts for transcript when edited provided', async () => {
+ const args = createArgs({
+ video1: 'video1.mp4',
+ edited1: 'edited1.txt',
+ video2: 'video2.mp4',
+ output: 'combined.mp4',
+ })
+ const pathPicker = createPathPicker({
+ files: ['transcript1.json'],
+ })
+ const result = await resolveCombineVideosArgs(args, {
+ interactive: true,
+ pathPicker,
+ })
+
+ expect(result.transcript1).toBe('transcript1.json')
+ expect(result.transcript2).toBeUndefined()
+ expect(result.output).toBe('combined.mp4')
+ })
+
+ test('resolveCombineVideosArgs uses default output when missing', async () => {
+ const args = createArgs({
+ video1: 'video1.mov',
+ video2: 'video2.mp4',
+ })
+ const pathPicker = createPathPicker()
+ const result = await resolveCombineVideosArgs(args, {
+ interactive: true,
+ pathPicker,
+ })
+
+ expect(result.output).toBe(
+ buildCombinedOutputPath('video1.mov', 'video2.mp4'),
+ )
+ })
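
For context, the Prompter and PathPicker types come from cli-ux.ts, which is not part of this diff. The shapes below are reconstructed only from how cli.ts and the test above use them; treat this as an illustrative sketch, not the published definitions:

// Hypothetical reconstruction of the cli-ux types, based solely on usage in this diff.
export type Prompter = {
  // promptForCommand passes a message plus { name, value } choices and expects the value back.
  select(
    message: string,
    choices: Array<{ name: string; value: string }>,
  ): Promise<string>
  // resolveProcessArgs and promptForInputFiles pass { defaultValue } and expect a boolean.
  confirm(
    message: string,
    options?: { defaultValue?: boolean },
  ): Promise<boolean>
}

export type PathPicker = {
  // promptForInputFiles passes an optional extensions filter (VIDEO_EXTENSIONS).
  pickExistingFile(options: {
    message: string
    extensions?: string[]
  }): Promise<string>
  pickExistingDirectory(options: { message: string }): Promise<string>
  // The test fake receives a defaultPath and falls back to it when no response is queued.
  pickOutputPath(options: { defaultPath?: string }): Promise<string>
}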