omgkit 2.0.6 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/package.json +6 -3
  2. package/plugin/agents/architect.md +357 -43
  3. package/plugin/agents/code-reviewer.md +481 -22
  4. package/plugin/agents/debugger.md +397 -30
  5. package/plugin/agents/docs-manager.md +431 -23
  6. package/plugin/agents/fullstack-developer.md +395 -34
  7. package/plugin/agents/git-manager.md +438 -20
  8. package/plugin/agents/oracle.md +329 -53
  9. package/plugin/agents/planner.md +275 -32
  10. package/plugin/agents/researcher.md +343 -21
  11. package/plugin/agents/scout.md +423 -18
  12. package/plugin/agents/sprint-master.md +418 -48
  13. package/plugin/agents/tester.md +551 -26
  14. package/plugin/skills/backend/api-architecture/SKILL.md +857 -0
  15. package/plugin/skills/backend/caching-strategies/SKILL.md +755 -0
  16. package/plugin/skills/backend/event-driven-architecture/SKILL.md +753 -0
  17. package/plugin/skills/backend/real-time-systems/SKILL.md +635 -0
  18. package/plugin/skills/databases/database-optimization/SKILL.md +571 -0
  19. package/plugin/skills/devops/monorepo-management/SKILL.md +595 -0
  20. package/plugin/skills/devops/observability/SKILL.md +622 -0
  21. package/plugin/skills/devops/performance-profiling/SKILL.md +905 -0
  22. package/plugin/skills/frontend/advanced-ui-design/SKILL.md +426 -0
  23. package/plugin/skills/integrations/ai-integration/SKILL.md +730 -0
  24. package/plugin/skills/integrations/payment-integration/SKILL.md +735 -0
  25. package/plugin/skills/methodology/problem-solving/SKILL.md +355 -0
  26. package/plugin/skills/methodology/research-validation/SKILL.md +668 -0
  27. package/plugin/skills/methodology/sequential-thinking/SKILL.md +260 -0
  28. package/plugin/skills/mobile/mobile-development/SKILL.md +756 -0
  29. package/plugin/skills/security/security-hardening/SKILL.md +633 -0
  30. package/plugin/skills/tools/document-processing/SKILL.md +916 -0
  31. package/plugin/skills/tools/image-processing/SKILL.md +748 -0
  32. package/plugin/skills/tools/mcp-development/SKILL.md +883 -0
  33. package/plugin/skills/tools/media-processing/SKILL.md +831 -0
@@ -0,0 +1,831 @@
1
+ ---
2
+ name: media-processing
3
+ description: Audio and video processing with ffmpeg including transcoding, streaming, and batch operations
4
+ category: tools
5
+ triggers:
6
+ - media processing
7
+ - ffmpeg
8
+ - video transcoding
9
+ - audio processing
10
+ - video editing
11
+ - media conversion
12
+ - streaming
13
+ ---
14
+
15
+ # Media Processing
16
+
17
+ Enterprise-grade **audio and video processing** with ffmpeg. This skill covers transcoding, format conversion, streaming protocols, and batch processing pipelines.
18
+
19
+ ## Purpose
20
+
21
+ Handle media processing requirements efficiently:
22
+
23
+ - Transcode videos between formats
24
+ - Extract and process audio tracks
25
+ - Generate thumbnails and previews
26
+ - Implement adaptive streaming (HLS/DASH)
27
+ - Process user-uploaded media
28
+ - Build automated media pipelines
29
+
30
+ ## Features
31
+
32
+ ### 1. Video Transcoding
33
+
34
+ ```typescript
35
+ import ffmpeg from 'fluent-ffmpeg';
36
+ import { path as ffmpegPath } from '@ffmpeg-installer/ffmpeg';
37
+ import { path as ffprobePath } from '@ffprobe-installer/ffprobe';
38
+
39
+ ffmpeg.setFfmpegPath(ffmpegPath);
40
+ ffmpeg.setFfprobePath(ffprobePath);
41
+
42
+ interface TranscodeOptions {
43
+ inputPath: string;
44
+ outputPath: string;
45
+ format?: 'mp4' | 'webm' | 'mov';
46
+ codec?: 'h264' | 'h265' | 'vp9';
47
+ resolution?: '1080p' | '720p' | '480p' | '360p';
48
+ bitrate?: string;
49
+ fps?: number;
50
+ onProgress?: (progress: number) => void;
51
+ }
52
+
53
+ const RESOLUTIONS = {
54
+ '1080p': { width: 1920, height: 1080 },
55
+ '720p': { width: 1280, height: 720 },
56
+ '480p': { width: 854, height: 480 },
57
+ '360p': { width: 640, height: 360 },
58
+ };
59
+
60
+ const CODECS = {
61
+ h264: { video: 'libx264', audio: 'aac' },
62
+ h265: { video: 'libx265', audio: 'aac' },
63
+ vp9: { video: 'libvpx-vp9', audio: 'libopus' },
64
+ };
65
+
66
+ async function transcodeVideo(options: TranscodeOptions): Promise<void> {
67
+ const {
68
+ inputPath,
69
+ outputPath,
70
+ format = 'mp4',
71
+ codec = 'h264',
72
+ resolution = '720p',
73
+ bitrate,
74
+ fps,
75
+ onProgress,
76
+ } = options;
77
+
78
+ const { width, height } = RESOLUTIONS[resolution];
79
+ const { video: videoCodec, audio: audioCodec } = CODECS[codec];
80
+
81
+ return new Promise((resolve, reject) => {
82
+ let command = ffmpeg(inputPath)
83
+ .videoCodec(videoCodec)
84
+ .audioCodec(audioCodec)
85
+ .size(`${width}x${height}`)
86
+ .autopad()
87
+ .format(format);
88
+
89
+ // Apply bitrate if specified
90
+ if (bitrate) {
91
+ command = command.videoBitrate(bitrate);
92
+ }
93
+
94
+ // Apply FPS if specified
95
+ if (fps) {
96
+ command = command.fps(fps);
97
+ }
98
+
99
+ // H.264 specific options for better compatibility
100
+ if (codec === 'h264') {
101
+ command = command.outputOptions([
102
+ '-preset medium',
103
+ '-profile:v high',
104
+ '-level 4.0',
105
+ '-movflags +faststart', // Web optimization
106
+ ]);
107
+ }
108
+
109
+ command
110
+ .on('progress', (progress) => {
111
+ onProgress?.(progress.percent || 0);
112
+ })
113
+ .on('end', () => resolve())
114
+ .on('error', reject)
115
+ .save(outputPath);
116
+ });
117
+ }
118
+
119
+ // Get video metadata
120
+ interface VideoMetadata {
121
+ duration: number;
122
+ width: number;
123
+ height: number;
124
+ codec: string;
125
+ bitrate: number;
126
+ fps: number;
127
+ audioCodec?: string;
128
+ audioChannels?: number;
129
+ }
130
+
131
+ async function getVideoMetadata(filePath: string): Promise<VideoMetadata> {
132
+ return new Promise((resolve, reject) => {
133
+ ffmpeg.ffprobe(filePath, (err, metadata) => {
134
+ if (err) return reject(err);
135
+
136
+ const videoStream = metadata.streams.find(s => s.codec_type === 'video');
137
+ const audioStream = metadata.streams.find(s => s.codec_type === 'audio');
138
+
139
+ if (!videoStream) {
140
+ return reject(new Error('No video stream found'));
141
+ }
142
+
143
+ resolve({
144
+ duration: metadata.format.duration || 0,
145
+ width: videoStream.width || 0,
146
+ height: videoStream.height || 0,
147
+ codec: videoStream.codec_name || '',
148
+ bitrate: parseInt(metadata.format.bit_rate || '0'),
149
+ // r_frame_rate is a fraction string such as "30000/1001"; parse it rather than eval()
+ fps: (() => {
+ const [num = 0, den = 1] = (videoStream.r_frame_rate || '0/1').split('/').map(Number);
+ return den ? num / den : num;
+ })(),
150
+ audioCodec: audioStream?.codec_name,
151
+ audioChannels: audioStream?.channels,
152
+ });
153
+ });
154
+ });
155
+ }
156
+ ```
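+
+ A minimal usage sketch combining the two helpers above; the output-path handling and console logging are placeholders rather than part of the skill:
+
+ ```typescript
+ async function convertForWeb(inputPath: string): Promise<string> {
+   // Probe the source first so the target resolution can be chosen sensibly
+   const meta = await getVideoMetadata(inputPath);
+   const resolution = meta.height >= 1080 ? '1080p' : '720p';
+   const outputPath = inputPath.replace(/\.\w+$/, '.web.mp4');
+
+   await transcodeVideo({
+     inputPath,
+     outputPath,
+     format: 'mp4',
+     codec: 'h264',
+     resolution,
+     onProgress: (percent) => console.log(`transcode: ${percent.toFixed(1)}%`),
+   });
+
+   return outputPath;
+ }
+ ```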
157
+
158
+ ### 2. Thumbnail Generation
159
+
160
+ ```typescript
+ import { promises as fs } from 'fs';
+ import path from 'path';
+
161
+ interface ThumbnailOptions {
162
+ inputPath: string;
163
+ outputDir: string;
164
+ count?: number;
165
+ size?: string;
166
+ filename?: string;
167
+ timestamps?: number[]; // Specific timestamps in seconds
168
+ }
169
+
170
+ async function generateThumbnails(options: ThumbnailOptions): Promise<string[]> {
171
+ const {
172
+ inputPath,
173
+ outputDir,
174
+ count = 1,
175
+ size = '320x180',
176
+ filename = 'thumb_%i.jpg',
177
+ timestamps,
178
+ } = options;
179
+
180
+ await fs.mkdir(outputDir, { recursive: true });
181
+
182
+ return new Promise((resolve, reject) => {
183
+ const command = ffmpeg(inputPath).screenshots({
184
+ count: timestamps ? undefined : count,
185
+ folder: outputDir,
186
+ size,
187
+ filename,
188
+ timemarks: timestamps,
189
+ });
190
+
191
+ const generatedFiles: string[] = [];
192
+
193
+ command
194
+ .on('filenames', (filenames) => {
195
+ generatedFiles.push(...filenames.map(f => path.join(outputDir, f)));
196
+ })
197
+ .on('end', () => resolve(generatedFiles))
198
+ .on('error', reject);
199
+ });
200
+ }
201
+
202
+ // Generate video preview (animated GIF or short clip)
203
+ async function generatePreview(
204
+ inputPath: string,
205
+ outputPath: string,
206
+ options: {
207
+ duration?: number;
208
+ startTime?: number;
209
+ fps?: number;
210
+ width?: number;
211
+ format?: 'gif' | 'webm';
212
+ } = {}
213
+ ): Promise<void> {
214
+ const {
215
+ duration = 5,
216
+ startTime = 0,
217
+ fps = 10,
218
+ width = 320,
219
+ format = 'gif',
220
+ } = options;
221
+
222
+ return new Promise((resolve, reject) => {
223
+ let command = ffmpeg(inputPath)
+ .setStartTime(startTime)
+ .setDuration(duration);
+
+ if (format === 'gif') {
+ // fps/scale live inside the palette filter chain so -vf is not passed twice
+ command = command.outputOptions([
+ '-vf', `fps=${fps},scale=${width}:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`,
+ ]);
+ } else {
+ command = command.fps(fps).size(`${width}x?`);
+ }
234
+
235
+ command
236
+ .on('end', () => resolve())
237
+ .on('error', reject)
238
+ .save(outputPath);
239
+ });
240
+ }
241
+
242
+ // Generate video sprite sheet for preview scrubbing
243
+ async function generateSpriteSheet(
244
+ inputPath: string,
245
+ outputPath: string,
246
+ options: {
247
+ cols?: number;
248
+ rows?: number;
249
+ thumbWidth?: number;
250
+ interval?: number; // Seconds between frames
251
+ } = {}
252
+ ): Promise<{ spritePath: string; vttPath: string }> {
253
+ const { cols = 10, rows = 10, thumbWidth = 160, interval = 5 } = options;
254
+ const totalFrames = cols * rows;
255
+
256
+ const metadata = await getVideoMetadata(inputPath);
257
+ const actualInterval = Math.max(interval, metadata.duration / totalFrames);
258
+
259
+ return new Promise((resolve, reject) => {
260
+ ffmpeg(inputPath)
261
+ .outputOptions([
262
+ `-vf`, `fps=1/${actualInterval},scale=${thumbWidth}:-1,tile=${cols}x${rows}`,
263
+ '-frames:v', '1',
264
+ ])
265
+ .on('end', async () => {
266
+ // Generate VTT file for sprite coordinates
267
+ const vttPath = outputPath.replace(/\.\w+$/, '.vtt');
268
+ const vttContent = generateVTT(metadata.duration, cols, rows, thumbWidth, actualInterval);
269
+ await fs.writeFile(vttPath, vttContent);
270
+ resolve({ spritePath: outputPath, vttPath });
271
+ })
272
+ .on('error', reject)
273
+ .save(outputPath);
274
+ });
275
+ }
276
+ ```
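+
+ `generateSpriteSheet` above calls a `generateVTT` helper that is not defined in this skill. A possible implementation is sketched below; it emits one WebVTT cue per sprite tile using the `#xywh` media-fragment syntax that scrubbing-preview players understand, and the `sprite.jpg` filename plus the 16:9 tile-height default are assumptions:
+
+ ```typescript
+ function formatVTTTime(totalSeconds: number): string {
+   const hours = Math.floor(totalSeconds / 3600);
+   const minutes = Math.floor((totalSeconds % 3600) / 60);
+   const seconds = totalSeconds % 60;
+   return `${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}:${seconds.toFixed(3).padStart(6, '0')}`;
+ }
+
+ function generateVTT(
+   duration: number,
+   cols: number,
+   rows: number,
+   thumbWidth: number,
+   interval: number,
+   thumbHeight: number = Math.round((thumbWidth * 9) / 16) // assumes 16:9 tiles
+ ): string {
+   const lines = ['WEBVTT', ''];
+   const totalTiles = Math.min(cols * rows, Math.ceil(duration / interval));
+
+   for (let i = 0; i < totalTiles; i++) {
+     const start = i * interval;
+     const end = Math.min((i + 1) * interval, duration);
+     const x = (i % cols) * thumbWidth;
+     const y = Math.floor(i / cols) * thumbHeight;
+
+     // Each cue points at one tile region inside the sprite image
+     lines.push(`${formatVTTTime(start)} --> ${formatVTTTime(end)}`);
+     lines.push(`sprite.jpg#xywh=${x},${y},${thumbWidth},${thumbHeight}`);
+     lines.push('');
+   }
+
+   return lines.join('\n');
+ }
+ ```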
277
+
278
+ ### 3. Audio Processing
279
+
280
+ ```typescript
281
+ interface AudioOptions {
282
+ inputPath: string;
283
+ outputPath: string;
284
+ format?: 'mp3' | 'aac' | 'flac' | 'wav' | 'ogg';
285
+ bitrate?: string;
286
+ sampleRate?: number;
287
+ channels?: 1 | 2;
288
+ normalize?: boolean;
289
+ }
290
+
291
+ async function processAudio(options: AudioOptions): Promise<void> {
292
+ const {
293
+ inputPath,
294
+ outputPath,
295
+ format = 'mp3',
296
+ bitrate = '192k',
297
+ sampleRate = 44100,
298
+ channels = 2,
299
+ normalize = false,
300
+ } = options;
301
+
302
+ return new Promise((resolve, reject) => {
303
+ let command = ffmpeg(inputPath)
304
+ .audioCodec(getAudioCodec(format))
305
+ .audioBitrate(bitrate)
306
+ .audioFrequency(sampleRate)
307
+ .audioChannels(channels);
308
+
309
+ if (normalize) {
310
+ command = command.audioFilters('loudnorm=I=-16:TP=-1.5:LRA=11');
311
+ }
312
+
313
+ command
314
+ .on('end', () => resolve())
315
+ .on('error', reject)
316
+ .save(outputPath);
317
+ });
318
+ }
319
+
320
+ function getAudioCodec(format: string): string {
321
+ const codecs: Record<string, string> = {
322
+ mp3: 'libmp3lame',
323
+ aac: 'aac',
324
+ flac: 'flac',
325
+ wav: 'pcm_s16le',
326
+ ogg: 'libvorbis',
327
+ };
328
+ return codecs[format] || 'aac';
329
+ }
330
+
331
+ // Extract audio from video
332
+ async function extractAudio(
333
+ videoPath: string,
334
+ outputPath: string,
335
+ options: Partial<AudioOptions> = {}
336
+ ): Promise<void> {
337
+ return processAudio({
338
+ inputPath: videoPath,
339
+ outputPath,
340
+ ...options,
341
+ });
342
+ }
343
+
344
+ // Merge audio tracks
345
+ async function mergeAudioTracks(
346
+ tracks: string[],
347
+ outputPath: string,
348
+ options: {
349
+ crossfade?: number;
350
+ normalize?: boolean;
351
+ } = {}
352
+ ): Promise<void> {
353
+ const { crossfade = 0, normalize = true } = options;
354
+
355
+ return new Promise((resolve, reject) => {
356
+ let command = ffmpeg();
357
+
358
+ // Add all input files
359
+ tracks.forEach(track => {
360
+ command = command.input(track);
361
+ });
362
+
363
+ // Build filter complex for concatenation
364
+ const filterInputs = tracks.map((_, i) => `[${i}:a]`).join('');
365
+ let filter = `${filterInputs}concat=n=${tracks.length}:v=0:a=1`;
366
+
367
+ if (crossfade > 0) {
+ // acrossfade joins exactly two inputs at a time, so chain it pairwise across the tracks
+ const steps: string[] = [];
+ let prev = '[0:a]';
+ for (let i = 1; i < tracks.length; i++) {
+ const label = i === tracks.length - 1 ? '' : `[xf${i}]`;
+ steps.push(`${prev}[${i}:a]acrossfade=d=${crossfade}:c1=tri:c2=tri${label}`);
+ prev = `[xf${i}]`;
+ }
+ filter = steps.join(';');
+ }
371
+
372
+ if (normalize) {
373
+ filter += ',loudnorm=I=-16:TP=-1.5:LRA=11';
374
+ }
375
+
376
+ filter += '[out]';
377
+
378
+ command
379
+ .complexFilter(filter)
380
+ .outputOptions(['-map', '[out]'])
381
+ .on('end', () => resolve())
382
+ .on('error', reject)
383
+ .save(outputPath);
384
+ });
385
+ }
386
+
387
+ // Generate waveform visualization
388
+ async function generateWaveform(
389
+ audioPath: string,
390
+ outputPath: string,
391
+ options: {
392
+ width?: number;
393
+ height?: number;
394
+ color?: string;
395
+ background?: string;
396
+ } = {}
397
+ ): Promise<void> {
398
+ const {
399
+ width = 1920,
400
+ height = 200,
401
+ color = '0x00FF00',
402
+ background = '0x000000',
403
+ } = options;
404
+
405
+ return new Promise((resolve, reject) => {
406
+ ffmpeg(audioPath)
+ // draw the waveform over a solid background colour via an explicit filter graph
+ .complexFilter(
+ `color=c=${background}:s=${width}x${height}[bg];` +
+ `[0:a]showwavespic=s=${width}x${height}:colors=${color}[fg];` +
+ `[bg][fg]overlay=format=auto`
+ )
411
+ .outputOptions(['-frames:v', '1'])
412
+ .on('end', () => resolve())
413
+ .on('error', reject)
414
+ .save(outputPath);
415
+ });
416
+ }
417
+ ```
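+
+ A short usage sketch tying the audio helpers together; the directory layout is a placeholder:
+
+ ```typescript
+ import path from 'path';
+
+ async function prepareAudioAssets(videoPath: string, outputDir: string): Promise<void> {
+   // Pull a normalized MP3 out of the video, then render a waveform image for the UI
+   const audioPath = path.join(outputDir, 'audio.mp3');
+   await extractAudio(videoPath, audioPath, { format: 'mp3', normalize: true });
+   await generateWaveform(audioPath, path.join(outputDir, 'waveform.png'), {
+     width: 1200,
+     height: 160,
+   });
+ }
+ ```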
418
+
419
+ ### 4. HLS/DASH Streaming
420
+
421
+ ```typescript
+ import { promises as fs } from 'fs';
+ import path from 'path';
+
422
+ interface StreamingOptions {
423
+ inputPath: string;
424
+ outputDir: string;
425
+ qualities: StreamQuality[];
426
+ segmentDuration?: number;
427
+ playlistType?: 'vod' | 'event';
428
+ }
429
+
430
+ interface StreamQuality {
431
+ name: string;
432
+ resolution: string;
433
+ bitrate: string;
434
+ audioBitrate?: string;
435
+ }
436
+
437
+ const DEFAULT_QUALITIES: StreamQuality[] = [
438
+ { name: '1080p', resolution: '1920x1080', bitrate: '5000k', audioBitrate: '192k' },
439
+ { name: '720p', resolution: '1280x720', bitrate: '2500k', audioBitrate: '128k' },
440
+ { name: '480p', resolution: '854x480', bitrate: '1000k', audioBitrate: '96k' },
441
+ { name: '360p', resolution: '640x360', bitrate: '500k', audioBitrate: '64k' },
442
+ ];
443
+
444
+ async function generateHLSStream(options: StreamingOptions): Promise<string> {
445
+ const {
446
+ inputPath,
447
+ outputDir,
448
+ qualities = DEFAULT_QUALITIES,
449
+ segmentDuration = 6,
450
+ playlistType = 'vod',
451
+ } = options;
452
+
453
+ await fs.mkdir(outputDir, { recursive: true });
454
+
455
+ // Generate each quality level
456
+ const variants: Array<{ bandwidth: number; resolution: string; path: string }> = [];
457
+
458
+ for (const quality of qualities) {
459
+ const qualityDir = path.join(outputDir, quality.name);
460
+ await fs.mkdir(qualityDir, { recursive: true });
461
+
462
+ await new Promise<void>((resolve, reject) => {
463
+ ffmpeg(inputPath)
464
+ .videoCodec('libx264')
465
+ .audioCodec('aac')
466
+ .size(quality.resolution)
467
+ .videoBitrate(quality.bitrate)
468
+ .audioBitrate(quality.audioBitrate || '128k')
469
+ .outputOptions([
470
+ '-preset fast',
471
+ '-profile:v main',
472
+ '-level 3.1',
473
+ '-start_number 0',
474
+ `-hls_time ${segmentDuration}`,
475
+ `-hls_playlist_type ${playlistType}`,
476
+ '-hls_segment_filename', path.join(qualityDir, 'segment_%03d.ts'),
477
+ '-f hls',
478
+ ])
479
+ .on('end', () => resolve())
480
+ .on('error', reject)
481
+ .save(path.join(qualityDir, 'playlist.m3u8'));
482
+ });
483
+
484
+ variants.push({
485
+ bandwidth: parseInt(quality.bitrate) * 1000,
486
+ resolution: quality.resolution,
487
+ path: `${quality.name}/playlist.m3u8`,
488
+ });
489
+ }
490
+
491
+ // Generate master playlist
492
+ const masterPlaylist = generateMasterPlaylist(variants);
493
+ const masterPath = path.join(outputDir, 'master.m3u8');
494
+ await fs.writeFile(masterPath, masterPlaylist);
495
+
496
+ return masterPath;
497
+ }
498
+
499
+ function generateMasterPlaylist(variants: Array<{
500
+ bandwidth: number;
501
+ resolution: string;
502
+ path: string;
503
+ }>): string {
504
+ let content = '#EXTM3U\n#EXT-X-VERSION:3\n\n';
505
+
506
+ for (const variant of variants.sort((a, b) => b.bandwidth - a.bandwidth)) {
507
+ content += `#EXT-X-STREAM-INF:BANDWIDTH=${variant.bandwidth},RESOLUTION=${variant.resolution}\n`;
508
+ content += `${variant.path}\n\n`;
509
+ }
510
+
511
+ return content;
512
+ }
513
+
514
+ // Generate DASH manifest
515
+ async function generateDASHStream(
516
+ inputPath: string,
517
+ outputDir: string,
518
+ qualities: StreamQuality[] = DEFAULT_QUALITIES
519
+ ): Promise<string> {
520
+ await fs.mkdir(outputDir, { recursive: true });
521
+
522
+ return new Promise((resolve, reject) => {
+ ffmpeg(inputPath)
+ .outputOptions([
+ // map the source video once per rendition (plus one shared audio stream) and set
+ // per-stream size/bitrate so a single ffmpeg run produces all representations
+ ...qualities.flatMap((quality, index) => [
+ '-map', '0:v:0',
+ `-s:v:${index}`, quality.resolution,
+ `-b:v:${index}`, quality.bitrate,
+ ]),
+ '-map', '0:a:0',
+ '-c:v', 'libx264',
+ '-c:a', 'aac',
+ '-preset', 'fast',
+ '-f', 'dash',
+ '-init_seg_name', 'init_$RepresentationID$.m4s',
+ '-media_seg_name', 'chunk_$RepresentationID$_$Number%05d$.m4s',
+ '-use_timeline', '1',
+ '-use_template', '1',
+ // one adaptation set for all video renditions, one for audio
+ '-adaptation_sets', 'id=0,streams=v id=1,streams=a',
+ ])
+ .on('end', () => resolve(path.join(outputDir, 'manifest.mpd')))
+ .on('error', reject)
+ .save(path.join(outputDir, 'manifest.mpd'));
+ });
552
+ }
553
+ ```
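+
+ The playlists and segments generated above still need to be served with the correct content types. A sketch using Express static hosting, assuming the generated output lives under a `MEDIA_DIR` directory like the one referenced later in this skill:
+
+ ```typescript
+ import express from 'express';
+
+ const MEDIA_DIR = process.env.MEDIA_DIR || '/var/media'; // assumed output location
+
+ const app = express();
+
+ // Serve HLS/DASH playlists and segments with their standard MIME types so players
+ // and CDNs handle them correctly.
+ app.use(
+   '/media',
+   express.static(MEDIA_DIR, {
+     setHeaders: (res, filePath) => {
+       if (filePath.endsWith('.m3u8')) {
+         res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
+       } else if (filePath.endsWith('.ts')) {
+         res.setHeader('Content-Type', 'video/mp2t');
+       } else if (filePath.endsWith('.mpd')) {
+         res.setHeader('Content-Type', 'application/dash+xml');
+       }
+     },
+   })
+ );
+ ```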
554
+
555
+ ### 5. Batch Processing Pipeline
556
+
557
+ ```typescript
558
+ import path from 'path';
+ import PQueue from 'p-queue';
559
+
560
+ interface BatchJob {
561
+ id: string;
562
+ inputPath: string;
563
+ outputPath: string;
564
+ operation: 'transcode' | 'thumbnail' | 'extract-audio' | 'hls';
565
+ options: Record<string, any>;
566
+ }
567
+
568
+ interface BatchResult {
569
+ id: string;
570
+ success: boolean;
571
+ outputPath?: string;
572
+ error?: string;
573
+ duration: number;
574
+ }
575
+
576
+ class MediaProcessingPipeline {
577
+ private queue: PQueue;
578
+ private results: Map<string, BatchResult> = new Map();
579
+
580
+ constructor(concurrency: number = 2) {
581
+ this.queue = new PQueue({ concurrency });
582
+ }
583
+
584
+ async processBatch(
585
+ jobs: BatchJob[],
586
+ onProgress?: (completed: number, total: number, current: BatchJob) => void
587
+ ): Promise<BatchResult[]> {
588
+ let completed = 0;
589
+
590
+ const tasks = jobs.map(job =>
591
+ this.queue.add(async () => {
592
+ const startTime = Date.now();
593
+
594
+ try {
595
+ onProgress?.(completed, jobs.length, job);
596
+
597
+ const outputPath = await this.processJob(job);
598
+
599
+ const result: BatchResult = {
600
+ id: job.id,
601
+ success: true,
602
+ outputPath,
603
+ duration: Date.now() - startTime,
604
+ };
605
+
606
+ this.results.set(job.id, result);
607
+ completed++;
608
+ return result;
609
+ } catch (error) {
610
+ const result: BatchResult = {
611
+ id: job.id,
612
+ success: false,
613
+ error: error.message,
614
+ duration: Date.now() - startTime,
615
+ };
616
+
617
+ this.results.set(job.id, result);
618
+ completed++;
619
+ return result;
620
+ }
621
+ })
622
+ );
623
+
624
+ return Promise.all(tasks);
625
+ }
626
+
627
+ private async processJob(job: BatchJob): Promise<string> {
628
+ switch (job.operation) {
629
+ case 'transcode':
630
+ await transcodeVideo({
631
+ inputPath: job.inputPath,
632
+ outputPath: job.outputPath,
633
+ ...job.options,
634
+ });
635
+ return job.outputPath;
636
+
637
+ case 'thumbnail':
638
+ const thumbs = await generateThumbnails({
639
+ inputPath: job.inputPath,
640
+ outputDir: path.dirname(job.outputPath),
641
+ ...job.options,
642
+ });
643
+ return thumbs[0];
644
+
645
+ case 'extract-audio':
646
+ await extractAudio(job.inputPath, job.outputPath, job.options);
647
+ return job.outputPath;
648
+
649
+ case 'hls':
650
+ return generateHLSStream({
651
+ inputPath: job.inputPath,
652
+ outputDir: job.outputPath,
653
+ ...job.options,
654
+ });
655
+
656
+ default:
657
+ throw new Error(`Unknown operation: ${job.operation}`);
658
+ }
659
+ }
660
+
661
+ getResult(jobId: string): BatchResult | undefined {
662
+ return this.results.get(jobId);
663
+ }
664
+
665
+ async waitForCompletion(): Promise<void> {
666
+ await this.queue.onIdle();
667
+ }
668
+ }
669
+
670
+ // Usage example (Upload, MEDIA_DIR, and db are app-specific and assumed to be provided by the host project)
671
+ async function processUserUploads(uploads: Upload[]): Promise<void> {
672
+ const pipeline = new MediaProcessingPipeline(2);
673
+
674
+ const jobs: BatchJob[] = uploads.map(upload => ({
675
+ id: upload.id,
676
+ inputPath: upload.tempPath,
677
+ outputPath: path.join(MEDIA_DIR, upload.id, 'video.mp4'),
678
+ operation: 'transcode',
679
+ options: {
680
+ resolution: '720p',
681
+ format: 'mp4',
682
+ },
683
+ }));
684
+
685
+ // Add thumbnail jobs
686
+ uploads.forEach(upload => {
687
+ jobs.push({
688
+ id: `${upload.id}-thumb`,
689
+ inputPath: upload.tempPath,
690
+ outputPath: path.join(MEDIA_DIR, upload.id, 'thumbnails'),
691
+ operation: 'thumbnail',
692
+ options: { count: 5 },
693
+ });
694
+ });
695
+
696
+ const results = await pipeline.processBatch(jobs, (completed, total, current) => {
697
+ console.log(`Processing ${completed}/${total}: ${current.id}`);
698
+ });
699
+
700
+ // Update database with results
701
+ for (const result of results) {
+ const isThumb = result.id.endsWith('-thumb');
+ const mediaId = isThumb ? result.id.replace(/-thumb$/, '') : result.id;
+
+ if (!result.success) {
+ await db.media.update({
+ where: { id: mediaId },
+ data: { status: 'failed', error: result.error },
+ });
+ } else if (isThumb) {
+ // thumbnailPath is illustrative; use whatever field the schema defines for previews
+ await db.media.update({
+ where: { id: mediaId },
+ data: { thumbnailPath: result.outputPath },
+ });
+ } else {
+ await db.media.update({
+ where: { id: mediaId },
+ data: { processedPath: result.outputPath, status: 'ready' },
+ });
+ }
+ }
714
+ }
715
+ ```
716
+
717
+ ## Use Cases
718
+
719
+ ### 1. Video Upload Processing
720
+
721
+ ```typescript
+ import { promises as fs } from 'fs';
+ import path from 'path';
+
722
+ // Complete video upload processing workflow
+ // (generateId, MEDIA_DIR, db, and the Video type come from the host application)
723
+ async function handleVideoUpload(file: Express.Multer.File, userId: string): Promise<Video> {
724
+ const videoId = generateId();
725
+ const baseDir = path.join(MEDIA_DIR, videoId);
726
+
727
+ // Create video record
728
+ const video = await db.video.create({
729
+ data: {
730
+ id: videoId,
731
+ userId,
732
+ originalName: file.originalname,
733
+ status: 'processing',
734
+ },
735
+ });
736
+
737
+ // Process asynchronously
738
+ processVideoAsync(videoId, file.path, baseDir);
739
+
740
+ return video;
741
+ }
742
+
743
+ async function processVideoAsync(videoId: string, inputPath: string, outputDir: string): Promise<void> {
744
+ try {
745
+ await fs.mkdir(outputDir, { recursive: true });
746
+
747
+ // Get metadata
748
+ const metadata = await getVideoMetadata(inputPath);
749
+
750
+ // Generate thumbnails
751
+ const thumbnails = await generateThumbnails({
752
+ inputPath,
753
+ outputDir: path.join(outputDir, 'thumbnails'),
754
+ count: 10,
755
+ });
756
+
757
+ // Transcode to web format
758
+ await transcodeVideo({
759
+ inputPath,
760
+ outputPath: path.join(outputDir, 'video.mp4'),
761
+ format: 'mp4',
762
+ codec: 'h264',
763
+ resolution: '720p',
764
+ });
765
+
766
+ // Generate HLS for streaming
767
+ await generateHLSStream({
768
+ inputPath,
769
+ outputDir: path.join(outputDir, 'hls'),
770
+ qualities: [
771
+ { name: '720p', resolution: '1280x720', bitrate: '2500k' },
772
+ { name: '480p', resolution: '854x480', bitrate: '1000k' },
773
+ ],
774
+ });
775
+
776
+ // Update database
777
+ await db.video.update({
778
+ where: { id: videoId },
779
+ data: {
780
+ status: 'ready',
781
+ duration: metadata.duration,
782
+ width: metadata.width,
783
+ height: metadata.height,
784
+ thumbnailUrl: `/media/${videoId}/thumbnails/thumb_1.jpg`,
785
+ streamUrl: `/media/${videoId}/hls/master.m3u8`,
786
+ },
787
+ });
788
+
789
+ // Cleanup original
790
+ await fs.unlink(inputPath);
791
+ } catch (error) {
792
+ await db.video.update({
793
+ where: { id: videoId },
794
+ data: { status: 'failed', error: error.message },
795
+ });
796
+ }
797
+ }
798
+ ```
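+
+ A possible companion endpoint for the workflow above, letting clients poll until processing finishes. The field names mirror the video record used in `processVideoAsync`, and `db` is assumed to be the same Prisma-style client:
+
+ ```typescript
+ import { Router } from 'express';
+
+ const videoRouter = Router();
+
+ // Clients poll this route while status is 'processing' and switch to the HLS
+ // stream once it flips to 'ready'.
+ videoRouter.get('/videos/:id', async (req, res) => {
+   const video = await db.video.findUnique({ where: { id: req.params.id } });
+   if (!video) {
+     return res.status(404).json({ error: 'Video not found' });
+   }
+
+   return res.json({
+     id: video.id,
+     status: video.status,
+     streamUrl: video.status === 'ready' ? video.streamUrl : null,
+     thumbnailUrl: video.status === 'ready' ? video.thumbnailUrl : null,
+   });
+ });
+ ```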
799
+
800
+ ## Best Practices
801
+
802
+ ### Do's
803
+
804
+ - **Use hardware acceleration** - Enable NVENC/VAAPI when available (see the sketch after this list)
805
+ - **Implement progress tracking** - Monitor long-running operations
806
+ - **Handle large files with streaming** - Don't load entire files into memory
807
+ - **Set reasonable timeouts** - Prevent hung processes
808
+ - **Validate input formats** - Check before processing
809
+ - **Clean up temporary files** - Prevent disk exhaustion
810
+
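+ A sketch covering two of the practices above: picking a hardware encoder and enforcing a hard timeout. The encoder names assume an ffmpeg build with NVENC/VideoToolbox/VAAPI support (VAAPI additionally needs a device configured), so fall back to libx264 when in doubt:
+
+ ```typescript
+ import ffmpeg from 'fluent-ffmpeg';
+
+ // Choose a hardware H.264 encoder when the host supports one; libx264 is the safe default.
+ function pickH264Encoder(hwaccel?: 'nvenc' | 'videotoolbox' | 'vaapi'): string {
+   switch (hwaccel) {
+     case 'nvenc': return 'h264_nvenc';
+     case 'videotoolbox': return 'h264_videotoolbox';
+     case 'vaapi': return 'h264_vaapi'; // also requires -vaapi_device / hwupload options
+     default: return 'libx264';
+   }
+ }
+
+ // Run a prepared command with a hard timeout so a hung ffmpeg process is killed
+ // instead of blocking the pipeline forever.
+ function runWithTimeout(command: ffmpeg.FfmpegCommand, outputPath: string, timeoutMs: number): Promise<void> {
+   return new Promise((resolve, reject) => {
+     const timer = setTimeout(() => {
+       command.kill('SIGKILL');
+       reject(new Error(`ffmpeg timed out after ${timeoutMs}ms`));
+     }, timeoutMs);
+
+     command
+       .on('end', () => { clearTimeout(timer); resolve(); })
+       .on('error', (err) => { clearTimeout(timer); reject(err); })
+       .save(outputPath);
+   });
+ }
+ ```
+
+ Usage: `await runWithTimeout(ffmpeg(input).videoCodec(pickH264Encoder('nvenc')), output, 15 * 60 * 1000)`.
+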
811
+ ### Don'ts
812
+
813
+ - Don't process untrusted files without validation (see the validation sketch after this list)
814
+ - Don't use synchronous operations for large files
815
+ - Don't ignore ffmpeg exit codes
816
+ - Don't skip error handling
817
+ - Don't process without concurrency limits
818
+ - Don't forget to set output format explicitly
819
+
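+ A validation sketch for the first point above, reusing `getVideoMetadata` from the transcoding section; the codec allow-list and size limits are illustrative, not prescriptive:
+
+ ```typescript
+ const ALLOWED_VIDEO_CODECS = new Set(['h264', 'hevc', 'vp8', 'vp9', 'av1']);
+ const MAX_DURATION_SECONDS = 4 * 60 * 60; // 4 hours
+ const MAX_PIXELS = 3840 * 2160;           // 4K
+
+ // Probe the file and reject anything outside the expected envelope before
+ // spending CPU time on it or trusting what the container claims.
+ async function validateUpload(filePath: string): Promise<void> {
+   const meta = await getVideoMetadata(filePath); // rejects if no video stream is present
+
+   if (!ALLOWED_VIDEO_CODECS.has(meta.codec)) {
+     throw new Error(`Unsupported video codec: ${meta.codec}`);
+   }
+   if (!Number.isFinite(meta.duration) || meta.duration <= 0 || meta.duration > MAX_DURATION_SECONDS) {
+     throw new Error(`Duration out of range: ${meta.duration}s`);
+   }
+   if (meta.width * meta.height > MAX_PIXELS) {
+     throw new Error(`Resolution too large: ${meta.width}x${meta.height}`);
+   }
+ }
+ ```
+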
820
+ ## Related Skills
821
+
822
+ - **document-processing** - Similar processing patterns
823
+ - **image-processing** - Companion skill for images
824
+ - **backend-development** - Integration patterns
825
+
826
+ ## Reference Resources
827
+
828
+ - [FFmpeg Documentation](https://ffmpeg.org/documentation.html)
829
+ - [fluent-ffmpeg](https://github.com/fluent-ffmpeg/node-fluent-ffmpeg)
830
+ - [HLS Specification](https://datatracker.ietf.org/doc/html/rfc8216)
831
+ - [DASH Specification](https://dashif.org/docs/DASH-IF-IOP-v4.3.pdf)