cognitive-modules-cli 1.4.0 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,8 +1,8 @@
  /**
   * Module Runner - Execute Cognitive Modules
-  * v2.5: Streaming response and multimodal support
+  * v2.2: Envelope format with meta/data separation, risk_rule, repair pass
   */
- import type { Provider, CognitiveModule, ModuleResult, ModuleInput, StreamingChunk, MediaInput, ModalityType, RuntimeCapabilities } from '../types.js';
+ import type { Provider, CognitiveModule, ModuleResult, ModuleInput } from '../types.js';
  export interface RunOptions {
      input?: ModuleInput;
      args?: string;
@@ -12,45 +12,3 @@ export interface RunOptions {
      enableRepair?: boolean;
  }
  export declare function runModule(module: CognitiveModule, provider: Provider, options?: RunOptions): Promise<ModuleResult>;
- export interface StreamRunOptions extends RunOptions {
-     /** Callback for each chunk */
-     onChunk?: (chunk: StreamingChunk) => void;
-     /** Callback for progress updates */
-     onProgress?: (percent: number, message?: string) => void;
-     /** Heartbeat interval in milliseconds (default: 15000) */
-     heartbeatInterval?: number;
-     /** Maximum stream duration in milliseconds (default: 300000) */
-     maxDuration?: number;
- }
- /**
-  * Run module with streaming response
-  *
-  * @param module - The cognitive module to execute
-  * @param provider - The LLM provider
-  * @param options - Run options including streaming callbacks
-  * @yields Streaming chunks
-  */
- export declare function runModuleStream(module: CognitiveModule, provider: Provider, options?: StreamRunOptions): AsyncGenerator<StreamingChunk, ModuleResult | undefined, unknown>;
- /**
-  * Load media file as base64
-  */
- export declare function loadMediaAsBase64(path: string): Promise<{
-     data: string;
-     media_type: string;
- } | null>;
- /**
-  * Validate media input against module constraints
-  */
- export declare function validateMediaInput(media: MediaInput, module: CognitiveModule, maxSizeMb?: number): {
-     valid: boolean;
-     error?: string;
-     code?: string;
- };
- /**
-  * Get runtime capabilities
-  */
- export declare function getRuntimeCapabilities(): RuntimeCapabilities;
- /**
-  * Check if runtime supports a specific modality
-  */
- export declare function runtimeSupportsModality(modality: ModalityType, direction?: 'input' | 'output'): boolean;
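
With the streaming generator and the media/runtime helpers removed from the declarations above, the only execution entry point left in 2.2.0 is the promise-based runModule. A minimal caller sketch in TypeScript, assuming the package re-exports these names from its root; the import path, and how the module and provider instances are obtained, are not shown in this diff:

import type { CognitiveModule, ModuleResult, Provider } from 'cognitive-modules-cli';
import { runModule } from 'cognitive-modules-cli';

declare const mod: CognitiveModule;   // loaded elsewhere; the loader is not part of this diff
declare const provider: Provider;     // any configured LLM provider

const result: ModuleResult = await runModule(mod, provider, {
    input: { query: 'Explain what this module changes' },
    enableRepair: true,               // repair pass retained in the v2.2 flow
});

if (result.ok && 'meta' in result) {
    // v2.2 envelope: short meta summary plus the structured data payload
    console.log(result.meta.risk, result.meta.explain);
    console.log(result.data);
}
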
@@ -1,12 +1,8 @@
  /**
   * Module Runner - Execute Cognitive Modules
-  * v2.5: Streaming response and multimodal support
+  * v2.2: Envelope format with meta/data separation, risk_rule, repair pass
   */
- import { aggregateRisk, isV22Envelope, isProviderV25, isModuleV25, moduleSupportsStreaming, moduleSupportsMultimodal, getModuleInputModalities, ErrorCodesV25, DEFAULT_RUNTIME_CAPABILITIES } from '../types.js';
- import { randomUUID } from 'crypto';
- import { readFile } from 'fs/promises';
- import { existsSync } from 'fs';
- import { extname } from 'path';
+ import { aggregateRisk, isV22Envelope } from '../types.js';
  // =============================================================================
  // Repair Pass (v2.2)
  // =============================================================================
@@ -409,601 +405,3 @@ function looksLikeCode(str) {
      ];
      return codeIndicators.some(re => re.test(str));
  }
- /**
-  * Create a new streaming session
-  */
- function createStreamingSession(moduleName) {
-     return {
-         session_id: `sess_${randomUUID().slice(0, 12)}`,
-         module_name: moduleName,
-         started_at: Date.now(),
-         chunks_sent: 0,
-         accumulated_data: {},
-         accumulated_text: {}
-     };
- }
- /**
-  * Create meta chunk (initial streaming response)
-  */
- function createMetaChunk(session, meta) {
-     return {
-         ok: true,
-         streaming: true,
-         session_id: session.session_id,
-         meta
-     };
- }
- /**
-  * Create delta chunk (incremental content)
-  * Note: Delta chunks don't include session_id per v2.5 spec
-  */
- function createDeltaChunk(session, field, delta) {
-     session.chunks_sent++;
-     return {
-         chunk: {
-             seq: session.chunks_sent,
-             type: 'delta',
-             field,
-             delta
-         }
-     };
- }
- /**
-  * Create progress chunk
-  * Note: Progress chunks don't include session_id per v2.5 spec
-  */
- function createProgressChunk(_session, percent, stage, message) {
-     return {
-         progress: {
-             percent,
-             stage,
-             message
-         }
-     };
- }
- /**
-  * Create final chunk (completion signal)
-  * Note: Final chunks don't include session_id per v2.5 spec
-  */
- function createFinalChunk(_session, meta, data, usage) {
-     return {
-         final: true,
-         meta,
-         data,
-         usage
-     };
- }
- /**
-  * Create error chunk
-  */
- function createErrorChunk(session, code, message, recoverable = false, partialData) {
-     return {
-         ok: false,
-         streaming: true,
-         session_id: session.session_id,
-         error: {
-             code,
-             message,
-             recoverable
-         },
-         partial_data: partialData
-     };
- }
- /**
-  * Run module with streaming response
-  *
-  * @param module - The cognitive module to execute
-  * @param provider - The LLM provider
-  * @param options - Run options including streaming callbacks
-  * @yields Streaming chunks
-  */
- export async function* runModuleStream(module, provider, options = {}) {
-     const { onChunk, onProgress, heartbeatInterval = 15000, maxDuration = 300000, ...runOptions } = options;
-     // Create streaming session
-     const session = createStreamingSession(module.name);
-     const startTime = Date.now();
-     // Check if module supports streaming
-     if (!moduleSupportsStreaming(module)) {
-         // Fallback to sync execution
-         const result = await runModule(module, provider, runOptions);
-         // Emit as single final chunk
-         if (result.ok && 'meta' in result) {
-             const finalChunk = createFinalChunk(session, result.meta, result.data);
-             yield finalChunk;
-             onChunk?.(finalChunk);
-             return result;
-         }
-         return result;
-     }
-     // Check if provider supports streaming
-     if (!isProviderV25(provider) || !provider.supportsStreaming?.()) {
-         // Fallback to sync with warning
-         console.warn('[cognitive] Provider does not support streaming, falling back to sync');
-         const result = await runModule(module, provider, runOptions);
-         if (result.ok && 'meta' in result) {
-             const finalChunk = createFinalChunk(session, result.meta, result.data);
-             yield finalChunk;
-             onChunk?.(finalChunk);
-         }
-         return result;
-     }
-     // Emit initial meta chunk
-     const metaChunk = createMetaChunk(session, {
-         confidence: undefined,
-         risk: 'low',
-         explain: 'Processing...'
-     });
-     yield metaChunk;
-     onChunk?.(metaChunk);
-     // Build prompt and messages (same as sync)
-     const { input, args, verbose = false, useEnvelope, useV22 } = runOptions;
-     const shouldUseEnvelope = useEnvelope ?? (module.output?.envelope === true || module.format === 'v2');
-     const isV22Module = module.tier !== undefined || module.formatVersion === 'v2.2';
-     const shouldUseV22 = useV22 ?? (isV22Module || module.compat?.runtime_auto_wrap === true);
-     const riskRule = module.metaConfig?.risk_rule ?? 'max_changes_risk';
-     const inputData = input || {};
-     if (args && !inputData.code && !inputData.query) {
-         if (looksLikeCode(args)) {
-             inputData.code = args;
-         }
-         else {
-             inputData.query = args;
-         }
-     }
-     // Extract media from input
-     const mediaInputs = extractMediaInputs(inputData);
-     // Build prompt with media placeholders
-     const prompt = buildPromptWithMedia(module, inputData, mediaInputs);
-     // Build system message
-     const systemParts = buildSystemMessage(module, shouldUseEnvelope, shouldUseV22);
-     const messages = [
-         { role: 'system', content: systemParts.join('\n') },
-         { role: 'user', content: prompt },
-     ];
-     try {
-         // Start streaming invocation
-         const streamResult = await provider.invokeStream({
-             messages,
-             jsonSchema: module.outputSchema,
-             temperature: 0.3,
-             stream: true,
-             images: mediaInputs.images,
-             audio: mediaInputs.audio,
-             video: mediaInputs.video
-         });
-         let accumulatedContent = '';
-         let lastProgressTime = Date.now();
-         // Process stream
-         for await (const chunk of streamResult.stream) {
-             // Check timeout
-             if (Date.now() - startTime > maxDuration) {
-                 const errorChunk = createErrorChunk(session, ErrorCodesV25.STREAM_TIMEOUT, `Stream exceeded max duration of ${maxDuration}ms`, false, { partial_content: accumulatedContent });
-                 yield errorChunk;
-                 onChunk?.(errorChunk);
-                 return undefined;
-             }
-             // Accumulate content
-             accumulatedContent += chunk;
-             // Emit delta chunk
-             const deltaChunk = createDeltaChunk(session, 'data.rationale', chunk);
-             yield deltaChunk;
-             onChunk?.(deltaChunk);
-             // Emit progress periodically
-             const now = Date.now();
-             if (now - lastProgressTime > 1000) {
-                 const elapsed = now - startTime;
-                 const estimatedPercent = Math.min(90, Math.floor(elapsed / maxDuration * 100));
-                 const progressChunk = createProgressChunk(session, estimatedPercent, 'generating', 'Generating response...');
-                 yield progressChunk;
-                 onProgress?.(estimatedPercent, 'Generating response...');
-                 lastProgressTime = now;
-             }
-         }
-         // Parse accumulated response
-         let parsed;
-         try {
-             const jsonMatch = accumulatedContent.match(/```(?:json)?\s*([\s\S]*?)\s*```/);
-             const jsonStr = jsonMatch ? jsonMatch[1] : accumulatedContent;
-             parsed = JSON.parse(jsonStr.trim());
-         }
-         catch {
-             // Try to extract partial JSON
-             const errorChunk = createErrorChunk(session, 'E3001', `Failed to parse JSON response`, false, { raw: accumulatedContent });
-             yield errorChunk;
-             onChunk?.(errorChunk);
-             return undefined;
-         }
-         // Process parsed response
-         let result;
-         if (shouldUseEnvelope && typeof parsed === 'object' && parsed !== null && 'ok' in parsed) {
-             const response = parseEnvelopeResponseLocal(parsed, accumulatedContent);
-             if (shouldUseV22 && response.ok && !('meta' in response && response.meta)) {
-                 const upgraded = wrapV21ToV22Local(parsed, riskRule);
-                 result = {
-                     ok: true,
-                     meta: upgraded.meta,
-                     data: upgraded.data,
-                     raw: accumulatedContent
-                 };
-             }
-             else {
-                 result = response;
-             }
-         }
-         else {
-             result = parseLegacyResponseLocal(parsed, accumulatedContent);
-             if (shouldUseV22 && result.ok) {
-                 const data = (result.data ?? {});
-                 result = {
-                     ok: true,
-                     meta: {
-                         confidence: data.confidence ?? 0.5,
-                         risk: aggregateRisk(data, riskRule),
-                         explain: (data.rationale ?? '').slice(0, 280) || 'No explanation provided'
-                     },
-                     data: result.data,
-                     raw: accumulatedContent
-                 };
-             }
-         }
-         // Emit final chunk
-         if (result.ok && 'meta' in result) {
-             const finalChunk = createFinalChunk(session, result.meta, result.data, streamResult.usage ? {
-                 input_tokens: streamResult.usage.promptTokens,
-                 output_tokens: streamResult.usage.completionTokens,
-                 total_tokens: streamResult.usage.totalTokens
-             } : undefined);
-             yield finalChunk;
-             onChunk?.(finalChunk);
-             onProgress?.(100, 'Complete');
-         }
-         return result;
-     }
-     catch (error) {
-         const errorChunk = createErrorChunk(session, ErrorCodesV25.STREAM_INTERRUPTED, error instanceof Error ? error.message : 'Stream interrupted', true);
-         yield errorChunk;
-         onChunk?.(errorChunk);
-         return undefined;
-     }
- }
- // Local versions of helper functions to avoid circular issues
- function parseEnvelopeResponseLocal(response, raw) {
-     if (isV22Envelope(response)) {
-         if (response.ok) {
-             return {
-                 ok: true,
-                 meta: response.meta,
-                 data: response.data,
-                 raw,
-             };
-         }
-         else {
-             return {
-                 ok: false,
-                 meta: response.meta,
-                 error: response.error,
-                 partial_data: response.partial_data,
-                 raw,
-             };
-         }
-     }
-     if (response.ok) {
-         const data = (response.data ?? {});
-         return {
-             ok: true,
-             data: {
-                 ...data,
-                 confidence: typeof data.confidence === 'number' ? data.confidence : 0.5,
-                 rationale: typeof data.rationale === 'string' ? data.rationale : '',
-                 behavior_equivalence: data.behavior_equivalence,
-             },
-             raw,
-         };
-     }
-     else {
-         return {
-             ok: false,
-             error: response.error,
-             partial_data: response.partial_data,
-             raw,
-         };
-     }
- }
- function wrapV21ToV22Local(response, riskRule = 'max_changes_risk') {
-     if (isV22Envelope(response)) {
-         return response;
-     }
-     if (response.ok) {
-         const data = (response.data ?? {});
-         const confidence = data.confidence ?? 0.5;
-         const rationale = data.rationale ?? '';
-         return {
-             ok: true,
-             meta: {
-                 confidence,
-                 risk: aggregateRisk(data, riskRule),
-                 explain: rationale.slice(0, 280) || 'No explanation provided'
-             },
-             data: data
-         };
-     }
-     else {
-         const errorMsg = response.error?.message ?? 'Unknown error';
-         return {
-             ok: false,
-             meta: {
-                 confidence: 0,
-                 risk: 'high',
-                 explain: errorMsg.slice(0, 280)
-             },
-             error: response.error ?? { code: 'UNKNOWN', message: errorMsg },
-             partial_data: response.partial_data
-         };
-     }
- }
- function parseLegacyResponseLocal(output, raw) {
-     const outputObj = output;
-     const confidence = typeof outputObj.confidence === 'number' ? outputObj.confidence : 0.5;
-     const rationale = typeof outputObj.rationale === 'string' ? outputObj.rationale : '';
-     const behaviorEquivalence = typeof outputObj.behavior_equivalence === 'boolean'
-         ? outputObj.behavior_equivalence
-         : undefined;
-     if (outputObj.error && typeof outputObj.error === 'object') {
-         const errorObj = outputObj.error;
-         if (typeof errorObj.code === 'string') {
-             return {
-                 ok: false,
-                 error: {
-                     code: errorObj.code,
-                     message: typeof errorObj.message === 'string' ? errorObj.message : 'Unknown error',
-                 },
-                 raw,
-             };
-         }
-     }
-     return {
-         ok: true,
-         data: {
-             ...outputObj,
-             confidence,
-             rationale,
-             behavior_equivalence: behaviorEquivalence,
-         },
-         raw,
-     };
- }
- /**
-  * Extract media inputs from module input data
-  */
- function extractMediaInputs(input) {
-     const images = [];
-     const audio = [];
-     const video = [];
-     // Check for images array
-     if (Array.isArray(input.images)) {
-         for (const img of input.images) {
-             if (isValidMediaInput(img)) {
-                 images.push(img);
-             }
-         }
-     }
-     // Check for audio array
-     if (Array.isArray(input.audio)) {
-         for (const aud of input.audio) {
-             if (isValidMediaInput(aud)) {
-                 audio.push(aud);
-             }
-         }
-     }
-     // Check for video array
-     if (Array.isArray(input.video)) {
-         for (const vid of input.video) {
-             if (isValidMediaInput(vid)) {
-                 video.push(vid);
-             }
-         }
-     }
-     return { images, audio, video };
- }
- /**
-  * Validate media input structure
-  */
- function isValidMediaInput(input) {
-     if (typeof input !== 'object' || input === null)
-         return false;
-     const obj = input;
-     if (obj.type === 'url' && typeof obj.url === 'string')
-         return true;
-     if (obj.type === 'base64' && typeof obj.data === 'string' && typeof obj.media_type === 'string')
-         return true;
-     if (obj.type === 'file' && typeof obj.path === 'string')
-         return true;
-     return false;
- }
- /**
-  * Build prompt with media placeholders
-  */
- function buildPromptWithMedia(module, input, media) {
-     let prompt = buildPrompt(module, input);
-     // Replace $MEDIA_INPUTS placeholder
-     if (prompt.includes('$MEDIA_INPUTS')) {
-         const mediaSummary = buildMediaSummary(media);
-         prompt = prompt.replace(/\$MEDIA_INPUTS/g, mediaSummary);
-     }
-     return prompt;
- }
- /**
-  * Build summary of media inputs for prompt
-  */
- function buildMediaSummary(media) {
-     const parts = [];
-     if (media.images.length > 0) {
-         parts.push(`[${media.images.length} image(s) attached]`);
-     }
-     if (media.audio.length > 0) {
-         parts.push(`[${media.audio.length} audio file(s) attached]`);
-     }
-     if (media.video.length > 0) {
-         parts.push(`[${media.video.length} video file(s) attached]`);
-     }
-     return parts.length > 0 ? parts.join('\n') : '[No media attached]';
- }
- /**
-  * Build system message for module execution
-  */
- function buildSystemMessage(module, shouldUseEnvelope, shouldUseV22) {
-     const systemParts = [
-         `You are executing the "${module.name}" Cognitive Module.`,
-         '',
-         `RESPONSIBILITY: ${module.responsibility}`,
-     ];
-     if (module.excludes.length > 0) {
-         systemParts.push('', 'YOU MUST NOT:');
-         module.excludes.forEach(e => systemParts.push(`- ${e}`));
-     }
-     if (module.constraints) {
-         systemParts.push('', 'CONSTRAINTS:');
-         if (module.constraints.no_network)
-             systemParts.push('- No network access');
-         if (module.constraints.no_side_effects)
-             systemParts.push('- No side effects');
-         if (module.constraints.no_file_write)
-             systemParts.push('- No file writes');
-         if (module.constraints.no_inventing_data)
-             systemParts.push('- Do not invent data');
-     }
-     if (module.output?.require_behavior_equivalence) {
-         systemParts.push('', 'BEHAVIOR EQUIVALENCE:');
-         systemParts.push('- You MUST set behavior_equivalence=true ONLY if the output is functionally identical');
-         systemParts.push('- If unsure, set behavior_equivalence=false and explain in rationale');
-         const maxConfidence = module.constraints?.behavior_equivalence_false_max_confidence ?? 0.7;
-         systemParts.push(`- If behavior_equivalence=false, confidence MUST be <= ${maxConfidence}`);
-     }
-     // Add multimodal instructions if module supports it
-     if (isModuleV25(module) && moduleSupportsMultimodal(module)) {
-         const inputModalities = getModuleInputModalities(module);
-         systemParts.push('', 'MULTIMODAL INPUT:');
-         systemParts.push(`- This module accepts: ${inputModalities.join(', ')}`);
-         systemParts.push('- Analyze any attached media carefully');
-         systemParts.push('- Reference specific elements from the media in your analysis');
-     }
-     // Add envelope format instructions
-     if (shouldUseEnvelope) {
-         if (shouldUseV22) {
-             systemParts.push('', 'RESPONSE FORMAT (Envelope v2.2):');
-             systemParts.push('- Wrap your response in the v2.2 envelope format with separate meta and data');
-             systemParts.push('- Success: { "ok": true, "meta": { "confidence": 0.9, "risk": "low", "explain": "short summary" }, "data": { ...payload... } }');
-             systemParts.push('- Error: { "ok": false, "meta": { "confidence": 0.0, "risk": "high", "explain": "error summary" }, "error": { "code": "ERROR_CODE", "message": "..." } }');
-             systemParts.push('- meta.explain must be ≤280 characters. data.rationale can be longer for detailed reasoning.');
-             systemParts.push('- meta.risk must be one of: "none", "low", "medium", "high"');
-         }
-         else {
-             systemParts.push('', 'RESPONSE FORMAT (Envelope):');
-             systemParts.push('- Wrap your response in the envelope format');
-             systemParts.push('- Success: { "ok": true, "data": { ...your output... } }');
-             systemParts.push('- Error: { "ok": false, "error": { "code": "ERROR_CODE", "message": "..." } }');
-             systemParts.push('- Include "confidence" (0-1) and "rationale" in data');
-         }
-         if (module.output?.require_behavior_equivalence) {
-             systemParts.push('- Include "behavior_equivalence" (boolean) in data');
-         }
-     }
-     else {
-         systemParts.push('', 'OUTPUT FORMAT:');
-         systemParts.push('- Respond with ONLY valid JSON');
-         systemParts.push('- Include "confidence" (0-1) and "rationale" fields');
-         if (module.output?.require_behavior_equivalence) {
-             systemParts.push('- Include "behavior_equivalence" (boolean) field');
-         }
-     }
-     return systemParts;
- }
- /**
-  * Load media file as base64
-  */
- export async function loadMediaAsBase64(path) {
-     try {
-         if (!existsSync(path)) {
-             return null;
-         }
-         const buffer = await readFile(path);
-         const data = buffer.toString('base64');
-         const media_type = getMediaTypeFromExtension(extname(path));
-         return { data, media_type };
-     }
-     catch {
-         return null;
-     }
- }
- /**
-  * Get MIME type from file extension
-  */
- function getMediaTypeFromExtension(ext) {
-     const mimeTypes = {
-         '.jpg': 'image/jpeg',
-         '.jpeg': 'image/jpeg',
-         '.png': 'image/png',
-         '.gif': 'image/gif',
-         '.webp': 'image/webp',
-         '.mp3': 'audio/mpeg',
-         '.wav': 'audio/wav',
-         '.ogg': 'audio/ogg',
-         '.webm': 'audio/webm',
-         '.mp4': 'video/mp4',
-         '.mov': 'video/quicktime',
-         '.pdf': 'application/pdf'
-     };
-     return mimeTypes[ext.toLowerCase()] ?? 'application/octet-stream';
- }
- /**
-  * Validate media input against module constraints
-  */
- export function validateMediaInput(media, module, maxSizeMb = 20) {
-     // Check if module supports multimodal
-     if (!moduleSupportsMultimodal(module)) {
-         return {
-             valid: false,
-             error: 'Module does not support multimodal input',
-             code: ErrorCodesV25.MULTIMODAL_NOT_SUPPORTED
-         };
-     }
-     // Validate media type
-     if (media.type === 'base64') {
-         const mediaType = media.media_type;
-         if (!isValidMediaType(mediaType)) {
-             return {
-                 valid: false,
-                 error: `Unsupported media type: ${mediaType}`,
-                 code: ErrorCodesV25.UNSUPPORTED_MEDIA_TYPE
-             };
-         }
-     }
-     // Size validation would require fetching/checking actual data
-     // This is a placeholder for the check
-     return { valid: true };
- }
- /**
-  * Check if media type is supported
-  */
- function isValidMediaType(mediaType) {
-     const supported = [
-         'image/jpeg', 'image/png', 'image/webp', 'image/gif',
-         'audio/mpeg', 'audio/wav', 'audio/ogg', 'audio/webm',
-         'video/mp4', 'video/webm', 'video/quicktime',
-         'application/pdf'
-     ];
-     return supported.includes(mediaType);
- }
- /**
-  * Get runtime capabilities
-  */
- export function getRuntimeCapabilities() {
-     return { ...DEFAULT_RUNTIME_CAPABILITIES };
- }
- /**
-  * Check if runtime supports a specific modality
-  */
- export function runtimeSupportsModality(modality, direction = 'input') {
-     const caps = getRuntimeCapabilities();
-     return caps.multimodal[direction].includes(modality);
- }
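
For reference, the v2.2 envelope that replaces the removed streaming protocol separates a short meta block from the data payload, as spelled out in the RESPONSE FORMAT instructions inside buildSystemMessage above. An illustrative pair of envelopes in TypeScript (field names and the E3001 code come from the diff; the concrete values are invented for the example):

// Success: meta.explain stays under 280 characters; longer reasoning goes in data.rationale.
const success = {
    ok: true,
    meta: { confidence: 0.9, risk: 'low', explain: 'Renamed one variable; behavior unchanged.' },
    data: { rationale: 'Detailed reasoning can run longer than meta.explain...' },
};

// Error: meta is still present, plus a structured error and optional partial_data.
const failure = {
    ok: false,
    meta: { confidence: 0, risk: 'high', explain: 'Model output was not valid JSON.' },
    error: { code: 'E3001', message: 'Failed to parse JSON response' },
    partial_data: undefined,
};

Per the diff, meta.risk is one of "none", "low", "medium", or "high"; when the runtime wraps a legacy v2.1 response itself, it derives that value with aggregateRisk(data, risk_rule), where risk_rule defaults to 'max_changes_risk'.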