@llumiverse/common 0.23.0 → 0.24.0-dev.20260203.164053Z
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/capability/openai.ts +25 -8
- package/src/capability.ts +19 -2
- package/src/types.ts +52 -2
- package/lib/cjs/capability/azure_foundry.js +0 -160
- package/lib/cjs/capability/azure_foundry.js.map +0 -1
- package/lib/cjs/capability/bedrock.js +0 -158
- package/lib/cjs/capability/bedrock.js.map +0 -1
- package/lib/cjs/capability/openai.js +0 -82
- package/lib/cjs/capability/openai.js.map +0 -1
- package/lib/cjs/capability/vertexai.js +0 -80
- package/lib/cjs/capability/vertexai.js.map +0 -1
- package/lib/cjs/capability.js +0 -64
- package/lib/cjs/capability.js.map +0 -1
- package/lib/cjs/index.js +0 -25
- package/lib/cjs/index.js.map +0 -1
- package/lib/cjs/options/azure_foundry.js +0 -428
- package/lib/cjs/options/azure_foundry.js.map +0 -1
- package/lib/cjs/options/bedrock.js +0 -462
- package/lib/cjs/options/bedrock.js.map +0 -1
- package/lib/cjs/options/fallback.js +0 -35
- package/lib/cjs/options/fallback.js.map +0 -1
- package/lib/cjs/options/groq.js +0 -37
- package/lib/cjs/options/groq.js.map +0 -1
- package/lib/cjs/options/openai.js +0 -126
- package/lib/cjs/options/openai.js.map +0 -1
- package/lib/cjs/options/vertexai.js +0 -503
- package/lib/cjs/options/vertexai.js.map +0 -1
- package/lib/cjs/options.js +0 -30
- package/lib/cjs/options.js.map +0 -1
- package/lib/cjs/package.json +0 -3
- package/lib/cjs/types.js +0 -178
- package/lib/cjs/types.js.map +0 -1
- package/lib/esm/capability/azure_foundry.js +0 -157
- package/lib/esm/capability/azure_foundry.js.map +0 -1
- package/lib/esm/capability/bedrock.js +0 -155
- package/lib/esm/capability/bedrock.js.map +0 -1
- package/lib/esm/capability/openai.js +0 -79
- package/lib/esm/capability/openai.js.map +0 -1
- package/lib/esm/capability/vertexai.js +0 -77
- package/lib/esm/capability/vertexai.js.map +0 -1
- package/lib/esm/capability.js +0 -59
- package/lib/esm/capability.js.map +0 -1
- package/lib/esm/index.js +0 -9
- package/lib/esm/index.js.map +0 -1
- package/lib/esm/options/azure_foundry.js +0 -424
- package/lib/esm/options/azure_foundry.js.map +0 -1
- package/lib/esm/options/bedrock.js +0 -458
- package/lib/esm/options/bedrock.js.map +0 -1
- package/lib/esm/options/fallback.js +0 -32
- package/lib/esm/options/fallback.js.map +0 -1
- package/lib/esm/options/groq.js +0 -34
- package/lib/esm/options/groq.js.map +0 -1
- package/lib/esm/options/openai.js +0 -123
- package/lib/esm/options/openai.js.map +0 -1
- package/lib/esm/options/vertexai.js +0 -498
- package/lib/esm/options/vertexai.js.map +0 -1
- package/lib/esm/options.js +0 -27
- package/lib/esm/options.js.map +0 -1
- package/lib/esm/types.js +0 -175
- package/lib/esm/types.js.map +0 -1
- package/lib/types/capability/azure_foundry.d.ts +0 -7
- package/lib/types/capability/azure_foundry.d.ts.map +0 -1
- package/lib/types/capability/bedrock.d.ts +0 -7
- package/lib/types/capability/bedrock.d.ts.map +0 -1
- package/lib/types/capability/openai.d.ts +0 -11
- package/lib/types/capability/openai.d.ts.map +0 -1
- package/lib/types/capability/vertexai.d.ts +0 -11
- package/lib/types/capability/vertexai.d.ts.map +0 -1
- package/lib/types/capability.d.ts +0 -5
- package/lib/types/capability.d.ts.map +0 -1
- package/lib/types/index.d.ts +0 -9
- package/lib/types/index.d.ts.map +0 -1
- package/lib/types/options/azure_foundry.d.ts +0 -52
- package/lib/types/options/azure_foundry.d.ts.map +0 -1
- package/lib/types/options/bedrock.d.ts +0 -52
- package/lib/types/options/bedrock.d.ts.map +0 -1
- package/lib/types/options/fallback.d.ts +0 -13
- package/lib/types/options/fallback.d.ts.map +0 -1
- package/lib/types/options/groq.d.ts +0 -12
- package/lib/types/options/groq.d.ts.map +0 -1
- package/lib/types/options/openai.d.ts +0 -21
- package/lib/types/options/openai.d.ts.map +0 -1
- package/lib/types/options/vertexai.d.ts +0 -74
- package/lib/types/options/vertexai.d.ts.map +0 -1
- package/lib/types/options.d.ts +0 -3
- package/lib/types/options.d.ts.map +0 -1
- package/lib/types/types.d.ts +0 -408
- package/lib/types/types.d.ts.map +0 -1
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@llumiverse/common",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.24.0-dev.20260203.164053Z",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "Public types, enums and options used by Llumiverse API.",
|
|
6
6
|
"files": [
|
|
@@ -66,7 +66,7 @@
|
|
|
66
66
|
"rimraf": "^6.1.2",
|
|
67
67
|
"ts-dual-module": "^0.6.3",
|
|
68
68
|
"typescript": "^5.9.3",
|
|
69
|
-
"vitest": "^
|
|
69
|
+
"vitest": "^4.0.16",
|
|
70
70
|
"@types/node": "^22.19.1"
|
|
71
71
|
},
|
|
72
72
|
"dependencies": {},
|
package/src/capability/openai.ts
CHANGED
|
@@ -1,15 +1,19 @@
|
|
|
1
1
|
import { ModelModalities } from "../types.js";
|
|
2
2
|
|
|
3
|
+
// OpenAI model capability type - tool_support_streaming defaults to tool_support for OpenAI models
|
|
4
|
+
type OpenAIModelCapability = { input: ModelModalities; output: ModelModalities; tool_support?: boolean; tool_support_streaming?: boolean };
|
|
5
|
+
|
|
3
6
|
// Record of OpenAI model capabilities keyed by model ID (lowercased)
|
|
4
|
-
const RECORD_MODEL_CAPABILITIES: Record<string,
|
|
5
|
-
"chatgpt-4o-latest": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support:
|
|
7
|
+
const RECORD_MODEL_CAPABILITIES: Record<string, OpenAIModelCapability> = {
|
|
8
|
+
"chatgpt-4o-latest": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true }
|
|
6
9
|
};
|
|
7
10
|
|
|
8
11
|
// Populate RECORD_FAMILY_CAPABILITIES as a const record (lowest common denominator for each family)
|
|
9
|
-
|
|
12
|
+
// For OpenAI, tool_support_streaming matches tool_support since the API supports tools while streaming
|
|
13
|
+
const RECORD_FAMILY_CAPABILITIES: Record<string, OpenAIModelCapability> = {
|
|
10
14
|
"gpt": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true },
|
|
11
15
|
"gpt-3.5": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false },
|
|
12
|
-
"gpt-4": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support:
|
|
16
|
+
"gpt-4": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true },
|
|
13
17
|
"gpt-4-turbo": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true },
|
|
14
18
|
"gpt-4o": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true },
|
|
15
19
|
"gpt-4.1": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true },
|
|
@@ -49,11 +53,18 @@ function normalizeOpenAIModelName(modelName: string): string {
|
|
|
49
53
|
/**
|
|
50
54
|
* Get the full ModelCapabilities for an OpenAI model.
|
|
51
55
|
* Checks RECORD_MODEL_CAPABILITIES first, then falls back to pattern-based inference.
|
|
56
|
+
* For OpenAI models, tool_support_streaming defaults to tool_support since the API supports tools while streaming.
|
|
52
57
|
*/
|
|
53
|
-
export function getModelCapabilitiesOpenAI(model: string): { input: ModelModalities; output: ModelModalities; tool_support?: boolean } {
|
|
58
|
+
export function getModelCapabilitiesOpenAI(model: string): { input: ModelModalities; output: ModelModalities; tool_support?: boolean; tool_support_streaming?: boolean } {
|
|
54
59
|
const normalized = normalizeOpenAIModelName(model);
|
|
55
60
|
const record = RECORD_MODEL_CAPABILITIES[normalized];
|
|
56
|
-
if (record)
|
|
61
|
+
if (record) {
|
|
62
|
+
// Default tool_support_streaming to tool_support for OpenAI models
|
|
63
|
+
return {
|
|
64
|
+
...record,
|
|
65
|
+
tool_support_streaming: record.tool_support_streaming ?? record.tool_support
|
|
66
|
+
};
|
|
67
|
+
}
|
|
57
68
|
let bestFamilyKey = undefined;
|
|
58
69
|
let bestFamilyLength = 0;
|
|
59
70
|
for (const key of Object.keys(RECORD_FAMILY_CAPABILITIES)) {
|
|
@@ -63,7 +74,12 @@ export function getModelCapabilitiesOpenAI(model: string): { input: ModelModalit
|
|
|
63
74
|
}
|
|
64
75
|
}
|
|
65
76
|
if (bestFamilyKey) {
|
|
66
|
-
|
|
77
|
+
const family = RECORD_FAMILY_CAPABILITIES[bestFamilyKey];
|
|
78
|
+
// Default tool_support_streaming to tool_support for OpenAI models
|
|
79
|
+
return {
|
|
80
|
+
...family,
|
|
81
|
+
tool_support_streaming: family.tool_support_streaming ?? family.tool_support
|
|
82
|
+
};
|
|
67
83
|
}
|
|
68
84
|
const input: ModelModalities = {
|
|
69
85
|
text: modelMatches(normalized, TEXT_INPUT_MODELS) || undefined,
|
|
@@ -80,5 +96,6 @@ export function getModelCapabilitiesOpenAI(model: string): { input: ModelModalit
|
|
|
80
96
|
embed: modelMatches(normalized, EMBEDDING_OUTPUT_MODELS) || undefined
|
|
81
97
|
};
|
|
82
98
|
const tool_support = modelMatches(normalized, TOOL_SUPPORT_MODELS) || undefined;
|
|
83
|
-
|
|
99
|
+
// Default tool_support_streaming to tool_support for OpenAI models
|
|
100
|
+
return { input, output, tool_support, tool_support_streaming: tool_support };
|
|
84
101
|
}
|
package/src/capability.ts
CHANGED
|
@@ -14,12 +14,12 @@ export function getModelCapabilities(model: string, provider?: string | Provider
|
|
|
14
14
|
}
|
|
15
15
|
const capabilities = _getModelCapabilities(model, provider);
|
|
16
16
|
// Globally disable audio and video for all models, as we don't support them yet
|
|
17
|
-
// We also do not support tool use while streaming
|
|
18
17
|
// TODO: Remove this when we add support.
|
|
19
18
|
capabilities.input.audio = false;
|
|
20
19
|
capabilities.output.audio = false;
|
|
21
20
|
capabilities.output.video = false;
|
|
22
|
-
|
|
21
|
+
// Preserve tool_support_streaming from provider-specific capabilities if set,
|
|
22
|
+
// otherwise default to false for providers that haven't been verified
|
|
23
23
|
return capabilities;
|
|
24
24
|
}
|
|
25
25
|
|
|
@@ -28,16 +28,33 @@ function _getModelCapabilities(model: string, provider?: string | Providers): Mo
|
|
|
28
28
|
case Providers.vertexai:
|
|
29
29
|
return getModelCapabilitiesVertexAI(model);
|
|
30
30
|
case Providers.openai:
|
|
31
|
+
case Providers.openai_compatible:
|
|
31
32
|
return getModelCapabilitiesOpenAI(model);
|
|
32
33
|
case Providers.bedrock:
|
|
33
34
|
return getModelCapabilitiesBedrock(model);
|
|
34
35
|
case Providers.azure_foundry:
|
|
35
36
|
// Azure Foundry uses OpenAI capabilities
|
|
36
37
|
return getModelCapabilitiesAzureFoundry(model);
|
|
38
|
+
case Providers.xai:
|
|
39
|
+
// xAI (Grok) models support tool use and are text-based
|
|
40
|
+
return {
|
|
41
|
+
input: { text: true, image: model.includes("vision") },
|
|
42
|
+
output: { text: true },
|
|
43
|
+
tool_support: true,
|
|
44
|
+
tool_support_streaming: false, // Conservative - may work but not tested
|
|
45
|
+
};
|
|
37
46
|
default:
|
|
38
47
|
// Guess the provider based on the model name
|
|
39
48
|
if (model.startsWith("gpt")) {
|
|
40
49
|
return getModelCapabilitiesOpenAI(model);
|
|
50
|
+
} else if (model.startsWith("grok")) {
|
|
51
|
+
// xAI Grok models
|
|
52
|
+
return {
|
|
53
|
+
input: { text: true, image: model.includes("vision") },
|
|
54
|
+
output: { text: true },
|
|
55
|
+
tool_support: true,
|
|
56
|
+
tool_support_streaming: false,
|
|
57
|
+
};
|
|
41
58
|
} else if (model.startsWith("publishers/")) {
|
|
42
59
|
return getModelCapabilitiesVertexAI(model);
|
|
43
60
|
} else if (model.startsWith("arn:aws")) {
|
package/src/types.ts
CHANGED
|
@@ -8,6 +8,7 @@ import { VertexAIOptions } from './options/vertexai.js';
|
|
|
8
8
|
|
|
9
9
|
export enum Providers {
|
|
10
10
|
openai = 'openai',
|
|
11
|
+
openai_compatible = 'openai_compatible',
|
|
11
12
|
azure_openai = 'azure_openai',
|
|
12
13
|
azure_foundry = 'azure_foundry',
|
|
13
14
|
huggingface_ie = 'huggingface_ie',
|
|
@@ -17,7 +18,8 @@ export enum Providers {
|
|
|
17
18
|
togetherai = 'togetherai',
|
|
18
19
|
mistralai = 'mistralai',
|
|
19
20
|
groq = 'groq',
|
|
20
|
-
watsonx = 'watsonx'
|
|
21
|
+
watsonx = 'watsonx',
|
|
22
|
+
xai = 'xai'
|
|
21
23
|
}
|
|
22
24
|
|
|
23
25
|
export interface ProviderParams {
|
|
@@ -113,6 +115,21 @@ export const ProviderList: Record<Providers, ProviderParams> = {
|
|
|
113
115
|
requiresEndpointUrl: true,
|
|
114
116
|
supportSearch: false
|
|
115
117
|
},
|
|
118
|
+
xai: {
|
|
119
|
+
id: Providers.xai,
|
|
120
|
+
name: "xAI (Grok)",
|
|
121
|
+
requiresApiKey: true,
|
|
122
|
+
requiresEndpointUrl: false,
|
|
123
|
+
supportSearch: false
|
|
124
|
+
},
|
|
125
|
+
openai_compatible: {
|
|
126
|
+
id: Providers.openai_compatible,
|
|
127
|
+
name: "OpenAI Compatible",
|
|
128
|
+
requiresApiKey: true,
|
|
129
|
+
requiresEndpointUrl: true,
|
|
130
|
+
endpointPlaceholder: "https://api.example.com/v1",
|
|
131
|
+
supportSearch: false
|
|
132
|
+
},
|
|
116
133
|
}
|
|
117
134
|
|
|
118
135
|
// ============== Embeddings ===============
|
|
@@ -184,6 +201,11 @@ export interface CompletionChunkObject {
|
|
|
184
201
|
result: CompletionResult[];
|
|
185
202
|
token_usage?: ExecutionTokenUsage;
|
|
186
203
|
finish_reason?: "stop" | "length" | string;
|
|
204
|
+
/**
|
|
205
|
+
* Tool calls returned by the model during streaming.
|
|
206
|
+
* Each chunk may contain partial tool call information that needs to be aggregated.
|
|
207
|
+
*/
|
|
208
|
+
tool_use?: ToolUse[];
|
|
187
209
|
}
|
|
188
210
|
|
|
189
211
|
export interface ToolDefinition {
|
|
@@ -202,7 +224,12 @@ export interface ToolDefinition {
|
|
|
202
224
|
export interface ToolUse<ParamsT = JSONObject> {
|
|
203
225
|
id: string,
|
|
204
226
|
tool_name: string,
|
|
205
|
-
tool_input: ParamsT | null
|
|
227
|
+
tool_input: ParamsT | null,
|
|
228
|
+
/**
|
|
229
|
+
* Gemini thinking models require thought_signature to be passed back with tool results.
|
|
230
|
+
* This preserves the model's reasoning state during multi-turn tool use.
|
|
231
|
+
*/
|
|
232
|
+
thought_signature?: string,
|
|
206
233
|
}
|
|
207
234
|
|
|
208
235
|
export interface Completion {
|
|
@@ -349,6 +376,24 @@ export interface ExecutionOptions extends StatelessExecutionOptions {
|
|
|
349
376
|
* that can be passed here to restore the context when a new prompt is sent to the model.
|
|
350
377
|
*/
|
|
351
378
|
conversation?: unknown | null;
|
|
379
|
+
/**
|
|
380
|
+
* Number of turns to keep images in conversation history before stripping them.
|
|
381
|
+
* - 0 (default): Strip images immediately after each turn
|
|
382
|
+
* - 1: Keep images for current turn only, strip in next turn
|
|
383
|
+
* - N: Keep images for N turns before stripping
|
|
384
|
+
* - undefined: Same as 0, strip immediately
|
|
385
|
+
*
|
|
386
|
+
* Images are stripped to prevent JSON.stringify corruption (Uint8Array) and reduce storage bloat (base64).
|
|
387
|
+
*/
|
|
388
|
+
stripImagesAfterTurns?: number;
|
|
389
|
+
|
|
390
|
+
/**
|
|
391
|
+
* Maximum tokens to keep for text content in tool results.
|
|
392
|
+
* Text exceeding this limit will be truncated with a "[Content truncated...]" marker.
|
|
393
|
+
* - undefined/0: No text truncation (default)
|
|
394
|
+
* - N > 0: Truncate text to approximately N tokens (using ~4 chars/token estimate)
|
|
395
|
+
*/
|
|
396
|
+
stripTextMaxTokens?: number;
|
|
352
397
|
}
|
|
353
398
|
|
|
354
399
|
//Common names to share between different models
|
|
@@ -446,6 +491,11 @@ export interface PromptSegment {
|
|
|
446
491
|
* The tool use id if the segment is a tool response
|
|
447
492
|
*/
|
|
448
493
|
tool_use_id?: string;
|
|
494
|
+
/**
|
|
495
|
+
* Gemini thinking models require thought_signature to be passed back with tool results.
|
|
496
|
+
* This should be copied from the ToolUse.thought_signature when sending tool responses.
|
|
497
|
+
*/
|
|
498
|
+
thought_signature?: string;
|
|
449
499
|
files?: DataSource[]
|
|
450
500
|
}
|
|
451
501
|
|
|
@@ -1,160 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getModelCapabilitiesAzureFoundry = getModelCapabilitiesAzureFoundry;
|
|
4
|
-
// Global feature flags - temporarily disable tool support for non-OpenAI models
|
|
5
|
-
const ENABLE_TOOL_SUPPORT_NON_OPENAI = false;
|
|
6
|
-
// Record of Azure Foundry model capabilities keyed by model ID (lowercased)
|
|
7
|
-
// Only include models with specific exceptions that differ from their family patterns
|
|
8
|
-
const RECORD_MODEL_CAPABILITIES = {
|
|
9
|
-
// OpenAI O-series exceptions - o1-mini doesn't have tool support like other o1 models
|
|
10
|
-
"o1-mini": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
11
|
-
// OpenAI o3 is text-only unlike other o-series models
|
|
12
|
-
"o3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
13
|
-
// Models with special properties not covered by family patterns
|
|
14
|
-
"deepseek-r1-0528": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
15
|
-
"deepseek-v3-0324": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
16
|
-
"mistral-medium-2505": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
17
|
-
"mistral-nemo": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
18
|
-
"llama-4-scout-17b-16e-instruct": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
19
|
-
};
|
|
20
|
-
// Populate RECORD_FAMILY_CAPABILITIES as a const record (lowest common denominator for each family)
|
|
21
|
-
const RECORD_FAMILY_CAPABILITIES = {
|
|
22
|
-
// OpenAI GPT families
|
|
23
|
-
"gpt-3.5-turbo": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
24
|
-
"gpt-35-turbo": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
25
|
-
"gpt-35": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
26
|
-
"gpt-4": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
27
|
-
"gpt-4.1": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
28
|
-
"gpt-4.5": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
29
|
-
"gpt-4o": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
30
|
-
"gpt": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
31
|
-
// OpenAI O-series families
|
|
32
|
-
"o1": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
33
|
-
"o1-preview": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
34
|
-
"o1-pro": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
35
|
-
"o3-mini": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
36
|
-
"o4-mini": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
37
|
-
"o4": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
38
|
-
"o": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
39
|
-
// Llama families
|
|
40
|
-
"llama-3.1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
41
|
-
"llama-3.2": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
42
|
-
"llama-3.3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
43
|
-
"llama-3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
44
|
-
"llama-4": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
45
|
-
"llama": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
46
|
-
// Mistral families
|
|
47
|
-
"mistral-large": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
48
|
-
"mistral-small": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
49
|
-
"mistral": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
50
|
-
// Microsoft Phi families
|
|
51
|
-
"phi-4": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
52
|
-
"phi": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
53
|
-
// DeepSeek families
|
|
54
|
-
"deepseek-r1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
55
|
-
"deepseek-v3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
|
|
56
|
-
"deepseek": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
|
|
57
|
-
// AI21 families
|
|
58
|
-
"ai21-jamba": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
59
|
-
"ai21": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
60
|
-
"jamba": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
61
|
-
// Cohere families
|
|
62
|
-
"cohere-command": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
63
|
-
"cohere": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
64
|
-
"command": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
65
|
-
// xAI families
|
|
66
|
-
"grok-3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
|
|
67
|
-
"grok": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true }
|
|
68
|
-
};
|
|
69
|
-
// Fallback pattern lists for inferring modalities and tool support
|
|
70
|
-
const IMAGE_INPUT_MODELS = ["image", "vision"];
|
|
71
|
-
const VIDEO_INPUT_MODELS = ["video"];
|
|
72
|
-
const AUDIO_INPUT_MODELS = ["audio"];
|
|
73
|
-
const TEXT_INPUT_MODELS = ["text"];
|
|
74
|
-
const IMAGE_OUTPUT_MODELS = ["image"];
|
|
75
|
-
const VIDEO_OUTPUT_MODELS = ["video"];
|
|
76
|
-
const AUDIO_OUTPUT_MODELS = ["audio"];
|
|
77
|
-
const TEXT_OUTPUT_MODELS = ["text"];
|
|
78
|
-
const EMBEDDING_OUTPUT_MODELS = ["embed"];
|
|
79
|
-
const TOOL_SUPPORT_MODELS = ["tool", "gpt-4", "gpt-4o", "o1", "o3", "o4", "llama-3", "mistral-large", "mistral-small", "jamba", "cohere", "command", "grok"];
|
|
80
|
-
function modelMatches(modelName, patterns) {
|
|
81
|
-
return patterns.some(pattern => modelName.includes(pattern));
|
|
82
|
-
}
|
|
83
|
-
/**
|
|
84
|
-
* Get the full ModelCapabilities for an Azure Foundry model.
|
|
85
|
-
* Checks RECORD_MODEL_CAPABILITIES first, then falls back to family pattern matching.
|
|
86
|
-
*/
|
|
87
|
-
function getModelCapabilitiesAzureFoundry(model) {
|
|
88
|
-
// Extract base model from composite ID (deployment::baseModel)
|
|
89
|
-
const { baseModel } = parseAzureFoundryModelId(model);
|
|
90
|
-
const normalized = baseModel.toLowerCase();
|
|
91
|
-
// 1. Exact match in record
|
|
92
|
-
const record = RECORD_MODEL_CAPABILITIES[normalized];
|
|
93
|
-
if (record) {
|
|
94
|
-
return applyGlobalToolSupportDisable(record, normalized);
|
|
95
|
-
}
|
|
96
|
-
// 2. Fallback: find the longest matching family prefix in RECORD_FAMILY_CAPABILITIES
|
|
97
|
-
let bestFamilyKey = undefined;
|
|
98
|
-
let bestFamilyLength = 0;
|
|
99
|
-
for (const key of Object.keys(RECORD_FAMILY_CAPABILITIES)) {
|
|
100
|
-
if (normalized.startsWith(key) && key.length > bestFamilyLength) {
|
|
101
|
-
bestFamilyKey = key;
|
|
102
|
-
bestFamilyLength = key.length;
|
|
103
|
-
}
|
|
104
|
-
}
|
|
105
|
-
if (bestFamilyKey) {
|
|
106
|
-
return applyGlobalToolSupportDisable(RECORD_FAMILY_CAPABILITIES[bestFamilyKey], normalized);
|
|
107
|
-
}
|
|
108
|
-
// 3. Fallback: infer from normalized name using patterns
|
|
109
|
-
const input = {
|
|
110
|
-
text: modelMatches(normalized, TEXT_INPUT_MODELS) || true, // Default to text input
|
|
111
|
-
image: modelMatches(normalized, IMAGE_INPUT_MODELS) || undefined,
|
|
112
|
-
video: modelMatches(normalized, VIDEO_INPUT_MODELS) || undefined,
|
|
113
|
-
audio: modelMatches(normalized, AUDIO_INPUT_MODELS) || undefined,
|
|
114
|
-
embed: false
|
|
115
|
-
};
|
|
116
|
-
const output = {
|
|
117
|
-
text: modelMatches(normalized, TEXT_OUTPUT_MODELS) || true, // Default to text output
|
|
118
|
-
image: modelMatches(normalized, IMAGE_OUTPUT_MODELS) || undefined,
|
|
119
|
-
video: modelMatches(normalized, VIDEO_OUTPUT_MODELS) || undefined,
|
|
120
|
-
audio: modelMatches(normalized, AUDIO_OUTPUT_MODELS) || undefined,
|
|
121
|
-
embed: modelMatches(normalized, EMBEDDING_OUTPUT_MODELS) || undefined
|
|
122
|
-
};
|
|
123
|
-
const tool_support = modelMatches(normalized, TOOL_SUPPORT_MODELS) || undefined;
|
|
124
|
-
const tool_support_streaming = tool_support || undefined;
|
|
125
|
-
const inferredCapabilities = { input, output, tool_support, tool_support_streaming };
|
|
126
|
-
return applyGlobalToolSupportDisable(inferredCapabilities, normalized);
|
|
127
|
-
}
|
|
128
|
-
/**
 * Apply global tool support disable for non-OpenAI models.
 * Preserves model-specific information for future use while temporarily disabling tool support.
 */
function applyGlobalToolSupportDisable(capabilities, modelName) {
    // Model-name prefixes treated as OpenAI families (exempt from the global disable).
    const openAiPrefixes = ['gpt-', 'o1', 'o3', 'o4'];
    const isOpenAIModel = openAiPrefixes.some((prefix) => modelName.startsWith(prefix));
    // OpenAI models, or everything when the global flag is on, pass through untouched.
    if (isOpenAIModel || ENABLE_TOOL_SUPPORT_NON_OPENAI) {
        return capabilities;
    }
    // Non-OpenAI model with the flag off: strip tool support, keep every other capability.
    return {
        ...capabilities,
        tool_support: false,
        tool_support_streaming: false
    };
}
|
|
145
|
-
// Split a composite "deployment::baseModel" id into its two halves.
function parseAzureFoundryModelId(compositeId) {
    const segments = compositeId.split('::');
    if (segments.length !== 2) {
        // Backwards compatibility: no single '::' delimiter — treat the whole
        // id as both the deployment name and the base model.
        return {
            deploymentName: compositeId,
            baseModel: compositeId
        };
    }
    const [deploymentName, baseModel] = segments;
    return { deploymentName, baseModel };
}
|
|
160
|
-
//# sourceMappingURL=azure_foundry.js.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"azure_foundry.js","sourceRoot":"","sources":["../../../src/capability/azure_foundry.ts"],"names":[],"mappings":";;AAmGA,4EA4CC;AA7ID,gFAAgF;AAChF,MAAM,8BAA8B,GAAG,KAAK,CAAC;AAE7C,6EAA6E;AAC7E,sFAAsF;AACtF,MAAM,yBAAyB,GAAsC;IACjE,sFAAsF;IACtF,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC/N,sDAAsD;IACtD,IAAI,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAE1N,gEAAgE;IAChE,kBAAkB,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACzO,kBAAkB,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACzO,qBAAqB,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC5O,cAAc,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,
KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACrO,gCAAgC,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;CACzP,CAAC;AAEF,oGAAoG;AACpG,MAAM,0BAA0B,GAAsC;IAClE,sBAAsB;IACtB,eAAe,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACtO,cAAc,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACrO,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC/N,OAAO,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC3N,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC7N,SAAS,EA
AE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC7N,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC5N,KAAK,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAC;IAExN,2BAA2B;IAC3B,IAAI,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACzN,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACjO,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC7N,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,E
AAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC9N,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC9N,IAAI,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACzN,GAAG,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAExN,iBAAiB;IACjB,WAAW,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACjO,WAAW,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACjO,WAAW,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACjO,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IA
AI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC/N,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC/N,OAAO,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAE9N,mBAAmB;IACnB,eAAe,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACrO,eAAe,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACrO,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAEhO,yBAAyB;IACzB,OAAO,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAA
E,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAC9N,KAAK,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAE5N,oBAAoB;IACpB,aAAa,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACpO,aAAa,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE;IACnO,UAAU,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE,sBAAsB,EAAE,KAAK,EAAE;IAEjO,gBAAgB;IAChB,YAAY,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IACjO,MAAM,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC3N,OAAO,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,K
AAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAE5N,kBAAkB;IAClB,gBAAgB,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IACrO,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC7N,SAAS,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAE9N,eAAe;IACf,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;IAC7N,MAAM,EAAE,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,sBAAsB,EAAE,IAAI,EAAE;CAC9N,CAAC;AAEF,mEAAmE;AACnE,MAAM,kBAAkB,GAAG,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;AAC/C,MAAM,kBAAkB,GAAG,CAAC,OAAO,CAAC,CAAC;AACrC,MAAM,kBAAkB,GAAG,CAAC,OAAO,CAAC,CAAC;AACrC,MAAM,iBAAiB,GAAG,CAAC,MAAM,CAAC,CAAC;AACnC,MAA
M,mBAAmB,GAAG,CAAC,OAAO,CAAC,CAAC;AACtC,MAAM,mBAAmB,GAAG,CAAC,OAAO,CAAC,CAAC;AACtC,MAAM,mBAAmB,GAAG,CAAC,OAAO,CAAC,CAAC;AACtC,MAAM,kBAAkB,GAAG,CAAC,MAAM,CAAC,CAAC;AACpC,MAAM,uBAAuB,GAAG,CAAC,OAAO,CAAC,CAAC;AAC1C,MAAM,mBAAmB,GAAG,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EAAE,eAAe,EAAE,eAAe,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,CAAC,CAAC;AAE7J,SAAS,YAAY,CAAC,SAAiB,EAAE,QAAkB;IACvD,OAAO,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;AACjE,CAAC;AAED;;;GAGG;AACH,SAAgB,gCAAgC,CAAC,KAAa;IAC1D,+DAA+D;IAC/D,MAAM,EAAE,SAAS,EAAE,GAAG,wBAAwB,CAAC,KAAK,CAAC,CAAC;IACtD,MAAM,UAAU,GAAG,SAAS,CAAC,WAAW,EAAE,CAAC;IAE3C,2BAA2B;IAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,UAAU,CAAC,CAAC;IACrD,IAAI,MAAM,EAAE,CAAC;QACT,OAAO,6BAA6B,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IAC7D,CAAC;IAED,qFAAqF;IACrF,IAAI,aAAa,GAAG,SAAS,CAAC;IAC9B,IAAI,gBAAgB,GAAG,CAAC,CAAC;IACzB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,0BAA0B,CAAC,EAAE,CAAC;QACxD,IAAI,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,GAAG,gBAAgB,EAAE,CAAC;YAC9D,aAAa,GAAG,GAAG,CAAC;YACpB,gBAAgB,GAAG,GAAG,CAAC,MAAM,CAAC;QAClC,CAAC;IACL,CAAC;IACD,IAAI,aAAa,EAAE,CAAC;QAChB,OAAO,6BAA6B,CAAC,0BAA0B,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAChG,CAAC;IAED,yDAAyD;IACzD,MAAM,KAAK,GAAoB;QAC3B,IAAI,EAAE,YAAY,CAAC,UAAU,EAAE,iBAAiB,CAAC,IAAI,IAAI,EAAE,wBAAwB;QACnF,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,kBAAkB,CAAC,IAAI,SAAS;QAChE,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,kBAAkB,CAAC,IAAI,SAAS;QAChE,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,kBAAkB,CAAC,IAAI,SAAS;QAChE,KAAK,EAAE,KAAK;KACf,CAAC;IACF,MAAM,MAAM,GAAoB;QAC5B,IAAI,EAAE,YAAY,CAAC,UAAU,EAAE,kBAAkB,CAAC,IAAI,IAAI,EAAE,yBAAyB;QACrF,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,mBAAmB,CAAC,IAAI,SAAS;QACjE,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,mBAAmB,CAAC,IAAI,SAAS;QACjE,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,mBAAmB,CAAC,IAAI,SAAS;QACjE,KAAK,EAAE,YAAY,CAAC,UAAU,EAAE,uBAAuB,CAAC,IAAI,SAAS;KACxE,CAAC;IACF,MAAM,YAAY,GAAG,YAAY,CAAC,UAAU,EAAE,mBAAmB,CAAC,IAAI,SAAS,CAAC;IAChF,MAAM,sBAAsB,GAAG,YAAY,IAAI,SAAS,CAAC;IAEzD,MAAM,oBAAoB,GAAG,
EAAE,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,sBAAsB,EAAE,CAAC;IACrF,OAAO,6BAA6B,CAAC,oBAAoB,EAAE,UAAU,CAAC,CAAC;AAC3E,CAAC;AAED;;;GAGG;AACH,SAAS,6BAA6B,CAAC,YAA+B,EAAE,SAAiB;IACrF,mCAAmC;IACnC,MAAM,aAAa,GAAG,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;IAE7I,IAAI,CAAC,8BAA8B,IAAI,CAAC,aAAa,EAAE,CAAC;QACpD,iFAAiF;QACjF,OAAO;YACH,GAAG,YAAY;YACf,YAAY,EAAE,KAAK;YACnB,sBAAsB,EAAE,KAAK;SAChC,CAAC;IACN,CAAC;IAED,OAAO,YAAY,CAAC;AACxB,CAAC;AAED,+CAA+C;AAC/C,SAAS,wBAAwB,CAAC,WAAmB;IACjD,MAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACrB,OAAO;YACH,cAAc,EAAE,KAAK,CAAC,CAAC,CAAC;YACxB,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;SACtB,CAAC;IACN,CAAC;IAED,2EAA2E;IAC3E,OAAO;QACH,cAAc,EAAE,WAAW;QAC3B,SAAS,EAAE,WAAW;KACzB,CAAC;AACN,CAAC"}
|
|
@@ -1,158 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getModelCapabilitiesBedrock = getModelCapabilitiesBedrock;
|
|
4
|
-
// Explicit exception lists keyed by the model identifier (last segment after the prefix)
// These take precedence over RECORD_MODEL_CAPABILITIES / RECORD_FAMILY_CAPABILITIES,
// but only for the matching id kind (foundation-model vs inference-profile) —
// see getModelCapabilitiesBedrock.
// Currently no foundation-model-specific exceptions are recorded.
const RECORD_FOUNDATION_EXCEPTIONS = {};
// Inference-profile exceptions: these llama3-1 profiles report no tool support,
// unlike the "meta.llama3-1" family default below.
const RECORD_PROFILE_EXCEPTIONS = {
    "meta.llama3-1-70b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "meta.llama3-1-8b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
};
|
|
10
|
-
// Record of Bedrock model capabilities keyed by model identifier.
// Only include models that differ from their family defaults
// (an exact-id hit here short-circuits the longest-prefix family lookup).
const RECORD_MODEL_CAPABILITIES = {
    // Models with specific exceptions that differ from family
    "ai21.jamba-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    // nova-canvas is the one image-output entry in this table.
    "amazon.nova-canvas-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { image: true, text: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "amazon.nova-micro-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
    "anthropic.claude-3-5-haiku-20241022-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "meta.llama3-2-11b-instruct-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "meta.llama3-2-90b-instruct-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
};
|
|
21
|
-
// Family capabilities (longest prefix match)
// Keys are matched with String.startsWith against the lookup key, and the longest
// matching key wins (see findFamilyCapability) — so "meta.llama3-1" beats "meta.llama3".
// Keys ending in '.' (e.g. "qwen.") match an entire provider namespace.
const RECORD_FAMILY_CAPABILITIES = {
    "ai21.jamba": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
    "amazon.nova": { input: { text: true, image: true, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
    "amazon.titan": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "anthropic.claude-3-5-haiku": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "anthropic.claude": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
    "cohere.command-r": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
    "cohere.command": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "deepseek.r1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "meta.llama3-1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "meta.llama3-2": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "meta.llama3-3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "meta.llama3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "meta.llama4": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "mistral.mistral-large": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "mistral.mistral": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "mistral.mixtral": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "mistral.pixtral": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "openai.gpt-oss": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "qwen.": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
    "twelvelabs.": { input: { text: true, image: false, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
    "writer.palmyra": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
};
|
|
45
|
-
// Reduce a (possibly ARN-suffixed) model name to the lowercase key used by the
// capability tables: the final path segment, with the leading dot-delimited
// segment stripped for inference-profile ids.
function extractModelLookupKey(modelName) {
    const lowered = modelName.toLowerCase();
    // Keep only what follows the last '/', if the name has a path at all.
    const slashAt = lowered.lastIndexOf("/");
    let lookupKey = slashAt >= 0 ? lowered.slice(slashAt + 1) : lowered;
    if (lowered.includes("inference-profile/")) {
        // Inference-profile ids carry an extra leading "<segment>." prefix
        // (presumably a region code — confirm against callers); drop it.
        lookupKey = lookupKey.replace(/^[^.]+\./, "");
    }
    return lookupKey;
}
|
|
54
|
-
// Longest-prefix lookup: return the capability record whose family key is the
// longest prefix of lookupKey, or undefined when no key matches.
function findFamilyCapability(lookupKey, families) {
    // Collect every matching family key, most specific (longest) first.
    // Array sort is stable, so equal-length keys keep their original order,
    // matching the original first-seen tie-break.
    const candidates = Object.keys(families)
        .filter((key) => lookupKey.startsWith(key))
        .sort((a, b) => b.length - a.length);
    const winner = candidates[0];
    return winner ? families[winner] : undefined;
}
|
|
63
|
-
/**
 * Extract the model identifier from an ARN or inference profile.
 * Non-inference-profile names are only lowercased. For inference-profile names,
 * the provider/model tail is reduced to the bare model id, with a special
 * "deepseek-" rewrite for deepseek profiles.
 * @param modelName The full model ARN or name
 * @returns The normalized model identifier
 */
function normalizeModelName(modelName) {
    const lowered = modelName.toLowerCase();
    if (!lowered.includes("inference-profile")) {
        return lowered;
    }
    const pathSegments = lowered.split("/");
    if (pathSegments.length <= 1) {
        // "inference-profile" appears but there is no path — nothing to strip.
        return lowered;
    }
    // Last path segment looks like "<prefix>.<provider>.<model...>".
    const providerModel = pathSegments[pathSegments.length - 1];
    const dotted = providerModel.split(".");
    if (dotted.length > 1 && dotted[1] === "deepseek") {
        // Deepseek profiles are rewritten to "deepseek-<rest>".
        return "deepseek-" + dotted.slice(2).join(".");
    }
    // Otherwise drop the first two dot segments when there are more than two.
    return dotted.length > 2 ? dotted.slice(2).join(".") : providerModel;
}
|
|
83
|
-
// Fallback pattern lists for inferring modalities and tool support
// Each list holds substrings tested against the normalized model id via
// modelMatches (plain String.includes, no regex).
const IMAGE_INPUT_MODELS = ["image"]; // fallback: if model id contains 'image', supports image input
const VIDEO_INPUT_MODELS = ["video"];
const AUDIO_INPUT_MODELS = ["audio"];
const TEXT_INPUT_MODELS = ["text"];
const IMAGE_OUTPUT_MODELS = ["image"];
const VIDEO_OUTPUT_MODELS = ["video"];
const AUDIO_OUTPUT_MODELS = ["audio"];
const TEXT_OUTPUT_MODELS = ["text"];
const EMBEDDING_OUTPUT_MODELS = ["embed"];
// Model-name fragments whose presence implies tool/function-calling support.
const TOOL_SUPPORT_MODELS = ["tool", "sonnet", "opus", "nova", "palmyra", "command-r", "mistral-large", "pixtral"];
|
|
94
|
-
// True when the model id contains any of the given substrings.
function modelMatches(modelName, patterns) {
    for (const candidate of patterns) {
        if (modelName.includes(candidate)) {
            return true;
        }
    }
    return false;
}
|
|
97
|
-
/**
 * Get the full ModelCapabilities for a Bedrock model.
 * Lookup order: id-kind exception tables, exact recorded model capabilities,
 * longest-prefix family capabilities, then pattern-based inference from the name.
 */
function getModelCapabilitiesBedrock(model) {
    const lowered = model.toLowerCase();
    // Strip a full ARN down to its "<kind>/<model-id>" suffix, preferring
    // 'foundation-model/' over 'inference-profile/' when both occur.
    let normalized = lowered;
    const arnPattern = /^arn:aws:bedrock:[^:]+:[^:]*:(inference-profile|foundation-model)\/.+/i;
    if (arnPattern.test(lowered)) {
        const foundationAt = lowered.lastIndexOf('foundation-model/');
        const profileAt = lowered.lastIndexOf('inference-profile/');
        if (foundationAt >= 0) {
            normalized = lowered.substring(foundationAt);
        }
        else if (profileAt >= 0) {
            normalized = lowered.substring(profileAt);
        }
    }
    const lookupKey = extractModelLookupKey(normalized);
    // 1. Exception tables, keyed by id kind, take absolute precedence.
    if (normalized.startsWith("foundation-model/")) {
        const exception = RECORD_FOUNDATION_EXCEPTIONS[lookupKey];
        if (exception) {
            return exception;
        }
    }
    else if (normalized.startsWith("inference-profile/")) {
        const exception = RECORD_PROFILE_EXCEPTIONS[lookupKey];
        if (exception) {
            return exception;
        }
    }
    // 2. Exact recorded capabilities for this model id.
    const recorded = RECORD_MODEL_CAPABILITIES[lookupKey];
    if (recorded) {
        return recorded;
    }
    // 3. Longest-prefix family match.
    const family = findFamilyCapability(lookupKey, RECORD_FAMILY_CAPABILITIES);
    if (family) {
        return family;
    }
    // 4. Last resort: infer modalities / tool support from substrings of the
    // normalized name. Misses yield `undefined` (not false) for each flag.
    const inferredName = normalizeModelName(lookupKey);
    const has = (patterns) => modelMatches(inferredName, patterns) || undefined;
    const input = {
        text: has(TEXT_INPUT_MODELS),
        image: has(IMAGE_INPUT_MODELS),
        video: has(VIDEO_INPUT_MODELS),
        audio: has(AUDIO_INPUT_MODELS),
        embed: false
    };
    const output = {
        text: has(TEXT_OUTPUT_MODELS),
        image: has(IMAGE_OUTPUT_MODELS),
        video: has(VIDEO_OUTPUT_MODELS),
        audio: has(AUDIO_OUTPUT_MODELS),
        embed: has(EMBEDDING_OUTPUT_MODELS)
    };
    return { input, output, tool_support: has(TOOL_SUPPORT_MODELS) };
}
|
|
158
|
-
//# sourceMappingURL=bedrock.js.map
|