llm-strings 1.1.0 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -3
- package/dist/index.cjs +2 -31
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -2
- package/dist/index.d.ts +1 -2
- package/dist/index.js +1 -30
- package/dist/normalize.d.cts +1 -1
- package/dist/normalize.d.ts +1 -1
- package/dist/{provider-core-BUaKKLpd.d.cts → provider-core-DinpG40u.d.cts} +1 -1
- package/dist/{provider-core-BUaKKLpd.d.ts → provider-core-DinpG40u.d.ts} +1 -1
- package/dist/providers.cjs +188 -23
- package/dist/providers.cjs.map +1 -1
- package/dist/providers.d.cts +2 -2
- package/dist/providers.d.ts +2 -2
- package/dist/providers.js +170 -5
- package/dist/providers.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-6P5GSSNW.js +0 -176
- package/dist/chunk-6P5GSSNW.js.map +0 -1
- package/dist/chunk-RR3VXIW2.cjs +0 -176
- package/dist/chunk-RR3VXIW2.cjs.map +0 -1
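Taken together, the file list describes one API move: the provider metadata and detection helpers leave the package's root entry and are now served only from the `llm-strings/providers` sub-path, while `parse`, `build`, `normalize`, and `validate` stay on the root. A minimal migration sketch, assuming a 1.1.0 consumer imported everything from the root (as the removed re-exports in `dist/index.js` suggest); the connection string is hypothetical:

```ts
// 1.1.0 (assumed, based on the re-exports removed from dist/index.js):
// import { parse, PROVIDER_META, detectBedrockModelFamily } from "llm-strings";

// 1.1.1: the core API stays on the root entry...
import { parse, build, normalize, validate } from "llm-strings";
// ...while provider metadata and helpers move to the sub-path.
import {
  PROVIDER_META,
  MODELS,
  CANONICAL_PARAM_SPECS,
  detectBedrockModelFamily,
} from "llm-strings/providers";

// Hypothetical connection string following the README's llm:// format.
const config = parse("llm://api.openai.com/gpt-4.1?temp=0.7");
console.log(config, PROVIDER_META.length, Object.keys(MODELS).length);
```

The sections below show the same split in the README, the type declarations, and the bundled chunks.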
package/README.md
CHANGED
@@ -247,7 +247,8 @@ const url = build({
 ### AWS Bedrock with cross-region inference

 ```ts
-import { parse, normalize
+import { parse, normalize } from "llm-strings";
+import { detectBedrockModelFamily } from "llm-strings/providers";

 const config = parse(
   "llm://bedrock-runtime.us-east-1.amazonaws.com/us.anthropic.claude-sonnet-4-5-20250929-v1:0?temp=0.5&max=4096"
@@ -407,6 +408,7 @@ Returns `true` if the Bedrock model supports prompt caching (Claude and Nova mod
 Full type definitions ship with the package:

 ```ts
+// Core types from the main entry
 import type {
   LlmConnectionConfig,
   NormalizeResult,
@@ -414,12 +416,16 @@ import type {
   NormalizeOptions,
   ValidateOptions,
   ValidationIssue,
+} from "llm-strings";
+
+// Provider types from the providers sub-path
+import type {
   Provider,
   BedrockModelFamily,
   ParamSpec,
   ProviderMeta,
   CanonicalParamSpec,
-} from "llm-strings";
+} from "llm-strings/providers";
 ```

 ## Provider Metadata (for UI integrations)
@@ -427,7 +433,7 @@ import type {
 The library exports metadata useful for building UIs — provider names, brand colors, suggested models, and canonical parameter specs:

 ```ts
-import { PROVIDER_META, MODELS, CANONICAL_PARAM_SPECS } from "llm-strings";
+import { PROVIDER_META, MODELS, CANONICAL_PARAM_SPECS } from "llm-strings/providers";

 // Provider display info
 PROVIDER_META.forEach((p) => console.log(`${p.name}: ${p.host} (${p.color})`));

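The README hunk above cuts off after the `parse(...)` call. A hedged continuation of that Bedrock example, assuming `parse` returns a config object exposing the model ID as `config.model` and that `detectBedrockModelFamily` accepts a model ID string (neither signature is shown in this diff):

```ts
import { parse } from "llm-strings";
import { detectBedrockModelFamily } from "llm-strings/providers";

const config = parse(
  "llm://bedrock-runtime.us-east-1.amazonaws.com/us.anthropic.claude-sonnet-4-5-20250929-v1:0?temp=0.5&max=4096"
);

// Assumption: detectBedrockModelFamily() classifies the parsed model ID,
// e.g. into a Claude family for the cross-region "us.anthropic.*" profile.
const family = detectBedrockModelFamily(config.model);
console.log(family);
```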
package/dist/index.cjs
CHANGED
@@ -1,10 +1,5 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});

-
-
-var _chunkRR3VXIW2cjs = require('./chunk-RR3VXIW2.cjs');
-
-
 var _chunkRSUXM42Xcjs = require('./chunk-RSUXM42X.cjs');


@@ -13,35 +8,11 @@ var _chunkMGWGNZDJcjs = require('./chunk-MGWGNZDJ.cjs');


 var _chunkN6NVBE43cjs = require('./chunk-N6NVBE43.cjs');
+require('./chunk-NSCBY4VD.cjs');




-
-
-
-
-
-
-var _chunkNSCBY4VDcjs = require('./chunk-NSCBY4VD.cjs');
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-exports.ALIASES = _chunkNSCBY4VDcjs.ALIASES; exports.CANONICAL_PARAM_SPECS = _chunkRR3VXIW2cjs.CANONICAL_PARAM_SPECS; exports.MODELS = _chunkRR3VXIW2cjs.MODELS; exports.PARAM_SPECS = _chunkNSCBY4VDcjs.PARAM_SPECS; exports.PROVIDER_META = _chunkRR3VXIW2cjs.PROVIDER_META; exports.PROVIDER_PARAMS = _chunkNSCBY4VDcjs.PROVIDER_PARAMS; exports.REASONING_MODEL_UNSUPPORTED = _chunkNSCBY4VDcjs.REASONING_MODEL_UNSUPPORTED; exports.build = _chunkN6NVBE43cjs.build; exports.canHostOpenAIModels = _chunkNSCBY4VDcjs.canHostOpenAIModels; exports.detectBedrockModelFamily = _chunkNSCBY4VDcjs.detectBedrockModelFamily; exports.detectGatewaySubProvider = _chunkNSCBY4VDcjs.detectGatewaySubProvider; exports.detectProvider = _chunkNSCBY4VDcjs.detectProvider; exports.isGatewayProvider = _chunkNSCBY4VDcjs.isGatewayProvider; exports.isReasoningModel = _chunkNSCBY4VDcjs.isReasoningModel; exports.normalize = _chunkMGWGNZDJcjs.normalize; exports.parse = _chunkN6NVBE43cjs.parse; exports.validate = _chunkRSUXM42Xcjs.validate;
+exports.build = _chunkN6NVBE43cjs.build; exports.normalize = _chunkMGWGNZDJcjs.normalize; exports.parse = _chunkN6NVBE43cjs.parse; exports.validate = _chunkRSUXM42Xcjs.validate;
 //# sourceMappingURL=index.cjs.map

package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["/Users/dan/code/oss/llm-strings/dist/index.cjs"],"names":[],"mappings":"AAAA;AACE;
+{"version":3,"sources":["/Users/dan/code/oss/llm-strings/dist/index.cjs"],"names":[],"mappings":"AAAA;AACE;AACF,wDAA6B;AAC7B;AACE;AACF,wDAA6B;AAC7B;AACE;AACA;AACF,wDAA6B;AAC7B,gCAA6B;AAC7B;AACE;AACA;AACA;AACA;AACF,iLAAC","file":"/Users/dan/code/oss/llm-strings/dist/index.cjs"}

package/dist/index.d.cts
CHANGED
@@ -1,5 +1,4 @@
 export { LlmConnectionConfig, build, parse } from './parse.cjs';
 export { NormalizeChange, NormalizeOptions, NormalizeResult, normalize } from './normalize.cjs';
 export { ValidateOptions, ValidationIssue, validate } from './validate.cjs';
-
-export { CANONICAL_PARAM_SPECS, CanonicalParamSpec, MODELS, PROVIDER_META, ProviderMeta } from './providers.cjs';
+import './provider-core-DinpG40u.cjs';

package/dist/index.d.ts
CHANGED
@@ -1,5 +1,4 @@
 export { LlmConnectionConfig, build, parse } from './parse.js';
 export { NormalizeChange, NormalizeOptions, NormalizeResult, normalize } from './normalize.js';
 export { ValidateOptions, ValidationIssue, validate } from './validate.js';
-
-export { CANONICAL_PARAM_SPECS, CanonicalParamSpec, MODELS, PROVIDER_META, ProviderMeta } from './providers.js';
+import './provider-core-DinpG40u.js';

package/dist/index.js
CHANGED
@@ -1,8 +1,3 @@
-import {
-  CANONICAL_PARAM_SPECS,
-  MODELS,
-  PROVIDER_META
-} from "./chunk-6P5GSSNW.js";
 import {
   validate
 } from "./chunk-UYMVUTLV.js";
@@ -13,33 +8,9 @@ import {
   build,
   parse
 } from "./chunk-FCEV23OT.js";
-import {
-  ALIASES,
-  PARAM_SPECS,
-  PROVIDER_PARAMS,
-  REASONING_MODEL_UNSUPPORTED,
-  canHostOpenAIModels,
-  detectBedrockModelFamily,
-  detectGatewaySubProvider,
-  detectProvider,
-  isGatewayProvider,
-  isReasoningModel
-} from "./chunk-XID353H7.js";
+import "./chunk-XID353H7.js";
 export {
-  ALIASES,
-  CANONICAL_PARAM_SPECS,
-  MODELS,
-  PARAM_SPECS,
-  PROVIDER_META,
-  PROVIDER_PARAMS,
-  REASONING_MODEL_UNSUPPORTED,
   build,
-  canHostOpenAIModels,
-  detectBedrockModelFamily,
-  detectGatewaySubProvider,
-  detectProvider,
-  isGatewayProvider,
-  isReasoningModel,
   normalize,
   parse,
   validate

package/dist/normalize.d.cts
CHANGED

@@ -50,4 +50,4 @@ declare const CACHE_TTLS: Record<Provider, string[] | undefined>;
 /** Match a duration expression like "5m", "1h", "30m". */
 declare const DURATION_RE: RegExp;

-export { ALIASES as A, type BedrockModelFamily as B, CACHE_TTLS as C, DURATION_RE as D,
+export { ALIASES as A, type BedrockModelFamily as B, CACHE_TTLS as C, DURATION_RE as D, type Provider as P, REASONING_MODEL_UNSUPPORTED as R, CACHE_VALUES as a, PARAM_SPECS as b, PROVIDER_PARAMS as c, type ParamSpec as d, bedrockSupportsCaching as e, canHostOpenAIModels as f, detectBedrockModelFamily as g, detectGatewaySubProvider as h, detectProvider as i, isGatewayProvider as j, isReasoningModel as k };

package/dist/normalize.d.ts
CHANGED

@@ -50,4 +50,4 @@ declare const CACHE_TTLS: Record<Provider, string[] | undefined>;
 /** Match a duration expression like "5m", "1h", "30m". */
 declare const DURATION_RE: RegExp;

-export { ALIASES as A, type BedrockModelFamily as B, CACHE_TTLS as C, DURATION_RE as D,
+export { ALIASES as A, type BedrockModelFamily as B, CACHE_TTLS as C, DURATION_RE as D, type Provider as P, REASONING_MODEL_UNSUPPORTED as R, CACHE_VALUES as a, PARAM_SPECS as b, PROVIDER_PARAMS as c, type ParamSpec as d, bedrockSupportsCaching as e, canHostOpenAIModels as f, detectBedrockModelFamily as g, detectGatewaySubProvider as h, detectProvider as i, isGatewayProvider as j, isReasoningModel as k };

package/dist/providers.cjs
CHANGED
@@ -2,11 +2,6 @@



-var _chunkRR3VXIW2cjs = require('./chunk-RR3VXIW2.cjs');
-
-
-
-



@@ -20,22 +15,192 @@ var _chunkRR3VXIW2cjs = require('./chunk-RR3VXIW2.cjs');

 var _chunkNSCBY4VDcjs = require('./chunk-NSCBY4VD.cjs');

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/provider-meta.ts
+var PROVIDER_META = [
+  { id: "openai", name: "OpenAI", host: "api.openai.com", color: "#10a37f" },
+  { id: "anthropic", name: "Anthropic", host: "api.anthropic.com", color: "#e8956a" },
+  { id: "google", name: "Google", host: "generativelanguage.googleapis.com", color: "#4285f4" },
+  { id: "mistral", name: "Mistral", host: "api.mistral.ai", color: "#ff7000" },
+  { id: "cohere", name: "Cohere", host: "api.cohere.com", color: "#39594d" },
+  { id: "bedrock", name: "Bedrock", host: "bedrock-runtime.us-east-1.amazonaws.com", color: "#ff9900" },
+  { id: "openrouter", name: "OpenRouter", host: "openrouter.ai", color: "#818cf8" },
+  { id: "vercel", name: "Vercel", host: "gateway.ai.vercel.app", color: "#ededed" }
+];
+var MODELS = {
+  openai: [
+    "gpt-5.2",
+    "gpt-5.2-pro",
+    "gpt-4.1",
+    "gpt-4.1-mini",
+    "gpt-4.1-nano",
+    "o3",
+    "o3-mini",
+    "o4-mini",
+    "o1-pro"
+  ],
+  anthropic: [
+    "claude-opus-4-6",
+    "claude-sonnet-4-6",
+    "claude-sonnet-4-5",
+    "claude-haiku-4-5"
+  ],
+  google: [
+    "gemini-3-pro-preview",
+    "gemini-3-flash-preview",
+    "gemini-2.5-pro",
+    "gemini-2.5-flash"
+  ],
+  mistral: [
+    "mistral-large-latest",
+    "mistral-medium-latest",
+    "mistral-small-latest",
+    "codestral-latest",
+    "magistral-medium-latest"
+  ],
+  cohere: [
+    "command-a-03-2025",
+    "command-r-plus-08-2024",
+    "command-r-08-2024",
+    "command-r7b-12-2024"
+  ],
+  bedrock: [
+    "anthropic.claude-opus-4-6-v1",
+    "anthropic.claude-sonnet-4-6-v1",
+    "anthropic.claude-haiku-4-5-v1",
+    "amazon.nova-pro-v1",
+    "amazon.nova-lite-v1",
+    "meta.llama3-70b-instruct-v1:0"
+  ],
+  openrouter: [
+    "openai/gpt-5.2",
+    "anthropic/claude-opus-4-6",
+    "google/gemini-2.5-pro",
+    "mistral/mistral-large-latest"
+  ],
+  vercel: [
+    "openai/gpt-5.2",
+    "anthropic/claude-opus-4-6",
+    "google/gemini-2.5-pro",
+    "google/gemini-3-pro-preview",
+    "google/gemini-3-flash-preview",
+    "mistral/mistral-large-latest",
+    "qwen/qwen2.5-pro"
+  ]
+};
+var CANONICAL_PARAM_SPECS = {
+  openai: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
+  },
+  anthropic: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["low", "medium", "high", "max"], default: "medium", description: "Thinking effort" },
+    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
+    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
+  },
+  google: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Candidate count" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    seed: { type: "number", default: "", description: "Random seed" }
+  },
+  mistral: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    safe_prompt: { type: "boolean", default: false, description: "Enable safe prompt" },
+    min_tokens: { type: "number", min: 0, default: 0, description: "Minimum tokens" }
+  },
+  cohere: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling (p)" },
+    top_k: { type: "number", min: 0, max: 500, default: 40, description: "Top-K sampling (k)" },
+    frequency_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    seed: { type: "number", default: "", description: "Random seed" }
+  },
+  bedrock: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
+    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
+  },
+  openrouter: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
+  },
+  vercel: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
+  }
+};
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+exports.ALIASES = _chunkNSCBY4VDcjs.ALIASES; exports.CACHE_TTLS = _chunkNSCBY4VDcjs.CACHE_TTLS; exports.CACHE_VALUES = _chunkNSCBY4VDcjs.CACHE_VALUES; exports.CANONICAL_PARAM_SPECS = CANONICAL_PARAM_SPECS; exports.DURATION_RE = _chunkNSCBY4VDcjs.DURATION_RE; exports.MODELS = MODELS; exports.PARAM_SPECS = _chunkNSCBY4VDcjs.PARAM_SPECS; exports.PROVIDER_META = PROVIDER_META; exports.PROVIDER_PARAMS = _chunkNSCBY4VDcjs.PROVIDER_PARAMS; exports.REASONING_MODEL_UNSUPPORTED = _chunkNSCBY4VDcjs.REASONING_MODEL_UNSUPPORTED; exports.bedrockSupportsCaching = _chunkNSCBY4VDcjs.bedrockSupportsCaching; exports.canHostOpenAIModels = _chunkNSCBY4VDcjs.canHostOpenAIModels; exports.detectBedrockModelFamily = _chunkNSCBY4VDcjs.detectBedrockModelFamily; exports.detectGatewaySubProvider = _chunkNSCBY4VDcjs.detectGatewaySubProvider; exports.detectProvider = _chunkNSCBY4VDcjs.detectProvider; exports.isGatewayProvider = _chunkNSCBY4VDcjs.isGatewayProvider; exports.isReasoningModel = _chunkNSCBY4VDcjs.isReasoningModel;
 //# sourceMappingURL=providers.cjs.map

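Most of this hunk is `src/provider-meta.ts` being inlined into the providers chunk. As a consumption sketch, here is how a UI might walk `CANONICAL_PARAM_SPECS`; the field names (`type`, `min`, `max`, `values`, `default`, `description`) come from the inlined specs above, while the rendering loop itself is illustrative:

```ts
import { CANONICAL_PARAM_SPECS } from "llm-strings/providers";

// Describe each Anthropic parameter from its canonical spec.
for (const [name, spec] of Object.entries(CANONICAL_PARAM_SPECS.anthropic)) {
  if (spec.type === "enum") {
    console.log(`${name}: one of ${spec.values?.join(", ")} (default ${spec.default})`);
  } else if (spec.type === "number") {
    console.log(`${name}: range ${spec.min}..${spec.max} (default ${spec.default})`);
  } else {
    console.log(`${name}: ${spec.type} (default ${JSON.stringify(spec.default)})`);
  }
}
```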
package/dist/providers.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["/Users/dan/code/oss/llm-strings/dist/providers.cjs"],"names":[],"mappings":"AAAA;AACE;AACA;AACA;AACF,wDAA6B;AAC7B;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wiCAAC","file":"/Users/dan/code/oss/llm-strings/dist/providers.cjs"}
+{"version":3,"sources":["/Users/dan/code/oss/llm-strings/dist/providers.cjs","../src/provider-meta.ts"],"names":[],"mappings":"…","file":"/Users/dan/code/oss/llm-strings/dist/providers.cjs","sourcesContent":[null,"…"]}

package/dist/providers.d.cts
CHANGED
@@ -1,5 +1,5 @@
-import {
-export { A as ALIASES, B as BedrockModelFamily, C as CACHE_TTLS,
+import { P as Provider } from './provider-core-DinpG40u.cjs';
+export { A as ALIASES, B as BedrockModelFamily, C as CACHE_TTLS, a as CACHE_VALUES, D as DURATION_RE, b as PARAM_SPECS, c as PROVIDER_PARAMS, d as ParamSpec, R as REASONING_MODEL_UNSUPPORTED, e as bedrockSupportsCaching, f as canHostOpenAIModels, g as detectBedrockModelFamily, h as detectGatewaySubProvider, i as detectProvider, j as isGatewayProvider, k as isReasoningModel } from './provider-core-DinpG40u.cjs';

 interface ProviderMeta {
   /** Provider identifier — matches the Provider union type. */

package/dist/providers.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import {
-export { A as ALIASES, B as BedrockModelFamily, C as CACHE_TTLS,
+import { P as Provider } from './provider-core-DinpG40u.js';
+export { A as ALIASES, B as BedrockModelFamily, C as CACHE_TTLS, a as CACHE_VALUES, D as DURATION_RE, b as PARAM_SPECS, c as PROVIDER_PARAMS, d as ParamSpec, R as REASONING_MODEL_UNSUPPORTED, e as bedrockSupportsCaching, f as canHostOpenAIModels, g as detectBedrockModelFamily, h as detectGatewaySubProvider, i as detectProvider, j as isGatewayProvider, k as isReasoningModel } from './provider-core-DinpG40u.js';

 interface ProviderMeta {
   /** Provider identifier — matches the Provider union type. */

package/dist/providers.js
CHANGED
@@ -1,8 +1,3 @@
-import {
-  CANONICAL_PARAM_SPECS,
-  MODELS,
-  PROVIDER_META
-} from "./chunk-6P5GSSNW.js";
 import {
   ALIASES,
   CACHE_TTLS,
@@ -19,6 +14,176 @@ import {
   isGatewayProvider,
   isReasoningModel
 } from "./chunk-XID353H7.js";
+
+// src/provider-meta.ts
+var PROVIDER_META = [
+  { id: "openai", name: "OpenAI", host: "api.openai.com", color: "#10a37f" },
+  { id: "anthropic", name: "Anthropic", host: "api.anthropic.com", color: "#e8956a" },
+  { id: "google", name: "Google", host: "generativelanguage.googleapis.com", color: "#4285f4" },
+  { id: "mistral", name: "Mistral", host: "api.mistral.ai", color: "#ff7000" },
+  { id: "cohere", name: "Cohere", host: "api.cohere.com", color: "#39594d" },
+  { id: "bedrock", name: "Bedrock", host: "bedrock-runtime.us-east-1.amazonaws.com", color: "#ff9900" },
+  { id: "openrouter", name: "OpenRouter", host: "openrouter.ai", color: "#818cf8" },
+  { id: "vercel", name: "Vercel", host: "gateway.ai.vercel.app", color: "#ededed" }
+];
+var MODELS = {
+  openai: [
+    "gpt-5.2",
+    "gpt-5.2-pro",
+    "gpt-4.1",
+    "gpt-4.1-mini",
+    "gpt-4.1-nano",
+    "o3",
+    "o3-mini",
+    "o4-mini",
+    "o1-pro"
+  ],
+  anthropic: [
+    "claude-opus-4-6",
+    "claude-sonnet-4-6",
+    "claude-sonnet-4-5",
+    "claude-haiku-4-5"
+  ],
+  google: [
+    "gemini-3-pro-preview",
+    "gemini-3-flash-preview",
+    "gemini-2.5-pro",
+    "gemini-2.5-flash"
+  ],
+  mistral: [
+    "mistral-large-latest",
+    "mistral-medium-latest",
+    "mistral-small-latest",
+    "codestral-latest",
+    "magistral-medium-latest"
+  ],
+  cohere: [
+    "command-a-03-2025",
+    "command-r-plus-08-2024",
+    "command-r-08-2024",
+    "command-r7b-12-2024"
+  ],
+  bedrock: [
+    "anthropic.claude-opus-4-6-v1",
+    "anthropic.claude-sonnet-4-6-v1",
+    "anthropic.claude-haiku-4-5-v1",
+    "amazon.nova-pro-v1",
+    "amazon.nova-lite-v1",
+    "meta.llama3-70b-instruct-v1:0"
+  ],
+  openrouter: [
+    "openai/gpt-5.2",
+    "anthropic/claude-opus-4-6",
+    "google/gemini-2.5-pro",
+    "mistral/mistral-large-latest"
+  ],
+  vercel: [
+    "openai/gpt-5.2",
+    "anthropic/claude-opus-4-6",
+    "google/gemini-2.5-pro",
+    "google/gemini-3-pro-preview",
+    "google/gemini-3-flash-preview",
+    "mistral/mistral-large-latest",
+    "qwen/qwen2.5-pro"
+  ]
+};
+var CANONICAL_PARAM_SPECS = {
+  openai: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
+  },
+  anthropic: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["low", "medium", "high", "max"], default: "medium", description: "Thinking effort" },
+    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
+    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
+  },
+  google: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Candidate count" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    seed: { type: "number", default: "", description: "Random seed" }
+  },
+  mistral: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    safe_prompt: { type: "boolean", default: false, description: "Enable safe prompt" },
+    min_tokens: { type: "number", min: 0, default: 0, description: "Minimum tokens" }
+  },
+  cohere: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling (p)" },
+    top_k: { type: "number", min: 0, max: 500, default: 40, description: "Top-K sampling (k)" },
+    frequency_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    seed: { type: "number", default: "", description: "Random seed" }
+  },
+  bedrock: {
+    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
+    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
+  },
+  openrouter: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
+  },
+  vercel: {
+    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
+    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
+    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
+    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
+    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
+    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
+    stop: { type: "string", default: "", description: "Stop sequences" },
+    n: { type: "number", min: 1, default: 1, description: "Completions count" },
+    seed: { type: "number", default: "", description: "Random seed" },
+    stream: { type: "boolean", default: false, description: "Stream response" },
+    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
+  }
+};
 export {
   ALIASES,
   CACHE_TTLS,
package/dist/providers.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
+
{"version":3,"sources":["../src/provider-meta.ts"],"sourcesContent":["import type { Provider } from \"./provider-core.js\";\n\n/* ------------------------------------------------------------------ */\n/* UI-consumable metadata for 3rd-party integrations */\n/* ------------------------------------------------------------------ */\n\nexport interface ProviderMeta {\n /** Provider identifier — matches the Provider union type. */\n id: Provider;\n /** Human-readable display name. */\n name: string;\n /** Default / canonical API hostname. */\n host: string;\n /** Brand color as a CSS hex value. */\n color: string;\n}\n\nexport const PROVIDER_META: ProviderMeta[] = [\n { id: \"openai\", name: \"OpenAI\", host: \"api.openai.com\", color: \"#10a37f\" },\n { id: \"anthropic\", name: \"Anthropic\", host: \"api.anthropic.com\", color: \"#e8956a\" },\n { id: \"google\", name: \"Google\", host: \"generativelanguage.googleapis.com\", color: \"#4285f4\" },\n { id: \"mistral\", name: \"Mistral\", host: \"api.mistral.ai\", color: \"#ff7000\" },\n { id: \"cohere\", name: \"Cohere\", host: \"api.cohere.com\", color: \"#39594d\" },\n { id: \"bedrock\", name: \"Bedrock\", host: \"bedrock-runtime.us-east-1.amazonaws.com\", color: \"#ff9900\" },\n { id: \"openrouter\", name: \"OpenRouter\", host: \"openrouter.ai\", color: \"#818cf8\" },\n { id: \"vercel\", name: \"Vercel\", host: \"gateway.ai.vercel.app\", color: \"#ededed\" },\n];\n\n/**\n * Suggested / common model IDs per provider, ordered by recency.\n * Not exhaustive — providers add models frequently.\n */\nexport const MODELS: Record<Provider, string[]> = {\n openai: [\n \"gpt-5.2\", \"gpt-5.2-pro\",\n \"gpt-4.1\", \"gpt-4.1-mini\", \"gpt-4.1-nano\",\n \"o3\", \"o3-mini\", \"o4-mini\", \"o1-pro\",\n ],\n anthropic: [\n \"claude-opus-4-6\", \"claude-sonnet-4-6\",\n \"claude-sonnet-4-5\", \"claude-haiku-4-5\",\n ],\n google: [\n \"gemini-3-pro-preview\", \"gemini-3-flash-preview\",\n \"gemini-2.5-pro\", \"gemini-2.5-flash\",\n ],\n mistral: [\n \"mistral-large-latest\", \"mistral-medium-latest\",\n \"mistral-small-latest\", \"codestral-latest\",\n \"magistral-medium-latest\",\n ],\n cohere: [\n \"command-a-03-2025\",\n \"command-r-plus-08-2024\", \"command-r-08-2024\",\n \"command-r7b-12-2024\",\n ],\n bedrock: [\n \"anthropic.claude-opus-4-6-v1\", \"anthropic.claude-sonnet-4-6-v1\",\n \"anthropic.claude-haiku-4-5-v1\",\n \"amazon.nova-pro-v1\", \"amazon.nova-lite-v1\",\n \"meta.llama3-70b-instruct-v1:0\",\n ],\n openrouter: [\n \"openai/gpt-5.2\", \"anthropic/claude-opus-4-6\",\n \"google/gemini-2.5-pro\", \"mistral/mistral-large-latest\",\n ],\n vercel: [\n \"openai/gpt-5.2\", \"anthropic/claude-opus-4-6\",\n \"google/gemini-2.5-pro\", \"google/gemini-3-pro-preview\",\n \"google/gemini-3-flash-preview\", \"mistral/mistral-large-latest\",\n \"qwen/qwen2.5-pro\",\n ],\n};\n\n/**\n * Canonical parameter spec — keyed by canonical (snake_case) param names\n * with defaults and descriptions for UI consumption.\n */\nexport interface CanonicalParamSpec {\n type: \"number\" | \"string\" | \"boolean\" | \"enum\";\n min?: number;\n max?: number;\n values?: string[];\n default?: string | number | boolean;\n description?: string;\n}\n\nexport const CANONICAL_PARAM_SPECS: Record<Provider, Record<string, CanonicalParamSpec>> = {\n openai: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", 
min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"], default: \"medium\", description: \"Reasoning effort\" },\n },\n anthropic: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"low\", \"medium\", \"high\", \"max\"], default: \"medium\", description: \"Thinking effort\" },\n cache: { type: \"enum\", values: [\"ephemeral\"], default: \"ephemeral\", description: \"Cache control\" },\n cache_ttl: { type: \"enum\", values: [\"5m\", \"1h\"], default: \"5m\", description: \"Cache TTL\" },\n },\n google: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Candidate count\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n },\n mistral: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n safe_prompt: { type: \"boolean\", default: false, 
description: \"Enable safe prompt\" },\n min_tokens: { type: \"number\", min: 0, default: 0, description: \"Minimum tokens\" },\n },\n cohere: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling (p)\" },\n top_k: { type: \"number\", min: 0, max: 500, default: 40, description: \"Top-K sampling (k)\" },\n frequency_penalty: { type: \"number\", min: 0, max: 1, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: 0, max: 1, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n },\n bedrock: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n cache: { type: \"enum\", values: [\"ephemeral\"], default: \"ephemeral\", description: \"Cache control\" },\n cache_ttl: { type: \"enum\", values: [\"5m\", \"1h\"], default: \"5m\", description: \"Cache TTL\" },\n },\n openrouter: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"], default: \"medium\", description: \"Reasoning effort\" },\n },\n vercel: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", 
default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"], default: \"medium\", description: \"Reasoning effort\" },\n },\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAiBO,IAAM,gBAAgC;AAAA,EAC3C,EAAE,IAAI,UAAc,MAAM,UAAc,MAAM,kBAA2C,OAAO,UAAU;AAAA,EAC1G,EAAE,IAAI,aAAc,MAAM,aAAe,MAAM,qBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,UAAc,MAAM,UAAe,MAAM,qCAA4C,OAAO,UAAU;AAAA,EAC5G,EAAE,IAAI,WAAc,MAAM,WAAe,MAAM,kBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,UAAc,MAAM,UAAe,MAAM,kBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,WAAc,MAAM,WAAe,MAAM,2CAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,cAAc,MAAM,cAAe,MAAM,iBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,UAAc,MAAM,UAAe,MAAM,yBAA2C,OAAO,UAAU;AAC7G;AAMO,IAAM,SAAqC;AAAA,EAChD,QAAQ;AAAA,IACN;AAAA,IAAW;AAAA,IACX;AAAA,IAAW;AAAA,IAAgB;AAAA,IAC3B;AAAA,IAAM;AAAA,IAAW;AAAA,IAAW;AAAA,EAC9B;AAAA,EACA,WAAW;AAAA,IACT;AAAA,IAAmB;AAAA,IACnB;AAAA,IAAqB;AAAA,EACvB;AAAA,EACA,QAAQ;AAAA,IACN;AAAA,IAAwB;AAAA,IACxB;AAAA,IAAkB;AAAA,EACpB;AAAA,EACA,SAAS;AAAA,IACP;AAAA,IAAwB;AAAA,IACxB;AAAA,IAAwB;AAAA,IACxB;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN;AAAA,IACA;AAAA,IAA0B;AAAA,IAC1B;AAAA,EACF;AAAA,EACA,SAAS;AAAA,IACP;AAAA,IAAgC;AAAA,IAChC;AAAA,IACA;AAAA,IAAsB;AAAA,IACtB;AAAA,EACF;AAAA,EACA,YAAY;AAAA,IACV;AAAA,IAAkB;AAAA,IAClB;AAAA,IAAyB;AAAA,EAC3B;AAAA,EACA,QAAQ;AAAA,IACN;AAAA,IAAkB;AAAA,IAClB;AAAA,IAAyB;AAAA,IACzB;AAAA,IAAiC;AAAA,IACjC;AAAA,EACF;AACF;AAeO,IAAM,wBAA8E;AAAA,EACzF,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,QAAmB,EAAE,MAAM,QAAQ,QAAQ,CAAC,QAAQ,WAAW,OAAO,UAAU,QAAQ,OAAO,GAAG,SAAS,UAAU,aAAa,mBAAmB;AAAA,EACvJ;AAAA,EACA,WAAW;AAAA,IACT,aAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,KAAM,aAAa,sBAAsB;AAAA,IAClG,YAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,MAAM,aAAa,wBAAwB;AAAA,IACpG,OAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,GAAM,aAAa,mBAAmB;AAAA,IAC/F,OAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC7F,MAAa,EAAE,MAAM,UAA4B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC9F,QAAa,EAAE,MAAM,WAA4B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IAChG,QAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,OAAO,UAAU,QAAQ,KAAK,GAAG,SAAS,UAAU,aAAa,kBAAkB;AAAA,IACzH,OAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,WAAW,GAAK,SAAS,aAAa,aAAa,gBAAgB;AAAA,IACzG,WAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,MAAM,IAAI,GAAM,SAAS,MAAa,aAAa,YAAY;AAAA,EACvG;AAAA,EACA,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACpG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE
,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,kBAAkB;AAAA,IACtG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,EACpG;AAAA,EACA,SAAS;AAAA,IACP,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,aAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,qBAAqB;AAAA,IAC1G,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,GAAM,aAAa,iBAAiB;AAAA,EACtG;AAAA,EACA,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,KAAM,aAAa,sBAAsB;AAAA,IAC9G,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAiB,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAChH,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,GAAM,aAAa,uBAAuB;AAAA,IAC/G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,KAAS,SAAS,IAAM,aAAa,qBAAqB;AAAA,IAC7G,mBAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,GAAM,aAAa,2BAA2B;AAAA,IACnH,kBAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,GAAM,aAAa,2BAA2B;AAAA,IACnH,MAAmB,EAAE,MAAM,UAAkC,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC1G,QAAmB,EAAE,MAAM,WAAkC,SAAS,OAAO,aAAa,kBAAkB;AAAA,IAC5G,MAAmB,EAAE,MAAM,UAAkC,SAAS,IAAM,aAAa,cAAc;AAAA,EACzG;AAAA,EACA,SAAS;AAAA,IACP,aAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,KAAM,aAAa,sBAAsB;AAAA,IAClG,YAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,MAAM,aAAa,wBAAwB;AAAA,IACpG,OAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,GAAM,aAAa,mBAAmB;AAAA,IAC/F,OAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC7F,MAAa,EAAE,MAAM,UAA4B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC9F,QAAa,EAAE,MAAM,WAA4B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IAChG,OAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,WAAW,GAAK,SAAS,aAAa,aAAa,gBAAgB;AAAA,IACzG,WAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,MAAM,IAAI,GAAM,SAAS,MAAa,aAAa,YAAY;AAAA,EACvG;AAAA,EACA,YAAY;AAAA,IACV,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACpG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,QAAmB,EAAE,MAAM,QAAQ,QAAQ,CAAC,QAAQ,WAAW,OAAO,UAAU,QAAQ,OAAO,GAAG,SAAS,UAAU,aAAa,mBAAmB;AAAA,EACvJ;AAAA,EACA,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACpG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,QAAmB,EAAE,MAAM,QAAQ,QAAQ,
CAAC,QAAQ,WAAW,OAAO,UAAU,QAAQ,OAAO,GAAG,SAAS,UAAU,aAAa,mBAAmB;AAAA,EACvJ;AACF;","names":[]}
package/package.json
CHANGED
package/dist/chunk-6P5GSSNW.js
DELETED
@@ -1,176 +0,0 @@
-// src/provider-meta.ts
-var PROVIDER_META = [
-  { id: "openai", name: "OpenAI", host: "api.openai.com", color: "#10a37f" },
-  { id: "anthropic", name: "Anthropic", host: "api.anthropic.com", color: "#e8956a" },
-  { id: "google", name: "Google", host: "generativelanguage.googleapis.com", color: "#4285f4" },
-  { id: "mistral", name: "Mistral", host: "api.mistral.ai", color: "#ff7000" },
-  { id: "cohere", name: "Cohere", host: "api.cohere.com", color: "#39594d" },
-  { id: "bedrock", name: "Bedrock", host: "bedrock-runtime.us-east-1.amazonaws.com", color: "#ff9900" },
-  { id: "openrouter", name: "OpenRouter", host: "openrouter.ai", color: "#818cf8" },
-  { id: "vercel", name: "Vercel", host: "gateway.ai.vercel.app", color: "#ededed" }
-];
-var MODELS = {
-  openai: [
-    "gpt-5.2",
-    "gpt-5.2-pro",
-    "gpt-4.1",
-    "gpt-4.1-mini",
-    "gpt-4.1-nano",
-    "o3",
-    "o3-mini",
-    "o4-mini",
-    "o1-pro"
-  ],
-  anthropic: [
-    "claude-opus-4-6",
-    "claude-sonnet-4-6",
-    "claude-sonnet-4-5",
-    "claude-haiku-4-5"
-  ],
-  google: [
-    "gemini-3-pro-preview",
-    "gemini-3-flash-preview",
-    "gemini-2.5-pro",
-    "gemini-2.5-flash"
-  ],
-  mistral: [
-    "mistral-large-latest",
-    "mistral-medium-latest",
-    "mistral-small-latest",
-    "codestral-latest",
-    "magistral-medium-latest"
-  ],
-  cohere: [
-    "command-a-03-2025",
-    "command-r-plus-08-2024",
-    "command-r-08-2024",
-    "command-r7b-12-2024"
-  ],
-  bedrock: [
-    "anthropic.claude-opus-4-6-v1",
-    "anthropic.claude-sonnet-4-6-v1",
-    "anthropic.claude-haiku-4-5-v1",
-    "amazon.nova-pro-v1",
-    "amazon.nova-lite-v1",
-    "meta.llama3-70b-instruct-v1:0"
-  ],
-  openrouter: [
-    "openai/gpt-5.2",
-    "anthropic/claude-opus-4-6",
-    "google/gemini-2.5-pro",
-    "mistral/mistral-large-latest"
-  ],
-  vercel: [
-    "openai/gpt-5.2",
-    "anthropic/claude-opus-4-6",
-    "google/gemini-2.5-pro",
-    "google/gemini-3-pro-preview",
-    "google/gemini-3-flash-preview",
-    "mistral/mistral-large-latest",
-    "qwen/qwen2.5-pro"
-  ]
-};
-var CANONICAL_PARAM_SPECS = {
-  openai: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
-  },
-  anthropic: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["low", "medium", "high", "max"], default: "medium", description: "Thinking effort" },
-    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
-    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
-  },
-  google: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Candidate count" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    seed: { type: "number", default: "", description: "Random seed" }
-  },
-  mistral: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    safe_prompt: { type: "boolean", default: false, description: "Enable safe prompt" },
-    min_tokens: { type: "number", min: 0, default: 0, description: "Minimum tokens" }
-  },
-  cohere: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling (p)" },
-    top_k: { type: "number", min: 0, max: 500, default: 40, description: "Top-K sampling (k)" },
-    frequency_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    seed: { type: "number", default: "", description: "Random seed" }
-  },
-  bedrock: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
-    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
-  },
-  openrouter: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
-  },
-  vercel: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
-  }
-};
-
-export {
-  PROVIDER_META,
-  MODELS,
-  CANONICAL_PARAM_SPECS
-};
-//# sourceMappingURL=chunk-6P5GSSNW.js.map
package/dist/chunk-6P5GSSNW.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/provider-meta.ts"],"sourcesContent":["import type { Provider } from \"./provider-core.js\";\n\n/* ------------------------------------------------------------------ */\n/* UI-consumable metadata for 3rd-party integrations */\n/* ------------------------------------------------------------------ */\n\nexport interface ProviderMeta {\n /** Provider identifier — matches the Provider union type. */\n id: Provider;\n /** Human-readable display name. */\n name: string;\n /** Default / canonical API hostname. */\n host: string;\n /** Brand color as a CSS hex value. */\n color: string;\n}\n\nexport const PROVIDER_META: ProviderMeta[] = [\n { id: \"openai\", name: \"OpenAI\", host: \"api.openai.com\", color: \"#10a37f\" },\n { id: \"anthropic\", name: \"Anthropic\", host: \"api.anthropic.com\", color: \"#e8956a\" },\n { id: \"google\", name: \"Google\", host: \"generativelanguage.googleapis.com\", color: \"#4285f4\" },\n { id: \"mistral\", name: \"Mistral\", host: \"api.mistral.ai\", color: \"#ff7000\" },\n { id: \"cohere\", name: \"Cohere\", host: \"api.cohere.com\", color: \"#39594d\" },\n { id: \"bedrock\", name: \"Bedrock\", host: \"bedrock-runtime.us-east-1.amazonaws.com\", color: \"#ff9900\" },\n { id: \"openrouter\", name: \"OpenRouter\", host: \"openrouter.ai\", color: \"#818cf8\" },\n { id: \"vercel\", name: \"Vercel\", host: \"gateway.ai.vercel.app\", color: \"#ededed\" },\n];\n\n/**\n * Suggested / common model IDs per provider, ordered by recency.\n * Not exhaustive — providers add models frequently.\n */\nexport const MODELS: Record<Provider, string[]> = {\n openai: [\n \"gpt-5.2\", \"gpt-5.2-pro\",\n \"gpt-4.1\", \"gpt-4.1-mini\", \"gpt-4.1-nano\",\n \"o3\", \"o3-mini\", \"o4-mini\", \"o1-pro\",\n ],\n anthropic: [\n \"claude-opus-4-6\", \"claude-sonnet-4-6\",\n \"claude-sonnet-4-5\", \"claude-haiku-4-5\",\n ],\n google: [\n \"gemini-3-pro-preview\", \"gemini-3-flash-preview\",\n \"gemini-2.5-pro\", \"gemini-2.5-flash\",\n ],\n mistral: [\n \"mistral-large-latest\", \"mistral-medium-latest\",\n \"mistral-small-latest\", \"codestral-latest\",\n \"magistral-medium-latest\",\n ],\n cohere: [\n \"command-a-03-2025\",\n \"command-r-plus-08-2024\", \"command-r-08-2024\",\n \"command-r7b-12-2024\",\n ],\n bedrock: [\n \"anthropic.claude-opus-4-6-v1\", \"anthropic.claude-sonnet-4-6-v1\",\n \"anthropic.claude-haiku-4-5-v1\",\n \"amazon.nova-pro-v1\", \"amazon.nova-lite-v1\",\n \"meta.llama3-70b-instruct-v1:0\",\n ],\n openrouter: [\n \"openai/gpt-5.2\", \"anthropic/claude-opus-4-6\",\n \"google/gemini-2.5-pro\", \"mistral/mistral-large-latest\",\n ],\n vercel: [\n \"openai/gpt-5.2\", \"anthropic/claude-opus-4-6\",\n \"google/gemini-2.5-pro\", \"google/gemini-3-pro-preview\",\n \"google/gemini-3-flash-preview\", \"mistral/mistral-large-latest\",\n \"qwen/qwen2.5-pro\",\n ],\n};\n\n/**\n * Canonical parameter spec — keyed by canonical (snake_case) param names\n * with defaults and descriptions for UI consumption.\n */\nexport interface CanonicalParamSpec {\n type: \"number\" | \"string\" | \"boolean\" | \"enum\";\n min?: number;\n max?: number;\n values?: string[];\n default?: string | number | boolean;\n description?: string;\n}\n\nexport const CANONICAL_PARAM_SPECS: Record<Provider, Record<string, CanonicalParamSpec>> = {\n openai: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", 
min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"], default: \"medium\", description: \"Reasoning effort\" },\n },\n anthropic: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"low\", \"medium\", \"high\", \"max\"], default: \"medium\", description: \"Thinking effort\" },\n cache: { type: \"enum\", values: [\"ephemeral\"], default: \"ephemeral\", description: \"Cache control\" },\n cache_ttl: { type: \"enum\", values: [\"5m\", \"1h\"], default: \"5m\", description: \"Cache TTL\" },\n },\n google: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Candidate count\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n },\n mistral: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n safe_prompt: { type: \"boolean\", default: false, 
description: \"Enable safe prompt\" },\n min_tokens: { type: \"number\", min: 0, default: 0, description: \"Minimum tokens\" },\n },\n cohere: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling (p)\" },\n top_k: { type: \"number\", min: 0, max: 500, default: 40, description: \"Top-K sampling (k)\" },\n frequency_penalty: { type: \"number\", min: 0, max: 1, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: 0, max: 1, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n },\n bedrock: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n cache: { type: \"enum\", values: [\"ephemeral\"], default: \"ephemeral\", description: \"Cache control\" },\n cache_ttl: { type: \"enum\", values: [\"5m\", \"1h\"], default: \"5m\", description: \"Cache TTL\" },\n },\n openrouter: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"], default: \"medium\", description: \"Reasoning effort\" },\n },\n vercel: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", 
default: \"\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", default: \"\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"enum\", values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"], default: \"medium\", description: \"Reasoning effort\" },\n },\n};\n"],"mappings":";AAiBO,IAAM,gBAAgC;AAAA,EAC3C,EAAE,IAAI,UAAc,MAAM,UAAc,MAAM,kBAA2C,OAAO,UAAU;AAAA,EAC1G,EAAE,IAAI,aAAc,MAAM,aAAe,MAAM,qBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,UAAc,MAAM,UAAe,MAAM,qCAA4C,OAAO,UAAU;AAAA,EAC5G,EAAE,IAAI,WAAc,MAAM,WAAe,MAAM,kBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,UAAc,MAAM,UAAe,MAAM,kBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,WAAc,MAAM,WAAe,MAAM,2CAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,cAAc,MAAM,cAAe,MAAM,iBAA2C,OAAO,UAAU;AAAA,EAC3G,EAAE,IAAI,UAAc,MAAM,UAAe,MAAM,yBAA2C,OAAO,UAAU;AAC7G;AAMO,IAAM,SAAqC;AAAA,EAChD,QAAQ;AAAA,IACN;AAAA,IAAW;AAAA,IACX;AAAA,IAAW;AAAA,IAAgB;AAAA,IAC3B;AAAA,IAAM;AAAA,IAAW;AAAA,IAAW;AAAA,EAC9B;AAAA,EACA,WAAW;AAAA,IACT;AAAA,IAAmB;AAAA,IACnB;AAAA,IAAqB;AAAA,EACvB;AAAA,EACA,QAAQ;AAAA,IACN;AAAA,IAAwB;AAAA,IACxB;AAAA,IAAkB;AAAA,EACpB;AAAA,EACA,SAAS;AAAA,IACP;AAAA,IAAwB;AAAA,IACxB;AAAA,IAAwB;AAAA,IACxB;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN;AAAA,IACA;AAAA,IAA0B;AAAA,IAC1B;AAAA,EACF;AAAA,EACA,SAAS;AAAA,IACP;AAAA,IAAgC;AAAA,IAChC;AAAA,IACA;AAAA,IAAsB;AAAA,IACtB;AAAA,EACF;AAAA,EACA,YAAY;AAAA,IACV;AAAA,IAAkB;AAAA,IAClB;AAAA,IAAyB;AAAA,EAC3B;AAAA,EACA,QAAQ;AAAA,IACN;AAAA,IAAkB;AAAA,IAClB;AAAA,IAAyB;AAAA,IACzB;AAAA,IAAiC;AAAA,IACjC;AAAA,EACF;AACF;AAeO,IAAM,wBAA8E;AAAA,EACzF,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,QAAmB,EAAE,MAAM,QAAQ,QAAQ,CAAC,QAAQ,WAAW,OAAO,UAAU,QAAQ,OAAO,GAAG,SAAS,UAAU,aAAa,mBAAmB;AAAA,EACvJ;AAAA,EACA,WAAW;AAAA,IACT,aAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,KAAM,aAAa,sBAAsB;AAAA,IAClG,YAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,MAAM,aAAa,wBAAwB;AAAA,IACpG,OAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,GAAM,aAAa,mBAAmB;AAAA,IAC/F,OAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC7F,MAAa,EAAE,MAAM,UAA4B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC9F,QAAa,EAAE,MAAM,WAA4B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IAChG,QAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,OAAO,UAAU,QAAQ,KAAK,GAAG,SAAS,UAAU,aAAa,kBAAkB;AAAA,IACzH,OAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,WAAW,GAAK,SAAS,aAAa,aAAa,gBAAgB;AAAA,IACzG,WAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,MAAM,IAAI,GAAM,SAAS,MAAa,aAAa,YAAY;AAAA,EACvG;AAAA,EACA,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACpG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,G
AAa,SAAS,GAAM,aAAa,kBAAkB;AAAA,IACtG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,EACpG;AAAA,EACA,SAAS;AAAA,IACP,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,aAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,qBAAqB;AAAA,IAC1G,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,GAAM,aAAa,iBAAiB;AAAA,EACtG;AAAA,EACA,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,KAAM,aAAa,sBAAsB;AAAA,IAC9G,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAiB,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAChH,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,GAAM,aAAa,uBAAuB;AAAA,IAC/G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,KAAS,SAAS,IAAM,aAAa,qBAAqB;AAAA,IAC7G,mBAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,GAAM,aAAa,2BAA2B;AAAA,IACnH,kBAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAS,SAAS,GAAM,aAAa,2BAA2B;AAAA,IACnH,MAAmB,EAAE,MAAM,UAAkC,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC1G,QAAmB,EAAE,MAAM,WAAkC,SAAS,OAAO,aAAa,kBAAkB;AAAA,IAC5G,MAAmB,EAAE,MAAM,UAAkC,SAAS,IAAM,aAAa,cAAc;AAAA,EACzG;AAAA,EACA,SAAS;AAAA,IACP,aAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,KAAM,aAAa,sBAAsB;AAAA,IAClG,YAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,MAAM,aAAa,wBAAwB;AAAA,IACpG,OAAa,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAG,SAAS,GAAM,aAAa,mBAAmB;AAAA,IAC/F,OAAa,EAAE,MAAM,UAAW,KAAK,GAAW,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC7F,MAAa,EAAE,MAAM,UAA4B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IAC9F,QAAa,EAAE,MAAM,WAA4B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IAChG,OAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,WAAW,GAAK,SAAS,aAAa,aAAa,gBAAgB;AAAA,IACzG,WAAa,EAAE,MAAM,QAAQ,QAAQ,CAAC,MAAM,IAAI,GAAM,SAAS,MAAa,aAAa,YAAY;AAAA,EACvG;AAAA,EACA,YAAY;AAAA,IACV,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACpG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,QAAmB,EAAE,MAAM,QAAQ,QAAQ,CAAC,QAAQ,WAAW,OAAO,UAAU,QAAQ,OAAO,GAAG,SAAS,UAAU,aAAa,mBAAmB;AAAA,EACvJ;AAAA,EACA,QAAQ;AAAA,IACN,aAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,KAAM,aAAa,sBAAsB;AAAA,IACzG,YAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,MAAM,aAAa,wBAAwB;AAAA,IAC3G,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAG,KAAK,GAAI,SAAS,GAAM,aAAa,mBAAmB;AAAA,IACtG,OAAmB,EAAE,MAAM,UAAW,KAAK,GAAY,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACpG,mBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,kBAAmB,EAAE,MAAM,UAAW,KAAK,IAAI,KAAK,GAAG,SAAS,GAAM,aAAa,2BAA2B;AAAA,IAC9G,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,iBAAiB;AAAA,IACrG,GAAmB,EAAE,MAAM,UAAW,KAAK,GAAa,SAAS,GAAM,aAAa,oBAAoB;AAAA,IACxG,MAAmB,EAAE,MAAM,UAA6B,SAAS,IAAM,aAAa,cAAc;AAAA,IAClG,QAAmB,EAAE,MAAM,WAA6B,SAAS,OAAO,aAAa,kBAAkB;AAAA,IACvG,QAAmB,EAAE,MAAM,QAAQ,QAAQ,CAAC,QAAQ,WAAW,OA
AO,UAAU,QAAQ,OAAO,GAAG,SAAS,UAAU,aAAa,mBAAmB;AAAA,EACvJ;AACF;","names":[]}
package/dist/chunk-RR3VXIW2.cjs
DELETED
@@ -1,176 +0,0 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/provider-meta.ts
-var PROVIDER_META = [
-  { id: "openai", name: "OpenAI", host: "api.openai.com", color: "#10a37f" },
-  { id: "anthropic", name: "Anthropic", host: "api.anthropic.com", color: "#e8956a" },
-  { id: "google", name: "Google", host: "generativelanguage.googleapis.com", color: "#4285f4" },
-  { id: "mistral", name: "Mistral", host: "api.mistral.ai", color: "#ff7000" },
-  { id: "cohere", name: "Cohere", host: "api.cohere.com", color: "#39594d" },
-  { id: "bedrock", name: "Bedrock", host: "bedrock-runtime.us-east-1.amazonaws.com", color: "#ff9900" },
-  { id: "openrouter", name: "OpenRouter", host: "openrouter.ai", color: "#818cf8" },
-  { id: "vercel", name: "Vercel", host: "gateway.ai.vercel.app", color: "#ededed" }
-];
-var MODELS = {
-  openai: [
-    "gpt-5.2",
-    "gpt-5.2-pro",
-    "gpt-4.1",
-    "gpt-4.1-mini",
-    "gpt-4.1-nano",
-    "o3",
-    "o3-mini",
-    "o4-mini",
-    "o1-pro"
-  ],
-  anthropic: [
-    "claude-opus-4-6",
-    "claude-sonnet-4-6",
-    "claude-sonnet-4-5",
-    "claude-haiku-4-5"
-  ],
-  google: [
-    "gemini-3-pro-preview",
-    "gemini-3-flash-preview",
-    "gemini-2.5-pro",
-    "gemini-2.5-flash"
-  ],
-  mistral: [
-    "mistral-large-latest",
-    "mistral-medium-latest",
-    "mistral-small-latest",
-    "codestral-latest",
-    "magistral-medium-latest"
-  ],
-  cohere: [
-    "command-a-03-2025",
-    "command-r-plus-08-2024",
-    "command-r-08-2024",
-    "command-r7b-12-2024"
-  ],
-  bedrock: [
-    "anthropic.claude-opus-4-6-v1",
-    "anthropic.claude-sonnet-4-6-v1",
-    "anthropic.claude-haiku-4-5-v1",
-    "amazon.nova-pro-v1",
-    "amazon.nova-lite-v1",
-    "meta.llama3-70b-instruct-v1:0"
-  ],
-  openrouter: [
-    "openai/gpt-5.2",
-    "anthropic/claude-opus-4-6",
-    "google/gemini-2.5-pro",
-    "mistral/mistral-large-latest"
-  ],
-  vercel: [
-    "openai/gpt-5.2",
-    "anthropic/claude-opus-4-6",
-    "google/gemini-2.5-pro",
-    "google/gemini-3-pro-preview",
-    "google/gemini-3-flash-preview",
-    "mistral/mistral-large-latest",
-    "qwen/qwen2.5-pro"
-  ]
-};
-var CANONICAL_PARAM_SPECS = {
-  openai: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
-  },
-  anthropic: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["low", "medium", "high", "max"], default: "medium", description: "Thinking effort" },
-    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
-    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
-  },
-  google: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Candidate count" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    seed: { type: "number", default: "", description: "Random seed" }
-  },
-  mistral: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    safe_prompt: { type: "boolean", default: false, description: "Enable safe prompt" },
-    min_tokens: { type: "number", min: 0, default: 0, description: "Minimum tokens" }
-  },
-  cohere: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling (p)" },
-    top_k: { type: "number", min: 0, max: 500, default: 40, description: "Top-K sampling (k)" },
-    frequency_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    seed: { type: "number", default: "", description: "Random seed" }
-  },
-  bedrock: {
-    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    cache: { type: "enum", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
-    cache_ttl: { type: "enum", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
-  },
-  openrouter: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
-  },
-  vercel: {
-    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
-    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
-    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
-    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
-    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
-    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
-    stop: { type: "string", default: "", description: "Stop sequences" },
-    n: { type: "number", min: 1, default: 1, description: "Completions count" },
-    seed: { type: "number", default: "", description: "Random seed" },
-    stream: { type: "boolean", default: false, description: "Stream response" },
-    effort: { type: "enum", values: ["none", "minimal", "low", "medium", "high", "xhigh"], default: "medium", description: "Reasoning effort" }
-  }
-};
-
-
-
-
-
-exports.PROVIDER_META = PROVIDER_META; exports.MODELS = MODELS; exports.CANONICAL_PARAM_SPECS = CANONICAL_PARAM_SPECS;
-//# sourceMappingURL=chunk-RR3VXIW2.cjs.map
|
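Not part of the package diff: a minimal sketch of how a UI integration might consume the provider metadata that this removed chunk carried, assuming the same `PROVIDER_META` and `CANONICAL_PARAM_SPECS` exports are now imported from the `llm-strings/providers` entry point (as the README in this release indicates). The loop and widget mapping below are illustrative only.

```ts
import { PROVIDER_META, CANONICAL_PARAM_SPECS } from "llm-strings/providers";

for (const provider of PROVIDER_META) {
  const specs = CANONICAL_PARAM_SPECS[provider.id];
  for (const [param, spec] of Object.entries(specs)) {
    // A form builder could map spec.type to a widget:
    // "number" -> slider bounded by spec.min/spec.max,
    // "enum"   -> select populated from spec.values,
    // "boolean"-> toggle, "string" -> text input; spec.default seeds the field.
    console.log(`${provider.name} ${param}: ${spec.type} (default: ${spec.default})`);
  }
}
```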
@@ -1 +0,0 @@
-{"version":3,"sources":["/Users/dan/code/oss/llm-strings/dist/chunk-RR3VXIW2.cjs","../src/provider-meta.ts"],"names":[],"mappings":"…","file":"/Users/dan/code/oss/llm-strings/dist/chunk-RR3VXIW2.cjs","sourcesContent":[null,"…"]}
(single-line chunk-RR3VXIW2.cjs.map source map removed along with the chunk; the base64 VLQ "mappings" string and the embedded copy of src/provider-meta.ts in "sourcesContent" are elided above)