llm-strings 1.0.1 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. package/README.md +95 -14
  2. package/dist/chunk-FCEV23OT.js +37 -0
  3. package/dist/chunk-FCEV23OT.js.map +1 -0
  4. package/dist/chunk-MGWGNZDJ.cjs +116 -0
  5. package/dist/chunk-MGWGNZDJ.cjs.map +1 -0
  6. package/dist/chunk-MPIHGH6L.js +116 -0
  7. package/dist/chunk-MPIHGH6L.js.map +1 -0
  8. package/dist/chunk-N6NVBE43.cjs +37 -0
  9. package/dist/chunk-N6NVBE43.cjs.map +1 -0
  10. package/dist/chunk-NSCBY4VD.cjs +370 -0
  11. package/dist/chunk-NSCBY4VD.cjs.map +1 -0
  12. package/dist/chunk-RSUXM42X.cjs +180 -0
  13. package/dist/chunk-RSUXM42X.cjs.map +1 -0
  14. package/dist/chunk-UYMVUTLV.js +180 -0
  15. package/dist/chunk-UYMVUTLV.js.map +1 -0
  16. package/dist/chunk-XID353H7.js +370 -0
  17. package/dist/chunk-XID353H7.js.map +1 -0
  18. package/dist/index.cjs +12 -806
  19. package/dist/index.cjs.map +1 -1
  20. package/dist/index.d.cts +4 -80
  21. package/dist/index.d.ts +4 -80
  22. package/dist/index.js +11 -764
  23. package/dist/index.js.map +1 -1
  24. package/dist/normalize.cjs +8 -0
  25. package/dist/normalize.cjs.map +1 -0
  26. package/dist/normalize.d.cts +33 -0
  27. package/dist/normalize.d.ts +33 -0
  28. package/dist/normalize.js +8 -0
  29. package/dist/normalize.js.map +1 -0
  30. package/dist/parse.cjs +9 -0
  31. package/dist/parse.cjs.map +1 -0
  32. package/dist/parse.d.cts +32 -0
  33. package/dist/parse.d.ts +32 -0
  34. package/dist/parse.js +9 -0
  35. package/dist/parse.js.map +1 -0
  36. package/dist/provider-core-DinpG40u.d.cts +53 -0
  37. package/dist/provider-core-DinpG40u.d.ts +53 -0
  38. package/dist/providers.cjs +37 -392
  39. package/dist/providers.cjs.map +1 -1
  40. package/dist/providers.d.cts +4 -42
  41. package/dist/providers.d.ts +4 -42
  42. package/dist/providers.js +20 -336
  43. package/dist/providers.js.map +1 -1
  44. package/dist/validate.cjs +10 -0
  45. package/dist/validate.cjs.map +1 -0
  46. package/dist/validate.d.cts +21 -0
  47. package/dist/validate.d.ts +21 -0
  48. package/dist/validate.js +10 -0
  49. package/dist/validate.js.map +1 -0
  50. package/package.json +33 -1
@@ -0,0 +1,370 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/provider-core.ts
2
/**
 * Infer the LLM provider from a request host name.
 * Matching is substring-based and ordered so that gateway hosts
 * (openrouter, vercel) win before the providers they proxy to, and
 * bedrock/amazonaws wins before the model vendors hosted on it.
 * Host names are case-insensitive in DNS, so the input is lowercased
 * before matching (fix: previously mixed-case hosts were not detected).
 * @param {string} host - Host name, e.g. "api.openai.com".
 * @returns {string|undefined} Provider id, or undefined if unknown.
 */
function detectProvider(host) {
  const h = host.toLowerCase();
  if (h.includes("openrouter")) return "openrouter";
  if (h.includes("gateway.ai.vercel")) return "vercel";
  if (h.includes("amazonaws") || h.includes("bedrock")) return "bedrock";
  if (h.includes("openai")) return "openai";
  if (h.includes("anthropic") || h.includes("claude")) return "anthropic";
  if (h.includes("googleapis") || h.includes("google")) return "google";
  if (h.includes("mistral")) return "mistral";
  if (h.includes("cohere")) return "cohere";
  return void 0;
}
13
/**
 * Alias table: maps every accepted user-facing spelling (shorthand,
 * snake_case, camelCase, and provider-specific names) to the library's
 * canonical parameter key. Lookup keys are matched verbatim.
 */
var ALIASES = {
  // temperature
  temp: "temperature",
  // max_tokens (output-token limit; covers OpenAI, Google, and camelCase forms)
  max: "max_tokens",
  max_out: "max_tokens",
  max_output: "max_tokens",
  max_output_tokens: "max_tokens",
  max_completion_tokens: "max_tokens",
  maxOutputTokens: "max_tokens",
  maxTokens: "max_tokens",
  // top_p (nucleus sampling)
  topp: "top_p",
  topP: "top_p",
  nucleus: "top_p",
  // top_k
  topk: "top_k",
  topK: "top_k",
  // frequency_penalty (repetition_penalty is folded into this canonical key)
  freq: "frequency_penalty",
  freq_penalty: "frequency_penalty",
  frequencyPenalty: "frequency_penalty",
  repetition_penalty: "frequency_penalty",
  // presence_penalty
  pres: "presence_penalty",
  pres_penalty: "presence_penalty",
  presencePenalty: "presence_penalty",
  // stop (stop sequences)
  stop_sequences: "stop",
  stopSequences: "stop",
  stop_sequence: "stop",
  // seed (Mistral uses random_seed on the wire)
  random_seed: "seed",
  randomSeed: "seed",
  // n (number of completions; Google calls this candidateCount)
  candidateCount: "n",
  candidate_count: "n",
  num_completions: "n",
  // effort / reasoning
  reasoning_effort: "effort",
  reasoning: "effort",
  // cache (Anthropic cache_control / Bedrock cachePoint spellings)
  cache_control: "cache",
  cacheControl: "cache",
  cachePoint: "cache",
  cache_point: "cache"
};
60
/**
 * Per-provider parameter translation table.
 * Each entry maps a canonical parameter key (left) to the name the
 * provider expects on the wire (right). A key's absence means that
 * provider does not accept the parameter.
 */
var PROVIDER_PARAMS = {
  openai: {
    temperature: "temperature",
    max_tokens: "max_tokens",
    top_p: "top_p",
    frequency_penalty: "frequency_penalty",
    presence_penalty: "presence_penalty",
    stop: "stop",
    n: "n",
    seed: "seed",
    stream: "stream",
    effort: "reasoning_effort"
  },
  anthropic: {
    temperature: "temperature",
    max_tokens: "max_tokens",
    top_p: "top_p",
    top_k: "top_k",
    stop: "stop_sequences",
    stream: "stream",
    effort: "effort",
    cache: "cache_control",
    cache_ttl: "cache_ttl"
  },
  google: {
    // Google's generation config uses camelCase throughout.
    temperature: "temperature",
    max_tokens: "maxOutputTokens",
    top_p: "topP",
    top_k: "topK",
    frequency_penalty: "frequencyPenalty",
    presence_penalty: "presencePenalty",
    stop: "stopSequences",
    n: "candidateCount",
    stream: "stream",
    seed: "seed",
    responseMimeType: "responseMimeType",
    responseSchema: "responseSchema"
  },
  mistral: {
    temperature: "temperature",
    max_tokens: "max_tokens",
    top_p: "top_p",
    frequency_penalty: "frequency_penalty",
    presence_penalty: "presence_penalty",
    stop: "stop",
    n: "n",
    seed: "random_seed",
    stream: "stream",
    safe_prompt: "safe_prompt",
    min_tokens: "min_tokens"
  },
  cohere: {
    // Cohere abbreviates nucleus/top-k sampling to bare "p" and "k".
    temperature: "temperature",
    max_tokens: "max_tokens",
    top_p: "p",
    top_k: "k",
    frequency_penalty: "frequency_penalty",
    presence_penalty: "presence_penalty",
    stop: "stop_sequences",
    stream: "stream",
    seed: "seed"
  },
  bedrock: {
    // Bedrock Converse API uses camelCase
    temperature: "temperature",
    max_tokens: "maxTokens",
    top_p: "topP",
    top_k: "topK",
    // Claude models via additionalModelRequestFields
    stop: "stopSequences",
    stream: "stream",
    cache: "cache_control",
    cache_ttl: "cache_ttl"
  },
  openrouter: {
    // OpenAI-compatible API with extra routing params
    temperature: "temperature",
    max_tokens: "max_tokens",
    top_p: "top_p",
    top_k: "top_k",
    frequency_penalty: "frequency_penalty",
    presence_penalty: "presence_penalty",
    stop: "stop",
    n: "n",
    seed: "seed",
    stream: "stream",
    effort: "reasoning_effort"
  },
  vercel: {
    // OpenAI-compatible gateway
    temperature: "temperature",
    max_tokens: "max_tokens",
    top_p: "top_p",
    top_k: "top_k",
    frequency_penalty: "frequency_penalty",
    presence_penalty: "presence_penalty",
    stop: "stop",
    n: "n",
    seed: "seed",
    stream: "stream",
    effort: "reasoning_effort"
  }
};
163
/**
 * Per-provider validation specs, keyed by the provider's WIRE parameter
 * name (not the canonical key). Each spec gives the expected type, an
 * optional numeric range (min/max) or enum (values), a default, and a
 * human-readable description.
 */
var PARAM_SPECS = {
  openai: {
    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
    stop: { type: "string", description: "Stop sequences" },
    n: { type: "number", min: 1, default: 1, description: "Completions count" },
    seed: { type: "number", description: "Random seed" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    reasoning_effort: {
      type: "string",
      values: ["none", "minimal", "low", "medium", "high", "xhigh"],
      default: "medium",
      description: "Reasoning effort"
    }
  },
  anthropic: {
    // Note: Anthropic's temperature range is 0-1, unlike OpenAI's 0-2.
    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
    stop_sequences: { type: "string", description: "Stop sequences" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    effort: { type: "string", values: ["low", "medium", "high", "max"], default: "medium", description: "Thinking effort" },
    cache_control: { type: "string", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
    cache_ttl: { type: "string", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
  },
  google: {
    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
    maxOutputTokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    topP: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    topK: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
    frequencyPenalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
    presencePenalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
    stopSequences: { type: "string", description: "Stop sequences" },
    candidateCount: { type: "number", min: 1, default: 1, description: "Candidate count" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    seed: { type: "number", description: "Random seed" },
    responseMimeType: { type: "string", description: "Response MIME type" },
    responseSchema: { type: "string", description: "Response schema" }
  },
  mistral: {
    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
    stop: { type: "string", description: "Stop sequences" },
    n: { type: "number", min: 1, default: 1, description: "Completions count" },
    random_seed: { type: "number", description: "Random seed" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    safe_prompt: { type: "boolean", default: false, description: "Enable safe prompt" },
    min_tokens: { type: "number", min: 0, default: 0, description: "Minimum tokens" }
  },
  cohere: {
    // Note: Cohere's penalty ranges are 0-1, not -2..2.
    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling (p)" },
    k: { type: "number", min: 0, max: 500, default: 40, description: "Top-K sampling (k)" },
    frequency_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize frequent tokens" },
    presence_penalty: { type: "number", min: 0, max: 1, default: 0, description: "Penalize repeated topics" },
    stop_sequences: { type: "string", description: "Stop sequences" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    seed: { type: "number", description: "Random seed" }
  },
  bedrock: {
    // Converse API inferenceConfig params
    temperature: { type: "number", min: 0, max: 1, default: 0.7, description: "Controls randomness" },
    maxTokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    topP: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    topK: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
    stopSequences: { type: "string", description: "Stop sequences" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    cache_control: { type: "string", values: ["ephemeral"], default: "ephemeral", description: "Cache control" },
    cache_ttl: { type: "string", values: ["5m", "1h"], default: "5m", description: "Cache TTL" }
  },
  openrouter: {
    // Loose validation — proxies to many providers with varying ranges
    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
    stop: { type: "string", description: "Stop sequences" },
    n: { type: "number", min: 1, default: 1, description: "Completions count" },
    seed: { type: "number", description: "Random seed" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    reasoning_effort: {
      type: "string",
      values: ["none", "minimal", "low", "medium", "high", "xhigh"],
      default: "medium",
      description: "Reasoning effort"
    }
  },
  vercel: {
    // Loose validation — proxies to many providers with varying ranges
    temperature: { type: "number", min: 0, max: 2, default: 0.7, description: "Controls randomness" },
    max_tokens: { type: "number", min: 1, default: 4096, description: "Maximum output tokens" },
    top_p: { type: "number", min: 0, max: 1, default: 1, description: "Nucleus sampling" },
    top_k: { type: "number", min: 0, default: 40, description: "Top-K sampling" },
    frequency_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize frequent tokens" },
    presence_penalty: { type: "number", min: -2, max: 2, default: 0, description: "Penalize repeated topics" },
    stop: { type: "string", description: "Stop sequences" },
    n: { type: "number", min: 1, default: 1, description: "Completions count" },
    seed: { type: "number", description: "Random seed" },
    stream: { type: "boolean", default: false, description: "Stream response" },
    reasoning_effort: {
      type: "string",
      values: ["none", "minimal", "low", "medium", "high", "xhigh"],
      default: "medium",
      description: "Reasoning effort"
    }
  }
};
280
/**
 * True when the model id belongs to an o1/o3/o4-style reasoning family
 * (name starts with "o1", "o3", or "o4"). Gateway-prefixed ids such as
 * "openai/o3-mini" are matched by their final path segment.
 * @param {string} model - Bare or provider-prefixed model id.
 * @returns {boolean}
 */
function isReasoningModel(model) {
  // split("/") always yields at least one segment, so pop() handles
  // both plain ("o3") and prefixed ("openai/o3") ids uniformly.
  const lastSegment = model.split("/").pop();
  return /^o[134]/.test(lastSegment);
}
284
/**
 * True when the provider can serve OpenAI models: OpenAI itself or a
 * gateway (OpenRouter / Vercel) that proxies to it.
 * @param {string} provider - Provider id.
 * @returns {boolean}
 */
function canHostOpenAIModels(provider) {
  return ["openai", "openrouter", "vercel"].includes(provider);
}
287
/**
 * True when the provider is a multi-vendor gateway (OpenRouter or the
 * Vercel AI gateway) rather than a direct model vendor.
 * @param {string} provider - Provider id.
 * @returns {boolean}
 */
function isGatewayProvider(provider) {
  return ["openrouter", "vercel"].includes(provider);
}
290
/**
 * Extract the upstream provider from a gateway-style model id of the
 * form "provider/model" (e.g. "anthropic/claude-3-opus").
 * Only the five known direct providers are recognized; anything else
 * (no slash, leading slash, unknown prefix) yields undefined.
 * @param {string} model - Gateway model id.
 * @returns {string|undefined} Upstream provider id, or undefined.
 */
function detectGatewaySubProvider(model) {
  const separator = model.indexOf("/");
  // indexOf < 1 covers both "no slash" (-1) and an empty prefix (0).
  if (separator < 1) return void 0;
  const candidate = model.slice(0, separator);
  const directProviders = ["openai", "anthropic", "google", "mistral", "cohere"];
  return directProviders.includes(candidate) ? candidate : void 0;
}
297
// Canonical sampling parameters that o-series reasoning models (see
// isReasoningModel) do not accept and should be stripped/rejected.
var REASONING_MODEL_UNSUPPORTED = /* @__PURE__ */ new Set([
  "temperature",
  "top_p",
  "frequency_penalty",
  "presence_penalty",
  "n"
]);
304
/**
 * Identify the model family of a Bedrock model id by its dot-separated
 * prefix, e.g. "anthropic.claude-3-sonnet..." → "anthropic".
 * Cross-region inference ids carry a region segment first
 * ("us.anthropic.claude-..."), which is skipped when present.
 * @param {string} model - Bedrock model id.
 * @returns {string|undefined} Family name, or undefined if unknown.
 */
function detectBedrockModelFamily(model) {
  const segments = model.split(".");
  const regionPrefixes = ["us", "eu", "apac", "global"];
  // Skip a leading region segment only when something follows it.
  const familyIndex =
    regionPrefixes.includes(segments[0]) && segments.length > 1 ? 1 : 0;
  const candidate = segments[familyIndex];
  const knownFamilies = ["anthropic", "meta", "amazon", "mistral", "cohere", "ai21"];
  return knownFamilies.includes(candidate) ? candidate : void 0;
}
320
/**
 * True when the Bedrock model supports prompt caching: all Anthropic
 * models, plus Amazon models whose id contains "nova".
 * @param {string} model - Bedrock model id.
 * @returns {boolean}
 */
function bedrockSupportsCaching(model) {
  switch (detectBedrockModelFamily(model)) {
    case "anthropic":
      return true;
    case "amazon":
      return model.includes("nova");
    default:
      return false;
  }
}
326
/**
 * Default cache-control value per provider, or undefined when the
 * provider takes no explicit cache parameter.
 */
var CACHE_VALUES = {
  openai: void 0,
  // OpenAI auto-caches; no explicit param
  anthropic: "ephemeral",
  google: void 0,
  // Google uses explicit caching API, not a param
  mistral: void 0,
  cohere: void 0,
  bedrock: "ephemeral",
  // Supported for Claude models on Bedrock
  openrouter: void 0,
  // Depends on underlying provider
  vercel: void 0
  // Depends on underlying provider
};
341
/**
 * Accepted cache TTL values per provider, or undefined when the
 * provider has no TTL parameter.
 */
var CACHE_TTLS = {
  openai: void 0,
  anthropic: ["5m", "1h"],
  google: void 0,
  mistral: void 0,
  cohere: void 0,
  bedrock: ["5m", "1h"],
  // Claude on Bedrock uses same TTLs as direct Anthropic
  openrouter: void 0,
  vercel: void 0
};
352
// Matches bare duration strings: one or more digits followed by a
// minutes ("m") or hours ("h") unit, e.g. "5m", "1h".
var DURATION_RE = /^\d+[mh]$/;
353
+
354
+
355
+
356
+
357
+
358
+
359
+
360
+
361
+
362
+
363
+
364
+
365
+
366
+
367
+
368
+
369
// CommonJS re-exports for this generated chunk: provider detection,
// alias/param tables, model-capability helpers, and cache metadata.
exports.detectProvider = detectProvider; exports.ALIASES = ALIASES; exports.PROVIDER_PARAMS = PROVIDER_PARAMS; exports.PARAM_SPECS = PARAM_SPECS; exports.isReasoningModel = isReasoningModel; exports.canHostOpenAIModels = canHostOpenAIModels; exports.isGatewayProvider = isGatewayProvider; exports.detectGatewaySubProvider = detectGatewaySubProvider; exports.REASONING_MODEL_UNSUPPORTED = REASONING_MODEL_UNSUPPORTED; exports.detectBedrockModelFamily = detectBedrockModelFamily; exports.bedrockSupportsCaching = bedrockSupportsCaching; exports.CACHE_VALUES = CACHE_VALUES; exports.CACHE_TTLS = CACHE_TTLS; exports.DURATION_RE = DURATION_RE;
370
+ //# sourceMappingURL=chunk-NSCBY4VD.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/dan/code/oss/llm-strings/dist/chunk-NSCBY4VD.cjs","../src/provider-core.ts"],"names":[],"mappings":"AAAA;ACUO,SAAS,cAAA,CAAe,IAAA,EAAoC;AAEjE,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,YAAY,CAAA,EAAG,OAAO,YAAA;AACxC,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,mBAAmB,CAAA,EAAG,OAAO,QAAA;AAE/C,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,WAAW,EAAA,GAAK,IAAA,CAAK,QAAA,CAAS,SAAS,CAAA,EAAG,OAAO,SAAA;AACnE,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,QAAQ,CAAA,EAAG,OAAO,QAAA;AACpC,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,WAAW,EAAA,GAAK,IAAA,CAAK,QAAA,CAAS,QAAQ,CAAA,EAAG,OAAO,WAAA;AAClE,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,YAAY,EAAA,GAAK,IAAA,CAAK,QAAA,CAAS,QAAQ,CAAA,EAAG,OAAO,QAAA;AACnE,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,SAAS,CAAA,EAAG,OAAO,SAAA;AACrC,EAAA,GAAA,CAAI,IAAA,CAAK,QAAA,CAAS,QAAQ,CAAA,EAAG,OAAO,QAAA;AACpC,EAAA,OAAO,KAAA,CAAA;AACT;AAMO,IAAM,QAAA,EAAkC;AAAA;AAAA,EAE7C,IAAA,EAAM,aAAA;AAAA;AAAA,EAGN,GAAA,EAAK,YAAA;AAAA,EACL,OAAA,EAAS,YAAA;AAAA,EACT,UAAA,EAAY,YAAA;AAAA,EACZ,iBAAA,EAAmB,YAAA;AAAA,EACnB,qBAAA,EAAuB,YAAA;AAAA,EACvB,eAAA,EAAiB,YAAA;AAAA,EACjB,SAAA,EAAW,YAAA;AAAA;AAAA,EAGX,IAAA,EAAM,OAAA;AAAA,EACN,IAAA,EAAM,OAAA;AAAA,EACN,OAAA,EAAS,OAAA;AAAA;AAAA,EAGT,IAAA,EAAM,OAAA;AAAA,EACN,IAAA,EAAM,OAAA;AAAA;AAAA,EAGN,IAAA,EAAM,mBAAA;AAAA,EACN,YAAA,EAAc,mBAAA;AAAA,EACd,gBAAA,EAAkB,mBAAA;AAAA,EAClB,kBAAA,EAAoB,mBAAA;AAAA;AAAA,EAGpB,IAAA,EAAM,kBAAA;AAAA,EACN,YAAA,EAAc,kBAAA;AAAA,EACd,eAAA,EAAiB,kBAAA;AAAA;AAAA,EAGjB,cAAA,EAAgB,MAAA;AAAA,EAChB,aAAA,EAAe,MAAA;AAAA,EACf,aAAA,EAAe,MAAA;AAAA;AAAA,EAGf,WAAA,EAAa,MAAA;AAAA,EACb,UAAA,EAAY,MAAA;AAAA;AAAA,EAGZ,cAAA,EAAgB,GAAA;AAAA,EAChB,eAAA,EAAiB,GAAA;AAAA,EACjB,eAAA,EAAiB,GAAA;AAAA;AAAA,EAGjB,gBAAA,EAAkB,QAAA;AAAA,EAClB,SAAA,EAAW,QAAA;AAAA;AAAA,EAGX,aAAA,EAAe,OAAA;AAAA,EACf,YAAA,EAAc,OAAA;AAAA,EACd,UAAA,EAAY,OAAA;AAAA,EACZ,WAAA,EAAa;AACf,CAAA;AAMO,IAAM,gBAAA,EAA4D;AAAA,EACvE,MAAA,EAAQ;AAAA,IACN,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,YAAA;AAAA,IACZ,KAAA,EAAO,OAAA;AAAA,IACP,iBAAA,EAAmB,mBAAA;AAAA,IACnB,gBAAA,EAAkB,kBAAA;AAAA,IAClB,IAAA,EAAM,MA
AA;AAAA,IACN,CAAA,EAAG,GAAA;AAAA,IACH,IAAA,EAAM,MAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,MAAA,EAAQ;AAAA,EACV,CAAA;AAAA,EACA,SAAA,EAAW;AAAA,IACT,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,YAAA;AAAA,IACZ,KAAA,EAAO,OAAA;AAAA,IACP,KAAA,EAAO,OAAA;AAAA,IACP,IAAA,EAAM,gBAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,MAAA,EAAQ,QAAA;AAAA,IACR,KAAA,EAAO,eAAA;AAAA,IACP,SAAA,EAAW;AAAA,EACb,CAAA;AAAA,EACA,MAAA,EAAQ;AAAA,IACN,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,iBAAA;AAAA,IACZ,KAAA,EAAO,MAAA;AAAA,IACP,KAAA,EAAO,MAAA;AAAA,IACP,iBAAA,EAAmB,kBAAA;AAAA,IACnB,gBAAA,EAAkB,iBAAA;AAAA,IAClB,IAAA,EAAM,eAAA;AAAA,IACN,CAAA,EAAG,gBAAA;AAAA,IACH,MAAA,EAAQ,QAAA;AAAA,IACR,IAAA,EAAM,MAAA;AAAA,IACN,gBAAA,EAAkB,kBAAA;AAAA,IAClB,cAAA,EAAgB;AAAA,EAClB,CAAA;AAAA,EACA,OAAA,EAAS;AAAA,IACP,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,YAAA;AAAA,IACZ,KAAA,EAAO,OAAA;AAAA,IACP,iBAAA,EAAmB,mBAAA;AAAA,IACnB,gBAAA,EAAkB,kBAAA;AAAA,IAClB,IAAA,EAAM,MAAA;AAAA,IACN,CAAA,EAAG,GAAA;AAAA,IACH,IAAA,EAAM,aAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY;AAAA,EACd,CAAA;AAAA,EACA,MAAA,EAAQ;AAAA,IACN,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,YAAA;AAAA,IACZ,KAAA,EAAO,GAAA;AAAA,IACP,KAAA,EAAO,GAAA;AAAA,IACP,iBAAA,EAAmB,mBAAA;AAAA,IACnB,gBAAA,EAAkB,kBAAA;AAAA,IAClB,IAAA,EAAM,gBAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,IAAA,EAAM;AAAA,EACR,CAAA;AAAA,EACA,OAAA,EAAS;AAAA;AAAA,IAEP,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,WAAA;AAAA,IACZ,KAAA,EAAO,MAAA;AAAA,IACP,KAAA,EAAO,MAAA;AAAA;AAAA,IACP,IAAA,EAAM,eAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,KAAA,EAAO,eAAA;AAAA,IACP,SAAA,EAAW;AAAA,EACb,CAAA;AAAA,EACA,UAAA,EAAY;AAAA;AAAA,IAEV,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,YAAA;AAAA,IACZ,KAAA,EAAO,OAAA;AAAA,IACP,KAAA,EAAO,OAAA;AAAA,IACP,iBAAA,EAAmB,mBAAA;AAAA,IACnB,gBAAA,EAAkB,kBAAA;AAAA,IAClB,IAAA,EAAM,MAAA;AAAA,IACN,CAAA,EAAG,GAAA;AAAA,IACH,IAAA,EAAM,MAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,MAAA,EAAQ;AAAA,EACV,CAAA;AAAA,EACA,MAAA,EAAQ;AAAA;AAAA,IAEN,WAAA,EAAa,aAAA;AAAA,IACb,UAAA,EAAY,YAAA;AAAA,IACZ,KAAA,EAAO,OAAA;AAAA,IACP,KAAA,EAAO,OAAA;AAAA,IACP,iBAAA,EAAmB,mBA
AA;AAAA,IACnB,gBAAA,EAAkB,kBAAA;AAAA,IAClB,IAAA,EAAM,MAAA;AAAA,IACN,CAAA,EAAG,GAAA;AAAA,IACH,IAAA,EAAM,MAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,MAAA,EAAQ;AAAA,EACV;AACF,CAAA;AAcO,IAAM,YAAA,EAA2D;AAAA,EACtE,MAAA,EAAQ;AAAA,IACN,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC1F,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACrF,iBAAA,EAAmB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IAC1G,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACzG,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IACtD,CAAA,EAAG,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,oBAAoB,CAAA;AAAA,IAC1E,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,cAAc,CAAA;AAAA,IACnD,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,gBAAA,EAAkB;AAAA,MAChB,IAAA,EAAM,QAAA;AAAA,MACN,MAAA,EAAQ,CAAC,MAAA,EAAQ,SAAA,EAAW,KAAA,EAAO,QAAA,EAAU,MAAA,EAAQ,OAAO,CAAA;AAAA,MAC5D,OAAA,EAAS,QAAA;AAAA,MACT,WAAA,EAAa;AAAA,IACf;AAAA,EACF,CAAA;AAAA,EACA,SAAA,EAAW;AAAA,IACT,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC1F,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACrF,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,EAAA,EAAI,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC5E,cAAA,EAAgB,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAChE,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,K
AAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,MAAA,EAAQ,EAAE,IAAA,EAAM,QAAA,EAAU,MAAA,EAAQ,CAAC,KAAA,EAAO,QAAA,EAAU,MAAA,EAAQ,KAAK,CAAA,EAAG,OAAA,EAAS,QAAA,EAAU,WAAA,EAAa,kBAAkB,CAAA;AAAA,IACtH,aAAA,EAAe,EAAE,IAAA,EAAM,QAAA,EAAU,MAAA,EAAQ,CAAC,WAAW,CAAA,EAAG,OAAA,EAAS,WAAA,EAAa,WAAA,EAAa,gBAAgB,CAAA;AAAA,IAC3G,SAAA,EAAW,EAAE,IAAA,EAAM,QAAA,EAAU,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAI,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,YAAY;AAAA,EAC7F,CAAA;AAAA,EACA,MAAA,EAAQ;AAAA,IACN,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,eAAA,EAAiB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC/F,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACpF,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,EAAA,EAAI,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC3E,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACzG,eAAA,EAAiB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACxG,aAAA,EAAe,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC/D,cAAA,EAAgB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,kBAAkB,CAAA;AAAA,IACrF,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,cAAc,CAAA;AAAA,IACnD,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,qBAAqB,CAAA;AAAA,IACtE,cAAA,EAAgB,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,kBAAkB;AAAA,EACnE,CAAA;AAAA,EACA,OAAA,EAAS;AAAA,IACP,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC1F,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAA
A,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACrF,iBAAA,EAAmB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IAC1G,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACzG,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IACtD,CAAA,EAAG,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,oBAAoB,CAAA;AAAA,IAC1E,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,cAAc,CAAA;AAAA,IAC1D,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,WAAA,EAAa,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,qBAAqB,CAAA;AAAA,IAClF,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,iBAAiB;AAAA,EAClF,CAAA;AAAA,EACA,MAAA,EAAQ;AAAA,IACN,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC1F,CAAA,EAAG,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,uBAAuB,CAAA;AAAA,IACrF,CAAA,EAAG,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,GAAA,EAAK,OAAA,EAAS,EAAA,EAAI,WAAA,EAAa,qBAAqB,CAAA;AAAA,IACtF,iBAAA,EAAmB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACzG,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACxG,cAAA,EAAgB,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAChE,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,cAAc;AAAA,EACrD,CAAA;AAAA,EACA,OAAA,EAAS;AAAA;AAAA,IAEP,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK
,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,SAAA,EAAW,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IACzF,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACpF,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,EAAA,EAAI,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC3E,aAAA,EAAe,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC/D,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,aAAA,EAAe,EAAE,IAAA,EAAM,QAAA,EAAU,MAAA,EAAQ,CAAC,WAAW,CAAA,EAAG,OAAA,EAAS,WAAA,EAAa,WAAA,EAAa,gBAAgB,CAAA;AAAA,IAC3G,SAAA,EAAW,EAAE,IAAA,EAAM,QAAA,EAAU,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAI,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,YAAY;AAAA,EAC7F,CAAA;AAAA,EACA,UAAA,EAAY;AAAA;AAAA,IAEV,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC1F,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACrF,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,EAAA,EAAI,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC5E,iBAAA,EAAmB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IAC1G,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACzG,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IACtD,CAAA,EAAG,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,oBAAoB,CAAA;AAAA,IAC1E,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,cAAc,CAAA;AAAA,IACnD,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,gBAAA,EAAkB;AAAA,MAChB,IAAA,EAAM,QAAA;AAAA,MACN,MAAA,EAAQ,CAAC,MAAA,EAAQ,SAAA,EAAW,KAAA,EAAO,QAAA,EAAU,MAAA,EAAQ,OA
AO,CAAA;AAAA,MAC5D,OAAA,EAAS,QAAA;AAAA,MACT,WAAA,EAAa;AAAA,IACf;AAAA,EACF,CAAA;AAAA,EACA,MAAA,EAAQ;AAAA;AAAA,IAEN,WAAA,EAAa,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,GAAA,EAAK,WAAA,EAAa,sBAAsB,CAAA;AAAA,IAChG,UAAA,EAAY,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,IAAA,EAAM,WAAA,EAAa,wBAAwB,CAAA;AAAA,IAC1F,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,mBAAmB,CAAA;AAAA,IACrF,KAAA,EAAO,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,EAAA,EAAI,WAAA,EAAa,iBAAiB,CAAA;AAAA,IAC5E,iBAAA,EAAmB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IAC1G,gBAAA,EAAkB,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,CAAA,EAAI,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,2BAA2B,CAAA;AAAA,IACzG,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,iBAAiB,CAAA;AAAA,IACtD,CAAA,EAAG,EAAE,IAAA,EAAM,QAAA,EAAU,GAAA,EAAK,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,WAAA,EAAa,oBAAoB,CAAA;AAAA,IAC1E,IAAA,EAAM,EAAE,IAAA,EAAM,QAAA,EAAU,WAAA,EAAa,cAAc,CAAA;AAAA,IACnD,MAAA,EAAQ,EAAE,IAAA,EAAM,SAAA,EAAW,OAAA,EAAS,KAAA,EAAO,WAAA,EAAa,kBAAkB,CAAA;AAAA,IAC1E,gBAAA,EAAkB;AAAA,MAChB,IAAA,EAAM,QAAA;AAAA,MACN,MAAA,EAAQ,CAAC,MAAA,EAAQ,SAAA,EAAW,KAAA,EAAO,QAAA,EAAU,MAAA,EAAQ,OAAO,CAAA;AAAA,MAC5D,OAAA,EAAS,QAAA;AAAA,MACT,WAAA,EAAa;AAAA,IACf;AAAA,EACF;AACF,CAAA;AAGO,SAAS,gBAAA,CAAiB,KAAA,EAAwB;AAEvD,EAAA,MAAM,KAAA,EAAO,KAAA,CAAM,QAAA,CAAS,GAAG,EAAA,EAAI,KAAA,CAAM,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,CAAI,EAAA,EAAK,KAAA;AAC7D,EAAA,OAAO,SAAA,CAAU,IAAA,CAAK,IAAI,CAAA;AAC5B;AAGO,SAAS,mBAAA,CAAoB,QAAA,EAA6B;AAC/D,EAAA,OAAO,SAAA,IAAa,SAAA,GAAY,SAAA,IAAa,aAAA,GAAgB,SAAA,IAAa,QAAA;AAC5E;AAGO,SAAS,iBAAA,CAAkB,QAAA,EAA6B;AAC7D,EAAA,OAAO,SAAA,IAAa,aAAA,GAAgB,SAAA,IAAa,QAAA;AACnD;AAOO,SAAS,wBAAA,CACd,KAAA,EACsB;AACtB,EAAA,MAAM,MAAA,EAAQ,KAAA,CAAM,OAAA,CAAQ,GAAG,CAAA;AAC/B,EAAA,GAAA,CAAI,MAAA,EAAQ,CAAA,EAAG,OAAO,KAAA,CAAA;AACtB,EAAA,MAAM,OAAA,EAAS,KAAA,CAAM,KAAA,CAAM,CAAA,EAAG,KAAK,CA
AA;AACnC,EAAA,MAAM,OAAA,EAAqB,CAAC,QAAA,EAAU,WAAA,EAAa,QAAA,EAAU,SAAA,EAAW,QAAQ,CAAA;AAChF,EAAA,OAAO,MAAA,CAAO,IAAA,CAAK,CAAC,CAAA,EAAA,GAAM,EAAA,IAAM,MAAM,CAAA;AACxC;AAEO,IAAM,4BAAA,kBAA8B,IAAI,GAAA,CAAI;AAAA,EACjD,aAAA;AAAA,EACA,OAAA;AAAA,EACA,mBAAA;AAAA,EACA,kBAAA;AAAA,EACA;AACF,CAAC,CAAA;AAcM,SAAS,wBAAA,CACd,KAAA,EACgC;AAGhC,EAAA,MAAM,MAAA,EAAQ,KAAA,CAAM,KAAA,CAAM,GAAG,CAAA;AAG7B,EAAA,IAAI,OAAA,EAAS,KAAA,CAAM,CAAC,CAAA;AACpB,EAAA,GAAA,CAAI,CAAC,IAAA,EAAM,IAAA,EAAM,MAAA,EAAQ,QAAQ,CAAA,CAAE,QAAA,CAAS,MAAM,EAAA,GAAK,KAAA,CAAM,OAAA,EAAS,CAAA,EAAG;AACvE,IAAA,OAAA,EAAS,KAAA,CAAM,CAAC,CAAA;AAAA,EAClB;AAEA,EAAA,MAAM,SAAA,EAAiC;AAAA,IACrC,WAAA;AAAA,IACA,MAAA;AAAA,IACA,QAAA;AAAA,IACA,SAAA;AAAA,IACA,QAAA;AAAA,IACA;AAAA,EACF,CAAA;AACA,EAAA,OAAO,QAAA,CAAS,IAAA,CAAK,CAAC,CAAA,EAAA,GAAM,OAAA,IAAW,CAAC,CAAA;AAC1C;AAGO,SAAS,sBAAA,CAAuB,KAAA,EAAwB;AAC7D,EAAA,MAAM,OAAA,EAAS,wBAAA,CAAyB,KAAK,CAAA;AAC7C,EAAA,GAAA,CAAI,OAAA,IAAW,WAAA,EAAa,OAAO,IAAA;AACnC,EAAA,GAAA,CAAI,OAAA,IAAW,SAAA,GAAY,KAAA,CAAM,QAAA,CAAS,MAAM,CAAA,EAAG,OAAO,IAAA;AAC1D,EAAA,OAAO,KAAA;AACT;AAGO,IAAM,aAAA,EAAqD;AAAA,EAChE,MAAA,EAAQ,KAAA,CAAA;AAAA;AAAA,EACR,SAAA,EAAW,WAAA;AAAA,EACX,MAAA,EAAQ,KAAA,CAAA;AAAA;AAAA,EACR,OAAA,EAAS,KAAA,CAAA;AAAA,EACT,MAAA,EAAQ,KAAA,CAAA;AAAA,EACR,OAAA,EAAS,WAAA;AAAA;AAAA,EACT,UAAA,EAAY,KAAA,CAAA;AAAA;AAAA,EACZ,MAAA,EAAQ,KAAA;AAAA;AACV,CAAA;AAGO,IAAM,WAAA,EAAqD;AAAA,EAChE,MAAA,EAAQ,KAAA,CAAA;AAAA,EACR,SAAA,EAAW,CAAC,IAAA,EAAM,IAAI,CAAA;AAAA,EACtB,MAAA,EAAQ,KAAA,CAAA;AAAA,EACR,OAAA,EAAS,KAAA,CAAA;AAAA,EACT,MAAA,EAAQ,KAAA,CAAA;AAAA,EACR,OAAA,EAAS,CAAC,IAAA,EAAM,IAAI,CAAA;AAAA;AAAA,EACpB,UAAA,EAAY,KAAA,CAAA;AAAA,EACZ,MAAA,EAAQ,KAAA;AACV,CAAA;AAGO,IAAM,YAAA,EAAc,WAAA;ADhF3B;AACA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,+nBAAC","file":"/Users/dan/code/oss/llm-strings/dist/chunk-NSCBY4VD.cjs","sourcesContent":[null,"export type Provider =\n | \"openai\"\n | \"anthropic\"\n | \"google\"\n | \"mistral\"\n | \"cohere\"\n | \"bedrock\"\n | \"openrouter\"\n | 
\"vercel\";\n\nexport function detectProvider(host: string): Provider | undefined {\n // Gateways and aggregators first — they proxy to other providers\n if (host.includes(\"openrouter\")) return \"openrouter\";\n if (host.includes(\"gateway.ai.vercel\")) return \"vercel\";\n // Bedrock before native providers since it hosts models from multiple vendors\n if (host.includes(\"amazonaws\") || host.includes(\"bedrock\")) return \"bedrock\";\n if (host.includes(\"openai\")) return \"openai\";\n if (host.includes(\"anthropic\") || host.includes(\"claude\")) return \"anthropic\";\n if (host.includes(\"googleapis\") || host.includes(\"google\")) return \"google\";\n if (host.includes(\"mistral\")) return \"mistral\";\n if (host.includes(\"cohere\")) return \"cohere\";\n return undefined;\n}\n\n/**\n * Shorthand aliases → canonical param name.\n * Canonical names use snake_case and follow OpenAI conventions where possible.\n */\nexport const ALIASES: Record<string, string> = {\n // temperature\n temp: \"temperature\",\n\n // max_tokens\n max: \"max_tokens\",\n max_out: \"max_tokens\",\n max_output: \"max_tokens\",\n max_output_tokens: \"max_tokens\",\n max_completion_tokens: \"max_tokens\",\n maxOutputTokens: \"max_tokens\",\n maxTokens: \"max_tokens\",\n\n // top_p\n topp: \"top_p\",\n topP: \"top_p\",\n nucleus: \"top_p\",\n\n // top_k\n topk: \"top_k\",\n topK: \"top_k\",\n\n // frequency_penalty\n freq: \"frequency_penalty\",\n freq_penalty: \"frequency_penalty\",\n frequencyPenalty: \"frequency_penalty\",\n repetition_penalty: \"frequency_penalty\",\n\n // presence_penalty\n pres: \"presence_penalty\",\n pres_penalty: \"presence_penalty\",\n presencePenalty: \"presence_penalty\",\n\n // stop\n stop_sequences: \"stop\",\n stopSequences: \"stop\",\n stop_sequence: \"stop\",\n\n // seed\n random_seed: \"seed\",\n randomSeed: \"seed\",\n\n // n (completions count)\n candidateCount: \"n\",\n candidate_count: \"n\",\n num_completions: \"n\",\n\n // effort / reasoning\n 
reasoning_effort: \"effort\",\n reasoning: \"effort\",\n\n // cache\n cache_control: \"cache\",\n cacheControl: \"cache\",\n cachePoint: \"cache\",\n cache_point: \"cache\",\n};\n\n/**\n * Canonical param name → provider-specific API param name.\n * Only includes params the provider actually supports.\n */\nexport const PROVIDER_PARAMS: Record<Provider, Record<string, string>> = {\n openai: {\n temperature: \"temperature\",\n max_tokens: \"max_tokens\",\n top_p: \"top_p\",\n frequency_penalty: \"frequency_penalty\",\n presence_penalty: \"presence_penalty\",\n stop: \"stop\",\n n: \"n\",\n seed: \"seed\",\n stream: \"stream\",\n effort: \"reasoning_effort\",\n },\n anthropic: {\n temperature: \"temperature\",\n max_tokens: \"max_tokens\",\n top_p: \"top_p\",\n top_k: \"top_k\",\n stop: \"stop_sequences\",\n stream: \"stream\",\n effort: \"effort\",\n cache: \"cache_control\",\n cache_ttl: \"cache_ttl\",\n },\n google: {\n temperature: \"temperature\",\n max_tokens: \"maxOutputTokens\",\n top_p: \"topP\",\n top_k: \"topK\",\n frequency_penalty: \"frequencyPenalty\",\n presence_penalty: \"presencePenalty\",\n stop: \"stopSequences\",\n n: \"candidateCount\",\n stream: \"stream\",\n seed: \"seed\",\n responseMimeType: \"responseMimeType\",\n responseSchema: \"responseSchema\",\n },\n mistral: {\n temperature: \"temperature\",\n max_tokens: \"max_tokens\",\n top_p: \"top_p\",\n frequency_penalty: \"frequency_penalty\",\n presence_penalty: \"presence_penalty\",\n stop: \"stop\",\n n: \"n\",\n seed: \"random_seed\",\n stream: \"stream\",\n safe_prompt: \"safe_prompt\",\n min_tokens: \"min_tokens\",\n },\n cohere: {\n temperature: \"temperature\",\n max_tokens: \"max_tokens\",\n top_p: \"p\",\n top_k: \"k\",\n frequency_penalty: \"frequency_penalty\",\n presence_penalty: \"presence_penalty\",\n stop: \"stop_sequences\",\n stream: \"stream\",\n seed: \"seed\",\n },\n bedrock: {\n // Bedrock Converse API uses camelCase\n temperature: \"temperature\",\n max_tokens: 
\"maxTokens\",\n top_p: \"topP\",\n top_k: \"topK\", // Claude models via additionalModelRequestFields\n stop: \"stopSequences\",\n stream: \"stream\",\n cache: \"cache_control\",\n cache_ttl: \"cache_ttl\",\n },\n openrouter: {\n // OpenAI-compatible API with extra routing params\n temperature: \"temperature\",\n max_tokens: \"max_tokens\",\n top_p: \"top_p\",\n top_k: \"top_k\",\n frequency_penalty: \"frequency_penalty\",\n presence_penalty: \"presence_penalty\",\n stop: \"stop\",\n n: \"n\",\n seed: \"seed\",\n stream: \"stream\",\n effort: \"reasoning_effort\",\n },\n vercel: {\n // OpenAI-compatible gateway\n temperature: \"temperature\",\n max_tokens: \"max_tokens\",\n top_p: \"top_p\",\n top_k: \"top_k\",\n frequency_penalty: \"frequency_penalty\",\n presence_penalty: \"presence_penalty\",\n stop: \"stop\",\n n: \"n\",\n seed: \"seed\",\n stream: \"stream\",\n effort: \"reasoning_effort\",\n },\n};\n\n/**\n * Validation specs per provider, keyed by provider-specific param name.\n */\nexport interface ParamSpec {\n type: \"number\" | \"string\" | \"boolean\";\n min?: number;\n max?: number;\n values?: string[];\n default?: string | number | boolean;\n description?: string;\n}\n\nexport const PARAM_SPECS: Record<Provider, Record<string, ParamSpec>> = {\n openai: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", 
description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n reasoning_effort: {\n type: \"string\",\n values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"],\n default: \"medium\",\n description: \"Reasoning effort\",\n },\n },\n anthropic: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n stop_sequences: { type: \"string\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n effort: { type: \"string\", values: [\"low\", \"medium\", \"high\", \"max\"], default: \"medium\", description: \"Thinking effort\" },\n cache_control: { type: \"string\", values: [\"ephemeral\"], default: \"ephemeral\", description: \"Cache control\" },\n cache_ttl: { type: \"string\", values: [\"5m\", \"1h\"], default: \"5m\", description: \"Cache TTL\" },\n },\n google: {\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n maxOutputTokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n topP: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n topK: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequencyPenalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presencePenalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stopSequences: { type: \"string\", description: \"Stop sequences\" },\n candidateCount: { type: \"number\", min: 1, default: 1, description: 
\"Candidate count\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n seed: { type: \"number\", description: \"Random seed\" },\n responseMimeType: { type: \"string\", description: \"Response MIME type\" },\n responseSchema: { type: \"string\", description: \"Response schema\" },\n },\n mistral: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n random_seed: { type: \"number\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n safe_prompt: { type: \"boolean\", default: false, description: \"Enable safe prompt\" },\n min_tokens: { type: \"number\", min: 0, default: 0, description: \"Minimum tokens\" },\n },\n cohere: {\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling (p)\" },\n k: { type: \"number\", min: 0, max: 500, default: 40, description: \"Top-K sampling (k)\" },\n frequency_penalty: { type: \"number\", min: 0, max: 1, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: 0, max: 1, default: 0, description: \"Penalize repeated topics\" },\n stop_sequences: { 
type: \"string\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n seed: { type: \"number\", description: \"Random seed\" },\n },\n bedrock: {\n // Converse API inferenceConfig params\n temperature: { type: \"number\", min: 0, max: 1, default: 0.7, description: \"Controls randomness\" },\n maxTokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n topP: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n topK: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n stopSequences: { type: \"string\", description: \"Stop sequences\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n cache_control: { type: \"string\", values: [\"ephemeral\"], default: \"ephemeral\", description: \"Cache control\" },\n cache_ttl: { type: \"string\", values: [\"5m\", \"1h\"], default: \"5m\", description: \"Cache TTL\" },\n },\n openrouter: {\n // Loose validation — proxies to many providers with varying ranges\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream 
response\" },\n reasoning_effort: {\n type: \"string\",\n values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"],\n default: \"medium\",\n description: \"Reasoning effort\",\n },\n },\n vercel: {\n // Loose validation — proxies to many providers with varying ranges\n temperature: { type: \"number\", min: 0, max: 2, default: 0.7, description: \"Controls randomness\" },\n max_tokens: { type: \"number\", min: 1, default: 4096, description: \"Maximum output tokens\" },\n top_p: { type: \"number\", min: 0, max: 1, default: 1, description: \"Nucleus sampling\" },\n top_k: { type: \"number\", min: 0, default: 40, description: \"Top-K sampling\" },\n frequency_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize frequent tokens\" },\n presence_penalty: { type: \"number\", min: -2, max: 2, default: 0, description: \"Penalize repeated topics\" },\n stop: { type: \"string\", description: \"Stop sequences\" },\n n: { type: \"number\", min: 1, default: 1, description: \"Completions count\" },\n seed: { type: \"number\", description: \"Random seed\" },\n stream: { type: \"boolean\", default: false, description: \"Stream response\" },\n reasoning_effort: {\n type: \"string\",\n values: [\"none\", \"minimal\", \"low\", \"medium\", \"high\", \"xhigh\"],\n default: \"medium\",\n description: \"Reasoning effort\",\n },\n },\n};\n\n/** OpenAI reasoning models don't support standard sampling params. */\nexport function isReasoningModel(model: string): boolean {\n // Strip gateway prefix: \"openai/o3\" → \"o3\"\n const name = model.includes(\"/\") ? model.split(\"/\").pop()! : model;\n return /^o[134]/.test(name);\n}\n\n/** Providers that can route to OpenAI models (and need reasoning-model checks). 
*/\nexport function canHostOpenAIModels(provider: Provider): boolean {\n return provider === \"openai\" || provider === \"openrouter\" || provider === \"vercel\";\n}\n\n/** Whether this provider is a gateway/router that proxies to other providers. */\nexport function isGatewayProvider(provider: Provider): boolean {\n return provider === \"openrouter\" || provider === \"vercel\";\n}\n\n/**\n * Extract the underlying provider from a gateway model string.\n * e.g. \"anthropic/claude-sonnet-4-5\" → \"anthropic\"\n * Returns undefined for unknown prefixes (qwen, deepseek, etc.) or models without \"/\".\n */\nexport function detectGatewaySubProvider(\n model: string,\n): Provider | undefined {\n const slash = model.indexOf(\"/\");\n if (slash < 1) return undefined;\n const prefix = model.slice(0, slash);\n const direct: Provider[] = [\"openai\", \"anthropic\", \"google\", \"mistral\", \"cohere\"];\n return direct.find((p) => p === prefix);\n}\n\nexport const REASONING_MODEL_UNSUPPORTED = new Set([\n \"temperature\",\n \"top_p\",\n \"frequency_penalty\",\n \"presence_penalty\",\n \"n\",\n]);\n\n/**\n * Bedrock model IDs are prefixed with the vendor name.\n * e.g. \"anthropic.claude-sonnet-4-5-20250929-v1:0\"\n */\nexport type BedrockModelFamily =\n | \"anthropic\"\n | \"meta\"\n | \"amazon\"\n | \"mistral\"\n | \"cohere\"\n | \"ai21\";\n\nexport function detectBedrockModelFamily(\n model: string,\n): BedrockModelFamily | undefined {\n // Handle cross-region inference profiles (e.g. \"us.anthropic.claude-sonnet-4-5...\")\n // and global inference profiles (e.g. 
\"global.anthropic.claude-sonnet-4-5...\")\n const parts = model.split(\".\");\n\n // If first part is a region prefix (us, eu, apac) or global, skip it\n let prefix = parts[0];\n if ([\"us\", \"eu\", \"apac\", \"global\"].includes(prefix) && parts.length > 1) {\n prefix = parts[1];\n }\n\n const families: BedrockModelFamily[] = [\n \"anthropic\",\n \"meta\",\n \"amazon\",\n \"mistral\",\n \"cohere\",\n \"ai21\",\n ];\n return families.find((f) => prefix === f);\n}\n\n/** Whether a Bedrock model supports prompt caching (Claude and Nova only). */\nexport function bedrockSupportsCaching(model: string): boolean {\n const family = detectBedrockModelFamily(model);\n if (family === \"anthropic\") return true;\n if (family === \"amazon\" && model.includes(\"nova\")) return true;\n return false;\n}\n\n/** Cache value normalization per provider. */\nexport const CACHE_VALUES: Record<Provider, string | undefined> = {\n openai: undefined, // OpenAI auto-caches; no explicit param\n anthropic: \"ephemeral\",\n google: undefined, // Google uses explicit caching API, not a param\n mistral: undefined,\n cohere: undefined,\n bedrock: \"ephemeral\", // Supported for Claude models on Bedrock\n openrouter: undefined, // Depends on underlying provider\n vercel: undefined, // Depends on underlying provider\n};\n\n/** Valid cache TTL values per provider. */\nexport const CACHE_TTLS: Record<Provider, string[] | undefined> = {\n openai: undefined,\n anthropic: [\"5m\", \"1h\"],\n google: undefined,\n mistral: undefined,\n cohere: undefined,\n bedrock: [\"5m\", \"1h\"], // Claude on Bedrock uses same TTLs as direct Anthropic\n openrouter: undefined,\n vercel: undefined,\n};\n\n/** Match a duration expression like \"5m\", \"1h\", \"30m\". */\nexport const DURATION_RE = /^\\d+[mh]$/;\n"]}
@@ -0,0 +1,180 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});

// Sucrase runtime helper: equivalent of the `??` operator. Returns `lhs`
// unless it is null/undefined, in which case the lazily evaluated fallback
// `rhsFn()` is returned instead.
function _nullishCoalesce(lhs, rhsFn) {
  return lhs != null ? lhs : rhsFn();
}

// Sucrase runtime helper: interprets a flattened optional-chain expression.
// `ops` is [startValue, opName, fn, opName, fn, ...] where opName is one of
// 'access' | 'optionalAccess' | 'call' | 'optionalCall'. Optional steps
// short-circuit the whole chain to undefined when the current value is
// null/undefined. The receiver of the most recent property access is kept so
// that a following (optional) method call is invoked with the correct `this`.
function _optionalChain(ops) {
  let receiver;                 // `this` for a subsequent call step
  let value = ops[0];
  for (let i = 1; i < ops.length; i += 2) {
    const op = ops[i];
    const step = ops[i + 1];
    if (value == null && (op === 'optionalAccess' || op === 'optionalCall')) {
      return undefined;
    }
    if (op === 'access' || op === 'optionalAccess') {
      receiver = value;
      value = step(value);
    } else { // 'call' or 'optionalCall'
      const callee = value;
      const thisArg = receiver;
      value = step((...args) => callee.call(thisArg, ...args));
      receiver = undefined;    // a call result has no pending receiver
    }
  }
  return value;
}
+
3
+ var _chunkMGWGNZDJcjs = require('./chunk-MGWGNZDJ.cjs');
4
+
5
+
6
+ var _chunkN6NVBE43cjs = require('./chunk-N6NVBE43.cjs');
7
+
8
+
9
+
10
+
11
+
12
+
13
+
14
+
15
+ var _chunkNSCBY4VDcjs = require('./chunk-NSCBY4VD.cjs');
16
+
17
// src/validate.ts

// Build the inverse of a provider's canonical→provider-specific parameter
// name map, so that a provider-specific name can be translated back to its
// canonical form (used when forwarding gateway params to a sub-provider).
function buildReverseParamMap(provider) {
  const entries = Object.entries(_chunkNSCBY4VDcjs.PROVIDER_PARAMS[provider]);
  return Object.fromEntries(
    entries.map(([canonical, specific]) => [specific, canonical])
  );
}
27
// Resolve the param spec that applies when a gateway param is forwarded to
// the underlying sub-provider: map the gateway-specific name back to its
// canonical form, translate that into the sub-provider's own param name, and
// look up the spec for it. `spec` is undefined whenever a step has no mapping.
function lookupSubProviderSpec(gatewayParamName, gatewayReverseMap, subProvider) {
  const reversed = gatewayReverseMap[gatewayParamName];
  const canonical = reversed != null ? reversed : gatewayParamName;
  const subParams = _chunkNSCBY4VDcjs.PROVIDER_PARAMS[subProvider];
  const subProviderKey = subParams == null ? undefined : subParams[canonical];
  if (!subProviderKey) return { spec: undefined, canonical };
  const subSpecs = _chunkNSCBY4VDcjs.PARAM_SPECS[subProvider];
  return { spec: subSpecs == null ? undefined : subSpecs[subProviderKey], canonical };
}
33
// Compute the set of gateway-specific param names that remain meaningful when
// the request is ultimately served by `subProvider`: a gateway param counts
// as "known" only if its canonical form also exists in the sub-provider's
// own param map.
function buildSubProviderKnownParams(gateway, subProvider) {
  const subCanonicals = new Set(
    Object.keys(_chunkNSCBY4VDcjs.PROVIDER_PARAMS[subProvider])
  );
  const known = new Set();
  Object.entries(_chunkNSCBY4VDcjs.PROVIDER_PARAMS[gateway]).forEach(
    ([canonical, gatewaySpecific]) => {
      if (subCanonicals.has(canonical)) known.add(gatewaySpecific);
    }
  );
  return known;
}
47
// Validate a connection string's query params against the resolved
// provider's parameter specs. Returns a list of issues, each shaped as
// { param, value, message, severity } with severity "error" | "warning".
// Unknown hosts/params are warnings unless `options.strict` upgrades them to
// errors; type, range, and compatibility violations are always errors.
function validate(connectionString, options = {}) {
  // Bare strict-mode calls on destructured bindings invoke with
  // `this === undefined`, matching the transpiler's `.call(void 0, ...)`.
  const { parse } = _chunkN6NVBE43cjs;
  const { normalize } = _chunkMGWGNZDJcjs;
  const {
    PROVIDER_PARAMS,
    PARAM_SPECS,
    REASONING_MODEL_UNSUPPORTED,
    canHostOpenAIModels,
    isReasoningModel,
    detectBedrockModelFamily,
    bedrockSupportsCaching
  } = _chunkNSCBY4VDcjs;

  const { config, provider, subProvider } = normalize(parse(connectionString));
  const issues = [];
  const report = (param, value, message, severity) => {
    issues.push({ param, value, message, severity });
  };

  // Without a recognized provider there are no specs to check against.
  if (!provider) {
    report(
      "host",
      config.host,
      `Unknown provider for host "${config.host}". Validation skipped.`,
      options.strict ? "error" : "warning"
    );
    return issues;
  }

  // For gateways (openrouter/vercel) validate against the sub-provider's specs.
  const effectiveProvider = subProvider != null ? subProvider : provider;
  const specs = PARAM_SPECS[effectiveProvider];
  const gatewayReverseMap = subProvider ? buildReverseParamMap(provider) : undefined;
  const knownParams = subProvider
    ? buildSubProviderKnownParams(provider, subProvider)
    : new Set(Object.values(PROVIDER_PARAMS[provider]));
  // Vendor family only matters on Bedrock; the model string is loop-invariant,
  // so resolve it once (detectBedrockModelFamily is a pure string lookup).
  const bedrockFamily =
    provider === "bedrock" ? detectBedrockModelFamily(config.model) : undefined;

  for (const [key, value] of Object.entries(config.params)) {
    // OpenAI reasoning models reject the classic sampling params outright.
    if (
      canHostOpenAIModels(provider) &&
      isReasoningModel(config.model) &&
      REASONING_MODEL_UNSUPPORTED.has(key)
    ) {
      report(key, value, `"${key}" is not supported by OpenAI reasoning model "${config.model}". Use "reasoning_effort" instead of temperature for controlling output.`, "error");
      continue;
    }

    if (provider === "bedrock") {
      // topK is only honored by the Anthropic/Cohere/Mistral families.
      if (
        key === "topK" &&
        bedrockFamily &&
        bedrockFamily !== "anthropic" &&
        bedrockFamily !== "cohere" &&
        bedrockFamily !== "mistral"
      ) {
        report(key, value, `"topK" is not supported by ${bedrockFamily} models on Bedrock.`, "error");
        continue;
      }
      if (key === "cache_control" && !bedrockSupportsCaching(config.model)) {
        report(key, value, `Prompt caching is only supported for Anthropic Claude and Amazon Nova models on Bedrock, not ${bedrockFamily != null ? bedrockFamily : "unknown"} models.`, "error");
        continue;
      }
    }

    // A param must be recognized either under the (gateway) provider's names
    // or directly in the effective provider's specs.
    if (!knownParams.has(key) && !specs[key]) {
      report(key, value, `Unknown param "${key}" for ${effectiveProvider}.`, options.strict ? "error" : "warning");
      continue;
    }

    let spec = specs[key];
    if (!spec && subProvider && gatewayReverseMap) {
      // Translate the gateway param name into the sub-provider's spec.
      spec = lookupSubProviderSpec(key, gatewayReverseMap, subProvider).spec;
    }
    if (!spec) continue;

    // Anthropic models accept temperature OR top_p/topP, never both; the
    // conflict is reported once, from the temperature side only.
    const targetsAnthropic =
      effectiveProvider === "anthropic" ||
      (provider === "bedrock" && bedrockFamily === "anthropic");
    if (targetsAnthropic && (key === "temperature" || key === "top_p" || key === "topP")) {
      const counterpart =
        key === "temperature" ? (provider === "bedrock" ? "topP" : "top_p") : "temperature";
      if (key === "temperature" && config.params[counterpart] !== undefined) {
        report(key, value, `Cannot specify both "temperature" and "${counterpart}" for Anthropic models.`, "error");
      }
    }

    // `spec.type` is a single value, so the checks are mutually exclusive.
    if (spec.type === "number") {
      const num = Number(value);
      if (Number.isNaN(num)) {
        report(key, value, `"${key}" should be a number, got "${value}".`, "error");
        continue;
      }
      if (spec.min !== undefined && num < spec.min) {
        report(key, value, `"${key}" must be >= ${spec.min}, got ${num}.`, "error");
      }
      if (spec.max !== undefined && num > spec.max) {
        report(key, value, `"${key}" must be <= ${spec.max}, got ${num}.`, "error");
      }
    } else if (spec.type === "boolean") {
      if (!["true", "false", "0", "1"].includes(value)) {
        report(key, value, `"${key}" should be a boolean (true/false), got "${value}".`, "error");
      }
    } else if (spec.type === "string" && spec.values) {
      if (!spec.values.includes(value)) {
        report(key, value, `"${key}" must be one of [${spec.values.join(", ")}], got "${value}".`, "error");
      }
    }
  }
  return issues;
}
176
+
177
+
178
+
179
+ exports.validate = validate;
180
+ //# sourceMappingURL=chunk-RSUXM42X.cjs.map