@dexto/core 1.5.0 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dist/agent/schemas.d.ts +48 -0
  2. package/dist/agent/schemas.d.ts.map +1 -1
  3. package/dist/events/index.cjs +4 -1
  4. package/dist/events/index.d.ts +20 -4
  5. package/dist/events/index.d.ts.map +1 -1
  6. package/dist/events/index.js +3 -1
  7. package/dist/llm/executor/provider-options.cjs +87 -0
  8. package/dist/llm/executor/provider-options.d.ts +49 -0
  9. package/dist/llm/executor/provider-options.d.ts.map +1 -0
  10. package/dist/llm/executor/provider-options.js +63 -0
  11. package/dist/llm/executor/stream-processor.cjs +11 -8
  12. package/dist/llm/executor/stream-processor.d.ts.map +1 -1
  13. package/dist/llm/executor/stream-processor.js +11 -8
  14. package/dist/llm/executor/turn-executor.cjs +10 -0
  15. package/dist/llm/executor/turn-executor.d.ts +1 -0
  16. package/dist/llm/executor/turn-executor.d.ts.map +1 -1
  17. package/dist/llm/executor/turn-executor.js +10 -0
  18. package/dist/llm/formatters/vercel.cjs +9 -1
  19. package/dist/llm/formatters/vercel.d.ts.map +1 -1
  20. package/dist/llm/formatters/vercel.js +9 -1
  21. package/dist/llm/registry.cjs +69 -0
  22. package/dist/llm/registry.d.ts +9 -0
  23. package/dist/llm/registry.d.ts.map +1 -1
  24. package/dist/llm/registry.js +68 -0
  25. package/dist/llm/schemas.cjs +17 -1
  26. package/dist/llm/schemas.d.ts +23 -0
  27. package/dist/llm/schemas.d.ts.map +1 -1
  28. package/dist/llm/schemas.js +17 -1
  29. package/dist/llm/services/vercel.cjs +3 -1
  30. package/dist/llm/services/vercel.d.ts.map +1 -1
  31. package/dist/llm/services/vercel.js +3 -1
  32. package/dist/logger/logger.cjs +7 -3
  33. package/dist/logger/logger.d.ts.map +1 -1
  34. package/dist/logger/logger.js +7 -3
  35. package/dist/memory/schemas.d.ts +2 -2
  36. package/dist/providers/discovery.cjs +14 -0
  37. package/dist/providers/discovery.d.ts +4 -2
  38. package/dist/providers/discovery.d.ts.map +1 -1
  39. package/dist/providers/discovery.js +14 -0
  40. package/dist/session/history/database.cjs +49 -15
  41. package/dist/session/history/database.d.ts.map +1 -1
  42. package/dist/session/history/database.js +49 -15
  43. package/dist/session/session-manager.cjs +2 -1
  44. package/dist/session/session-manager.d.ts.map +1 -1
  45. package/dist/session/session-manager.js +2 -1
  46. package/dist/storage/database/postgres-store.cjs +174 -78
  47. package/dist/storage/database/postgres-store.d.ts +19 -0
  48. package/dist/storage/database/postgres-store.d.ts.map +1 -1
  49. package/dist/storage/database/postgres-store.js +174 -78
  50. package/dist/storage/database/schemas.cjs +4 -1
  51. package/dist/storage/database/schemas.d.ts +8 -0
  52. package/dist/storage/database/schemas.d.ts.map +1 -1
  53. package/dist/storage/database/schemas.js +4 -1
  54. package/dist/storage/schemas.d.ts +7 -0
  55. package/dist/storage/schemas.d.ts.map +1 -1
  56. package/dist/tools/custom-tool-registry.d.ts +9 -3
  57. package/dist/tools/custom-tool-registry.d.ts.map +1 -1
  58. package/dist/tools/internal-tools/provider.cjs +5 -2
  59. package/dist/tools/internal-tools/provider.d.ts.map +1 -1
  60. package/dist/tools/internal-tools/provider.js +5 -2
  61. package/package.json +1 -1
package/dist/llm/executor/provider-options.cjs
@@ -0,0 +1,87 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var provider_options_exports = {};
+ __export(provider_options_exports, {
+ buildProviderOptions: () => buildProviderOptions,
+ getDefaultReasoningEffort: () => getDefaultReasoningEffort
+ });
+ module.exports = __toCommonJS(provider_options_exports);
+ var import_registry = require("../registry.js");
+ function buildProviderOptions(config) {
+ const { provider, model, reasoningEffort } = config;
+ const modelLower = model.toLowerCase();
+ if (provider === "anthropic") {
+ return {
+ anthropic: {
+ // Enable prompt caching - saves money and improves latency
+ cacheControl: { type: "ephemeral" },
+ // Stream reasoning/thinking content when model supports it
+ sendReasoning: true
+ }
+ };
+ }
+ if (provider === "bedrock" && modelLower.includes("claude")) {
+ return {
+ bedrock: {
+ cacheControl: { type: "ephemeral" },
+ sendReasoning: true
+ }
+ };
+ }
+ if (provider === "vertex" && modelLower.includes("claude")) {
+ return {
+ "vertex-anthropic": {
+ cacheControl: { type: "ephemeral" },
+ sendReasoning: true
+ }
+ };
+ }
+ if (provider === "google" || provider === "vertex" && !modelLower.includes("claude")) {
+ return {
+ google: {
+ thinkingConfig: {
+ // Include thoughts in the response for transparency
+ includeThoughts: true
+ }
+ }
+ };
+ }
+ if (provider === "openai") {
+ const effectiveEffort = reasoningEffort ?? getDefaultReasoningEffort(model);
+ if (effectiveEffort) {
+ return {
+ openai: {
+ reasoningEffort: effectiveEffort
+ }
+ };
+ }
+ }
+ return void 0;
+ }
+ function getDefaultReasoningEffort(model) {
+ if ((0, import_registry.isReasoningCapableModel)(model)) {
+ return "medium";
+ }
+ return void 0;
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ buildProviderOptions,
+ getDefaultReasoningEffort
+ });
package/dist/llm/executor/provider-options.d.ts
@@ -0,0 +1,49 @@
+ /**
+ * Provider-specific options builder for Vercel AI SDK's streamText/generateText.
+ *
+ * Centralizes provider-specific configuration that requires explicit opt-in:
+ * - Anthropic: cacheControl for prompt caching, sendReasoning for extended thinking
+ * - Bedrock/Vertex Claude: Same as Anthropic (Claude models on these platforms)
+ * - Google: thinkingConfig for Gemini thinking models
+ * - OpenAI: reasoningEffort for o1/o3/codex/gpt-5 models
+ *
+ * Caching notes:
+ * - Anthropic: Requires explicit cacheControl option (we enable it)
+ * - OpenAI: Automatic for prompts ≥1024 tokens (no config needed)
+ * - Google: Implicit caching automatic for Gemini 2.5+ (≥1024 tokens for Flash,
+ * ≥2048 for Pro). Explicit caching requires pre-created cachedContent IDs.
+ * All providers return cached token counts in the response (cachedInputTokens).
+ */
+ import type { LLMProvider } from '../types.js';
+ export type ReasoningEffort = 'none' | 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';
+ export interface ProviderOptionsConfig {
+ provider: LLMProvider;
+ model: string;
+ reasoningEffort?: ReasoningEffort | undefined;
+ }
+ /**
+ * Build provider-specific options for streamText/generateText.
+ *
+ * @param config Provider, model, and optional reasoning effort configuration
+ * @returns Provider options object or undefined if no special options needed
+ */
+ export declare function buildProviderOptions(config: ProviderOptionsConfig): Record<string, Record<string, unknown>> | undefined;
+ /**
+ * Determine the default reasoning effort for OpenAI models.
+ *
+ * OpenAI reasoning effort levels (from lowest to highest):
+ * - 'none': No reasoning, fastest responses
+ * - 'low': Minimal reasoning, fast responses
+ * - 'medium': Balanced reasoning (OpenAI's recommended daily driver)
+ * - 'high': Thorough reasoning for complex tasks
+ * - 'xhigh': Extra high reasoning for quality-critical, non-latency-sensitive tasks
+ *
+ * Default strategy:
+ * - Reasoning-capable models (codex, o1, o3, gpt-5): 'medium' - OpenAI's recommended default
+ * - Other models: undefined (no reasoning effort needed)
+ *
+ * @param model The model name
+ * @returns Reasoning effort level or undefined if not applicable
+ */
+ export declare function getDefaultReasoningEffort(model: string): Exclude<ReasoningEffort, 'none'> | undefined;
+ //# sourceMappingURL=provider-options.d.ts.map
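
For orientation, a short usage sketch of the new helper (relative import as in the dist sources above; the model strings are illustrative examples only):

import { buildProviderOptions } from './provider-options.js';

// Anthropic: opts in to ephemeral prompt caching and reasoning streaming.
buildProviderOptions({ provider: 'anthropic', model: 'claude-sonnet-4-5' });
// -> { anthropic: { cacheControl: { type: 'ephemeral' }, sendReasoning: true } }

// OpenAI reasoning-capable model with no explicit effort: defaults to 'medium'.
buildProviderOptions({ provider: 'openai', model: 'gpt-5.2' });
// -> { openai: { reasoningEffort: 'medium' } }

// Model with no special requirements: nothing to pass through.
buildProviderOptions({ provider: 'openai', model: 'gpt-4.1-mini' });
// -> undefined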
package/dist/llm/executor/provider-options.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"provider-options.d.ts","sourceRoot":"","sources":["../../../src/llm/executor/provider-options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AAEH,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAG/C,MAAM,MAAM,eAAe,GAAG,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;AAEvF,MAAM,WAAW,qBAAqB;IAClC,QAAQ,EAAE,WAAW,CAAC;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,eAAe,CAAC,EAAE,eAAe,GAAG,SAAS,CAAC;CACjD;AAED;;;;;GAKG;AACH,wBAAgB,oBAAoB,CAChC,MAAM,EAAE,qBAAqB,GAC9B,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,GAAG,SAAS,CAgErD;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,yBAAyB,CACrC,KAAK,EAAE,MAAM,GACd,OAAO,CAAC,eAAe,EAAE,MAAM,CAAC,GAAG,SAAS,CAS9C"}
package/dist/llm/executor/provider-options.js
@@ -0,0 +1,63 @@
+ import "../../chunk-PTJYTZNU.js";
+ import { isReasoningCapableModel } from "../registry.js";
+ function buildProviderOptions(config) {
+ const { provider, model, reasoningEffort } = config;
+ const modelLower = model.toLowerCase();
+ if (provider === "anthropic") {
+ return {
+ anthropic: {
+ // Enable prompt caching - saves money and improves latency
+ cacheControl: { type: "ephemeral" },
+ // Stream reasoning/thinking content when model supports it
+ sendReasoning: true
+ }
+ };
+ }
+ if (provider === "bedrock" && modelLower.includes("claude")) {
+ return {
+ bedrock: {
+ cacheControl: { type: "ephemeral" },
+ sendReasoning: true
+ }
+ };
+ }
+ if (provider === "vertex" && modelLower.includes("claude")) {
+ return {
+ "vertex-anthropic": {
+ cacheControl: { type: "ephemeral" },
+ sendReasoning: true
+ }
+ };
+ }
+ if (provider === "google" || provider === "vertex" && !modelLower.includes("claude")) {
+ return {
+ google: {
+ thinkingConfig: {
+ // Include thoughts in the response for transparency
+ includeThoughts: true
+ }
+ }
+ };
+ }
+ if (provider === "openai") {
+ const effectiveEffort = reasoningEffort ?? getDefaultReasoningEffort(model);
+ if (effectiveEffort) {
+ return {
+ openai: {
+ reasoningEffort: effectiveEffort
+ }
+ };
+ }
+ }
+ return void 0;
+ }
+ function getDefaultReasoningEffort(model) {
+ if (isReasoningCapableModel(model)) {
+ return "medium";
+ }
+ return void 0;
+ }
+ export {
+ buildProviderOptions,
+ getDefaultReasoningEffort
+ };
package/dist/llm/executor/stream-processor.cjs
@@ -158,8 +158,10 @@ class StreamProcessor {
  }
  case "finish-step":
  if (event.usage) {
- const cacheWriteTokens = event.providerMetadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? // @ts-expect-error - Bedrock metadata typing not in Vercel SDK
- event.providerMetadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? 0;
+ const anthropicMeta = event.providerMetadata?.["anthropic"];
+ const bedrockMeta = event.providerMetadata?.["bedrock"];
+ const cacheWriteTokens = anthropicMeta?.["cacheCreationInputTokens"] ?? bedrockMeta?.usage?.["cacheWriteInputTokens"] ?? 0;
+ const cacheReadTokens = anthropicMeta?.["cacheReadInputTokens"] ?? bedrockMeta?.usage?.["cacheReadInputTokens"] ?? event.usage.cachedInputTokens ?? 0;
  this.actualTokens = {
  inputTokens: (this.actualTokens.inputTokens ?? 0) + (event.usage.inputTokens ?? 0),
  outputTokens: (this.actualTokens.outputTokens ?? 0) + (event.usage.outputTokens ?? 0),
@@ -168,16 +170,17 @@
  reasoningTokens: (this.actualTokens.reasoningTokens ?? 0) + event.usage.reasoningTokens
  },
  // Cache tokens
- cacheReadTokens: (this.actualTokens.cacheReadTokens ?? 0) + (event.usage.cachedInputTokens ?? 0),
+ cacheReadTokens: (this.actualTokens.cacheReadTokens ?? 0) + cacheReadTokens,
  cacheWriteTokens: (this.actualTokens.cacheWriteTokens ?? 0) + cacheWriteTokens
  };
  }
  break;
  case "finish": {
  this.finishReason = event.finishReason;
- const cachedInputTokens = event.totalUsage.cachedInputTokens ?? 0;
+ const cacheReadTokens = this.actualTokens.cacheReadTokens ?? event.totalUsage.cachedInputTokens ?? 0;
+ const cacheWriteTokens = this.actualTokens.cacheWriteTokens ?? 0;
  const providerExcludesCached = this.config.provider === "anthropic" || this.config.provider === "bedrock";
- const adjustedInputTokens = providerExcludesCached ? event.totalUsage.inputTokens ?? 0 : (event.totalUsage.inputTokens ?? 0) - cachedInputTokens;
+ const adjustedInputTokens = providerExcludesCached ? event.totalUsage.inputTokens ?? 0 : (event.totalUsage.inputTokens ?? 0) - cacheReadTokens;
  const usage = {
  inputTokens: adjustedInputTokens,
  outputTokens: event.totalUsage.outputTokens ?? 0,
@@ -186,9 +189,9 @@
  ...event.totalUsage.reasoningTokens !== void 0 && {
  reasoningTokens: event.totalUsage.reasoningTokens
  },
- // Cache tokens - read from totalUsage, write from accumulated finish-step events
- cacheReadTokens: cachedInputTokens,
- cacheWriteTokens: this.actualTokens.cacheWriteTokens ?? 0
+ // Cache tokens from accumulated finish-step events or totalUsage fallback
+ cacheReadTokens,
+ cacheWriteTokens
  };
  this.actualTokens = usage;
  this.logger.info("LLM response complete", {
package/dist/llm/executor/stream-processor.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"stream-processor.d.ts","sourceRoot":"","sources":["../../../src/llm/executor/stream-processor.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,OAAO,IAAI,aAAa,EAAE,MAAM,IAAI,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAC1D,OAAO,EAAE,eAAe,EAAmB,MAAM,uBAAuB,CAAC;AACzE,OAAO,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAE3D,OAAO,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAC;AAGnD,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAExD,OAAO,EAAE,WAAW,EAAc,MAAM,aAAa,CAAC;AAEtD,MAAM,WAAW,qBAAqB;IAClC,QAAQ,EAAE,WAAW,CAAC;IACtB,KAAK,EAAE,MAAM,CAAC;CACjB;AAED,qBAAa,eAAe;IAwBpB,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,eAAe;IACvB,OAAO,CAAC,WAAW;IACnB,OAAO,CAAC,MAAM;IAEd,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,gBAAgB,CAAC;IA9B7B,OAAO,CAAC,kBAAkB,CAAuB;IACjD,OAAO,CAAC,YAAY,CAAmE;IACvF,OAAO,CAAC,YAAY,CAA8B;IAClD,OAAO,CAAC,aAAa,CAAc;IACnC,OAAO,CAAC,eAAe,CAAc;IACrC,OAAO,CAAC,MAAM,CAAe;IAC7B;;;OAGG;IACH,OAAO,CAAC,gBAAgB,CAAgD;IAExE;;;;;;;;;OASG;gBAES,cAAc,EAAE,cAAc,EAC9B,QAAQ,EAAE,eAAe,EACzB,eAAe,EAAE,eAAe,EAChC,WAAW,EAAE,WAAW,EACxB,MAAM,EAAE,qBAAqB,EACrC,MAAM,EAAE,YAAY,EACZ,SAAS,GAAE,OAAc,EACzB,gBAAgB,CAAC,EAAE,GAAG,CAC1B,MAAM,EACN;QAAE,eAAe,EAAE,OAAO,CAAC;QAAC,cAAc,CAAC,EAAE,UAAU,GAAG,UAAU,CAAA;KAAE,CACzE,YAAA;IAKC,OAAO,CACT,QAAQ,EAAE,MAAM,gBAAgB,CAAC,aAAa,EAAE,OAAO,CAAC,GACzD,OAAO,CAAC,qBAAqB,CAAC;YAoZnB,sBAAsB;YAKtB,gBAAgB;IAO9B;;;;OAIG;YACW,2BAA2B;CAmC5C"}
+ {"version":3,"file":"stream-processor.d.ts","sourceRoot":"","sources":["../../../src/llm/executor/stream-processor.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,OAAO,IAAI,aAAa,EAAE,MAAM,IAAI,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAC1D,OAAO,EAAE,eAAe,EAAmB,MAAM,uBAAuB,CAAC;AACzE,OAAO,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAE3D,OAAO,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAC;AAGnD,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAExD,OAAO,EAAE,WAAW,EAAc,MAAM,aAAa,CAAC;AAEtD,MAAM,WAAW,qBAAqB;IAClC,QAAQ,EAAE,WAAW,CAAC;IACtB,KAAK,EAAE,MAAM,CAAC;CACjB;AAED,qBAAa,eAAe;IAwBpB,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,eAAe;IACvB,OAAO,CAAC,WAAW;IACnB,OAAO,CAAC,MAAM;IAEd,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,gBAAgB,CAAC;IA9B7B,OAAO,CAAC,kBAAkB,CAAuB;IACjD,OAAO,CAAC,YAAY,CAAmE;IACvF,OAAO,CAAC,YAAY,CAA8B;IAClD,OAAO,CAAC,aAAa,CAAc;IACnC,OAAO,CAAC,eAAe,CAAc;IACrC,OAAO,CAAC,MAAM,CAAe;IAC7B;;;OAGG;IACH,OAAO,CAAC,gBAAgB,CAAgD;IAExE;;;;;;;;;OASG;gBAES,cAAc,EAAE,cAAc,EAC9B,QAAQ,EAAE,eAAe,EACzB,eAAe,EAAE,eAAe,EAChC,WAAW,EAAE,WAAW,EACxB,MAAM,EAAE,qBAAqB,EACrC,MAAM,EAAE,YAAY,EACZ,SAAS,GAAE,OAAc,EACzB,gBAAgB,CAAC,EAAE,GAAG,CAC1B,MAAM,EACN;QAAE,eAAe,EAAE,OAAO,CAAC;QAAC,cAAc,CAAC,EAAE,UAAU,GAAG,UAAU,CAAA;KAAE,CACzE,YAAA;IAKC,OAAO,CACT,QAAQ,EAAE,MAAM,gBAAgB,CAAC,aAAa,EAAE,OAAO,CAAC,GACzD,OAAO,CAAC,qBAAqB,CAAC;YAmanB,sBAAsB;YAKtB,gBAAgB;IAO9B;;;;OAIG;YACW,2BAA2B;CAmC5C"}
package/dist/llm/executor/stream-processor.js
@@ -136,8 +136,10 @@ class StreamProcessor {
  }
  case "finish-step":
  if (event.usage) {
- const cacheWriteTokens = event.providerMetadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? // @ts-expect-error - Bedrock metadata typing not in Vercel SDK
- event.providerMetadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? 0;
+ const anthropicMeta = event.providerMetadata?.["anthropic"];
+ const bedrockMeta = event.providerMetadata?.["bedrock"];
+ const cacheWriteTokens = anthropicMeta?.["cacheCreationInputTokens"] ?? bedrockMeta?.usage?.["cacheWriteInputTokens"] ?? 0;
+ const cacheReadTokens = anthropicMeta?.["cacheReadInputTokens"] ?? bedrockMeta?.usage?.["cacheReadInputTokens"] ?? event.usage.cachedInputTokens ?? 0;
  this.actualTokens = {
  inputTokens: (this.actualTokens.inputTokens ?? 0) + (event.usage.inputTokens ?? 0),
  outputTokens: (this.actualTokens.outputTokens ?? 0) + (event.usage.outputTokens ?? 0),
@@ -146,16 +148,17 @@
  reasoningTokens: (this.actualTokens.reasoningTokens ?? 0) + event.usage.reasoningTokens
  },
  // Cache tokens
- cacheReadTokens: (this.actualTokens.cacheReadTokens ?? 0) + (event.usage.cachedInputTokens ?? 0),
+ cacheReadTokens: (this.actualTokens.cacheReadTokens ?? 0) + cacheReadTokens,
  cacheWriteTokens: (this.actualTokens.cacheWriteTokens ?? 0) + cacheWriteTokens
  };
  }
  break;
  case "finish": {
  this.finishReason = event.finishReason;
- const cachedInputTokens = event.totalUsage.cachedInputTokens ?? 0;
+ const cacheReadTokens = this.actualTokens.cacheReadTokens ?? event.totalUsage.cachedInputTokens ?? 0;
+ const cacheWriteTokens = this.actualTokens.cacheWriteTokens ?? 0;
  const providerExcludesCached = this.config.provider === "anthropic" || this.config.provider === "bedrock";
- const adjustedInputTokens = providerExcludesCached ? event.totalUsage.inputTokens ?? 0 : (event.totalUsage.inputTokens ?? 0) - cachedInputTokens;
+ const adjustedInputTokens = providerExcludesCached ? event.totalUsage.inputTokens ?? 0 : (event.totalUsage.inputTokens ?? 0) - cacheReadTokens;
  const usage = {
  inputTokens: adjustedInputTokens,
  outputTokens: event.totalUsage.outputTokens ?? 0,
@@ -164,9 +167,9 @@
  ...event.totalUsage.reasoningTokens !== void 0 && {
  reasoningTokens: event.totalUsage.reasoningTokens
  },
- // Cache tokens - read from totalUsage, write from accumulated finish-step events
- cacheReadTokens: cachedInputTokens,
- cacheWriteTokens: this.actualTokens.cacheWriteTokens ?? 0
+ // Cache tokens from accumulated finish-step events or totalUsage fallback
+ cacheReadTokens,
+ cacheWriteTokens
  };
  this.actualTokens = usage;
  this.logger.info("LLM response complete", {
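
To make the token-accounting change concrete: cache reads and writes are now taken from provider metadata when present (Anthropic directly, Bedrock under usage) and only fall back to the SDK's generic cachedInputTokens field. A standalone sketch of that fallback order, using made-up metadata shapes and numbers rather than real SDK output:

type ProviderMetadata = Record<string, Record<string, unknown>> | undefined;

function cacheTokensFromStep(
    providerMetadata: ProviderMetadata,
    cachedInputTokens: number | undefined
): { cacheReadTokens: number; cacheWriteTokens: number } {
    const anthropicMeta = providerMetadata?.['anthropic'];
    const bedrockUsage = providerMetadata?.['bedrock']?.['usage'] as
        | Record<string, number>
        | undefined;
    // Cache writes are only reported via provider metadata (Anthropic/Bedrock).
    const cacheWriteTokens =
        (anthropicMeta?.['cacheCreationInputTokens'] as number | undefined) ??
        bedrockUsage?.['cacheWriteInputTokens'] ??
        0;
    // Cache reads prefer provider metadata, then the generic cachedInputTokens field.
    const cacheReadTokens =
        (anthropicMeta?.['cacheReadInputTokens'] as number | undefined) ??
        bedrockUsage?.['cacheReadInputTokens'] ??
        cachedInputTokens ??
        0;
    return { cacheReadTokens, cacheWriteTokens };
}

// Made-up example: Anthropic metadata takes precedence over cachedInputTokens.
cacheTokensFromStep({ anthropic: { cacheReadInputTokens: 900, cacheCreationInputTokens: 120 } }, 0);
// -> { cacheReadTokens: 900, cacheWriteTokens: 120 }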
package/dist/llm/executor/turn-executor.cjs
@@ -79,6 +79,7 @@ module.exports = __toCommonJS(turn_executor_exports);
  var import_ai = require("ai");
  var import_api = require("@opentelemetry/api");
  var import_stream_processor = require("./stream-processor.js");
+ var import_provider_options = require("./provider-options.js");
  var import_types4 = require("../../logger/v2/types.js");
  var import_defer = require("../../utils/defer.js");
  var import_DextoRuntimeError = require("../../errors/DextoRuntimeError.js");
@@ -203,6 +204,11 @@ class TurnExecutor {
  streaming,
  this.approvalMetadata
  );
+ const providerOptions = (0, import_provider_options.buildProviderOptions)({
+ provider: this.llmContext.provider,
+ model: this.llmContext.model,
+ reasoningEffort: this.config.reasoningEffort
+ });
  const result = await streamProcessor.process(
  () => (0, import_ai.streamText)({
  model: this.model,
@@ -216,6 +222,10 @@
  ...this.config.temperature !== void 0 && {
  temperature: this.config.temperature
  },
+ // Provider-specific options (caching, reasoning, etc.)
+ ...providerOptions !== void 0 && {
+ providerOptions
+ },
  // Log stream-level errors (tool errors, API errors during streaming)
  onError: (error) => {
  this.logger.error("Stream error", { error });
package/dist/llm/executor/turn-executor.d.ts
@@ -51,6 +51,7 @@ export declare class TurnExecutor {
  maxOutputTokens?: number | undefined;
  temperature?: number | undefined;
  baseURL?: string | undefined;
+ reasoningEffort?: 'none' | 'minimal' | 'low' | 'medium' | 'high' | 'xhigh' | undefined;
  }, llmContext: LLMContext, logger: IDextoLogger, messageQueue: MessageQueueService, modelLimits?: ModelLimits | undefined, externalSignal?: AbortSignal | undefined, compactionStrategy?: ICompactionStrategy | null);
  /**
  * Get StreamProcessor config from TurnExecutor state.
package/dist/llm/executor/turn-executor.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"turn-executor.d.ts","sourceRoot":"","sources":["../../../src/llm/executor/turn-executor.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,aAAa,EAMb,KAAK,YAAY,EAEpB,MAAM,IAAI,CAAC;AAEZ,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAE1D,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAG1D,OAAO,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAE5C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAE7D,OAAO,KAAK,EAAE,eAAe,EAAmB,MAAM,uBAAuB,CAAC;AAC9E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,yBAAyB,EAAE,MAAM,6BAA6B,CAAC;AACxE,OAAO,EAAE,UAAU,EAAoB,MAAM,aAAa,CAAC;AAC3D,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AAQ1E,OAAO,EAAc,KAAK,WAAW,EAAE,MAAM,sCAAsC,CAAC;AAEpF,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AAc7E;;;;;;;;;;;GAWG;AACH,qBAAa,YAAY;IAkBjB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,WAAW;IACnB,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,eAAe;IACvB,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,MAAM;IAMd,OAAO,CAAC,UAAU;IAElB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,WAAW,CAAC;IACpB,OAAO,CAAC,cAAc,CAAC;IAjC3B,OAAO,CAAC,MAAM,CAAe;IAC7B;;;OAGG;IACH,OAAO,CAAC,mBAAmB,CAAkB;IAC7C,OAAO,CAAC,kBAAkB,CAAoC;IAC9D;;;OAGG;IACH,OAAO,CAAC,gBAAgB,CAGpB;gBAGQ,KAAK,EAAE,aAAa,EACpB,WAAW,EAAE,WAAW,EACxB,cAAc,EAAE,cAAc,CAAC,YAAY,CAAC,EAC5C,QAAQ,EAAE,eAAe,EACzB,eAAe,EAAE,eAAe,EAChC,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE;QACZ,QAAQ,EAAE,MAAM,CAAC;QACjB,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QACrC,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QACjC,OAAO,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;KAChC,EACO,UAAU,EAAE,UAAU,EAC9B,MAAM,EAAE,YAAY,EACZ,YAAY,EAAE,mBAAmB,EACjC,WAAW,CAAC,EAAE,WAAW,YAAA,EACzB,cAAc,CAAC,EAAE,WAAW,YAAA,EACpC,kBAAkB,CAAC,EAAE,mBAAmB,GAAG,IAAI;IAqBnD;;OAEG;IACH,OAAO,CAAC,wBAAwB;IAOhC;;;;;;OAMG;IACG,OAAO,CACT,kBAAkB,EAAE,yBAAyB,EAC7C,SAAS,GAAE,OAAc,GAC1B,OAAO,CAAC,cAAc,CAAC;IA0M1B;;;OAGG;IACH,KAAK,IAAI,IAAI;IAIb;;;OAGG;YACW,oBAAoB;IAmBlC;;;;;;OAMG;YACW,mBAAmB;IA0EjC;;;;;;;OAOG;YACW,WAAW;IA0GzB;;;;;OAKG;IACH,OAAO,CAAC,sBAAsB;IAuF9B;;OAEG;IACH,OAAO,CAAC,gBAAgB;IAyBxB;;OAEG;IACH,OAAO,CAAC,eAAe;IAavB;;OAEG;IACH,OAAO,CAAC,oBAAoB;IAS5B;;OAEG;IACH,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAAU;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAAU;IAE/C;;;;;;;;;;;OAWG;YACW,mBAAmB;IAkDjC;;;;;;OAMG;IACH,OAAO,CAAC,kBAAkB;IAgB1B;;;OAGG;IACH,OAAO,CAAC,OAAO;IAYf;;OAEG;IACH,OAAO,CAAC,sBAAsB;IAO9B;;;;;;;;OAQG;YACW,QAAQ;IA8CtB;;OAEG;IACH,OAAO,CAAC,sBAAsB;IAoB9B;;OAEG;IACH,OAAO,CAAC,gBAAgB;CA0D3B"}
+ {"version":3,"file":"turn-executor.d.ts","sourceRoot":"","sources":["../../../src/llm/executor/turn-executor.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,aAAa,EAMb,KAAK,YAAY,EAEpB,MAAM,IAAI,CAAC;AAEZ,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAE1D,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAG1D,OAAO,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAG5C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAE7D,OAAO,KAAK,EAAE,eAAe,EAAmB,MAAM,uBAAuB,CAAC;AAC9E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EAAE,yBAAyB,EAAE,MAAM,6BAA6B,CAAC;AACxE,OAAO,EAAE,UAAU,EAAoB,MAAM,aAAa,CAAC;AAC3D,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AAQ1E,OAAO,EAAc,KAAK,WAAW,EAAE,MAAM,sCAAsC,CAAC;AAEpF,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AAc7E;;;;;;;;;;;GAWG;AACH,qBAAa,YAAY;IAkBjB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,WAAW;IACnB,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,eAAe;IACvB,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,MAAM;IAQd,OAAO,CAAC,UAAU;IAElB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,WAAW,CAAC;IACpB,OAAO,CAAC,cAAc,CAAC;IAnC3B,OAAO,CAAC,MAAM,CAAe;IAC7B;;;OAGG;IACH,OAAO,CAAC,mBAAmB,CAAkB;IAC7C,OAAO,CAAC,kBAAkB,CAAoC;IAC9D;;;OAGG;IACH,OAAO,CAAC,gBAAgB,CAGpB;gBAGQ,KAAK,EAAE,aAAa,EACpB,WAAW,EAAE,WAAW,EACxB,cAAc,EAAE,cAAc,CAAC,YAAY,CAAC,EAC5C,QAAQ,EAAE,eAAe,EACzB,eAAe,EAAE,eAAe,EAChC,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE;QACZ,QAAQ,EAAE,MAAM,CAAC;QACjB,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QACrC,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QACjC,OAAO,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QAE7B,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,SAAS,CAAC;KAC1F,EACO,UAAU,EAAE,UAAU,EAC9B,MAAM,EAAE,YAAY,EACZ,YAAY,EAAE,mBAAmB,EACjC,WAAW,CAAC,EAAE,WAAW,YAAA,EACzB,cAAc,CAAC,EAAE,WAAW,YAAA,EACpC,kBAAkB,CAAC,EAAE,mBAAmB,GAAG,IAAI;IAqBnD;;OAEG;IACH,OAAO,CAAC,wBAAwB;IAOhC;;;;;;OAMG;IACG,OAAO,CACT,kBAAkB,EAAE,yBAAyB,EAC7C,SAAS,GAAE,OAAc,GAC1B,OAAO,CAAC,cAAc,CAAC;IAsN1B;;;OAGG;IACH,KAAK,IAAI,IAAI;IAIb;;;OAGG;YACW,oBAAoB;IAmBlC;;;;;;OAMG;YACW,mBAAmB;IA0EjC;;;;;;;OAOG;YACW,WAAW;IA0GzB;;;;;OAKG;IACH,OAAO,CAAC,sBAAsB;IAuF9B;;OAEG;IACH,OAAO,CAAC,gBAAgB;IAyBxB;;OAEG;IACH,OAAO,CAAC,eAAe;IAavB;;OAEG;IACH,OAAO,CAAC,oBAAoB;IAS5B;;OAEG;IACH,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAAU;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAAU;IAE/C;;;;;;;;;;;OAWG;YACW,mBAAmB;IAkDjC;;;;;;OAMG;IACH,OAAO,CAAC,kBAAkB;IAgB1B;;;OAGG;IACH,OAAO,CAAC,OAAO;IAYf;;OAEG;IACH,OAAO,CAAC,sBAAsB;IAO9B;;;;;;;;OAQG;YACW,QAAQ;IA8CtB;;OAEG;IACH,OAAO,CAAC,sBAAsB;IAoB9B;;OAEG;IACH,OAAO,CAAC,gBAAgB;CA0D3B"}
package/dist/llm/executor/turn-executor.js
@@ -11,6 +11,7 @@ import {
  } from "ai";
  import { trace } from "@opentelemetry/api";
  import { StreamProcessor } from "./stream-processor.js";
+ import { buildProviderOptions } from "./provider-options.js";
  import { DextoLogComponent } from "../../logger/v2/types.js";
  import { defer } from "../../utils/defer.js";
  import { DextoRuntimeError } from "../../errors/DextoRuntimeError.js";
@@ -135,6 +136,11 @@ class TurnExecutor {
  streaming,
  this.approvalMetadata
  );
+ const providerOptions = buildProviderOptions({
+ provider: this.llmContext.provider,
+ model: this.llmContext.model,
+ reasoningEffort: this.config.reasoningEffort
+ });
  const result = await streamProcessor.process(
  () => streamText({
  model: this.model,
@@ -148,6 +154,10 @@
  ...this.config.temperature !== void 0 && {
  temperature: this.config.temperature
  },
+ // Provider-specific options (caching, reasoning, etc.)
+ ...providerOptions !== void 0 && {
+ providerOptions
+ },
  // Log stream-level errors (tool errors, API errors during streaming)
  onError: (error) => {
  this.logger.error("Stream error", { error });
package/dist/llm/formatters/vercel.cjs
@@ -59,9 +59,17 @@ class VercelMessageFormatter {
  filteredHistory = [...history];
  }
  if (systemPrompt) {
+ const modelLower = context.model.toLowerCase();
+ const isClaudeModel = modelLower.includes("claude");
+ const isAnthropicProvider = context.provider === "anthropic" || context.provider === "bedrock" && isClaudeModel || context.provider === "vertex" && isClaudeModel;
  formatted.push({
  role: "system",
- content: systemPrompt
+ content: systemPrompt,
+ ...isAnthropicProvider && {
+ providerOptions: {
+ anthropic: { cacheControl: { type: "ephemeral" } }
+ }
+ }
  });
  }
  const pendingToolCalls = /* @__PURE__ */ new Map();
package/dist/llm/formatters/vercel.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"vercel.d.ts","sourceRoot":"","sources":["../../../src/llm/formatters/vercel.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAiD,MAAM,IAAI,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,KAAK,EAAE,eAAe,EAAiC,MAAM,wBAAwB,CAAC;AAE7F,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAmB7D;;;;;;;;;;GAUG;AACH,qBAAa,sBAAsB;IAC/B,OAAO,CAAC,MAAM,CAAe;gBAEjB,MAAM,EAAE,YAAY;IAGhC;;;;;;OAMG;IACH,MAAM,CACF,OAAO,EAAE,QAAQ,CAAC,eAAe,EAAE,CAAC,EACpC,OAAO,EAAE,UAAU,EACnB,YAAY,EAAE,MAAM,GAAG,IAAI,GAC5B,YAAY,EAAE;IAwIjB;;;;;;OAMG;IACH,kBAAkB,IAAI,IAAI;IAK1B,OAAO,CAAC,sBAAsB;IA8E9B,OAAO,CAAC,iBAAiB;CAoE5B"}
+ {"version":3,"file":"vercel.d.ts","sourceRoot":"","sources":["../../../src/llm/formatters/vercel.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAiD,MAAM,IAAI,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,KAAK,EAAE,eAAe,EAAiC,MAAM,wBAAwB,CAAC;AAE7F,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAmB7D;;;;;;;;;;GAUG;AACH,qBAAa,sBAAsB;IAC/B,OAAO,CAAC,MAAM,CAAe;gBAEjB,MAAM,EAAE,YAAY;IAGhC;;;;;;OAMG;IACH,MAAM,CACF,OAAO,EAAE,QAAQ,CAAC,eAAe,EAAE,CAAC,EACpC,OAAO,EAAE,UAAU,EACnB,YAAY,EAAE,MAAM,GAAG,IAAI,GAC5B,YAAY,EAAE;IAsJjB;;;;;;OAMG;IACH,kBAAkB,IAAI,IAAI;IAK1B,OAAO,CAAC,sBAAsB;IA8E9B,OAAO,CAAC,iBAAiB;CAoE5B"}
package/dist/llm/formatters/vercel.js
@@ -37,9 +37,17 @@ class VercelMessageFormatter {
  filteredHistory = [...history];
  }
  if (systemPrompt) {
+ const modelLower = context.model.toLowerCase();
+ const isClaudeModel = modelLower.includes("claude");
+ const isAnthropicProvider = context.provider === "anthropic" || context.provider === "bedrock" && isClaudeModel || context.provider === "vertex" && isClaudeModel;
  formatted.push({
  role: "system",
- content: systemPrompt
+ content: systemPrompt,
+ ...isAnthropicProvider && {
+ providerOptions: {
+ anthropic: { cacheControl: { type: "ephemeral" } }
+ }
+ }
  });
  }
  const pendingToolCalls = /* @__PURE__ */ new Map();
package/dist/llm/registry.cjs
@@ -34,6 +34,7 @@ __export(registry_exports, {
  getSupportedFileTypesForModel: () => getSupportedFileTypesForModel,
  getSupportedModels: () => getSupportedModels,
  getSupportedProviders: () => getSupportedProviders,
+ isReasoningCapableModel: () => isReasoningCapableModel,
  isValidProviderModel: () => isValidProviderModel,
  modelSupportsFileType: () => modelSupportsFileType,
  requiresApiKey: () => requiresApiKey,
@@ -74,6 +75,60 @@ const DEFAULT_MAX_INPUT_TOKENS = 128e3;
  const LLM_REGISTRY = {
  openai: {
  models: [
+ // GPT-5.2 series (latest, released Dec 2025)
+ {
+ name: "gpt-5.2-chat-latest",
+ displayName: "GPT-5.2 Instant",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 1.75,
+ outputPerM: 14,
+ cacheReadPerM: 0.175,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ {
+ name: "gpt-5.2",
+ displayName: "GPT-5.2 Thinking",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 1.75,
+ outputPerM: 14,
+ cacheReadPerM: 0.175,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ {
+ name: "gpt-5.2-pro",
+ displayName: "GPT-5.2 Pro",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 21,
+ outputPerM: 168,
+ cacheReadPerM: 2.1,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ {
+ name: "gpt-5.2-codex",
+ displayName: "GPT-5.2 Codex",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 1.75,
+ outputPerM: 14,
+ cacheReadPerM: 0.175,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ // GPT-5.1 series
  {
  name: "gpt-5.1-chat-latest",
  displayName: "GPT-5.1 Instant",
@@ -1525,6 +1580,19 @@ function getModelDisplayName(model, provider) {
  const modelInfo = providerInfo.models.find((m) => m.name.toLowerCase() === normalizedModel);
  return modelInfo?.displayName ?? model;
  }
+ function isReasoningCapableModel(model, provider) {
+ const modelLower = model.toLowerCase();
+ if (modelLower.includes("codex")) {
+ return true;
+ }
+ if (modelLower.startsWith("o1") || modelLower.startsWith("o3") || modelLower.startsWith("o4")) {
+ return true;
+ }
+ if (modelLower.includes("gpt-5") || modelLower.includes("gpt-5.1") || modelLower.includes("gpt-5.2")) {
+ return true;
+ }
+ return false;
+ }
  function calculateCost(usage, pricing) {
  const inputCost = (usage.inputTokens ?? 0) * pricing.inputPerM / 1e6;
  const outputCost = (usage.outputTokens ?? 0) * pricing.outputPerM / 1e6;
@@ -1551,6 +1619,7 @@ function calculateCost(usage, pricing) {
  getSupportedFileTypesForModel,
  getSupportedModels,
  getSupportedProviders,
+ isReasoningCapableModel,
  isValidProviderModel,
  modelSupportsFileType,
  requiresApiKey,
package/dist/llm/registry.d.ts
@@ -205,6 +205,15 @@ export declare function getModelPricing(provider: LLMProvider, model: string): M
  * Gets the display name for a model, falling back to the model ID if not found.
  */
  export declare function getModelDisplayName(model: string, provider?: LLMProvider): string;
+ /**
+ * Checks if a model supports configurable reasoning effort.
+ * Currently only OpenAI reasoning models (o1, o3, codex, gpt-5.x) support this.
+ *
+ * @param model The model name to check.
+ * @param provider Optional provider for context (defaults to detecting from model name).
+ * @returns True if the model supports reasoning effort configuration.
+ */
+ export declare function isReasoningCapableModel(model: string, provider?: LLMProvider): boolean;
  /**
  * Calculates the cost for a given token usage based on model pricing.
  *
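
As a quick sanity check of the new export, a sketch of the classification implemented in the dist code above (model IDs are examples only; relative import as in the dist sources):

import { isReasoningCapableModel } from './registry.js';

isReasoningCapableModel('gpt-5.2-codex');       // true  ("codex")
isReasoningCapableModel('o3-mini');             // true  (o1/o3/o4 prefix)
isReasoningCapableModel('gpt-5.1-chat-latest'); // true  (gpt-5 family)
isReasoningCapableModel('gpt-4o-mini');         // false (no default reasoning effort applied)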
package/dist/llm/registry.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"registry.d.ts","sourceRoot":"","sources":["../../src/llm/registry.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAIzC,OAAO,EAEH,KAAK,WAAW,EAChB,KAAK,iBAAiB,EACtB,KAAK,UAAU,EAClB,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAG1D;;;GAGG;AACH,MAAM,WAAW,YAAY;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,KAAK,CAAC;IACjB,IAAI,CAAC,EAAE,oBAAoB,CAAC;CAC/B;AAED,MAAM,WAAW,SAAS;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB,OAAO,CAAC,EAAE,YAAY,CAAC;CAE1B;AAOD,eAAO,MAAM,sBAAsB,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,CAiBpE,CAAC;AAGF,wBAAgB,mBAAmB,IAAI,MAAM,EAAE,CAE9C;AAED,MAAM,WAAW,YAAY;IACzB,MAAM,EAAE,SAAS,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,GAAG,UAAU,GAAG,UAAU,CAAC;IACjD,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAElC;AAED,sEAAsE;AACtE,eAAO,MAAM,wBAAwB,SAAS,CAAC;AAI/C;;;;;;;;GAQG;AACH,eAAO,MAAM,YAAY,EAAE,MAAM,CAAC,WAAW,EAAE,YAAY,CA2pC1D,CAAC;AAEF;;;;;GAKG;AACH,wBAAgB,wBAAwB,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAQ9D;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,GAAG,IAAI,CAG/E;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,IAAI,WAAW,EAAE,CAErD;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,EAAE,CAGlE;AAED;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CACrC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,MAAM,CAAC,EAAE,YAAY,GACtB,MAAM,CAeR;AAED;;;;;;GAMG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAIlF;AAED;;;;;;;GAOG;AACH,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,WAAW,CAS/D;AAED;;GAEG;AACH,wBAAgB,qBAAqB,IAAI,MAAM,EAAE,CAEhD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;;GAKG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAGnE;AAmBD;;;;;;;;;GASG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAE7D;AAED;;;;;;GAMG;AACH,wBAAgB,6BAA6B,CACzC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,GACd,iBAAiB,EAAE,CAkBrB;AAED;;;;;;GAMG;AACH,wBAAgB,qBAAqB,CACjC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,iBAAiB,GAC5B,OAAO,CAGT;AAED;;;;;;GAMG;AACH,wBAAgB,wBAAwB,CACpC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,GACjB;IACC,WAAW,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,iBAAiB,CAAC;IAC7B,KAAK,CAAC,EAAE,MAAM,CAAC;CAClB,CAkCA;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,0BAA0B,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAqH1F;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,YAAY,GAAG,SAAS,CAW9F;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,WAAW,GAAG,MAAM,CAkBjF;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,YAAY,GAAG,MAAM,CAU9E"}
+ {"version":3,"file":"registry.d.ts","sourceRoot":"","sources":["../../src/llm/registry.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAIzC,OAAO,EAEH,KAAK,WAAW,EAChB,KAAK,iBAAiB,EACtB,KAAK,UAAU,EAClB,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAG1D;;;GAGG;AACH,MAAM,WAAW,YAAY;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,KAAK,CAAC;IACjB,IAAI,CAAC,EAAE,oBAAoB,CAAC;CAC/B;AAED,MAAM,WAAW,SAAS;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB,OAAO,CAAC,EAAE,YAAY,CAAC;CAE1B;AAOD,eAAO,MAAM,sBAAsB,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,CAiBpE,CAAC;AAGF,wBAAgB,mBAAmB,IAAI,MAAM,EAAE,CAE9C;AAED,MAAM,WAAW,YAAY;IACzB,MAAM,EAAE,SAAS,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,GAAG,UAAU,GAAG,UAAU,CAAC;IACjD,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAElC;AAED,sEAAsE;AACtE,eAAO,MAAM,wBAAwB,SAAS,CAAC;AAI/C;;;;;;;;GAQG;AACH,eAAO,MAAM,YAAY,EAAE,MAAM,CAAC,WAAW,EAAE,YAAY,CAitC1D,CAAC;AAEF;;;;;GAKG;AACH,wBAAgB,wBAAwB,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAQ9D;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,GAAG,IAAI,CAG/E;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,IAAI,WAAW,EAAE,CAErD;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,EAAE,CAGlE;AAED;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CACrC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,MAAM,CAAC,EAAE,YAAY,GACtB,MAAM,CAeR;AAED;;;;;;GAMG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAIlF;AAED;;;;;;;GAOG;AACH,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,WAAW,CAS/D;AAED;;GAEG;AACH,wBAAgB,qBAAqB,IAAI,MAAM,EAAE,CAEhD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;;GAKG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAGnE;AAmBD;;;;;;;;;GASG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAE7D;AAED;;;;;;GAMG;AACH,wBAAgB,6BAA6B,CACzC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,GACd,iBAAiB,EAAE,CAkBrB;AAED;;;;;;GAMG;AACH,wBAAgB,qBAAqB,CACjC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,iBAAiB,GAC5B,OAAO,CAGT;AAED;;;;;;GAMG;AACH,wBAAgB,wBAAwB,CACpC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,GACjB;IACC,WAAW,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,iBAAiB,CAAC;IAC7B,KAAK,CAAC,EAAE,MAAM,CAAC;CAClB,CAkCA;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,0BAA0B,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAqH1F;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,YAAY,GAAG,SAAS,CAW9F;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,WAAW,GAAG,MAAM,CAkBjF;AAED;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,WAAW,GAAG,OAAO,CAuBtF;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,YAAY,GAAG,MAAM,CAU9E"}
package/dist/llm/registry.js
@@ -31,6 +31,60 @@ const DEFAULT_MAX_INPUT_TOKENS = 128e3;
  const LLM_REGISTRY = {
  openai: {
  models: [
+ // GPT-5.2 series (latest, released Dec 2025)
+ {
+ name: "gpt-5.2-chat-latest",
+ displayName: "GPT-5.2 Instant",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 1.75,
+ outputPerM: 14,
+ cacheReadPerM: 0.175,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ {
+ name: "gpt-5.2",
+ displayName: "GPT-5.2 Thinking",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 1.75,
+ outputPerM: 14,
+ cacheReadPerM: 0.175,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ {
+ name: "gpt-5.2-pro",
+ displayName: "GPT-5.2 Pro",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 21,
+ outputPerM: 168,
+ cacheReadPerM: 2.1,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ {
+ name: "gpt-5.2-codex",
+ displayName: "GPT-5.2 Codex",
+ maxInputTokens: 4e5,
+ supportedFileTypes: ["pdf", "image"],
+ pricing: {
+ inputPerM: 1.75,
+ outputPerM: 14,
+ cacheReadPerM: 0.175,
+ currency: "USD",
+ unit: "per_million_tokens"
+ }
+ },
+ // GPT-5.1 series
  {
  name: "gpt-5.1-chat-latest",
  displayName: "GPT-5.1 Instant",
@@ -1482,6 +1536,19 @@ function getModelDisplayName(model, provider) {
  const modelInfo = providerInfo.models.find((m) => m.name.toLowerCase() === normalizedModel);
  return modelInfo?.displayName ?? model;
  }
+ function isReasoningCapableModel(model, provider) {
+ const modelLower = model.toLowerCase();
+ if (modelLower.includes("codex")) {
+ return true;
+ }
+ if (modelLower.startsWith("o1") || modelLower.startsWith("o3") || modelLower.startsWith("o4")) {
+ return true;
+ }
+ if (modelLower.includes("gpt-5") || modelLower.includes("gpt-5.1") || modelLower.includes("gpt-5.2")) {
+ return true;
+ }
+ return false;
+ }
  function calculateCost(usage, pricing) {
  const inputCost = (usage.inputTokens ?? 0) * pricing.inputPerM / 1e6;
  const outputCost = (usage.outputTokens ?? 0) * pricing.outputPerM / 1e6;
@@ -1507,6 +1574,7 @@ export {
  getSupportedFileTypesForModel,
  getSupportedModels,
  getSupportedProviders,
+ isReasoningCapableModel,
  isValidProviderModel,
  modelSupportsFileType,
  requiresApiKey,