@dexto/core 1.5.0 → 1.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/dist/agent/schemas.d.ts +60 -12
  2. package/dist/agent/schemas.d.ts.map +1 -1
  3. package/dist/context/manager.cjs +4 -0
  4. package/dist/context/manager.d.ts.map +1 -1
  5. package/dist/context/manager.js +4 -0
  6. package/dist/context/types.d.ts +5 -0
  7. package/dist/context/types.d.ts.map +1 -1
  8. package/dist/events/index.cjs +4 -1
  9. package/dist/events/index.d.ts +20 -4
  10. package/dist/events/index.d.ts.map +1 -1
  11. package/dist/events/index.js +3 -1
  12. package/dist/llm/executor/provider-options.cjs +87 -0
  13. package/dist/llm/executor/provider-options.d.ts +49 -0
  14. package/dist/llm/executor/provider-options.d.ts.map +1 -0
  15. package/dist/llm/executor/provider-options.js +63 -0
  16. package/dist/llm/executor/stream-processor.cjs +11 -8
  17. package/dist/llm/executor/stream-processor.d.ts.map +1 -1
  18. package/dist/llm/executor/stream-processor.js +11 -8
  19. package/dist/llm/executor/turn-executor.cjs +12 -1
  20. package/dist/llm/executor/turn-executor.d.ts +2 -1
  21. package/dist/llm/executor/turn-executor.d.ts.map +1 -1
  22. package/dist/llm/executor/turn-executor.js +12 -1
  23. package/dist/llm/formatters/vercel.cjs +9 -1
  24. package/dist/llm/formatters/vercel.d.ts.map +1 -1
  25. package/dist/llm/formatters/vercel.js +9 -1
  26. package/dist/llm/registry.cjs +69 -0
  27. package/dist/llm/registry.d.ts +9 -0
  28. package/dist/llm/registry.d.ts.map +1 -1
  29. package/dist/llm/registry.js +68 -0
  30. package/dist/llm/schemas.cjs +18 -2
  31. package/dist/llm/schemas.d.ts +34 -11
  32. package/dist/llm/schemas.d.ts.map +1 -1
  33. package/dist/llm/schemas.js +18 -2
  34. package/dist/llm/services/vercel.cjs +3 -1
  35. package/dist/llm/services/vercel.d.ts.map +1 -1
  36. package/dist/llm/services/vercel.js +3 -1
  37. package/dist/logger/logger.cjs +7 -3
  38. package/dist/logger/logger.d.ts.map +1 -1
  39. package/dist/logger/logger.js +7 -3
  40. package/dist/memory/schemas.d.ts +2 -2
  41. package/dist/providers/discovery.cjs +14 -0
  42. package/dist/providers/discovery.d.ts +4 -2
  43. package/dist/providers/discovery.d.ts.map +1 -1
  44. package/dist/providers/discovery.js +14 -0
  45. package/dist/session/history/database.cjs +49 -15
  46. package/dist/session/history/database.d.ts.map +1 -1
  47. package/dist/session/history/database.js +49 -15
  48. package/dist/session/session-manager.cjs +2 -1
  49. package/dist/session/session-manager.d.ts.map +1 -1
  50. package/dist/session/session-manager.js +2 -1
  51. package/dist/storage/database/postgres-store.cjs +174 -78
  52. package/dist/storage/database/postgres-store.d.ts +19 -0
  53. package/dist/storage/database/postgres-store.d.ts.map +1 -1
  54. package/dist/storage/database/postgres-store.js +174 -78
  55. package/dist/storage/database/schemas.cjs +4 -1
  56. package/dist/storage/database/schemas.d.ts +8 -0
  57. package/dist/storage/database/schemas.d.ts.map +1 -1
  58. package/dist/storage/database/schemas.js +4 -1
  59. package/dist/storage/schemas.d.ts +7 -0
  60. package/dist/storage/schemas.d.ts.map +1 -1
  61. package/dist/tools/custom-tool-registry.d.ts +9 -3
  62. package/dist/tools/custom-tool-registry.d.ts.map +1 -1
  63. package/dist/tools/internal-tools/provider.cjs +5 -2
  64. package/dist/tools/internal-tools/provider.d.ts.map +1 -1
  65. package/dist/tools/internal-tools/provider.js +5 -2
  66. package/dist/tools/tool-manager.cjs +1 -1
  67. package/dist/tools/tool-manager.d.ts.map +1 -1
  68. package/dist/tools/tool-manager.js +1 -1
  69. package/dist/utils/safe-stringify.cjs +10 -6
  70. package/dist/utils/safe-stringify.d.ts +5 -2
  71. package/dist/utils/safe-stringify.d.ts.map +1 -1
  72. package/dist/utils/safe-stringify.js +10 -6
  73. package/package.json +1 -1
@@ -34,6 +34,7 @@ __export(registry_exports, {
34
34
  getSupportedFileTypesForModel: () => getSupportedFileTypesForModel,
35
35
  getSupportedModels: () => getSupportedModels,
36
36
  getSupportedProviders: () => getSupportedProviders,
37
+ isReasoningCapableModel: () => isReasoningCapableModel,
37
38
  isValidProviderModel: () => isValidProviderModel,
38
39
  modelSupportsFileType: () => modelSupportsFileType,
39
40
  requiresApiKey: () => requiresApiKey,
@@ -74,6 +75,60 @@ const DEFAULT_MAX_INPUT_TOKENS = 128e3;
74
75
  const LLM_REGISTRY = {
75
76
  openai: {
76
77
  models: [
78
+ // GPT-5.2 series (latest, released Dec 2025)
79
+ {
80
+ name: "gpt-5.2-chat-latest",
81
+ displayName: "GPT-5.2 Instant",
82
+ maxInputTokens: 4e5,
83
+ supportedFileTypes: ["pdf", "image"],
84
+ pricing: {
85
+ inputPerM: 1.75,
86
+ outputPerM: 14,
87
+ cacheReadPerM: 0.175,
88
+ currency: "USD",
89
+ unit: "per_million_tokens"
90
+ }
91
+ },
92
+ {
93
+ name: "gpt-5.2",
94
+ displayName: "GPT-5.2 Thinking",
95
+ maxInputTokens: 4e5,
96
+ supportedFileTypes: ["pdf", "image"],
97
+ pricing: {
98
+ inputPerM: 1.75,
99
+ outputPerM: 14,
100
+ cacheReadPerM: 0.175,
101
+ currency: "USD",
102
+ unit: "per_million_tokens"
103
+ }
104
+ },
105
+ {
106
+ name: "gpt-5.2-pro",
107
+ displayName: "GPT-5.2 Pro",
108
+ maxInputTokens: 4e5,
109
+ supportedFileTypes: ["pdf", "image"],
110
+ pricing: {
111
+ inputPerM: 21,
112
+ outputPerM: 168,
113
+ cacheReadPerM: 2.1,
114
+ currency: "USD",
115
+ unit: "per_million_tokens"
116
+ }
117
+ },
118
+ {
119
+ name: "gpt-5.2-codex",
120
+ displayName: "GPT-5.2 Codex",
121
+ maxInputTokens: 4e5,
122
+ supportedFileTypes: ["pdf", "image"],
123
+ pricing: {
124
+ inputPerM: 1.75,
125
+ outputPerM: 14,
126
+ cacheReadPerM: 0.175,
127
+ currency: "USD",
128
+ unit: "per_million_tokens"
129
+ }
130
+ },
131
+ // GPT-5.1 series
77
132
  {
78
133
  name: "gpt-5.1-chat-latest",
79
134
  displayName: "GPT-5.1 Instant",
@@ -1525,6 +1580,19 @@ function getModelDisplayName(model, provider) {
1525
1580
  const modelInfo = providerInfo.models.find((m) => m.name.toLowerCase() === normalizedModel);
1526
1581
  return modelInfo?.displayName ?? model;
1527
1582
  }
1583
/**
 * Checks whether a model supports configurable reasoning effort.
 *
 * Matching is case-insensitive and purely name-based:
 * - any model whose name contains "codex"
 * - OpenAI o-series models (names starting with "o1", "o3", or "o4")
 * - any GPT-5 family model (name contains "gpt-5", which covers
 *   "gpt-5.1", "gpt-5.2", etc. — the previous separate checks for
 *   "gpt-5.1"/"gpt-5.2" were unreachable because "gpt-5" already
 *   matched them)
 *
 * @param {string} model - The model name to check.
 * @param {string} [_provider] - Unused; kept for interface compatibility.
 * @returns {boolean} True if the model supports reasoning effort configuration.
 */
function isReasoningCapableModel(model, _provider) {
  const modelLower = model.toLowerCase();
  return (
    modelLower.includes("codex") ||
    modelLower.startsWith("o1") ||
    modelLower.startsWith("o3") ||
    modelLower.startsWith("o4") ||
    modelLower.includes("gpt-5")
  );
}
1528
1596
  function calculateCost(usage, pricing) {
1529
1597
  const inputCost = (usage.inputTokens ?? 0) * pricing.inputPerM / 1e6;
1530
1598
  const outputCost = (usage.outputTokens ?? 0) * pricing.outputPerM / 1e6;
@@ -1551,6 +1619,7 @@ function calculateCost(usage, pricing) {
1551
1619
  getSupportedFileTypesForModel,
1552
1620
  getSupportedModels,
1553
1621
  getSupportedProviders,
1622
+ isReasoningCapableModel,
1554
1623
  isValidProviderModel,
1555
1624
  modelSupportsFileType,
1556
1625
  requiresApiKey,
@@ -205,6 +205,15 @@ export declare function getModelPricing(provider: LLMProvider, model: string): M
205
205
  * Gets the display name for a model, falling back to the model ID if not found.
206
206
  */
207
207
  export declare function getModelDisplayName(model: string, provider?: LLMProvider): string;
208
+ /**
209
+ * Checks if a model supports configurable reasoning effort.
210
+ * Currently only OpenAI reasoning models (o1, o3, codex, gpt-5.x) support this.
211
+ *
212
+ * @param model The model name to check.
213
+ * @param provider Optional provider for context (defaults to detecting from model name).
214
+ * @returns True if the model supports reasoning effort configuration.
215
+ */
216
+ export declare function isReasoningCapableModel(model: string, _provider?: LLMProvider): boolean;
208
217
  /**
209
218
  * Calculates the cost for a given token usage based on model pricing.
210
219
  *
@@ -1 +1 @@
1
- {"version":3,"file":"registry.d.ts","sourceRoot":"","sources":["../../src/llm/registry.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAIzC,OAAO,EAEH,KAAK,WAAW,EAChB,KAAK,iBAAiB,EACtB,KAAK,UAAU,EAClB,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAG1D;;;GAGG;AACH,MAAM,WAAW,YAAY;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,KAAK,CAAC;IACjB,IAAI,CAAC,EAAE,oBAAoB,CAAC;CAC/B;AAED,MAAM,WAAW,SAAS;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB,OAAO,CAAC,EAAE,YAAY,CAAC;CAE1B;AAOD,eAAO,MAAM,sBAAsB,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,CAiBpE,CAAC;AAGF,wBAAgB,mBAAmB,IAAI,MAAM,EAAE,CAE9C;AAED,MAAM,WAAW,YAAY;IACzB,MAAM,EAAE,SAAS,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,GAAG,UAAU,GAAG,UAAU,CAAC;IACjD,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAElC;AAED,sEAAsE;AACtE,eAAO,MAAM,wBAAwB,SAAS,CAAC;AAI/C;;;;;;;;GAQG;AACH,eAAO,MAAM,YAAY,EAAE,MAAM,CAAC,WAAW,EAAE,YAAY,CA2pC1D,CAAC;AAEF;;;;;GAKG;AACH,wBAAgB,wBAAwB,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAQ9D;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,GAAG,IAAI,CAG/E;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,IAAI,WAAW,EAAE,CAErD;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,EAAE,CAGlE;AAED;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CACrC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,MAAM,CAAC,EAAE,YAAY,GACtB,MAAM,CAeR;AAED;;;;;;GAMG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAIlF;AAED;;;;;;;GAOG;AACH,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,WAAW,CAS/D;AAED;;GAEG;AACH,wBAAgB,qBAAqB,IAAI,MAAM,EAAE,CAEhD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;;GAKG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAGn
E;AAmBD;;;;;;;;;GASG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAE7D;AAED;;;;;;GAMG;AACH,wBAAgB,6BAA6B,CACzC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,GACd,iBAAiB,EAAE,CAkBrB;AAED;;;;;;GAMG;AACH,wBAAgB,qBAAqB,CACjC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,iBAAiB,GAC5B,OAAO,CAGT;AAED;;;;;;GAMG;AACH,wBAAgB,wBAAwB,CACpC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,GACjB;IACC,WAAW,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,iBAAiB,CAAC;IAC7B,KAAK,CAAC,EAAE,MAAM,CAAC;CAClB,CAkCA;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,0BAA0B,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAqH1F;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,YAAY,GAAG,SAAS,CAW9F;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,WAAW,GAAG,MAAM,CAkBjF;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,YAAY,GAAG,MAAM,CAU9E"}
1
+ {"version":3,"file":"registry.d.ts","sourceRoot":"","sources":["../../src/llm/registry.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAIzC,OAAO,EAEH,KAAK,WAAW,EAChB,KAAK,iBAAiB,EACtB,KAAK,UAAU,EAClB,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAG1D;;;GAGG;AACH,MAAM,WAAW,YAAY;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,KAAK,CAAC;IACjB,IAAI,CAAC,EAAE,oBAAoB,CAAC;CAC/B;AAED,MAAM,WAAW,SAAS;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB,OAAO,CAAC,EAAE,YAAY,CAAC;CAE1B;AAOD,eAAO,MAAM,sBAAsB,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,CAiBpE,CAAC;AAGF,wBAAgB,mBAAmB,IAAI,MAAM,EAAE,CAE9C;AAED,MAAM,WAAW,YAAY;IACzB,MAAM,EAAE,SAAS,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,GAAG,UAAU,GAAG,UAAU,CAAC;IACjD,kBAAkB,EAAE,iBAAiB,EAAE,CAAC;IACxC,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAElC;AAED,sEAAsE;AACtE,eAAO,MAAM,wBAAwB,SAAS,CAAC;AAI/C;;;;;;;;GAQG;AACH,eAAO,MAAM,YAAY,EAAE,MAAM,CAAC,WAAW,EAAE,YAAY,CAitC1D,CAAC;AAEF;;;;;GAKG;AACH,wBAAgB,wBAAwB,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAQ9D;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,GAAG,IAAI,CAG/E;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,IAAI,WAAW,EAAE,CAErD;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,WAAW,GAAG,MAAM,EAAE,CAGlE;AAED;;;;;;;GAOG;AACH,wBAAgB,yBAAyB,CACrC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,MAAM,CAAC,EAAE,YAAY,GACtB,MAAM,CAeR;AAED;;;;;;GAMG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAIlF;AAED;;;;;;;GAOG;AACH,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,WAAW,CAS/D;AAED;;GAEG;AACH,wBAAgB,qBAAqB,IAAI,MAAM,EAAE,CAEhD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAG9D;AAED;;;;;GAKG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAGn
E;AAmBD;;;;;;;;;GASG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAE7D;AAED;;;;;;GAMG;AACH,wBAAgB,6BAA6B,CACzC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,GACd,iBAAiB,EAAE,CAkBrB;AAED;;;;;;GAMG;AACH,wBAAgB,qBAAqB,CACjC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,iBAAiB,GAC5B,OAAO,CAGT;AAED;;;;;;GAMG;AACH,wBAAgB,wBAAwB,CACpC,QAAQ,EAAE,WAAW,EACrB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,GACjB;IACC,WAAW,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,iBAAiB,CAAC;IAC7B,KAAK,CAAC,EAAE,MAAM,CAAC;CAClB,CAkCA;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,0BAA0B,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,YAAY,GAAG,MAAM,CAqH1F;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,GAAG,YAAY,GAAG,SAAS,CAW9F;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,WAAW,GAAG,MAAM,CAkBjF;AAGD;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,WAAW,GAAG,OAAO,CAuBvF;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,YAAY,GAAG,MAAM,CAU9E"}
@@ -31,6 +31,60 @@ const DEFAULT_MAX_INPUT_TOKENS = 128e3;
31
31
  const LLM_REGISTRY = {
32
32
  openai: {
33
33
  models: [
34
+ // GPT-5.2 series (latest, released Dec 2025)
35
+ {
36
+ name: "gpt-5.2-chat-latest",
37
+ displayName: "GPT-5.2 Instant",
38
+ maxInputTokens: 4e5,
39
+ supportedFileTypes: ["pdf", "image"],
40
+ pricing: {
41
+ inputPerM: 1.75,
42
+ outputPerM: 14,
43
+ cacheReadPerM: 0.175,
44
+ currency: "USD",
45
+ unit: "per_million_tokens"
46
+ }
47
+ },
48
+ {
49
+ name: "gpt-5.2",
50
+ displayName: "GPT-5.2 Thinking",
51
+ maxInputTokens: 4e5,
52
+ supportedFileTypes: ["pdf", "image"],
53
+ pricing: {
54
+ inputPerM: 1.75,
55
+ outputPerM: 14,
56
+ cacheReadPerM: 0.175,
57
+ currency: "USD",
58
+ unit: "per_million_tokens"
59
+ }
60
+ },
61
+ {
62
+ name: "gpt-5.2-pro",
63
+ displayName: "GPT-5.2 Pro",
64
+ maxInputTokens: 4e5,
65
+ supportedFileTypes: ["pdf", "image"],
66
+ pricing: {
67
+ inputPerM: 21,
68
+ outputPerM: 168,
69
+ cacheReadPerM: 2.1,
70
+ currency: "USD",
71
+ unit: "per_million_tokens"
72
+ }
73
+ },
74
+ {
75
+ name: "gpt-5.2-codex",
76
+ displayName: "GPT-5.2 Codex",
77
+ maxInputTokens: 4e5,
78
+ supportedFileTypes: ["pdf", "image"],
79
+ pricing: {
80
+ inputPerM: 1.75,
81
+ outputPerM: 14,
82
+ cacheReadPerM: 0.175,
83
+ currency: "USD",
84
+ unit: "per_million_tokens"
85
+ }
86
+ },
87
+ // GPT-5.1 series
34
88
  {
35
89
  name: "gpt-5.1-chat-latest",
36
90
  displayName: "GPT-5.1 Instant",
@@ -1482,6 +1536,19 @@ function getModelDisplayName(model, provider) {
1482
1536
  const modelInfo = providerInfo.models.find((m) => m.name.toLowerCase() === normalizedModel);
1483
1537
  return modelInfo?.displayName ?? model;
1484
1538
  }
1539
/**
 * Checks whether a model supports configurable reasoning effort.
 *
 * Matching is case-insensitive and purely name-based:
 * - any model whose name contains "codex"
 * - OpenAI o-series models (names starting with "o1", "o3", or "o4")
 * - any GPT-5 family model (name contains "gpt-5", which covers
 *   "gpt-5.1", "gpt-5.2", etc. — the previous separate checks for
 *   "gpt-5.1"/"gpt-5.2" were unreachable because "gpt-5" already
 *   matched them)
 *
 * @param {string} model - The model name to check.
 * @param {string} [_provider] - Unused; kept for interface compatibility.
 * @returns {boolean} True if the model supports reasoning effort configuration.
 */
function isReasoningCapableModel(model, _provider) {
  const modelLower = model.toLowerCase();
  return (
    modelLower.includes("codex") ||
    modelLower.startsWith("o1") ||
    modelLower.startsWith("o3") ||
    modelLower.startsWith("o4") ||
    modelLower.includes("gpt-5")
  );
}
1485
1552
  function calculateCost(usage, pricing) {
1486
1553
  const inputCost = (usage.inputTokens ?? 0) * pricing.inputPerM / 1e6;
1487
1554
  const outputCost = (usage.outputTokens ?? 0) * pricing.outputPerM / 1e6;
@@ -1507,6 +1574,7 @@ export {
1507
1574
  getSupportedFileTypesForModel,
1508
1575
  getSupportedModels,
1509
1576
  getSupportedProviders,
1577
+ isReasoningCapableModel,
1510
1578
  isValidProviderModel,
1511
1579
  modelSupportsFileType,
1512
1580
  requiresApiKey,
@@ -48,6 +48,20 @@ const LLMConfigFields = {
48
48
  temperature: import_zod.z.coerce.number().min(0).max(1).optional().describe("Randomness: 0 deterministic, 1 creative"),
49
49
  allowedMediaTypes: import_zod.z.array(import_zod.z.string()).optional().describe(
50
50
  'MIME type patterns for media expansion (e.g., "image/*", "application/pdf"). If omitted, uses model capabilities from registry. Supports wildcards.'
51
+ ),
52
+ // Provider-specific options
53
+ /**
54
+ * OpenAI reasoning effort level for reasoning-capable models (o1, o3, codex, gpt-5.x).
55
+ * Controls how many reasoning tokens the model generates before producing a response.
56
+ * - 'none': No reasoning, fastest responses
57
+ * - 'minimal': Barely any reasoning, very fast responses
58
+ * - 'low': Light reasoning, fast responses
59
+ * - 'medium': Balanced reasoning (OpenAI's recommended daily driver)
60
+ * - 'high': Thorough reasoning for complex tasks
61
+ * - 'xhigh': Extra high reasoning for quality-critical, non-latency-sensitive tasks
62
+ */
63
+ reasoningEffort: import_zod.z.enum(["none", "minimal", "low", "medium", "high", "xhigh"]).optional().describe(
64
+ "OpenAI reasoning effort level for reasoning models (o1, o3, codex). Options: 'none', 'minimal', 'low', 'medium' (recommended), 'high', 'xhigh'"
51
65
  )
52
66
  };
53
67
  const LLMConfigBaseSchema = import_zod.z.object({
@@ -56,12 +70,14 @@ const LLMConfigBaseSchema = import_zod.z.object({
56
70
  // apiKey is optional at schema level - validated based on provider in superRefine
57
71
  apiKey: LLMConfigFields.apiKey,
58
72
  // Apply defaults only for complete config validation
59
- maxIterations: import_zod.z.coerce.number().int().positive().default(50),
73
+ maxIterations: import_zod.z.coerce.number().int().positive().optional(),
60
74
  baseURL: LLMConfigFields.baseURL,
61
75
  maxInputTokens: LLMConfigFields.maxInputTokens,
62
76
  maxOutputTokens: LLMConfigFields.maxOutputTokens,
63
77
  temperature: LLMConfigFields.temperature,
64
- allowedMediaTypes: LLMConfigFields.allowedMediaTypes
78
+ allowedMediaTypes: LLMConfigFields.allowedMediaTypes,
79
+ // Provider-specific options
80
+ reasoningEffort: LLMConfigFields.reasoningEffort
65
81
  }).strict();
66
82
  function createLLMConfigSchema(options = {}) {
67
83
  const { strict = true } = options;
@@ -24,22 +24,24 @@ export declare const LLMConfigBaseSchema: z.ZodObject<{
24
24
  provider: z.ZodEnum<["openai", "openai-compatible", "anthropic", "google", "groq", "xai", "cohere", "openrouter", "litellm", "glama", "vertex", "bedrock", "local", "ollama"]>;
25
25
  model: z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>;
26
26
  apiKey: z.ZodOptional<z.ZodEffects<z.ZodString, string, string>>;
27
- maxIterations: z.ZodDefault<z.ZodNumber>;
27
+ maxIterations: z.ZodOptional<z.ZodNumber>;
28
28
  baseURL: z.ZodOptional<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, string | undefined, string>>;
29
29
  maxInputTokens: z.ZodOptional<z.ZodNumber>;
30
30
  maxOutputTokens: z.ZodOptional<z.ZodNumber>;
31
31
  temperature: z.ZodOptional<z.ZodNumber>;
32
32
  allowedMediaTypes: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
33
+ reasoningEffort: z.ZodOptional<z.ZodEnum<["none", "minimal", "low", "medium", "high", "xhigh"]>>;
33
34
  }, "strict", z.ZodTypeAny, {
34
35
  model: string;
35
36
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
36
- maxIterations: number;
37
37
  apiKey?: string | undefined;
38
+ maxIterations?: number | undefined;
38
39
  baseURL?: string | undefined;
39
40
  maxInputTokens?: number | undefined;
40
41
  maxOutputTokens?: number | undefined;
41
42
  temperature?: number | undefined;
42
43
  allowedMediaTypes?: string[] | undefined;
44
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
43
45
  }, {
44
46
  model: string;
45
47
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -50,6 +52,7 @@ export declare const LLMConfigBaseSchema: z.ZodObject<{
50
52
  maxOutputTokens?: number | undefined;
51
53
  temperature?: number | undefined;
52
54
  allowedMediaTypes?: string[] | undefined;
55
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
53
56
  }>;
54
57
  /**
55
58
  * Creates an LLM config schema with configurable validation strictness.
@@ -61,22 +64,24 @@ export declare function createLLMConfigSchema(options?: LLMValidationOptions): z
61
64
  provider: z.ZodEnum<["openai", "openai-compatible", "anthropic", "google", "groq", "xai", "cohere", "openrouter", "litellm", "glama", "vertex", "bedrock", "local", "ollama"]>;
62
65
  model: z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>;
63
66
  apiKey: z.ZodOptional<z.ZodEffects<z.ZodString, string, string>>;
64
- maxIterations: z.ZodDefault<z.ZodNumber>;
67
+ maxIterations: z.ZodOptional<z.ZodNumber>;
65
68
  baseURL: z.ZodOptional<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, string | undefined, string>>;
66
69
  maxInputTokens: z.ZodOptional<z.ZodNumber>;
67
70
  maxOutputTokens: z.ZodOptional<z.ZodNumber>;
68
71
  temperature: z.ZodOptional<z.ZodNumber>;
69
72
  allowedMediaTypes: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
73
+ reasoningEffort: z.ZodOptional<z.ZodEnum<["none", "minimal", "low", "medium", "high", "xhigh"]>>;
70
74
  }, "strict", z.ZodTypeAny, {
71
75
  model: string;
72
76
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
73
- maxIterations: number;
74
77
  apiKey?: string | undefined;
78
+ maxIterations?: number | undefined;
75
79
  baseURL?: string | undefined;
76
80
  maxInputTokens?: number | undefined;
77
81
  maxOutputTokens?: number | undefined;
78
82
  temperature?: number | undefined;
79
83
  allowedMediaTypes?: string[] | undefined;
84
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
80
85
  }, {
81
86
  model: string;
82
87
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -87,16 +92,18 @@ export declare function createLLMConfigSchema(options?: LLMValidationOptions): z
87
92
  maxOutputTokens?: number | undefined;
88
93
  temperature?: number | undefined;
89
94
  allowedMediaTypes?: string[] | undefined;
95
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
90
96
  }>, {
91
97
  model: string;
92
98
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
93
- maxIterations: number;
94
99
  apiKey?: string | undefined;
100
+ maxIterations?: number | undefined;
95
101
  baseURL?: string | undefined;
96
102
  maxInputTokens?: number | undefined;
97
103
  maxOutputTokens?: number | undefined;
98
104
  temperature?: number | undefined;
99
105
  allowedMediaTypes?: string[] | undefined;
106
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
100
107
  }, {
101
108
  model: string;
102
109
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -107,6 +114,7 @@ export declare function createLLMConfigSchema(options?: LLMValidationOptions): z
107
114
  maxOutputTokens?: number | undefined;
108
115
  temperature?: number | undefined;
109
116
  allowedMediaTypes?: string[] | undefined;
117
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
110
118
  }>, "ValidatedLLMConfig">;
111
119
  /**
112
120
  * Default LLM config schema with strict validation (backwards compatible).
@@ -116,22 +124,24 @@ export declare const LLMConfigSchema: z.ZodBranded<z.ZodEffects<z.ZodObject<{
116
124
  provider: z.ZodEnum<["openai", "openai-compatible", "anthropic", "google", "groq", "xai", "cohere", "openrouter", "litellm", "glama", "vertex", "bedrock", "local", "ollama"]>;
117
125
  model: z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>;
118
126
  apiKey: z.ZodOptional<z.ZodEffects<z.ZodString, string, string>>;
119
- maxIterations: z.ZodDefault<z.ZodNumber>;
127
+ maxIterations: z.ZodOptional<z.ZodNumber>;
120
128
  baseURL: z.ZodOptional<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, string | undefined, string>>;
121
129
  maxInputTokens: z.ZodOptional<z.ZodNumber>;
122
130
  maxOutputTokens: z.ZodOptional<z.ZodNumber>;
123
131
  temperature: z.ZodOptional<z.ZodNumber>;
124
132
  allowedMediaTypes: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
133
+ reasoningEffort: z.ZodOptional<z.ZodEnum<["none", "minimal", "low", "medium", "high", "xhigh"]>>;
125
134
  }, "strict", z.ZodTypeAny, {
126
135
  model: string;
127
136
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
128
- maxIterations: number;
129
137
  apiKey?: string | undefined;
138
+ maxIterations?: number | undefined;
130
139
  baseURL?: string | undefined;
131
140
  maxInputTokens?: number | undefined;
132
141
  maxOutputTokens?: number | undefined;
133
142
  temperature?: number | undefined;
134
143
  allowedMediaTypes?: string[] | undefined;
144
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
135
145
  }, {
136
146
  model: string;
137
147
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -142,16 +152,18 @@ export declare const LLMConfigSchema: z.ZodBranded<z.ZodEffects<z.ZodObject<{
142
152
  maxOutputTokens?: number | undefined;
143
153
  temperature?: number | undefined;
144
154
  allowedMediaTypes?: string[] | undefined;
155
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
145
156
  }>, {
146
157
  model: string;
147
158
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
148
- maxIterations: number;
149
159
  apiKey?: string | undefined;
160
+ maxIterations?: number | undefined;
150
161
  baseURL?: string | undefined;
151
162
  maxInputTokens?: number | undefined;
152
163
  maxOutputTokens?: number | undefined;
153
164
  temperature?: number | undefined;
154
165
  allowedMediaTypes?: string[] | undefined;
166
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
155
167
  }, {
156
168
  model: string;
157
169
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -162,6 +174,7 @@ export declare const LLMConfigSchema: z.ZodBranded<z.ZodEffects<z.ZodObject<{
162
174
  maxOutputTokens?: number | undefined;
163
175
  temperature?: number | undefined;
164
176
  allowedMediaTypes?: string[] | undefined;
177
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
165
178
  }>, "ValidatedLLMConfig">;
166
179
  /**
167
180
  * Relaxed LLM config schema that allows missing API keys and baseURLs.
@@ -171,22 +184,24 @@ export declare const LLMConfigSchemaRelaxed: z.ZodBranded<z.ZodEffects<z.ZodObje
171
184
  provider: z.ZodEnum<["openai", "openai-compatible", "anthropic", "google", "groq", "xai", "cohere", "openrouter", "litellm", "glama", "vertex", "bedrock", "local", "ollama"]>;
172
185
  model: z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>;
173
186
  apiKey: z.ZodOptional<z.ZodEffects<z.ZodString, string, string>>;
174
- maxIterations: z.ZodDefault<z.ZodNumber>;
187
+ maxIterations: z.ZodOptional<z.ZodNumber>;
175
188
  baseURL: z.ZodOptional<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, string | undefined, string>>;
176
189
  maxInputTokens: z.ZodOptional<z.ZodNumber>;
177
190
  maxOutputTokens: z.ZodOptional<z.ZodNumber>;
178
191
  temperature: z.ZodOptional<z.ZodNumber>;
179
192
  allowedMediaTypes: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
193
+ reasoningEffort: z.ZodOptional<z.ZodEnum<["none", "minimal", "low", "medium", "high", "xhigh"]>>;
180
194
  }, "strict", z.ZodTypeAny, {
181
195
  model: string;
182
196
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
183
- maxIterations: number;
184
197
  apiKey?: string | undefined;
198
+ maxIterations?: number | undefined;
185
199
  baseURL?: string | undefined;
186
200
  maxInputTokens?: number | undefined;
187
201
  maxOutputTokens?: number | undefined;
188
202
  temperature?: number | undefined;
189
203
  allowedMediaTypes?: string[] | undefined;
204
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
190
205
  }, {
191
206
  model: string;
192
207
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -197,16 +212,18 @@ export declare const LLMConfigSchemaRelaxed: z.ZodBranded<z.ZodEffects<z.ZodObje
197
212
  maxOutputTokens?: number | undefined;
198
213
  temperature?: number | undefined;
199
214
  allowedMediaTypes?: string[] | undefined;
215
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
200
216
  }>, {
201
217
  model: string;
202
218
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
203
- maxIterations: number;
204
219
  apiKey?: string | undefined;
220
+ maxIterations?: number | undefined;
205
221
  baseURL?: string | undefined;
206
222
  maxInputTokens?: number | undefined;
207
223
  maxOutputTokens?: number | undefined;
208
224
  temperature?: number | undefined;
209
225
  allowedMediaTypes?: string[] | undefined;
226
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
210
227
  }, {
211
228
  model: string;
212
229
  provider: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama";
@@ -217,6 +234,7 @@ export declare const LLMConfigSchemaRelaxed: z.ZodBranded<z.ZodEffects<z.ZodObje
217
234
  maxOutputTokens?: number | undefined;
218
235
  temperature?: number | undefined;
219
236
  allowedMediaTypes?: string[] | undefined;
237
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
220
238
  }>, "ValidatedLLMConfig">;
221
239
  export type LLMConfig = z.input<typeof LLMConfigSchema>;
222
240
  export type ValidatedLLMConfig = z.output<typeof LLMConfigSchema>;
@@ -230,6 +248,7 @@ export declare const LLMUpdatesSchema: z.ZodEffects<z.ZodObject<{
230
248
  maxOutputTokens: z.ZodOptional<z.ZodOptional<z.ZodNumber>>;
231
249
  temperature: z.ZodOptional<z.ZodOptional<z.ZodNumber>>;
232
250
  allowedMediaTypes: z.ZodOptional<z.ZodOptional<z.ZodArray<z.ZodString, "many">>>;
251
+ reasoningEffort: z.ZodOptional<z.ZodOptional<z.ZodEnum<["none", "minimal", "low", "medium", "high", "xhigh"]>>>;
233
252
  }, "strip", z.ZodTypeAny, {
234
253
  model?: string | undefined;
235
254
  provider?: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama" | undefined;
@@ -240,6 +259,7 @@ export declare const LLMUpdatesSchema: z.ZodEffects<z.ZodObject<{
240
259
  maxOutputTokens?: number | undefined;
241
260
  temperature?: number | undefined;
242
261
  allowedMediaTypes?: string[] | undefined;
262
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
243
263
  }, {
244
264
  model?: string | undefined;
245
265
  provider?: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama" | undefined;
@@ -250,6 +270,7 @@ export declare const LLMUpdatesSchema: z.ZodEffects<z.ZodObject<{
250
270
  maxOutputTokens?: number | undefined;
251
271
  temperature?: number | undefined;
252
272
  allowedMediaTypes?: string[] | undefined;
273
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
253
274
  }>, {
254
275
  model?: string | undefined;
255
276
  provider?: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama" | undefined;
@@ -260,6 +281,7 @@ export declare const LLMUpdatesSchema: z.ZodEffects<z.ZodObject<{
260
281
  maxOutputTokens?: number | undefined;
261
282
  temperature?: number | undefined;
262
283
  allowedMediaTypes?: string[] | undefined;
284
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
263
285
  }, {
264
286
  model?: string | undefined;
265
287
  provider?: "openai" | "openai-compatible" | "anthropic" | "google" | "groq" | "xai" | "cohere" | "openrouter" | "litellm" | "glama" | "vertex" | "bedrock" | "local" | "ollama" | undefined;
@@ -270,6 +292,7 @@ export declare const LLMUpdatesSchema: z.ZodEffects<z.ZodObject<{
270
292
  maxOutputTokens?: number | undefined;
271
293
  temperature?: number | undefined;
272
294
  allowedMediaTypes?: string[] | undefined;
295
+ reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | undefined;
273
296
  }>;
274
297
  export type LLMUpdates = z.input<typeof LLMUpdatesSchema>;
275
298
  export type { LLMUpdateContext } from '../llm/types.js';
@@ -1 +1 @@
1
- {"version":3,"file":"schemas.d.ts","sourceRoot":"","sources":["../../src/llm/schemas.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAaxB;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACjC;;;;;;;;;;;;;OAaG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACpB;AAsDD,4CAA4C;AAG5C,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAcnB,CAAC;AAEd;;;;;GAKG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,GAAE,oBAAyB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0BA4IvE;AAED;;;GAGG;AACH,eAAO,MAAM,eAAe;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;yBAA0C,CAAC;AAEvE;;;GAGG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;yBAA2C,CAAC;AAG/E,MAAM,MAAM,SAAS,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,eAAe,CAAC,CAAC;AACxD,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,MAAM,CAAC,OAAO,eAAe,CAAC,CAAC;AAIlE,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAYvB,CAAC;AACP,MAAM,MAAM,UAAU,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,gBAAgB,CAAC,CAAC;AAE1D,YAAY,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC"}
1
+ {"version":3,"file":"schemas.d.ts","sourceRoot":"","sources":["../../src/llm/schemas.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAaxB;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACjC;;;;;;;;;;;;;OAaG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACpB;AA0ED,4CAA4C;AAG5C,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAgBnB,CAAC;AAEd;;;;;GAKG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,GAAE,oBAAyB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0BA4IvE;AAED;;;GAGG;AACH,eAAO,MAAM,eAAe;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;yBAA0C,CAAC;AAEvE;;;GAGG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;yBAA2C,CAAC;AAG/E,MAAM,MAAM,SAAS,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,eAAe,CAAC,CAAC;AACxD,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,MAAM,CAAC,OAAO,eAAe,CAAC,CAAC;AAIlE,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAYvB,CAAC;AACP,MAAM,MAAM,UAAU,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,gBAAgB,CAAC,CAAC;AAE1D,YAAY,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC"}
@@ -31,6 +31,20 @@ const LLMConfigFields = {
31
31
  temperature: z.coerce.number().min(0).max(1).optional().describe("Randomness: 0 deterministic, 1 creative"),
32
32
  allowedMediaTypes: z.array(z.string()).optional().describe(
33
33
  'MIME type patterns for media expansion (e.g., "image/*", "application/pdf"). If omitted, uses model capabilities from registry. Supports wildcards.'
34
+ ),
35
+ // Provider-specific options
36
+ /**
37
+ * OpenAI reasoning effort level for reasoning-capable models (o1, o3, codex, gpt-5.x).
38
+ * Controls how many reasoning tokens the model generates before producing a response.
39
+ * - 'none': No reasoning, fastest responses
40
+ * - 'minimal': Barely any reasoning, very fast responses
41
+ * - 'low': Light reasoning, fast responses
42
+ * - 'medium': Balanced reasoning (OpenAI's recommended daily driver)
43
+ * - 'high': Thorough reasoning for complex tasks
44
+ * - 'xhigh': Extra high reasoning for quality-critical, non-latency-sensitive tasks
45
+ */
46
+ reasoningEffort: z.enum(["none", "minimal", "low", "medium", "high", "xhigh"]).optional().describe(
47
+ "OpenAI reasoning effort level for reasoning models (o1, o3, codex). Options: 'none', 'minimal', 'low', 'medium' (recommended), 'high', 'xhigh'"
34
48
  )
35
49
  };
36
50
  const LLMConfigBaseSchema = z.object({
@@ -39,12 +53,14 @@ const LLMConfigBaseSchema = z.object({
39
53
  // apiKey is optional at schema level - validated based on provider in superRefine
40
54
  apiKey: LLMConfigFields.apiKey,
41
55
  // Apply defaults only for complete config validation
42
- maxIterations: z.coerce.number().int().positive().default(50),
56
+ maxIterations: z.coerce.number().int().positive().optional(),
43
57
  baseURL: LLMConfigFields.baseURL,
44
58
  maxInputTokens: LLMConfigFields.maxInputTokens,
45
59
  maxOutputTokens: LLMConfigFields.maxOutputTokens,
46
60
  temperature: LLMConfigFields.temperature,
47
- allowedMediaTypes: LLMConfigFields.allowedMediaTypes
61
+ allowedMediaTypes: LLMConfigFields.allowedMediaTypes,
62
+ // Provider-specific options
63
+ reasoningEffort: LLMConfigFields.reasoningEffort
48
64
  }).strict();
49
65
  function createLLMConfigSchema(options = {}) {
50
66
  const { strict = true } = options;
@@ -142,7 +142,9 @@ class VercelLLMService {
142
142
  maxSteps: this.config.maxIterations,
143
143
  maxOutputTokens: this.config.maxOutputTokens,
144
144
  temperature: this.config.temperature,
145
- baseURL: this.config.baseURL
145
+ baseURL: this.config.baseURL,
146
+ // Provider-specific options
147
+ reasoningEffort: this.config.reasoningEffort
146
148
  },
147
149
  { provider: this.config.provider, model: this.getModelId() },
148
150
  this.logger,
@@ -1 +1 @@
1
- {"version":3,"file":"vercel.d.ts","sourceRoot":"","sources":["../../../src/llm/services/vercel.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAqB,MAAM,IAAI,CAAC;AACtD,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAE7D,OAAO,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAC/C,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAG1D,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC7D,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,gCAAgC,CAAC;AACnF,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AAEzE,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAIxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AACrE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAGhE,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AAEzD;;;;;;;;;;;;;GAaG;AACH,qBAIa,gBAAgB;IACzB,OAAO,CAAC,KAAK,CAAgB;IAC7B,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,WAAW,CAAc;IACjC,OAAO,CAAC,cAAc,CAA+B;IACrD,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAS;IACnC,OAAO,CAAC,MAAM,CAAe;IAC7B,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,YAAY,CAAsB;IAC1C,OAAO,CAAC,kBAAkB,CAEf;IAEX;;OAEG;IACH,OAAO,CAAC,UAAU;gBAKd,WAAW,EAAE,WAAW,EACxB,KAAK,EAAE,aAAa,EACpB,mBAAmB,EAAE,mBAAmB,EACxC,eAAe,EAAE,4BAA4B,EAC7C,eAAe,EAAE,eAAe,EAChC,MAAM,EAAE,kBAAkB,EAC1B,SAAS,EAAE,MAAM,EACjB,eAAe,EAAE,eAAe,EAChC,MAAM,EAAE,YAAY,EACpB,kBAAkB,CAAC,EAAE,OAAO,mCAAmC,EAAE,mBAAmB,GAAG,IAAI;IAkC/F,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC;IAI/B;;OAEG;IACH,OAAO,CAAC,kBAAkB;IAuB1B;;;;;;;OAOG;IACG,MAAM,CAAC,OAAO,EAAE,YAAY,EAAE,OAAO,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,WAAW,CAAA;KAAE,GAAG,OAAO,CAAC,MAAM,CAAC;IAsDxF;;;OAGG;IACH,SAAS,IAAI,gBAAgB;IA+B7B;;OAEG;IACH,iBAAiB,IAAI,cAAc,CAAC,OAAO,CAAC;IAI5C;;OAEG;IACH,eAAe,IAAI,mBAAmB;CAGzC"}
1
+ {"version":3,"file":"vercel.d.ts","sourceRoot":"","sources":["../../../src/llm/services/vercel.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAqB,MAAM,IAAI,CAAC;AACtD,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAE7D,OAAO,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAC/C,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAG1D,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC7D,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,gCAAgC,CAAC;AACnF,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AAEzE,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAIxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AACrE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAGhE,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AAEzD;;;;;;;;;;;;;GAaG;AACH,qBAIa,gBAAgB;IACzB,OAAO,CAAC,KAAK,CAAgB;IAC7B,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,WAAW,CAAc;IACjC,OAAO,CAAC,cAAc,CAA+B;IACrD,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAS;IACnC,OAAO,CAAC,MAAM,CAAe;IAC7B,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,YAAY,CAAsB;IAC1C,OAAO,CAAC,kBAAkB,CAEf;IAEX;;OAEG;IACH,OAAO,CAAC,UAAU;gBAKd,WAAW,EAAE,WAAW,EACxB,KAAK,EAAE,aAAa,EACpB,mBAAmB,EAAE,mBAAmB,EACxC,eAAe,EAAE,4BAA4B,EAC7C,eAAe,EAAE,eAAe,EAChC,MAAM,EAAE,kBAAkB,EAC1B,SAAS,EAAE,MAAM,EACjB,eAAe,EAAE,eAAe,EAChC,MAAM,EAAE,YAAY,EACpB,kBAAkB,CAAC,EAAE,OAAO,mCAAmC,EAAE,mBAAmB,GAAG,IAAI;IAkC/F,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC;IAI/B;;OAEG;IACH,OAAO,CAAC,kBAAkB;IAyB1B;;;;;;;OAOG;IACG,MAAM,CAAC,OAAO,EAAE,YAAY,EAAE,OAAO,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,WAAW,CAAA;KAAE,GAAG,OAAO,CAAC,MAAM,CAAC;IAsDxF;;;OAGG;IACH,SAAS,IAAI,gBAAgB;IA+B7B;;OAEG;IACH,iBAAiB,IAAI,cAAc,CAAC,OAAO,CAAC;IAI5C;;OAEG;IACH,eAAe,IAAI,mBAAmB;CAGzC"}
@@ -79,7 +79,9 @@ class VercelLLMService {
79
79
  maxSteps: this.config.maxIterations,
80
80
  maxOutputTokens: this.config.maxOutputTokens,
81
81
  temperature: this.config.temperature,
82
- baseURL: this.config.baseURL
82
+ baseURL: this.config.baseURL,
83
+ // Provider-specific options
84
+ reasoningEffort: this.config.reasoningEffort
83
85
  },
84
86
  { provider: this.config.provider, model: this.getModelId() },
85
87
  this.logger,
@@ -375,9 +375,13 @@ ${import_chalk.default.white(JSON.stringify(args, null, 2))}`,
375
375
  console.log(`\u{1F4AC} ${import_chalk.default.bold("Session:")} ${import_chalk.default.blue(info.sessionId)}`);
376
376
  }
377
377
  if (info.logLevel && info.logFile) {
378
- console.log(
379
- `\u{1F4CB} ${import_chalk.default.bold("Log Level:")} ${import_chalk.default.cyan(info.logLevel)} ${import_chalk.default.dim(`(file: ${info.logFile})`)}`
380
- );
378
+ if (process.env.DEXTO_PRIVACY_MODE === "true") {
379
+ console.log(`\u{1F4CB} ${import_chalk.default.bold("Log Level:")} ${import_chalk.default.cyan(info.logLevel)}`);
380
+ } else {
381
+ console.log(
382
+ `\u{1F4CB} ${import_chalk.default.bold("Log Level:")} ${import_chalk.default.cyan(info.logLevel)} ${import_chalk.default.dim(`(file: ${info.logFile})`)}`
383
+ );
384
+ }
381
385
  }
382
386
  }
383
387
  displayError(message, error) {
@@ -1 +1 @@
1
- {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/logger/logger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAyCG;AAwCH,KAAK,UAAU,GACT,OAAO,GACP,KAAK,GACL,OAAO,GACP,QAAQ,GACR,MAAM,GACN,SAAS,GACT,MAAM,GACN,OAAO,GACP,MAAM,GACN,MAAM,GACN,aAAa,GACb,WAAW,GACX,aAAa,GACb,cAAc,GACd,YAAY,GACZ,eAAe,GACf,YAAY,GACZ,aAAa,CAAC;AA2CpB,MAAM,WAAW,aAAa;IAC1B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;CAC1B;AAWD,qBAAa,MAAM;IACf,OAAO,CAAC,MAAM,CAAiB;IAC/B,OAAO,CAAC,QAAQ,CAAkB;IAClC,OAAO,CAAC,WAAW,CAAuB;IAC1C,OAAO,CAAC,YAAY,CAAkB;gBAE1B,OAAO,GAAE,aAAkB;IAsBvC,OAAO,CAAC,oBAAoB;IAe5B,OAAO,CAAC,gBAAgB;IA4ExB,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IASrD,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQpD,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQpD,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQpD,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQvD,KAAK,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAU9D,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IASrD,iBAAiB,CAAC,QAAQ,EAAE,GAAG;IAkB/B,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG;IAUpC,UAAU,CAAC,MAAM,EAAE,GAAG;IA4EtB,QAAQ,CAAC,KAAK,EAAE,MAAM;IAatB,cAAc,IAAI,MAAM,GAAG,IAAI;IAK/B,QAAQ,IAAI,MAAM;IAKlB,kBAAkB,CAAC,IAAI,EAAE;QACrB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,gBAAgB,CAAC,EAAE;YAAE,KAAK,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,EAAE,CAAA;SAAE,CAAC;QACtD,iBAAiB,CAAC,EAAE;YAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;SAAE,CAAC;QAC9C,SAAS,CAAC,EAAE;YAAE,KAAK,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAC;YAAC,QAAQ,EAAE,MAAM,CAAA;SAAE,CAAC;QAC7D,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,CAAC;KACpB;IAoDD,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,KAAK
;CAkB9C;AAGD,eAAO,MAAM,MAAM,QAAe,CAAC"}
1
+ {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/logger/logger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAyCG;AAwCH,KAAK,UAAU,GACT,OAAO,GACP,KAAK,GACL,OAAO,GACP,QAAQ,GACR,MAAM,GACN,SAAS,GACT,MAAM,GACN,OAAO,GACP,MAAM,GACN,MAAM,GACN,aAAa,GACb,WAAW,GACX,aAAa,GACb,cAAc,GACd,YAAY,GACZ,eAAe,GACf,YAAY,GACZ,aAAa,CAAC;AA2CpB,MAAM,WAAW,aAAa;IAC1B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;CAC1B;AAWD,qBAAa,MAAM;IACf,OAAO,CAAC,MAAM,CAAiB;IAC/B,OAAO,CAAC,QAAQ,CAAkB;IAClC,OAAO,CAAC,WAAW,CAAuB;IAC1C,OAAO,CAAC,YAAY,CAAkB;gBAE1B,OAAO,GAAE,aAAkB;IAsBvC,OAAO,CAAC,oBAAoB;IAe5B,OAAO,CAAC,gBAAgB;IA4ExB,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IASrD,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQpD,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQpD,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQpD,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAQvD,KAAK,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IAU9D,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,UAAU;IASrD,iBAAiB,CAAC,QAAQ,EAAE,GAAG;IAkB/B,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG;IAUpC,UAAU,CAAC,MAAM,EAAE,GAAG;IA4EtB,QAAQ,CAAC,KAAK,EAAE,MAAM;IAatB,cAAc,IAAI,MAAM,GAAG,IAAI;IAK/B,QAAQ,IAAI,MAAM;IAKlB,kBAAkB,CAAC,IAAI,EAAE;QACrB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,gBAAgB,CAAC,EAAE;YAAE,KAAK,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,EAAE,CAAA;SAAE,CAAC;QACtD,iBAAiB,CAAC,EAAE;YAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;SAAE,CAAC;QAC9C,SAAS,CAAC,EAAE;YAAE,KAAK,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAC;YAAC,QAAQ,EAAE,MAAM,CAAA;SAAE,CAAC;QAC7D,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,CAAC;KACpB;IAyDD,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,KAAK
;CAkB9C;AAGD,eAAO,MAAM,MAAM,QAAe,CAAC"}
@@ -342,9 +342,13 @@ ${chalk.white(JSON.stringify(args, null, 2))}`,
342
342
  console.log(`\u{1F4AC} ${chalk.bold("Session:")} ${chalk.blue(info.sessionId)}`);
343
343
  }
344
344
  if (info.logLevel && info.logFile) {
345
- console.log(
346
- `\u{1F4CB} ${chalk.bold("Log Level:")} ${chalk.cyan(info.logLevel)} ${chalk.dim(`(file: ${info.logFile})`)}`
347
- );
345
+ if (process.env.DEXTO_PRIVACY_MODE === "true") {
346
+ console.log(`\u{1F4CB} ${chalk.bold("Log Level:")} ${chalk.cyan(info.logLevel)}`);
347
+ } else {
348
+ console.log(
349
+ `\u{1F4CB} ${chalk.bold("Log Level:")} ${chalk.cyan(info.logLevel)} ${chalk.dim(`(file: ${info.logFile})`)}`
350
+ );
351
+ }
348
352
  }
349
353
  }
350
354
  displayError(message, error) {