@anonx3247/universal-agent-harness 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. package/LICENSE +21 -0
  2. package/README.md +265 -0
  3. package/dist/agent-harness.d.ts +3 -0
  4. package/dist/agent-harness.d.ts.map +1 -0
  5. package/dist/agent-harness.js +326 -0
  6. package/dist/agent-harness.js.map +1 -0
  7. package/dist/db/index.d.ts +6 -0
  8. package/dist/db/index.d.ts.map +1 -0
  9. package/dist/db/index.js +11 -0
  10. package/dist/db/index.js.map +1 -0
  11. package/dist/db/schema.d.ts +335 -0
  12. package/dist/db/schema.d.ts.map +1 -0
  13. package/dist/db/schema.js +39 -0
  14. package/dist/db/schema.js.map +1 -0
  15. package/dist/index.d.ts +99 -0
  16. package/dist/index.d.ts.map +1 -0
  17. package/dist/index.js +156 -0
  18. package/dist/index.js.map +1 -0
  19. package/dist/lib/assert.d.ts +2 -0
  20. package/dist/lib/assert.d.ts.map +1 -0
  21. package/dist/lib/assert.js +4 -0
  22. package/dist/lib/assert.js.map +1 -0
  23. package/dist/lib/async.d.ts +18 -0
  24. package/dist/lib/async.d.ts.map +1 -0
  25. package/dist/lib/async.js +41 -0
  26. package/dist/lib/async.js.map +1 -0
  27. package/dist/lib/error.d.ts +47 -0
  28. package/dist/lib/error.d.ts.map +1 -0
  29. package/dist/lib/error.js +101 -0
  30. package/dist/lib/error.js.map +1 -0
  31. package/dist/lib/fs.d.ts +3 -0
  32. package/dist/lib/fs.d.ts.map +1 -0
  33. package/dist/lib/fs.js +17 -0
  34. package/dist/lib/fs.js.map +1 -0
  35. package/dist/lib/mcp-config.d.ts +62 -0
  36. package/dist/lib/mcp-config.d.ts.map +1 -0
  37. package/dist/lib/mcp-config.js +80 -0
  38. package/dist/lib/mcp-config.js.map +1 -0
  39. package/dist/lib/mcp.d.ts +19 -0
  40. package/dist/lib/mcp.d.ts.map +1 -0
  41. package/dist/lib/mcp.js +105 -0
  42. package/dist/lib/mcp.js.map +1 -0
  43. package/dist/lib/prompts.d.ts +5 -0
  44. package/dist/lib/prompts.d.ts.map +1 -0
  45. package/dist/lib/prompts.js +63 -0
  46. package/dist/lib/prompts.js.map +1 -0
  47. package/dist/lib/utils.d.ts +5 -0
  48. package/dist/lib/utils.d.ts.map +1 -0
  49. package/dist/lib/utils.js +13 -0
  50. package/dist/lib/utils.js.map +1 -0
  51. package/dist/models/anthropic.d.ts +20 -0
  52. package/dist/models/anthropic.d.ts.map +1 -0
  53. package/dist/models/anthropic.js +312 -0
  54. package/dist/models/anthropic.js.map +1 -0
  55. package/dist/models/deepseek/index.d.ts +20 -0
  56. package/dist/models/deepseek/index.d.ts.map +1 -0
  57. package/dist/models/deepseek/index.js +210 -0
  58. package/dist/models/deepseek/index.js.map +1 -0
  59. package/dist/models/gemini.d.ts +20 -0
  60. package/dist/models/gemini.d.ts.map +1 -0
  61. package/dist/models/gemini.js +250 -0
  62. package/dist/models/gemini.js.map +1 -0
  63. package/dist/models/index.d.ts +70 -0
  64. package/dist/models/index.d.ts.map +1 -0
  65. package/dist/models/index.js +30 -0
  66. package/dist/models/index.js.map +1 -0
  67. package/dist/models/mistral.d.ts +19 -0
  68. package/dist/models/mistral.d.ts.map +1 -0
  69. package/dist/models/mistral.js +259 -0
  70. package/dist/models/mistral.js.map +1 -0
  71. package/dist/models/moonshotai.d.ts +20 -0
  72. package/dist/models/moonshotai.d.ts.map +1 -0
  73. package/dist/models/moonshotai.js +220 -0
  74. package/dist/models/moonshotai.js.map +1 -0
  75. package/dist/models/openai.d.ts +21 -0
  76. package/dist/models/openai.d.ts.map +1 -0
  77. package/dist/models/openai.js +302 -0
  78. package/dist/models/openai.js.map +1 -0
  79. package/dist/models/provider.d.ts +19 -0
  80. package/dist/models/provider.d.ts.map +1 -0
  81. package/dist/models/provider.js +95 -0
  82. package/dist/models/provider.js.map +1 -0
  83. package/dist/resources/experiment.d.ts +18 -0
  84. package/dist/resources/experiment.d.ts.map +1 -0
  85. package/dist/resources/experiment.js +55 -0
  86. package/dist/resources/experiment.js.map +1 -0
  87. package/dist/resources/messages.d.ts +23 -0
  88. package/dist/resources/messages.d.ts.map +1 -0
  89. package/dist/resources/messages.js +81 -0
  90. package/dist/resources/messages.js.map +1 -0
  91. package/dist/runner/config.d.ts +4 -0
  92. package/dist/runner/config.d.ts.map +1 -0
  93. package/dist/runner/config.js +2 -0
  94. package/dist/runner/config.js.map +1 -0
  95. package/dist/runner/index.d.ts +41 -0
  96. package/dist/runner/index.d.ts.map +1 -0
  97. package/dist/runner/index.js +370 -0
  98. package/dist/runner/index.js.map +1 -0
  99. package/dist/tools/index.d.ts +2 -0
  100. package/dist/tools/index.d.ts.map +1 -0
  101. package/dist/tools/index.js +4 -0
  102. package/dist/tools/index.js.map +1 -0
  103. package/package.json +57 -0
  104. package/profiles/example/prompt.md +54 -0
  105. package/profiles/example/settings.json +11 -0
  106. package/profiles/example/settings.json.example +38 -0
@@ -0,0 +1,250 @@
+ import { FunctionCallingConfigMode, GoogleGenAI, } from "@google/genai";
+ import { LLM, } from "./index";
+ import { err, ok } from "@app/lib/error";
+ import { assertNever } from "@app/lib/assert";
+ import { removeNulls } from "@app/lib/utils";
+ export function isGeminiModel(model) {
+     return [
+         "gemini-3-pro-preview",
+         "gemini-2.5-pro",
+         "gemini-2.5-flash",
+         "gemini-2.5-flash-lite",
+     ].includes(model);
+ }
+ function normalizeTokenPrices(costPerMillionInputTokens, costPerMillionOutputTokens) {
+     return {
+         input: costPerMillionInputTokens / 1_000_000,
+         output: costPerMillionOutputTokens / 1_000_000,
+     };
+ }
+ // https://ai.google.dev/gemini-api/docs/pricing
+ const TOKEN_PRICING = {
+     "gemini-3-pro-preview": normalizeTokenPrices(2, 12),
+     "gemini-2.5-pro": normalizeTokenPrices(1.25, 10),
+     "gemini-2.5-flash": normalizeTokenPrices(0.3, 2.5),
+     "gemini-2.5-flash-lite": normalizeTokenPrices(0.1, 0.4),
+ };
+ export class GeminiLLM extends LLM {
+     client;
+     model;
+     constructor(config, model = "gemini-2.5-flash-lite") {
+         super(config);
+         this.client = new GoogleGenAI({});
+         this.model = model;
+     }
+     contents(messages) {
+         const contents = messages.map((msg) => {
+             return {
+                 role: msg.role === "agent" ? "model" : "user",
+                 parts: removeNulls(msg.content.map((content) => {
+                     switch (content.type) {
+                         case "text":
+                             return {
+                                 text: content.text,
+                                 thoughtSignature: content.provider?.gemini?.thoughtSignature,
+                             };
+                         case "tool_use":
+                             return {
+                                 functionCall: {
+                                     args: content.input,
+                                     id: content.id,
+                                     name: content.name,
+                                 },
+                                 thoughtSignature: content.provider?.gemini?.thoughtSignature,
+                             };
+                         case "tool_result":
+                             return {
+                                 functionResponse: {
+                                     id: content.toolUseId,
+                                     name: content.toolUseName,
+                                     response: content.isError
+                                         ? {
+                                             error: content.content,
+                                         }
+                                         : {
+                                             output: content.content,
+                                         },
+                                 },
+                             };
+                         case "thinking": {
+                             if (content.provider?.gemini) {
+                                 return {
+                                     thought: true,
+                                     text: content.thinking,
+                                     thoughtSignature: content.provider.gemini.thoughtSignature,
+                                 };
+                             }
+                             return null;
+                         }
+                         default:
+                             assertNever(content);
+                     }
+                 })),
+             };
+         });
+         return contents;
+     }
+     async run(messages, prompt, tools) {
+         try {
+             const response = await this.client.models.generateContent({
+                 model: this.model,
+                 contents: [
+                     {
+                         role: "user",
+                         parts: [{ text: prompt }],
+                     },
+                     ...this.contents(messages),
+                 ],
+                 config: {
+                     thinkingConfig: {
+                         // thinking: true/undefined = enabled (-1 = unlimited), false = disabled (0)
+                         thinkingBudget: this.config.thinking === false ? 0 : -1,
+                         includeThoughts: this.config.thinking !== false,
+                     },
+                     toolConfig: {
+                         functionCallingConfig: {
+                             mode: FunctionCallingConfigMode.AUTO,
+                         },
+                     },
+                     tools: [
+                         {
+                             functionDeclarations: tools.map((tool) => {
+                                 return {
+                                     name: tool.name,
+                                     description: tool.description ?? "",
+                                     parametersJsonSchema: tool.inputSchema,
+                                 };
+                             }),
+                         },
+                     ],
+                 },
+             });
+             if (!response.candidates || response.candidates.length !== 1) {
+                 return err("model_error", "Gemini model returned no candidates");
+             }
+             const candidate = response.candidates[0];
+             const content = candidate.content;
+             if (!content) {
+                 return ok({
+                     message: {
+                         role: "agent",
+                         content: [],
+                     },
+                 });
+             }
+             const tokenUsage = response.usageMetadata &&
+                 response.usageMetadata.totalTokenCount &&
+                 response.usageMetadata.promptTokenCount &&
+                 response.usageMetadata.candidatesTokenCount
+                 ? this.tokenUsage(response.usageMetadata)
+                 : undefined;
+             return ok({
+                 message: {
+                     role: content.role === "model" ? "agent" : "user",
+                     content: removeNulls((content.parts ?? []).map((part) => {
+                         if (part.text) {
+                             if (part.thought) {
+                                 return {
+                                     type: "thinking",
+                                     thinking: part.text,
+                                     provider: {
+                                         gemini: {
+                                             thought: true,
+                                             thoughtSignature: part.thoughtSignature,
+                                         },
+                                     },
+                                 };
+                             }
+                             else {
+                                 const c = {
+                                     type: "text",
+                                     text: part.text,
+                                     provider: null,
+                                 };
+                                 if (part.thoughtSignature) {
+                                     c.provider = {
+                                         gemini: { thoughtSignature: part.thoughtSignature },
+                                     };
+                                 }
+                                 return c;
+                             }
+                         }
+                         if (part.functionCall) {
+                             const c = {
+                                 type: "tool_use",
+                                 id: part.functionCall.id ??
+                                     `tool_use_${Math.random().toString(36).substring(2)}`,
+                                 name: part.functionCall.name ?? "tool_use_gemini_no_name",
+                                 input: part.functionCall.args,
+                                 provider: null,
+                             };
+                             if (part.thoughtSignature) {
+                                 c.provider = {
+                                     gemini: { thoughtSignature: part.thoughtSignature },
+                                 };
+                             }
+                             return c;
+                         }
+                         return null;
+                     })),
+                 },
+                 tokenUsage,
+             });
+         }
+         catch (error) {
+             return err("model_error", "Failed to run model", error);
+         }
+     }
+     tokenUsage(usage) {
+         return {
+             total: usage.totalTokenCount ?? 0,
+             input: usage.promptTokenCount ?? 0,
+             output: usage.candidatesTokenCount ?? 0,
+             cached: usage.cachedContentTokenCount ?? 0,
+             thinking: usage.thoughtsTokenCount ?? 0,
+         };
+     }
+     costPerTokenUsage(tokenUsage) {
+         const pricing = TOKEN_PRICING[this.model];
+         const c = tokenUsage.input * pricing.input +
+             tokenUsage.output * pricing.output;
+         return c;
+     }
+     async tokens(messages, prompt, _tools) {
+         try {
+             const response = await this.client.models.countTokens({
+                 model: this.model,
+                 contents: [
+                     {
+                         role: "user",
+                         parts: [{ text: prompt }],
+                     },
+                     ...this.contents(messages),
+                 ],
+                 config: {
+                     // No tools for countTokens
+                 },
+             });
+             if (!response.totalTokens) {
+                 return err("model_error", "Gemini model returned no token counts");
+             }
+             return ok(response.totalTokens);
+         }
+         catch (error) {
+             return err("model_error", "Failed to count tokens", error);
+         }
+     }
+     maxTokens() {
+         switch (this.model) {
+             case "gemini-2.5-pro":
+             case "gemini-2.5-flash":
+             case "gemini-2.5-flash-lite":
+                 return 1048576 - 65536;
+             case "gemini-3-pro-preview":
+                 return 200000 - 65536;
+             default:
+                 assertNever(this.model);
+         }
+     }
+ }
+ //# sourceMappingURL=gemini.js.map
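The hunk above is dist/models/gemini.js (entry 61 in the file list). As a quick orientation aid, here is a small TypeScript sketch of how the GeminiLLM adapter might be driven once the package is installed. It is illustrative only and not part of the package: the import specifiers, the echo tool, and the sample message are assumptions, and authentication is left to GoogleGenAI's own environment-based credential lookup.

// Hypothetical usage sketch (not shipped with the package).
// Assumes the compiled output is importable from "./dist/models/gemini.js" and that
// Gemini API credentials are picked up by GoogleGenAI from the environment.
import { GeminiLLM } from "./dist/models/gemini.js";
import type { Message, Tool } from "./dist/models/index.js";

const llm = new GeminiLLM({ thinking: false }, "gemini-2.5-flash");

const tools: Tool[] = [
    {
        name: "echo",
        description: "Echo the input back to the caller",
        inputSchema: { type: "object", properties: { text: { type: "string" } } },
    },
];

const history: Message[] = [
    { role: "user", content: [{ type: "text", text: "Say hello via the echo tool.", provider: null }] },
];

const result = await llm.run(history, "You are a terse assistant.", tools);
// run() returns a Result wrapper; the exact ok/err accessors live in the package's
// @app/lib/error module, so this sketch only logs the raw value.
console.log(result);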
@@ -0,0 +1 @@
+ {"version":3,"file":"gemini.js","sourceRoot":"","sources":["../../src/models/gemini.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,yBAAyB,EAGzB,WAAW,GACZ,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,GAAG,GAOJ,MAAM,SAAS,CAAC;AACjB,OAAO,EAAU,GAAG,EAAE,EAAE,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAO7C,MAAM,UAAU,aAAa,CAAC,KAAa;IACzC,OAAO;QACL,sBAAsB;QACtB,gBAAgB;QAChB,kBAAkB;QAClB,uBAAuB;KACxB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACpB,CAAC;AAOD,SAAS,oBAAoB,CAC3B,yBAAiC,EACjC,0BAAkC;IAElC,OAAO;QACL,KAAK,EAAE,yBAAyB,GAAG,SAAS;QAC5C,MAAM,EAAE,0BAA0B,GAAG,SAAS;KAC/C,CAAC;AACJ,CAAC;AAED,gDAAgD;AAChD,MAAM,aAAa,GAA2C;IAC5D,sBAAsB,EAAE,oBAAoB,CAAC,CAAC,EAAE,EAAE,CAAC;IACnD,gBAAgB,EAAE,oBAAoB,CAAC,IAAI,EAAE,EAAE,CAAC;IAChD,kBAAkB,EAAE,oBAAoB,CAAC,GAAG,EAAE,GAAG,CAAC;IAClD,uBAAuB,EAAE,oBAAoB,CAAC,GAAG,EAAE,GAAG,CAAC;CACxD,CAAC;AAEF,MAAM,OAAO,SAAU,SAAQ,GAAG;IACxB,MAAM,CAAc;IACpB,KAAK,CAAc;IAE3B,YACE,MAAmB,EACnB,QAAqB,uBAAuB;QAE5C,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,MAAM,GAAG,IAAI,WAAW,CAAC,EAAE,CAAC,CAAC;QAClC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACrB,CAAC;IAED,QAAQ,CAAC,QAAmB;QAC1B,MAAM,QAAQ,GAAc,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;YAC/C,OAAO;gBACL,IAAI,EAAE,GAAG,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM;gBAC7C,KAAK,EAAE,WAAW,CAChB,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,EAAE;oBAC1B,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;wBACrB,KAAK,MAAM;4BACT,OAAO;gCACL,IAAI,EAAE,OAAO,CAAC,IAAI;gCAClB,gBAAgB,EAAE,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,gBAAgB;6BAC7D,CAAC;wBACJ,KAAK,UAAU;4BACb,OAAO;gCACL,YAAY,EAAE;oCACZ,IAAI,EAAE,OAAO,CAAC,KAAK;oCACnB,EAAE,EAAE,OAAO,CAAC,EAAE;oCACd,IAAI,EAAE,OAAO,CAAC,IAAI;iCACnB;gCACD,gBAAgB,EAAE,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,gBAAgB;6BAC7D,CAAC;wBACJ,KAAK,aAAa;4BAChB,OAAO;gCACL,gBAAgB,EAAE;oCAChB,EAAE,EAAE,OAAO,CAAC,SAAS;oCACrB,IAAI,EAAE,OAAO,CAAC,WAAW;oCACzB,QAAQ,EAAE,OAAO,CAAC,OAAO;wCACvB,CAAC,CAAC;4CACA,KAAK,EAAE,OAAO,CAAC,OAAO;yCACvB;wCACD,CAAC,CAAC;4CACA,MAAM,EAAE,OAAO,CAAC,OAAO;yCACxB;iCACJ;6BACF,CAAC;wBACJ,KAAK,UAAU,CAAC,CAAC,CAAC;4BAChB,IAAI,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,CAAC;gCAC7B,OAAO;oCACL,OAAO,EAAE,IAAI;oCACb,IAAI,EAAE,OAAO,CAAC,QAAQ;oCACtB,gBAAgB,EAAE,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,gBAAgB;iCAC3D,CAAC;4BACJ,CAAC;4BACD,OAAO,IAAI,CAAC;wBACd,CAAC;wBACD;4BACE,WAAW,CAAC,OAAO,CAAC,CAAC;oBACzB,CAAC;gBACH,CAAC,CAAC,CACH;aACF,CAAC;QACJ,CAAC,CAAC,CAAC;QACH,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,KAAK,CAAC,GAAG,CACP,QAAmB,EACnB,MAAc,EACd,KAAa;QAEb,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,eAAe,CAAC;gBACxD,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,QAAQ,EAAE;oBACR;wBACE,IAAI,EAAE,MAAM;wBACZ,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;qBAC1B;oBACD,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC;iBAC3B;gBACD,MAAM,EAAE;oBACN,cAAc,EAAE;wBACd,4EAA4E;wBAC5E,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;wBACvD,eAAe,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,KAAK,KAAK;qBAChD;oBACD,UAAU,EAAE;wBACV,qBAAqB,EAAE;4BACrB,IAAI,EAAE,yBAAyB,CAAC,IAAI;yBACrC;qBACF;oBACD,KAAK,EAAE;wBACL;4BACE,oBAAoB,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;gCACvC,OAAO;oCACL,IAAI,EAAE,IAAI,CAAC,IAAI;oCACf,WAAW,EAAE,IAAI,CAAC,WAAW,IAAI,EAAE;oCACnC,oBAAoB,EAAE,IAAI,CAAC,WAAW;iCAChB,CAAC;4BAC3B,CAAC,CAAC;yBACH;qBACF;iBACF;aACF,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBAC7D,OAAO,GAAG,CAAC,aAAa,EAAE,qCAAqC,CAAC,CAAC;YACnE,CAAC;YACD,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACzC,MAAM,OAAO,GAAG,SAAS,CAAC,OAAO,CAAC;YAClC,IAAI,CAAC,OAAO,EAAE,CAAC;gBACb,OAAO,EAAE,CAAC;oBACR,OAAO,EAAE;wBACP,IAAI,EA
AE,OAAO;wBACb,OAAO,EAAE,EAAE;qBACZ;iBACF,CAAC,CAAC;YACL,CAAC;YAED,MAAM,UAAU,GACd,QAAQ,CAAC,aAAa;gBACpB,QAAQ,CAAC,aAAa,CAAC,eAAe;gBACtC,QAAQ,CAAC,aAAa,CAAC,gBAAgB;gBACvC,QAAQ,CAAC,aAAa,CAAC,oBAAoB;gBAC3C,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,aAAa,CAAC;gBACzC,CAAC,CAAC,SAAS,CAAC;YAEhB,OAAO,EAAE,CAAC;gBACR,OAAO,EAAE;oBACP,IAAI,EAAE,OAAO,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM;oBACjD,OAAO,EAAE,WAAW,CAClB,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;wBACjC,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;4BACd,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;gCACjB,OAAO;oCACL,IAAI,EAAE,UAAU;oCAChB,QAAQ,EAAE,IAAI,CAAC,IAAI;oCACnB,QAAQ,EAAE;wCACR,MAAM,EAAE;4CACN,OAAO,EAAE,IAAI;4CACb,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;yCACxC;qCACF;iCACF,CAAC;4BACJ,CAAC;iCAAM,CAAC;gCACN,MAAM,CAAC,GAAgB;oCACrB,IAAI,EAAE,MAAM;oCACZ,IAAI,EAAE,IAAI,CAAC,IAAI;oCACf,QAAQ,EAAE,IAAI;iCACf,CAAC;gCACF,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;oCAC1B,CAAC,CAAC,QAAQ,GAAG;wCACX,MAAM,EAAE,EAAE,gBAAgB,EAAE,IAAI,CAAC,gBAAgB,EAAE;qCACpD,CAAC;gCACJ,CAAC;gCACD,OAAO,CAAC,CAAC;4BACX,CAAC;wBACH,CAAC;wBACD,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;4BACtB,MAAM,CAAC,GAAY;gCACjB,IAAI,EAAE,UAAU;gCAChB,EAAE,EACA,IAAI,CAAC,YAAY,CAAC,EAAE;oCACpB,YAAY,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;gCACvD,IAAI,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,yBAAyB;gCACzD,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI;gCAC7B,QAAQ,EAAE,IAAI;6BACf,CAAC;4BACF,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;gCAC1B,CAAC,CAAC,QAAQ,GAAG;oCACX,MAAM,EAAE,EAAE,gBAAgB,EAAE,IAAI,CAAC,gBAAgB,EAAE;iCACpD,CAAC;4BACJ,CAAC;4BACD,OAAO,CAAC,CAAC;wBACX,CAAC;wBACD,OAAO,IAAI,CAAC;oBACd,CAAC,CAAC,CACH;iBACF;gBACD,UAAU;aACX,CAAC,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,OAAO,GAAG,CAAC,aAAa,EAAE,qBAAqB,EAAE,KAAK,CAAC,CAAC;QAC1D,CAAC;IACH,CAAC;IAEO,UAAU,CAAC,KAA2C;QAC5D,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,IAAI,CAAC;YACjC,KAAK,EAAE,KAAK,CAAC,gBAAgB,IAAI,CAAC;YAClC,MAAM,EAAE,KAAK,CAAC,oBAAoB,IAAI,CAAC;YACvC,MAAM,EAAE,KAAK,CAAC,uBAAuB,IAAI,CAAC;YAC1C,QAAQ,EAAE,KAAK,CAAC,kBAAkB,IAAI,CAAC;SACxC,CAAC;IACJ,CAAC;IAES,iBAAiB,CAAC,UAAsB;QAChD,MAAM,OAAO,GAAG,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1C,MAAM,CAAC,GACL,UAAU,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK;YAChC,UAAU,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;QACrC,OAAO,CAAC,CAAC;IACX,CAAC;IAED,KAAK,CAAC,MAAM,CACV,QAAmB,EACnB,MAAc,EACd,MAAc;QAEd,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC;gBACpD,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,QAAQ,EAAE;oBACR;wBACE,IAAI,EAAE,MAAM;wBACZ,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;qBAC1B;oBACD,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC;iBAC3B;gBACD,MAAM,EAAE;gBACN,2BAA2B;iBAC5B;aACF,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC;gBAC1B,OAAO,GAAG,CAAC,aAAa,EAAE,uCAAuC,CAAC,CAAC;YACrE,CAAC;YAED,OAAO,EAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;QAClC,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,OAAO,GAAG,CACR,aAAa,EACb,wBAAwB,EACxB,KAAK,CACN,CAAC;QACJ,CAAC;IACH,CAAC;IAED,SAAS;QACP,QAAQ,IAAI,CAAC,KAAK,EAAE,CAAC;YACnB,KAAK,gBAAgB,CAAC;YACtB,KAAK,kBAAkB,CAAC;YACxB,KAAK,uBAAuB;gBAC1B,OAAO,OAAO,GAAG,KAAK,CAAC;YACzB,KAAK,sBAAsB;gBACzB,OAAO,MAAM,GAAG,KAAK,CAAC;YACxB;gBACE,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC5B,CAAC;IACH,CAAC;CACF"}
@@ -0,0 +1,70 @@
+ import type { JSONSchema7 as JSONSchema } from "json-schema";
+ import { Result } from "@app/lib/error";
+ import type { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+ import { provider } from "./provider";
+ export type ProviderData = Partial<Record<provider, any>>;
+ export type TokenUsage = {
+     total: number;
+     input: number;
+     output: number;
+     cached: number;
+     thinking: number;
+ };
+ export interface TextContent {
+     type: "text";
+     text: string;
+     provider: ProviderData | null;
+ }
+ export interface ToolUse {
+     type: "tool_use";
+     id: string;
+     name: string;
+     input: any;
+     provider: ProviderData | null;
+ }
+ export interface Thinking {
+     type: "thinking";
+     thinking: string;
+     provider: ProviderData | null;
+ }
+ export interface ToolResult {
+     type: "tool_result";
+     toolUseId: string;
+     toolUseName: string;
+     content: CallToolResult["content"];
+     isError: boolean;
+ }
+ export interface Message {
+     role: "user" | "agent";
+     content: (TextContent | ToolUse | ToolResult | Thinking)[];
+ }
+ export interface ModelConfig {
+     maxTokens?: number;
+     thinking?: boolean;
+ }
+ export interface Tool {
+     name: string;
+     description?: string;
+     inputSchema: JSONSchema;
+ }
+ export declare abstract class LLM {
+     protected config: ModelConfig;
+     constructor(config: ModelConfig);
+     abstract run(messages: Message[], prompt: string, tools: Tool[]): Promise<Result<{
+         message: Message;
+         tokenUsage?: TokenUsage;
+     }>>;
+     abstract tokens(messages: Message[], prompt: string, tools: Tool[]): Promise<Result<number>>;
+     abstract maxTokens(): number;
+     /**
+      * Calculate the cost for a single TokenUsage.
+      * Each provider implements its own pricing logic.
+      */
+     protected abstract costPerTokenUsage(tokenUsage: TokenUsage): number;
+     /**
+      * Calculate the total cost for a list of TokenUsage objects.
+      * Accumulates tokens and calculates the total price.
+      */
+     cost(tokenUsages: TokenUsage[]): number;
+ }
+ //# sourceMappingURL=index.d.ts.map
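The hunk above is dist/models/index.d.ts (entry 15), which declares the shared Message, Tool, and TokenUsage shapes plus the abstract LLM base class that every provider adapter extends. The sketch below shows what a minimal subclass has to supply. It is a hypothetical example: the "./index" and "@app/lib/error" specifiers mirror the package's internal layout and are assumed to resolve, and the exact Result shape returned by ok() is inferred from how the compiled adapters use it.

// Minimal illustrative subclass of the abstract LLM class declared above (a sketch, not real provider code).
import { LLM } from "./index";
import type { Message, ModelConfig, Tool, TokenUsage } from "./index";
import { ok } from "@app/lib/error";
import type { Result } from "@app/lib/error";

class EchoLLM extends LLM {
    constructor(config: ModelConfig) {
        super(config);
    }

    async run(_messages: Message[], prompt: string, _tools: Tool[]): Promise<Result<{ message: Message; tokenUsage?: TokenUsage }>> {
        // Echo the prompt back as a single text block; a real adapter would call its provider API here.
        const message: Message = {
            role: "agent",
            content: [{ type: "text", text: prompt, provider: null }],
        };
        return ok({ message, tokenUsage: { total: 0, input: 0, output: 0, cached: 0, thinking: 0 } });
    }

    async tokens(_messages: Message[], prompt: string, _tools: Tool[]): Promise<Result<number>> {
        // Rough 4-characters-per-token heuristic, similar to the Mistral adapter further down.
        return ok(Math.floor(prompt.length / 4));
    }

    maxTokens(): number {
        return 128_000;
    }

    protected costPerTokenUsage(tokenUsage: TokenUsage): number {
        // Made-up flat pricing: $1 per million input tokens, $2 per million output tokens.
        return tokenUsage.input * 1e-6 + tokenUsage.output * 2e-6;
    }
}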
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/models/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,IAAI,UAAU,EAAE,MAAM,aAAa,CAAC;AAC7D,OAAO,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AACxC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,oCAAoC,CAAC;AACzE,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAEtC,MAAM,MAAM,YAAY,GAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;AAE1D,MAAM,MAAM,UAAU,GAAG;IACvB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,YAAY,GAAG,IAAI,CAAC;CAC/B;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,UAAU,CAAC;IACjB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,GAAG,CAAC;IACX,QAAQ,EAAE,YAAY,GAAG,IAAI,CAAC;CAC/B;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,YAAY,GAAG,IAAI,CAAC;CAC/B;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,aAAa,CAAC;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,cAAc,CAAC,SAAS,CAAC,CAAC;IACnC,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC;IACvB,OAAO,EAAE,CAAC,WAAW,GAAG,OAAO,GAAG,UAAU,GAAG,QAAQ,CAAC,EAAE,CAAC;CAC5D;AAED,MAAM,WAAW,WAAW;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,IAAI;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,UAAU,CAAC;CACzB;AAED,8BAAsB,GAAG;IACvB,SAAS,CAAC,MAAM,EAAE,WAAW,CAAC;gBAElB,MAAM,EAAE,WAAW;IAI/B,QAAQ,CAAC,GAAG,CACV,QAAQ,EAAE,OAAO,EAAE,EACnB,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,IAAI,EAAE,GACZ,OAAO,CAAC,MAAM,CAAC;QAAE,OAAO,EAAE,OAAO,CAAC;QAAC,UAAU,CAAC,EAAE,UAAU,CAAA;KAAE,CAAC,CAAC;IAEjE,QAAQ,CAAC,MAAM,CACb,QAAQ,EAAE,OAAO,EAAE,EACnB,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,IAAI,EAAE,GACZ,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAE1B,QAAQ,CAAC,SAAS,IAAI,MAAM;IAE5B;;;OAGG;IACH,SAAS,CAAC,QAAQ,CAAC,iBAAiB,CAAC,UAAU,EAAE,UAAU,GAAG,MAAM;IAEpE;;;OAGG;IACI,IAAI,CAAC,WAAW,EAAE,UAAU,EAAE,GAAG,MAAM;CAqB/C"}
@@ -0,0 +1,30 @@
+ export class LLM {
+     config;
+     constructor(config) {
+         this.config = config;
+     }
+     /**
+      * Calculate the total cost for a list of TokenUsage objects.
+      * Accumulates tokens and calculates the total price.
+      */
+     cost(tokenUsages) {
+         // Accumulate all token usages
+         const accumulated = {
+             total: 0,
+             input: 0,
+             output: 0,
+             cached: 0,
+             thinking: 0,
+         };
+         for (const usage of tokenUsages) {
+             accumulated.total += usage.total;
+             accumulated.input += usage.input;
+             accumulated.output += usage.output;
+             accumulated.cached += usage.cached;
+             accumulated.thinking += usage.thinking;
+         }
+         // Calculate cost for the accumulated usage
+         return this.costPerTokenUsage(accumulated);
+     }
+ }
+ //# sourceMappingURL=index.js.map
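The hunk above is the compiled dist/models/index.js (entry 17): the LLM base class folds a list of TokenUsage records into one aggregate and prices it with a single costPerTokenUsage call. Below is a short worked example of that arithmetic using the gemini-2.5-flash prices from the TOKEN_PRICING table earlier in this diff (0.3 and 2.5 dollars per million input and output tokens); the token counts themselves are invented for illustration, and the "./index" import mirrors the package's internal layout.

// Worked example of the accumulate-then-price behaviour of cost().
import type { TokenUsage } from "./index";

const usages: TokenUsage[] = [
    { total: 1_300, input: 1_000, output: 300, cached: 0, thinking: 0 },
    { total: 2_600, input: 2_000, output: 600, cached: 0, thinking: 0 },
];

// cost() first folds these into { input: 3_000, output: 900, ... } and then prices the
// aggregate once: 3_000 * 0.30e-6 + 900 * 2.50e-6 = 0.0009 + 0.00225 = 0.00315 dollars.
const expected = 3_000 * (0.3 / 1_000_000) + 900 * (2.5 / 1_000_000);
console.log(expected.toFixed(5)); // "0.00315"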
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/models/index.ts"],"names":[],"mappings":"AA2DA,MAAM,OAAgB,GAAG;IACb,MAAM,CAAc;IAE9B,YAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAsBD;;;OAGG;IACI,IAAI,CAAC,WAAyB;QACnC,8BAA8B;QAC9B,MAAM,WAAW,GAAe;YAC9B,KAAK,EAAE,CAAC;YACR,KAAK,EAAE,CAAC;YACR,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,CAAC;YACT,QAAQ,EAAE,CAAC;SACZ,CAAC;QAEF,KAAK,MAAM,KAAK,IAAI,WAAW,EAAE,CAAC;YAChC,WAAW,CAAC,KAAK,IAAI,KAAK,CAAC,KAAK,CAAC;YACjC,WAAW,CAAC,KAAK,IAAI,KAAK,CAAC,KAAK,CAAC;YACjC,WAAW,CAAC,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC;YACnC,WAAW,CAAC,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC;YACnC,WAAW,CAAC,QAAQ,IAAI,KAAK,CAAC,QAAQ,CAAC;QACzC,CAAC;QAED,2CAA2C;QAC3C,OAAO,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAC7C,CAAC;CACF"}
@@ -0,0 +1,19 @@
+ import { LLM, ModelConfig, Message, Tool, TokenUsage } from "./index";
+ import { Result } from "@app/lib/error";
+ export type MistralModel = "devstral-medium-latest" | "mistral-large-latest" | "mistral-small-latest" | "codestral-latest";
+ export declare function isMistralModel(model: string): model is MistralModel;
+ export declare class MistralLLM extends LLM {
+     private client;
+     private model;
+     constructor(config: ModelConfig, model?: MistralModel);
+     messages(messages: Message[]): ChatCompletionStreamRequest[];
+     run(messages: Message[], prompt: string, tools: Tool[]): Promise<Result<{
+         message: Message;
+         tokenUsage?: TokenUsage;
+     }>>;
+     private tokenUsage;
+     protected costPerTokenUsage(tokenUsage: TokenUsage): number;
+     tokens(messages: Message[], _prompt: string, _tools: Tool[]): Promise<Result<number>>;
+     maxTokens(): number;
+ }
+ //# sourceMappingURL=mistral.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"mistral.d.ts","sourceRoot":"","sources":["../../src/models/mistral.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,GAAG,EACH,WAAW,EACX,OAAO,EACP,IAAI,EAIJ,UAAU,EACX,MAAM,SAAS,CAAC;AACjB,OAAO,EAAE,MAAM,EAAW,MAAM,gBAAgB,CAAC;AAmCjD,MAAM,MAAM,YAAY,GACpB,wBAAwB,GACxB,sBAAsB,GACtB,sBAAsB,GACtB,kBAAkB,CAAC;AAEvB,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,KAAK,IAAI,YAAY,CAOnE;AAmBD,qBAAa,UAAW,SAAQ,GAAG;IACjC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,KAAK,CAAe;gBAG1B,MAAM,EAAE,WAAW,EACnB,KAAK,GAAE,YAAqC;IAO9C,QAAQ,CAAC,QAAQ,EAAE,OAAO,EAAE;IAmEtB,GAAG,CACP,QAAQ,EAAE,OAAO,EAAE,EACnB,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,IAAI,EAAE,GACZ,OAAO,CAAC,MAAM,CAAC;QAAE,OAAO,EAAE,OAAO,CAAC;QAAC,UAAU,CAAC,EAAE,UAAU,CAAA;KAAE,CAAC,CAAC;IAsGjE,OAAO,CAAC,UAAU;IAUlB,SAAS,CAAC,iBAAiB,CAAC,UAAU,EAAE,UAAU,GAAG,MAAM;IAQrD,MAAM,CACV,QAAQ,EAAE,OAAO,EAAE,EACnB,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,IAAI,EAAE,GACb,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAoC1B,SAAS,IAAI,MAAM;CAcpB"}
@@ -0,0 +1,259 @@
+ import { LLM, } from "./index";
+ import { err, ok } from "@app/lib/error";
+ import { assertNever } from "@app/lib/assert";
+ import { Mistral } from "@mistralai/mistralai";
+ import { isString, removeNulls } from "@app/lib/utils";
+ function normalizeTokenPrices(costPerMillionInputTokens, costPerMillionOutputTokens) {
+     return {
+         input: costPerMillionInputTokens / 1_000_000,
+         output: costPerMillionOutputTokens / 1_000_000,
+     };
+ }
+ // https://mistral.ai/pricing#api-pricing
+ const TOKEN_PRICING = {
+     "devstral-medium-latest": normalizeTokenPrices(0.4, 2),
+     "mistral-large-latest": normalizeTokenPrices(0.5, 1.5),
+     "mistral-small-latest": normalizeTokenPrices(0.1, 0.3),
+     "codestral-latest": normalizeTokenPrices(0.3, 0.9),
+ };
+ export function isMistralModel(model) {
+     return [
+         "devstral-medium-latest",
+         "mistral-large-latest",
+         "mistral-small-latest",
+         "codestral-latest",
+     ].includes(model);
+ }
+ // Sometimes mistral models send back wrong function names so we validate them here
+ function validateName(name) {
+     if (!(name.length <= 256)) {
+         return {
+             valid: false,
+             reason: `name: ${name} must be less than 256 characters`,
+         };
+     }
+     if (!name.match(/^[a-zA-Z0-9_-]+$/)) {
+         return {
+             valid: false,
+             reason: `name: ${name} must be alphanumeric`,
+         };
+     }
+     return { valid: true };
+ }
+ export class MistralLLM extends LLM {
+     client;
+     model;
+     constructor(config, model = "mistral-large-latest") {
+         super(config);
+         this.client = new Mistral();
+         this.model = model;
+     }
+     messages(messages) {
+         const mistralMessages = [];
+         for (const msg of messages) {
+             switch (msg.role) {
+                 case "user":
+                     mistralMessages.push(...msg.content
+                         .filter((c) => c.type === "tool_result")
+                         .map((c) => ({
+                         role: "tool",
+                         toolCallId: c.toolUseId,
+                         name: c.toolUseName,
+                         content: c.content
+                             .filter((c) => c.type === "text")
+                             .map((c) => ({
+                             type: "text",
+                             text: c.text,
+                         })),
+                     })));
+                     if (msg.content.find((c) => c.type === "text")) {
+                         mistralMessages.push({
+                             role: "user",
+                             content: msg.content
+                                 .filter((c) => c.type === "text")
+                                 .map((c) => ({
+                                 type: "text",
+                                 text: c.text,
+                             })),
+                         });
+                     }
+                     break;
+                 case "agent":
+                     const agentMsg = {
+                         role: "assistant",
+                         content: msg.content
+                             .filter((c) => c.type === "text") // We don't support thinking atm
+                             .map((c) => {
+                             switch (c.type) {
+                                 case "text":
+                                     return {
+                                         type: "text",
+                                         text: c.text,
+                                     };
+                             }
+                         }),
+                         toolCalls: msg.content
+                             .filter((c) => c.type === "tool_use")
+                             .map((c) => {
+                             return {
+                                 id: c.id,
+                                 type: "function",
+                                 function: {
+                                     name: c.name,
+                                     arguments: c.input,
+                                 },
+                             };
+                         }),
+                     };
+                     mistralMessages.push(agentMsg);
+             }
+         }
+         return mistralMessages;
+     }
+     async run(messages, prompt, tools) {
+         try {
+             const chatResponse = await this.client.chat.complete({
+                 model: this.model,
+                 messages: [
+                     {
+                         role: "system",
+                         content: prompt,
+                     },
+                     ...this.messages(messages),
+                 ],
+                 toolChoice: "auto",
+                 tools: tools.map((t) => ({
+                     type: "function",
+                     function: {
+                         name: t.name,
+                         description: t.description,
+                         parameters: t.inputSchema,
+                     },
+                 })),
+             });
+             const usage = chatResponse.usage;
+             const tokenUsage = !usage.totalTokens || !usage.promptTokens || !usage.completionTokens
+                 ? undefined
+                 : this.tokenUsage(usage);
+             const msg = chatResponse.choices[0].message;
+             const finishReason = chatResponse.choices[0].finishReason;
+             if (finishReason !== "stop" && finishReason !== "tool_calls") {
+                 return err("model_error", `Unexpected finish reason: ${finishReason}`, new Error(`Unexpected finish reason: ${finishReason}`));
+             }
+             const content = [];
+             if (msg.toolCalls) {
+                 for (const toolCall of msg.toolCalls) {
+                     const { valid, reason } = validateName(toolCall.function.name);
+                     if (valid) {
+                         content.push({
+                             type: "tool_use",
+                             id: toolCall.id ?? "",
+                             name: toolCall.function.name,
+                             input: isString(toolCall.function.arguments)
+                                 ? JSON.parse(toolCall.function.arguments)
+                                 : toolCall.function.arguments,
+                             provider: null,
+                         });
+                     }
+                     else {
+                         console.warn(`Mistral model received invalid tool name: ${toolCall.function.name}.
+ Reason: ${reason}`);
+                     }
+                 }
+             }
+             if (msg.content) {
+                 if (isString(msg.content)) {
+                     content.push({
+                         type: "text",
+                         text: msg.content,
+                         provider: null,
+                     });
+                 }
+                 else {
+                     content.push(...removeNulls(msg.content.map((c) => {
+                         switch (c.type) {
+                             case "text":
+                                 return {
+                                     type: "text",
+                                     text: c.text,
+                                     provider: null,
+                                 };
+                             default: // Note: thinking is not implemented yet for mistral
+                                 return null;
+                         }
+                     })));
+                 }
+             }
+             return ok({
+                 message: {
+                     role: "agent",
+                     content,
+                 },
+                 tokenUsage,
+             });
+         }
+         catch (error) {
+             return err("model_error", "Failed to run model", error);
+         }
+     }
+     tokenUsage(usage) {
+         return {
+             total: usage.totalTokens ?? 0,
+             input: usage.promptTokens ?? 0,
+             output: usage.completionTokens ?? 0,
+             cached: 0,
+             thinking: 0,
+         };
+     }
+     costPerTokenUsage(tokenUsage) {
+         const pricing = TOKEN_PRICING[this.model];
+         const c = tokenUsage.input * pricing.input +
+             tokenUsage.output * pricing.output;
+         return c;
+     }
+     async tokens(messages, _prompt, _tools) {
+         try {
+             // Mistral doesn't have a token counting API so we approximate with 1 token ~= 4 chars.
+             const tokens = messages.reduce((acc, m) => {
+                 const contentLength = m.content.reduce((acc, c) => {
+                     switch (c.type) {
+                         case "text":
+                             return acc + c.text.length;
+                         case "tool_use":
+                             return acc + c.name.length + JSON.stringify(c.input).length;
+                         case "thinking":
+                             // We don't have any thinking models yet
+                             // return acc + c.thinking.length;
+                             throw new Error("Thinking not implemented yet for mistral");
+                             return acc;
+                         case "tool_result":
+                             const contentLength = c.content
+                                 .filter((c) => c.type === "text")
+                                 .reduce((acc, c) => acc + c.text.length, 0);
+                             return acc + c.toolUseName.length + contentLength;
+                     }
+                 }, 0);
+                 return contentLength + acc;
+             }, 0) / 4;
+             return ok(Math.floor(tokens));
+         }
+         catch (error) {
+             return err("model_error", "Failed to count tokens", error);
+         }
+     }
+     maxTokens() {
+         switch (this.model) {
+             case "mistral-large-latest":
+                 return 256000;
+             case "mistral-small-latest":
+                 return 128000;
+             case "codestral-latest":
+                 return 32000;
+             case "devstral-medium-latest":
+                 return 128000;
+             default:
+                 assertNever(this.model);
+         }
+     }
+ }
+ //# sourceMappingURL=mistral.js.map
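The hunk above is dist/models/mistral.js (entry 69). Since Mistral has no token-counting endpoint, the adapter estimates context size as roughly one token per four characters of message content. The sketch below isolates that heuristic for plain text messages; the approximateTokens helper and the sample message are hypothetical, and the Message type is the one declared in dist/models/index.d.ts.

// Standalone sketch of the 4-characters-per-token approximation used by MistralLLM.tokens(),
// restricted to text content for brevity. The "./index" import mirrors the package's internal layout.
import type { Message } from "./index";

function approximateTokens(messages: Message[]): number {
    const chars = messages.reduce((acc, m) => {
        return acc + m.content.reduce((inner, c) => {
            return c.type === "text" ? inner + c.text.length : inner;
        }, 0);
    }, 0);
    // token ~= 4 chars, rounded down as in the adapter above
    return Math.floor(chars / 4);
}

const sample: Message[] = [
    { role: "user", content: [{ type: "text", text: "What is the capital of France?", provider: null }] },
];
console.log(approximateTokens(sample)); // 30 chars / 4 -> 7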