ai 3.0.9 → 3.1.0-canary.1
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
- package/core/dist/index.d.mts +480 -0
- package/core/dist/index.d.ts +480 -0
- package/core/dist/index.js +1532 -0
- package/core/dist/index.js.map +1 -0
- package/core/dist/index.mjs +1483 -0
- package/core/dist/index.mjs.map +1 -0
- package/package.json +21 -3
- package/provider/dist/chunk-3DTRVHCT.mjs +5046 -0
- package/provider/dist/chunk-3DTRVHCT.mjs.map +1 -0
- package/provider/dist/chunk-4OUDS3CP.mjs +30 -0
- package/provider/dist/chunk-4OUDS3CP.mjs.map +1 -0
- package/provider/dist/chunk-5IYCPJBV.mjs +56 -0
- package/provider/dist/chunk-5IYCPJBV.mjs.map +1 -0
- package/provider/dist/chunk-VB2TCVQ4.mjs +6746 -0
- package/provider/dist/chunk-VB2TCVQ4.mjs.map +1 -0
- package/provider/dist/chunk-VYIXVZ6L.mjs +317 -0
- package/provider/dist/chunk-VYIXVZ6L.mjs.map +1 -0
- package/provider/dist/chunk-WTOUHN6A.mjs +2251 -0
- package/provider/dist/chunk-WTOUHN6A.mjs.map +1 -0
- package/provider/dist/client-22WAAXR7.mjs +10 -0
- package/provider/dist/client-22WAAXR7.mjs.map +1 -0
- package/provider/dist/fileFromPath-23RINPB2.mjs +115 -0
- package/provider/dist/fileFromPath-23RINPB2.mjs.map +1 -0
- package/provider/dist/index.d.mts +387 -0
- package/provider/dist/index.d.ts +387 -0
- package/provider/dist/index.js +26487 -0
- package/provider/dist/index.js.map +1 -0
- package/provider/dist/index.mjs +8087 -0
- package/provider/dist/index.mjs.map +1 -0
- package/provider/dist/lib-BZMMM4HX.mjs +20 -0
- package/provider/dist/lib-BZMMM4HX.mjs.map +1 -0
- package/provider/dist/openai-3YL4AWLI.mjs +3451 -0
- package/provider/dist/openai-3YL4AWLI.mjs.map +1 -0
package/provider/dist/chunk-VYIXVZ6L.mjs
@@ -0,0 +1,317 @@
+import {
+  __esm,
+  __export,
+  __publicField
+} from "./chunk-5IYCPJBV.mjs";
+
+// ../../node_modules/.pnpm/@mistralai+mistralai@0.1.3/node_modules/@mistralai/mistralai/src/client.js
+var client_exports = {};
+__export(client_exports, {
+  default: () => client_default
+});
+async function initializeFetch() {
+  if (typeof window === "undefined" || typeof globalThis.fetch === "undefined") {
+    const nodeFetch = await import("./lib-BZMMM4HX.mjs");
+    fetch = nodeFetch.default;
+    isNode = true;
+  } else {
+    fetch = globalThis.fetch;
+  }
+}
+var isNode, VERSION, RETRY_STATUS_CODES, ENDPOINT, MistralAPIError, MistralClient, client_default;
+var init_client = __esm({
+  "../../node_modules/.pnpm/@mistralai+mistralai@0.1.3/node_modules/@mistralai/mistralai/src/client.js"() {
+    isNode = false;
+    VERSION = "0.0.3";
+    RETRY_STATUS_CODES = [429, 500, 502, 503, 504];
+    ENDPOINT = "https://api.mistral.ai";
+    initializeFetch();
+    MistralAPIError = class extends Error {
+      /**
+       * A simple error class for Mistral API errors
+       * @param {*} message
+       */
+      constructor(message) {
+        super(message);
+        this.name = "MistralAPIError";
+      }
+    };
+    MistralClient = class {
+      /**
+       * A simple and lightweight client for the Mistral API
+       * @param {*} apiKey can be set as an environment variable MISTRAL_API_KEY,
+       * or provided in this parameter
+       * @param {*} endpoint defaults to https://api.mistral.ai
+       * @param {*} maxRetries defaults to 5
+       * @param {*} timeout defaults to 120 seconds
+       */
+      constructor(apiKey = process.env.MISTRAL_API_KEY, endpoint = ENDPOINT, maxRetries = 5, timeout = 120) {
+        /**
+         *
+         * @param {*} method
+         * @param {*} path
+         * @param {*} request
+         * @return {Promise<*>}
+         */
+        __publicField(this, "_request", async function(method, path, request) {
+          const url = `${this.endpoint}/${path}`;
+          const options = {
+            method,
+            headers: {
+              "User-Agent": `mistral-client-js/${VERSION}`,
+              "Accept": (request == null ? void 0 : request.stream) ? "text/event-stream" : "application/json",
+              "Content-Type": "application/json",
+              "Authorization": `Bearer ${this.apiKey}`
+            },
+            body: method !== "get" ? JSON.stringify(request) : null,
+            timeout: this.timeout * 1e3
+          };
+          for (let attempts = 0; attempts < this.maxRetries; attempts++) {
+            try {
+              const response = await fetch(url, options);
+              if (response.ok) {
+                if (request == null ? void 0 : request.stream) {
+                  if (isNode) {
+                    return response.body;
+                  } else {
+                    const reader = response.body.getReader();
+                    const asyncIterator = async function* () {
+                      try {
+                        while (true) {
+                          const { done, value } = await reader.read();
+                          if (done)
+                            return;
+                          yield value;
+                        }
+                      } finally {
+                        reader.releaseLock();
+                      }
+                    };
+                    return asyncIterator();
+                  }
+                }
+                return await response.json();
+              } else if (RETRY_STATUS_CODES.includes(response.status)) {
+                console.debug(
+                  `Retrying request on response status: ${response.status}`,
+                  `Response: ${await response.text()}`,
+                  `Attempt: ${attempts + 1}`
+                );
+                await new Promise(
+                  (resolve) => setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
+                );
+              } else {
+                throw new MistralAPIError(
+                  `HTTP error! status: ${response.status} Response:
+${await response.text()}`
+                );
+              }
+            } catch (error) {
+              console.error(`Request failed: ${error.message}`);
+              if (error.name === "MistralAPIError") {
+                throw error;
+              }
+              if (attempts === this.maxRetries - 1)
+                throw error;
+              await new Promise(
+                (resolve) => setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
+              );
+            }
+          }
+          throw new Error("Max retries reached");
+        });
+        /**
+         * Creates a chat completion request
+         * @param {*} model
+         * @param {*} messages
+         * @param {*} tools
+         * @param {*} temperature
+         * @param {*} maxTokens
+         * @param {*} topP
+         * @param {*} randomSeed
+         * @param {*} stream
+         * @param {*} safeMode deprecated use safePrompt instead
+         * @param {*} safePrompt
+         * @param {*} toolChoice
+         * @param {*} responseFormat
+         * @return {Promise<Object>}
+         */
+        __publicField(this, "_makeChatCompletionRequest", function(model, messages, tools, temperature, maxTokens, topP, randomSeed, stream, safeMode, safePrompt, toolChoice, responseFormat) {
+          var _a;
+          if (!model && !this.modelDefault) {
+            throw new MistralAPIError(
+              "You must provide a model name"
+            );
+          }
+          return {
+            model: model != null ? model : this.modelDefault,
+            messages,
+            tools: tools != null ? tools : void 0,
+            temperature: temperature != null ? temperature : void 0,
+            max_tokens: maxTokens != null ? maxTokens : void 0,
+            top_p: topP != null ? topP : void 0,
+            random_seed: randomSeed != null ? randomSeed : void 0,
+            stream: stream != null ? stream : void 0,
+            safe_prompt: (_a = safeMode || safePrompt) != null ? _a : void 0,
+            tool_choice: toolChoice != null ? toolChoice : void 0,
+            response_format: responseFormat != null ? responseFormat : void 0
+          };
+        });
+        /**
+         * Returns a list of the available models
+         * @return {Promise<Object>}
+         */
+        __publicField(this, "listModels", async function() {
+          const response = await this._request("get", "v1/models");
+          return response;
+        });
+        /**
+         * A chat endpoint without streaming
+         * @param {*} model the name of the model to chat with, e.g. mistral-tiny
+         * @param {*} messages an array of messages to chat with, e.g.
+         * [{role: 'user', content: 'What is the best French cheese?'}]
+         * @param {*} tools a list of tools to use.
+         * @param {*} temperature the temperature to use for sampling, e.g. 0.5
+         * @param {*} maxTokens the maximum number of tokens to generate, e.g. 100
+         * @param {*} topP the cumulative probability of tokens to generate, e.g. 0.9
+         * @param {*} randomSeed the random seed to use for sampling, e.g. 42
+         * @param {*} safeMode deprecated use safePrompt instead
+         * @param {*} safePrompt whether to use safe mode, e.g. true
+         * @param {*} toolChoice the tool to use, e.g. 'auto'
+         * @param {*} responseFormat the format of the response, e.g. 'json_format'
+         * @return {Promise<Object>}
+         */
+        __publicField(this, "chat", async function({
+          model,
+          messages,
+          tools,
+          temperature,
+          maxTokens,
+          topP,
+          randomSeed,
+          safeMode,
+          safePrompt,
+          toolChoice,
+          responseFormat
+        }) {
+          const request = this._makeChatCompletionRequest(
+            model,
+            messages,
+            tools,
+            temperature,
+            maxTokens,
+            topP,
+            randomSeed,
+            false,
+            safeMode,
+            safePrompt,
+            toolChoice,
+            responseFormat
+          );
+          const response = await this._request(
+            "post",
+            "v1/chat/completions",
+            request
+          );
+          return response;
+        });
+        /**
+         * A chat endpoint that streams responses.
+         * @param {*} model the name of the model to chat with, e.g. mistral-tiny
+         * @param {*} messages an array of messages to chat with, e.g.
+         * [{role: 'user', content: 'What is the best French cheese?'}]
+         * @param {*} tools a list of tools to use.
+         * @param {*} temperature the temperature to use for sampling, e.g. 0.5
+         * @param {*} maxTokens the maximum number of tokens to generate, e.g. 100
+         * @param {*} topP the cumulative probability of tokens to generate, e.g. 0.9
+         * @param {*} randomSeed the random seed to use for sampling, e.g. 42
+         * @param {*} safeMode deprecated use safePrompt instead
+         * @param {*} safePrompt whether to use safe mode, e.g. true
+         * @param {*} toolChoice the tool to use, e.g. 'auto'
+         * @param {*} responseFormat the format of the response, e.g. 'json_format'
+         * @return {Promise<Object>}
+         */
+        __publicField(this, "chatStream", async function* ({
+          model,
+          messages,
+          tools,
+          temperature,
+          maxTokens,
+          topP,
+          randomSeed,
+          safeMode,
+          safePrompt,
+          toolChoice,
+          responseFormat
+        }) {
+          const request = this._makeChatCompletionRequest(
+            model,
+            messages,
+            tools,
+            temperature,
+            maxTokens,
+            topP,
+            randomSeed,
+            true,
+            safeMode,
+            safePrompt,
+            toolChoice,
+            responseFormat
+          );
+          const response = await this._request(
+            "post",
+            "v1/chat/completions",
+            request
+          );
+          let buffer = "";
+          const decoder = new TextDecoder();
+          for await (const chunk of response) {
+            buffer += decoder.decode(chunk, { stream: true });
+            let firstNewline;
+            while ((firstNewline = buffer.indexOf("\n")) !== -1) {
+              const chunkLine = buffer.substring(0, firstNewline);
+              buffer = buffer.substring(firstNewline + 1);
+              if (chunkLine.startsWith("data:")) {
+                const json = chunkLine.substring(6).trim();
+                if (json !== "[DONE]") {
+                  yield JSON.parse(json);
+                }
+              }
+            }
+          }
+        });
+        /**
+         * An embeddings endpoint that returns embeddings for a single,
+         * or batch of inputs
+         * @param {*} model The embedding model to use, e.g. mistral-embed
+         * @param {*} input The input to embed,
+         * e.g. ['What is the best French cheese?']
+         * @return {Promise<Object>}
+         */
+        __publicField(this, "embeddings", async function({ model, input }) {
+          const request = {
+            model,
+            input
+          };
+          const response = await this._request("post", "v1/embeddings", request);
+          return response;
+        });
+        this.endpoint = endpoint;
+        this.apiKey = apiKey;
+        this.maxRetries = maxRetries;
+        this.timeout = timeout;
+        if (this.endpoint.indexOf("inference.azure.com")) {
+          this.modelDefault = "mistral";
+        }
+      }
+    };
+    client_default = MistralClient;
+  }
+});
+
+export {
+  client_default,
+  client_exports,
+  init_client
+};
+//# sourceMappingURL=chunk-VYIXVZ6L.mjs.map
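The 317-line chunk above is a vendored copy of `@mistralai/mistralai` 0.1.3, wrapped in the bundler's `__esm` lazy-initialization helper and re-exported as `client_default`. For orientation, here is a minimal usage sketch of that client: the constructor defaults and method signatures come straight from the diffed code, while the model names, prompts, and response shapes (`choices[0].message` / `choices[0].delta`, per Mistral's OpenAI-compatible API) are illustrative assumptions not visible in this diff.

```js
// Minimal sketch exercising the vendored client above. Inside the bundle the
// class is exposed as client_default; importing it as the default export of
// @mistralai/mistralai is an assumption for illustration.
import MistralClient from "@mistralai/mistralai";

// Constructor defaults: MISTRAL_API_KEY env var, https://api.mistral.ai,
// 5 retries, 120 s timeout.
const client = new MistralClient();

// Non-streaming chat: _request resolves with the parsed JSON body.
const completion = await client.chat({
  model: "mistral-tiny", // placeholder model name
  messages: [{ role: "user", content: "What is the best French cheese?" }],
});
console.log(completion.choices[0].message.content);

// Streaming chat: chatStream is an async generator that yields one parsed
// event per SSE "data:" line, skipping the "[DONE]" sentinel.
for await (const event of client.chatStream({
  model: "mistral-tiny",
  messages: [{ role: "user", content: "Write a haiku about cheese." }],
})) {
  process.stdout.write(event.choices[0]?.delta?.content ?? "");
}
```

Two behaviors in `_request` are worth noting: retryable statuses (429, 500, 502, 503, 504) back off by `Math.pow(2, attempts + 1) * 500` ms, i.e. 1 s, 2 s, 4 s, 8 s, 16 s, before "Max retries reached" is thrown; and the constructor's `this.endpoint.indexOf("inference.azure.com")` check is truthy for any endpoint that does not start with that substring (a miss returns -1), so `modelDefault` ends up set to "mistral" for the default endpoint as well.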
package/provider/dist/chunk-VYIXVZ6L.mjs.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../../../../node_modules/.pnpm/@mistralai+mistralai@0.1.3/node_modules/@mistralai/mistralai/src/client.js"],"sourcesContent":[…],"mappings":"…","names":[]}
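For reference, the `chatStream` loop in the chunk above does its own server-sent-events framing: it accumulates decoded text in a buffer, splits on newlines, and JSON-parses only `data:` lines, dropping the `[DONE]` sentinel. A self-contained sketch of that same framing logic, run against two fabricated network chunks (the payloads are made up, and one `data:` line is deliberately split across the chunks):

```js
// Replays the buffering logic from chatStream against hand-made chunks.
const encoder = new TextEncoder();
const chunks = [
  encoder.encode('data: {"id":"a","n":1}\ndata: {"id'),
  encoder.encode('":"b","n":2}\ndata: [DONE]\n'),
];

const decoder = new TextDecoder();
let buffer = "";
for (const chunk of chunks) {
  buffer += decoder.decode(chunk, { stream: true });
  let firstNewline;
  while ((firstNewline = buffer.indexOf("\n")) !== -1) {
    const line = buffer.substring(0, firstNewline);
    buffer = buffer.substring(firstNewline + 1);
    if (line.startsWith("data:")) {
      const json = line.substring(6).trim();
      if (json !== "[DONE]") {
        console.log(JSON.parse(json)); // { id: 'a', n: 1 }, then { id: 'b', n: 2 }
      }
    }
  }
}
```

Because decoding uses `{ stream: true }` and unterminated lines stay in the buffer, a JSON payload split across network chunks is only parsed once its trailing newline arrives.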