@cencori/ai-sdk 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -6
- package/dist/index.js +75 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +75 -7
- package/dist/index.mjs.map +1 -1
- package/dist/tanstack/index.js +31 -2
- package/dist/tanstack/index.js.map +1 -1
- package/dist/tanstack/index.mjs +31 -2
- package/dist/tanstack/index.mjs.map +1 -1
- package/dist/vercel/index.d.mts +8 -0
- package/dist/vercel/index.d.ts +8 -0
- package/dist/vercel/index.js +75 -7
- package/dist/vercel/index.js.map +1 -1
- package/dist/vercel/index.mjs +75 -7
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +2 -2
package/README.md
CHANGED
@@ -24,17 +24,70 @@ for await (const chunk of result.textStream) {
 }
 ```
 
-
+### Tool Calling (Vercel AI SDK)
+
+```typescript
+import { cencori } from '@cencori/ai-sdk/vercel';
+import { generateText, tool } from 'ai';
+import { z } from 'zod';
+
+const result = await generateText({
+  model: cencori('gpt-4o'),
+  prompt: 'What is the weather in San Francisco?',
+  tools: {
+    getWeather: tool({
+      description: 'Get the current weather for a location',
+      parameters: z.object({
+        location: z.string().describe('The city name'),
+      }),
+      execute: async ({ location }) => {
+        return { temperature: 72, condition: 'sunny' };
+      },
+    }),
+  },
+});
+```
+
+## TanStack AI Integration
 
 ```typescript
 import { cencori } from '@cencori/ai-sdk/tanstack';
-import { chat } from '@tanstack/ai';
 
-const
-
-
+const adapter = cencori('gpt-4o');
+
+for await (const chunk of adapter.chatStream({
   messages: [{ role: 'user', content: 'Hello!' }]
-})
+})) {
+  if (chunk.type === 'content') {
+    process.stdout.write(chunk.delta);
+  }
+}
+```
+
+### Tool Calling (TanStack AI)
+
+```typescript
+import { cencori } from '@cencori/ai-sdk/tanstack';
+
+const adapter = cencori('gpt-4o');
+
+for await (const chunk of adapter.chatStream({
+  messages: [{ role: 'user', content: 'Get weather for NYC' }],
+  tools: {
+    getWeather: {
+      name: 'getWeather',
+      description: 'Get weather for a location',
+      inputSchema: {
+        type: 'object',
+        properties: { location: { type: 'string' } }
+      },
+    },
+  },
+})) {
+  if (chunk.type === 'tool_call') {
+    console.log('Tool call:', chunk.toolCall);
+  }
+}
 ```
 
 ## Configuration
@@ -74,6 +127,7 @@ const cencori = createCencori({
 - 📊 **Observability** — Request logs, latency metrics, cost tracking
 - 💰 **Cost Control** — Budgets, alerts, per-route analytics
 - 🔌 **Multi-Provider** — One API key for all AI providers
+- 🛠️ **Tool Calling** — Full support for function calling across providers
 
 ## License
 
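Editor's note: the README examples above rely on the OpenAI-compatible tool format this release adds to the provider. A minimal sketch of the two shapes involved, transcribed from the `CencoriTool` and `CencoriToolCall` interfaces that appear in the updated source (embedded in the new source maps further down); the `getWeatherTool` value at the end is illustrative only, not part of the package.

```typescript
// Transcribed from the interfaces added in src/vercel/cencori-chat-model.ts.
interface CencoriTool {
  type: 'function';
  function: {
    name: string;
    description: string;
    parameters: Record<string, any>; // JSON Schema describing the tool's input
  };
}

interface CencoriToolCall {
  id: string;
  type: 'function';
  function: {
    name: string;
    arguments: string; // JSON-encoded arguments, as returned by the model
  };
}

// Illustrative tool definition in this format (hypothetical example).
const getWeatherTool: CencoriTool = {
  type: 'function',
  function: {
    name: 'getWeather',
    description: 'Get weather for a location',
    parameters: { type: 'object', properties: { location: { type: 'string' } } },
  },
};
```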
package/dist/index.js
CHANGED
@@ -110,8 +110,45 @@ var CencoriChatLanguageModel = class {
       }
     };
   }
+  /**
+   * Convert Vercel AI SDK tools to Cencori format
+   */
+  convertTools(options) {
+    if (!options.tools || options.tools.length === 0) {
+      return void 0;
+    }
+    return options.tools.filter((t) => t.type === "function").map((t) => ({
+      type: "function",
+      function: {
+        name: t.name,
+        description: t.description || "",
+        parameters: t.inputSchema
+      }
+    }));
+  }
+  /**
+   * Convert Vercel AI SDK tool choice to Cencori format
+   */
+  convertToolChoice(options) {
+    const tc = options.toolChoice;
+    if (!tc) return void 0;
+    switch (tc.type) {
+      case "auto":
+        return "auto";
+      case "none":
+        return "none";
+      case "required":
+        return "required";
+      case "tool":
+        return { type: "function", function: { name: tc.toolName } };
+      default:
+        return void 0;
+    }
+  }
   async doGenerate(options) {
     const messages = this.convertMessages(options);
+    const tools = this.convertTools(options);
+    const toolChoice = this.convertToolChoice(options);
     const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {
       method: "POST",
       headers: this.getHeaders(),
@@ -121,7 +158,9 @@ var CencoriChatLanguageModel = class {
         temperature: options.temperature,
         maxTokens: options.maxOutputTokens,
         stream: false,
-        userId: this.settings.userId
+        userId: this.settings.userId,
+        tools,
+        toolChoice
       }),
       signal: options.abortSignal
     });
@@ -130,11 +169,25 @@ var CencoriChatLanguageModel = class {
      throw new Error(`Cencori API error: ${error.error || response.statusText}`);
     }
     const data = await response.json();
-    const content = [{
-      type: "text",
-      text: data.content,
-      providerMetadata: void 0
-    }];
+    const content = [];
+    if (data.content) {
+      content.push({
+        type: "text",
+        text: data.content,
+        providerMetadata: void 0
+      });
+    }
+    if (data.tool_calls && data.tool_calls.length > 0) {
+      for (const tc of data.tool_calls) {
+        content.push({
+          type: "tool-call",
+          toolCallId: tc.id,
+          toolName: tc.function.name,
+          input: tc.function.arguments,
+          providerMetadata: void 0
+        });
+      }
+    }
     const warnings = [];
     return {
       content,
@@ -145,6 +198,8 @@ var CencoriChatLanguageModel = class {
   }
   async doStream(options) {
     const messages = this.convertMessages(options);
+    const tools = this.convertTools(options);
+    const toolChoice = this.convertToolChoice(options);
     const self = this;
     const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {
       method: "POST",
@@ -155,7 +210,9 @@ var CencoriChatLanguageModel = class {
         temperature: options.temperature,
         maxTokens: options.maxOutputTokens,
         stream: true,
-        userId: this.settings.userId
+        userId: this.settings.userId,
+        tools,
+        toolChoice
       }),
       signal: options.abortSignal
     });
@@ -231,6 +288,17 @@ var CencoriChatLanguageModel = class {
                   delta: chunk.delta
                 });
               }
+              if (chunk.tool_calls && chunk.tool_calls.length > 0) {
+                for (const tc of chunk.tool_calls) {
+                  controller.enqueue({
+                    type: "tool-call",
+                    toolCallId: tc.id,
+                    toolName: tc.function.name,
+                    input: tc.function.arguments,
+                    providerMetadata: void 0
+                  });
+                }
+              }
               if (chunk.finish_reason) {
                 if (started) {
                   controller.enqueue({
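Editor's note: in practical terms, the compiled change above forwards two extra fields to the gateway: `tools` (the converted definitions) and `toolChoice`. A rough sketch of the request body `doGenerate` now builds, using only field names visible in this diff; the concrete values, the model ID, and the placeholder API key are illustrative, not part of the package.

```typescript
// Sketch of the POST body sent to `${baseUrl}/api/ai/chat` after this change.
// Field names come from the diff; values are made up for illustration.
const body = {
  messages: [{ role: 'user', content: 'What is the weather in San Francisco?' }],
  model: 'gpt-4o',
  temperature: 0.7,
  maxTokens: 1024,
  stream: false,
  userId: undefined, // settings.userId, when configured
  tools: [
    {
      type: 'function',
      function: {
        name: 'getWeather',
        description: 'Get the current weather for a location',
        parameters: { type: 'object', properties: { location: { type: 'string' } } },
      },
    },
  ],
  toolChoice: 'auto', // or 'none' | 'required' | { type: 'function', function: { name: 'getWeather' } }
};

await fetch('https://cencori.com/api/ai/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', CENCORI_API_KEY: '<your key>' },
  body: JSON.stringify(body),
});
```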
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/vercel/cencori-chat-model.ts","../src/vercel/cencori-provider.ts"],"sourcesContent":["/**\n * Cencori AI SDK - Main Entry Point\n * \n * This package provides integrations for multiple AI SDK ecosystems:\n * \n * @example Vercel AI SDK\n * import { cencori } from '@cencori/ai-sdk/vercel';\n * \n * @example TanStack AI (coming soon)\n * import { cencori } from '@cencori/ai-sdk/tanstack';\n */\n\n// Re-export Vercel integration as default for backwards compatibility\nexport * from './vercel';\n","/**\n * Cencori Chat Language Model\n * \n * Implements the Vercel AI SDK's LanguageModelV3 interface (AI SDK v6 compatible)\n */\n\nimport type {\n LanguageModelV3,\n LanguageModelV3CallOptions,\n LanguageModelV3GenerateResult,\n LanguageModelV3StreamResult,\n LanguageModelV3StreamPart,\n LanguageModelV3Content,\n LanguageModelV3Usage,\n LanguageModelV3FinishReason,\n SharedV3Warning,\n} from '@ai-sdk/provider';\n\nexport interface CencoriChatModelSettings {\n apiKey: string;\n baseUrl: string;\n headers?: Record<string, string>;\n userId?: string;\n}\n\ninterface CencoriMessage {\n role: 'system' | 'user' | 'assistant';\n content: string;\n}\n\ninterface CencoriResponse {\n content: string;\n model: string;\n provider: string;\n usage: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n cost_usd: number;\n finish_reason?: string;\n}\n\ninterface CencoriStreamChunk {\n delta: string;\n finish_reason?: string;\n}\n\nexport class CencoriChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3' as const;\n readonly provider = 'cencori';\n\n readonly modelId: string;\n readonly supportedUrls: Record<string, RegExp[]> = {};\n private readonly settings: CencoriChatModelSettings;\n\n constructor(modelId: string, settings: CencoriChatModelSettings) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private getHeaders(): Record<string, string> {\n return {\n 'Content-Type': 'application/json',\n 'CENCORI_API_KEY': this.settings.apiKey,\n ...this.settings.headers,\n };\n }\n\n private convertMessages(options: LanguageModelV3CallOptions): CencoriMessage[] {\n const messages: CencoriMessage[] = [];\n\n // In V3, options.prompt is directly an array of LanguageModelV3Message\n const promptMessages = options.prompt;\n\n if (!promptMessages || !Array.isArray(promptMessages)) {\n return messages;\n }\n\n for (const msg of promptMessages) {\n let content = '';\n\n if (msg.role === 'system') {\n // System messages have content as string directly\n content = msg.content as string;\n } else if (msg.role === 'user' || msg.role === 'assistant') {\n // User and assistant messages have content as array of parts\n const msgContent = msg.content;\n if (Array.isArray(msgContent)) {\n content = msgContent\n .filter((part: { type: string }) => part.type === 'text')\n .map((part: { type: string; text?: string }) => part.text || '')\n .join('');\n } else if (typeof msgContent === 'string') {\n content = msgContent;\n }\n }\n\n if (content && (msg.role === 'system' || msg.role === 'user' || msg.role === 'assistant')) {\n messages.push({\n role: msg.role as 'system' | 'user' | 'assistant',\n content,\n });\n }\n }\n\n return messages;\n }\n\n private mapFinishReason(reason?: string): LanguageModelV3FinishReason {\n let unified: 'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other';\n\n switch (reason) {\n case 'stop':\n case 'end_turn':\n unified = 'stop';\n break;\n case 'length':\n case 
'max_tokens':\n unified = 'length';\n break;\n case 'content_filter':\n unified = 'content-filter';\n break;\n case 'tool_calls':\n case 'tool-calls':\n unified = 'tool-calls';\n break;\n case 'error':\n unified = 'error';\n break;\n default:\n unified = 'stop';\n }\n\n return { unified, raw: reason };\n }\n\n private buildUsage(inputTokens: number, outputTokens: number): LanguageModelV3Usage {\n return {\n inputTokens: {\n total: inputTokens,\n noCache: inputTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: outputTokens,\n text: outputTokens,\n reasoning: undefined,\n },\n };\n }\n\n async doGenerate(options: LanguageModelV3CallOptions): Promise<LanguageModelV3GenerateResult> {\n const messages = this.convertMessages(options);\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: false,\n userId: this.settings.userId,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const data = await response.json() as CencoriResponse;\n\n const content: LanguageModelV3Content[] = [{\n type: 'text',\n text: data.content,\n providerMetadata: undefined,\n }];\n\n const warnings: SharedV3Warning[] = [];\n\n return {\n content,\n finishReason: this.mapFinishReason(data.finish_reason),\n usage: this.buildUsage(data.usage.prompt_tokens, data.usage.completion_tokens),\n warnings,\n };\n }\n\n async doStream(options: LanguageModelV3CallOptions): Promise<LanguageModelV3StreamResult> {\n const messages = this.convertMessages(options);\n const self = this;\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: true,\n userId: this.settings.userId,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const reader = response.body?.getReader();\n if (!reader) {\n throw new Error('Response body is null');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n let inputTokens = 0;\n let outputTokens = 0;\n const textPartId = 'text-0';\n let started = false;\n\n const stream = new ReadableStream<LanguageModelV3StreamPart>({\n async pull(controller) {\n try {\n const { done, value } = await reader.read();\n\n if (done) {\n // End text block and finish\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.trim() === '') continue;\n if (!line.startsWith('data: ')) continue;\n\n const data = line.slice(6);\n if (data === '[DONE]') {\n if (started) {\n controller.enqueue({\n type: 
'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n try {\n const chunk = JSON.parse(data) as CencoriStreamChunk;\n\n if (chunk.delta) {\n // Start text if not started\n if (!started) {\n started = true;\n controller.enqueue({\n type: 'text-start',\n id: textPartId,\n });\n }\n\n outputTokens += Math.ceil(chunk.delta.length / 4); // Rough estimate\n controller.enqueue({\n type: 'text-delta',\n id: textPartId,\n delta: chunk.delta,\n });\n }\n\n if (chunk.finish_reason) {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason(chunk.finish_reason),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n } catch {\n // Skip malformed JSON\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader.cancel();\n },\n });\n\n return {\n stream,\n };\n }\n}\n","/**\n * Cencori AI Provider for Vercel AI SDK\n * \n * Use Cencori with streamText(), generateText(), and useChat()\n */\n\nimport { CencoriChatLanguageModel } from './cencori-chat-model';\nimport type { CencoriProviderSettings, CencoriChatSettings } from './types';\n\nexport interface CencoriProvider {\n /**\n * Create a Cencori chat model for use with Vercel AI SDK\n * \n * @param modelId - The model ID (e.g., 'gemini-2.5-flash', 'gpt-4o', 'claude-3-opus')\n * @param settings - Optional model-specific settings\n * @returns A LanguageModelV1 compatible model\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\n (modelId: string, settings?: CencoriChatSettings): CencoriChatLanguageModel;\n\n /**\n * Create a chat model (alias for the provider function)\n */\n chat: (modelId: string, settings?: CencoriChatSettings) => CencoriChatLanguageModel;\n}\n\n/**\n * Create a Cencori provider instance\n * \n * @param options - Provider configuration options\n * @returns A Cencori provider\n * \n * @example\n * import { createCencori } from '@cencori/ai-sdk';\n * \n * const cencori = createCencori({\n * apiKey: process.env.CENCORI_API_KEY\n * });\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport function createCencori(options: CencoriProviderSettings = {}): CencoriProvider {\n const baseUrl = options.baseUrl ?? 'https://cencori.com';\n const apiKey = options.apiKey ?? process.env.CENCORI_API_KEY;\n\n if (!apiKey) {\n throw new Error('Cencori API key is required. 
Pass it via options.apiKey or set CENCORI_API_KEY environment variable.');\n }\n\n const createModel = (modelId: string, settings: CencoriChatSettings = {}) => {\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl,\n headers: options.headers,\n ...settings,\n });\n };\n\n const provider = function (modelId: string, settings?: CencoriChatSettings) {\n return createModel(modelId, settings);\n } as CencoriProvider;\n\n provider.chat = createModel;\n\n return provider;\n}\n\n/**\n * Default Cencori provider instance\n * Uses CENCORI_API_KEY environment variable (lazy initialization)\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport const cencori: CencoriProvider = function (modelId: string, settings?: CencoriChatSettings) {\n const apiKey = process.env.CENCORI_API_KEY;\n if (!apiKey) {\n throw new Error('CENCORI_API_KEY environment variable is required. Set it or use createCencori({ apiKey: \"...\" }) instead.');\n }\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl: 'https://cencori.com',\n ...settings,\n });\n} as CencoriProvider;\n\ncencori.chat = cencori;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACgDO,IAAM,2BAAN,MAA0D;AAAA,EAQ7D,YAAY,SAAiB,UAAoC;AAPjE,SAAS,uBAAuB;AAChC,SAAS,WAAW;AAGpB,SAAS,gBAA0C,CAAC;AAIhD,SAAK,UAAU;AACf,SAAK,WAAW;AAAA,EACpB;AAAA,EAEQ,aAAqC;AACzC,WAAO;AAAA,MACH,gBAAgB;AAAA,MAChB,mBAAmB,KAAK,SAAS;AAAA,MACjC,GAAG,KAAK,SAAS;AAAA,IACrB;AAAA,EACJ;AAAA,EAEQ,gBAAgB,SAAuD;AAC3E,UAAM,WAA6B,CAAC;AAGpC,UAAM,iBAAiB,QAAQ;AAE/B,QAAI,CAAC,kBAAkB,CAAC,MAAM,QAAQ,cAAc,GAAG;AACnD,aAAO;AAAA,IACX;AAEA,eAAW,OAAO,gBAAgB;AAC9B,UAAI,UAAU;AAEd,UAAI,IAAI,SAAS,UAAU;AAEvB,kBAAU,IAAI;AAAA,MAClB,WAAW,IAAI,SAAS,UAAU,IAAI,SAAS,aAAa;AAExD,cAAM,aAAa,IAAI;AACvB,YAAI,MAAM,QAAQ,UAAU,GAAG;AAC3B,oBAAU,WACL,OAAO,CAAC,SAA2B,KAAK,SAAS,MAAM,EACvD,IAAI,CAAC,SAA0C,KAAK,QAAQ,EAAE,EAC9D,KAAK,EAAE;AAAA,QAChB,WAAW,OAAO,eAAe,UAAU;AACvC,oBAAU;AAAA,QACd;AAAA,MACJ;AAEA,UAAI,YAAY,IAAI,SAAS,YAAY,IAAI,SAAS,UAAU,IAAI,SAAS,cAAc;AACvF,iBAAS,KAAK;AAAA,UACV,MAAM,IAAI;AAAA,UACV;AAAA,QACJ,CAAC;AAAA,MACL;AAAA,IACJ;AAEA,WAAO;AAAA,EACX;AAAA,EAEQ,gBAAgB,QAA8C;AAClE,QAAI;AAEJ,YAAQ,QAAQ;AAAA,MACZ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ;AACI,kBAAU;AAAA,IAClB;AAEA,WAAO,EAAE,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA,EAEQ,WAAW,aAAqB,cAA4C;AAChF,WAAO;AAAA,MACH,aAAa;AAAA,QACT,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MAChB;AAAA,MACA,cAAc;AAAA,QACV,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACf;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAA6E;AAC1F,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAE7C,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,MAC1B,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,UAAM,UAAoC,CAAC;AAAA,MACvC,MAAM;AAAA,MACN,MAAM,KAAK;AAAA,MACX,kBAAkB;AAAA,IACtB,CAAC;AAED,UAAM,WAA8B,CAAC;AAErC,WAAO;AAAA,MACH;AAAA,MACA,cAAc,KAAK,gBAAgB,KAAK,aAAa;AAAA,MACrD,OAAO,KAAK
,WAAW,KAAK,MAAM,eAAe,KAAK,MAAM,iBAAiB;AAAA,MAC7E;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,SAAS,SAA2E;AACtF,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAC7C,UAAM,OAAO;AAEb,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,MAC1B,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,uBAAuB;AAAA,IAC3C;AAEA,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,SAAS;AACb,QAAI,cAAc;AAClB,QAAI,eAAe;AACnB,UAAM,aAAa;AACnB,QAAI,UAAU;AAEd,UAAM,SAAS,IAAI,eAA0C;AAAA,MACzD,MAAM,KAAK,YAAY;AACnB,YAAI;AACA,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,cAAI,MAAM;AAEN,gBAAI,SAAS;AACT,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,IAAI;AAAA,cACR,CAAC;AAAA,YACL;AACA,uBAAW,QAAQ;AAAA,cACf,MAAM;AAAA,cACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,cACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,YACpD,CAAC;AACD,uBAAW,MAAM;AACjB;AAAA,UACJ;AAEA,oBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,gBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,mBAAS,MAAM,IAAI,KAAK;AAExB,qBAAW,QAAQ,OAAO;AACtB,gBAAI,KAAK,KAAK,MAAM,GAAI;AACxB,gBAAI,CAAC,KAAK,WAAW,QAAQ,EAAG;AAEhC,kBAAM,OAAO,KAAK,MAAM,CAAC;AACzB,gBAAI,SAAS,UAAU;AACnB,kBAAI,SAAS;AACT,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,gBACR,CAAC;AAAA,cACL;AACA,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,gBACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,cACpD,CAAC;AACD,yBAAW,MAAM;AACjB;AAAA,YACJ;AAEA,gBAAI;AACA,oBAAM,QAAQ,KAAK,MAAM,IAAI;AAE7B,kBAAI,MAAM,OAAO;AAEb,oBAAI,CAAC,SAAS;AACV,4BAAU;AACV,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AAEA,gCAAgB,KAAK,KAAK,MAAM,MAAM,SAAS,CAAC;AAChD,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO,MAAM;AAAA,gBACjB,CAAC;AAAA,cACL;AAEA,kBAAI,MAAM,eAAe;AACrB,oBAAI,SAAS;AACT,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AACA,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,cAAc,KAAK,gBAAgB,MAAM,aAAa;AAAA,kBACtD,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,gBACpD,CAAC;AACD,2BAAW,MAAM;AACjB;AAAA,cACJ;AAAA,YACJ,QAAQ;AAAA,YAER;AAAA,UACJ;AAAA,QACJ,SAAS,OAAO;AACZ,qBAAW,MAAM,KAAK;AAAA,QAC1B;AAAA,MACJ;AAAA,MACA,SAAS;AACL,eAAO,OAAO;AAAA,MAClB;AAAA,IACJ,CAAC;AAED,WAAO;AAAA,MACH;AAAA,IACJ;AAAA,EACJ;AACJ;;;ACrRO,SAAS,cAAc,UAAmC,CAAC,GAAoB;AAClF,QAAM,UAAU,QAAQ,WAAW;AACnC,QAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,sGAAsG;AAAA,EAC1H;AAEA,QAAM,cAAc,CAAC,SAAiB,WAAgC,CAAC,MAAM;AACzE,WAAO,IAAI,yBAAyB,SAAS;AAAA,MACzC;AAAA,MACA;AAAA,MACA,SAAS,QAAQ;AAAA,MACjB,GAAG;AAAA,IACP,CAAC;AAAA,EACL;AAEA,QAAM,WAAW,SAAU,SAAiB,UAAgC;AACxE,WAAO,YAAY,SAAS,QAAQ;AAAA,EACxC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACX;AAeO,IAAM,UAA2B,SAAU,SAAiB,UAAgC;AAC/F,QAAM,SAAS,QAAQ,IAAI;AAC3B,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,2GAA2G;AAAA,EAC/H;AACA,SAAO,IAAI,yBAAyB,SAAS;AAAA,IACzC;AAAA,IACA,SAAS;AAAA,IACT,GAAG;AAAA,EACP,CAAC;AACL;AAEA,QAAQ,OAAO;","names":[]}
+
{"version":3,"sources":["../src/index.ts","../src/vercel/cencori-chat-model.ts","../src/vercel/cencori-provider.ts"],"sourcesContent":["/**\n * Cencori AI SDK - Main Entry Point\n * \n * This package provides integrations for multiple AI SDK ecosystems:\n * \n * @example Vercel AI SDK\n * import { cencori } from '@cencori/ai-sdk/vercel';\n * \n * @example TanStack AI (coming soon)\n * import { cencori } from '@cencori/ai-sdk/tanstack';\n */\n\n// Re-export Vercel integration as default for backwards compatibility\nexport * from './vercel';\n","/**\n * Cencori Chat Language Model\n * \n * Implements the Vercel AI SDK's LanguageModelV3 interface (AI SDK v6 compatible)\n */\n\nimport type {\n LanguageModelV3,\n LanguageModelV3CallOptions,\n LanguageModelV3GenerateResult,\n LanguageModelV3StreamResult,\n LanguageModelV3StreamPart,\n LanguageModelV3Content,\n LanguageModelV3Usage,\n LanguageModelV3FinishReason,\n SharedV3Warning,\n} from '@ai-sdk/provider';\n\nexport interface CencoriChatModelSettings {\n apiKey: string;\n baseUrl: string;\n headers?: Record<string, string>;\n userId?: string;\n}\n\ninterface CencoriMessage {\n role: 'system' | 'user' | 'assistant' | 'tool';\n content: string;\n toolCallId?: string;\n}\n\n/**\n * Tool definition in Cencori format (OpenAI-compatible)\n */\ninterface CencoriTool {\n type: 'function';\n function: {\n name: string;\n description: string;\n parameters: Record<string, any>;\n };\n}\n\n/**\n * Tool call from the model\n */\ninterface CencoriToolCall {\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n}\n\ninterface CencoriResponse {\n content: string;\n model: string;\n provider: string;\n usage: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n cost_usd: number;\n finish_reason?: string;\n tool_calls?: CencoriToolCall[];\n}\n\ninterface CencoriStreamChunk {\n delta: string;\n finish_reason?: string;\n tool_calls?: CencoriToolCall[];\n}\n\nexport class CencoriChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3' as const;\n readonly provider = 'cencori';\n\n readonly modelId: string;\n readonly supportedUrls: Record<string, RegExp[]> = {};\n private readonly settings: CencoriChatModelSettings;\n\n constructor(modelId: string, settings: CencoriChatModelSettings) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private getHeaders(): Record<string, string> {\n return {\n 'Content-Type': 'application/json',\n 'CENCORI_API_KEY': this.settings.apiKey,\n ...this.settings.headers,\n };\n }\n\n private convertMessages(options: LanguageModelV3CallOptions): CencoriMessage[] {\n const messages: CencoriMessage[] = [];\n\n // In V3, options.prompt is directly an array of LanguageModelV3Message\n const promptMessages = options.prompt;\n\n if (!promptMessages || !Array.isArray(promptMessages)) {\n return messages;\n }\n\n for (const msg of promptMessages) {\n let content = '';\n\n if (msg.role === 'system') {\n // System messages have content as string directly\n content = msg.content as string;\n } else if (msg.role === 'user' || msg.role === 'assistant') {\n // User and assistant messages have content as array of parts\n const msgContent = msg.content;\n if (Array.isArray(msgContent)) {\n content = msgContent\n .filter((part: { type: string }) => part.type === 'text')\n .map((part: { type: string; text?: string }) => part.text || '')\n .join('');\n } else if (typeof msgContent === 'string') {\n content = msgContent;\n }\n }\n\n if (content && 
(msg.role === 'system' || msg.role === 'user' || msg.role === 'assistant')) {\n messages.push({\n role: msg.role as 'system' | 'user' | 'assistant',\n content,\n });\n }\n }\n\n return messages;\n }\n\n private mapFinishReason(reason?: string): LanguageModelV3FinishReason {\n let unified: 'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other';\n\n switch (reason) {\n case 'stop':\n case 'end_turn':\n unified = 'stop';\n break;\n case 'length':\n case 'max_tokens':\n unified = 'length';\n break;\n case 'content_filter':\n unified = 'content-filter';\n break;\n case 'tool_calls':\n case 'tool-calls':\n unified = 'tool-calls';\n break;\n case 'error':\n unified = 'error';\n break;\n default:\n unified = 'stop';\n }\n\n return { unified, raw: reason };\n }\n\n private buildUsage(inputTokens: number, outputTokens: number): LanguageModelV3Usage {\n return {\n inputTokens: {\n total: inputTokens,\n noCache: inputTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: outputTokens,\n text: outputTokens,\n reasoning: undefined,\n },\n };\n }\n\n /**\n * Convert Vercel AI SDK tools to Cencori format\n */\n private convertTools(options: LanguageModelV3CallOptions): CencoriTool[] | undefined {\n if (!options.tools || options.tools.length === 0) {\n return undefined;\n }\n\n return options.tools\n .filter(t => t.type === 'function')\n .map(t => ({\n type: 'function' as const,\n function: {\n name: t.name,\n description: t.description || '',\n parameters: t.inputSchema as Record<string, any>,\n },\n }));\n }\n\n /**\n * Convert Vercel AI SDK tool choice to Cencori format\n */\n private convertToolChoice(options: LanguageModelV3CallOptions): string | { type: 'function'; function: { name: string } } | undefined {\n const tc = options.toolChoice;\n if (!tc) return undefined;\n\n switch (tc.type) {\n case 'auto':\n return 'auto';\n case 'none':\n return 'none';\n case 'required':\n return 'required';\n case 'tool':\n return { type: 'function', function: { name: tc.toolName } };\n default:\n return undefined;\n }\n }\n\n async doGenerate(options: LanguageModelV3CallOptions): Promise<LanguageModelV3GenerateResult> {\n const messages = this.convertMessages(options);\n const tools = this.convertTools(options);\n const toolChoice = this.convertToolChoice(options);\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: false,\n userId: this.settings.userId,\n tools,\n toolChoice,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const data = await response.json() as CencoriResponse;\n\n // Build content array\n const content: LanguageModelV3Content[] = [];\n\n // Add text content if present\n if (data.content) {\n content.push({\n type: 'text',\n text: data.content,\n providerMetadata: undefined,\n });\n }\n\n // Add tool calls if present\n if (data.tool_calls && data.tool_calls.length > 0) {\n for (const tc of data.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: tc.id,\n toolName: tc.function.name,\n input: tc.function.arguments,\n providerMetadata: undefined,\n });\n }\n }\n\n const warnings: SharedV3Warning[] = [];\n\n return {\n 
content,\n finishReason: this.mapFinishReason(data.finish_reason),\n usage: this.buildUsage(data.usage.prompt_tokens, data.usage.completion_tokens),\n warnings,\n };\n }\n\n async doStream(options: LanguageModelV3CallOptions): Promise<LanguageModelV3StreamResult> {\n const messages = this.convertMessages(options);\n const tools = this.convertTools(options);\n const toolChoice = this.convertToolChoice(options);\n const self = this;\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: true,\n userId: this.settings.userId,\n tools,\n toolChoice,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const reader = response.body?.getReader();\n if (!reader) {\n throw new Error('Response body is null');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n let inputTokens = 0;\n let outputTokens = 0;\n const textPartId = 'text-0';\n let started = false;\n\n const stream = new ReadableStream<LanguageModelV3StreamPart>({\n async pull(controller) {\n try {\n const { done, value } = await reader.read();\n\n if (done) {\n // End text block and finish\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.trim() === '') continue;\n if (!line.startsWith('data: ')) continue;\n\n const data = line.slice(6);\n if (data === '[DONE]') {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n try {\n const chunk = JSON.parse(data) as CencoriStreamChunk;\n\n // Handle text delta\n if (chunk.delta) {\n // Start text if not started\n if (!started) {\n started = true;\n controller.enqueue({\n type: 'text-start',\n id: textPartId,\n });\n }\n\n outputTokens += Math.ceil(chunk.delta.length / 4); // Rough estimate\n controller.enqueue({\n type: 'text-delta',\n id: textPartId,\n delta: chunk.delta,\n });\n }\n\n // Handle tool calls\n if (chunk.tool_calls && chunk.tool_calls.length > 0) {\n for (const tc of chunk.tool_calls) {\n // Emit complete tool-call event\n controller.enqueue({\n type: 'tool-call',\n toolCallId: tc.id,\n toolName: tc.function.name,\n input: tc.function.arguments,\n providerMetadata: undefined,\n });\n }\n }\n\n if (chunk.finish_reason) {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason(chunk.finish_reason),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n } catch {\n // Skip malformed JSON\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader.cancel();\n },\n });\n\n return {\n stream,\n };\n 
}\n}\n","/**\n * Cencori AI Provider for Vercel AI SDK\n * \n * Use Cencori with streamText(), generateText(), and useChat()\n */\n\nimport { CencoriChatLanguageModel } from './cencori-chat-model';\nimport type { CencoriProviderSettings, CencoriChatSettings } from './types';\n\nexport interface CencoriProvider {\n /**\n * Create a Cencori chat model for use with Vercel AI SDK\n * \n * @param modelId - The model ID (e.g., 'gemini-2.5-flash', 'gpt-4o', 'claude-3-opus')\n * @param settings - Optional model-specific settings\n * @returns A LanguageModelV1 compatible model\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\n (modelId: string, settings?: CencoriChatSettings): CencoriChatLanguageModel;\n\n /**\n * Create a chat model (alias for the provider function)\n */\n chat: (modelId: string, settings?: CencoriChatSettings) => CencoriChatLanguageModel;\n}\n\n/**\n * Create a Cencori provider instance\n * \n * @param options - Provider configuration options\n * @returns A Cencori provider\n * \n * @example\n * import { createCencori } from '@cencori/ai-sdk';\n * \n * const cencori = createCencori({\n * apiKey: process.env.CENCORI_API_KEY\n * });\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport function createCencori(options: CencoriProviderSettings = {}): CencoriProvider {\n const baseUrl = options.baseUrl ?? 'https://cencori.com';\n const apiKey = options.apiKey ?? process.env.CENCORI_API_KEY;\n\n if (!apiKey) {\n throw new Error('Cencori API key is required. Pass it via options.apiKey or set CENCORI_API_KEY environment variable.');\n }\n\n const createModel = (modelId: string, settings: CencoriChatSettings = {}) => {\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl,\n headers: options.headers,\n ...settings,\n });\n };\n\n const provider = function (modelId: string, settings?: CencoriChatSettings) {\n return createModel(modelId, settings);\n } as CencoriProvider;\n\n provider.chat = createModel;\n\n return provider;\n}\n\n/**\n * Default Cencori provider instance\n * Uses CENCORI_API_KEY environment variable (lazy initialization)\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport const cencori: CencoriProvider = function (modelId: string, settings?: CencoriChatSettings) {\n const apiKey = process.env.CENCORI_API_KEY;\n if (!apiKey) {\n throw new Error('CENCORI_API_KEY environment variable is required. 
Set it or use createCencori({ apiKey: \"...\" }) instead.');\n }\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl: 'https://cencori.com',\n ...settings,\n });\n} as CencoriProvider;\n\ncencori.chat = cencori;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC2EO,IAAM,2BAAN,MAA0D;AAAA,EAQ7D,YAAY,SAAiB,UAAoC;AAPjE,SAAS,uBAAuB;AAChC,SAAS,WAAW;AAGpB,SAAS,gBAA0C,CAAC;AAIhD,SAAK,UAAU;AACf,SAAK,WAAW;AAAA,EACpB;AAAA,EAEQ,aAAqC;AACzC,WAAO;AAAA,MACH,gBAAgB;AAAA,MAChB,mBAAmB,KAAK,SAAS;AAAA,MACjC,GAAG,KAAK,SAAS;AAAA,IACrB;AAAA,EACJ;AAAA,EAEQ,gBAAgB,SAAuD;AAC3E,UAAM,WAA6B,CAAC;AAGpC,UAAM,iBAAiB,QAAQ;AAE/B,QAAI,CAAC,kBAAkB,CAAC,MAAM,QAAQ,cAAc,GAAG;AACnD,aAAO;AAAA,IACX;AAEA,eAAW,OAAO,gBAAgB;AAC9B,UAAI,UAAU;AAEd,UAAI,IAAI,SAAS,UAAU;AAEvB,kBAAU,IAAI;AAAA,MAClB,WAAW,IAAI,SAAS,UAAU,IAAI,SAAS,aAAa;AAExD,cAAM,aAAa,IAAI;AACvB,YAAI,MAAM,QAAQ,UAAU,GAAG;AAC3B,oBAAU,WACL,OAAO,CAAC,SAA2B,KAAK,SAAS,MAAM,EACvD,IAAI,CAAC,SAA0C,KAAK,QAAQ,EAAE,EAC9D,KAAK,EAAE;AAAA,QAChB,WAAW,OAAO,eAAe,UAAU;AACvC,oBAAU;AAAA,QACd;AAAA,MACJ;AAEA,UAAI,YAAY,IAAI,SAAS,YAAY,IAAI,SAAS,UAAU,IAAI,SAAS,cAAc;AACvF,iBAAS,KAAK;AAAA,UACV,MAAM,IAAI;AAAA,UACV;AAAA,QACJ,CAAC;AAAA,MACL;AAAA,IACJ;AAEA,WAAO;AAAA,EACX;AAAA,EAEQ,gBAAgB,QAA8C;AAClE,QAAI;AAEJ,YAAQ,QAAQ;AAAA,MACZ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ;AACI,kBAAU;AAAA,IAClB;AAEA,WAAO,EAAE,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA,EAEQ,WAAW,aAAqB,cAA4C;AAChF,WAAO;AAAA,MACH,aAAa;AAAA,QACT,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MAChB;AAAA,MACA,cAAc;AAAA,QACV,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACf;AAAA,IACJ;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAgE;AACjF,QAAI,CAAC,QAAQ,SAAS,QAAQ,MAAM,WAAW,GAAG;AAC9C,aAAO;AAAA,IACX;AAEA,WAAO,QAAQ,MACV,OAAO,OAAK,EAAE,SAAS,UAAU,EACjC,IAAI,QAAM;AAAA,MACP,MAAM;AAAA,MACN,UAAU;AAAA,QACN,MAAM,EAAE;AAAA,QACR,aAAa,EAAE,eAAe;AAAA,QAC9B,YAAY,EAAE;AAAA,MAClB;AAAA,IACJ,EAAE;AAAA,EACV;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,SAA4G;AAClI,UAAM,KAAK,QAAQ;AACnB,QAAI,CAAC,GAAI,QAAO;AAEhB,YAAQ,GAAG,MAAM;AAAA,MACb,KAAK;AACD,eAAO;AAAA,MACX,KAAK;AACD,eAAO;AAAA,MACX,KAAK;AACD,eAAO;AAAA,MACX,KAAK;AACD,eAAO,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,GAAG,SAAS,EAAE;AAAA,MAC/D;AACI,eAAO;AAAA,IACf;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAA6E;AAC1F,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAC7C,UAAM,QAAQ,KAAK,aAAa,OAAO;AACvC,UAAM,aAAa,KAAK,kBAAkB,OAAO;AAEjD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,QACtB;AAAA,QACA;AAAA,MACJ,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,UAAM,UAAoC,CAAC;AAG3C,QAAI,KAAK,SAAS;AACd,cAAQ,KAAK;AAAA,QACT,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,kBAAkB;AAAA,MACtB,CAAC;AAAA,IACL;AAGA,QAAI,KAAK,cAAc,KAAK,WAAW,SAAS,GAAG;AAC/C,iBAAW,MAAM,KAAK,YAAY;AAC9B,gBAAQ,KAAK;AAAA,UACT,MAAM;AAAA,UACN,YAAY,GAAG;AAAA,UACf,UAAU,GAAG,SAAS;AAAA,UACtB,OAAO,GAAG,SAAS;AAAA,UACnB,kBAAkB;AAAA,QACtB,CAAC;AAAA,MACL;AAAA,IACJ;AAEA,UAAM,WAA8B,CAAC;AAErC,WAAO;AAAA,MACH;AAAA,MACA,cAAc,KAAK,gBAAgB,KAAK,aAAa;AAAA,MACrD,OAAO,KAAK,WAAW,KAAK,MAAM,eAAe,KAAK,MAAM,iBAAiB;AAAA,MAC7E;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,SAAS,SAA2E;AACtF,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAC7C,UAAM,QAAQ,KA
AK,aAAa,OAAO;AACvC,UAAM,aAAa,KAAK,kBAAkB,OAAO;AACjD,UAAM,OAAO;AAEb,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,QACtB;AAAA,QACA;AAAA,MACJ,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,uBAAuB;AAAA,IAC3C;AAEA,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,SAAS;AACb,QAAI,cAAc;AAClB,QAAI,eAAe;AACnB,UAAM,aAAa;AACnB,QAAI,UAAU;AAEd,UAAM,SAAS,IAAI,eAA0C;AAAA,MACzD,MAAM,KAAK,YAAY;AACnB,YAAI;AACA,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,cAAI,MAAM;AAEN,gBAAI,SAAS;AACT,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,IAAI;AAAA,cACR,CAAC;AAAA,YACL;AACA,uBAAW,QAAQ;AAAA,cACf,MAAM;AAAA,cACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,cACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,YACpD,CAAC;AACD,uBAAW,MAAM;AACjB;AAAA,UACJ;AAEA,oBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,gBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,mBAAS,MAAM,IAAI,KAAK;AAExB,qBAAW,QAAQ,OAAO;AACtB,gBAAI,KAAK,KAAK,MAAM,GAAI;AACxB,gBAAI,CAAC,KAAK,WAAW,QAAQ,EAAG;AAEhC,kBAAM,OAAO,KAAK,MAAM,CAAC;AACzB,gBAAI,SAAS,UAAU;AACnB,kBAAI,SAAS;AACT,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,gBACR,CAAC;AAAA,cACL;AACA,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,gBACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,cACpD,CAAC;AACD,yBAAW,MAAM;AACjB;AAAA,YACJ;AAEA,gBAAI;AACA,oBAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,kBAAI,MAAM,OAAO;AAEb,oBAAI,CAAC,SAAS;AACV,4BAAU;AACV,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AAEA,gCAAgB,KAAK,KAAK,MAAM,MAAM,SAAS,CAAC;AAChD,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO,MAAM;AAAA,gBACjB,CAAC;AAAA,cACL;AAGA,kBAAI,MAAM,cAAc,MAAM,WAAW,SAAS,GAAG;AACjD,2BAAW,MAAM,MAAM,YAAY;AAE/B,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,YAAY,GAAG;AAAA,oBACf,UAAU,GAAG,SAAS;AAAA,oBACtB,OAAO,GAAG,SAAS;AAAA,oBACnB,kBAAkB;AAAA,kBACtB,CAAC;AAAA,gBACL;AAAA,cACJ;AAEA,kBAAI,MAAM,eAAe;AACrB,oBAAI,SAAS;AACT,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AACA,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,cAAc,KAAK,gBAAgB,MAAM,aAAa;AAAA,kBACtD,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,gBACpD,CAAC;AACD,2BAAW,MAAM;AACjB;AAAA,cACJ;AAAA,YACJ,QAAQ;AAAA,YAER;AAAA,UACJ;AAAA,QACJ,SAAS,OAAO;AACZ,qBAAW,MAAM,KAAK;AAAA,QAC1B;AAAA,MACJ;AAAA,MACA,SAAS;AACL,eAAO,OAAO;AAAA,MAClB;AAAA,IACJ,CAAC;AAED,WAAO;AAAA,MACH;AAAA,IACJ;AAAA,EACJ;AACJ;;;ACnYO,SAAS,cAAc,UAAmC,CAAC,GAAoB;AAClF,QAAM,UAAU,QAAQ,WAAW;AACnC,QAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,sGAAsG;AAAA,EAC1H;AAEA,QAAM,cAAc,CAAC,SAAiB,WAAgC,CAAC,MAAM;AACzE,WAAO,IAAI,yBAAyB,SAAS;AAAA,MACzC;AAAA,MACA;AAAA,MACA,SAAS,QAAQ;AAAA,MACjB,GAAG;AAAA,IACP,CAAC;AAAA,EACL;AAEA,QAAM,WAAW,SAAU,SAAiB,UAAgC;AACxE,WAAO,YAAY,SAAS,QAAQ;AAAA,EACxC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACX;AAeO,IAAM,UAA2B,SAAU,SAAiB,UAAgC;AAC/F,QAAM,SAAS,QAAQ,IAAI;AAC3B,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,2GAA2G;AAAA,EAC/H;AACA,SAAO,IAAI,yBAAyB,SAAS;AAAA,IACzC;AAAA,IACA,SAAS;AAAA,IACT,GAAG;AAAA,EACP,CAAC;AACL;AAEA,QAAQ,OAAO;","names":[]}
package/dist/index.mjs
CHANGED
@@ -82,8 +82,45 @@ var CencoriChatLanguageModel = class {
       }
     };
   }
+  /**
+   * Convert Vercel AI SDK tools to Cencori format
+   */
+  convertTools(options) {
+    if (!options.tools || options.tools.length === 0) {
+      return void 0;
+    }
+    return options.tools.filter((t) => t.type === "function").map((t) => ({
+      type: "function",
+      function: {
+        name: t.name,
+        description: t.description || "",
+        parameters: t.inputSchema
+      }
+    }));
+  }
+  /**
+   * Convert Vercel AI SDK tool choice to Cencori format
+   */
+  convertToolChoice(options) {
+    const tc = options.toolChoice;
+    if (!tc) return void 0;
+    switch (tc.type) {
+      case "auto":
+        return "auto";
+      case "none":
+        return "none";
+      case "required":
+        return "required";
+      case "tool":
+        return { type: "function", function: { name: tc.toolName } };
+      default:
+        return void 0;
+    }
+  }
   async doGenerate(options) {
     const messages = this.convertMessages(options);
+    const tools = this.convertTools(options);
+    const toolChoice = this.convertToolChoice(options);
     const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {
       method: "POST",
       headers: this.getHeaders(),
@@ -93,7 +130,9 @@ var CencoriChatLanguageModel = class {
         temperature: options.temperature,
         maxTokens: options.maxOutputTokens,
         stream: false,
-        userId: this.settings.userId
+        userId: this.settings.userId,
+        tools,
+        toolChoice
       }),
       signal: options.abortSignal
     });
@@ -102,11 +141,25 @@ var CencoriChatLanguageModel = class {
      throw new Error(`Cencori API error: ${error.error || response.statusText}`);
     }
     const data = await response.json();
-    const content = [{
-      type: "text",
-      text: data.content,
-      providerMetadata: void 0
-    }];
+    const content = [];
+    if (data.content) {
+      content.push({
+        type: "text",
+        text: data.content,
+        providerMetadata: void 0
+      });
+    }
+    if (data.tool_calls && data.tool_calls.length > 0) {
+      for (const tc of data.tool_calls) {
+        content.push({
+          type: "tool-call",
+          toolCallId: tc.id,
+          toolName: tc.function.name,
+          input: tc.function.arguments,
+          providerMetadata: void 0
+        });
+      }
+    }
     const warnings = [];
     return {
       content,
@@ -117,6 +170,8 @@ var CencoriChatLanguageModel = class {
   }
   async doStream(options) {
     const messages = this.convertMessages(options);
+    const tools = this.convertTools(options);
+    const toolChoice = this.convertToolChoice(options);
     const self = this;
     const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {
       method: "POST",
@@ -127,7 +182,9 @@ var CencoriChatLanguageModel = class {
         temperature: options.temperature,
         maxTokens: options.maxOutputTokens,
         stream: true,
-        userId: this.settings.userId
+        userId: this.settings.userId,
+        tools,
+        toolChoice
       }),
       signal: options.abortSignal
     });
@@ -203,6 +260,17 @@ var CencoriChatLanguageModel = class {
                   delta: chunk.delta
                 });
               }
+              if (chunk.tool_calls && chunk.tool_calls.length > 0) {
+                for (const tc of chunk.tool_calls) {
+                  controller.enqueue({
+                    type: "tool-call",
+                    toolCallId: tc.id,
+                    toolName: tc.function.name,
+                    input: tc.function.arguments,
+                    providerMetadata: void 0
+                  });
+                }
+              }
              if (chunk.finish_reason) {
                if (started) {
                  controller.enqueue({
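Editor's note: the .mjs build carries the same change as dist/index.js, only in ESM form. In streaming mode, the new branch means an SSE chunk that carries `tool_calls` is surfaced to the Vercel AI SDK as a complete `tool-call` stream part. A small sketch of that mapping; the chunk value is hypothetical and only the field names are taken from the diff.

```typescript
// Hypothetical parsed SSE chunk, shaped like CencoriStreamChunk in the new source.
const chunk = {
  delta: '',
  tool_calls: [
    { id: 'call_1', type: 'function', function: { name: 'getWeather', arguments: '{"location":"NYC"}' } },
  ],
};

// What doStream enqueues for each entry (per the hunk above): one complete
// tool-call part per tool call, with the arguments forwarded as a JSON string.
for (const tc of chunk.tool_calls) {
  console.log({
    type: 'tool-call',
    toolCallId: tc.id,
    toolName: tc.function.name,
    input: tc.function.arguments,
    providerMetadata: undefined,
  });
}
```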
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/vercel/cencori-chat-model.ts","../src/vercel/cencori-provider.ts"],"sourcesContent":["/**\n * Cencori Chat Language Model\n * \n * Implements the Vercel AI SDK's LanguageModelV3 interface (AI SDK v6 compatible)\n */\n\nimport type {\n LanguageModelV3,\n LanguageModelV3CallOptions,\n LanguageModelV3GenerateResult,\n LanguageModelV3StreamResult,\n LanguageModelV3StreamPart,\n LanguageModelV3Content,\n LanguageModelV3Usage,\n LanguageModelV3FinishReason,\n SharedV3Warning,\n} from '@ai-sdk/provider';\n\nexport interface CencoriChatModelSettings {\n apiKey: string;\n baseUrl: string;\n headers?: Record<string, string>;\n userId?: string;\n}\n\ninterface CencoriMessage {\n role: 'system' | 'user' | 'assistant';\n content: string;\n}\n\ninterface CencoriResponse {\n content: string;\n model: string;\n provider: string;\n usage: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n cost_usd: number;\n finish_reason?: string;\n}\n\ninterface CencoriStreamChunk {\n delta: string;\n finish_reason?: string;\n}\n\nexport class CencoriChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3' as const;\n readonly provider = 'cencori';\n\n readonly modelId: string;\n readonly supportedUrls: Record<string, RegExp[]> = {};\n private readonly settings: CencoriChatModelSettings;\n\n constructor(modelId: string, settings: CencoriChatModelSettings) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private getHeaders(): Record<string, string> {\n return {\n 'Content-Type': 'application/json',\n 'CENCORI_API_KEY': this.settings.apiKey,\n ...this.settings.headers,\n };\n }\n\n private convertMessages(options: LanguageModelV3CallOptions): CencoriMessage[] {\n const messages: CencoriMessage[] = [];\n\n // In V3, options.prompt is directly an array of LanguageModelV3Message\n const promptMessages = options.prompt;\n\n if (!promptMessages || !Array.isArray(promptMessages)) {\n return messages;\n }\n\n for (const msg of promptMessages) {\n let content = '';\n\n if (msg.role === 'system') {\n // System messages have content as string directly\n content = msg.content as string;\n } else if (msg.role === 'user' || msg.role === 'assistant') {\n // User and assistant messages have content as array of parts\n const msgContent = msg.content;\n if (Array.isArray(msgContent)) {\n content = msgContent\n .filter((part: { type: string }) => part.type === 'text')\n .map((part: { type: string; text?: string }) => part.text || '')\n .join('');\n } else if (typeof msgContent === 'string') {\n content = msgContent;\n }\n }\n\n if (content && (msg.role === 'system' || msg.role === 'user' || msg.role === 'assistant')) {\n messages.push({\n role: msg.role as 'system' | 'user' | 'assistant',\n content,\n });\n }\n }\n\n return messages;\n }\n\n private mapFinishReason(reason?: string): LanguageModelV3FinishReason {\n let unified: 'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other';\n\n switch (reason) {\n case 'stop':\n case 'end_turn':\n unified = 'stop';\n break;\n case 'length':\n case 'max_tokens':\n unified = 'length';\n break;\n case 'content_filter':\n unified = 'content-filter';\n break;\n case 'tool_calls':\n case 'tool-calls':\n unified = 'tool-calls';\n break;\n case 'error':\n unified = 'error';\n break;\n default:\n unified = 'stop';\n }\n\n return { unified, raw: reason };\n }\n\n private buildUsage(inputTokens: number, outputTokens: number): LanguageModelV3Usage {\n return {\n inputTokens: {\n 
total: inputTokens,\n noCache: inputTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: outputTokens,\n text: outputTokens,\n reasoning: undefined,\n },\n };\n }\n\n async doGenerate(options: LanguageModelV3CallOptions): Promise<LanguageModelV3GenerateResult> {\n const messages = this.convertMessages(options);\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: false,\n userId: this.settings.userId,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const data = await response.json() as CencoriResponse;\n\n const content: LanguageModelV3Content[] = [{\n type: 'text',\n text: data.content,\n providerMetadata: undefined,\n }];\n\n const warnings: SharedV3Warning[] = [];\n\n return {\n content,\n finishReason: this.mapFinishReason(data.finish_reason),\n usage: this.buildUsage(data.usage.prompt_tokens, data.usage.completion_tokens),\n warnings,\n };\n }\n\n async doStream(options: LanguageModelV3CallOptions): Promise<LanguageModelV3StreamResult> {\n const messages = this.convertMessages(options);\n const self = this;\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: true,\n userId: this.settings.userId,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const reader = response.body?.getReader();\n if (!reader) {\n throw new Error('Response body is null');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n let inputTokens = 0;\n let outputTokens = 0;\n const textPartId = 'text-0';\n let started = false;\n\n const stream = new ReadableStream<LanguageModelV3StreamPart>({\n async pull(controller) {\n try {\n const { done, value } = await reader.read();\n\n if (done) {\n // End text block and finish\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.trim() === '') continue;\n if (!line.startsWith('data: ')) continue;\n\n const data = line.slice(6);\n if (data === '[DONE]') {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n try {\n const chunk = JSON.parse(data) as CencoriStreamChunk;\n\n if (chunk.delta) {\n // Start text if not started\n if (!started) {\n started = true;\n controller.enqueue({\n type: 'text-start',\n id: 
textPartId,\n });\n }\n\n outputTokens += Math.ceil(chunk.delta.length / 4); // Rough estimate\n controller.enqueue({\n type: 'text-delta',\n id: textPartId,\n delta: chunk.delta,\n });\n }\n\n if (chunk.finish_reason) {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason(chunk.finish_reason),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n } catch {\n // Skip malformed JSON\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader.cancel();\n },\n });\n\n return {\n stream,\n };\n }\n}\n","/**\n * Cencori AI Provider for Vercel AI SDK\n * \n * Use Cencori with streamText(), generateText(), and useChat()\n */\n\nimport { CencoriChatLanguageModel } from './cencori-chat-model';\nimport type { CencoriProviderSettings, CencoriChatSettings } from './types';\n\nexport interface CencoriProvider {\n /**\n * Create a Cencori chat model for use with Vercel AI SDK\n * \n * @param modelId - The model ID (e.g., 'gemini-2.5-flash', 'gpt-4o', 'claude-3-opus')\n * @param settings - Optional model-specific settings\n * @returns A LanguageModelV1 compatible model\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\n (modelId: string, settings?: CencoriChatSettings): CencoriChatLanguageModel;\n\n /**\n * Create a chat model (alias for the provider function)\n */\n chat: (modelId: string, settings?: CencoriChatSettings) => CencoriChatLanguageModel;\n}\n\n/**\n * Create a Cencori provider instance\n * \n * @param options - Provider configuration options\n * @returns A Cencori provider\n * \n * @example\n * import { createCencori } from '@cencori/ai-sdk';\n * \n * const cencori = createCencori({\n * apiKey: process.env.CENCORI_API_KEY\n * });\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport function createCencori(options: CencoriProviderSettings = {}): CencoriProvider {\n const baseUrl = options.baseUrl ?? 'https://cencori.com';\n const apiKey = options.apiKey ?? process.env.CENCORI_API_KEY;\n\n if (!apiKey) {\n throw new Error('Cencori API key is required. Pass it via options.apiKey or set CENCORI_API_KEY environment variable.');\n }\n\n const createModel = (modelId: string, settings: CencoriChatSettings = {}) => {\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl,\n headers: options.headers,\n ...settings,\n });\n };\n\n const provider = function (modelId: string, settings?: CencoriChatSettings) {\n return createModel(modelId, settings);\n } as CencoriProvider;\n\n provider.chat = createModel;\n\n return provider;\n}\n\n/**\n * Default Cencori provider instance\n * Uses CENCORI_API_KEY environment variable (lazy initialization)\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport const cencori: CencoriProvider = function (modelId: string, settings?: CencoriChatSettings) {\n const apiKey = process.env.CENCORI_API_KEY;\n if (!apiKey) {\n throw new Error('CENCORI_API_KEY environment variable is required. 
Set it or use createCencori({ apiKey: \"...\" }) instead.');\n }\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl: 'https://cencori.com',\n ...settings,\n });\n} as CencoriProvider;\n\ncencori.chat = cencori;\n"],"mappings":";AAgDO,IAAM,2BAAN,MAA0D;AAAA,EAQ7D,YAAY,SAAiB,UAAoC;AAPjE,SAAS,uBAAuB;AAChC,SAAS,WAAW;AAGpB,SAAS,gBAA0C,CAAC;AAIhD,SAAK,UAAU;AACf,SAAK,WAAW;AAAA,EACpB;AAAA,EAEQ,aAAqC;AACzC,WAAO;AAAA,MACH,gBAAgB;AAAA,MAChB,mBAAmB,KAAK,SAAS;AAAA,MACjC,GAAG,KAAK,SAAS;AAAA,IACrB;AAAA,EACJ;AAAA,EAEQ,gBAAgB,SAAuD;AAC3E,UAAM,WAA6B,CAAC;AAGpC,UAAM,iBAAiB,QAAQ;AAE/B,QAAI,CAAC,kBAAkB,CAAC,MAAM,QAAQ,cAAc,GAAG;AACnD,aAAO;AAAA,IACX;AAEA,eAAW,OAAO,gBAAgB;AAC9B,UAAI,UAAU;AAEd,UAAI,IAAI,SAAS,UAAU;AAEvB,kBAAU,IAAI;AAAA,MAClB,WAAW,IAAI,SAAS,UAAU,IAAI,SAAS,aAAa;AAExD,cAAM,aAAa,IAAI;AACvB,YAAI,MAAM,QAAQ,UAAU,GAAG;AAC3B,oBAAU,WACL,OAAO,CAAC,SAA2B,KAAK,SAAS,MAAM,EACvD,IAAI,CAAC,SAA0C,KAAK,QAAQ,EAAE,EAC9D,KAAK,EAAE;AAAA,QAChB,WAAW,OAAO,eAAe,UAAU;AACvC,oBAAU;AAAA,QACd;AAAA,MACJ;AAEA,UAAI,YAAY,IAAI,SAAS,YAAY,IAAI,SAAS,UAAU,IAAI,SAAS,cAAc;AACvF,iBAAS,KAAK;AAAA,UACV,MAAM,IAAI;AAAA,UACV;AAAA,QACJ,CAAC;AAAA,MACL;AAAA,IACJ;AAEA,WAAO;AAAA,EACX;AAAA,EAEQ,gBAAgB,QAA8C;AAClE,QAAI;AAEJ,YAAQ,QAAQ;AAAA,MACZ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ;AACI,kBAAU;AAAA,IAClB;AAEA,WAAO,EAAE,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA,EAEQ,WAAW,aAAqB,cAA4C;AAChF,WAAO;AAAA,MACH,aAAa;AAAA,QACT,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MAChB;AAAA,MACA,cAAc;AAAA,QACV,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACf;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAA6E;AAC1F,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAE7C,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,MAC1B,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,UAAM,UAAoC,CAAC;AAAA,MACvC,MAAM;AAAA,MACN,MAAM,KAAK;AAAA,MACX,kBAAkB;AAAA,IACtB,CAAC;AAED,UAAM,WAA8B,CAAC;AAErC,WAAO;AAAA,MACH;AAAA,MACA,cAAc,KAAK,gBAAgB,KAAK,aAAa;AAAA,MACrD,OAAO,KAAK,WAAW,KAAK,MAAM,eAAe,KAAK,MAAM,iBAAiB;AAAA,MAC7E;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,SAAS,SAA2E;AACtF,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAC7C,UAAM,OAAO;AAEb,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,MAC1B,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,uBAAuB;AAAA,IAC3C;AAEA,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,SAAS;AACb,QAAI,cAAc;AAClB,QAAI,eAAe;AACnB,UAAM,aAAa;AACnB,QAAI,UAAU;AAEd,UAAM,SAAS,IAAI,eAA0C;AAAA,MACzD,MAAM,KAAK,YAAY;AACnB,YAAI;AACA,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,cAAI,MAAM;AAEN,gBAAI,SAAS;AACT,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,IAAI;AAAA,cACR,CAAC;AAAA,YACL;AACA,uBAAW,QAAQ;AAAA,cACf,MAAM;AAAA,cACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,cACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,YACpD,CAAC;AACD,uBAAW,MAAM;AACjB;AAAA,UACJ;AAEA,
oBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,gBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,mBAAS,MAAM,IAAI,KAAK;AAExB,qBAAW,QAAQ,OAAO;AACtB,gBAAI,KAAK,KAAK,MAAM,GAAI;AACxB,gBAAI,CAAC,KAAK,WAAW,QAAQ,EAAG;AAEhC,kBAAM,OAAO,KAAK,MAAM,CAAC;AACzB,gBAAI,SAAS,UAAU;AACnB,kBAAI,SAAS;AACT,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,gBACR,CAAC;AAAA,cACL;AACA,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,gBACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,cACpD,CAAC;AACD,yBAAW,MAAM;AACjB;AAAA,YACJ;AAEA,gBAAI;AACA,oBAAM,QAAQ,KAAK,MAAM,IAAI;AAE7B,kBAAI,MAAM,OAAO;AAEb,oBAAI,CAAC,SAAS;AACV,4BAAU;AACV,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AAEA,gCAAgB,KAAK,KAAK,MAAM,MAAM,SAAS,CAAC;AAChD,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO,MAAM;AAAA,gBACjB,CAAC;AAAA,cACL;AAEA,kBAAI,MAAM,eAAe;AACrB,oBAAI,SAAS;AACT,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AACA,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,cAAc,KAAK,gBAAgB,MAAM,aAAa;AAAA,kBACtD,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,gBACpD,CAAC;AACD,2BAAW,MAAM;AACjB;AAAA,cACJ;AAAA,YACJ,QAAQ;AAAA,YAER;AAAA,UACJ;AAAA,QACJ,SAAS,OAAO;AACZ,qBAAW,MAAM,KAAK;AAAA,QAC1B;AAAA,MACJ;AAAA,MACA,SAAS;AACL,eAAO,OAAO;AAAA,MAClB;AAAA,IACJ,CAAC;AAED,WAAO;AAAA,MACH;AAAA,IACJ;AAAA,EACJ;AACJ;;;ACrRO,SAAS,cAAc,UAAmC,CAAC,GAAoB;AAClF,QAAM,UAAU,QAAQ,WAAW;AACnC,QAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,sGAAsG;AAAA,EAC1H;AAEA,QAAM,cAAc,CAAC,SAAiB,WAAgC,CAAC,MAAM;AACzE,WAAO,IAAI,yBAAyB,SAAS;AAAA,MACzC;AAAA,MACA;AAAA,MACA,SAAS,QAAQ;AAAA,MACjB,GAAG;AAAA,IACP,CAAC;AAAA,EACL;AAEA,QAAM,WAAW,SAAU,SAAiB,UAAgC;AACxE,WAAO,YAAY,SAAS,QAAQ;AAAA,EACxC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACX;AAeO,IAAM,UAA2B,SAAU,SAAiB,UAAgC;AAC/F,QAAM,SAAS,QAAQ,IAAI;AAC3B,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,2GAA2G;AAAA,EAC/H;AACA,SAAO,IAAI,yBAAyB,SAAS;AAAA,IACzC;AAAA,IACA,SAAS;AAAA,IACT,GAAG;AAAA,EACP,CAAC;AACL;AAEA,QAAQ,OAAO;","names":[]}
|
|
1
|
+
{"version":3,"sources":["../src/vercel/cencori-chat-model.ts","../src/vercel/cencori-provider.ts"],"sourcesContent":["/**\n * Cencori Chat Language Model\n * \n * Implements the Vercel AI SDK's LanguageModelV3 interface (AI SDK v6 compatible)\n */\n\nimport type {\n LanguageModelV3,\n LanguageModelV3CallOptions,\n LanguageModelV3GenerateResult,\n LanguageModelV3StreamResult,\n LanguageModelV3StreamPart,\n LanguageModelV3Content,\n LanguageModelV3Usage,\n LanguageModelV3FinishReason,\n SharedV3Warning,\n} from '@ai-sdk/provider';\n\nexport interface CencoriChatModelSettings {\n apiKey: string;\n baseUrl: string;\n headers?: Record<string, string>;\n userId?: string;\n}\n\ninterface CencoriMessage {\n role: 'system' | 'user' | 'assistant' | 'tool';\n content: string;\n toolCallId?: string;\n}\n\n/**\n * Tool definition in Cencori format (OpenAI-compatible)\n */\ninterface CencoriTool {\n type: 'function';\n function: {\n name: string;\n description: string;\n parameters: Record<string, any>;\n };\n}\n\n/**\n * Tool call from the model\n */\ninterface CencoriToolCall {\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n}\n\ninterface CencoriResponse {\n content: string;\n model: string;\n provider: string;\n usage: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n cost_usd: number;\n finish_reason?: string;\n tool_calls?: CencoriToolCall[];\n}\n\ninterface CencoriStreamChunk {\n delta: string;\n finish_reason?: string;\n tool_calls?: CencoriToolCall[];\n}\n\nexport class CencoriChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3' as const;\n readonly provider = 'cencori';\n\n readonly modelId: string;\n readonly supportedUrls: Record<string, RegExp[]> = {};\n private readonly settings: CencoriChatModelSettings;\n\n constructor(modelId: string, settings: CencoriChatModelSettings) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private getHeaders(): Record<string, string> {\n return {\n 'Content-Type': 'application/json',\n 'CENCORI_API_KEY': this.settings.apiKey,\n ...this.settings.headers,\n };\n }\n\n private convertMessages(options: LanguageModelV3CallOptions): CencoriMessage[] {\n const messages: CencoriMessage[] = [];\n\n // In V3, options.prompt is directly an array of LanguageModelV3Message\n const promptMessages = options.prompt;\n\n if (!promptMessages || !Array.isArray(promptMessages)) {\n return messages;\n }\n\n for (const msg of promptMessages) {\n let content = '';\n\n if (msg.role === 'system') {\n // System messages have content as string directly\n content = msg.content as string;\n } else if (msg.role === 'user' || msg.role === 'assistant') {\n // User and assistant messages have content as array of parts\n const msgContent = msg.content;\n if (Array.isArray(msgContent)) {\n content = msgContent\n .filter((part: { type: string }) => part.type === 'text')\n .map((part: { type: string; text?: string }) => part.text || '')\n .join('');\n } else if (typeof msgContent === 'string') {\n content = msgContent;\n }\n }\n\n if (content && (msg.role === 'system' || msg.role === 'user' || msg.role === 'assistant')) {\n messages.push({\n role: msg.role as 'system' | 'user' | 'assistant',\n content,\n });\n }\n }\n\n return messages;\n }\n\n private mapFinishReason(reason?: string): LanguageModelV3FinishReason {\n let unified: 'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other';\n\n switch (reason) {\n case 'stop':\n case 'end_turn':\n unified = 
'stop';\n break;\n case 'length':\n case 'max_tokens':\n unified = 'length';\n break;\n case 'content_filter':\n unified = 'content-filter';\n break;\n case 'tool_calls':\n case 'tool-calls':\n unified = 'tool-calls';\n break;\n case 'error':\n unified = 'error';\n break;\n default:\n unified = 'stop';\n }\n\n return { unified, raw: reason };\n }\n\n private buildUsage(inputTokens: number, outputTokens: number): LanguageModelV3Usage {\n return {\n inputTokens: {\n total: inputTokens,\n noCache: inputTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: outputTokens,\n text: outputTokens,\n reasoning: undefined,\n },\n };\n }\n\n /**\n * Convert Vercel AI SDK tools to Cencori format\n */\n private convertTools(options: LanguageModelV3CallOptions): CencoriTool[] | undefined {\n if (!options.tools || options.tools.length === 0) {\n return undefined;\n }\n\n return options.tools\n .filter(t => t.type === 'function')\n .map(t => ({\n type: 'function' as const,\n function: {\n name: t.name,\n description: t.description || '',\n parameters: t.inputSchema as Record<string, any>,\n },\n }));\n }\n\n /**\n * Convert Vercel AI SDK tool choice to Cencori format\n */\n private convertToolChoice(options: LanguageModelV3CallOptions): string | { type: 'function'; function: { name: string } } | undefined {\n const tc = options.toolChoice;\n if (!tc) return undefined;\n\n switch (tc.type) {\n case 'auto':\n return 'auto';\n case 'none':\n return 'none';\n case 'required':\n return 'required';\n case 'tool':\n return { type: 'function', function: { name: tc.toolName } };\n default:\n return undefined;\n }\n }\n\n async doGenerate(options: LanguageModelV3CallOptions): Promise<LanguageModelV3GenerateResult> {\n const messages = this.convertMessages(options);\n const tools = this.convertTools(options);\n const toolChoice = this.convertToolChoice(options);\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: false,\n userId: this.settings.userId,\n tools,\n toolChoice,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const data = await response.json() as CencoriResponse;\n\n // Build content array\n const content: LanguageModelV3Content[] = [];\n\n // Add text content if present\n if (data.content) {\n content.push({\n type: 'text',\n text: data.content,\n providerMetadata: undefined,\n });\n }\n\n // Add tool calls if present\n if (data.tool_calls && data.tool_calls.length > 0) {\n for (const tc of data.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: tc.id,\n toolName: tc.function.name,\n input: tc.function.arguments,\n providerMetadata: undefined,\n });\n }\n }\n\n const warnings: SharedV3Warning[] = [];\n\n return {\n content,\n finishReason: this.mapFinishReason(data.finish_reason),\n usage: this.buildUsage(data.usage.prompt_tokens, data.usage.completion_tokens),\n warnings,\n };\n }\n\n async doStream(options: LanguageModelV3CallOptions): Promise<LanguageModelV3StreamResult> {\n const messages = this.convertMessages(options);\n const tools = this.convertTools(options);\n const toolChoice = this.convertToolChoice(options);\n const self = 
this;\n\n const response = await fetch(`${this.settings.baseUrl}/api/ai/chat`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify({\n messages,\n model: this.modelId,\n temperature: options.temperature,\n maxTokens: options.maxOutputTokens,\n stream: true,\n userId: this.settings.userId,\n tools,\n toolChoice,\n }),\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(`Cencori API error: ${error.error || response.statusText}`);\n }\n\n const reader = response.body?.getReader();\n if (!reader) {\n throw new Error('Response body is null');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n let inputTokens = 0;\n let outputTokens = 0;\n const textPartId = 'text-0';\n let started = false;\n\n const stream = new ReadableStream<LanguageModelV3StreamPart>({\n async pull(controller) {\n try {\n const { done, value } = await reader.read();\n\n if (done) {\n // End text block and finish\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.trim() === '') continue;\n if (!line.startsWith('data: ')) continue;\n\n const data = line.slice(6);\n if (data === '[DONE]') {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason('stop'),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n\n try {\n const chunk = JSON.parse(data) as CencoriStreamChunk;\n\n // Handle text delta\n if (chunk.delta) {\n // Start text if not started\n if (!started) {\n started = true;\n controller.enqueue({\n type: 'text-start',\n id: textPartId,\n });\n }\n\n outputTokens += Math.ceil(chunk.delta.length / 4); // Rough estimate\n controller.enqueue({\n type: 'text-delta',\n id: textPartId,\n delta: chunk.delta,\n });\n }\n\n // Handle tool calls\n if (chunk.tool_calls && chunk.tool_calls.length > 0) {\n for (const tc of chunk.tool_calls) {\n // Emit complete tool-call event\n controller.enqueue({\n type: 'tool-call',\n toolCallId: tc.id,\n toolName: tc.function.name,\n input: tc.function.arguments,\n providerMetadata: undefined,\n });\n }\n }\n\n if (chunk.finish_reason) {\n if (started) {\n controller.enqueue({\n type: 'text-end',\n id: textPartId,\n });\n }\n controller.enqueue({\n type: 'finish',\n finishReason: self.mapFinishReason(chunk.finish_reason),\n usage: self.buildUsage(inputTokens, outputTokens),\n });\n controller.close();\n return;\n }\n } catch {\n // Skip malformed JSON\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader.cancel();\n },\n });\n\n return {\n stream,\n };\n }\n}\n","/**\n * Cencori AI Provider for Vercel AI SDK\n * \n * Use Cencori with streamText(), generateText(), and useChat()\n */\n\nimport { CencoriChatLanguageModel } from './cencori-chat-model';\nimport type { CencoriProviderSettings, CencoriChatSettings } from './types';\n\nexport interface CencoriProvider {\n /**\n * Create a Cencori chat model for use with Vercel AI SDK\n * \n * @param modelId - The model ID (e.g., 
'gemini-2.5-flash', 'gpt-4o', 'claude-3-opus')\n * @param settings - Optional model-specific settings\n * @returns A LanguageModelV1 compatible model\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\n (modelId: string, settings?: CencoriChatSettings): CencoriChatLanguageModel;\n\n /**\n * Create a chat model (alias for the provider function)\n */\n chat: (modelId: string, settings?: CencoriChatSettings) => CencoriChatLanguageModel;\n}\n\n/**\n * Create a Cencori provider instance\n * \n * @param options - Provider configuration options\n * @returns A Cencori provider\n * \n * @example\n * import { createCencori } from '@cencori/ai-sdk';\n * \n * const cencori = createCencori({\n * apiKey: process.env.CENCORI_API_KEY\n * });\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport function createCencori(options: CencoriProviderSettings = {}): CencoriProvider {\n const baseUrl = options.baseUrl ?? 'https://cencori.com';\n const apiKey = options.apiKey ?? process.env.CENCORI_API_KEY;\n\n if (!apiKey) {\n throw new Error('Cencori API key is required. Pass it via options.apiKey or set CENCORI_API_KEY environment variable.');\n }\n\n const createModel = (modelId: string, settings: CencoriChatSettings = {}) => {\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl,\n headers: options.headers,\n ...settings,\n });\n };\n\n const provider = function (modelId: string, settings?: CencoriChatSettings) {\n return createModel(modelId, settings);\n } as CencoriProvider;\n\n provider.chat = createModel;\n\n return provider;\n}\n\n/**\n * Default Cencori provider instance\n * Uses CENCORI_API_KEY environment variable (lazy initialization)\n * \n * @example\n * import { cencori } from '@cencori/ai-sdk';\n * import { streamText } from 'ai';\n * \n * const result = await streamText({\n * model: cencori('gemini-2.5-flash'),\n * messages: [{ role: 'user', content: 'Hello!' }]\n * });\n */\nexport const cencori: CencoriProvider = function (modelId: string, settings?: CencoriChatSettings) {\n const apiKey = process.env.CENCORI_API_KEY;\n if (!apiKey) {\n throw new Error('CENCORI_API_KEY environment variable is required. 
Set it or use createCencori({ apiKey: \"...\" }) instead.');\n }\n return new CencoriChatLanguageModel(modelId, {\n apiKey,\n baseUrl: 'https://cencori.com',\n ...settings,\n });\n} as CencoriProvider;\n\ncencori.chat = cencori;\n"],"mappings":";AA2EO,IAAM,2BAAN,MAA0D;AAAA,EAQ7D,YAAY,SAAiB,UAAoC;AAPjE,SAAS,uBAAuB;AAChC,SAAS,WAAW;AAGpB,SAAS,gBAA0C,CAAC;AAIhD,SAAK,UAAU;AACf,SAAK,WAAW;AAAA,EACpB;AAAA,EAEQ,aAAqC;AACzC,WAAO;AAAA,MACH,gBAAgB;AAAA,MAChB,mBAAmB,KAAK,SAAS;AAAA,MACjC,GAAG,KAAK,SAAS;AAAA,IACrB;AAAA,EACJ;AAAA,EAEQ,gBAAgB,SAAuD;AAC3E,UAAM,WAA6B,CAAC;AAGpC,UAAM,iBAAiB,QAAQ;AAE/B,QAAI,CAAC,kBAAkB,CAAC,MAAM,QAAQ,cAAc,GAAG;AACnD,aAAO;AAAA,IACX;AAEA,eAAW,OAAO,gBAAgB;AAC9B,UAAI,UAAU;AAEd,UAAI,IAAI,SAAS,UAAU;AAEvB,kBAAU,IAAI;AAAA,MAClB,WAAW,IAAI,SAAS,UAAU,IAAI,SAAS,aAAa;AAExD,cAAM,aAAa,IAAI;AACvB,YAAI,MAAM,QAAQ,UAAU,GAAG;AAC3B,oBAAU,WACL,OAAO,CAAC,SAA2B,KAAK,SAAS,MAAM,EACvD,IAAI,CAAC,SAA0C,KAAK,QAAQ,EAAE,EAC9D,KAAK,EAAE;AAAA,QAChB,WAAW,OAAO,eAAe,UAAU;AACvC,oBAAU;AAAA,QACd;AAAA,MACJ;AAEA,UAAI,YAAY,IAAI,SAAS,YAAY,IAAI,SAAS,UAAU,IAAI,SAAS,cAAc;AACvF,iBAAS,KAAK;AAAA,UACV,MAAM,IAAI;AAAA,UACV;AAAA,QACJ,CAAC;AAAA,MACL;AAAA,IACJ;AAEA,WAAO;AAAA,EACX;AAAA,EAEQ,gBAAgB,QAA8C;AAClE,QAAI;AAEJ,YAAQ,QAAQ;AAAA,MACZ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AACD,kBAAU;AACV;AAAA,MACJ,KAAK;AACD,kBAAU;AACV;AAAA,MACJ;AACI,kBAAU;AAAA,IAClB;AAEA,WAAO,EAAE,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA,EAEQ,WAAW,aAAqB,cAA4C;AAChF,WAAO;AAAA,MACH,aAAa;AAAA,QACT,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MAChB;AAAA,MACA,cAAc;AAAA,QACV,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACf;AAAA,IACJ;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAgE;AACjF,QAAI,CAAC,QAAQ,SAAS,QAAQ,MAAM,WAAW,GAAG;AAC9C,aAAO;AAAA,IACX;AAEA,WAAO,QAAQ,MACV,OAAO,OAAK,EAAE,SAAS,UAAU,EACjC,IAAI,QAAM;AAAA,MACP,MAAM;AAAA,MACN,UAAU;AAAA,QACN,MAAM,EAAE;AAAA,QACR,aAAa,EAAE,eAAe;AAAA,QAC9B,YAAY,EAAE;AAAA,MAClB;AAAA,IACJ,EAAE;AAAA,EACV;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,SAA4G;AAClI,UAAM,KAAK,QAAQ;AACnB,QAAI,CAAC,GAAI,QAAO;AAEhB,YAAQ,GAAG,MAAM;AAAA,MACb,KAAK;AACD,eAAO;AAAA,MACX,KAAK;AACD,eAAO;AAAA,MACX,KAAK;AACD,eAAO;AAAA,MACX,KAAK;AACD,eAAO,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,GAAG,SAAS,EAAE;AAAA,MAC/D;AACI,eAAO;AAAA,IACf;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAA6E;AAC1F,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAC7C,UAAM,QAAQ,KAAK,aAAa,OAAO;AACvC,UAAM,aAAa,KAAK,kBAAkB,OAAO;AAEjD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,QACtB;AAAA,QACA;AAAA,MACJ,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,UAAM,UAAoC,CAAC;AAG3C,QAAI,KAAK,SAAS;AACd,cAAQ,KAAK;AAAA,QACT,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,kBAAkB;AAAA,MACtB,CAAC;AAAA,IACL;AAGA,QAAI,KAAK,cAAc,KAAK,WAAW,SAAS,GAAG;AAC/C,iBAAW,MAAM,KAAK,YAAY;AAC9B,gBAAQ,KAAK;AAAA,UACT,MAAM;AAAA,UACN,YAAY,GAAG;AAAA,UACf,UAAU,GAAG,SAAS;AAAA,UACtB,OAAO,GAAG,SAAS;AAAA,UACnB,kBAAkB;AAAA,QACtB,CAAC;AAAA,MACL;AAAA,IACJ;AAEA,UAAM,WAA8B,CAAC;AAErC,WAAO;AAAA,MACH;AAAA,MACA,cAAc,KAAK,gBAAgB,KAAK,aAAa;AAAA,MACrD,OAAO,KAAK,WAAW,KAAK,MAAM,eAAe,KAAK,MAAM,iBAAiB;AAAA,MAC7E;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,MAAM,SAAS,SAA2E;AACtF,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAC7C,UAAM,QAAQ,KAAK,aAAa,OAAO;AACvC,UAAM,aAAa,KAAK,kBAAkB,OAAO;AACjD,UAAM
,OAAO;AAEb,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,SAAS,OAAO,gBAAgB;AAAA,MACjE,QAAQ;AAAA,MACR,SAAS,KAAK,WAAW;AAAA,MACzB,MAAM,KAAK,UAAU;AAAA,QACjB;AAAA,QACA,OAAO,KAAK;AAAA,QACZ,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,QACtB;AAAA,QACA;AAAA,MACJ,CAAC;AAAA,MACD,QAAQ,QAAQ;AAAA,IACpB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AACd,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,YAAM,IAAI,MAAM,sBAAsB,MAAM,SAAS,SAAS,UAAU,EAAE;AAAA,IAC9E;AAEA,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,uBAAuB;AAAA,IAC3C;AAEA,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,SAAS;AACb,QAAI,cAAc;AAClB,QAAI,eAAe;AACnB,UAAM,aAAa;AACnB,QAAI,UAAU;AAEd,UAAM,SAAS,IAAI,eAA0C;AAAA,MACzD,MAAM,KAAK,YAAY;AACnB,YAAI;AACA,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,cAAI,MAAM;AAEN,gBAAI,SAAS;AACT,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,IAAI;AAAA,cACR,CAAC;AAAA,YACL;AACA,uBAAW,QAAQ;AAAA,cACf,MAAM;AAAA,cACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,cACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,YACpD,CAAC;AACD,uBAAW,MAAM;AACjB;AAAA,UACJ;AAEA,oBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,gBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,mBAAS,MAAM,IAAI,KAAK;AAExB,qBAAW,QAAQ,OAAO;AACtB,gBAAI,KAAK,KAAK,MAAM,GAAI;AACxB,gBAAI,CAAC,KAAK,WAAW,QAAQ,EAAG;AAEhC,kBAAM,OAAO,KAAK,MAAM,CAAC;AACzB,gBAAI,SAAS,UAAU;AACnB,kBAAI,SAAS;AACT,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,gBACR,CAAC;AAAA,cACL;AACA,yBAAW,QAAQ;AAAA,gBACf,MAAM;AAAA,gBACN,cAAc,KAAK,gBAAgB,MAAM;AAAA,gBACzC,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,cACpD,CAAC;AACD,yBAAW,MAAM;AACjB;AAAA,YACJ;AAEA,gBAAI;AACA,oBAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,kBAAI,MAAM,OAAO;AAEb,oBAAI,CAAC,SAAS;AACV,4BAAU;AACV,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AAEA,gCAAgB,KAAK,KAAK,MAAM,MAAM,SAAS,CAAC;AAChD,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO,MAAM;AAAA,gBACjB,CAAC;AAAA,cACL;AAGA,kBAAI,MAAM,cAAc,MAAM,WAAW,SAAS,GAAG;AACjD,2BAAW,MAAM,MAAM,YAAY;AAE/B,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,YAAY,GAAG;AAAA,oBACf,UAAU,GAAG,SAAS;AAAA,oBACtB,OAAO,GAAG,SAAS;AAAA,oBACnB,kBAAkB;AAAA,kBACtB,CAAC;AAAA,gBACL;AAAA,cACJ;AAEA,kBAAI,MAAM,eAAe;AACrB,oBAAI,SAAS;AACT,6BAAW,QAAQ;AAAA,oBACf,MAAM;AAAA,oBACN,IAAI;AAAA,kBACR,CAAC;AAAA,gBACL;AACA,2BAAW,QAAQ;AAAA,kBACf,MAAM;AAAA,kBACN,cAAc,KAAK,gBAAgB,MAAM,aAAa;AAAA,kBACtD,OAAO,KAAK,WAAW,aAAa,YAAY;AAAA,gBACpD,CAAC;AACD,2BAAW,MAAM;AACjB;AAAA,cACJ;AAAA,YACJ,QAAQ;AAAA,YAER;AAAA,UACJ;AAAA,QACJ,SAAS,OAAO;AACZ,qBAAW,MAAM,KAAK;AAAA,QAC1B;AAAA,MACJ;AAAA,MACA,SAAS;AACL,eAAO,OAAO;AAAA,MAClB;AAAA,IACJ,CAAC;AAED,WAAO;AAAA,MACH;AAAA,IACJ;AAAA,EACJ;AACJ;;;ACnYO,SAAS,cAAc,UAAmC,CAAC,GAAoB;AAClF,QAAM,UAAU,QAAQ,WAAW;AACnC,QAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,sGAAsG;AAAA,EAC1H;AAEA,QAAM,cAAc,CAAC,SAAiB,WAAgC,CAAC,MAAM;AACzE,WAAO,IAAI,yBAAyB,SAAS;AAAA,MACzC;AAAA,MACA;AAAA,MACA,SAAS,QAAQ;AAAA,MACjB,GAAG;AAAA,IACP,CAAC;AAAA,EACL;AAEA,QAAM,WAAW,SAAU,SAAiB,UAAgC;AACxE,WAAO,YAAY,SAAS,QAAQ;AAAA,EACxC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACX;AAeO,IAAM,UAA2B,SAAU,SAAiB,UAAgC;AAC/F,QAAM,SAAS,QAAQ,IAAI;AAC3B,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,2GAA2G;AAAA,EAC/H;AACA,SAAO,IAAI,yBAAyB,SAAS;AAAA,IACzC;AAAA,IACA,SAAS;AAAA,IACT,GAAG;AAAA,EACP,CAAC;AACL;AAEA,QAAQ,OAAO;","names":[]}
|
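The new Vercel build above forwards tool definitions to the Cencori `/api/ai/chat` endpoint in an OpenAI-compatible shape (`{ type: 'function', function: { name, description, parameters } }`), alongside `toolChoice`, before streaming back `tool-call` parts. The sketch below isolates that mapping step as reconstructed from the bundled source in the source map; `SdkFunctionTool` is a simplified stand-in for the AI SDK's tool type, and the `getWeather` tool is hypothetical, used only for illustration.

```typescript
// Sketch of the tool-mapping step performed before POST /api/ai/chat.
// Shapes mirror the CencoriTool interface embedded in the bundle above;
// SdkFunctionTool is a simplified stand-in, not the real AI SDK type.
interface CencoriTool {
  type: 'function';
  function: {
    name: string;
    description: string;
    parameters: Record<string, unknown>;
  };
}

interface SdkFunctionTool {
  type: 'function';
  name: string;
  description?: string;
  inputSchema: Record<string, unknown>;
}

function toCencoriTools(tools: SdkFunctionTool[]): CencoriTool[] | undefined {
  if (tools.length === 0) return undefined;
  return tools
    .filter((t) => t.type === 'function')
    .map((t) => ({
      type: 'function' as const,
      function: {
        name: t.name,
        description: t.description ?? '',
        parameters: t.inputSchema,
      },
    }));
}

// Example: one function tool becomes the OpenAI-compatible payload entry.
const payloadTools = toCencoriTools([
  {
    type: 'function',
    name: 'getWeather', // hypothetical tool, for illustration only
    description: 'Get weather for a location',
    inputSchema: { type: 'object', properties: { location: { type: 'string' } } },
  },
]);
console.log(JSON.stringify(payloadTools, null, 2));
```

The TanStack adapter diff that follows applies the same idea on its side: incoming `tools` are converted to this wire format, and streamed `tool_calls` are surfaced as `tool_call` chunks with a `tool_calls` finish reason.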
package/dist/tanstack/index.js
CHANGED
|
@@ -76,6 +76,14 @@ var CencoriTextAdapter = class {
|
|
|
76
76
|
* Stream chat completions from the model
|
|
77
77
|
*/
|
|
78
78
|
async *chatStream(options) {
|
|
79
|
+
const tools = options.tools ? Object.values(options.tools).map((t) => ({
|
|
80
|
+
type: "function",
|
|
81
|
+
function: {
|
|
82
|
+
name: t.name,
|
|
83
|
+
description: t.description,
|
|
84
|
+
parameters: t.inputSchema || {}
|
|
85
|
+
}
|
|
86
|
+
})) : void 0;
|
|
79
87
|
const response = await fetch(`${this.config.baseUrl}/api/ai/chat`, {
|
|
80
88
|
method: "POST",
|
|
81
89
|
headers: {
|
|
@@ -89,7 +97,8 @@ var CencoriTextAdapter = class {
|
|
|
89
97
|
temperature: options.temperature,
|
|
90
98
|
maxTokens: options.maxTokens,
|
|
91
99
|
stream: true,
|
|
92
|
-
userId: options.modelOptions?.userId
|
|
100
|
+
userId: options.modelOptions?.userId,
|
|
101
|
+
tools
|
|
93
102
|
}),
|
|
94
103
|
signal: options.abortController?.signal
|
|
95
104
|
});
|
|
@@ -175,13 +184,33 @@ var CencoriTextAdapter = class {
|
|
|
175
184
|
};
|
|
176
185
|
yield contentChunk;
|
|
177
186
|
}
|
|
187
|
+
if (chunk.tool_calls && chunk.tool_calls.length > 0) {
|
|
188
|
+
for (const tc of chunk.tool_calls) {
|
|
189
|
+
const toolCallChunk = {
|
|
190
|
+
type: "tool_call",
|
|
191
|
+
id: this.generateId(),
|
|
192
|
+
model: this.model,
|
|
193
|
+
timestamp: Date.now(),
|
|
194
|
+
toolCall: {
|
|
195
|
+
id: tc.id,
|
|
196
|
+
type: "function",
|
|
197
|
+
function: {
|
|
198
|
+
name: tc.function.name,
|
|
199
|
+
arguments: tc.function.arguments
|
|
200
|
+
}
|
|
201
|
+
},
|
|
202
|
+
index: 0
|
|
203
|
+
};
|
|
204
|
+
yield toolCallChunk;
|
|
205
|
+
}
|
|
206
|
+
}
|
|
178
207
|
if (chunk.finish_reason) {
|
|
179
208
|
const doneChunk = {
|
|
180
209
|
type: "done",
|
|
181
210
|
id: this.generateId(),
|
|
182
211
|
model: this.model,
|
|
183
212
|
timestamp: Date.now(),
|
|
184
|
-
finishReason: chunk.finish_reason === "stop" ? "stop" : null,
|
|
213
|
+
finishReason: chunk.finish_reason === "tool_calls" ? "tool_calls" : chunk.finish_reason === "stop" ? "stop" : null,
|
|
185
214
|
usage: {
|
|
186
215
|
promptTokens,
|
|
187
216
|
completionTokens,
|