@nordlys-labs/nordlys-ai-provider 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,1055 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var index_exports = {};
+ __export(index_exports, {
+ createNordlys: () => createNordlys,
+ nordlys: () => nordlys
+ });
+ module.exports = __toCommonJS(index_exports);
+
+ // src/nordlys-provider.ts
+ var import_provider4 = require("@ai-sdk/provider");
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
+
+ // src/nordlys-chat-language-model.ts
+ var import_provider3 = require("@ai-sdk/provider");
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_zod2 = require("zod");
+
+ // src/convert-to-nordlys-chat-messages.ts
+ var import_provider = require("@ai-sdk/provider");
+ var import_provider_utils = require("@ai-sdk/provider-utils");
+ function convertToolOutput(output) {
+ switch (output.type) {
+ case "text":
+ case "error-text":
+ return output.value;
+ case "json":
+ case "error-json":
+ return JSON.stringify(output.value);
+ case "content":
+ return JSON.stringify(output.value);
+ case "execution-denied":
+ return "";
+ default:
+ return "";
+ }
+ }
+ function convertToNordlysChatMessages({
+ prompt,
+ systemMessageMode = "system"
+ }) {
+ const messages = [];
+ const warnings = [];
+ for (const { role, content } of prompt) {
+ switch (role) {
+ case "system": {
+ switch (systemMessageMode) {
+ case "system": {
+ messages.push({ role: "system", content });
+ break;
+ }
+ case "developer": {
+ messages.push({ role: "developer", content });
+ break;
+ }
+ case "remove": {
+ warnings.push({
+ type: "other",
+ message: "system messages are removed for this model"
+ });
+ break;
+ }
+ default: {
+ const _exhaustiveCheck = systemMessageMode;
+ throw new Error(
+ `Unsupported system message mode: ${_exhaustiveCheck}`
+ );
+ }
+ }
+ break;
+ }
+ case "user": {
+ if (content.length === 1 && content[0].type === "text") {
+ messages.push({ role: "user", content: content[0].text });
+ break;
+ }
+ messages.push({
+ role: "user",
+ content: content.map((part, index) => {
+ var _a, _b;
+ switch (part.type) {
+ case "text": {
+ return { type: "text", text: part.text };
+ }
+ case "file": {
+ if ((_a = part.mediaType) == null ? void 0 : _a.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "image_url",
+ image_url: {
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${(0, import_provider_utils.convertToBase64)(part.data)}`
+ }
+ };
+ }
+ if (part.mediaType && (part.mediaType === "audio/wav" || part.mediaType === "audio/mp3" || part.mediaType === "audio/mpeg")) {
+ if (part.data instanceof URL) {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: "audio file parts with URLs"
+ });
+ }
+ return {
+ type: "input_audio",
+ input_audio: {
+ data: (0, import_provider_utils.convertToBase64)(part.data),
+ format: part.mediaType === "audio/wav" ? "wav" : "mp3"
+ }
+ };
+ }
+ if (part.mediaType && part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: "PDF file parts with URLs"
+ });
+ }
+ return {
+ type: "file",
+ file: {
+ filename: (_b = part.filename) != null ? _b : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${(0, import_provider_utils.convertToBase64)(part.data)}`
+ }
+ };
+ }
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
+ }
+ default: {
+ throw new Error(`Unsupported content part type`);
+ }
+ }
+ })
+ });
+ break;
+ }
+ case "assistant": {
+ const textParts = [];
+ const reasoningParts = [];
+ const generatedFiles = [];
+ const toolCalls = [];
+ for (const part of content) {
+ switch (part.type) {
+ case "text": {
+ textParts.push(part.text);
+ break;
+ }
+ case "reasoning": {
+ reasoningParts.push(part.text);
+ break;
+ }
+ case "file": {
+ const dataString = typeof part.data === "string" ? part.data : part.data instanceof URL ? (() => {
+ throw new Error(
+ "URL data not supported for generated files"
+ );
+ })() : Buffer.from(part.data).toString("base64");
+ generatedFiles.push({
+ media_type: part.mediaType,
+ data: dataString
+ });
+ break;
+ }
+ case "tool-call": {
+ toolCalls.push({
+ id: part.toolCallId,
+ type: "function",
+ function: {
+ name: part.toolName,
+ arguments: JSON.stringify(part.input)
+ }
+ });
+ break;
+ }
+ }
+ }
+ const text = textParts.join("");
+ const reasoning = reasoningParts.join("");
+ const message = {
+ role: "assistant",
+ content: text,
+ ...toolCalls.length > 0 && { tool_calls: toolCalls },
+ ...reasoning && { reasoning_content: reasoning },
+ ...generatedFiles.length > 0 && { generated_files: generatedFiles }
+ };
+ messages.push(message);
+ break;
+ }
+ case "tool": {
+ for (const toolResponse of content) {
+ if (toolResponse.type === "tool-result") {
+ const contentValue = convertToolOutput(toolResponse.output);
+ if (contentValue) {
+ messages.push({
+ role: "tool",
+ tool_call_id: toolResponse.toolCallId,
+ content: contentValue
+ });
+ }
+ }
+ }
+ break;
+ }
+ default: {
+ const _exhaustiveCheck = role;
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+ }
+ }
+ }
+ return { messages, warnings };
+ }
+
+ // src/get-response-metadata.ts
+ function getResponseMetadata({
+ id,
+ model,
+ created
+ }) {
+ return {
+ id: id != null ? id : void 0,
+ modelId: model != null ? model : void 0,
+ timestamp: created != null ? new Date(created * 1e3) : void 0
+ };
+ }
+
+ // src/map-nordlys-finish-reason.ts
+ function mapNordlysFinishReason(reason) {
+ switch (reason) {
+ case "stop":
+ return { unified: "stop", raw: reason };
+ case "length":
+ return { unified: "length", raw: reason };
+ case "content_filter":
+ return { unified: "content-filter", raw: reason };
+ case "tool_calls":
+ return { unified: "tool-calls", raw: reason };
+ default:
+ return { unified: "other", raw: reason };
+ }
+ }
+
+ // src/nordlys-chat-options.ts
+ var import_v4 = require("zod/v4");
+ var nordlysProviderOptions = import_v4.z.object({
+ /**
+ * Model name (required for API requests).
+ */
+ model: import_v4.z.string().optional(),
+ /**
+ * Modify the likelihood of specified tokens appearing in the completion.
+ */
+ logit_bias: import_v4.z.record(import_v4.z.string(), import_v4.z.number()).optional(),
+ /**
+ * Number of completions to generate for each prompt.
+ */
+ n: import_v4.z.number().optional(),
+ /**
+ * Whether to stream responses.
+ */
+ stream: import_v4.z.boolean().optional(),
+ /**
+ * Unique identifier representing your end-user.
+ */
+ user: import_v4.z.string().optional(),
+ /**
+ * Audio parameter for chat completion.
+ */
+ audio: import_v4.z.object({
+ format: import_v4.z.string().optional(),
+ voice: import_v4.z.string().optional()
+ }).optional(),
+ /**
+ * Whether to return log probabilities of the output tokens.
+ */
+ logprobs: import_v4.z.boolean().optional(),
+ /**
+ * Maximum number of completion tokens.
+ */
+ max_completion_tokens: import_v4.z.number().optional(),
+ /**
+ * Metadata for the request.
+ */
+ metadata: import_v4.z.record(import_v4.z.string(), import_v4.z.string()).optional(),
+ /**
+ * Modalities for the request.
+ */
+ modalities: import_v4.z.array(import_v4.z.string()).optional(),
+ /**
+ * Whether to allow parallel tool calls.
+ */
+ parallel_tool_calls: import_v4.z.boolean().optional(),
+ /**
+ * Prediction content parameter.
+ */
+ prediction: import_v4.z.object({
+ type: import_v4.z.string().optional(),
+ content: import_v4.z.object({
+ OfString: import_v4.z.string().optional(),
+ OfArrayOfContentParts: import_v4.z.array(import_v4.z.object({ type: import_v4.z.literal("text"), text: import_v4.z.string() })).optional()
+ }).optional()
+ }).optional(),
+ /**
+ * Reasoning effort level.
+ */
+ reasoning_effort: import_v4.z.string().optional(),
+ /**
+ * Response format parameter.
+ */
+ response_format: import_v4.z.object({
+ OfText: import_v4.z.object({ type: import_v4.z.string() }).optional(),
+ OfJSONObject: import_v4.z.object({ type: import_v4.z.string() }).optional(),
+ OfJSONSchema: import_v4.z.object({
+ type: import_v4.z.string(),
+ json_schema: import_v4.z.object({
+ name: import_v4.z.string(),
+ schema: import_v4.z.unknown(),
+ description: import_v4.z.string().optional(),
+ strict: import_v4.z.boolean().optional()
+ }).optional()
+ }).optional()
+ }).optional(),
+ /**
+ * Seed for deterministic outputs.
+ */
+ seed: import_v4.z.number().optional(),
+ /**
+ * Service tier to use.
+ */
+ service_tier: import_v4.z.string().optional(),
+ /**
+ * Whether to store the conversation.
+ */
+ store: import_v4.z.boolean().optional(),
+ /**
+ * Number of top logprobs to return.
+ */
+ top_logprobs: import_v4.z.number().optional(),
+ /**
+ * Web search options.
+ */
+ web_search_options: import_v4.z.object({
+ search_context_size: import_v4.z.string().optional(),
+ user_location: import_v4.z.object({
+ type: import_v4.z.string().optional(),
+ approximate: import_v4.z.object({
+ city: import_v4.z.string().optional(),
+ country: import_v4.z.string().optional(),
+ region: import_v4.z.string().optional(),
+ timezone: import_v4.z.string().optional()
+ }).optional()
+ }).optional()
+ }).optional()
+ });
+
+ // src/nordlys-error.ts
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
+ var import_zod = require("zod");
+ var nordlysErrorDataSchema = import_zod.z.object({
+ error: import_zod.z.object({
+ message: import_zod.z.string(),
+ type: import_zod.z.string().nullish(),
+ param: import_zod.z.any().nullish(),
+ code: import_zod.z.union([import_zod.z.string(), import_zod.z.number()]).nullish()
+ })
+ });
+ var nordlysFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
+ errorSchema: nordlysErrorDataSchema,
+ errorToMessage: (data) => data.error.message
+ });
+
+ // src/nordlys-prepare-tools.ts
+ var import_provider2 = require("@ai-sdk/provider");
+ function prepareTools({
+ tools,
+ toolChoice
+ }) {
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
+ const toolWarnings = [];
+ if (tools == null) {
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
+ }
+ const openaiCompatTools = [];
+ for (const tool of tools) {
+ if (tool.type === "provider") {
+ toolWarnings.push({ type: "unsupported", feature: `tool: ${tool.name}` });
+ } else if (tool.type === "function") {
+ openaiCompatTools.push({
+ type: "function",
+ function: {
+ name: tool.name,
+ description: tool.description,
+ parameters: tool.inputSchema
+ }
+ });
+ }
+ }
+ if (toolChoice == null) {
+ return { tools: openaiCompatTools, toolChoice: void 0, toolWarnings };
+ }
+ const type = toolChoice.type;
+ switch (type) {
+ case "auto":
+ case "none":
+ case "required":
+ return { tools: openaiCompatTools, toolChoice: type, toolWarnings };
+ case "tool":
+ return {
+ tools: openaiCompatTools,
+ toolChoice: {
+ type: "function",
+ function: { name: toolChoice.toolName }
+ },
+ toolWarnings
+ };
+ default: {
+ const _exhaustiveCheck = type;
+ throw new import_provider2.UnsupportedFunctionalityError({
+ functionality: `tool choice type: ${_exhaustiveCheck}`
+ });
+ }
+ }
+ }
+
+ // src/nordlys-chat-language-model.ts
+ var nordlysChatResponseSchema = import_zod2.z.object({
+ id: import_zod2.z.string().nullish(),
+ created: import_zod2.z.number().nullish(),
+ model: import_zod2.z.string().nullish(),
+ choices: import_zod2.z.array(
+ import_zod2.z.object({
+ message: import_zod2.z.object({
+ role: import_zod2.z.enum(["assistant", ""]).nullish(),
+ content: import_zod2.z.string().nullish(),
+ tool_calls: import_zod2.z.array(
+ import_zod2.z.object({
+ id: import_zod2.z.string().nullish(),
+ type: import_zod2.z.literal("function"),
+ function: import_zod2.z.object({
+ name: import_zod2.z.string(),
+ arguments: import_zod2.z.string()
+ })
+ })
+ ).nullish(),
+ reasoning_content: import_zod2.z.string().optional(),
+ generated_files: import_zod2.z.array(
+ import_zod2.z.object({
+ media_type: import_zod2.z.string(),
+ data: import_zod2.z.string()
+ })
+ ).optional()
+ }),
+ index: import_zod2.z.number(),
+ logprobs: import_zod2.z.object({
+ content: import_zod2.z.array(
+ import_zod2.z.object({
+ token: import_zod2.z.string(),
+ logprob: import_zod2.z.number(),
+ top_logprobs: import_zod2.z.array(
+ import_zod2.z.object({
+ token: import_zod2.z.string(),
+ logprob: import_zod2.z.number()
+ })
+ )
+ })
+ ).nullish()
+ }).nullish(),
+ finish_reason: import_zod2.z.string().nullish()
+ })
+ ).optional(),
+ usage: import_zod2.z.object({
+ completion_tokens: import_zod2.z.number(),
+ prompt_tokens: import_zod2.z.number(),
+ total_tokens: import_zod2.z.number(),
+ reasoning_tokens: import_zod2.z.number().optional(),
+ cached_input_tokens: import_zod2.z.number().optional()
+ }).optional(),
+ system_fingerprint: import_zod2.z.string().optional(),
+ service_tier: import_zod2.z.string().optional(),
+ provider: import_zod2.z.string().optional(),
+ error: import_zod2.z.object({
+ message: import_zod2.z.string(),
+ type: import_zod2.z.string(),
+ param: import_zod2.z.any().nullish(),
+ code: import_zod2.z.any().nullish()
+ }).optional()
+ });
+ var nordlysChatChunkSchema = import_zod2.z.union([
+ import_zod2.z.object({
+ id: import_zod2.z.string().nullish(),
+ created: import_zod2.z.number().nullish(),
+ model: import_zod2.z.string().nullish(),
+ choices: import_zod2.z.array(
+ import_zod2.z.object({
+ delta: import_zod2.z.object({
+ role: import_zod2.z.enum(["assistant", ""]).nullish(),
+ content: import_zod2.z.string().nullish(),
+ tool_calls: import_zod2.z.array(
+ import_zod2.z.object({
+ index: import_zod2.z.number(),
+ id: import_zod2.z.string().nullish(),
+ type: import_zod2.z.union([import_zod2.z.literal("function"), import_zod2.z.literal("")]).nullish(),
+ function: import_zod2.z.object({
+ name: import_zod2.z.string().nullish(),
+ arguments: import_zod2.z.string().nullish()
+ })
+ })
+ ).nullish(),
+ reasoning_content: import_zod2.z.string().optional(),
+ generated_files: import_zod2.z.array(
+ import_zod2.z.object({
+ media_type: import_zod2.z.string(),
+ data: import_zod2.z.string()
+ })
+ ).optional()
+ }).nullish(),
+ logprobs: import_zod2.z.object({
+ content: import_zod2.z.array(
+ import_zod2.z.object({
+ token: import_zod2.z.string(),
+ logprob: import_zod2.z.number(),
+ top_logprobs: import_zod2.z.array(
+ import_zod2.z.object({
+ token: import_zod2.z.string(),
+ logprob: import_zod2.z.number()
+ })
+ )
+ })
+ ).nullish()
+ }).nullish(),
+ finish_reason: import_zod2.z.string().nullish(),
+ index: import_zod2.z.number()
+ })
+ ),
+ usage: import_zod2.z.object({
+ completion_tokens: import_zod2.z.number(),
+ prompt_tokens: import_zod2.z.number(),
+ total_tokens: import_zod2.z.number(),
+ reasoning_tokens: import_zod2.z.number().optional(),
+ cached_input_tokens: import_zod2.z.number().optional()
+ }).optional(),
+ provider: import_zod2.z.string().optional(),
+ service_tier: import_zod2.z.string().optional(),
+ system_fingerprint: import_zod2.z.string().optional()
+ }),
+ import_zod2.z.object({
+ error: import_zod2.z.object({
+ message: import_zod2.z.string(),
+ type: import_zod2.z.string(),
+ param: import_zod2.z.any().nullish(),
+ code: import_zod2.z.any().nullish()
+ }),
+ provider: import_zod2.z.string().optional()
+ })
+ ]);
+ var NordlysChatLanguageModel = class {
+ constructor(modelId, config) {
+ this.specificationVersion = "v3";
+ this.supportedUrls = {
+ "application/pdf": [/^https:\/\/.*$/]
+ };
+ this.modelId = modelId;
+ this.config = config;
+ }
+ get provider() {
+ return this.config.provider;
+ }
+ async getArgs({
+ prompt,
+ maxOutputTokens,
+ temperature,
+ topP,
+ topK,
+ frequencyPenalty,
+ presencePenalty,
+ stopSequences,
+ responseFormat,
+ providerOptions,
+ tools,
+ toolChoice
+ }) {
+ const warnings = [];
+ if (topK != null) {
+ warnings.push({ type: "unsupported", feature: "topK" });
+ }
+ if (responseFormat != null) {
+ warnings.push({ type: "unsupported", feature: "responseFormat" });
+ }
+ const result = nordlysProviderOptions.safeParse(providerOptions != null ? providerOptions : {});
+ const nordlysOptions = result.success ? result.data : {};
+ const {
+ tools: nordlysTools,
+ toolChoice: nordlysToolChoice,
+ toolWarnings
+ } = prepareTools({
+ tools,
+ toolChoice
+ });
+ warnings.push(...toolWarnings);
+ const { messages, warnings: messageWarnings } = convertToNordlysChatMessages({ prompt });
+ warnings.push(...messageWarnings);
+ const standardizedArgs = {
+ messages,
+ model: this.modelId,
+ max_tokens: typeof maxOutputTokens === "number" ? maxOutputTokens : void 0,
+ max_completion_tokens: nordlysOptions.max_completion_tokens,
+ temperature,
+ top_p: topP,
+ stop: stopSequences,
+ presence_penalty: presencePenalty,
+ frequency_penalty: frequencyPenalty,
+ user: nordlysOptions.user,
+ tools: nordlysTools,
+ tool_choice: nordlysToolChoice
+ };
+ const args = {
+ ...standardizedArgs,
+ ...nordlysOptions.logit_bias ? { logit_bias: nordlysOptions.logit_bias } : {},
+ ...nordlysOptions.audio ? { audio: nordlysOptions.audio } : {},
+ ...nordlysOptions.logprobs !== void 0 ? { logprobs: nordlysOptions.logprobs } : {},
+ ...nordlysOptions.metadata ? { metadata: nordlysOptions.metadata } : {},
+ ...nordlysOptions.modalities ? { modalities: nordlysOptions.modalities } : {},
+ ...nordlysOptions.parallel_tool_calls !== void 0 ? { parallel_tool_calls: nordlysOptions.parallel_tool_calls } : {},
+ ...nordlysOptions.prediction ? { prediction: nordlysOptions.prediction } : {},
+ ...nordlysOptions.reasoning_effort ? { reasoning_effort: nordlysOptions.reasoning_effort } : {},
+ ...nordlysOptions.response_format ? { response_format: nordlysOptions.response_format } : {},
+ ...nordlysOptions.seed !== void 0 ? { seed: nordlysOptions.seed } : {},
+ ...nordlysOptions.service_tier ? { service_tier: nordlysOptions.service_tier } : {},
+ ...nordlysOptions.store !== void 0 ? { store: nordlysOptions.store } : {},
+ ...nordlysOptions.top_logprobs !== void 0 ? { top_logprobs: nordlysOptions.top_logprobs } : {},
+ ...nordlysOptions.web_search_options ? { web_search_options: nordlysOptions.web_search_options } : {}
+ };
+ return {
+ args,
+ warnings
+ };
+ }
+ async doGenerate(options) {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+ const { args: body, warnings } = await this.getArgs(options);
+ const { responseHeaders, value, rawValue } = await (0, import_provider_utils3.postJsonToApi)({
+ url: `${this.config.baseURL}/chat/completions`,
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ body,
+ failedResponseHandler: nordlysFailedResponseHandler,
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
+ nordlysChatResponseSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ if (!value) {
+ throw new Error("Failed to parse Nordlys API response");
+ }
+ if (value.error) {
+ throw new Error(`Nordlys API Error: ${value.error.message}`);
+ }
+ if (!value.choices || value.choices.length === 0) {
+ throw new Error("No choices returned from Nordlys API");
+ }
+ const choice = value.choices[0];
+ const content = [];
+ if ((_a = choice.message) == null ? void 0 : _a.content) {
+ content.push({ type: "text", text: choice.message.content });
+ }
+ if ((_b = choice.message) == null ? void 0 : _b.reasoning_content) {
+ content.push({
+ type: "reasoning",
+ text: choice.message.reasoning_content
+ });
+ }
+ if (((_c = choice.message) == null ? void 0 : _c.generated_files) && choice.message.generated_files.length > 0) {
+ for (const file of choice.message.generated_files) {
+ content.push({
+ type: "file",
+ mediaType: file.media_type,
+ data: file.data
+ });
+ }
+ }
+ if (((_d = choice.message) == null ? void 0 : _d.tool_calls) && choice.message.tool_calls.length > 0) {
+ for (const toolCall of choice.message.tool_calls) {
+ content.push({
+ type: "tool-call",
+ toolCallId: toolCall.id || "",
+ toolName: ((_e = toolCall.function) == null ? void 0 : _e.name) || "",
+ input: ((_f = toolCall.function) == null ? void 0 : _f.arguments) || "{}"
+ });
+ }
+ }
+ const {
+ prompt_tokens,
+ completion_tokens,
+ reasoning_tokens,
+ cached_input_tokens
+ } = (_g = value.usage) != null ? _g : {};
+ return {
+ content,
+ finishReason: choice.finish_reason ? mapNordlysFinishReason(choice.finish_reason) : { unified: "stop", raw: void 0 },
+ usage: value.usage && prompt_tokens != null ? {
+ inputTokens: {
+ total: prompt_tokens,
+ noCache: cached_input_tokens != null ? prompt_tokens - cached_input_tokens : void 0,
+ cacheRead: cached_input_tokens,
+ cacheWrite: void 0
+ },
+ outputTokens: {
+ total: completion_tokens,
+ text: completion_tokens != null && reasoning_tokens != null ? completion_tokens - reasoning_tokens : void 0,
+ reasoning: reasoning_tokens
+ }
+ } : {
+ inputTokens: {
+ total: 0,
+ noCache: void 0,
+ cacheRead: void 0,
+ cacheWrite: void 0
+ },
+ outputTokens: { total: 0, text: void 0, reasoning: void 0 }
+ },
+ providerMetadata: value.provider ? {
+ nordlys: {
+ provider: value.provider,
+ service_tier: value.service_tier,
+ system_fingerprint: value.system_fingerprint
+ }
+ } : void 0,
+ request: { body },
+ response: {
+ id: (_h = value.id) != null ? _h : "",
+ modelId: (_i = value.model) != null ? _i : "",
+ timestamp: new Date(((_j = value.created) != null ? _j : 0) * 1e3),
+ headers: responseHeaders,
+ body: rawValue
+ },
+ warnings
+ };
+ }
+ async doStream(options) {
+ const { args, warnings } = await this.getArgs(options);
+ const body = {
+ ...args,
+ stream: true,
+ stream_options: { include_usage: true }
+ };
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+ url: `${this.config.baseURL}/chat/completions`,
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ body,
+ failedResponseHandler: nordlysFailedResponseHandler,
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
+ nordlysChatChunkSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ const toolCalls = [];
+ const state = {
+ finishReason: { unified: "other", raw: void 0 },
+ usage: {
+ inputTokens: {
+ total: void 0,
+ noCache: void 0,
+ cacheRead: void 0,
+ cacheWrite: void 0
+ },
+ outputTokens: {
+ total: void 0,
+ text: void 0,
+ reasoning: void 0
+ }
+ },
+ isFirstChunk: true,
+ isActiveText: false,
+ provider: void 0,
+ serviceTier: void 0,
+ systemFingerprint: void 0
+ };
+ return {
+ stream: response.pipeThrough(
+ new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
+ async transform(chunk, controller) {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+ if (!chunk.success) {
+ state.finishReason = { unified: "error", raw: void 0 };
+ controller.enqueue({ type: "error", error: chunk.error });
+ return;
+ }
+ const value = chunk.value;
+ if ("error" in value) {
+ state.finishReason = { unified: "error", raw: void 0 };
+ controller.enqueue({
+ type: "error",
+ error: new Error(value.error.message)
+ });
+ return;
+ }
+ if (state.isFirstChunk) {
+ state.isFirstChunk = false;
+ controller.enqueue({
+ type: "response-metadata",
+ ...getResponseMetadata({
+ id: (_a = value.id) != null ? _a : "",
+ model: (_b = value.model) != null ? _b : "",
+ created: (_c = value.created) != null ? _c : 0
+ })
+ });
+ }
+ if (value.usage != null) {
+ state.usage.inputTokens.total = (_d = value.usage.prompt_tokens) != null ? _d : void 0;
+ state.usage.inputTokens.cacheRead = (_e = value.usage.cached_input_tokens) != null ? _e : void 0;
+ state.usage.inputTokens.noCache = value.usage.prompt_tokens != null && value.usage.cached_input_tokens != null ? value.usage.prompt_tokens - value.usage.cached_input_tokens : void 0;
+ state.usage.outputTokens.total = (_f = value.usage.completion_tokens) != null ? _f : void 0;
+ state.usage.outputTokens.reasoning = (_g = value.usage.reasoning_tokens) != null ? _g : void 0;
+ state.usage.outputTokens.text = value.usage.completion_tokens != null && value.usage.reasoning_tokens != null ? value.usage.completion_tokens - value.usage.reasoning_tokens : void 0;
+ }
+ if (value.provider) {
+ state.provider = value.provider;
+ }
+ if (value.service_tier) {
+ state.serviceTier = value.service_tier;
+ }
+ if (value.system_fingerprint) {
+ state.systemFingerprint = value.system_fingerprint;
+ }
+ const choice = value.choices[0];
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
+ state.finishReason = mapNordlysFinishReason(choice.finish_reason);
+ }
+ if (!(choice == null ? void 0 : choice.delta)) {
+ return;
+ }
+ const delta = choice.delta;
+ if (delta.content != null) {
+ if (!state.isActiveText) {
+ controller.enqueue({ type: "text-start", id: "text-1" });
+ state.isActiveText = true;
+ }
+ controller.enqueue({
+ type: "text-delta",
+ id: "text-1",
+ delta: delta.content
+ });
+ }
+ if (delta.reasoning_content != null) {
+ controller.enqueue({
+ type: "reasoning-delta",
+ id: "reasoning-1",
+ delta: delta.reasoning_content
+ });
+ }
+ if (delta.generated_files != null && Array.isArray(delta.generated_files)) {
+ for (const file of delta.generated_files) {
+ controller.enqueue({
+ type: "file",
+ mediaType: file.media_type,
+ data: file.data
+ });
+ }
+ }
+ if (delta.tool_calls != null && Array.isArray(delta.tool_calls)) {
+ for (const toolCallDelta of delta.tool_calls) {
+ const index = toolCallDelta.index;
+ if (toolCalls[index] == null) {
+ if (toolCallDelta.type !== "function" && toolCallDelta.type !== "") {
+ throw new import_provider3.InvalidResponseDataError({
+ data: toolCallDelta,
+ message: `Expected 'function' type.`
+ });
+ }
+ if (toolCallDelta.id == null) {
+ throw new import_provider3.InvalidResponseDataError({
+ data: toolCallDelta,
+ message: `Expected 'id' to be a string.`
+ });
+ }
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.name) == null) {
+ throw new import_provider3.InvalidResponseDataError({
+ data: toolCallDelta,
+ message: `Expected 'function.name' to be a string.`
+ });
+ }
+ controller.enqueue({
+ type: "tool-input-start",
+ id: toolCallDelta.id,
+ toolName: toolCallDelta.function.name
+ });
+ toolCalls[index] = {
+ id: toolCallDelta.id,
+ type: "function",
+ function: {
+ name: toolCallDelta.function.name,
+ arguments: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+ },
+ hasFinished: false
+ };
+ const toolCall2 = toolCalls[index];
+ if (((_j = toolCall2.function) == null ? void 0 : _j.name) != null && ((_k = toolCall2.function) == null ? void 0 : _k.arguments) != null) {
+ if (toolCall2.function.arguments.length > 0) {
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCall2.id,
+ delta: toolCall2.function.arguments
+ });
+ }
+ if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
+ controller.enqueue({
+ type: "tool-input-end",
+ id: toolCall2.id
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: (_l = toolCall2.id) != null ? _l : (0, import_provider_utils3.generateId)(),
+ toolName: toolCall2.function.name,
+ input: toolCall2.function.arguments
+ });
+ toolCall2.hasFinished = true;
+ }
+ }
+ continue;
+ }
+ const toolCall = toolCalls[index];
+ if (toolCall.hasFinished) {
+ continue;
+ }
+ if (((_m = toolCallDelta.function) == null ? void 0 : _m.arguments) != null) {
+ toolCall.function.arguments += (_o = (_n = toolCallDelta.function) == null ? void 0 : _n.arguments) != null ? _o : "";
+ }
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCall.id,
+ delta: (_p = toolCallDelta.function.arguments) != null ? _p : ""
+ });
+ if (((_q = toolCall.function) == null ? void 0 : _q.name) != null && ((_r = toolCall.function) == null ? void 0 : _r.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+ controller.enqueue({
+ type: "tool-input-end",
+ id: toolCall.id
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: (_s = toolCall.id) != null ? _s : (0, import_provider_utils3.generateId)(),
+ toolName: toolCall.function.name,
+ input: toolCall.function.arguments
+ });
+ toolCall.hasFinished = true;
+ }
+ }
+ }
+ },
+ flush(controller) {
+ var _a, _b;
+ if (state.isActiveText) {
+ controller.enqueue({ type: "text-end", id: "text-1" });
+ }
+ controller.enqueue({
+ type: "finish",
+ finishReason: (_a = state.finishReason) != null ? _a : {
+ unified: "stop",
+ raw: void 0
+ },
+ usage: (_b = state.usage) != null ? _b : {
+ inputTokens: {
+ total: 0,
+ noCache: void 0,
+ cacheRead: void 0,
+ cacheWrite: void 0
+ },
+ outputTokens: {
+ total: 0,
+ text: void 0,
+ reasoning: void 0
+ }
+ },
+ providerMetadata: state.provider || state.serviceTier || state.systemFingerprint ? {
+ nordlys: {
+ provider: state.provider,
+ service_tier: state.serviceTier,
+ system_fingerprint: state.systemFingerprint
+ }
+ } : void 0
+ });
+ }
+ })
+ ),
+ request: { body },
+ response: {
+ headers: responseHeaders
+ }
+ };
+ }
+ };
+
+ // src/nordlys-provider.ts
+ function createNordlys(options = {}) {
+ var _a;
+ const baseURL = (_a = (0, import_provider_utils4.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://backend.mangoplant-a7a21605.swedencentral.azurecontainerapps.io/v1";
+ const getHeaders = () => ({
+ Authorization: `Bearer ${(0, import_provider_utils4.loadApiKey)({
+ apiKey: options.apiKey,
+ environmentVariableName: "NORDLYS_API_KEY",
+ description: "Nordlys"
+ })}`,
+ "Content-Type": "application/json",
+ ...options.headers
+ });
+ const createChatModel = (modelId) => new NordlysChatLanguageModel(modelId, {
+ provider: "nordlys.chat",
+ baseURL,
+ headers: getHeaders,
+ fetch: options.fetch
+ });
+ const provider = function(modelId) {
+ if (new.target) {
+ throw new Error(
+ "The Nordlys model function cannot be called with the new keyword."
+ );
+ }
+ return createChatModel(modelId);
+ };
+ provider.languageModel = createChatModel;
+ provider.chat = createChatModel;
+ provider.embeddingModel = (modelId) => {
+ throw new import_provider4.NoSuchModelError({ modelId, modelType: "embeddingModel" });
+ };
+ provider.imageModel = (modelId) => {
+ throw new import_provider4.NoSuchModelError({ modelId, modelType: "imageModel" });
+ };
+ provider.specificationVersion = "v3";
+ return Object.freeze(provider);
+ }
+ var nordlys = createNordlys();
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ createNordlys,
+ nordlys
+ });
+ //# sourceMappingURL=index.cjs.map
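The bundle above exposes two entry points, createNordlys and a default nordlys provider instance, whose chat models follow the AI SDK v3 language-model specification. The sketch below is not part of the package contents; it is a minimal usage illustration under assumptions: it assumes the Vercel AI SDK ("ai" package) is installed alongside this provider, the model id "nordlys-chat-1" and the custom base URL are placeholders rather than documented values, and the API key is read from NORDLYS_API_KEY (per loadApiKey above) unless passed explicitly.

// usage-sketch.ts -- illustrative only, not shipped with the package
import { generateText, streamText } from "ai";
import { createNordlys, nordlys } from "@nordlys-labs/nordlys-ai-provider";

// Default instance: reads the API key from the NORDLYS_API_KEY environment variable.
const { text } = await generateText({
  model: nordlys("nordlys-chat-1"), // placeholder model id (assumption)
  prompt: "Write a haiku about the northern lights.",
});
console.log(text);

// Custom instance: override apiKey, baseURL, headers, or fetch (the options
// accepted by createNordlys in the bundle above).
const customNordlys = createNordlys({
  apiKey: process.env.MY_NORDLYS_KEY, // your own env var name (assumption)
  baseURL: "https://example.com/v1", // placeholder URL
});

// Streaming goes through doStream; text deltas arrive on result.textStream.
const result = streamText({
  model: customNordlys.chat("nordlys-chat-1"), // provider.chat === provider.languageModel
  prompt: "Stream a short story about the aurora.",
});
for await (const delta of result.textStream) {
  process.stdout.write(delta);
}

Because the provider is frozen and guards against new, it is always invoked as a plain function (nordlys("model-id")); embeddingModel and imageModel deliberately throw NoSuchModelError, so only chat models are usable in this release.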