@juspay/neurolink 7.45.0 → 7.47.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/adapters/providerImageAdapter.js +12 -0
- package/dist/cli/commands/config.d.ts +2 -2
- package/dist/core/constants.js +1 -1
- package/dist/evaluation/contextBuilder.d.ts +48 -0
- package/dist/evaluation/contextBuilder.js +134 -0
- package/dist/evaluation/index.d.ts +36 -0
- package/dist/evaluation/index.js +61 -0
- package/dist/evaluation/prompts.d.ts +22 -0
- package/dist/evaluation/prompts.js +73 -0
- package/dist/evaluation/ragasEvaluator.d.ts +28 -0
- package/dist/evaluation/ragasEvaluator.js +90 -0
- package/dist/evaluation/retryManager.d.ts +40 -0
- package/dist/evaluation/retryManager.js +78 -0
- package/dist/evaluation/scoring.d.ts +16 -0
- package/dist/evaluation/scoring.js +35 -0
- package/dist/lib/adapters/providerImageAdapter.js +12 -0
- package/dist/lib/core/constants.js +1 -1
- package/dist/lib/evaluation/contextBuilder.d.ts +48 -0
- package/dist/lib/evaluation/contextBuilder.js +134 -0
- package/dist/lib/evaluation/index.d.ts +36 -0
- package/dist/lib/evaluation/index.js +61 -0
- package/dist/lib/evaluation/prompts.d.ts +22 -0
- package/dist/lib/evaluation/prompts.js +73 -0
- package/dist/lib/evaluation/ragasEvaluator.d.ts +28 -0
- package/dist/lib/evaluation/ragasEvaluator.js +90 -0
- package/dist/lib/evaluation/retryManager.d.ts +40 -0
- package/dist/lib/evaluation/retryManager.js +78 -0
- package/dist/lib/evaluation/scoring.d.ts +16 -0
- package/dist/lib/evaluation/scoring.js +35 -0
- package/dist/lib/middleware/builtin/autoEvaluation.d.ts +14 -0
- package/dist/lib/middleware/builtin/autoEvaluation.js +181 -0
- package/dist/lib/middleware/factory.js +6 -0
- package/dist/lib/providers/azureOpenai.js +36 -3
- package/dist/lib/providers/googleAiStudio.js +37 -3
- package/dist/lib/providers/googleVertex.js +37 -3
- package/dist/lib/types/evaluation.d.ts +2 -0
- package/dist/lib/types/evaluationTypes.d.ts +142 -0
- package/dist/lib/types/evaluationTypes.js +1 -0
- package/dist/lib/types/middlewareTypes.d.ts +28 -2
- package/dist/lib/utils/imageProcessor.d.ts +44 -0
- package/dist/lib/utils/imageProcessor.js +159 -8
- package/dist/lib/utils/messageBuilder.d.ts +4 -6
- package/dist/lib/utils/messageBuilder.js +145 -1
- package/dist/middleware/builtin/autoEvaluation.d.ts +14 -0
- package/dist/middleware/builtin/autoEvaluation.js +181 -0
- package/dist/middleware/factory.js +6 -0
- package/dist/providers/azureOpenai.js +36 -3
- package/dist/providers/googleAiStudio.js +37 -3
- package/dist/providers/googleVertex.js +37 -3
- package/dist/types/evaluation.d.ts +2 -0
- package/dist/types/evaluationTypes.d.ts +142 -0
- package/dist/types/evaluationTypes.js +1 -0
- package/dist/types/middlewareTypes.d.ts +28 -2
- package/dist/utils/imageProcessor.d.ts +44 -0
- package/dist/utils/imageProcessor.js +159 -8
- package/dist/utils/messageBuilder.d.ts +4 -6
- package/dist/utils/messageBuilder.js +145 -1
- package/package.json +1 -1
@@ -151,6 +151,8 @@ export class ImageProcessor {
             bmp: "image/bmp",
             tiff: "image/tiff",
             tif: "image/tiff",
+            svg: "image/svg+xml",
+            avif: "image/avif",
         };
         return imageTypes[extension || ""] || "image/jpeg";
     }
@@ -183,6 +185,21 @@ export class ImageProcessor {
                     return "image/webp";
                 }
             }
+            // SVG: check for "<svg" or "<?xml" at start (text-based)
+            if (input.length >= 4) {
+                const start = input.subarray(0, 4).toString();
+                if (start === "<svg" || start === "<?xm") {
+                    return "image/svg+xml";
+                }
+            }
+            // AVIF: check for "ftypavif" signature at bytes 4-11
+            if (input.length >= 12) {
+                const ftyp = input.subarray(4, 8).toString();
+                const brand = input.subarray(8, 12).toString();
+                if (ftyp === "ftyp" && brand === "avif") {
+                    return "image/avif";
+                }
+            }
         }
         return "image/jpeg"; // Default fallback
     }
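
As a rough illustration of the new detection branches above (a sketch only; `ImageProcessor` is assumed to be importable from the package's built `utils/imageProcessor.js`, and the sample buffers are fabricated):

```js
// Hypothetical usage of the SVG/AVIF branches added to ImageProcessor.detectImageType.
import { ImageProcessor } from "@juspay/neurolink/dist/utils/imageProcessor.js"; // path is an assumption

// SVG is text-based: the buffer starts with "<svg" (or "<?xm" for an XML prolog).
const svgBuffer = Buffer.from('<svg xmlns="http://www.w3.org/2000/svg"></svg>');
console.log(ImageProcessor.detectImageType(svgBuffer)); // "image/svg+xml"

// AVIF: a 4-byte box size, then "ftyp" at bytes 4-7 and the brand "avif" at bytes 8-11.
const avifHeader = Buffer.concat([
  Buffer.from([0x00, 0x00, 0x00, 0x1c]),
  Buffer.from("ftypavif"),
  Buffer.alloc(16),
]);
console.log(ImageProcessor.detectImageType(avifHeader)); // "image/avif"
```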
@@ -217,6 +234,8 @@ export class ImageProcessor {
             "image/webp",
             "image/bmp",
             "image/tiff",
+            "image/svg+xml",
+            "image/avif",
         ];
         return supportedFormats.includes(mediaType.toLowerCase());
     }
@@ -332,14 +351,7 @@ export const imageUtils = {
     /**
      * Check if a string is base64 encoded
      */
-    isBase64: (str) => {
-        try {
-            return btoa(atob(str)) === str;
-        }
-        catch {
-            return false;
-        }
-    },
+    isBase64: (str) => imageUtils.isValidBase64(str),
     /**
      * Extract file extension from filename or URL
      */
@@ -359,4 +371,143 @@ export const imageUtils = {
         const i = Math.floor(Math.log(bytes) / Math.log(k));
         return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
     },
+    /**
+     * Convert Buffer to base64 string
+     */
+    bufferToBase64: (buffer) => {
+        return buffer.toString("base64");
+    },
+    /**
+     * Convert base64 string to Buffer
+     */
+    base64ToBuffer: (base64) => {
+        // Remove data URI prefix if present
+        const cleanBase64 = base64.includes(",") ? base64.split(",")[1] : base64;
+        return Buffer.from(cleanBase64, "base64");
+    },
+    /**
+     * Convert file path to base64 data URI
+     */
+    fileToBase64DataUri: async (filePath, maxBytes = 10 * 1024 * 1024) => {
+        try {
+            const fs = await import("fs/promises");
+            // File existence and type validation
+            const stat = await fs.stat(filePath);
+            if (!stat.isFile()) {
+                throw new Error("Not a file");
+            }
+            // Size check before reading - prevent memory exhaustion
+            if (stat.size > maxBytes) {
+                throw new Error(`File too large: ${stat.size} bytes (max: ${maxBytes} bytes)`);
+            }
+            const buffer = await fs.readFile(filePath);
+            // Enhanced MIME detection: try buffer content first, fallback to filename
+            const mimeType = ImageProcessor.detectImageType(buffer) ||
+                ImageProcessor.detectImageType(filePath);
+            const base64 = buffer.toString("base64");
+            return `data:${mimeType};base64,${base64}`;
+        }
+        catch (error) {
+            throw new Error(`Failed to convert file to base64: ${error instanceof Error ? error.message : "Unknown error"}`);
+        }
+    },
+    /**
+     * Convert URL to base64 data URI by downloading the image
+     */
+    urlToBase64DataUri: async (url, { timeoutMs = 15000, maxBytes = 10 * 1024 * 1024 } = {}) => {
+        try {
+            // Basic protocol whitelist
+            if (!/^https?:\/\//i.test(url)) {
+                throw new Error("Unsupported protocol");
+            }
+            const controller = new AbortController();
+            const t = setTimeout(() => controller.abort(), timeoutMs);
+            try {
+                const response = await fetch(url, { signal: controller.signal });
+                if (!response.ok) {
+                    throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+                }
+                const contentType = response.headers.get("content-type") || "";
+                if (!/^image\//i.test(contentType)) {
+                    throw new Error(`Unsupported content-type: ${contentType || "unknown"}`);
+                }
+                const len = Number(response.headers.get("content-length") || 0);
+                if (len && len > maxBytes) {
+                    throw new Error(`Content too large: ${len} bytes`);
+                }
+                const buffer = await response.arrayBuffer();
+                if (buffer.byteLength > maxBytes) {
+                    throw new Error(`Downloaded content too large: ${buffer.byteLength} bytes`);
+                }
+                const base64 = Buffer.from(buffer).toString("base64");
+                return `data:${contentType || "image/jpeg"};base64,${base64}`;
+            }
+            finally {
+                clearTimeout(t);
+            }
+        }
+        catch (error) {
+            throw new Error(`Failed to download and convert URL to base64: ${error instanceof Error ? error.message : "Unknown error"}`);
+        }
+    },
+    /**
+     * Extract base64 data from data URI
+     */
+    extractBase64FromDataUri: (dataUri) => {
+        if (!dataUri.includes(",")) {
+            return dataUri; // Already just base64
+        }
+        return dataUri.split(",")[1];
+    },
+    /**
+     * Extract MIME type from data URI
+     */
+    extractMimeTypeFromDataUri: (dataUri) => {
+        const match = dataUri.match(/^data:([^;]+);base64,/);
+        return match ? match[1] : "image/jpeg";
+    },
+    /**
+     * Create data URI from base64 and MIME type
+     */
+    createDataUri: (base64, mimeType = "image/jpeg") => {
+        // Remove data URI prefix if already present
+        const cleanBase64 = base64.includes(",") ? base64.split(",")[1] : base64;
+        return `data:${mimeType};base64,${cleanBase64}`;
+    },
+    /**
+     * Validate base64 string format
+     */
+    isValidBase64: (str) => {
+        try {
+            // Remove data URI prefix if present
+            const cleanBase64 = str.includes(",") ? str.split(",")[1] : str;
+            // Check if it's valid base64
+            const decoded = Buffer.from(cleanBase64, "base64");
+            const reencoded = decoded.toString("base64");
+            // Remove padding for comparison (base64 can have different padding)
+            const normalizeBase64 = (b64) => b64.replace(/=+$/, "");
+            return normalizeBase64(cleanBase64) === normalizeBase64(reencoded);
+        }
+        catch {
+            return false;
+        }
+    },
+    /**
+     * Get base64 string size in bytes
+     */
+    getBase64Size: (base64) => {
+        // Remove data URI prefix if present
+        const cleanBase64 = base64.includes(",") ? base64.split(",")[1] : base64;
+        return Buffer.byteLength(cleanBase64, "base64");
+    },
+    /**
+     * Compress base64 image by reducing quality (basic implementation)
+     * Note: This is a placeholder - for production use, consider using sharp or similar
+     */
+    compressBase64: (base64, _quality = 0.8) => {
+        // This is a basic implementation that just returns the original
+        // In a real implementation, you'd use an image processing library
+        logger.warn("Base64 compression not implemented - returning original");
+        return base64;
+    },
 };
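
For orientation, a minimal usage sketch of the `imageUtils` helpers added above (the import path and export shape are assumptions; the same code ships under both `dist/` and `dist/lib/`):

```js
// Hypothetical usage of the new imageUtils helpers; limits shown are the defaults from the diff.
import { imageUtils } from "@juspay/neurolink/dist/utils/imageProcessor.js"; // path is an assumption

// Local file -> data URI; rejects non-files and anything over maxBytes (default 10 MB).
const fromFile = await imageUtils.fileToBase64DataUri("./photo.png");

// Remote URL -> data URI; http(s) only, image/* content-type required, 15 s timeout by default.
const fromUrl = await imageUtils.urlToBase64DataUri("https://example.com/cat.jpg", {
  timeoutMs: 5000,
  maxBytes: 5 * 1024 * 1024,
});

// Round-trip helpers.
const mime = imageUtils.extractMimeTypeFromDataUri(fromFile); // e.g. "image/png"
const base64 = imageUtils.extractBase64FromDataUri(fromFile);
console.log(imageUtils.isValidBase64(base64), imageUtils.getBase64Size(base64), mime);
```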
@@ -7,13 +7,12 @@ import type { MultimodalChatMessage } from "../types/conversation.js";
 import type { TextGenerationOptions } from "../types/index.js";
 import type { StreamOptions } from "../types/streamTypes.js";
 import type { GenerateOptions } from "../types/generateTypes.js";
+import type { CoreMessage } from "ai";
 /**
- *
+ * Type-safe conversion from MultimodalChatMessage[] to CoreMessage[]
+ * Filters out invalid content and ensures strict CoreMessage contract compliance
  */
-
-    role: "user" | "assistant" | "system";
-    content: string;
-};
+export declare function convertToCoreMessages(messages: MultimodalChatMessage[]): CoreMessage[];
 /**
  * Build a properly formatted message array for AI providers
  * Combines system prompt, conversation history, and current user prompt
@@ -25,4 +24,3 @@ export declare function buildMessagesArray(options: TextGenerationOptions | Stre
  * Detects when images are present and routes through provider adapter
  */
 export declare function buildMultimodalMessagesArray(options: GenerateOptions, provider: string, model: string): Promise<MultimodalChatMessage[]>;
-export {};
@@ -8,6 +8,147 @@ import { ProviderImageAdapter, MultimodalLogger, } from "../adapters/providerIma
 import { logger } from "./logger.js";
 import { request } from "undici";
 import { readFileSync, existsSync } from "fs";
+/**
+ * Type guard for validating message roles
+ */
+function isValidRole(role) {
+    return (typeof role === "string" &&
+        (role === "user" || role === "assistant" || role === "system"));
+}
+/**
+ * Type guard for validating content items
+ */
+function isValidContentItem(item) {
+    if (!item || typeof item !== "object") {
+        return false;
+    }
+    const contentItem = item;
+    if (contentItem.type === "text") {
+        return typeof contentItem.text === "string";
+    }
+    if (contentItem.type === "image") {
+        return (typeof contentItem.image === "string" &&
+            (contentItem.mimeType === undefined ||
+                typeof contentItem.mimeType === "string"));
+    }
+    return false;
+}
+/**
+ * Safely convert content item to AI SDK content format
+ */
+function convertContentItem(item) {
+    if (!isValidContentItem(item)) {
+        return null;
+    }
+    const contentItem = item;
+    if (contentItem.type === "text" && typeof contentItem.text === "string") {
+        return { type: "text", text: contentItem.text };
+    }
+    if (contentItem.type === "image" && typeof contentItem.image === "string") {
+        return {
+            type: "image",
+            image: contentItem.image,
+            ...(contentItem.mimeType && { mimeType: contentItem.mimeType }),
+        };
+    }
+    return null;
+}
+/**
+ * Type-safe conversion from MultimodalChatMessage[] to CoreMessage[]
+ * Filters out invalid content and ensures strict CoreMessage contract compliance
+ */
+export function convertToCoreMessages(messages) {
+    return messages
+        .map((msg) => {
+        // Validate role
+        if (!isValidRole(msg.role)) {
+            logger.warn("Invalid message role found, skipping", { role: msg.role });
+            return null;
+        }
+        // Handle string content
+        if (typeof msg.content === "string") {
+            // Create properly typed discriminated union messages
+            if (msg.role === "system") {
+                return {
+                    role: "system",
+                    content: msg.content,
+                };
+            }
+            else if (msg.role === "user") {
+                return {
+                    role: "user",
+                    content: msg.content,
+                };
+            }
+            else if (msg.role === "assistant") {
+                return {
+                    role: "assistant",
+                    content: msg.content,
+                };
+            }
+        }
+        // Handle array content (multimodal) - only user messages support full multimodal content
+        if (Array.isArray(msg.content)) {
+            const validContent = msg.content
+                .map(convertContentItem)
+                .filter((item) => item !== null);
+            // If no valid content items, skip the message
+            if (validContent.length === 0) {
+                logger.warn("No valid content items found in multimodal message, skipping");
+                return null;
+            }
+            if (msg.role === "user") {
+                // User messages support both text and image content
+                return {
+                    role: "user",
+                    content: validContent,
+                };
+            }
+            else if (msg.role === "assistant") {
+                // Assistant messages only support text content, filter out images
+                const textOnlyContent = validContent.filter((item) => item.type === "text");
+                if (textOnlyContent.length === 0) {
+                    // If no text content, convert to empty string
+                    return {
+                        role: "assistant",
+                        content: "",
+                    };
+                }
+                else if (textOnlyContent.length === 1) {
+                    // Single text item, use string content
+                    return {
+                        role: "assistant",
+                        content: textOnlyContent[0].text,
+                    };
+                }
+                else {
+                    // Multiple text items, concatenate them
+                    const combinedText = textOnlyContent
+                        .map((item) => item.text)
+                        .join(" ");
+                    return {
+                        role: "assistant",
+                        content: combinedText,
+                    };
+                }
+            }
+            else {
+                // System messages cannot have multimodal content, convert to text
+                const textContent = validContent.find((item) => item.type === "text")?.text || "";
+                return {
+                    role: "system",
+                    content: textContent,
+                };
+            }
+        }
+        // Invalid content type
+        logger.warn("Invalid message content type found, skipping", {
+            contentType: typeof msg.content,
+        });
+        return null;
+    })
+        .filter((msg) => msg !== null);
+}
 /**
  * Convert ChatMessage to CoreMessage for AI SDK compatibility
  */
@@ -84,7 +225,10 @@ export async function buildMultimodalMessagesArray(options, provider, model) {
     // If no images, use standard message building and convert to MultimodalChatMessage[]
     if (!hasImages) {
         const standardMessages = buildMessagesArray(options);
-        return standardMessages.map((msg) => ({
+        return standardMessages.map((msg) => ({
+            role: msg.role,
+            content: typeof msg.content === "string" ? msg.content : msg.content,
+        }));
     }
     // Validate provider supports vision
     if (!ProviderImageAdapter.supportsVision(provider, model)) {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@juspay/neurolink",
-  "version": "7.45.0",
+  "version": "7.47.0",
   "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
   "author": {
     "name": "Juspay Technologies",