@juspay/neurolink 9.49.0 → 9.50.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
1
+ ## [9.50.0](https://github.com/juspay/neurolink/compare/v9.49.0...v9.50.0) (2026-04-08)
2
+
3
+ ### Features
4
+
5
+ - **(image-compression):** add sharp-based compression for AI providers ([75645bf](https://github.com/juspay/neurolink/commit/75645bf69d9eb21bf46763c5b058cb92146c71f2)), closes [#553](https://github.com/juspay/neurolink/issues/553)
6
+
1
7
  ## [9.49.0](https://github.com/juspay/neurolink/compare/v9.48.3...v9.49.0) (2026-04-08)
2
8
 
3
9
  ### Features
@@ -0,0 +1,45 @@
1
import type { ProviderName } from "../types/providers.js";
/** Output formats the compressor can encode to. */
declare const SUPPORTED_FORMATS: readonly ["jpeg", "png", "webp"];
type SupportedFormat = (typeof SUPPORTED_FORMATS)[number];
/**
 * Provider-specific image size limits in bytes
 */
export declare const PROVIDER_IMAGE_LIMITS: Record<ProviderName, number>;
/** Options controlling how an image is compressed for a target provider. */
export interface CompressionOptions {
    /** Target AI provider; selects the size limit from PROVIDER_IMAGE_LIMITS. */
    provider: ProviderName;
    /** Encoder quality passed to sharp; the implementation defaults to 80. */
    quality?: number;
    /** Maximum width/height in pixels; larger images are scaled down to fit. */
    maxDimension?: number;
    /** Forced output format; defaults to the source format when supported, else jpeg. */
    format?: SupportedFormat;
}
/** Result of a compression run, including before/after sizes. */
export interface CompressionResult {
    /** The (possibly re-encoded) image bytes. */
    buffer: Buffer;
    /** Size of the input buffer in bytes. */
    originalSize: number;
    /** Size of the output buffer in bytes. */
    compressedSize: number;
    /** originalSize / compressedSize; 1 when the input was returned untouched. */
    compressionRatio: number;
    /** Dimensions and format of the output image. */
    metadata: {
        width: number;
        height: number;
        format: string;
    };
}
/**
 * Compress an image to meet provider-specific size limits
 * @param imageBuffer - Input image buffer
 * @param options - Compression options including provider name
 * @returns Compressed image buffer with metadata
 */
export declare function compressImage(imageBuffer: Buffer, options: CompressionOptions): Promise<CompressionResult>;
/**
 * Check if an image needs compression for a specific provider
 * @param imageBuffer - Input image buffer
 * @param provider - AI provider name
 * @returns True if compression is needed
 */
export declare function needsCompression(imageBuffer: Buffer, provider: ProviderName): boolean;
/**
 * Get the size limit for a specific provider
 * @param provider - AI provider name
 * @returns Size limit in bytes
 */
export declare function getProviderSizeLimit(provider: ProviderName): number;
export {};
@@ -0,0 +1,137 @@
1
+ import sharp from "sharp";
2
+ import { withTimeout } from "./async/index.js";
3
// Output formats the compressor can produce (mirrors the .d.ts tuple type).
const SUPPORTED_FORMATS = ["jpeg", "png", "webp"];
// Upper bound for each individual sharp operation (metadata read / encode).
const IMAGE_COMPRESSION_TIMEOUT_MS = 30_000;
/**
 * Provider-specific image size limits in bytes.
 * Consulted by compressImage/needsCompression/getProviderSizeLimit;
 * "auto" serves as a conservative fallback entry.
 */
export const PROVIDER_IMAGE_LIMITS = {
    openai: 20 * 1024 * 1024, // 20MB
    "openai-compatible": 20 * 1024 * 1024, // 20MB (same as OpenAI)
    anthropic: 5 * 1024 * 1024, // 5MB
    "google-ai": 4 * 1024 * 1024, // 4MB
    vertex: 4 * 1024 * 1024, // 4MB
    bedrock: 5 * 1024 * 1024, // 5MB
    azure: 20 * 1024 * 1024, // 20MB
    mistral: 5 * 1024 * 1024, // 5MB
    huggingface: 10 * 1024 * 1024, // 10MB
    ollama: 100 * 1024 * 1024, // 100MB (local, no strict limit)
    openrouter: 20 * 1024 * 1024, // 20MB
    sagemaker: 5 * 1024 * 1024, // 5MB
    litellm: 20 * 1024 * 1024, // 20MB (proxy, use OpenAI default)
    auto: 5 * 1024 * 1024, // 5MB (conservative fallback)
};
24
/**
 * Compress an image to meet provider-specific size limits.
 *
 * Strategy: read the source metadata, return the input untouched when it
 * already fits and no resize/format conversion was requested; otherwise
 * encode at the requested quality and step the quality down by 10 per
 * attempt until the output fits the provider limit or quality bottoms out.
 *
 * @param imageBuffer - Input image buffer
 * @param options - Compression options including provider name
 * @returns Compressed image buffer with metadata
 * @throws If the image dimensions cannot be read, a sharp operation exceeds
 *         IMAGE_COMPRESSION_TIMEOUT_MS, or the image cannot be brought under
 *         the provider's size limit by quality reduction alone.
 */
export async function compressImage(imageBuffer, options) {
    const { provider, maxDimension, format } = options;
    // Clamp to sharp's valid quality range (1-100); out-of-range values
    // (e.g. 0 or 150) would otherwise make the encoder throw mid-pipeline.
    const quality = Math.min(100, Math.max(1, options.quality ?? 80));
    // Unknown providers fall back to the conservative "auto" limit instead of
    // comparing against undefined, which would silently disable every check.
    const sizeLimit = PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
    const originalSize = imageBuffer.length;
    // Get original metadata (bounded by the module-wide timeout)
    const metadata = await withTimeout(sharp(imageBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading image metadata");
    if (!metadata.width || !metadata.height) {
        throw new Error("Unable to read image dimensions");
    }
    // If image is already under limit and no format conversion needed, return as-is
    if (originalSize <= sizeLimit && !format && !maxDimension) {
        return {
            buffer: imageBuffer,
            originalSize,
            compressedSize: originalSize,
            compressionRatio: 1,
            metadata: {
                width: metadata.width,
                height: metadata.height,
                format: metadata.format ?? "unknown",
            },
        };
    }
    // Build a fresh pipeline per encode attempt: sharp settings cannot be
    // mutated after creation, so each retry must rebuild from the source.
    const buildPipeline = () => {
        let p = sharp(imageBuffer);
        if (maxDimension &&
            (metadata.width > maxDimension || metadata.height > maxDimension)) {
            p = p.resize(maxDimension, maxDimension, {
                fit: "inside",
                withoutEnlargement: true,
            });
        }
        return p;
    };
    // Resolve target format — validate metadata.format against supported set
    const rawFormat = metadata.format;
    const targetFormat = format ??
        (SUPPORTED_FORMATS.includes(rawFormat)
            ? rawFormat
            : "jpeg");
    const applyFormat = (p, q) => {
        switch (targetFormat) {
            case "jpeg":
                return p.jpeg({ quality: q, mozjpeg: true });
            case "png":
                return p.png({ quality: q, compressionLevel: 9 });
            case "webp":
                return p.webp({ quality: q });
        }
    };
    // One encode attempt at the given quality, bounded by the timeout.
    const encode = (q) => withTimeout(applyFormat(buildPipeline(), q).toBuffer(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out compressing image");
    // Compress
    let compressedBuffer = await encode(quality);
    let currentQuality = quality;
    // Iteratively reduce quality if still over limit
    while (compressedBuffer.length > sizeLimit && currentQuality > 10) {
        currentQuality -= 10;
        compressedBuffer = await encode(currentQuality);
    }
    // Final check: quality reduction alone could not reach the limit
    if (compressedBuffer.length > sizeLimit) {
        throw new Error(`Unable to compress image to ${sizeLimit} bytes for provider ${provider}. ` +
            `Final size: ${compressedBuffer.length} bytes. ` +
            `Try using a smaller image or lower maxDimension.`);
    }
    // Re-read metadata from the encoded output for accurate final dimensions
    const finalMetadata = await withTimeout(sharp(compressedBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading compressed image metadata");
    return {
        buffer: compressedBuffer,
        originalSize,
        compressedSize: compressedBuffer.length,
        compressionRatio: originalSize / compressedBuffer.length,
        metadata: {
            width: finalMetadata.width ?? 0,
            height: finalMetadata.height ?? 0,
            format: targetFormat,
        },
    };
}
119
/**
 * Check if an image needs compression for a specific provider
 * @param imageBuffer - Input image buffer
 * @param provider - AI provider name
 * @returns True if compression is needed
 */
export function needsCompression(imageBuffer, provider) {
    // Unknown providers fall back to the conservative "auto" limit rather
    // than comparing against undefined, which would always return false.
    const sizeLimit = PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
    return imageBuffer.length > sizeLimit;
}
129
/**
 * Get the size limit for a specific provider
 * @param provider - AI provider name
 * @returns Size limit in bytes
 */
export function getProviderSizeLimit(provider) {
    // Fall back to the conservative "auto" limit so the declared number
    // return type holds even for an unknown provider key at runtime.
    return PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
}
137
+ //# sourceMappingURL=imageCompressor.js.map
@@ -0,0 +1,45 @@
1
import type { ProviderName } from "../types/providers.js";
/** Output formats the compressor can encode to. */
declare const SUPPORTED_FORMATS: readonly ["jpeg", "png", "webp"];
type SupportedFormat = (typeof SUPPORTED_FORMATS)[number];
/**
 * Provider-specific image size limits in bytes
 */
export declare const PROVIDER_IMAGE_LIMITS: Record<ProviderName, number>;
/** Options controlling how an image is compressed for a target provider. */
export interface CompressionOptions {
    /** Target AI provider; selects the size limit from PROVIDER_IMAGE_LIMITS. */
    provider: ProviderName;
    /** Encoder quality passed to sharp; the implementation defaults to 80. */
    quality?: number;
    /** Maximum width/height in pixels; larger images are scaled down to fit. */
    maxDimension?: number;
    /** Forced output format; defaults to the source format when supported, else jpeg. */
    format?: SupportedFormat;
}
/** Result of a compression run, including before/after sizes. */
export interface CompressionResult {
    /** The (possibly re-encoded) image bytes. */
    buffer: Buffer;
    /** Size of the input buffer in bytes. */
    originalSize: number;
    /** Size of the output buffer in bytes. */
    compressedSize: number;
    /** originalSize / compressedSize; 1 when the input was returned untouched. */
    compressionRatio: number;
    /** Dimensions and format of the output image. */
    metadata: {
        width: number;
        height: number;
        format: string;
    };
}
/**
 * Compress an image to meet provider-specific size limits
 * @param imageBuffer - Input image buffer
 * @param options - Compression options including provider name
 * @returns Compressed image buffer with metadata
 */
export declare function compressImage(imageBuffer: Buffer, options: CompressionOptions): Promise<CompressionResult>;
/**
 * Check if an image needs compression for a specific provider
 * @param imageBuffer - Input image buffer
 * @param provider - AI provider name
 * @returns True if compression is needed
 */
export declare function needsCompression(imageBuffer: Buffer, provider: ProviderName): boolean;
/**
 * Get the size limit for a specific provider
 * @param provider - AI provider name
 * @returns Size limit in bytes
 */
export declare function getProviderSizeLimit(provider: ProviderName): number;
export {};
@@ -0,0 +1,136 @@
1
+ import sharp from "sharp";
2
+ import { withTimeout } from "./async/index.js";
3
// Output formats the compressor can produce (mirrors the .d.ts tuple type).
const SUPPORTED_FORMATS = ["jpeg", "png", "webp"];
// Upper bound for each individual sharp operation (metadata read / encode).
const IMAGE_COMPRESSION_TIMEOUT_MS = 30_000;
/**
 * Provider-specific image size limits in bytes.
 * Consulted by compressImage/needsCompression/getProviderSizeLimit;
 * "auto" serves as a conservative fallback entry.
 */
export const PROVIDER_IMAGE_LIMITS = {
    openai: 20 * 1024 * 1024, // 20MB
    "openai-compatible": 20 * 1024 * 1024, // 20MB (same as OpenAI)
    anthropic: 5 * 1024 * 1024, // 5MB
    "google-ai": 4 * 1024 * 1024, // 4MB
    vertex: 4 * 1024 * 1024, // 4MB
    bedrock: 5 * 1024 * 1024, // 5MB
    azure: 20 * 1024 * 1024, // 20MB
    mistral: 5 * 1024 * 1024, // 5MB
    huggingface: 10 * 1024 * 1024, // 10MB
    ollama: 100 * 1024 * 1024, // 100MB (local, no strict limit)
    openrouter: 20 * 1024 * 1024, // 20MB
    sagemaker: 5 * 1024 * 1024, // 5MB
    litellm: 20 * 1024 * 1024, // 20MB (proxy, use OpenAI default)
    auto: 5 * 1024 * 1024, // 5MB (conservative fallback)
};
24
/**
 * Compress an image to meet provider-specific size limits.
 *
 * Strategy: read the source metadata, return the input untouched when it
 * already fits and no resize/format conversion was requested; otherwise
 * encode at the requested quality and step the quality down by 10 per
 * attempt until the output fits the provider limit or quality bottoms out.
 *
 * @param imageBuffer - Input image buffer
 * @param options - Compression options including provider name
 * @returns Compressed image buffer with metadata
 * @throws If the image dimensions cannot be read, a sharp operation exceeds
 *         IMAGE_COMPRESSION_TIMEOUT_MS, or the image cannot be brought under
 *         the provider's size limit by quality reduction alone.
 */
export async function compressImage(imageBuffer, options) {
    const { provider, maxDimension, format } = options;
    // Clamp to sharp's valid quality range (1-100); out-of-range values
    // (e.g. 0 or 150) would otherwise make the encoder throw mid-pipeline.
    const quality = Math.min(100, Math.max(1, options.quality ?? 80));
    // Unknown providers fall back to the conservative "auto" limit instead of
    // comparing against undefined, which would silently disable every check.
    const sizeLimit = PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
    const originalSize = imageBuffer.length;
    // Get original metadata (bounded by the module-wide timeout)
    const metadata = await withTimeout(sharp(imageBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading image metadata");
    if (!metadata.width || !metadata.height) {
        throw new Error("Unable to read image dimensions");
    }
    // If image is already under limit and no format conversion needed, return as-is
    if (originalSize <= sizeLimit && !format && !maxDimension) {
        return {
            buffer: imageBuffer,
            originalSize,
            compressedSize: originalSize,
            compressionRatio: 1,
            metadata: {
                width: metadata.width,
                height: metadata.height,
                format: metadata.format ?? "unknown",
            },
        };
    }
    // Build a fresh pipeline per encode attempt: sharp settings cannot be
    // mutated after creation, so each retry must rebuild from the source.
    const buildPipeline = () => {
        let p = sharp(imageBuffer);
        if (maxDimension &&
            (metadata.width > maxDimension || metadata.height > maxDimension)) {
            p = p.resize(maxDimension, maxDimension, {
                fit: "inside",
                withoutEnlargement: true,
            });
        }
        return p;
    };
    // Resolve target format — validate metadata.format against supported set
    const rawFormat = metadata.format;
    const targetFormat = format ??
        (SUPPORTED_FORMATS.includes(rawFormat)
            ? rawFormat
            : "jpeg");
    const applyFormat = (p, q) => {
        switch (targetFormat) {
            case "jpeg":
                return p.jpeg({ quality: q, mozjpeg: true });
            case "png":
                return p.png({ quality: q, compressionLevel: 9 });
            case "webp":
                return p.webp({ quality: q });
        }
    };
    // One encode attempt at the given quality, bounded by the timeout.
    const encode = (q) => withTimeout(applyFormat(buildPipeline(), q).toBuffer(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out compressing image");
    // Compress
    let compressedBuffer = await encode(quality);
    let currentQuality = quality;
    // Iteratively reduce quality if still over limit
    while (compressedBuffer.length > sizeLimit && currentQuality > 10) {
        currentQuality -= 10;
        compressedBuffer = await encode(currentQuality);
    }
    // Final check: quality reduction alone could not reach the limit
    if (compressedBuffer.length > sizeLimit) {
        throw new Error(`Unable to compress image to ${sizeLimit} bytes for provider ${provider}. ` +
            `Final size: ${compressedBuffer.length} bytes. ` +
            `Try using a smaller image or lower maxDimension.`);
    }
    // Re-read metadata from the encoded output for accurate final dimensions
    const finalMetadata = await withTimeout(sharp(compressedBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading compressed image metadata");
    return {
        buffer: compressedBuffer,
        originalSize,
        compressedSize: compressedBuffer.length,
        compressionRatio: originalSize / compressedBuffer.length,
        metadata: {
            width: finalMetadata.width ?? 0,
            height: finalMetadata.height ?? 0,
            format: targetFormat,
        },
    };
}
119
/**
 * Check if an image needs compression for a specific provider
 * @param imageBuffer - Input image buffer
 * @param provider - AI provider name
 * @returns True if compression is needed
 */
export function needsCompression(imageBuffer, provider) {
    // Unknown providers fall back to the conservative "auto" limit rather
    // than comparing against undefined, which would always return false.
    const sizeLimit = PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
    return imageBuffer.length > sizeLimit;
}
129
/**
 * Get the size limit for a specific provider
 * @param provider - AI provider name
 * @returns Size limit in bytes
 */
export function getProviderSizeLimit(provider) {
    // Fall back to the conservative "auto" limit so the declared number
    // return type holds even for an unknown provider key at runtime.
    return PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@juspay/neurolink",
3
- "version": "9.49.0",
3
+ "version": "9.50.0",
4
4
  "packageManager": "pnpm@10.15.1",
5
5
  "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 13 providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
6
6
  "author": {