@juspay/neurolink 9.55.0 → 9.55.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,140 +0,0 @@
1
- import sharp from "sharp";
2
- import { withTimeout } from "./async/index.js";
3
// Output formats the compression pipeline can encode to.
// Frozen so the shared constant cannot be mutated at runtime.
const SUPPORTED_FORMATS = Object.freeze([
    "jpeg",
    "png",
    "webp",
]);
// Upper bound for any single sharp operation (metadata read or encode).
const IMAGE_COMPRESSION_TIMEOUT_MS = 30_000;
9
/**
 * Provider-specific image size limits in bytes.
 *
 * Frozen: this exported table is shared module state and is only ever read
 * by the helpers in this file; freezing prevents accidental mutation by
 * consumers.
 */
export const PROVIDER_IMAGE_LIMITS = Object.freeze({
    openai: 20 * 1024 * 1024, // 20MB
    "openai-compatible": 20 * 1024 * 1024, // 20MB (same as OpenAI)
    anthropic: 5 * 1024 * 1024, // 5MB
    "google-ai": 4 * 1024 * 1024, // 4MB
    vertex: 4 * 1024 * 1024, // 4MB
    bedrock: 5 * 1024 * 1024, // 5MB
    azure: 20 * 1024 * 1024, // 20MB
    mistral: 5 * 1024 * 1024, // 5MB
    huggingface: 10 * 1024 * 1024, // 10MB
    ollama: 100 * 1024 * 1024, // 100MB (local, no strict limit)
    openrouter: 20 * 1024 * 1024, // 20MB
    sagemaker: 5 * 1024 * 1024, // 5MB
    litellm: 20 * 1024 * 1024, // 20MB (proxy, use OpenAI default)
    auto: 5 * 1024 * 1024, // 5MB (conservative fallback)
});
28
/**
 * Compress an image to meet provider-specific size limits.
 *
 * Fix: an unknown `provider` previously produced an `undefined` size limit,
 * which made every size comparison false and silently disabled both the
 * retry loop and the final over-limit check. Unknown providers now fall back
 * to the conservative `auto` limit.
 *
 * @param imageBuffer - Input image buffer
 * @param options - Compression options: `provider` (required), `quality`
 *   (default 80), optional `maxDimension` (longest edge, px) and `format`
 *   ("jpeg" | "png" | "webp") override.
 * @returns Compressed image buffer with size/ratio stats and final metadata
 * @throws {Error} If image dimensions cannot be read, or the image cannot be
 *   brought under the provider's limit even at minimum quality.
 */
export async function compressImage(imageBuffer, options) {
    const { provider, quality = 80, maxDimension, format } = options;
    // Unknown providers fall back to the conservative `auto` limit rather
    // than comparing against `undefined` (which disabled every size check).
    const sizeLimit = PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
    const originalSize = imageBuffer.length;
    // Read original metadata up front; dimensions are needed for resizing.
    const metadata = await withTimeout(sharp(imageBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading image metadata");
    if (!metadata.width || !metadata.height) {
        throw new Error("Unable to read image dimensions");
    }
    // Fast path: already under the limit and no conversion/resize requested.
    if (originalSize <= sizeLimit && !format && !maxDimension) {
        return {
            buffer: imageBuffer,
            originalSize,
            compressedSize: originalSize,
            compressionRatio: 1,
            metadata: {
                width: metadata.width,
                height: metadata.height,
                format: metadata.format ?? "unknown",
            },
        };
    }
    // Build a fresh decode pipeline (with optional downscale). A new pipeline
    // is needed per encode attempt because sharp does not support changing
    // settings after creation.
    const buildPipeline = () => {
        let p = sharp(imageBuffer);
        if (maxDimension &&
            (metadata.width > maxDimension || metadata.height > maxDimension)) {
            p = p.resize(maxDimension, maxDimension, {
                fit: "inside",
                withoutEnlargement: true,
            });
        }
        return p;
    };
    // Resolve target format — explicit override wins; otherwise keep the
    // source format when supported, else convert to JPEG.
    const rawFormat = metadata.format;
    const targetFormat = format ??
        (SUPPORTED_FORMATS.includes(rawFormat) ? rawFormat : "jpeg");
    // Attach the encoder for the target format at the given quality.
    const applyFormat = (p, q) => {
        switch (targetFormat) {
            case "jpeg":
                return p.jpeg({ quality: q, mozjpeg: true });
            case "png":
                return p.png({ quality: q, compressionLevel: 9 });
            case "webp":
                return p.webp({ quality: q });
            default:
                // Unreachable: targetFormat is always drawn from SUPPORTED_FORMATS,
                // but fail loudly instead of returning undefined.
                throw new Error(`Unsupported target format: ${targetFormat}`);
        }
    };
    // First compression attempt at the requested quality.
    let currentQuality = quality;
    let compressedBuffer = await withTimeout(applyFormat(buildPipeline(), currentQuality).toBuffer(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out compressing image");
    // Step quality down in increments of 10 until under the limit or at the
    // quality floor (10).
    while (compressedBuffer.length > sizeLimit && currentQuality > 10) {
        currentQuality -= 10;
        compressedBuffer = await withTimeout(applyFormat(buildPipeline(), currentQuality).toBuffer(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out compressing image");
    }
    // Final check — even minimum quality could not satisfy the limit.
    if (compressedBuffer.length > sizeLimit) {
        throw new Error(`Unable to compress image to ${sizeLimit} bytes for provider ${provider}. ` +
            `Final size: ${compressedBuffer.length} bytes. ` +
            `Try using a smaller image or lower maxDimension.`);
    }
    // Re-read metadata from the compressed output for accurate dimensions.
    const finalMetadata = await withTimeout(sharp(compressedBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading compressed image metadata");
    return {
        buffer: compressedBuffer,
        originalSize,
        compressedSize: compressedBuffer.length,
        compressionRatio: originalSize / compressedBuffer.length,
        metadata: {
            width: finalMetadata.width ?? 0,
            height: finalMetadata.height ?? 0,
            format: targetFormat,
        },
    };
}
123
/**
 * Check if an image needs compression for a specific provider.
 *
 * Fix: an unknown provider previously yielded `length > undefined`, which is
 * always false — incorrectly reporting that no compression is needed. Unknown
 * providers now use the conservative `auto` limit.
 *
 * @param imageBuffer - Input image buffer
 * @param provider - AI provider name
 * @returns True if compression is needed
 */
export function needsCompression(imageBuffer, provider) {
    const sizeLimit = PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
    return imageBuffer.length > sizeLimit;
}
133
/**
 * Get the size limit for a specific provider.
 *
 * Fix: unknown providers previously returned `undefined`; they now resolve to
 * the conservative `auto` fallback limit, matching the semantics used by the
 * compression helpers in this module.
 *
 * @param provider - AI provider name
 * @returns Size limit in bytes
 */
export function getProviderSizeLimit(provider) {
    return PROVIDER_IMAGE_LIMITS[provider] ?? PROVIDER_IMAGE_LIMITS.auto;
}