@xenterprises/fastify-ximagepipeline 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,88 @@
1
+ // src/routes/status.js
2
+
3
/**
 * Register status routes.
 *
 * GET /image-pipeline/status/:jobId - Look up a processing job and report its
 * current state. The job status maps to the HTTP status code:
 *   PENDING/PROCESSING -> 202, COMPLETE -> 200, REJECTED -> 400, FAILED -> 500.
 *
 * @param {import("fastify").FastifyInstance} fastify - Instance to register routes on
 * @param {Object} context - Plugin context; must expose `db.mediaQueue` (Prisma-style client)
 */
export async function setupStatusRoute(fastify, context) {
  fastify.get("/image-pipeline/status/:jobId", async (request, reply) => {
    try {
      const { jobId } = request.params;

      // Defensive check; Fastify normally guarantees a route param is present.
      if (!jobId) {
        return reply.status(400).send({
          error: "jobId parameter required",
        });
      }

      // Fetch job from database, including the produced media row (if any).
      const job = await context.db.mediaQueue.findUnique({
        where: { id: jobId },
        include: {
          media: true,
        },
      });

      if (!job) {
        return reply.status(404).send({
          error: "Job not found",
        });
      }

      // Base response fields common to every job state.
      const response = {
        jobId: job.id,
        status: job.status,
        sourceType: job.sourceType,
        sourceId: job.sourceId,
        createdAt: job.createdAt,
        updatedAt: job.updatedAt,
      };

      // Attach processed media details once the job finished successfully.
      if (job.status === "COMPLETE" && job.media) {
        response.media = {
          id: job.media.id,
          urls: job.media.urls,
          originalUrl: job.media.originalUrl,
          width: job.media.width,
          height: job.media.height,
          aspectRatio: job.media.aspectRatio,
          blurhash: job.media.blurhash,
          focalPoint: job.media.focalPoint,
        };
      }

      if (job.status === "REJECTED") {
        response.reason = job.moderationResult || "Unknown";
        if (job.moderationDetails) {
          response.moderationDetails = job.moderationDetails;
        }
      }

      if (job.status === "FAILED") {
        response.error = job.errorMsg;
        response.attempts = job.attempts;
      }

      // Map job state to an HTTP status code.
      let statusCode = 200;
      if (job.status === "PENDING" || job.status === "PROCESSING") {
        statusCode = 202; // Still processing
      } else if (job.status === "COMPLETE") {
        statusCode = 200; // Done
      } else if (job.status === "REJECTED") {
        statusCode = 400; // Content rejected by moderation
      } else if (job.status === "FAILED") {
        statusCode = 500; // Processing failed server-side
      }

      return reply.status(statusCode).send(response);
    } catch (error) {
      // Use the request-scoped Fastify logger (not console) so the error
      // carries request context and respects the configured log level.
      request.log.error({ err: error }, "Status check error");
      return reply.status(500).send({
        error: "Failed to check job status",
      });
    }
  });
}
@@ -0,0 +1,132 @@
1
+ // src/routes/upload.js
2
import { uploadToS3, getPublicUrl } from "../services/s3.js";
import { getVariantPresets } from "../xImagePipeline.js";
import { randomUUID } from "node:crypto";
4
+
5
/**
 * Register upload routes.
 *
 * POST /image-pipeline/upload - Accept a multipart image upload, validate it,
 * stage it in R2/S3, and enqueue a processing job. Responds 202 with the job
 * id and a status URL on success.
 *
 * @param {import("fastify").FastifyInstance} fastify - Instance (requires @fastify/multipart)
 * @param {Object} context - Plugin context: db, s3Client, r2Config, stagingPath,
 *   allowedMimeTypes, maxFileSize
 */
export async function setupUploadRoute(fastify, context) {
  fastify.post("/image-pipeline/upload", async (request, reply) => {
    try {
      const data = await request.file();

      if (!data) {
        return reply.status(400).send({
          error: "No file provided",
        });
      }

      const { filename, mimetype, fields } = data;

      // Validate file type against the configured allow-list.
      if (!context.allowedMimeTypes.includes(mimetype)) {
        return reply.status(400).send({
          error: `File type ${mimetype} not allowed. Allowed types: ${context.allowedMimeTypes.join(", ")}`,
        });
      }

      // Source information arrives as additional multipart fields.
      const sourceType = fields.sourceType?.value;
      const sourceId = fields.sourceId?.value;

      if (!sourceType || !sourceId) {
        return reply.status(400).send({
          error: "sourceType and sourceId are required",
        });
      }

      // Validate sourceType against the known variant presets.
      const presets = getVariantPresets();
      if (!presets[sourceType]) {
        return reply.status(400).send({
          error: `Unknown sourceType: ${sourceType}. Allowed types: ${Object.keys(presets).join(", ")}`,
        });
      }

      // Read the whole file into memory. Note: toBuffer() is a method of the
      // multipart object returned by request.file() — NOT of the underlying
      // stream (`data.file`), which the previous code called it on.
      const buffer = await data.toBuffer();

      // Validate file size.
      if (buffer.length > context.maxFileSize) {
        return reply.status(413).send({
          error: `File too large. Maximum size: ${context.maxFileSize / 1024 / 1024}MB`,
        });
      }

      // Generate a unique staging key; randomUUID() avoids the collision-prone
      // short Math.random() suffix.
      const stagingKey = `${context.stagingPath}/${sourceType}/${sourceId}/${Date.now()}-${randomUUID()}`;

      // Upload to R2 staging bucket.
      try {
        await uploadToS3(context.s3Client, context.r2Config.bucket, stagingKey, buffer, {
          contentType: mimetype,
          metadata: {
            sourceType,
            sourceId,
            originalFilename: filename,
          },
        });
      } catch (err) {
        request.log.error({ err }, "R2 upload failed");
        return reply.status(500).send({
          error: "Failed to upload file to storage",
        });
      }

      // Create the processing job; workers pick up PENDING rows.
      let jobId;
      try {
        const job = await context.db.mediaQueue.create({
          data: {
            sourceType,
            sourceId,
            stagingKey,
            originalFilename: filename,
            mimeType: mimetype,
            fileSize: buffer.length,
            status: "PENDING",
          },
        });

        jobId = job.id;
      } catch (err) {
        request.log.error({ err }, "Database create failed");
        // Best-effort cleanup of the already-staged object so it does not leak.
        try {
          await deleteFromS3(context.s3Client, context.r2Config.bucket, stagingKey);
        } catch {
          // Ignore cleanup errors; an orphaned staging object is harmless.
        }

        return reply.status(500).send({
          error: "Failed to create processing job",
        });
      }

      // 202 Accepted: processing continues asynchronously.
      return reply.status(202).send({
        jobId,
        message: "File uploaded. Processing started.",
        statusUrl: `/image-pipeline/status/${jobId}`,
      });
    } catch (error) {
      request.log.error({ err: error }, "Upload error");
      return reply.status(500).send({
        error: "Upload failed",
      });
    }
  });
}
128
+
129
+ /**
130
+ * Helper function to delete from S3 (imported from service)
131
+ */
132
+ import { deleteFromS3 } from "../services/s3.js";
@@ -0,0 +1,154 @@
1
+ // src/services/s3.js
2
+ import { S3Client, PutObjectCommand, DeleteObjectCommand, GetObjectCommand, ListObjectsV2Command } from "@aws-sdk/client-s3";
3
+ import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
4
+
5
/**
 * Create an S3 client configured for Cloudflare R2 (or any S3-compatible
 * endpoint). R2 speaks the S3 API but uses a custom endpoint and the
 * pseudo-region "auto".
 *
 * @param {Object} config - { endpoint, accessKeyId, secretAccessKey, bucket, region? }
 * @returns {S3Client} Configured client
 * @throws {Error} When any required config field is missing
 */
export function initializeS3Client(config) {
  const required = ["endpoint", "accessKeyId", "secretAccessKey", "bucket"];

  if (required.some((field) => !config[field])) {
    throw new Error("R2 configuration must include: endpoint, accessKeyId, secretAccessKey, bucket");
  }

  const { endpoint, accessKeyId, secretAccessKey, region } = config;

  return new S3Client({
    region: region || "auto", // R2 expects "auto"
    endpoint,
    credentials: { accessKeyId, secretAccessKey },
  });
}
23
+
24
/**
 * Upload a buffer to S3/R2.
 *
 * @param {S3Client} s3Client - Initialized client
 * @param {string} bucket - Target bucket name
 * @param {string} key - Object key
 * @param {Buffer} buffer - Object body
 * @param {Object} [options] - { contentType?, metadata?, cacheControl? }
 * @returns {Promise<{key: string, etag: (string|undefined), versionId: (string|undefined)}>}
 * @throws {Error} Wrapped upload failure; the original error is kept as `cause`
 */
export async function uploadToS3(s3Client, bucket, key, buffer, options = {}) {
  const command = new PutObjectCommand({
    Bucket: bucket,
    Key: key,
    Body: buffer,
    ContentType: options.contentType || "application/octet-stream",
    Metadata: options.metadata || {},
    CacheControl: options.cacheControl || "max-age=31536000", // 1 year for immutable content
  });

  try {
    const response = await s3Client.send(command);
    return {
      key,
      etag: response.ETag,
      versionId: response.VersionId,
    };
  } catch (error) {
    // Preserve the original error (and its stack) via `cause`.
    throw new Error(`Failed to upload to R2: ${error.message}`, { cause: error });
  }
}
48
+
49
/**
 * Download an object from S3/R2 as a byte array.
 *
 * @param {S3Client} s3Client - Initialized client
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key
 * @returns {Promise<Uint8Array>} Object contents
 * @throws {Error} Wrapped download failure; the original error is kept as `cause`
 */
export async function downloadFromS3(s3Client, bucket, key) {
  const command = new GetObjectCommand({
    Bucket: bucket,
    Key: key,
  });

  try {
    const response = await s3Client.send(command);
    // `return await` (not a bare `return`) so stream/transform failures are
    // also caught and wrapped below instead of escaping the try.
    return await response.Body.transformToByteArray();
  } catch (error) {
    throw new Error(`Failed to download from R2: ${error.message}`, { cause: error });
  }
}
65
+
66
/**
 * Delete a single object from S3/R2.
 *
 * @param {S3Client} s3Client - Initialized client
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key
 * @returns {Promise<boolean>} Resolves true once the delete request succeeds
 * @throws {Error} When the delete request fails
 */
export async function deleteFromS3(s3Client, bucket, key) {
  try {
    await s3Client.send(
      new DeleteObjectCommand({ Bucket: bucket, Key: key })
    );
  } catch (error) {
    throw new Error(`Failed to delete from R2: ${error.message}`);
  }
  return true;
}
82
+
83
/**
 * List all objects under a prefix (for batch operations).
 *
 * ListObjectsV2 returns at most 1000 keys per request, so this follows
 * continuation tokens until the listing is exhausted; the previous
 * single-request version silently truncated larger prefixes.
 *
 * @param {S3Client} s3Client - Initialized client
 * @param {string} bucket - Bucket name
 * @param {string} prefix - Key prefix to list under
 * @returns {Promise<Array<Object>>} All matching `Contents` entries (may be empty)
 * @throws {Error} Wrapped listing failure; the original error is kept as `cause`
 */
export async function listFromS3(s3Client, bucket, prefix) {
  try {
    const objects = [];
    let continuationToken;

    do {
      const response = await s3Client.send(
        new ListObjectsV2Command({
          Bucket: bucket,
          Prefix: prefix,
          ContinuationToken: continuationToken,
        })
      );
      objects.push(...(response.Contents || []));
      continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
    } while (continuationToken);

    return objects;
  } catch (error) {
    throw new Error(`Failed to list from R2: ${error.message}`, { cause: error });
  }
}
99
+
100
/**
 * Create a time-limited signed URL for reading an S3/R2 object
 * (used for private/protected content).
 *
 * @param {S3Client} s3Client - Initialized client
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key
 * @param {number} [expiresIn=3600] - URL lifetime in seconds
 * @returns {Promise<string>} Pre-signed GET URL
 * @throws {Error} When signing fails
 */
export async function getSignedUrlForS3(s3Client, bucket, key, expiresIn = 3600) {
  const command = new GetObjectCommand({ Bucket: bucket, Key: key });

  try {
    return await getSignedUrl(s3Client, command, { expiresIn });
  } catch (error) {
    throw new Error(`Failed to generate signed URL: ${error.message}`);
  }
}
115
+
116
/**
 * Build the public URL for an R2 object.
 * Assumes the bucket is publicly readable for media/originals paths.
 *
 * @param {Object} r2Config - { endpoint, bucket }
 * @param {string} key - Object key
 * @returns {string} `endpoint/bucket/key` with no doubled slash
 */
export function getPublicUrl(r2Config, key) {
  // Drop a single trailing slash so the joined URL has exactly one separator.
  const base = r2Config.endpoint.replace(/\/$/, "");
  return [base, r2Config.bucket, key].join("/");
}
128
+
129
/**
 * Delete every object under a prefix.
 *
 * Note: this issues one DeleteObject request per key (it does not use the
 * S3 DeleteObjects batch API); keys are processed in chunks of 1000 only to
 * bound the number of in-flight requests.
 *
 * @param {S3Client} s3Client - Initialized client
 * @param {string} bucket - Bucket name
 * @param {string} prefix - Key prefix whose objects are removed
 * @returns {Promise<{deleted: number}>} Count of deleted objects
 * @throws {Error} Wrapped failure from listing or any delete; original kept as `cause`
 */
export async function batchDeleteFromS3(s3Client, bucket, prefix) {
  try {
    const objects = await listFromS3(s3Client, bucket, prefix);

    if (objects.length === 0) {
      return { deleted: 0 };
    }

    // Chunk the work so we never fan out unbounded parallel deletes.
    let deleted = 0;
    for (let i = 0; i < objects.length; i += 1000) {
      const batch = objects.slice(i, i + 1000);
      await Promise.all(
        batch.map((obj) => deleteFromS3(s3Client, bucket, obj.Key))
      );
      deleted += batch.length;
    }

    return { deleted };
  } catch (error) {
    throw new Error(`Failed to batch delete from R2: ${error.message}`, { cause: error });
  }
}
@@ -0,0 +1,308 @@
1
+ // src/utils/image.js
2
+ import sharp from "sharp";
3
+ import { encode } from "blurhash";
4
+
5
/**
 * Strip EXIF data from an image while preserving visual orientation.
 *
 * sharp only applies the EXIF orientation when `.rotate()` is called with no
 * arguments, and it strips metadata from output by default (metadata is kept
 * only when `.withMetadata()` is used). So: rotate upright, then re-encode
 * without metadata. The previous `.withMetadata(false)` call was invalid
 * usage (the method takes an options object) and no rotation was applied.
 *
 * @param {Buffer} buffer - Source image
 * @returns {Promise<Buffer>} Re-encoded image, rotated upright, without EXIF
 * @throws {Error} When sharp cannot process the buffer
 */
export async function stripExif(buffer) {
  try {
    // rotate() with no args auto-orients per the EXIF orientation tag;
    // omitting withMetadata() drops EXIF and other metadata from the output.
    return await sharp(buffer).rotate().toBuffer();
  } catch (error) {
    throw new Error(`Failed to strip EXIF: ${error.message}`);
  }
}
27
+
28
/**
 * Read basic metadata from an image buffer.
 *
 * @param {Buffer} buffer - Image data
 * @returns {Promise<Object>} { width, height, format, colorspace, hasAlpha, density }
 * @throws {Error} When sharp cannot parse the buffer
 */
export async function getImageMetadata(buffer) {
  try {
    const { width, height, format, space, hasAlpha, density } =
      await sharp(buffer).metadata();

    return { width, height, format, colorspace: space, hasAlpha, density };
  } catch (error) {
    throw new Error(`Failed to get image metadata: ${error.message}`);
  }
}
47
+
48
/**
 * Re-encode an image as JPEG for space-efficient storage of originals.
 *
 * @param {Buffer} buffer - Image buffer
 * @param {number} [quality=85] - JPEG quality (0-100)
 * @returns {Promise<Buffer>} JPEG-encoded image
 * @throws {Error} When encoding fails
 */
export async function compressToJpeg(buffer, quality = 85) {
  try {
    return await sharp(buffer).jpeg({ quality, mozjpeg: true }).toBuffer();
  } catch (error) {
    throw new Error(`Failed to compress image to JPEG: ${error.message}`);
  }
}
65
+
66
/**
 * Generate WebP variants of an image.
 *
 * EXIF is stripped first, then each variant spec is rendered. Specs whose
 * target exceeds the source dimensions are skipped (no upscaling), and a
 * failure on one variant does not abort the others.
 *
 * @param {Buffer} buffer - Source image
 * @param {Object} variantSpecs - Map of variant name -> { width, height?, fit }
 * @param {string} sourceType - Source type label (unused here; kept for the call signature)
 * @returns {Promise<Object>} Map of variant name -> WebP buffer
 * @throws {Error} When preprocessing or metadata extraction fails
 */
export async function generateVariants(buffer, variantSpecs, sourceType) {
  try {
    const cleanBuffer = await stripExif(buffer);

    const { width: srcWidth, height: srcHeight } = await getImageMetadata(cleanBuffer);

    const variants = {};

    for (const [variantName, spec] of Object.entries(variantSpecs)) {
      const { width: targetWidth, height: targetHeight, fit } = spec;

      // Never upscale: skip specs the source cannot fill.
      const tooNarrow = srcWidth < targetWidth;
      const tooShort = Boolean(targetHeight) && srcHeight < targetHeight;
      if (tooNarrow || tooShort) {
        continue;
      }

      try {
        let pipeline = sharp(cleanBuffer);

        if (fit === "cover") {
          pipeline = pipeline.resize(targetWidth, targetHeight, {
            fit: "cover",
            position: "center",
          });
        } else if (fit === "inside") {
          pipeline = pipeline.resize(targetWidth, targetHeight, {
            fit: "inside",
            withoutEnlargement: true,
          });
        }

        variants[variantName] = await pipeline.webp({ quality: 85 }).toBuffer();
      } catch (err) {
        // Best-effort: log and move on to the remaining variants.
        console.error(`Failed to generate variant ${variantName}:`, err.message);
      }
    }

    return variants;
  } catch (error) {
    throw new Error(`Failed to generate variants: ${error.message}`);
  }
}
123
+
124
/**
 * Generate a blurhash placeholder string for an image (used while loading).
 *
 * The image is downscaled to at most 10x10, converted to raw RGBA pixels,
 * and encoded with 4x3 components. blurhash's encode() requires 4 channels
 * per pixel, so `.ensureAlpha()` is mandatory: without it, RGB sources
 * produce 3-channel raw data and encode() rejects the pixel array.
 *
 * @param {Buffer} buffer - Source image
 * @returns {Promise<string>} Blurhash string
 * @throws {Error} When thumbnailing or encoding fails
 */
export async function generateBlurhash(buffer) {
  try {
    const { data, info } = await sharp(buffer)
      .resize(10, 10, { fit: "inside" })
      .ensureAlpha() // encode() expects RGBA; RGB input would throw
      .raw()
      .toBuffer({ resolveWithObject: true });

    return encode(
      new Uint8ClampedArray(data),
      info.width,
      info.height,
      4, // x components
      3 // y components
    );
  } catch (error) {
    throw new Error(`Failed to generate blurhash: ${error.message}`);
  }
}
150
+
151
/**
 * Compute the dimensions an image takes with fit="inside": the largest size
 * that fits within maxWidth x maxHeight while keeping the source aspect ratio.
 *
 * @param {number} srcWidth - Source width in px
 * @param {number} srcHeight - Source height in px
 * @param {number} maxWidth - Bounding-box width in px
 * @param {number} maxHeight - Bounding-box height in px
 * @returns {{width: number, height: number}} Fitted dimensions (rounded)
 */
export function calculateFitDimensions(srcWidth, srcHeight, maxWidth, maxHeight) {
  const srcRatio = srcWidth / srcHeight;
  const boxRatio = maxWidth / maxHeight;

  if (srcRatio > boxRatio) {
    // Wider than the box: width is the limiting dimension.
    return { width: maxWidth, height: Math.round(maxWidth / srcRatio) };
  }

  // Taller than (or same shape as) the box: height is the limiting dimension.
  return { width: Math.round(maxHeight * srcRatio), height: maxHeight };
}
170
+
171
/**
 * Reduce width:height to its simplest integer ratio, e.g. "16:9", "4:3", "1:1".
 *
 * @param {number} width - Width in px
 * @param {number} height - Height in px
 * @returns {string} Aspect ratio string
 */
export function getAspectRatio(width, height) {
  // Iterative Euclidean algorithm for the greatest common divisor.
  let a = width;
  let b = height;
  while (b !== 0) {
    [a, b] = [b, a % b];
  }
  return `${width / a}:${height / a}`;
}
179
+
180
/**
 * Validate an image buffer against optional dimension/format constraints.
 *
 * Never throws: parse failures are reported as
 * { valid: false, errors: [...], metadata: null }.
 *
 * @param {Buffer} buffer - Image data
 * @param {Object} [options] - { minWidth?, maxWidth?, minHeight?, maxHeight?, allowedFormats? }
 * @returns {Promise<{valid: boolean, errors?: string[], metadata: (Object|null)}>}
 */
export async function validateImage(buffer, options = {}) {
  try {
    const metadata = await getImageMetadata(buffer);
    const { width, height, format } = metadata;

    const errors = [];

    // Each dimension check applies only when the corresponding option is set.
    if (options.minWidth && width < options.minWidth) {
      errors.push(`Image width must be at least ${options.minWidth}px`);
    }
    if (options.maxWidth && width > options.maxWidth) {
      errors.push(`Image width must not exceed ${options.maxWidth}px`);
    }
    if (options.minHeight && height < options.minHeight) {
      errors.push(`Image height must be at least ${options.minHeight}px`);
    }
    if (options.maxHeight && height > options.maxHeight) {
      errors.push(`Image height must not exceed ${options.maxHeight}px`);
    }

    // Format allow-list.
    if (options.allowedFormats && !options.allowedFormats.includes(format)) {
      errors.push(`Image format ${format} is not allowed`);
    }

    return errors.length > 0
      ? { valid: false, errors, metadata }
      : { valid: true, metadata };
  } catch (error) {
    return {
      valid: false,
      errors: [error.message],
      metadata: null,
    };
  }
}
221
+
222
/**
 * Process an image end-to-end: strip EXIF, read metadata, render the
 * configured WebP variants, compute a blurhash, and optionally keep a
 * JPEG-compressed original and AVIF variants.
 *
 * @param {Buffer} buffer - Image buffer
 * @param {string} sourceType - Source type (avatar, background, etc.)
 * @param {Object} config - Configuration object
 * @param {Object} config.sourceTypeConfig - Source type specific config
 * @param {Array<string>} config.sourceTypeConfig.variants - Variant names to generate
 * @param {Array<string>} [config.sourceTypeConfig.formats] - Formats to generate ['webp', 'avif']
 * @param {number} config.sourceTypeConfig.quality - Quality (0-100)
 * @param {boolean} [config.sourceTypeConfig.storeOriginal] - Whether to store original
 * @param {Object} config.variantSpecs - Variant dimension specs
 * @returns {Promise<Object>} { metadata, blurhash, variants: { webp, avif? }, original? }
 * @throws {Error} Wrapped failure from any processing step
 */
export async function processImage(buffer, sourceType, config) {
  try {
    const sourceTypeConfig = config.sourceTypeConfig;

    if (!sourceTypeConfig) {
      throw new Error(`No configuration found for source type: ${sourceType}`);
    }

    // Step 1: Strip EXIF once; all later steps reuse the clean buffer.
    const cleanBuffer = await stripExif(buffer);

    // Step 2: Source metadata (dimensions, format, ...).
    const metadata = await getImageMetadata(cleanBuffer);

    // Step 3: Resolve this source type's variant specs (names with no
    // matching spec are ignored), then render the WebP variants.
    const variantSpecs = {};
    for (const variantName of sourceTypeConfig.variants) {
      if (config.variantSpecs[variantName]) {
        variantSpecs[variantName] = config.variantSpecs[variantName];
      }
    }

    const webpVariants = await generateVariants(cleanBuffer, variantSpecs, sourceType);

    // Step 4: Blurhash loading placeholder.
    const blurhash = await generateBlurhash(cleanBuffer);

    // Step 5: Assemble the result.
    const result = {
      metadata,
      blurhash,
      variants: {
        webp: webpVariants,
      },
    };

    // Step 6: Optionally keep a JPEG-compressed copy of the original.
    if (sourceTypeConfig.storeOriginal) {
      const originalCompressed = await compressToJpeg(cleanBuffer, sourceTypeConfig.quality);
      result.original = {
        format: 'jpeg',
        buffer: originalCompressed,
      };
    }

    // Step 7: AVIF variants if configured (small outputs only). Optional
    // chaining guards against a missing `formats` array, which previously
    // threw a TypeError.
    if (sourceTypeConfig.formats?.includes('avif')) {
      const avifVariants = {};
      for (const [variantName, webpBuffer] of Object.entries(webpVariants)) {
        try {
          // AVIF encoding is expensive; only bother when WebP is under 100KB.
          if (webpBuffer.length < 100 * 1024) {
            avifVariants[variantName] = await sharp(webpBuffer)
              .avif({ quality: sourceTypeConfig.quality })
              .toBuffer();
          }
        } catch (err) {
          // Best-effort: fall back to WebP-only for this variant.
          console.warn(`Failed to generate AVIF for ${variantName}: ${err.message}`);
        }
      }
      if (Object.keys(avifVariants).length > 0) {
        result.variants.avif = avifVariants;
      }
    }

    return result;
  } catch (error) {
    throw new Error(`Failed to process image: ${error.message}`);
  }
}