@xenterprises/fastify-ximagepipeline 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,264 @@
1
+ // src/workers/processor.js
2
+ import { downloadFromS3, deleteFromS3, uploadToS3, getPublicUrl, batchDeleteFromS3 } from "../services/s3.js";
3
+ import { stripExif, generateVariants, generateBlurhash, getImageMetadata, compressToJpeg } from "../utils/image.js";
4
+ import { getVariantPresets } from "../xImagePipeline.js";
5
+
6
+ /**
7
+ * Set up the media processor worker.
8
+ * Polls the job queue and processes media files.
9
+ */
10
+ export function setupWorker(fastify, context, config) {
11
+ // Worker state
12
+ const workerId = `worker-${Date.now()}-${Math.random().toString(36).substring(7)}`;
13
+ let isRunning = true;
14
+
15
+ console.info(` 📊 Media Worker initialized (${workerId})`);
16
+
17
+ // Start polling
18
+ const pollInterval = setInterval(() => {
19
+ if (isRunning) {
20
+ processNextJob(fastify, context, config, workerId).catch((err) => {
21
+ console.error("Worker error:", err.message);
22
+ });
23
+ }
24
+ }, config.pollInterval);
25
+
26
+ // Cleanup on fastify close
27
+ fastify.addHook("onClose", async () => {
28
+ isRunning = false;
29
+ clearInterval(pollInterval);
30
+ console.info(" 📊 Media Worker stopped");
31
+ });
32
+
33
+ return { workerId, stop: () => { isRunning = false; } };
34
+ }
35
+
36
+ /**
37
+ * Process next job in queue
38
+ */
39
+ async function processNextJob(fastify, context, config, workerId) {
40
+ try {
41
+ // Find next available job with locking
42
+ const job = await context.db.mediaQueue.findFirst({
43
+ where: {
44
+ status: "PENDING",
45
+ lockedAt: null,
46
+ attempts: {
47
+ lt: config.maxAttempts,
48
+ },
49
+ },
50
+ orderBy: {
51
+ createdAt: "asc",
52
+ },
53
+ });
54
+
55
+ if (!job) {
56
+ return; // No jobs available
57
+ }
58
+
59
+ // Lock the job
60
+ const lockExpiry = new Date(Date.now() + config.lockTimeout);
61
+ const locked = await context.db.mediaQueue.updateMany({
62
+ where: {
63
+ id: job.id,
64
+ lockedAt: null, // Ensure no one else locked it
65
+ },
66
+ data: {
67
+ lockedAt: new Date(),
68
+ lockedBy: workerId,
69
+ status: "PROCESSING",
70
+ },
71
+ });
72
+
73
+ if (locked.count === 0) {
74
+ return; // Job was locked by another worker
75
+ }
76
+
77
+ console.info(`Processing job ${job.id}...`);
78
+
79
+ try {
80
+ // Download file from staging
81
+ const buffer = await downloadFromS3(
82
+ context.s3Client,
83
+ context.r2Config.bucket,
84
+ job.stagingKey
85
+ );
86
+
87
+ // Step 1: Strip EXIF
88
+ const cleanBuffer = await stripExif(buffer);
89
+
90
+ // Step 2: Get image metadata
91
+ const metadata = await getImageMetadata(cleanBuffer);
92
+
93
+ // Step 3: Content moderation (if configured)
94
+ if (context.moderation) {
95
+ try {
96
+ const moderationResult = await moderateImage(cleanBuffer, context.moderation);
97
+
98
+ if (!moderationResult.passed) {
99
+ // Content rejected
100
+ await context.db.mediaQueue.update({
101
+ where: { id: job.id },
102
+ data: {
103
+ status: "REJECTED",
104
+ moderationResult: "REJECTED",
105
+ moderationDetails: moderationResult,
106
+ lockedAt: null,
107
+ lockedBy: null,
108
+ },
109
+ });
110
+
111
+ // Clean up staging
112
+ await deleteFromS3(context.s3Client, context.r2Config.bucket, job.stagingKey);
113
+ console.info(`Job ${job.id} rejected by moderation`);
114
+ return;
115
+ }
116
+ } catch (err) {
117
+ console.error(`Moderation failed: ${err.message}`);
118
+ throw err;
119
+ }
120
+ }
121
+
122
+ // Step 4: Generate variants
123
+ const variantPresets = getVariantPresets();
124
+ const preset = variantPresets[job.sourceType] || [];
125
+ const variantSpecs = {};
126
+ for (const variantName of preset) {
127
+ if (context.variants[variantName]) {
128
+ variantSpecs[variantName] = context.variants[variantName];
129
+ }
130
+ }
131
+
132
+ const variants = await generateVariants(cleanBuffer, variantSpecs, job.sourceType);
133
+
134
+ // Step 5: Generate blurhash
135
+ const blurhash = await generateBlurhash(cleanBuffer);
136
+
137
+ // Step 6: Upload to R2
138
+ const mediaId = `media-${Date.now()}-${Math.random().toString(36).substring(7)}`;
139
+ const mediaPath = `${context.mediaPath}/${job.sourceType}/${job.sourceId}/${mediaId}`;
140
+ const originalPath = `${context.originalsPath}/${job.sourceType}/${job.sourceId}/${mediaId}`;
141
+
142
+ const urls = {};
143
+
144
+ // Upload variants
145
+ for (const [variantName, variantBuffer] of Object.entries(variants)) {
146
+ const variantKey = `${mediaPath}/${variantName}.webp`;
147
+ await uploadToS3(context.s3Client, context.r2Config.bucket, variantKey, variantBuffer, {
148
+ contentType: "image/webp",
149
+ });
150
+ urls[variantName] = getPublicUrl(context.r2Config, variantKey);
151
+ }
152
+
153
+ // Upload compressed original (JPEG for space efficiency)
154
+ let originalUrl = null;
155
+ if (context.sourceTypes[job.sourceType]?.storeOriginal) { // per-source-type setting (see getDefaultSourceTypes)
156
+ const compressedOriginal = await compressToJpeg(cleanBuffer, context.sourceTypes[job.sourceType]?.quality || 85);
157
+ const originalKey = `${originalPath}/original.jpg`;
158
+ await uploadToS3(context.s3Client, context.r2Config.bucket, originalKey, compressedOriginal, {
159
+ contentType: "image/jpeg",
160
+ });
161
+ originalUrl = getPublicUrl(context.r2Config, originalKey);
162
+ }
163
+
164
+ // Step 7: Create Media record
165
+ const media = await context.db.media.create({
166
+ data: {
167
+ sourceType: job.sourceType,
168
+ sourceId: job.sourceId,
169
+ urls,
170
+ originalUrl,
171
+ width: metadata.width,
172
+ height: metadata.height,
173
+ format: metadata.format,
174
+ aspectRatio: `${metadata.width}:${metadata.height}`,
175
+ blurhash,
176
+ originalFilename: job.originalFilename,
177
+ mimeType: job.mimeType,
178
+ fileSize: job.fileSize,
179
+ },
180
+ });
181
+
182
+ // Step 8: Update job
183
+ await context.db.mediaQueue.update({
184
+ where: { id: job.id },
185
+ data: {
186
+ status: "COMPLETE",
187
+ mediaId: media.id,
188
+ lockedAt: null,
189
+ lockedBy: null,
190
+ },
191
+ });
192
+
193
+ // Step 9: Clean up staging
194
+ await deleteFromS3(context.s3Client, context.r2Config.bucket, job.stagingKey);
195
+
196
+ console.info(`Job ${job.id} completed successfully`);
197
+ } catch (error) {
198
+ console.error(`Job ${job.id} processing error: ${error.message}`);
199
+
200
+ // Update job with error
201
+ const nextAttempt = job.attempts + 1;
202
+ const shouldRetry = nextAttempt < config.maxAttempts;
203
+
204
+ await context.db.mediaQueue.update({
205
+ where: { id: job.id },
206
+ data: {
207
+ status: shouldRetry ? "PENDING" : "FAILED",
208
+ errorMsg: error.message,
209
+ attempts: nextAttempt,
210
+ lockedAt: null,
211
+ lockedBy: null,
212
+ },
213
+ });
214
+
215
+ if (!shouldRetry) {
216
+ // Clean up staging after max retries
217
+ try {
218
+ await deleteFromS3(context.s3Client, context.r2Config.bucket, job.stagingKey);
219
+ } catch (cleanupErr) {
220
+ console.error(`Failed to cleanup staging: ${cleanupErr.message}`);
221
+ }
222
+ }
223
+ }
224
+ } catch (error) {
225
+ console.error("Worker process error:", error.message);
226
+ }
227
+ }
228
+
229
+ /**
230
+ * Moderate image content
231
+ * Currently a stub - implement with actual API (Rekognition, Vision, etc.)
232
+ */
233
+ async function moderateImage(buffer, moderationConfig) {
234
+ // TODO: Implement actual moderation API call
235
+ // For now, always approve
236
+ return {
237
+ passed: true,
238
+ flags: [],
239
+ confidence: {},
240
+ };
241
+ }
242
+
243
+ /**
244
+ * Recover stale locks (jobs locked > lockTimeout)
245
+ */
246
+ export async function recoverStaleLocks(db, lockTimeout) {
247
+ const staleThreshold = new Date(Date.now() - lockTimeout);
248
+
249
+ const recovered = await db.mediaQueue.updateMany({
250
+ where: {
251
+ lockedAt: {
252
+ lt: staleThreshold,
253
+ },
254
+ status: "PROCESSING",
255
+ },
256
+ data: {
257
+ lockedAt: null,
258
+ lockedBy: null,
259
+ status: "PENDING",
260
+ },
261
+ });
262
+
263
+ return recovered.count;
264
+ }
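
The moderateImage helper above is a stub that always approves. As a reference point only, here is a minimal sketch of what a real implementation could look like with AWS Rekognition's DetectModerationLabels API; the moderationConfig fields used here (region, minConfidence) are illustrative assumptions, not options defined by this package. Google Cloud Vision's SafeSearch annotation could be mapped onto the same return shape.

// Hypothetical replacement for the moderateImage stub, using AWS Rekognition.
// Requires @aws-sdk/client-rekognition; moderationConfig.region and
// moderationConfig.minConfidence are assumed fields, not part of this package.
import { RekognitionClient, DetectModerationLabelsCommand } from "@aws-sdk/client-rekognition";

async function moderateImageWithRekognition(buffer, moderationConfig) {
  const client = new RekognitionClient({ region: moderationConfig.region });

  const result = await client.send(
    new DetectModerationLabelsCommand({
      Image: { Bytes: buffer },
      MinConfidence: moderationConfig.minConfidence ?? 80,
    })
  );

  const labels = result.ModerationLabels ?? [];

  // Mirror the stub's return shape so the worker can consume it unchanged.
  return {
    passed: labels.length === 0,
    flags: labels.map((label) => label.Name),
    confidence: Object.fromEntries(labels.map((label) => [label.Name, label.Confidence])),
  };
}
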
@@ -0,0 +1,164 @@
1
+ // src/xImagePipeline.js
2
+ import fp from "fastify-plugin";
3
+ import { initializeS3Client } from "./services/s3.js";
4
+ import { setupUploadRoute } from "./routes/upload.js";
5
+ import { setupStatusRoute } from "./routes/status.js";
6
+ import { setupWorker } from "./workers/processor.js";
7
+
8
+ /**
9
+ * xImagePipeline Plugin for Fastify
10
+ * Handles image uploads with EXIF stripping, moderation, variant generation, and R2 storage
11
+ *
12
+ * @param {Object} fastify - Fastify instance
13
+ * @param {Object} options - Plugin options
14
+ * @param {Object} options.r2 - R2 configuration (endpoint, accessKeyId, secretAccessKey, bucket)
15
+ * @param {Object} options.db - Database instance (Prisma client or similar)
16
+ * @param {Object} options.moderation - Moderation config (provider, apiKey, etc.)
17
+ * @param {Object} options.variants - Variant size definitions (optional, uses defaults)
18
+ * @param {Object} options.sourceTypes - Source type configurations (optional, uses defaults)
19
+ * Each source type defines: variants[], formats[], quality, storeOriginal
20
+ * @param {Object} options.worker - Worker configuration (enabled, pollInterval, maxAttempts, lockTimeout)
+ * @param {number} options.maxFileSize - Maximum upload size in bytes (optional, default 50 MB)
+ * @param {string[]} options.allowedMimeTypes - Allowed upload MIME types (optional, defaults to jpeg/png/webp/gif)
21
+ */
22
+ async function xImagePipeline(fastify, options) {
23
+ // Validate required configuration
24
+ if (!options.r2) {
25
+ throw new Error("R2 configuration is required");
26
+ }
27
+ if (!options.db) {
28
+ throw new Error("Database instance (Prisma client) is required");
29
+ }
30
+
31
+ console.info("\n 🎬 Starting xImagePipeline...\n");
32
+
33
+ // Initialize R2 S3 client
34
+ const s3Client = initializeS3Client(options.r2);
35
+
36
+ // Store configuration in plugin context
37
+ const context = {
38
+ s3Client,
39
+ db: options.db,
40
+ moderation: options.moderation || null,
41
+ variants: options.variants || getDefaultVariants(),
42
+ sourceTypes: options.sourceTypes || getDefaultSourceTypes(),
43
+ r2Config: options.r2,
44
+ maxFileSize: options.maxFileSize || 50 * 1024 * 1024, // 50MB default
45
+ allowedMimeTypes: options.allowedMimeTypes || [
46
+ "image/jpeg",
47
+ "image/png",
48
+ "image/webp",
49
+ "image/gif",
50
+ ],
51
+ stagingPath: options.stagingPath || "staging",
52
+ mediaPath: options.mediaPath || "media",
53
+ originalsPath: options.originalsPath || "originals",
54
+ };
55
+
56
+ // Decorate fastify instance with image pipeline utilities
57
+ fastify.decorate("xImagePipeline", {
58
+ upload: async (file, metadata) => {
59
+ // Placeholder - implemented in upload route
60
+ },
61
+ getStatus: async (jobId) => {
62
+ // Placeholder - implemented in status route
63
+ },
64
+ deleteMedia: async (mediaId) => {
65
+ // Placeholder - implemented as utility
66
+ },
67
+ });
68
+
69
+ // Register routes
70
+ await setupUploadRoute(fastify, context);
71
+ await setupStatusRoute(fastify, context);
72
+
73
+ // Start worker if enabled
74
+ if (options.worker?.enabled !== false) {
75
+ const workerConfig = {
76
+ pollInterval: options.worker?.pollInterval || 5000,
77
+ maxAttempts: options.worker?.maxAttempts || 3,
78
+ lockTimeout: options.worker?.lockTimeout || 300000, // 5 minutes
79
+ };
80
+
81
+ try {
82
+ setupWorker(fastify, context, workerConfig);
83
+ console.info(" ✅ Image Pipeline Worker Started");
84
+ } catch (err) {
85
+ console.error(" ❌ Failed to start image pipeline worker:", err.message);
86
+ if (options.worker?.failOnError !== false) {
87
+ throw err;
88
+ }
89
+ }
90
+ }
91
+
92
+ console.info("\n 🎬 xImagePipeline Ready!\n");
93
+ }
94
+
95
+ /**
96
+ * Get default variant specifications
97
+ */
98
+ function getDefaultVariants() {
99
+ return {
100
+ xs: { width: 80, height: 80, fit: "cover" },
101
+ sm: { width: 200, height: 200, fit: "cover" },
102
+ md: { width: 600, height: null, fit: "inside" },
103
+ lg: { width: 1200, height: null, fit: "inside" },
104
+ xl: { width: 1920, height: null, fit: "inside" },
105
+ "2xl": { width: 2560, height: null, fit: "inside" },
106
+ };
107
+ }
108
+
109
+ /**
110
+ * Get default source type configurations
111
+ * Each source type can have different processing settings
112
+ */
113
+ function getDefaultSourceTypes() {
114
+ return {
115
+ avatar: {
116
+ variants: ["xs", "sm"],
117
+ formats: ["webp"],
118
+ quality: 85,
119
+ storeOriginal: true,
120
+ },
121
+ member_photo: {
122
+ variants: ["xs", "sm", "md"],
123
+ formats: ["webp"],
124
+ quality: 85,
125
+ storeOriginal: true,
126
+ },
127
+ gallery: {
128
+ variants: ["md", "lg", "xl"],
129
+ formats: ["webp"],
130
+ quality: 85,
131
+ storeOriginal: false,
132
+ },
133
+ hero: {
134
+ variants: ["lg", "xl", "2xl"],
135
+ formats: ["webp"],
136
+ quality: 80,
137
+ storeOriginal: false,
138
+ },
139
+ content: {
140
+ variants: ["md", "lg"],
141
+ formats: ["webp"],
142
+ quality: 85,
143
+ storeOriginal: true,
144
+ },
145
+ };
146
+ }
147
+
148
+ /**
149
+ * Get variant presets for different source types
150
+ */
151
+ export function getVariantPresets() {
152
+ return {
153
+ avatar: ["xs", "sm"],
154
+ member_photo: ["xs", "sm", "md"],
155
+ gallery: ["md", "lg", "xl"],
156
+ hero: ["lg", "xl", "2xl"],
157
+ content: ["md", "lg"],
158
+ };
159
+ }
160
+
161
+ export default fp(xImagePipeline, {
162
+ name: "xImagePipeline",
163
+ fastify: "5.x",
164
+ });
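
Pulling the documented options together, a minimal registration sketch could look like the following. It assumes the package's main entry re-exports this file, that @fastify/multipart is registered first (as the test suite below does), and that a Prisma client exposes the mediaQueue and media models the worker expects; the environment variable names are placeholders.

// app.js (sketch; assumes Node ESM with top-level await)
import Fastify from "fastify";
import multipart from "@fastify/multipart";
import { PrismaClient } from "@prisma/client";
import xImagePipeline from "@xenterprises/fastify-ximagepipeline";

const fastify = Fastify({ logger: true });
const db = new PrismaClient();

await fastify.register(multipart);
await fastify.register(xImagePipeline, {
  r2: {
    endpoint: process.env.R2_ENDPOINT, // e.g. https://<account-id>.r2.cloudflarestorage.com
    region: "auto",
    accessKeyId: process.env.R2_ACCESS_KEY_ID,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
    bucket: process.env.R2_BUCKET,
  },
  db,
  worker: { enabled: true, pollInterval: 5000, maxAttempts: 3 },
});

await fastify.listen({ port: 3000 });

Omitting the worker block gives the same behaviour, since the plugin only skips the worker when worker.enabled is explicitly false.
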
@@ -0,0 +1,196 @@
1
+ // test/xImagePipeline.test.js
2
+ import { test } from "node:test";
3
+ import assert from "node:assert";
4
+ import Fastify from "fastify";
5
+ import multipart from "@fastify/multipart";
6
+ import xImagePipeline, { getVariantPresets } from "../src/xImagePipeline.js";
7
+
8
+ // Mock database
9
+ const mockDb = {
10
+ mediaQueue: {
11
+ create: async (data) => ({
12
+ id: "test-job-" + Date.now(),
13
+ ...data.data,
14
+ }),
15
+ findUnique: async (query) => ({
16
+ id: query.where.id,
17
+ status: "COMPLETE",
18
+ sourceType: "avatar",
19
+ sourceId: "user123",
20
+ }),
21
+ findFirst: async () => null,
22
+ update: async (query) => ({ count: 1 }),
23
+ updateMany: async () => ({ count: 0 }),
24
+ },
25
+ media: {
26
+ create: async (data) => ({
27
+ id: "media-123",
28
+ ...data.data,
29
+ }),
30
+ },
31
+ };
32
+
33
+ // Mock R2 config
34
+ const mockR2Config = {
35
+ endpoint: "https://example.r2.cloudflarestorage.com",
36
+ region: "auto",
37
+ accessKeyId: "mock-key",
38
+ secretAccessKey: "mock-secret",
39
+ bucket: "test-bucket",
40
+ };
41
+
42
+ test("xImagePipeline Plugin - registers successfully with required config", async () => {
43
+ const fastify = Fastify({ logger: false });
44
+ try {
45
+ await fastify.register(multipart);
46
+ await fastify.register(xImagePipeline, {
47
+ r2: mockR2Config,
48
+ db: mockDb,
49
+ });
50
+ assert.ok(true, "Plugin registered successfully");
51
+ } finally {
52
+ try {
53
+ await fastify.close();
54
+ } catch {
55
+ // Ignore
56
+ }
57
+ }
58
+ });
59
+
60
+ test("xImagePipeline Plugin - throws error without R2 config", async () => {
61
+ const fastify = Fastify({ logger: false });
62
+ try {
63
+ await assert.rejects(
64
+ async () => {
65
+ await fastify.register(multipart);
66
+ await fastify.register(xImagePipeline, {
67
+ db: mockDb,
68
+ });
69
+ },
70
+ /R2 configuration is required/
71
+ );
72
+ } finally {
73
+ try {
74
+ await fastify.close();
75
+ } catch {
76
+ // Ignore
77
+ }
78
+ }
79
+ });
80
+
81
+ test("xImagePipeline Plugin - throws error without database", async () => {
82
+ const fastify = Fastify({ logger: false });
83
+ try {
84
+ await assert.rejects(
85
+ async () => {
86
+ await fastify.register(multipart);
87
+ await fastify.register(xImagePipeline, {
88
+ r2: mockR2Config,
89
+ });
90
+ },
91
+ /Database instance/
92
+ );
93
+ } finally {
94
+ try {
95
+ await fastify.close();
96
+ } catch {
97
+ // Ignore
98
+ }
99
+ }
100
+ });
101
+
102
+ test("xImagePipeline Routes - GET /media/status/:jobId returns job status", async () => {
103
+ const fastify = Fastify({ logger: false });
104
+ try {
105
+ await fastify.register(multipart);
106
+ await fastify.register(xImagePipeline, {
107
+ r2: mockR2Config,
108
+ db: mockDb,
109
+ });
110
+
111
+ const response = await fastify.inject({
112
+ method: "GET",
113
+ url: "/image-pipeline/status/test-job-123",
114
+ });
115
+
116
+ assert.equal(response.statusCode, 200);
117
+ const body = JSON.parse(response.payload);
118
+ assert.ok(body.jobId, "Response should have jobId");
119
+ assert.ok(body.status, "Response should have status");
120
+ } finally {
121
+ try {
122
+ await fastify.close();
123
+ } catch {
124
+ // Ignore
125
+ }
126
+ }
127
+ });
128
+
129
+ test("xImagePipeline Configuration - accepts custom variants config", async () => {
130
+ const fastify = Fastify({ logger: false });
131
+ const customConfig = {
132
+ r2: mockR2Config,
133
+ db: mockDb,
134
+ variants: {
135
+ xs: { width: 100, height: 100, fit: "cover" },
136
+ lg: { width: 800, height: 600, fit: "inside" },
137
+ },
140
+ };
141
+ try {
142
+ await fastify.register(multipart);
143
+ await fastify.register(xImagePipeline, customConfig);
144
+ assert.ok(true, "Plugin registered with custom variants");
145
+ } finally {
146
+ try {
147
+ await fastify.close();
148
+ } catch {
149
+ // Ignore
150
+ }
151
+ }
152
+ });
153
+
154
+ test("xImagePipeline Configuration - accepts worker configuration", async () => {
155
+ const fastify = Fastify({ logger: false });
156
+ const configWithWorker = {
157
+ r2: mockR2Config,
158
+ db: mockDb,
159
+ worker: {
160
+ enabled: true,
161
+ pollInterval: 10000,
162
+ maxAttempts: 5,
163
+ },
164
+ };
165
+ try {
166
+ await fastify.register(multipart);
167
+ await fastify.register(xImagePipeline, configWithWorker);
168
+ assert.ok(true, "Plugin registered with worker config");
169
+ } finally {
170
+ try {
171
+ await fastify.close();
172
+ } catch {
173
+ // Ignore
174
+ }
175
+ }
176
+ });
177
+
178
+ test("xImagePipeline Variant Presets - has predefined variant presets", async () => {
179
+ const fastify = Fastify({ logger: false });
180
+ try {
181
+ await fastify.register(multipart);
182
+ await fastify.register(xImagePipeline, {
183
+ r2: mockR2Config,
184
+ db: mockDb,
185
+ });
186
+
187
+ // Verify the exported presets cover the documented source types
188
+ const presets = getVariantPresets();
+ assert.deepStrictEqual(presets.avatar, ["xs", "sm"], "avatar preset should match defaults");
+ assert.deepStrictEqual(presets.hero, ["lg", "xl", "2xl"], "hero preset should match defaults");
189
+ } finally {
190
+ try {
191
+ await fastify.close();
192
+ } catch {
193
+ // Ignore
194
+ }
195
+ }
196
+ });
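
The tests above cover registration and the status route but not the exported recoverStaleLocks helper. A minimal sketch in the same node:test style, using an in-memory stand-in for db.mediaQueue rather than a real database, might look like this.

// test/recoverStaleLocks.test.js (sketch)
import { test } from "node:test";
import assert from "node:assert";
import { recoverStaleLocks } from "../src/workers/processor.js";

test("recoverStaleLocks resets stale PROCESSING jobs to PENDING", async () => {
  const calls = [];
  const fakeDb = {
    mediaQueue: {
      updateMany: async (query) => {
        calls.push(query);
        return { count: 2 }; // pretend two stale jobs matched
      },
    },
  };

  const recovered = await recoverStaleLocks(fakeDb, 300000);

  assert.equal(recovered, 2);
  assert.equal(calls[0].where.status, "PROCESSING");
  assert.equal(calls[0].data.status, "PENDING");
  assert.equal(calls[0].data.lockedAt, null);
  assert.equal(calls[0].data.lockedBy, null);
});
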