@rytass/storages-adapter-gcs 0.2.4 → 0.2.6

package/README.md CHANGED
@@ -1,5 +1,471 @@
1
- # Rytass Utils - File Storages (Google Cloud Storage)
1
+ # Rytass Utils - Google Cloud Storage Adapter
2
+
3
+ A Google Cloud Storage adapter for the Rytass file storage system. It integrates with Google Cloud Storage buckets and supports signed URLs, stream processing, and automatic content type detection.
2
4
 
3
5
  ## Features
4
6
 
5
- - [x] Google Cloud Storage bucket
7
+ - [x] Google Cloud Storage bucket integration
8
+ - [x] Signed URL generation for secure file access
9
+ - [x] Buffer and Stream file operations
10
+ - [x] Automatic content type detection
11
+ - [x] Batch file operations
12
+ - [x] File existence checking
13
+ - [x] GZIP compression support
14
+ - [x] Service account authentication
15
+ - [x] TypeScript support
16
+
17
+ ## Installation
18
+
19
+ ```bash
20
+ npm install @rytass/storages-adapter-gcs @google-cloud/storage
21
+ # or
22
+ yarn add @rytass/storages-adapter-gcs @google-cloud/storage
23
+ ```
24
+
25
+ ## Basic Usage
26
+
27
+ ### Service Configuration
28
+
29
+ ```typescript
30
+ import { StorageGCSService } from '@rytass/storages-adapter-gcs';
31
+
32
+ const storage = new StorageGCSService({
33
+ bucket: 'your-gcs-bucket-name',
34
+ projectId: 'your-gcp-project-id',
35
+ credentials: {
36
+ client_email: 'your-service-account@project.iam.gserviceaccount.com',
37
+ private_key: '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n',
38
+ },
39
+ });
40
+ ```
41
+
42
+ ### Upload Files
43
+
44
+ ```typescript
45
+ import { readFileSync, createReadStream } from 'fs';
46
+
47
+ // Upload buffer
48
+ const imageBuffer = readFileSync('photo.jpg');
49
+ const result = await storage.write(imageBuffer, {
50
+ filename: 'uploads/photo.jpg',
51
+ contentType: 'image/jpeg',
52
+ });
53
+ console.log('Uploaded:', result.key);
54
+
55
+ // Upload stream
56
+ const fileStream = createReadStream('document.pdf');
57
+ const streamResult = await storage.write(fileStream, {
58
+ filename: 'documents/document.pdf',
59
+ contentType: 'application/pdf',
60
+ });
61
+ console.log('Uploaded:', streamResult.key);
62
+
63
+ // Auto-generated filename (based on file content)
64
+ const autoResult = await storage.write(imageBuffer);
65
+ console.log('Auto-generated filename:', autoResult.key);
66
+ ```
67
+
68
+ ### Download Files
69
+
70
+ ```typescript
71
+ // Download as buffer
72
+ const fileBuffer = await storage.read('uploads/photo.jpg', { format: 'buffer' });
73
+ console.log('Downloaded buffer:', fileBuffer.length, 'bytes');
74
+
75
+ // Download as stream
76
+ const fileStream = await storage.read('uploads/photo.jpg');
77
+ fileStream.pipe(process.stdout);
78
+
79
+ // Stream to file
80
+ import { createWriteStream } from 'fs';
81
+ const downloadStream = await storage.read('documents/document.pdf');
82
+ const writeStream = createWriteStream('downloaded-document.pdf');
83
+ downloadStream.pipe(writeStream);
84
+ ```
85
+
86
+ ### Generate Signed URLs
87
+
88
+ ```typescript
89
+ // Default expiration (24 hours)
90
+ const url = await storage.url('uploads/photo.jpg');
91
+ console.log('Signed URL:', url);
92
+
93
+ // Custom expiration (1 hour from now)
94
+ const customUrl = await storage.url('uploads/photo.jpg', Date.now() + 1000 * 60 * 60);
95
+ console.log('1-hour URL:', customUrl);
96
+
97
+ // Use in HTML
98
+ const publicUrl = await storage.url('images/avatar.png');
99
+ // <img src="${publicUrl}" alt="User Avatar" />
100
+ ```
101
+
102
+ ### File Management
103
+
104
+ ```typescript
105
+ // Check if file exists
106
+ const exists = await storage.isExists('uploads/photo.jpg');
107
+ if (exists) {
108
+ console.log('File exists');
109
+ }
110
+
111
+ // Remove file
112
+ await storage.remove('uploads/old-file.jpg');
113
+ console.log('File removed');
114
+
115
+ // Batch upload
116
+ const files = [readFileSync('file1.jpg'), readFileSync('file2.png'), createReadStream('file3.pdf')];
117
+
118
+ const batchResults = await storage.batchWrite(files);
119
+ batchResults.forEach(result => {
120
+ console.log('Uploaded:', result.key);
121
+ });
122
+ ```
123
+
124
+ ## Advanced Usage
125
+
126
+ ### Environment-based Configuration
127
+
128
+ ```typescript
129
+ // .env file
130
+ // GCS_BUCKET=your-bucket-name
131
+ // GCS_PROJECT_ID=your-project-id
132
+ // GCS_CLIENT_EMAIL=your-service-account@project.iam.gserviceaccount.com
133
+ // GCS_PRIVATE_KEY="-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n"
134
+
135
+ import { StorageGCSService } from '@rytass/storages-adapter-gcs';
136
+
137
+ const storage = new StorageGCSService({
138
+ bucket: process.env.GCS_BUCKET!,
139
+ projectId: process.env.GCS_PROJECT_ID!,
140
+ credentials: {
141
+ client_email: process.env.GCS_CLIENT_EMAIL!,
142
+ private_key: process.env.GCS_PRIVATE_KEY!.replace(/\\n/g, '\n'),
143
+ },
144
+ });
145
+ ```
146
+
147
+ ### Service Account Key File
148
+
149
+ ```typescript
150
+ import { readFileSync } from 'fs';
151
+
152
+ // Load service account key from JSON file
153
+ const serviceAccount = JSON.parse(readFileSync('path/to/service-account-key.json', 'utf8'));
154
+
155
+ const storage = new StorageGCSService({
156
+ bucket: 'your-bucket-name',
157
+ projectId: serviceAccount.project_id,
158
+ credentials: {
159
+ client_email: serviceAccount.client_email,
160
+ private_key: serviceAccount.private_key,
161
+ },
162
+ });
163
+ ```
164
+
165
+ ### File Upload with Metadata
166
+
167
+ ```typescript
168
+ // Upload a file; the adapter sets object metadata automatically
169
+ const result = await storage.write(fileBuffer, {
170
+ filename: 'uploads/document.pdf',
171
+ contentType: 'application/pdf',
172
+ });
173
+
174
+ // The service automatically sets:
175
+ // - Content-Type based on file extension or provided contentType
176
+ // - GZIP compression for eligible files
177
+ // - Proper metadata for file identification
178
+ ```
179
+
180
+ ### Stream Processing for Large Files
181
+
182
+ ```typescript
183
+ import { createReadStream, createWriteStream } from 'fs';
184
+ import { pipeline } from 'stream/promises';
185
+
186
+ async function processLargeFile(inputPath: string, outputKey: string) {
187
+ // Upload large file as stream
188
+ const inputStream = createReadStream(inputPath);
189
+ const uploadResult = await storage.write(inputStream, {
190
+ filename: outputKey,
191
+ });
192
+
193
+ console.log('Large file uploaded:', uploadResult.key);
194
+
195
+ // Download large file as stream
196
+ const downloadStream = await storage.read(outputKey);
197
+ const outputStream = createWriteStream('downloaded-large-file');
198
+
199
+ await pipeline(downloadStream, outputStream);
200
+ console.log('Large file downloaded');
201
+ }
202
+
203
+ processLargeFile('large-video.mp4', 'videos/large-video.mp4').catch(console.error);
204
+ ```
205
+
206
+ ### Error Handling
207
+
208
+ ```typescript
209
+ import { StorageError, ErrorCode } from '@rytass/storages';
210
+
211
+ try {
212
+ const result = await storage.read('non-existent-file.jpg');
213
+ } catch (error) {
214
+ if (error instanceof StorageError && error.code === ErrorCode.READ_FILE_ERROR) {
215
+ console.log('File not found');
216
+ } else {
217
+ console.error('Unexpected error:', error);
218
+ }
219
+ }
220
+
221
+ // Safe file operations
222
+ async function safeFileOperation(key: string) {
223
+ try {
224
+ // Check if file exists first
225
+ if (await storage.isExists(key)) {
226
+ const content = await storage.read(key, { format: 'buffer' });
227
+ return content;
228
+ } else {
229
+ console.log('File does not exist:', key);
230
+ return null;
231
+ }
232
+ } catch (error) {
233
+ console.error('Error reading file:', error);
234
+ return null;
235
+ }
236
+ }
237
+ ```
238
+
239
+ ## Integration Examples
240
+
241
+ ### Express.js File Upload
242
+
243
+ ```typescript
244
+ import express from 'express';
245
+ import multer from 'multer';
246
+ import { StorageGCSService } from '@rytass/storages-adapter-gcs';
247
+
248
+ const app = express();
249
+ const upload = multer({ storage: multer.memoryStorage() });
250
+ const storage = new StorageGCSService({
251
+ bucket: 'your-bucket',
252
+ projectId: 'your-project',
253
+ credentials: {
254
+ client_email: process.env.GCS_CLIENT_EMAIL!,
255
+ private_key: process.env.GCS_PRIVATE_KEY!,
256
+ },
257
+ });
258
+
259
+ app.post('/upload', upload.single('file'), async (req, res) => {
260
+ try {
261
+ if (!req.file) {
262
+ return res.status(400).json({ error: 'No file uploaded' });
263
+ }
264
+
265
+ const result = await storage.write(req.file.buffer, {
266
+ filename: `uploads/${Date.now()}-${req.file.originalname}`,
267
+ contentType: req.file.mimetype,
268
+ });
269
+
270
+ const publicUrl = await storage.url(result.key);
271
+
272
+ res.json({
273
+ success: true,
274
+ key: result.key,
275
+ url: publicUrl,
276
+ });
277
+ } catch (error) {
278
+ res.status(500).json({ error: 'Upload failed' });
279
+ }
280
+ });
281
+ ```
282
+
283
+ ### NestJS Integration
284
+
285
+ ```typescript
286
+ import { Injectable } from '@nestjs/common';
287
+ import { StorageGCSService } from '@rytass/storages-adapter-gcs';
288
+
289
+ @Injectable()
290
+ export class FileService {
291
+ private storage: StorageGCSService;
292
+
293
+ constructor() {
294
+ this.storage = new StorageGCSService({
295
+ bucket: process.env.GCS_BUCKET!,
296
+ projectId: process.env.GCS_PROJECT_ID!,
297
+ credentials: {
298
+ client_email: process.env.GCS_CLIENT_EMAIL!,
299
+ private_key: process.env.GCS_PRIVATE_KEY!.replace(/\\n/g, '\n'),
300
+ },
301
+ });
302
+ }
303
+
304
+ async uploadFile(file: Buffer, filename: string): Promise<string> {
305
+ const result = await this.storage.write(file, { filename });
306
+ return this.storage.url(result.key);
307
+ }
308
+
309
+ async getFile(key: string): Promise<Buffer> {
310
+ return this.storage.read(key, { format: 'buffer' });
311
+ }
312
+
313
+ async deleteFile(key: string): Promise<void> {
314
+ await this.storage.remove(key);
315
+ }
316
+
317
+ async fileExists(key: string): Promise<boolean> {
318
+ return this.storage.isExists(key);
319
+ }
320
+ }
321
+ ```
322
+
323
+ ### Image Processing Pipeline
324
+
325
+ ```typescript
326
+ import { StorageGCSService } from '@rytass/storages-adapter-gcs';
327
+ import { ConverterManager } from '@rytass/file-converter';
328
+ import { ImageResizer } from '@rytass/file-converter-adapter-image-resizer';
329
+ import { ImageTranscoder } from '@rytass/file-converter-adapter-image-transcoder';
330
+
331
+ class ImageProcessor {
332
+ constructor(private storage: StorageGCSService) {}
336
+
337
+ async processAndUpload(
338
+ imageBuffer: Buffer,
339
+ sizes: { width: number; height: number; suffix: string }[],
340
+ ): Promise<{ [key: string]: string }> {
341
+ const results: { [key: string]: string } = {};
342
+
343
+ for (const size of sizes) {
344
+ // Create processor for this size
345
+ const processor = new ConverterManager([
346
+ new ImageResizer({
347
+ maxWidth: size.width,
348
+ maxHeight: size.height,
349
+ keepAspectRatio: true,
350
+ }),
351
+ new ImageTranscoder({
352
+ format: 'webp',
353
+ quality: 85,
354
+ }),
355
+ ]);
356
+
357
+ // Process image
358
+ const processedImage = await processor.convert<Buffer>(imageBuffer);
359
+
360
+ // Upload to GCS
361
+ const uploadResult = await this.storage.write(processedImage, {
362
+ filename: `images/processed-${size.suffix}.webp`,
363
+ contentType: 'image/webp',
364
+ });
365
+
366
+ // Generate public URL
367
+ results[size.suffix] = await this.storage.url(uploadResult.key);
368
+ }
369
+
370
+ return results;
371
+ }
372
+ }
373
+
374
+ // Usage
375
+ const processor = new ImageProcessor(storage);
376
+ const urls = await processor.processAndUpload(originalImage, [
377
+ { width: 150, height: 150, suffix: 'thumbnail' },
378
+ { width: 800, height: 600, suffix: 'medium' },
379
+ { width: 1920, height: 1080, suffix: 'large' },
380
+ ]);
381
+
382
+ console.log('Generated URLs:', urls);
383
+ ```
384
+
385
+ ## Configuration Options
386
+
387
+ ### GCSOptions
388
+
389
+ | Option | Type | Required | Description |
390
+ | -------------------------- | -------- | -------- | -------------------------------- |
391
+ | `bucket` | `string` | Yes | Google Cloud Storage bucket name |
392
+ | `projectId` | `string` | Yes | Google Cloud Project ID |
393
+ | `credentials` | `object` | Yes | Service account credentials |
394
+ | `credentials.client_email` | `string` | Yes | Service account email |
395
+ | `credentials.private_key` | `string` | Yes | Service account private key |
396
+
397
+ ### WriteFileOptions
398
+
399
+ | Option | Type | Default | Description |
400
+ | ------------- | -------- | -------------- | ------------------------------------- |
401
+ | `filename` | `string` | auto-generated | Custom filename for the uploaded file |
402
+ | `contentType` | `string` | auto-detected | MIME type of the file |
403
+
404
+ ### ReadBufferFileOptions
405
+
406
+ | Option | Type | Default | Description |
407
+ | -------- | ---------- | ------- | --------------------- |
408
+ | `format` | `'buffer'` | - | Return file as Buffer |
409
+
410
+ ### ReadStreamFileOptions
411
+
412
+ | Option | Type | Default | Description |
413
+ | -------- | ---------- | ------- | ------------------------------ |
414
+ | `format` | `'stream'` | - | Return file as Readable stream |
415
+
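+ Both read formats side by side, reusing the `storage` instance and object key from the Download Files examples above:
+
+ ```typescript
+ // { format: 'buffer' } resolves to a Buffer; { format: 'stream' } (the default) resolves to a Readable stream.
+ const asBuffer = await storage.read('uploads/photo.jpg', { format: 'buffer' });
+ const asStream = await storage.read('uploads/photo.jpg', { format: 'stream' });
+ ```
+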
416
+ ## Best Practices
417
+
418
+ ### Security
419
+
420
+ - Store service account credentials securely using environment variables
421
+ - Use IAM roles with minimal required permissions
422
+ - Regularly rotate service account keys
423
+ - Enable audit logging for storage access
424
+
425
+ ### Performance
426
+
427
+ - Use streams for large files to reduce memory usage
428
+ - Leverage GZIP compression for text-based files
429
+ - Implement proper error handling and retry logic (a minimal retry sketch follows this list)
430
+ - Use batch operations for multiple file uploads
431
+
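+ A minimal retry sketch for the error-handling bullet above. The `withRetry` helper is hypothetical (not part of the adapter); it wraps any storage call with exponential backoff:
+
+ ```typescript
+ // Hypothetical helper: retry an async operation with exponential backoff.
+ async function withRetry<T>(operation: () => Promise<T>, attempts = 3, baseDelayMs = 200): Promise<T> {
+   let lastError: unknown;
+
+   for (let attempt = 1; attempt <= attempts; attempt += 1) {
+     try {
+       return await operation();
+     } catch (error) {
+       lastError = error;
+
+       if (attempt < attempts) {
+         // Back off: 200 ms, 400 ms, 800 ms, ...
+         await new Promise(resolve => setTimeout(resolve, baseDelayMs * 2 ** (attempt - 1)));
+       }
+     }
+   }
+
+   throw lastError;
+ }
+
+ // Usage with the storage instance from Basic Usage
+ const photo = await withRetry(() => storage.read('uploads/photo.jpg', { format: 'buffer' }));
+ ```
+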
432
+ ### Cost Optimization
433
+
434
+ - Choose appropriate storage classes for your use case
435
+ - Set up lifecycle policies for automatic data management (see the bucket lifecycle sketch after this list)
436
+ - Monitor storage usage and optimize file sizes
437
+ - Use signed URLs to reduce bandwidth costs
438
+
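+ Lifecycle policies are configured on the bucket itself rather than through this adapter. A sketch using the `@google-cloud/storage` client directly (installed alongside the adapter); the 30/365-day thresholds are illustrative assumptions:
+
+ ```typescript
+ import { Storage } from '@google-cloud/storage';
+
+ const gcs = new Storage({
+   projectId: process.env.GCS_PROJECT_ID!,
+   credentials: {
+     client_email: process.env.GCS_CLIENT_EMAIL!,
+     private_key: process.env.GCS_PRIVATE_KEY!.replace(/\\n/g, '\n'),
+   },
+ });
+
+ // Move objects to Nearline after 30 days and delete them after a year.
+ await gcs.bucket(process.env.GCS_BUCKET!).setMetadata({
+   lifecycle: {
+     rule: [
+       { action: { type: 'SetStorageClass', storageClass: 'NEARLINE' }, condition: { age: 30 } },
+       { action: { type: 'Delete' }, condition: { age: 365 } },
+     ],
+   },
+ });
+ ```
+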
439
+ ### File Organization
440
+
441
+ - Use consistent naming conventions (a key-builder sketch follows this list)
442
+ - Organize files in logical folder structures
443
+ - Implement proper versioning strategies
444
+ - Consider using metadata for file categorization
445
+
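+ One possible naming convention, sketched as a hypothetical key builder (the adapter does not enforce any particular scheme):
+
+ ```typescript
+ import { randomUUID } from 'crypto';
+
+ // Hypothetical helper: build keys like "uploads/2024/06/<uuid>-photo.jpg"
+ function buildObjectKey(category: string, originalName: string, when = new Date()): string {
+   const year = when.getUTCFullYear();
+   const month = String(when.getUTCMonth() + 1).padStart(2, '0');
+
+   return `${category}/${year}/${month}/${randomUUID()}-${originalName}`;
+ }
+
+ const key = buildObjectKey('uploads', 'photo.jpg');
+ await storage.write(imageBuffer, { filename: key, contentType: 'image/jpeg' });
+ ```
+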
446
+ ## Error Handling
447
+
448
+ The adapter throws `StorageError` instances for various error conditions:
449
+
450
+ ```typescript
451
+ import { StorageError, ErrorCode } from '@rytass/storages';
452
+
453
+ // Common error scenarios
454
+ try {
455
+ await storage.read('non-existent-file.jpg');
456
+ } catch (error) {
457
+ if (error instanceof StorageError) {
458
+ switch (error.code) {
459
+ case ErrorCode.READ_FILE_ERROR:
460
+ console.log('File not found or inaccessible');
461
+ break;
462
+ default:
463
+ console.log('Storage operation failed:', error.message);
464
+ }
465
+ }
466
+ }
467
+ ```
468
+
469
+ ## License
470
+
471
+ MIT
package/index.cjs.js CHANGED
@@ -33,7 +33,7 @@ class StorageGCSService extends storages.Storage {
33
33
  }
34
34
  return file.createReadStream();
35
35
  } catch (ex) {
36
- if (/No such object/.test(ex.message)) {
36
+ if (ex && typeof ex === 'object' && 'message' in ex && /No such object/.test(ex.message)) {
37
37
  throw new storages.StorageError(storages.ErrorCode.READ_FILE_ERROR, 'File not found');
38
38
  }
39
39
  throw ex;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@rytass/storages-adapter-gcs",
3
- "version": "0.2.4",
3
+ "version": "0.2.6",
4
4
  "description": "Google Cloud Storage Adapter for @rytass/storages",
5
5
  "keywords": [
6
6
  "gcp",
@@ -21,9 +21,9 @@
21
21
  "url": "https://github.com/Rytass/Utils/issues"
22
22
  },
23
23
  "dependencies": {
24
- "@google-cloud/storage": "^7.14.0",
25
- "@rytass/storages": "^0.2.1",
26
- "uuid": "^11.0.3"
24
+ "@google-cloud/storage": "^7.17.0",
25
+ "@rytass/storages": "^0.2.3",
26
+ "uuid": "^11.1.0"
27
27
  },
28
28
  "devDependencies": {
29
29
  "@types/uuid": "^10.0.0"
@@ -31,7 +31,7 @@ class StorageGCSService extends Storage {
31
31
  }
32
32
  return file.createReadStream();
33
33
  } catch (ex) {
34
- if (/No such object/.test(ex.message)) {
34
+ if (ex && typeof ex === 'object' && 'message' in ex && /No such object/.test(ex.message)) {
35
35
  throw new StorageError(ErrorCode.READ_FILE_ERROR, 'File not found');
36
36
  }
37
37
  throw ex;
package/typings.d.ts CHANGED
@@ -6,4 +6,5 @@ export interface GCSOptions extends StorageOptions {
6
6
  client_email: string;
7
7
  private_key: string;
8
8
  };
9
+ [key: string]: unknown;
9
10
  }