@devbro/neko-storage 0.1.3 → 0.1.5

This diff represents the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (42)
  1. package/README.md +104 -7
  2. package/dist/Storage.d.mts +14 -13
  3. package/dist/Storage.mjs +30 -8
  4. package/dist/Storage.mjs.map +1 -1
  5. package/dist/StorageProviderFactory.d.mts +15 -0
  6. package/dist/StorageProviderFactory.mjs +19 -0
  7. package/dist/StorageProviderFactory.mjs.map +1 -0
  8. package/dist/StorageProviderInterface.d.mts +18 -0
  9. package/dist/StorageProviderInterface.mjs +1 -0
  10. package/dist/StorageProviderInterface.mjs.map +1 -0
  11. package/dist/index.d.mts +11 -4
  12. package/dist/index.js +609 -221
  13. package/dist/index.js.map +1 -1
  14. package/dist/index.mjs +8 -3
  15. package/dist/index.mjs.map +1 -1
  16. package/dist/{AWSS3Storage.d.mts → providers/AWSS3StorageProvider.d.mts} +7 -7
  17. package/dist/providers/AWSS3StorageProvider.mjs +108 -0
  18. package/dist/providers/AWSS3StorageProvider.mjs.map +1 -0
  19. package/dist/providers/AzureBlobStorageProvider.d.mts +23 -0
  20. package/dist/providers/AzureBlobStorageProvider.mjs +116 -0
  21. package/dist/providers/AzureBlobStorageProvider.mjs.map +1 -0
  22. package/dist/providers/FTPStorageProvider.d.mts +22 -0
  23. package/dist/providers/FTPStorageProvider.mjs +124 -0
  24. package/dist/providers/FTPStorageProvider.mjs.map +1 -0
  25. package/dist/providers/GCPStorageProvider.d.mts +22 -0
  26. package/dist/providers/GCPStorageProvider.mjs +82 -0
  27. package/dist/providers/GCPStorageProvider.mjs.map +1 -0
  28. package/dist/{LocalStorage.d.mts → providers/LocalStorageProvider.d.mts} +7 -6
  29. package/dist/providers/LocalStorageProvider.mjs +84 -0
  30. package/dist/providers/LocalStorageProvider.mjs.map +1 -0
  31. package/dist/providers/SFTPStorageProvider.d.mts +22 -0
  32. package/dist/providers/SFTPStorageProvider.mjs +124 -0
  33. package/dist/providers/SFTPStorageProvider.mjs.map +1 -0
  34. package/dist/types.d.mts +32 -6
  35. package/package.json +10 -6
  36. package/dist/AWSS3Storage.mjs +0 -154
  37. package/dist/AWSS3Storage.mjs.map +0 -1
  38. package/dist/LocalStorage.mjs +0 -129
  39. package/dist/LocalStorage.mjs.map +0 -1
  40. package/dist/StorageFactory.d.mts +0 -13
  41. package/dist/StorageFactory.mjs +0 -24
  42. package/dist/StorageFactory.mjs.map +0 -1
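
Taken together, these changes replace the old config-driven `StorageFactory` with dedicated provider classes, a `StorageProviderFactory` registry (backed by `FlexibleFactory` from `@devbro/neko-helper`), and a `Storage` wrapper that delegates to whichever provider it is given. The sketch below is inferred from the compiled `dist/index.js` shown further down, not from the package README; the registry key `"local"`, the callback-style factory argument, and the exact config field types are assumptions.

```ts
import {
  Storage,
  StorageProviderFactory,
  LocalStorageProvider,
} from "@devbro/neko-storage";

async function main() {
  // Register a provider under a key of your choosing ("local" is an assumed key;
  // whether register() expects a callback or a class depends on FlexibleFactory).
  StorageProviderFactory.register(
    "local",
    (config: { basePath: string }) => new LocalStorageProvider(config),
  );

  // create() forwards its extra arguments to the registered factory.
  const provider = StorageProviderFactory.create("local", { basePath: "/tmp/neko-demo" });

  // Storage delegates exists/put/getJson/getString/getBuffer/getStream/delete/metadata
  // to the provider it wraps.
  const storage = new Storage(provider);

  await storage.put("notes/hello.json", { message: "hi" });
  console.log(await storage.getJson("notes/hello.json"));
  console.log(await storage.metadata("notes/hello.json"));
}

main().catch(console.error);
```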
package/dist/index.js CHANGED
@@ -27,298 +27,686 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  mod
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- var __async = (__this, __arguments, generator) => {
- return new Promise((resolve, reject) => {
- var fulfilled = (value) => {
- try {
- step(generator.next(value));
- } catch (e) {
- reject(e);
- }
- };
- var rejected = (value) => {
- try {
- step(generator.throw(value));
- } catch (e) {
- reject(e);
- }
- };
- var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
- step((generator = generator.apply(__this, __arguments)).next());
- });
- };

  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
- AWSS3Storage: () => AWSS3Storage,
- LocalStorage: () => LocalStorage,
+ AWSS3StorageProvider: () => AWSS3StorageProvider,
+ AzureBlobStorageProvider: () => AzureBlobStorageProvider,
+ FTPStorageProvider: () => FTPStorageProvider,
+ GCPStorageProvider: () => GCPStorageProvider,
+ LocalStorageProvider: () => LocalStorageProvider,
+ SFTPStorageProvider: () => SFTPStorageProvider,
  Storage: () => Storage,
- StorageFactory: () => StorageFactory
+ StorageProviderFactory: () => StorageProviderFactory
  });
  module.exports = __toCommonJS(index_exports);

  // src/Storage.mts
- var _Storage = class _Storage {
- constructor(config) {
- this.config = config;
+ var Storage = class {
+ constructor(provider) {
+ this.provider = provider;
+ }
+ static {
+ __name(this, "Storage");
  }
- static canHandle(config) {
- throw new Error("Method not implemented.");
+ exists(path2) {
+ return this.provider.exists(path2);
+ }
+ put(path2, content) {
+ return this.provider.put(path2, content);
+ }
+ getJson(path2) {
+ return this.provider.getJson(path2);
+ }
+ getString(path2) {
+ return this.provider.getString(path2);
+ }
+ getBuffer(path2) {
+ return this.provider.getBuffer(path2);
+ }
+ getStream(path2) {
+ return this.provider.getStream(path2);
+ }
+ delete(path2) {
+ return this.provider.delete(path2);
+ }
+ metadata(path2) {
+ return this.provider.metadata(path2);
  }
  };
- __name(_Storage, "Storage");
- var Storage = _Storage;

- // src/AWSS3Storage.mts
+ // src/providers/AWSS3StorageProvider.mts
  var import_client_s3 = require("@aws-sdk/client-s3");
  var import_stream = __toESM(require("stream"), 1);
- var _AWSS3Storage = class _AWSS3Storage extends Storage {
+ var AWSS3StorageProvider = class {
  constructor(config) {
- var _a;
- super(config);
  this.config = config;
- if (!_AWSS3Storage.canHandle(config)) {
- throw new Error(`storage engine cannot handle this config.`);
- }
- this.s3 = new import_client_s3.S3Client(((_a = this.config) == null ? void 0 : _a.s3Config) || {});
+ this.s3 = new import_client_s3.S3Client(this.config);
  }
- static canHandle(config) {
- return config.engine === "s3";
+ static {
+ __name(this, "AWSS3StorageProvider");
  }
- exists(path2) {
- return __async(this, null, function* () {
- var _a;
- try {
- yield this.s3.send(new import_client_s3.HeadObjectCommand({ Bucket: (_a = this.config) == null ? void 0 : _a.bucket, Key: path2 }));
- return true;
- } catch (error) {
- if (error.name === "NotFound") {
- return false;
- }
- throw error;
- }
- });
- }
- put(path2, content) {
- return __async(this, null, function* () {
- let body;
- if (typeof content === "string" || content instanceof Buffer) {
- body = content;
- } else if (typeof content === "object" && !(content instanceof import_stream.default)) {
- body = JSON.stringify(content);
- } else if (content instanceof import_stream.default) {
- body = content;
- } else {
- throw new Error("Unsupported content type");
- }
- yield this.s3.send(
- new import_client_s3.PutObjectCommand({
- Bucket: this.config.bucket,
- Key: path2,
- Body: body
- })
- );
+ s3;
+ async exists(path2) {
+ try {
+ await this.s3.send(new import_client_s3.HeadObjectCommand({ Bucket: this.config.bucket, Key: path2 }));
  return true;
- });
+ } catch (error) {
+ if (error.name === "NotFound") {
+ return false;
+ }
+ throw error;
+ }
  }
- getJson(path2) {
- return __async(this, null, function* () {
- const data = yield this.s3.send(
- new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
- );
- const body = yield this.streamToString(data.Body);
- return JSON.parse(body);
- });
+ async put(path2, content) {
+ let body;
+ if (typeof content === "string" || content instanceof Buffer) {
+ body = content;
+ } else if (typeof content === "object" && !(content instanceof import_stream.default)) {
+ body = JSON.stringify(content);
+ } else if (content instanceof import_stream.default) {
+ body = content;
+ } else {
+ throw new Error("Unsupported content type");
+ }
+ await this.s3.send(
+ new import_client_s3.PutObjectCommand({
+ Bucket: this.config.bucket,
+ Key: path2,
+ Body: body
+ })
+ );
+ return true;
  }
- getString(path2) {
- return __async(this, null, function* () {
- const data = yield this.s3.send(
- new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
- );
- return yield this.streamToString(data.Body);
- });
+ async getJson(path2) {
+ const data = await this.s3.send(
+ new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
+ );
+ const body = await this.streamToString(data.Body);
+ return JSON.parse(body);
  }
- delete(path2) {
- return __async(this, null, function* () {
- yield this.s3.send(new import_client_s3.DeleteObjectCommand({ Bucket: this.config.bucket, Key: path2 }));
- return true;
- });
+ async getString(path2) {
+ const data = await this.s3.send(
+ new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
+ );
+ return await this.streamToString(data.Body);
  }
- streamToString(stream) {
- return __async(this, null, function* () {
- return new Promise((resolve, reject) => {
- const chunks = [];
- stream.on("data", (chunk) => chunks.push(chunk));
- stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf-8")));
- stream.on("error", reject);
- });
- });
+ async delete(path2) {
+ await this.s3.send(new import_client_s3.DeleteObjectCommand({ Bucket: this.config.bucket, Key: path2 }));
+ return true;
  }
- getBuffer(path2) {
- return __async(this, null, function* () {
- const data = yield this.s3.send(
- new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
- );
+ async streamToString(stream) {
+ return new Promise((resolve, reject) => {
  const chunks = [];
- const stream = data.Body;
- return new Promise((resolve, reject) => {
- stream.on("data", (chunk) => chunks.push(chunk));
- stream.on("end", () => resolve(Buffer.concat(chunks)));
- stream.on("error", reject);
- });
+ stream.on("data", (chunk) => chunks.push(chunk));
+ stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf-8")));
+ stream.on("error", reject);
  });
  }
- getStream(path2) {
- return __async(this, null, function* () {
- const data = yield this.s3.send(
- new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
- );
- return data.Body;
+ async getBuffer(path2) {
+ const data = await this.s3.send(
+ new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
+ );
+ const chunks = [];
+ const stream = data.Body;
+ return new Promise((resolve, reject) => {
+ stream.on("data", (chunk) => chunks.push(chunk));
+ stream.on("end", () => resolve(Buffer.concat(chunks)));
+ stream.on("error", reject);
  });
  }
- metadata(path2) {
- return __async(this, null, function* () {
- const metadata = yield this.s3.send(
- new import_client_s3.HeadObjectCommand({ Bucket: this.config.bucket, Key: path2 })
- );
- return {
- size: metadata.ContentLength || 0,
- mimeType: metadata.ContentType || "unknown",
- lastModifiedDate: (metadata.LastModified || /* @__PURE__ */ new Date(0)).toISOString()
- };
- });
+ async getStream(path2) {
+ const data = await this.s3.send(
+ new import_client_s3.GetObjectCommand({ Bucket: this.config.bucket, Key: path2 })
+ );
+ return data.Body;
+ }
+ async metadata(path2) {
+ const metadata = await this.s3.send(
+ new import_client_s3.HeadObjectCommand({ Bucket: this.config.bucket, Key: path2 })
+ );
+ return {
+ size: metadata.ContentLength || 0,
+ mimeType: metadata.ContentType || "unknown",
+ lastModifiedDate: (metadata.LastModified || /* @__PURE__ */ new Date(0)).toISOString()
+ };
  }
  };
- __name(_AWSS3Storage, "AWSS3Storage");
- var AWSS3Storage = _AWSS3Storage;

- // src/LocalStorage.mts
+ // src/providers/LocalStorageProvider.mts
  var import_stream2 = __toESM(require("stream"), 1);
  var fs = __toESM(require("fs/promises"), 1);
  var import_fs = require("fs");
  var path = __toESM(require("path"), 1);
  var mime = __toESM(require("mime-types"), 1);
- var _LocalStorage = class _LocalStorage extends Storage {
+ var LocalStorageProvider = class {
  constructor(config) {
- super(config);
- if (!_LocalStorage.canHandle(config)) {
- throw new Error(`storage engine cannot handle this config.`);
- }
+ this.config = config;
  fs.mkdir(this.config.basePath, { recursive: true }).catch((error) => {
  throw error;
  });
  }
- metadata(path2) {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(path2);
- const stats = yield fs.stat(fullPath);
- return {
- size: stats.size,
- mimeType: mime.lookup(fullPath) || "unknown",
- lastModifiedDate: stats.mtime.toISOString()
- };
- });
+ static {
+ __name(this, "LocalStorageProvider");
  }
- static canHandle(config) {
- if (config.engine === "local") {
- return true;
- }
- return false;
+ async metadata(path2) {
+ const fullPath = this.getFullPath(path2);
+ const stats = await fs.stat(fullPath);
+ return {
+ size: stats.size,
+ mimeType: mime.lookup(fullPath) || "unknown",
+ lastModifiedDate: stats.mtime.toISOString()
+ };
  }
  getFullPath(filePath) {
  return path.join(this.config.basePath, filePath);
  }
- exists(path2) {
- return __async(this, null, function* () {
- try {
- yield fs.access(this.getFullPath(path2));
- return true;
- } catch (e) {
- return false;
- }
+ async exists(path2) {
+ try {
+ await fs.access(this.getFullPath(path2));
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ async put(filepath, content) {
+ const fullPath = this.getFullPath(filepath);
+ const dir = path.dirname(fullPath);
+ await fs.mkdir(dir, { recursive: true });
+ if (typeof content === "string" || content instanceof Buffer) {
+ await fs.writeFile(fullPath, content);
+ } else if (typeof content === "object" && !(content instanceof import_stream2.default)) {
+ await fs.writeFile(fullPath, JSON.stringify(content, null, 2));
+ } else if (typeof content === "object" && content instanceof import_stream2.default) {
+ const writeStream = (0, import_fs.createWriteStream)(fullPath);
+ await new Promise((resolve, reject) => {
+ content.pipe(writeStream);
+ content.on("end", resolve);
+ content.on("error", reject);
+ });
+ } else {
+ throw new Error("Unsupported content type");
+ }
+ return true;
+ }
+ async getJson(path2) {
+ const fullPath = this.getFullPath(path2);
+ const content = await fs.readFile(fullPath, "utf-8");
+ return JSON.parse(content);
+ }
+ async getString(path2, encoding = "utf-8") {
+ const fullPath = this.getFullPath(path2);
+ return await fs.readFile(fullPath, encoding);
+ }
+ async getBuffer(path2) {
+ const fullPath = this.getFullPath(path2);
+ return await fs.readFile(fullPath);
+ }
+ async getStream(path2) {
+ const fullPath = this.getFullPath(path2);
+ return (0, import_fs.createReadStream)(fullPath);
+ }
+ async delete(path2) {
+ const fullPath = this.getFullPath(path2);
+ await fs.unlink(fullPath);
+ return true;
+ }
+ };
+
+ // src/providers/GCPStorageProvider.mts
+ var import_storage = require("@google-cloud/storage");
+ var import_stream3 = __toESM(require("stream"), 1);
+ var mime2 = __toESM(require("mime-types"), 1);
+ var GCPStorageProvider = class {
+ constructor(config) {
+ this.config = config;
+ const { bucket, ...gcpOptions } = config;
+ this.storage = new import_storage.Storage(gcpOptions);
+ }
+ static {
+ __name(this, "GCPStorageProvider");
+ }
+ storage;
+ async exists(path2) {
+ try {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ const [exists] = await file.exists();
+ return exists;
+ } catch (error) {
+ return false;
+ }
+ }
+ async put(path2, content) {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ let data;
+ if (typeof content === "string" || content instanceof Buffer) {
+ data = content;
+ } else if (typeof content === "object" && !(content instanceof import_stream3.default)) {
+ data = JSON.stringify(content);
+ } else if (content instanceof import_stream3.default) {
+ data = content;
+ } else {
+ throw new Error("Unsupported content type");
+ }
+ if (data instanceof import_stream3.default) {
+ await new Promise((resolve, reject) => {
+ data.pipe(file.createWriteStream()).on("finish", () => resolve()).on("error", reject);
+ });
+ } else {
+ await file.save(data);
+ }
+ return true;
+ }
+ async getJson(path2) {
+ const data = await this.getString(path2);
+ return JSON.parse(data);
+ }
+ async getString(path2) {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ const [content] = await file.download();
+ return content.toString("utf-8");
+ }
+ async getBuffer(path2) {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ const [content] = await file.download();
+ return content;
+ }
+ async getStream(path2) {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ return file.createReadStream();
+ }
+ async delete(path2) {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ await file.delete();
+ return true;
+ }
+ async metadata(path2) {
+ const file = this.storage.bucket(this.config.bucket).file(path2);
+ const [metadata] = await file.getMetadata();
+ return {
+ size: typeof metadata.size === "number" ? metadata.size : parseInt(metadata.size || "0", 10),
+ mimeType: metadata.contentType || mime2.lookup(path2) || "unknown",
+ lastModifiedDate: metadata.updated || (/* @__PURE__ */ new Date(0)).toISOString()
+ };
+ }
+ };
+
+ // src/providers/AzureBlobStorageProvider.mts
+ var import_storage_blob = require("@azure/storage-blob");
+ var import_stream4 = __toESM(require("stream"), 1);
+ var mime3 = __toESM(require("mime-types"), 1);
+ var AzureBlobStorageProvider = class {
+ constructor(config) {
+ this.config = config;
+ const { accountName, accountKey, sasToken } = config;
+ if (accountKey) {
+ const sharedKeyCredential = new import_storage_blob.StorageSharedKeyCredential(accountName, accountKey);
+ this.blobServiceClient = new import_storage_blob.BlobServiceClient(
+ `https://${accountName}.blob.core.windows.net`,
+ sharedKeyCredential
+ );
+ } else if (sasToken) {
+ this.blobServiceClient = new import_storage_blob.BlobServiceClient(
+ `https://${accountName}.blob.core.windows.net?${sasToken}`
+ );
+ } else {
+ throw new Error("Either accountKey or sasToken is required for Azure Blob Storage");
+ }
+ }
+ static {
+ __name(this, "AzureBlobStorageProvider");
+ }
+ blobServiceClient;
+ async exists(path2) {
+ try {
+ const containerClient = this.blobServiceClient.getContainerClient(this.config.containerName);
+ const blobClient = containerClient.getBlobClient(path2);
+ return await blobClient.exists();
+ } catch (error) {
+ return false;
+ }
+ }
+ async put(path2, content) {
+ const containerClient = this.blobServiceClient.getContainerClient(this.config.containerName);
+ const blockBlobClient = containerClient.getBlockBlobClient(path2);
+ let data;
+ if (typeof content === "string" || content instanceof Buffer) {
+ data = content;
+ } else if (typeof content === "object" && !(content instanceof import_stream4.default)) {
+ data = JSON.stringify(content);
+ } else if (content instanceof import_stream4.default) {
+ data = content;
+ } else {
+ throw new Error("Unsupported content type");
+ }
+ if (data instanceof import_stream4.default) {
+ await blockBlobClient.uploadStream(data);
+ } else {
+ const buffer = typeof data === "string" ? Buffer.from(data) : data;
+ await blockBlobClient.upload(buffer, buffer.length);
+ }
+ return true;
+ }
+ async getJson(path2) {
+ const data = await this.getString(path2);
+ return JSON.parse(data);
+ }
+ async getString(path2) {
+ const buffer = await this.getBuffer(path2);
+ return buffer.toString("utf-8");
+ }
+ async getBuffer(path2) {
+ const containerClient = this.blobServiceClient.getContainerClient(this.config.containerName);
+ const blobClient = containerClient.getBlobClient(path2);
+ const downloadResponse = await blobClient.download();
+ if (!downloadResponse.readableStreamBody) {
+ throw new Error("Failed to download blob");
+ }
+ return await this.streamToBuffer(downloadResponse.readableStreamBody);
+ }
+ async getStream(path2) {
+ const containerClient = this.blobServiceClient.getContainerClient(this.config.containerName);
+ const blobClient = containerClient.getBlobClient(path2);
+ const downloadResponse = await blobClient.download();
+ if (!downloadResponse.readableStreamBody) {
+ throw new Error("Failed to download blob");
+ }
+ return downloadResponse.readableStreamBody;
+ }
+ async delete(path2) {
+ const containerClient = this.blobServiceClient.getContainerClient(this.config.containerName);
+ const blobClient = containerClient.getBlobClient(path2);
+ await blobClient.delete();
+ return true;
+ }
+ async metadata(path2) {
+ const containerClient = this.blobServiceClient.getContainerClient(this.config.containerName);
+ const blobClient = containerClient.getBlobClient(path2);
+ const properties = await blobClient.getProperties();
+ return {
+ size: properties.contentLength || 0,
+ mimeType: properties.contentType || mime3.lookup(path2) || "unknown",
+ lastModifiedDate: properties.lastModified?.toISOString() || (/* @__PURE__ */ new Date(0)).toISOString()
+ };
+ }
+ async streamToBuffer(readableStream) {
+ return new Promise((resolve, reject) => {
+ const chunks = [];
+ readableStream.on("data", (chunk) => {
+ chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
+ });
+ readableStream.on("end", () => {
+ resolve(Buffer.concat(chunks));
+ });
+ readableStream.on("error", reject);
  });
  }
- put(filepath, content) {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(filepath);
- const dir = path.dirname(fullPath);
- yield fs.mkdir(dir, { recursive: true });
+ };
+
+ // src/providers/FTPStorageProvider.mts
+ var import_basic_ftp = require("basic-ftp");
+ var import_stream5 = __toESM(require("stream"), 1);
+ var mime4 = __toESM(require("mime-types"), 1);
+ var FTPStorageProvider = class {
+ constructor(config) {
+ this.config = config;
+ }
+ static {
+ __name(this, "FTPStorageProvider");
+ }
+ async getClient() {
+ const client = new import_basic_ftp.Client();
+ await client.access({
+ host: this.config.host,
+ port: this.config.port || 21,
+ user: this.config.user || "anonymous",
+ password: this.config.password || "",
+ secure: this.config.secure || false
+ });
+ return client;
+ }
+ async exists(path2) {
+ const client = await this.getClient();
+ try {
+ await client.size(path2);
+ return true;
+ } catch (error) {
+ return false;
+ } finally {
+ client.close();
+ }
+ }
+ async put(path2, content) {
+ const client = await this.getClient();
+ try {
+ let stream;
  if (typeof content === "string" || content instanceof Buffer) {
- yield fs.writeFile(fullPath, content);
- } else if (typeof content === "object" && !(content instanceof import_stream2.default)) {
- yield fs.writeFile(fullPath, JSON.stringify(content, null, 2));
- } else if (typeof content === "object" && content instanceof import_stream2.default) {
- const writeStream = (0, import_fs.createWriteStream)(fullPath);
- yield new Promise((resolve, reject) => {
- content.pipe(writeStream);
- content.on("end", resolve);
- content.on("error", reject);
- });
+ const readable = new import_stream5.Readable();
+ readable.push(typeof content === "string" ? Buffer.from(content) : content);
+ readable.push(null);
+ stream = readable;
+ } else if (typeof content === "object" && !(content instanceof import_stream5.default)) {
+ const readable = new import_stream5.Readable();
+ readable.push(Buffer.from(JSON.stringify(content)));
+ readable.push(null);
+ stream = readable;
+ } else if (content instanceof import_stream5.default) {
+ stream = content;
  } else {
  throw new Error("Unsupported content type");
  }
+ await client.uploadFrom(stream, path2);
  return true;
- });
+ } finally {
+ client.close();
+ }
  }
- getJson(path2) {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(path2);
- const content = yield fs.readFile(fullPath, "utf-8");
- return JSON.parse(content);
- });
+ async getJson(path2) {
+ const data = await this.getString(path2);
+ return JSON.parse(data);
  }
- getString(path2, encoding = "utf-8") {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(path2);
- return yield fs.readFile(fullPath, encoding);
- });
+ async getString(path2) {
+ const buffer = await this.getBuffer(path2);
+ return buffer.toString("utf-8");
  }
- getBuffer(path2) {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(path2);
- return yield fs.readFile(fullPath);
- });
+ async getBuffer(path2) {
+ const client = await this.getClient();
+ try {
+ const chunks = [];
+ const writable = new import_stream5.PassThrough();
+ writable.on("data", (chunk) => {
+ chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
+ });
+ await client.downloadTo(writable, path2);
+ return Buffer.concat(chunks);
+ } finally {
+ client.close();
+ }
  }
- getStream(path2) {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(path2);
- return (0, import_fs.createReadStream)(fullPath);
+ async getStream(path2) {
+ const client = await this.getClient();
+ const passThrough = new import_stream5.PassThrough();
+ client.downloadTo(passThrough, path2).then(() => client.close()).catch((error) => {
+ client.close();
+ passThrough.destroy(error);
  });
+ passThrough.on("close", () => {
+ try {
+ client.close();
+ } catch {
+ }
+ });
+ return passThrough;
  }
- delete(path2) {
- return __async(this, null, function* () {
- const fullPath = this.getFullPath(path2);
- yield fs.unlink(fullPath);
+ async delete(path2) {
+ const client = await this.getClient();
+ try {
+ await client.remove(path2);
  return true;
- });
+ } finally {
+ client.close();
+ }
+ }
+ async metadata(path2) {
+ const client = await this.getClient();
+ try {
+ const size = await client.size(path2);
+ const lastMod = await client.lastMod(path2);
+ return {
+ size,
+ mimeType: mime4.lookup(path2) || "unknown",
+ lastModifiedDate: lastMod?.toISOString() || (/* @__PURE__ */ new Date(0)).toISOString()
+ };
+ } finally {
+ client.close();
+ }
  }
  };
- __name(_LocalStorage, "LocalStorage");
- var LocalStorage = _LocalStorage;

- // src/StorageFactory.mts
- var _StorageFactory = class _StorageFactory {
- registerStorageEngine(engine) {
- _StorageFactory.storageEngines.push(engine);
- }
- static create(config) {
- for (const engine of _StorageFactory.storageEngines) {
- if (engine.canHandle(config)) {
- return new engine(config);
+ // src/providers/SFTPStorageProvider.mts
+ var import_ssh2_sftp_client = __toESM(require("ssh2-sftp-client"), 1);
+ var import_stream6 = __toESM(require("stream"), 1);
+ var mime5 = __toESM(require("mime-types"), 1);
+ var SFTPStorageProvider = class {
+ constructor(config) {
+ this.config = config;
+ }
+ static {
+ __name(this, "SFTPStorageProvider");
+ }
+ async getClient() {
+ const client = new import_ssh2_sftp_client.default();
+ await client.connect({
+ host: this.config.host,
+ port: this.config.port || 22,
+ username: this.config.username,
+ password: this.config.password,
+ privateKey: this.config.privateKey,
+ passphrase: this.config.passphrase
+ });
+ return client;
+ }
+ async exists(path2) {
+ const client = await this.getClient();
+ try {
+ const result = await client.exists(path2);
+ return result !== false;
+ } catch (error) {
+ return false;
+ } finally {
+ await client.end();
+ }
+ }
+ async put(path2, content) {
+ const client = await this.getClient();
+ try {
+ let data;
+ if (typeof content === "string") {
+ data = content;
+ } else if (content instanceof Buffer) {
+ data = content;
+ } else if (typeof content === "object" && !(content instanceof import_stream6.default)) {
+ data = Buffer.from(JSON.stringify(content));
+ } else if (content instanceof import_stream6.default) {
+ data = content;
+ } else {
+ throw new Error("Unsupported content type");
  }
+ await client.put(data, path2);
+ return true;
+ } finally {
+ await client.end();
+ }
+ }
+ async getJson(path2) {
+ const data = await this.getString(path2);
+ return JSON.parse(data);
+ }
+ async getString(path2) {
+ const buffer = await this.getBuffer(path2);
+ return buffer.toString("utf-8");
+ }
+ async getBuffer(path2) {
+ const client = await this.getClient();
+ try {
+ const buffer = await client.get(path2);
+ return buffer;
+ } finally {
+ await client.end();
  }
- throw new Error("No matchin storage engine found");
+ }
+ async getStream(path2) {
+ const client = await this.getClient();
+ const passThrough = new import_stream6.PassThrough();
+ client.get(path2).then((data) => {
+ if (data instanceof Buffer) {
+ const readable = new import_stream6.Readable();
+ readable.push(data);
+ readable.push(null);
+ readable.pipe(passThrough);
+ } else if (data instanceof import_stream6.default) {
+ data.pipe(passThrough);
+ }
+ return client.end();
+ }).catch((error) => {
+ client.end().catch(() => {
+ });
+ passThrough.destroy(error);
+ });
+ passThrough.on("close", () => {
+ client.end().catch(() => {
+ });
+ });
+ return passThrough;
+ }
+ async delete(path2) {
+ const client = await this.getClient();
+ try {
+ await client.delete(path2);
+ return true;
+ } finally {
+ await client.end();
+ }
+ }
+ async metadata(path2) {
+ const client = await this.getClient();
+ try {
+ const stats = await client.stat(path2);
+ return {
+ size: stats.size || 0,
+ mimeType: mime5.lookup(path2) || "unknown",
+ lastModifiedDate: new Date((stats.modifyTime || 0) * 1e3).toISOString()
+ };
+ } finally {
+ await client.end();
+ }
+ }
+ };
+
+ // src/StorageProviderFactory.mts
+ var import_neko_helper = require("@devbro/neko-helper");
+ var StorageProviderFactory = class _StorageProviderFactory {
+ static {
+ __name(this, "StorageProviderFactory");
+ }
+ static instance = new import_neko_helper.FlexibleFactory();
+ static register(key, factory) {
+ _StorageProviderFactory.instance.register(key, factory);
+ }
+ static create(key, ...args) {
+ return _StorageProviderFactory.instance.create(key, ...args);
  }
  };
- __name(_StorageFactory, "StorageFactory");
- _StorageFactory.storageEngines = [LocalStorage, AWSS3Storage];
- var StorageFactory = _StorageFactory;
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
- AWSS3Storage,
- LocalStorage,
+ AWSS3StorageProvider,
+ AzureBlobStorageProvider,
+ FTPStorageProvider,
+ GCPStorageProvider,
+ LocalStorageProvider,
+ SFTPStorageProvider,
  Storage,
- StorageFactory
+ StorageProviderFactory
  });
  //# sourceMappingURL=index.js.map