@ido_kawaz/storage-client 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +95 -0
- package/dist/index.js +8 -0
- package/dist/index.js.map +1 -0
- package/dist/storageClient.js +54 -0
- package/dist/storageClient.js.map +1 -0
- package/dist/types.js +11 -0
- package/dist/types.js.map +1 -0
- package/package.json +34 -0
package/README.md
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
# @ido_kawaz/storage-client
|
|
2
|
+
|
|
3
|
+
AWS S3 storage client library for Kawaz services with support for multipart uploads and bucket management.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install @ido_kawaz/storage-client
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
### Initialize StorageClient
|
|
14
|
+
|
|
15
|
+
```typescript
|
|
16
|
+
import { StorageClient } from '@ido_kawaz/storage-client';
|
|
17
|
+
import { Readable } from 'stream';
|
|
18
|
+
|
|
19
|
+
const client = new StorageClient({
|
|
20
|
+
region: 'us-east-1',
|
|
21
|
+
credentials: {
|
|
22
|
+
accessKeyId: 'YOUR_ACCESS_KEY',
|
|
23
|
+
secretAccessKey: 'YOUR_SECRET_KEY'
|
|
24
|
+
},
|
|
25
|
+
partSize: 5 * 1024 * 1024, // 5MB (optional, default varies)
|
|
26
|
+
maxConcurrency: 4 // Maximum concurrent parts (optional)
|
|
27
|
+
});
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
### Ensure Bucket Exists
|
|
31
|
+
|
|
32
|
+
Checks if a bucket exists, creates it if not.
|
|
33
|
+
|
|
34
|
+
```typescript
|
|
35
|
+
await client.ensureBucket('my-bucket');
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
### Upload Objects
|
|
39
|
+
|
|
40
|
+
Upload files or streams to S3 with optional automatic bucket creation:
|
|
41
|
+
|
|
42
|
+
```typescript
|
|
43
|
+
const fileStream = fs.createReadStream('path/to/file');
|
|
44
|
+
|
|
45
|
+
await client.uploadObject('my-bucket', 'path/to/object', fileStream, {
|
|
46
|
+
ensureBucket: true // Automatically create bucket if it doesn't exist
|
|
47
|
+
});
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
Upload progress is logged to the console as the transfer proceeds.
|
|
51
|
+
|
|
52
|
+
### Delete Bucket
|
|
53
|
+
|
|
54
|
+
Remove an empty bucket:
|
|
55
|
+
|
|
56
|
+
```typescript
|
|
57
|
+
await client.deleteBucket('my-bucket');
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Configuration
|
|
61
|
+
|
|
62
|
+
`StorageClientConfig` extends AWS S3ClientConfig with additional options:
|
|
63
|
+
|
|
64
|
+
- `region` (required): AWS region (e.g., 'us-east-1')
|
|
65
|
+
- `credentials` (optional): AWS credentials object with `accessKeyId` and `secretAccessKey`
|
|
66
|
+
- `endpoint` (optional): Custom S3-compatible endpoint (e.g., MinIO, LocalStack)
|
|
67
|
+
- `partSize` (optional): Size of each part in multipart uploads (bytes)
|
|
68
|
+
- `maxConcurrency` (optional): Maximum concurrent parts during upload
|
|
69
|
+
|
|
70
|
+
## Error Handling
|
|
71
|
+
|
|
72
|
+
The library throws `StorageError` exceptions with operation details:
|
|
73
|
+
|
|
74
|
+
```typescript
|
|
75
|
+
import { StorageError } from '@ido_kawaz/storage-client';
|
|
76
|
+
|
|
77
|
+
try {
|
|
78
|
+
await client.uploadObject('bucket', 'key', stream);
|
|
79
|
+
} catch (error) {
|
|
80
|
+
if (error instanceof StorageError) {
|
|
81
|
+
console.error('Storage operation failed:', error.message);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
## Exports
|
|
87
|
+
|
|
88
|
+
- `StorageClient`: Main client class
|
|
89
|
+
- `StorageClientConfig`: Configuration interface
|
|
90
|
+
- `StorageError`: Error class
|
|
91
|
+
- `UploadObjectOptions`: Upload options interface
|
|
92
|
+
|
|
93
|
+
## License
|
|
94
|
+
|
|
95
|
+
MIT
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StorageError = exports.StorageClient = void 0;
// Re-export the public surface of the package: the client class and its
// error type. Live-binding getters mirror TypeScript's compiled re-exports.
const storageClient_1 = require("./storageClient");
const types_1 = require("./types");
Object.defineProperty(exports, "StorageClient", {
    enumerable: true,
    get: () => storageClient_1.StorageClient,
});
Object.defineProperty(exports, "StorageError", {
    enumerable: true,
    get: () => types_1.StorageError,
});
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,iDAAgD;AAAvC,8GAAA,aAAa,OAAA;AACtB,iCAAiF;AAAnD,qGAAA,YAAY,OAAA"}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StorageClient = void 0;
const client_s3_1 = require("@aws-sdk/client-s3");
const lib_storage_1 = require("@aws-sdk/lib-storage");
const types_1 = require("./types");
/**
 * S3 storage client for Kawaz services.
 *
 * Wraps the AWS SDK v3 S3Client with bucket lifecycle helpers and
 * multipart uploads via @aws-sdk/lib-storage. `config` is an
 * S3ClientConfig extended with optional `partSize` (bytes per multipart
 * part) and `maxConcurrency` (parallel part uploads).
 */
class StorageClient {
    constructor(config) {
        this.config = config;
        /**
         * Ensure `bucketName` exists, creating it when HeadBucket reports
         * "NotFound". Any other HeadBucket failure (e.g. 403 on a bucket
         * owned by another account) is wrapped in a StorageError.
         */
        this.ensureBucket = async (bucketName) => {
            await this.client.send(new client_s3_1.HeadBucketCommand({ Bucket: bucketName })).catch(async (error) => {
                if (error.name === 'NotFound') {
                    await this.client.send(new client_s3_1.CreateBucketCommand({ Bucket: bucketName }));
                }
                else {
                    throw new types_1.StorageError("ensureBucket", error, { bucketName });
                }
            });
        };
        /**
         * Delete `bucketName`. Deleting a bucket that does not exist is a
         * no-op; any other failure (e.g. BucketNotEmpty) is wrapped in a
         * StorageError.
         */
        this.deleteBucket = async (bucketName) => {
            await this.client.send(new client_s3_1.DeleteBucketCommand({ Bucket: bucketName })).catch((error) => {
                if (error.name !== 'NoSuchBucket') {
                    throw new types_1.StorageError("deleteBucket", error, { bucketName });
                }
            });
        };
        /**
         * Upload `objectData` (stream/Buffer/string) to `bucketName` under
         * `objectKey` with a multipart upload tuned by `config.partSize` and
         * `config.maxConcurrency`. Pass `options.ensureBucket: true` to
         * create the bucket first. S3 service failures are wrapped in a
         * StorageError; any other error is rethrown unchanged.
         */
        this.uploadObject = async (bucketName, objectKey, objectData, options) => {
            if (options?.ensureBucket) {
                await this.ensureBucket(bucketName);
            }
            try {
                const upload = new lib_storage_1.Upload({
                    client: this.client,
                    params: { Bucket: bucketName, Key: objectKey, Body: objectData },
                    queueSize: this.config.maxConcurrency,
                    partSize: this.config.partSize
                });
                upload.on("httpUploadProgress", (progress) => {
                    // `loaded`/`total` are optional on the Progress event (total is
                    // unknown for unsized streams); guard to avoid logging "NaN%".
                    const { loaded, total } = progress;
                    if (typeof loaded === 'number' && typeof total === 'number' && total > 0) {
                        console.log(`Upload progress for ${objectKey}: ${loaded / total * 100}%`);
                    }
                    else if (typeof loaded === 'number') {
                        console.log(`Upload progress for ${objectKey}: ${loaded} bytes`);
                    }
                });
                await upload.done();
            }
            catch (error) {
                if (error instanceof client_s3_1.S3ServiceException) {
                    throw new types_1.StorageError("uploadObject", error, { bucketName, objectKey });
                }
                throw error;
            }
        };
        this.client = new client_s3_1.S3Client(config);
    }
}
exports.StorageClient = StorageClient;
//# sourceMappingURL=storageClient.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"storageClient.js","sourceRoot":"","sources":["../src/storageClient.ts"],"names":[],"mappings":";;;AAAA,kDAA+H;AAC/H,sDAA8C;AAE9C,mCAAiF;AAEjF,MAAa,aAAa;IAEtB,YAA6B,MAA2B;QAA3B,WAAM,GAAN,MAAM,CAAqB;QAGxD,iBAAY,GAAG,KAAK,EAAE,UAAkB,EAAE,EAAE;YACxC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,6BAAiB,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,KAAyB,EAAE,EAAE;gBAC5G,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBAC5B,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,+BAAmB,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC;gBAC5E,CAAC;qBAAM,CAAC;oBACJ,MAAM,IAAI,oBAAY,CAAC,cAAc,EAAE,KAAK,EAAE,EAAE,UAAU,EAAE,CAAC,CAAC;gBAClE,CAAC;YACL,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;QACF,iBAAY,GAAG,KAAK,EAAE,UAAkB,EAAE,EAAE;YACxC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,+BAAmB,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,KAAyB,EAAE,EAAE;gBACxG,IAAI,KAAK,CAAC,IAAI,KAAK,cAAc,EAAE,CAAC;oBAChC,MAAM,IAAI,oBAAY,CAAC,cAAc,EAAE,KAAK,EAAE,EAAE,UAAU,EAAE,CAAC,CAAC;gBAClE,CAAC;YACL,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;QAEF,iBAAY,GAAG,KAAK,EAAE,UAAkB,EAAE,SAAiB,EAAE,UAAoB,EAAE,OAA6B,EAAE,EAAE;YAChH,IAAI,OAAO,EAAE,YAAY,EAAE,CAAC;gBACxB,MAAM,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC;YACxC,CAAC;YACD,IAAI,CAAC;gBACD,MAAM,MAAM,GAAG,IAAI,oBAAM,CAAC;oBACtB,MAAM,EAAE,IAAI,CAAC,MAAM;oBACnB,MAAM,EAAE,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE;oBAChE,SAAS,EAAE,IAAI,CAAC,MAAM,CAAC,cAAc;oBACrC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ;iBACjC,CAAC,CAAA;gBACF,MAAM,CAAC,EAAE,CAAC,oBAAoB,EAAE,CAAC,QAAQ,EAAE,EAAE;oBACzC,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,KAAK,QAAQ,CAAC,MAAO,GAAG,QAAQ,CAAC,KAAM,GAAG,GAAG,GAAG,CAAC,CAAC;gBAClG,CAAC,CAAC,CAAC;gBACH,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;YACxB,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACb,IAAI,KAAK,YAAY,8BAAkB,EAAE,CAAC;oBACtC,MAAM,IAAI,oBAAY,CAAC,cAAc,EAAE,KAAK,EAAE,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBACD,MAAM,KAAK,CAAC;YAChB,CAAC;QACL,CAAC,CAAA;QAxCG,IAAI,CAAC,MAAM,GAAG,IAAI,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACvC,CAAC;CAwCJ;AA5CD,sCA4CC"}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.StorageError = void 0;
|
|
4
|
+
/**
 * Error thrown by StorageClient operations, carrying the failed
 * operation name, contextual details, and the underlying AWS SDK error.
 */
class StorageError extends Error {
    /**
     * @param {string} operation - Name of the storage operation that failed (e.g. "uploadObject").
     * @param {Error} error - The underlying error raised by the AWS SDK.
     * @param {object} [details] - Extra context (bucket name, object key, ...).
     */
    constructor(operation, error, details) {
        const message = `Storage error: ${JSON.stringify({ operation, error: error.name, ...details })}`;
        // Preserve the original error for callers via the standard `cause`
        // option; previously only `error.name` survived (in the message).
        super(message, { cause: error });
        this.name = 'StorageError';
        // Expose structured context so callers need not parse the message.
        this.operation = operation;
        this.details = details;
    }
}
|
|
10
|
+
exports.StorageError = StorageError;
|
|
11
|
+
//# sourceMappingURL=types.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":";;;AAOA,MAAa,YAAa,SAAQ,KAAK;IACnC,YAAY,SAAiB,EAAE,KAAyB,EAAE,OAAW;QACjE,MAAM,OAAO,GAAG,kBAAkB,IAAI,CAAC,SAAS,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,EAAE,CAAC;QACjG,KAAK,CAAC,OAAO,CAAC,CAAC;IACnB,CAAC;CACJ;AALD,oCAKC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@ido_kawaz/storage-client",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Storage client library for Kawaz Plus services",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"files": [
|
|
8
|
+
"dist"
|
|
9
|
+
],
|
|
10
|
+
"scripts": {
|
|
11
|
+
"build": "npm run clean && tsc",
|
|
12
|
+
"build:watch": "tsc --watch",
|
|
13
|
+
"build:advanced": "npm run clean:advanced && npm i && tsc",
|
|
14
|
+
"clean": "rimraf dist",
|
|
15
|
+
"clean:advanced": "rimraf dist node_modules package-lock.json",
|
|
16
|
+
"package": "npm run build:advanced && npm publish --access public"
|
|
17
|
+
},
|
|
18
|
+
"keywords": [
|
|
19
|
+
"storage",
|
|
20
|
+
"s3",
|
|
21
|
+
"aws"
|
|
22
|
+
],
|
|
23
|
+
"author": "",
|
|
24
|
+
"license": "MIT",
|
|
25
|
+
"dependencies": {
|
|
26
|
+
"@aws-sdk/client-s3": "^3.985.0",
|
|
27
|
+
"@aws-sdk/lib-storage": "^3.985.0"
|
|
28
|
+
},
|
|
29
|
+
"devDependencies": {
|
|
30
|
+
"@types/node": "^25.2.3",
|
|
31
|
+
"rimraf": "^6.0.0",
|
|
32
|
+
"typescript": "^5.9.3"
|
|
33
|
+
}
|
|
34
|
+
}
|