tawa-storage 0.1.0
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/README.md +3 -0
- package/dist/index.d.mts +160 -0
- package/dist/index.d.ts +160 -0
- package/dist/index.js +394 -0
- package/dist/index.mjs +364 -0
- package/package.json +53 -0
package/README.md
ADDED
package/dist/index.d.mts
ADDED
@@ -0,0 +1,160 @@
import { Readable } from 'node:stream';

/** Configuration for creating a StorageClient */
interface StorageClientConfig {
    /** MinIO/S3 host (e.g., "64.23.181.20") */
    readonly host: string;
    /** MinIO/S3 port (default: 9000) */
    readonly port?: number;
    /** S3 access key ID */
    readonly accessKey: string;
    /** S3 secret access key */
    readonly secretKey: string;
    /** S3 region (default: "us-east-1") */
    readonly region?: string;
    /** Target bucket name */
    readonly bucket: string;
    /** Use SSL for connection (default: false) */
    readonly useSSL?: boolean;
}
/** Options for upload operations */
interface UploadOptions {
    /** Content-Type header. Auto-detected from filename if omitted. */
    readonly contentType?: string;
    /** Custom metadata key-value pairs */
    readonly metadata?: Readonly<Record<string, string>>;
}
/** Options for presigned URL generation */
interface SignedUrlOptions {
    /** Expiry time in seconds (default: 3600 = 1 hour) */
    readonly expiresIn?: number;
}
/** Result of a stat() call */
interface ObjectStat {
    /** Size in bytes */
    readonly size: number;
    /** ETag hash */
    readonly etag: string;
    /** Last modification timestamp */
    readonly lastModified: Date;
    /** Content-Type */
    readonly contentType: string;
}
/** Item returned from list() */
interface ListItem {
    /** Object key (path) */
    readonly key: string;
    /** Size in bytes */
    readonly size: number;
    /** ETag hash */
    readonly etag: string;
    /** Last modification timestamp */
    readonly lastModified: Date;
}
/** Error codes for StorageError */
type StorageErrorCode = 'NOT_FOUND' | 'ACCESS_DENIED' | 'BUCKET_NOT_FOUND' | 'INVALID_CONFIG' | 'NETWORK_ERROR' | 'UPLOAD_FAILED' | 'DELETE_FAILED' | 'PRESIGN_FAILED' | 'UNKNOWN';
/** Accepted data types for upload */
type UploadData = Buffer | string | Readable;

declare class StorageClient {
    private readonly client;
    private readonly bucket;
    constructor(config: StorageClientConfig);
    /**
     * Create a StorageClient from environment variables.
     *
     * Reads: S3_HOST, S3_PORT, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY,
     * S3_REGION, S3_BUCKET
     */
    static fromEnv(): StorageClient;
    /**
     * Create a StorageClient for a named bucket.
     *
     * Reads S3_{NAME}_BUCKET for the bucket name, shares
     * connection config (S3_HOST, etc.) from the environment.
     */
    static fromEnvNamed(name: string): StorageClient;
    /**
     * Upload data to the bucket.
     * Accepts Buffer, string, or Readable stream.
     * Content-type is auto-detected from the key if not provided.
     */
    upload(key: string, data: UploadData, options?: UploadOptions): Promise<void>;
    /**
     * Download an object and return its contents as a Buffer.
     * For large files, use downloadStream() instead.
     */
    download(key: string): Promise<Buffer>;
    /**
     * Download an object as a Readable stream.
     * Use this for large files to avoid loading everything into memory.
     */
    downloadStream(key: string): Promise<Readable>;
    /**
     * Delete one or more objects.
     * Accepts a single key or an array of keys for bulk delete.
     */
    delete(keys: string | readonly string[]): Promise<void>;
    /**
     * List objects with an optional prefix.
     * Returns a complete array (not a stream) for simplicity.
     */
    list(prefix?: string): Promise<readonly ListItem[]>;
    /**
     * Check if an object exists.
     */
    exists(key: string): Promise<boolean>;
    /**
     * Get object metadata (size, etag, lastModified, contentType).
     */
    stat(key: string): Promise<ObjectStat>;
    /**
     * Generate a presigned download URL.
     */
    getSignedUrl(key: string, options?: SignedUrlOptions): Promise<string>;
    /**
     * Generate a presigned upload URL (PUT).
     */
    getUploadUrl(key: string, options?: SignedUrlOptions): Promise<string>;
    private streamToBuffer;
    private isNotFoundError;
    private wrapError;
}

declare class StorageError extends Error {
    readonly code: StorageErrorCode;
    readonly statusCode: number;
    readonly cause?: unknown | undefined;
    constructor(message: string, code: StorageErrorCode, statusCode?: number, cause?: unknown | undefined);
}

/**
 * Detect content type from a file path or key name.
 * Returns 'application/octet-stream' for unknown extensions.
 */
declare function detectContentType(key: string): string;

/**
 * Create a StorageClient.
 *
 * @example Single bucket (reads S3_BUCKET)
 * ```typescript
 * const storage = createStorage()
 * await storage.upload('file.txt', Buffer.from('hello'))
 * ```
 *
 * @example Named bucket (reads S3_UPLOADS_BUCKET)
 * ```typescript
 * const uploads = createStorage('uploads')
 * ```
 *
 * @example Explicit config (for testing)
 * ```typescript
 * const storage = createStorage({ bucket: 'test', host: '127.0.0.1', accessKey: '...', secretKey: '...' })
 * ```
 */
declare function createStorage(input?: string | (Partial<StorageClientConfig> & {
    readonly bucket: string;
})): StorageClient;

export { type ListItem, type ObjectStat, type SignedUrlOptions, StorageClient, type StorageClientConfig, StorageError, type StorageErrorCode, type UploadData, type UploadOptions, createStorage, detectContentType };
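Taken together, these declarations describe a thin, single-bucket wrapper over MinIO/S3. Below is a minimal usage sketch against this surface, assuming the package is installed as `tawa-storage` and the `S3_*` variables are set; the object keys and metadata values are hypothetical.

```typescript
import { createStorage, StorageError } from 'tawa-storage';

async function main(): Promise<void> {
  // Reads S3_HOST, S3_BUCKET, credentials, etc. from the environment.
  const storage = createStorage();

  // Content-Type is detected from the ".pdf" extension; metadata is optional.
  await storage.upload('reports/2024-q1.pdf', Buffer.from('...'), {
    metadata: { 'uploaded-by': 'example-service' }, // hypothetical metadata
  });

  const info = await storage.stat('reports/2024-q1.pdf');
  console.log(`size=${info.size} type=${info.contentType}`);

  // Presigned download link valid for 15 minutes.
  const url = await storage.getSignedUrl('reports/2024-q1.pdf', { expiresIn: 900 });
  console.log(`download link: ${url}`);
}

main().catch((err) => {
  if (err instanceof StorageError) {
    console.error(`storage error ${err.code}:`, err.message);
  } else {
    throw err;
  }
});
```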
package/dist/index.d.ts
ADDED
@@ -0,0 +1,160 @@
import { Readable } from 'node:stream';

/** Configuration for creating a StorageClient */
interface StorageClientConfig {
    /** MinIO/S3 host (e.g., "64.23.181.20") */
    readonly host: string;
    /** MinIO/S3 port (default: 9000) */
    readonly port?: number;
    /** S3 access key ID */
    readonly accessKey: string;
    /** S3 secret access key */
    readonly secretKey: string;
    /** S3 region (default: "us-east-1") */
    readonly region?: string;
    /** Target bucket name */
    readonly bucket: string;
    /** Use SSL for connection (default: false) */
    readonly useSSL?: boolean;
}
/** Options for upload operations */
interface UploadOptions {
    /** Content-Type header. Auto-detected from filename if omitted. */
    readonly contentType?: string;
    /** Custom metadata key-value pairs */
    readonly metadata?: Readonly<Record<string, string>>;
}
/** Options for presigned URL generation */
interface SignedUrlOptions {
    /** Expiry time in seconds (default: 3600 = 1 hour) */
    readonly expiresIn?: number;
}
/** Result of a stat() call */
interface ObjectStat {
    /** Size in bytes */
    readonly size: number;
    /** ETag hash */
    readonly etag: string;
    /** Last modification timestamp */
    readonly lastModified: Date;
    /** Content-Type */
    readonly contentType: string;
}
/** Item returned from list() */
interface ListItem {
    /** Object key (path) */
    readonly key: string;
    /** Size in bytes */
    readonly size: number;
    /** ETag hash */
    readonly etag: string;
    /** Last modification timestamp */
    readonly lastModified: Date;
}
/** Error codes for StorageError */
type StorageErrorCode = 'NOT_FOUND' | 'ACCESS_DENIED' | 'BUCKET_NOT_FOUND' | 'INVALID_CONFIG' | 'NETWORK_ERROR' | 'UPLOAD_FAILED' | 'DELETE_FAILED' | 'PRESIGN_FAILED' | 'UNKNOWN';
/** Accepted data types for upload */
type UploadData = Buffer | string | Readable;

declare class StorageClient {
    private readonly client;
    private readonly bucket;
    constructor(config: StorageClientConfig);
    /**
     * Create a StorageClient from environment variables.
     *
     * Reads: S3_HOST, S3_PORT, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY,
     * S3_REGION, S3_BUCKET
     */
    static fromEnv(): StorageClient;
    /**
     * Create a StorageClient for a named bucket.
     *
     * Reads S3_{NAME}_BUCKET for the bucket name, shares
     * connection config (S3_HOST, etc.) from the environment.
     */
    static fromEnvNamed(name: string): StorageClient;
    /**
     * Upload data to the bucket.
     * Accepts Buffer, string, or Readable stream.
     * Content-type is auto-detected from the key if not provided.
     */
    upload(key: string, data: UploadData, options?: UploadOptions): Promise<void>;
    /**
     * Download an object and return its contents as a Buffer.
     * For large files, use downloadStream() instead.
     */
    download(key: string): Promise<Buffer>;
    /**
     * Download an object as a Readable stream.
     * Use this for large files to avoid loading everything into memory.
     */
    downloadStream(key: string): Promise<Readable>;
    /**
     * Delete one or more objects.
     * Accepts a single key or an array of keys for bulk delete.
     */
    delete(keys: string | readonly string[]): Promise<void>;
    /**
     * List objects with an optional prefix.
     * Returns a complete array (not a stream) for simplicity.
     */
    list(prefix?: string): Promise<readonly ListItem[]>;
    /**
     * Check if an object exists.
     */
    exists(key: string): Promise<boolean>;
    /**
     * Get object metadata (size, etag, lastModified, contentType).
     */
    stat(key: string): Promise<ObjectStat>;
    /**
     * Generate a presigned download URL.
     */
    getSignedUrl(key: string, options?: SignedUrlOptions): Promise<string>;
    /**
     * Generate a presigned upload URL (PUT).
     */
    getUploadUrl(key: string, options?: SignedUrlOptions): Promise<string>;
    private streamToBuffer;
    private isNotFoundError;
    private wrapError;
}

declare class StorageError extends Error {
    readonly code: StorageErrorCode;
    readonly statusCode: number;
    readonly cause?: unknown | undefined;
    constructor(message: string, code: StorageErrorCode, statusCode?: number, cause?: unknown | undefined);
}

/**
 * Detect content type from a file path or key name.
 * Returns 'application/octet-stream' for unknown extensions.
 */
declare function detectContentType(key: string): string;

/**
 * Create a StorageClient.
 *
 * @example Single bucket (reads S3_BUCKET)
 * ```typescript
 * const storage = createStorage()
 * await storage.upload('file.txt', Buffer.from('hello'))
 * ```
 *
 * @example Named bucket (reads S3_UPLOADS_BUCKET)
 * ```typescript
 * const uploads = createStorage('uploads')
 * ```
 *
 * @example Explicit config (for testing)
 * ```typescript
 * const storage = createStorage({ bucket: 'test', host: '127.0.0.1', accessKey: '...', secretKey: '...' })
 * ```
 */
declare function createStorage(input?: string | (Partial<StorageClientConfig> & {
    readonly bucket: string;
})): StorageClient;

export { type ListItem, type ObjectStat, type SignedUrlOptions, StorageClient, type StorageClientConfig, StorageError, type StorageErrorCode, type UploadData, type UploadOptions, createStorage, detectContentType };
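`StorageErrorCode` collapses the underlying S3/MinIO failures into a small stable set (`NOT_FOUND`, `ACCESS_DENIED`, `BUCKET_NOT_FOUND`, and so on), so callers can branch on a code instead of inspecting raw driver errors. A hedged sketch of one such caller follows; treating a missing object as `null` is an illustrative policy, not part of the package.

```typescript
import { createStorage, StorageError } from 'tawa-storage';

// Illustrative only: return null for missing objects, rethrow everything else.
async function tryDownload(key: string): Promise<Buffer | null> {
  const storage = createStorage();
  try {
    return await storage.download(key);
  } catch (err) {
    if (err instanceof StorageError && err.code === 'NOT_FOUND') {
      return null; // statusCode is 404 for this code
    }
    throw err;
  }
}
```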
package/dist/index.js
ADDED
@@ -0,0 +1,394 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var index_exports = {};
__export(index_exports, {
  StorageClient: () => StorageClient,
  StorageError: () => StorageError,
  createStorage: () => createStorage,
  detectContentType: () => detectContentType
});
module.exports = __toCommonJS(index_exports);

// src/client.ts
var import_minio = require("minio");

// src/errors.ts
var StorageError = class extends Error {
  constructor(message, code, statusCode = 0, cause) {
    super(message);
    this.code = code;
    this.statusCode = statusCode;
    this.cause = cause;
    this.name = "StorageError";
  }
};

// src/mime.ts
var MIME_MAP = {
  // Text
  ".txt": "text/plain",
  ".html": "text/html",
  ".htm": "text/html",
  ".css": "text/css",
  ".csv": "text/csv",
  ".xml": "text/xml",
  // Application
  ".json": "application/json",
  ".js": "application/javascript",
  ".mjs": "application/javascript",
  ".pdf": "application/pdf",
  ".zip": "application/zip",
  ".gz": "application/gzip",
  ".tar": "application/x-tar",
  ".yaml": "application/x-yaml",
  ".yml": "application/x-yaml",
  ".xls": "application/vnd.ms-excel",
  ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
  ".doc": "application/msword",
  ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
  // Image
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".gif": "image/gif",
  ".svg": "image/svg+xml",
  ".webp": "image/webp",
  ".ico": "image/x-icon",
  ".avif": "image/avif",
  // Audio/Video
  ".mp3": "audio/mpeg",
  ".mp4": "video/mp4",
  ".webm": "video/webm",
  ".ogg": "audio/ogg",
  // Font
  ".woff": "font/woff",
  ".woff2": "font/woff2",
  ".ttf": "font/ttf",
  ".otf": "font/otf"
};
var DEFAULT_MIME = "application/octet-stream";
function detectContentType(key) {
  const lastDot = key.lastIndexOf(".");
  if (lastDot === -1) return DEFAULT_MIME;
  const ext = key.slice(lastDot).toLowerCase();
  return MIME_MAP[ext] ?? DEFAULT_MIME;
}

// src/client.ts
var DEFAULT_PORT = 9e3;
var DEFAULT_REGION = "us-east-1";
var DEFAULT_SIGNED_URL_EXPIRY = 3600;
var StorageClient = class _StorageClient {
  client;
  bucket;
  constructor(config) {
    if (!config.host) {
      throw new StorageError("host is required", "INVALID_CONFIG");
    }
    if (!config.accessKey) {
      throw new StorageError("accessKey is required", "INVALID_CONFIG");
    }
    if (!config.secretKey) {
      throw new StorageError("secretKey is required", "INVALID_CONFIG");
    }
    if (!config.bucket) {
      throw new StorageError("bucket is required", "INVALID_CONFIG");
    }
    this.bucket = config.bucket;
    this.client = new import_minio.Client({
      endPoint: config.host,
      port: config.port ?? DEFAULT_PORT,
      useSSL: config.useSSL ?? false,
      accessKey: config.accessKey,
      secretKey: config.secretKey,
      region: config.region ?? DEFAULT_REGION
    });
  }
  /**
   * Create a StorageClient from environment variables.
   *
   * Reads: S3_HOST, S3_PORT, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY,
   * S3_REGION, S3_BUCKET
   */
  static fromEnv() {
    const host = process.env.S3_HOST;
    if (!host) {
      throw new StorageError(
        "S3_HOST environment variable is required. Declare storage in catalog-info.yaml to have credentials injected automatically.",
        "INVALID_CONFIG"
      );
    }
    return new _StorageClient({
      host,
      port: process.env.S3_PORT ? parseInt(process.env.S3_PORT, 10) : DEFAULT_PORT,
      accessKey: process.env.S3_ACCESS_KEY_ID ?? "",
      secretKey: process.env.S3_SECRET_ACCESS_KEY ?? "",
      region: process.env.S3_REGION ?? DEFAULT_REGION,
      bucket: process.env.S3_BUCKET ?? ""
    });
  }
  /**
   * Create a StorageClient for a named bucket.
   *
   * Reads S3_{NAME}_BUCKET for the bucket name, shares
   * connection config (S3_HOST, etc.) from the environment.
   */
  static fromEnvNamed(name) {
    const envSuffix = name.toUpperCase().replace(/-/g, "_");
    const bucket = process.env[`S3_${envSuffix}_BUCKET`];
    if (!bucket) {
      throw new StorageError(
        `S3_${envSuffix}_BUCKET environment variable is required. Declare storage with name "${name}" in catalog-info.yaml.`,
        "INVALID_CONFIG"
      );
    }
    const host = process.env.S3_HOST;
    if (!host) {
      throw new StorageError(
        "S3_HOST environment variable is required.",
        "INVALID_CONFIG"
      );
    }
    return new _StorageClient({
      host,
      port: process.env.S3_PORT ? parseInt(process.env.S3_PORT, 10) : DEFAULT_PORT,
      accessKey: process.env.S3_ACCESS_KEY_ID ?? "",
      secretKey: process.env.S3_SECRET_ACCESS_KEY ?? "",
      region: process.env.S3_REGION ?? DEFAULT_REGION,
      bucket
    });
  }
  // ─── Core Operations ────────────────────────────────────────────
  /**
   * Upload data to the bucket.
   * Accepts Buffer, string, or Readable stream.
   * Content-type is auto-detected from the key if not provided.
   */
  async upload(key, data, options) {
    const contentType = options?.contentType ?? detectContentType(key);
    const metadata = {
      "Content-Type": contentType,
      ...options?.metadata ?? {}
    };
    try {
      const input = typeof data === "string" ? Buffer.from(data) : data;
      await this.client.putObject(this.bucket, key, input, void 0, metadata);
    } catch (error) {
      throw this.wrapError("Upload failed", key, error);
    }
  }
  /**
   * Download an object and return its contents as a Buffer.
   * For large files, use downloadStream() instead.
   */
  async download(key) {
    try {
      const stream = await this.client.getObject(this.bucket, key);
      return await this.streamToBuffer(stream);
    } catch (error) {
      throw this.wrapError("Download failed", key, error);
    }
  }
  /**
   * Download an object as a Readable stream.
   * Use this for large files to avoid loading everything into memory.
   */
  async downloadStream(key) {
    try {
      return await this.client.getObject(this.bucket, key);
    } catch (error) {
      throw this.wrapError("Download failed", key, error);
    }
  }
  /**
   * Delete one or more objects.
   * Accepts a single key or an array of keys for bulk delete.
   */
  async delete(keys) {
    try {
      const keyArray = typeof keys === "string" ? [keys] : [...keys];
      if (keyArray.length === 1) {
        await this.client.removeObject(this.bucket, keyArray[0]);
      } else {
        await this.client.removeObjects(
          this.bucket,
          keyArray.map((name) => ({ name }))
        );
      }
    } catch (error) {
      const keyStr = typeof keys === "string" ? keys : `${keys.length} objects`;
      throw this.wrapError("Delete failed", keyStr, error);
    }
  }
  // ─── Listing & Metadata ─────────────────────────────────────────
  /**
   * List objects with an optional prefix.
   * Returns a complete array (not a stream) for simplicity.
   */
  async list(prefix) {
    try {
      const stream = this.client.listObjectsV2(this.bucket, prefix ?? "", true);
      const items = [];
      return new Promise((resolve, reject) => {
        stream.on("data", (obj) => {
          if (obj.name) {
            items.push({
              key: obj.name,
              size: obj.size,
              etag: obj.etag,
              lastModified: obj.lastModified
            });
          }
        });
        stream.on("end", () => resolve(items));
        stream.on("error", reject);
      });
    } catch (error) {
      throw this.wrapError("List failed", prefix ?? "", error);
    }
  }
  /**
   * Check if an object exists.
   */
  async exists(key) {
    try {
      await this.client.statObject(this.bucket, key);
      return true;
    } catch (error) {
      if (this.isNotFoundError(error)) {
        return false;
      }
      throw this.wrapError("Exists check failed", key, error);
    }
  }
  /**
   * Get object metadata (size, etag, lastModified, contentType).
   */
  async stat(key) {
    try {
      const result = await this.client.statObject(this.bucket, key);
      return {
        size: result.size,
        etag: result.etag,
        lastModified: result.lastModified,
        contentType: result.metaData?.["content-type"] ?? "application/octet-stream"
      };
    } catch (error) {
      throw this.wrapError("Stat failed", key, error);
    }
  }
  // ─── Presigned URLs ─────────────────────────────────────────────
  /**
   * Generate a presigned download URL.
   */
  async getSignedUrl(key, options) {
    try {
      const expiry = options?.expiresIn ?? DEFAULT_SIGNED_URL_EXPIRY;
      return await this.client.presignedGetObject(this.bucket, key, expiry);
    } catch (error) {
      throw this.wrapError("Presign download URL failed", key, error);
    }
  }
  /**
   * Generate a presigned upload URL (PUT).
   */
  async getUploadUrl(key, options) {
    try {
      const expiry = options?.expiresIn ?? DEFAULT_SIGNED_URL_EXPIRY;
      return await this.client.presignedPutObject(this.bucket, key, expiry);
    } catch (error) {
      throw this.wrapError("Presign upload URL failed", key, error);
    }
  }
  // ─── Internal Helpers ───────────────────────────────────────────
  async streamToBuffer(stream) {
    const chunks = [];
    for await (const chunk of stream) {
      chunks.push(typeof chunk === "string" ? Buffer.from(chunk) : chunk);
    }
    return Buffer.concat(chunks);
  }
  isNotFoundError(error) {
    if (error && typeof error === "object") {
      const err = error;
      return err.code === "NotFound" || err.code === "NoSuchKey";
    }
    return false;
  }
  wrapError(action, key, error) {
    if (error instanceof StorageError) return error;
    const err = error;
    const message = err?.message ?? String(error);
    const s3Code = err?.code;
    let code = "UNKNOWN";
    let statusCode = 0;
    if (s3Code === "NotFound" || s3Code === "NoSuchKey") {
      code = "NOT_FOUND";
      statusCode = 404;
    } else if (s3Code === "AccessDenied" || s3Code === "InvalidAccessKeyId") {
      code = "ACCESS_DENIED";
      statusCode = 403;
    } else if (s3Code === "NoSuchBucket") {
      code = "BUCKET_NOT_FOUND";
      statusCode = 404;
    } else if (s3Code === "ECONNREFUSED" || s3Code === "ENOTFOUND" || s3Code === "ETIMEDOUT") {
      code = "NETWORK_ERROR";
    } else if (action.startsWith("Upload")) {
      code = "UPLOAD_FAILED";
    } else if (action.startsWith("Delete")) {
      code = "DELETE_FAILED";
    } else if (action.startsWith("Presign")) {
      code = "PRESIGN_FAILED";
    }
    return new StorageError(
      `${action} for "${key}": ${message}`,
      code,
      statusCode,
      error
    );
  }
};

// src/index.ts
function createStorage(input) {
  if (input === void 0) {
    return StorageClient.fromEnv();
  }
  if (typeof input === "string") {
    return StorageClient.fromEnvNamed(input);
  }
  return new StorageClient({
    host: input.host ?? process.env.S3_HOST ?? "",
    port: input.port ?? (process.env.S3_PORT ? parseInt(process.env.S3_PORT, 10) : void 0),
    accessKey: input.accessKey ?? process.env.S3_ACCESS_KEY_ID ?? "",
    secretKey: input.secretKey ?? process.env.S3_SECRET_ACCESS_KEY ?? "",
    region: input.region ?? process.env.S3_REGION,
    bucket: input.bucket,
    useSSL: input.useSSL
  });
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  StorageClient,
  StorageError,
  createStorage,
  detectContentType
});
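As the implementation above shows, `fromEnvNamed` upper-cases the name and replaces hyphens with underscores before looking up `S3_{NAME}_BUCKET`; host, credentials, region, and port still come from the shared `S3_*` variables. A sketch of the resulting mapping, with made-up bucket and host values:

```typescript
import { createStorage } from 'tawa-storage';

// Hypothetical environment for a bucket declared as "user-uploads":
//   S3_HOST=minio.internal  S3_ACCESS_KEY_ID=...  S3_SECRET_ACCESS_KEY=...
//   S3_USER_UPLOADS_BUCKET=user-uploads-prod
async function demo(): Promise<void> {
  // "user-uploads" -> suffix "USER_UPLOADS" -> reads S3_USER_UPLOADS_BUCKET.
  // If that variable is missing, createStorage throws a StorageError with
  // code 'INVALID_CONFIG' before any network call is made.
  const uploads = createStorage('user-uploads');

  await uploads.upload('avatars/42.png', Buffer.from('...')); // hypothetical key
}
```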
package/dist/index.mjs
ADDED
@@ -0,0 +1,364 @@
// src/client.ts
import { Client as MinioClient } from "minio";

// src/errors.ts
var StorageError = class extends Error {
  constructor(message, code, statusCode = 0, cause) {
    super(message);
    this.code = code;
    this.statusCode = statusCode;
    this.cause = cause;
    this.name = "StorageError";
  }
};

// src/mime.ts
var MIME_MAP = {
  // Text
  ".txt": "text/plain",
  ".html": "text/html",
  ".htm": "text/html",
  ".css": "text/css",
  ".csv": "text/csv",
  ".xml": "text/xml",
  // Application
  ".json": "application/json",
  ".js": "application/javascript",
  ".mjs": "application/javascript",
  ".pdf": "application/pdf",
  ".zip": "application/zip",
  ".gz": "application/gzip",
  ".tar": "application/x-tar",
  ".yaml": "application/x-yaml",
  ".yml": "application/x-yaml",
  ".xls": "application/vnd.ms-excel",
  ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
  ".doc": "application/msword",
  ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
  // Image
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".gif": "image/gif",
  ".svg": "image/svg+xml",
  ".webp": "image/webp",
  ".ico": "image/x-icon",
  ".avif": "image/avif",
  // Audio/Video
  ".mp3": "audio/mpeg",
  ".mp4": "video/mp4",
  ".webm": "video/webm",
  ".ogg": "audio/ogg",
  // Font
  ".woff": "font/woff",
  ".woff2": "font/woff2",
  ".ttf": "font/ttf",
  ".otf": "font/otf"
};
var DEFAULT_MIME = "application/octet-stream";
function detectContentType(key) {
  const lastDot = key.lastIndexOf(".");
  if (lastDot === -1) return DEFAULT_MIME;
  const ext = key.slice(lastDot).toLowerCase();
  return MIME_MAP[ext] ?? DEFAULT_MIME;
}

// src/client.ts
var DEFAULT_PORT = 9e3;
var DEFAULT_REGION = "us-east-1";
var DEFAULT_SIGNED_URL_EXPIRY = 3600;
var StorageClient = class _StorageClient {
  client;
  bucket;
  constructor(config) {
    if (!config.host) {
      throw new StorageError("host is required", "INVALID_CONFIG");
    }
    if (!config.accessKey) {
      throw new StorageError("accessKey is required", "INVALID_CONFIG");
    }
    if (!config.secretKey) {
      throw new StorageError("secretKey is required", "INVALID_CONFIG");
    }
    if (!config.bucket) {
      throw new StorageError("bucket is required", "INVALID_CONFIG");
    }
    this.bucket = config.bucket;
    this.client = new MinioClient({
      endPoint: config.host,
      port: config.port ?? DEFAULT_PORT,
      useSSL: config.useSSL ?? false,
      accessKey: config.accessKey,
      secretKey: config.secretKey,
      region: config.region ?? DEFAULT_REGION
    });
  }
  /**
   * Create a StorageClient from environment variables.
   *
   * Reads: S3_HOST, S3_PORT, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY,
   * S3_REGION, S3_BUCKET
   */
  static fromEnv() {
    const host = process.env.S3_HOST;
    if (!host) {
      throw new StorageError(
        "S3_HOST environment variable is required. Declare storage in catalog-info.yaml to have credentials injected automatically.",
        "INVALID_CONFIG"
      );
    }
    return new _StorageClient({
      host,
      port: process.env.S3_PORT ? parseInt(process.env.S3_PORT, 10) : DEFAULT_PORT,
      accessKey: process.env.S3_ACCESS_KEY_ID ?? "",
      secretKey: process.env.S3_SECRET_ACCESS_KEY ?? "",
      region: process.env.S3_REGION ?? DEFAULT_REGION,
      bucket: process.env.S3_BUCKET ?? ""
    });
  }
  /**
   * Create a StorageClient for a named bucket.
   *
   * Reads S3_{NAME}_BUCKET for the bucket name, shares
   * connection config (S3_HOST, etc.) from the environment.
   */
  static fromEnvNamed(name) {
    const envSuffix = name.toUpperCase().replace(/-/g, "_");
    const bucket = process.env[`S3_${envSuffix}_BUCKET`];
    if (!bucket) {
      throw new StorageError(
        `S3_${envSuffix}_BUCKET environment variable is required. Declare storage with name "${name}" in catalog-info.yaml.`,
        "INVALID_CONFIG"
      );
    }
    const host = process.env.S3_HOST;
    if (!host) {
      throw new StorageError(
        "S3_HOST environment variable is required.",
        "INVALID_CONFIG"
      );
    }
    return new _StorageClient({
      host,
      port: process.env.S3_PORT ? parseInt(process.env.S3_PORT, 10) : DEFAULT_PORT,
      accessKey: process.env.S3_ACCESS_KEY_ID ?? "",
      secretKey: process.env.S3_SECRET_ACCESS_KEY ?? "",
      region: process.env.S3_REGION ?? DEFAULT_REGION,
      bucket
    });
  }
  // ─── Core Operations ────────────────────────────────────────────
  /**
   * Upload data to the bucket.
   * Accepts Buffer, string, or Readable stream.
   * Content-type is auto-detected from the key if not provided.
   */
  async upload(key, data, options) {
    const contentType = options?.contentType ?? detectContentType(key);
    const metadata = {
      "Content-Type": contentType,
      ...options?.metadata ?? {}
    };
    try {
      const input = typeof data === "string" ? Buffer.from(data) : data;
      await this.client.putObject(this.bucket, key, input, void 0, metadata);
    } catch (error) {
      throw this.wrapError("Upload failed", key, error);
    }
  }
  /**
   * Download an object and return its contents as a Buffer.
   * For large files, use downloadStream() instead.
   */
  async download(key) {
    try {
      const stream = await this.client.getObject(this.bucket, key);
      return await this.streamToBuffer(stream);
    } catch (error) {
      throw this.wrapError("Download failed", key, error);
    }
  }
  /**
   * Download an object as a Readable stream.
   * Use this for large files to avoid loading everything into memory.
   */
  async downloadStream(key) {
    try {
      return await this.client.getObject(this.bucket, key);
    } catch (error) {
      throw this.wrapError("Download failed", key, error);
    }
  }
  /**
   * Delete one or more objects.
   * Accepts a single key or an array of keys for bulk delete.
   */
  async delete(keys) {
    try {
      const keyArray = typeof keys === "string" ? [keys] : [...keys];
      if (keyArray.length === 1) {
        await this.client.removeObject(this.bucket, keyArray[0]);
      } else {
        await this.client.removeObjects(
          this.bucket,
          keyArray.map((name) => ({ name }))
        );
      }
    } catch (error) {
      const keyStr = typeof keys === "string" ? keys : `${keys.length} objects`;
      throw this.wrapError("Delete failed", keyStr, error);
    }
  }
  // ─── Listing & Metadata ─────────────────────────────────────────
  /**
   * List objects with an optional prefix.
   * Returns a complete array (not a stream) for simplicity.
   */
  async list(prefix) {
    try {
      const stream = this.client.listObjectsV2(this.bucket, prefix ?? "", true);
      const items = [];
      return new Promise((resolve, reject) => {
        stream.on("data", (obj) => {
          if (obj.name) {
            items.push({
              key: obj.name,
              size: obj.size,
              etag: obj.etag,
              lastModified: obj.lastModified
            });
          }
        });
        stream.on("end", () => resolve(items));
        stream.on("error", reject);
      });
    } catch (error) {
      throw this.wrapError("List failed", prefix ?? "", error);
    }
  }
  /**
   * Check if an object exists.
   */
  async exists(key) {
    try {
      await this.client.statObject(this.bucket, key);
      return true;
    } catch (error) {
      if (this.isNotFoundError(error)) {
        return false;
      }
      throw this.wrapError("Exists check failed", key, error);
    }
  }
  /**
   * Get object metadata (size, etag, lastModified, contentType).
   */
  async stat(key) {
    try {
      const result = await this.client.statObject(this.bucket, key);
      return {
        size: result.size,
        etag: result.etag,
        lastModified: result.lastModified,
        contentType: result.metaData?.["content-type"] ?? "application/octet-stream"
      };
    } catch (error) {
      throw this.wrapError("Stat failed", key, error);
    }
  }
  // ─── Presigned URLs ─────────────────────────────────────────────
  /**
   * Generate a presigned download URL.
   */
  async getSignedUrl(key, options) {
    try {
      const expiry = options?.expiresIn ?? DEFAULT_SIGNED_URL_EXPIRY;
      return await this.client.presignedGetObject(this.bucket, key, expiry);
    } catch (error) {
      throw this.wrapError("Presign download URL failed", key, error);
    }
  }
  /**
   * Generate a presigned upload URL (PUT).
   */
  async getUploadUrl(key, options) {
    try {
      const expiry = options?.expiresIn ?? DEFAULT_SIGNED_URL_EXPIRY;
      return await this.client.presignedPutObject(this.bucket, key, expiry);
    } catch (error) {
      throw this.wrapError("Presign upload URL failed", key, error);
    }
  }
  // ─── Internal Helpers ───────────────────────────────────────────
  async streamToBuffer(stream) {
    const chunks = [];
    for await (const chunk of stream) {
      chunks.push(typeof chunk === "string" ? Buffer.from(chunk) : chunk);
    }
    return Buffer.concat(chunks);
  }
  isNotFoundError(error) {
    if (error && typeof error === "object") {
      const err = error;
      return err.code === "NotFound" || err.code === "NoSuchKey";
    }
    return false;
  }
  wrapError(action, key, error) {
    if (error instanceof StorageError) return error;
    const err = error;
    const message = err?.message ?? String(error);
    const s3Code = err?.code;
    let code = "UNKNOWN";
    let statusCode = 0;
    if (s3Code === "NotFound" || s3Code === "NoSuchKey") {
      code = "NOT_FOUND";
      statusCode = 404;
    } else if (s3Code === "AccessDenied" || s3Code === "InvalidAccessKeyId") {
      code = "ACCESS_DENIED";
      statusCode = 403;
    } else if (s3Code === "NoSuchBucket") {
      code = "BUCKET_NOT_FOUND";
      statusCode = 404;
    } else if (s3Code === "ECONNREFUSED" || s3Code === "ENOTFOUND" || s3Code === "ETIMEDOUT") {
      code = "NETWORK_ERROR";
    } else if (action.startsWith("Upload")) {
      code = "UPLOAD_FAILED";
    } else if (action.startsWith("Delete")) {
      code = "DELETE_FAILED";
    } else if (action.startsWith("Presign")) {
      code = "PRESIGN_FAILED";
    }
    return new StorageError(
      `${action} for "${key}": ${message}`,
      code,
      statusCode,
      error
    );
  }
};

// src/index.ts
function createStorage(input) {
  if (input === void 0) {
    return StorageClient.fromEnv();
  }
  if (typeof input === "string") {
    return StorageClient.fromEnvNamed(input);
  }
  return new StorageClient({
    host: input.host ?? process.env.S3_HOST ?? "",
    port: input.port ?? (process.env.S3_PORT ? parseInt(process.env.S3_PORT, 10) : void 0),
    accessKey: input.accessKey ?? process.env.S3_ACCESS_KEY_ID ?? "",
    secretKey: input.secretKey ?? process.env.S3_SECRET_ACCESS_KEY ?? "",
    region: input.region ?? process.env.S3_REGION,
    bucket: input.bucket,
    useSSL: input.useSSL
  });
}
export {
  StorageClient,
  StorageError,
  createStorage,
  detectContentType
};
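`downloadStream` hands back the MinIO object stream directly, so large objects can be piped straight to disk instead of being buffered in memory by `download()`. A sketch using Node's `stream/promises` pipeline; the object key and destination path are hypothetical:

```typescript
import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { createStorage } from 'tawa-storage';

async function saveToDisk(): Promise<void> {
  const storage = createStorage();
  // Stream the object to disk without holding the whole file in memory.
  const stream = await storage.downloadStream('backups/db-2024-01-01.tar.gz');
  await pipeline(stream, createWriteStream('/tmp/db-2024-01-01.tar.gz'));
}
```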
package/package.json
ADDED
@@ -0,0 +1,53 @@
{
  "name": "tawa-storage",
  "version": "0.1.0",
  "description": "S3-compatible object storage SDK for the Tawa platform",
  "main": "dist/index.js",
  "module": "dist/index.mjs",
  "types": "dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.mjs",
      "require": "./dist/index.js"
    }
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsup src/index.ts --format cjs,esm --dts --clean",
    "test": "vitest run",
    "test:watch": "vitest",
    "lint": "tsc --noEmit",
    "prepublishOnly": "npm run build",
    "publish:npm": "npm publish --access public",
    "publish:forgejo": "npm publish --registry https://git.insureco.io/api/packages/insureco/npm/"
  },
  "keywords": [
    "tawa",
    "storage",
    "s3",
    "minio",
    "object-storage",
    "insureco"
  ],
  "author": "InsurEco",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://git.insureco.io/insureco/tawa-storage"
  },
  "dependencies": {
    "minio": "^8.0.0"
  },
  "devDependencies": {
    "@types/node": "^22.0.0",
    "tsup": "^8.0.0",
    "typescript": "^5.7.0",
    "vitest": "^2.0.0"
  },
  "engines": {
    "node": ">=18.0.0"
  }
}
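The `exports` map routes `require` to `dist/index.js` and `import` to `dist/index.mjs`, with a single `dist/index.d.ts` serving both for types, so CommonJS and ESM consumers should see the same API. A sketch of both entry points using the explicit-config form from the JSDoc example; the local MinIO host and credentials are placeholders:

```typescript
// ESM (a .mts file or a project with "type": "module")
import { createStorage } from 'tawa-storage';

// CommonJS (a .cts file or the default "type": "commonjs") would instead use:
// const { createStorage } = require('tawa-storage');

const storage = createStorage({
  bucket: 'test',
  host: '127.0.0.1',
  accessKey: 'minioadmin', // placeholder local MinIO credentials
  secretKey: 'minioadmin',
});
```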