@voyantjs/storage 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +46 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/lib/sigv4.d.ts +50 -0
- package/dist/lib/sigv4.d.ts.map +1 -0
- package/dist/lib/sigv4.js +165 -0
- package/dist/providers/local.d.ts +26 -0
- package/dist/providers/local.d.ts.map +1 -0
- package/dist/providers/local.js +54 -0
- package/dist/providers/r2.d.ts +52 -0
- package/dist/providers/r2.d.ts.map +1 -0
- package/dist/providers/r2.js +51 -0
- package/dist/providers/s3.d.ts +57 -0
- package/dist/providers/s3.d.ts.map +1 -0
- package/dist/providers/s3.js +138 -0
- package/dist/service.d.ts +21 -0
- package/dist/service.d.ts.map +1 -0
- package/dist/service.js +22 -0
- package/dist/types.d.ts +57 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +1 -0
- package/package.json +79 -0
package/README.md
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
# @voyantjs/storage
|
|
2
|
+
|
|
3
|
+
Storage provider abstraction for Voyant. `StorageProvider` interface plus providers for local (in-memory), Cloudflare R2, and S3-compatible (AWS SigV4 via Web Crypto — works in Cloudflare Workers).
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pnpm add @voyantjs/storage
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
```typescript
|
|
14
|
+
import { createStorageService } from "@voyantjs/storage"
|
|
15
|
+
import { s3Provider } from "@voyantjs/storage/providers/s3"
|
|
16
|
+
|
|
17
|
+
const storage = createStorageService(
|
|
18
|
+
s3Provider({
|
|
19
|
+
region: "us-east-1",
|
|
20
|
+
bucket: "my-bucket",
|
|
21
|
+
accessKeyId: env.AWS_ACCESS_KEY_ID,
|
|
22
|
+
secretAccessKey: env.AWS_SECRET_ACCESS_KEY,
|
|
23
|
+
}),
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
await storage.upload({ key: "files/x.pdf", body: buffer })
|
|
27
|
+
const url = await storage.signedUrl({ key: "files/x.pdf", expiresIn: 300 })
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
The S3 provider supports `forcePathStyle` and a custom `endpoint` for S3-compatible services (Wasabi, MinIO, etc.). SigV4 signing is verified against AWS canonical test vectors.
|
|
31
|
+
|
|
32
|
+
## Exports
|
|
33
|
+
|
|
34
|
+
| Entry | Description |
|
|
35
|
+
| --- | --- |
|
|
36
|
+
| `.` | Barrel re-exports |
|
|
37
|
+
| `./types` | `StorageProvider` interface |
|
|
38
|
+
| `./service` | `createStorageService` |
|
|
39
|
+
| `./providers/local` | In-memory provider |
|
|
40
|
+
| `./providers/r2` | Cloudflare R2 binding provider |
|
|
41
|
+
| `./providers/s3` | S3 provider with SigV4 |
|
|
42
|
+
| `./lib/sigv4` | `signRequest`, `presignUrl` primitives |
|
|
43
|
+
|
|
44
|
+
## License
|
|
45
|
+
|
|
46
|
+
Apache-2.0
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
export type { PresignUrlInput, SignedRequestHeaders, SignRequestInput, SigV4Context, SigV4Credentials, } from "./lib/sigv4.js";
|
|
2
|
+
export { presignUrl, signRequest } from "./lib/sigv4.js";
|
|
3
|
+
export type { LocalStorageOptions } from "./providers/local.js";
|
|
4
|
+
export { createLocalStorageProvider } from "./providers/local.js";
|
|
5
|
+
export type { R2BucketLike, R2ObjectLike, R2ProviderOptions, R2PutOptionsLike, } from "./providers/r2.js";
|
|
6
|
+
export { createR2Provider } from "./providers/r2.js";
|
|
7
|
+
export type { S3Fetch, S3ProviderOptions } from "./providers/s3.js";
|
|
8
|
+
export { createS3Provider } from "./providers/s3.js";
|
|
9
|
+
export type { StorageService } from "./service.js";
|
|
10
|
+
export { createStorageService, StorageError } from "./service.js";
|
|
11
|
+
export type { StorageObject, StorageProvider, StorageUploadBody, UploadOptions, } from "./types.js";
|
|
12
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EACV,eAAe,EACf,oBAAoB,EACpB,gBAAgB,EAChB,YAAY,EACZ,gBAAgB,GACjB,MAAM,gBAAgB,CAAA;AACvB,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAA;AACxD,YAAY,EAAE,mBAAmB,EAAE,MAAM,sBAAsB,CAAA;AAC/D,OAAO,EAAE,0BAA0B,EAAE,MAAM,sBAAsB,CAAA;AACjE,YAAY,EACV,YAAY,EACZ,YAAY,EACZ,iBAAiB,EACjB,gBAAgB,GACjB,MAAM,mBAAmB,CAAA;AAC1B,OAAO,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAA;AACpD,YAAY,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAA;AACnE,OAAO,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAA;AACpD,YAAY,EAAE,cAAc,EAAE,MAAM,cAAc,CAAA;AAClD,OAAO,EAAE,oBAAoB,EAAE,YAAY,EAAE,MAAM,cAAc,CAAA;AACjE,YAAY,EACV,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,aAAa,GACd,MAAM,YAAY,CAAA"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export { presignUrl, signRequest } from "./lib/sigv4.js";
|
|
2
|
+
export { createLocalStorageProvider } from "./providers/local.js";
|
|
3
|
+
export { createR2Provider } from "./providers/r2.js";
|
|
4
|
+
export { createS3Provider } from "./providers/s3.js";
|
|
5
|
+
export { createStorageService, StorageError } from "./service.js";
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Minimal AWS SigV4 signing implementation using Web Crypto. Works in
|
|
3
|
+
* Cloudflare Workers, modern Node, Deno, and browsers.
|
|
4
|
+
*
|
|
5
|
+
* Supports two use cases needed by the S3 storage provider:
|
|
6
|
+
* - `signRequest`: attach an `Authorization` header for a direct request
|
|
7
|
+
* - `presignUrl`: produce a time-limited URL via query-string signing
|
|
8
|
+
*
|
|
9
|
+
* Reference:
|
|
10
|
+
* https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_sigv_create-signed-request.html
|
|
11
|
+
*/
|
|
12
|
+
export interface SigV4Credentials {
|
|
13
|
+
accessKeyId: string;
|
|
14
|
+
secretAccessKey: string;
|
|
15
|
+
sessionToken?: string;
|
|
16
|
+
}
|
|
17
|
+
export interface SigV4Context {
|
|
18
|
+
credentials: SigV4Credentials;
|
|
19
|
+
region: string;
|
|
20
|
+
service: string;
|
|
21
|
+
}
|
|
22
|
+
export interface SignRequestInput extends SigV4Context {
|
|
23
|
+
method: string;
|
|
24
|
+
url: string;
|
|
25
|
+
headers?: Record<string, string>;
|
|
26
|
+
body?: Uint8Array;
|
|
27
|
+
/** Override "now" (milliseconds since epoch). Useful for tests. */
|
|
28
|
+
now?: number;
|
|
29
|
+
}
|
|
30
|
+
export interface SignedRequestHeaders {
|
|
31
|
+
headers: Record<string, string>;
|
|
32
|
+
}
|
|
33
|
+
export interface PresignUrlInput extends SigV4Context {
|
|
34
|
+
method: string;
|
|
35
|
+
url: string;
|
|
36
|
+
expiresIn: number;
|
|
37
|
+
/** Extra signed headers beyond the default `host`. */
|
|
38
|
+
headers?: Record<string, string>;
|
|
39
|
+
/** Override "now" (milliseconds since epoch). Useful for tests. */
|
|
40
|
+
now?: number;
|
|
41
|
+
}
|
|
42
|
+
/**
|
|
43
|
+
* Sign a request and return the `Authorization` (and related) headers.
|
|
44
|
+
*/
|
|
45
|
+
export declare function signRequest(input: SignRequestInput): Promise<SignedRequestHeaders>;
|
|
46
|
+
/**
|
|
47
|
+
* Produce a presigned URL (query-string signing) for the given method.
|
|
48
|
+
*/
|
|
49
|
+
export declare function presignUrl(input: PresignUrlInput): Promise<string>;
|
|
50
|
+
//# sourceMappingURL=sigv4.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sigv4.d.ts","sourceRoot":"","sources":["../../src/lib/sigv4.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAIH,MAAM,WAAW,gBAAgB;IAC/B,WAAW,EAAE,MAAM,CAAA;IACnB,eAAe,EAAE,MAAM,CAAA;IACvB,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,YAAY;IAC3B,WAAW,EAAE,gBAAgB,CAAA;IAC7B,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,EAAE,MAAM,CAAA;CAChB;AAED,MAAM,WAAW,gBAAiB,SAAQ,YAAY;IACpD,MAAM,EAAE,MAAM,CAAA;IACd,GAAG,EAAE,MAAM,CAAA;IACX,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAChC,IAAI,CAAC,EAAE,UAAU,CAAA;IACjB,mEAAmE;IACnE,GAAG,CAAC,EAAE,MAAM,CAAA;CACb;AAED,MAAM,WAAW,oBAAoB;IACnC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAChC;AAED,MAAM,WAAW,eAAgB,SAAQ,YAAY;IACnD,MAAM,EAAE,MAAM,CAAA;IACd,GAAG,EAAE,MAAM,CAAA;IACX,SAAS,EAAE,MAAM,CAAA;IACjB,sDAAsD;IACtD,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAChC,mEAAmE;IACnE,GAAG,CAAC,EAAE,MAAM,CAAA;CACb;AAED;;GAEG;AACH,wBAAsB,WAAW,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAqDxF;AAED;;GAEG;AACH,wBAAsB,UAAU,CAAC,KAAK,EAAE,eAAe,GAAG,OAAO,CAAC,MAAM,CAAC,CAgDxE"}
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
/**
 * Minimal AWS SigV4 signing implementation using Web Crypto. Works in
 * Cloudflare Workers, modern Node, Deno, and browsers.
 *
 * Supports two use cases needed by the S3 storage provider:
 * - `signRequest`: attach an `Authorization` header for a direct request
 * - `presignUrl`: produce a time-limited URL via query-string signing
 *
 * Reference:
 * https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_sigv_create-signed-request.html
 */
const encoder = new TextEncoder();
/**
 * Sign a request and return the `Authorization` (and related) headers.
 *
 * The returned headers are the caller's headers plus `host`,
 * `x-amz-date`, `x-amz-content-sha256`, `x-amz-security-token` (when a
 * session token is present), and `Authorization`.
 */
export async function signRequest(input) {
    const { amzDate, dateStamp } = datesFromNow(input.now);
    const url = new URL(input.url);
    // An absent body is signed as the hash of the empty byte string.
    const bodyBytes = input.body ?? new Uint8Array();
    const payloadHash = await hexHash(bodyBytes);
    const baseHeaders = {
        ...(input.headers ?? {}),
        host: url.host,
        "x-amz-date": amzDate,
        "x-amz-content-sha256": payloadHash,
    };
    if (input.credentials.sessionToken) {
        baseHeaders["x-amz-security-token"] = input.credentials.sessionToken;
    }
    const canonicalQuery = canonicalQueryString(url);
    const { canonicalHeaders, signedHeaders } = canonicalizeHeaders(baseHeaders);
    const canonicalRequest = [
        input.method.toUpperCase(),
        canonicalUri(url.pathname),
        canonicalQuery,
        canonicalHeaders,
        signedHeaders,
        payloadHash,
    ].join("\n");
    const scope = `${dateStamp}/${input.region}/${input.service}/aws4_request`;
    const stringToSign = [
        "AWS4-HMAC-SHA256",
        amzDate,
        scope,
        await hexHash(encoder.encode(canonicalRequest)),
    ].join("\n");
    const signingKey = await deriveSigningKey(input.credentials.secretAccessKey, dateStamp, input.region, input.service);
    const signature = hex(await hmac(signingKey, stringToSign));
    const authHeader = `AWS4-HMAC-SHA256 Credential=${input.credentials.accessKeyId}/${scope}` +
        `, SignedHeaders=${signedHeaders}, Signature=${signature}`;
    return {
        headers: {
            ...baseHeaders,
            Authorization: authHeader,
        },
    };
}
/**
 * Produce a presigned URL (query-string signing) for the given method.
 * The payload is left unsigned (`UNSIGNED-PAYLOAD`), which is how S3
 * presigned URLs are normally issued.
 */
export async function presignUrl(input) {
    const { amzDate, dateStamp } = datesFromNow(input.now);
    const url = new URL(input.url);
    const scope = `${dateStamp}/${input.region}/${input.service}/aws4_request`;
    // Only `host` (plus caller-supplied extras) is a signed header here;
    // everything else travels in the query string.
    const headers = {
        ...(input.headers ?? {}),
        host: url.host,
    };
    const { canonicalHeaders, signedHeaders } = canonicalizeHeaders(headers);
    const params = new URLSearchParams(url.searchParams);
    params.set("X-Amz-Algorithm", "AWS4-HMAC-SHA256");
    params.set("X-Amz-Credential", `${input.credentials.accessKeyId}/${scope}`);
    params.set("X-Amz-Date", amzDate);
    params.set("X-Amz-Expires", String(input.expiresIn));
    params.set("X-Amz-SignedHeaders", signedHeaders);
    if (input.credentials.sessionToken) {
        params.set("X-Amz-Security-Token", input.credentials.sessionToken);
    }
    // The signature covers every query parameter except X-Amz-Signature
    // itself, so the URL is updated before canonicalization.
    url.search = params.toString();
    const canonicalRequest = [
        input.method.toUpperCase(),
        canonicalUri(url.pathname),
        canonicalQueryString(url),
        canonicalHeaders,
        signedHeaders,
        "UNSIGNED-PAYLOAD",
    ].join("\n");
    const stringToSign = [
        "AWS4-HMAC-SHA256",
        amzDate,
        scope,
        await hexHash(encoder.encode(canonicalRequest)),
    ].join("\n");
    const signingKey = await deriveSigningKey(input.credentials.secretAccessKey, dateStamp, input.region, input.service);
    const signature = hex(await hmac(signingKey, stringToSign));
    params.set("X-Amz-Signature", signature);
    url.search = params.toString();
    return url.toString();
}
// --- helpers --- //
/**
 * Compute the `YYYYMMDD'T'HHMMSS'Z'` timestamp and the `YYYYMMDD` date
 * stamp used in SigV4 credential scopes.
 */
function datesFromNow(nowMs) {
    const d = new Date(nowMs ?? Date.now());
    // "2025-10-05T22:30:45.123Z" -> "20251005T223045Z"
    const iso = d.toISOString().replace(/[:-]|\.\d{3}/g, "");
    return { amzDate: iso, dateStamp: iso.slice(0, 8) };
}
/**
 * Canonicalize the request path: RFC 3986-encode each segment while
 * preserving "/" separators.
 *
 * NOTE(review): `new URL(...).pathname` is already percent-encoded, so
 * re-encoding each segment here double-encodes "%" for keys that contain
 * reserved characters. Double-encoding matches the generic SigV4 rule for
 * most AWS services, but S3 expects a single encoding — confirm behavior
 * with keys containing spaces or "%" before relying on it.
 */
function canonicalUri(path) {
    if (!path)
        return "/";
    return path
        .split("/")
        .map((segment) => encodeRfc3986(segment))
        .join("/");
}
/**
 * Build the canonical query string: RFC 3986-encode keys and values,
 * then sort by key and, for duplicate keys, by value.
 */
function canonicalQueryString(url) {
    const pairs = [];
    for (const [key, value] of url.searchParams.entries()) {
        pairs.push([encodeRfc3986(key), encodeRfc3986(value)]);
    }
    // Fixed: the previous comparator never returned 0 for equal pairs,
    // making it inconsistent — Array.prototype.sort's result is then
    // implementation-defined. A proper three-way comparator is used.
    pairs.sort(([aKey, aValue], [bKey, bValue]) => {
        if (aKey !== bKey)
            return aKey < bKey ? -1 : 1;
        if (aValue === bValue)
            return 0;
        return aValue < bValue ? -1 : 1;
    });
    return pairs.map(([k, v]) => `${k}=${v}`).join("&");
}
/**
 * Lowercase header names, trim values and collapse internal whitespace,
 * then sort by name — per the SigV4 canonical-headers rules. Returns the
 * newline-terminated canonical block and the ";"-joined signed list.
 */
function canonicalizeHeaders(headers) {
    const entries = Object.entries(headers)
        .map(([k, v]) => [k.toLowerCase(), v.trim().replace(/\s+/g, " ")])
        .sort(([a], [b]) => (a < b ? -1 : a > b ? 1 : 0));
    const canonicalHeaders = `${entries.map(([k, v]) => `${k}:${v}`).join("\n")}\n`;
    const signedHeaders = entries.map(([k]) => k).join(";");
    return { canonicalHeaders, signedHeaders };
}
/** Strict RFC 3986 percent-encoding (also encodes ! ' ( ) *). */
function encodeRfc3986(value) {
    return encodeURIComponent(value).replace(/[!'()*]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`);
}
/**
 * SHA-256 of `data`, hex-encoded. Copies the input first so the digest
 * call never sees a view over a shared or detached buffer.
 */
async function hexHash(data) {
    const copy = new Uint8Array(data.byteLength);
    copy.set(data);
    const digest = await crypto.subtle.digest("SHA-256", copy);
    return hex(digest);
}
/** HMAC-SHA-256 of the UTF-8 bytes of `data`, keyed by `key`. */
async function hmac(key, data) {
    const source = key instanceof Uint8Array ? key : new Uint8Array(key);
    const keyBytes = new Uint8Array(source.byteLength);
    keyBytes.set(source);
    const cryptoKey = await crypto.subtle.importKey("raw", keyBytes, { name: "HMAC", hash: "SHA-256" }, false, ["sign"]);
    const payloadSource = encoder.encode(data);
    const payload = new Uint8Array(payloadSource.byteLength);
    payload.set(payloadSource);
    return crypto.subtle.sign("HMAC", cryptoKey, payload);
}
/**
 * Derive the SigV4 signing key via the standard HMAC chain:
 * date -> region -> service -> "aws4_request".
 */
async function deriveSigningKey(secret, dateStamp, region, service) {
    const kDate = await hmac(encoder.encode(`AWS4${secret}`), dateStamp);
    const kRegion = await hmac(kDate, region);
    const kService = await hmac(kRegion, service);
    return hmac(kService, "aws4_request");
}
/** Hex-encode the bytes of an ArrayBuffer (or typed-array contents). */
function hex(buffer) {
    const bytes = new Uint8Array(buffer);
    let out = "";
    for (const b of bytes) {
        out += b.toString(16).padStart(2, "0");
    }
    return out;
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { StorageProvider } from "../types.js";
|
|
2
|
+
/**
|
|
3
|
+
* Options for {@link createLocalStorageProvider}.
|
|
4
|
+
*/
|
|
5
|
+
export interface LocalStorageOptions {
|
|
6
|
+
/** Provider name (defaults to `"local"`). */
|
|
7
|
+
name?: string;
|
|
8
|
+
/**
|
|
9
|
+
* Base URL used to construct the string returned from `signedUrl` and
|
|
10
|
+
* `upload`. Defaults to `"local://"`. The final URL is `${baseUrl}${key}`.
|
|
11
|
+
*/
|
|
12
|
+
baseUrl?: string;
|
|
13
|
+
/**
|
|
14
|
+
* Function used to mint random keys when `UploadOptions.key` is not
|
|
15
|
+
* provided. Defaults to `crypto.randomUUID()` via the global `crypto`.
|
|
16
|
+
*/
|
|
17
|
+
generateKey?: () => string;
|
|
18
|
+
}
|
|
19
|
+
/**
|
|
20
|
+
* Create an in-memory storage provider. Useful for unit tests and for
|
|
21
|
+
* locally running workflows without touching remote storage. Data is
|
|
22
|
+
* kept in a `Map` held inside the closure and is lost when the process
|
|
23
|
+
* exits.
|
|
24
|
+
*/
|
|
25
|
+
export declare function createLocalStorageProvider(options?: LocalStorageOptions): StorageProvider;
|
|
26
|
+
//# sourceMappingURL=local.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"local.d.ts","sourceRoot":"","sources":["../../src/providers/local.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAiB,eAAe,EAAoC,MAAM,aAAa,CAAA;AAEnG;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,6CAA6C;IAC7C,IAAI,CAAC,EAAE,MAAM,CAAA;IACb;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,MAAM,CAAA;CAC3B;AAQD;;;;;GAKG;AACH,wBAAgB,0BAA0B,CAAC,OAAO,GAAE,mBAAwB,GAAG,eAAe,CAwC7F"}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
/**
 * Create an in-memory storage provider. Useful for unit tests and for
 * locally running workflows without touching remote storage. Data is
 * kept in a `Map` held inside the closure and is lost when the process
 * exits.
 *
 * Options:
 * - `name`: provider name, defaults to `"local"`.
 * - `baseUrl`: prefix for returned URLs, defaults to `"local://"`;
 *   the final URL is `${baseUrl}${key}`.
 * - `generateKey`: key factory used when `upload` is called without a
 *   key; defaults to `crypto.randomUUID()` with a timestamp fallback.
 */
export function createLocalStorageProvider(options = {}) {
    const name = options.name ?? "local";
    const baseUrl = options.baseUrl ?? "local://";
    const generateKey = options.generateKey ??
        (() => {
            const g = globalThis;
            // Fall back to timestamp+random where randomUUID is unavailable.
            return g.crypto?.randomUUID?.() ?? `${Date.now()}-${Math.random().toString(36).slice(2)}`;
        });
    const store = new Map();
    async function upload(body, opts = {}) {
        const key = opts.key ?? generateKey();
        const bytes = await toBytes(body);
        const record = { bytes };
        if (opts.contentType !== undefined)
            record.contentType = opts.contentType;
        if (opts.metadata !== undefined)
            record.metadata = opts.metadata;
        store.set(key, record);
        return { key, url: `${baseUrl}${key}` };
    }
    return {
        name,
        upload,
        async delete(key) {
            store.delete(key);
        },
        async signedUrl(key) {
            // No real signing locally; just echo the public-style URL.
            return `${baseUrl}${key}`;
        },
        async get(key) {
            const record = store.get(key);
            if (!record)
                return null;
            // Copy into a fresh ArrayBuffer so downstream mutation can't corrupt the store.
            const copy = new Uint8Array(record.bytes.byteLength);
            copy.set(record.bytes);
            return copy.buffer;
        },
    };
}
/**
 * Normalize an upload body to a private `Uint8Array` copy.
 *
 * Fixed: `Uint8Array` and `ArrayBuffer` inputs were previously stored by
 * reference, so a caller mutating its buffer after `upload` silently
 * corrupted the stored object — inconsistent with `get`, which already
 * copies on the way out. Both paths now take a defensive copy.
 */
async function toBytes(body) {
    if (body instanceof Uint8Array)
        return body.slice();
    if (body instanceof ArrayBuffer)
        return new Uint8Array(body.slice(0));
    // Blob-like input: `arrayBuffer()` already yields a fresh buffer.
    const buffer = await body.arrayBuffer();
    return new Uint8Array(buffer);
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import type { StorageProvider } from "../types.js";
|
|
2
|
+
/**
|
|
3
|
+
* Subset of the Cloudflare Workers `R2Bucket` binding we depend on. Kept
|
|
4
|
+
* as a minimal structural type so this package does not need a runtime
|
|
5
|
+
* dependency on `@cloudflare/workers-types`.
|
|
6
|
+
*/
|
|
7
|
+
export interface R2BucketLike {
|
|
8
|
+
put(key: string, value: ArrayBuffer | ArrayBufferView | Blob | string | ReadableStream | null, options?: R2PutOptionsLike): Promise<unknown>;
|
|
9
|
+
delete(key: string | string[]): Promise<void>;
|
|
10
|
+
get(key: string): Promise<R2ObjectLike | null>;
|
|
11
|
+
}
|
|
12
|
+
export interface R2PutOptionsLike {
|
|
13
|
+
httpMetadata?: {
|
|
14
|
+
contentType?: string;
|
|
15
|
+
};
|
|
16
|
+
customMetadata?: Record<string, string>;
|
|
17
|
+
}
|
|
18
|
+
export interface R2ObjectLike {
|
|
19
|
+
arrayBuffer(): Promise<ArrayBuffer>;
|
|
20
|
+
}
|
|
21
|
+
/**
|
|
22
|
+
* Options for {@link createR2Provider}.
|
|
23
|
+
*/
|
|
24
|
+
export interface R2ProviderOptions {
|
|
25
|
+
/** Cloudflare R2 bucket binding (from `env.BUCKET_NAME`). */
|
|
26
|
+
bucket: R2BucketLike;
|
|
27
|
+
/**
|
|
28
|
+
* Base URL used to construct public object URLs. Typical values:
|
|
29
|
+
* - a public R2 custom domain: `https://cdn.example.com/`
|
|
30
|
+
* - a Worker route that proxies to the binding: `https://api.example.com/assets/`
|
|
31
|
+
*/
|
|
32
|
+
publicBaseUrl?: string;
|
|
33
|
+
/**
|
|
34
|
+
* Signer invoked by `signedUrl`. Cloudflare R2 bindings do not produce
|
|
35
|
+
* signed URLs directly; templates pass a custom signer that either:
|
|
36
|
+
* - returns a short-lived Worker route URL, or
|
|
37
|
+
* - calls R2's S3-compatible API with SigV4 credentials.
|
|
38
|
+
* When omitted, `signedUrl` returns `${publicBaseUrl}${key}`.
|
|
39
|
+
*/
|
|
40
|
+
signer?: (key: string, expiresIn: number) => Promise<string> | string;
|
|
41
|
+
/** Provider name (defaults to `"r2"`). */
|
|
42
|
+
name?: string;
|
|
43
|
+
/** Custom key generator; defaults to `crypto.randomUUID()`. */
|
|
44
|
+
generateKey?: () => string;
|
|
45
|
+
}
|
|
46
|
+
/**
|
|
47
|
+
* Create a Cloudflare R2 storage provider bound to an R2 bucket binding.
|
|
48
|
+
* The R2 binding handles authentication transparently at the Worker
|
|
49
|
+
* runtime boundary, so no credentials are required at this layer.
|
|
50
|
+
*/
|
|
51
|
+
export declare function createR2Provider(options: R2ProviderOptions): StorageProvider;
|
|
52
|
+
//# sourceMappingURL=r2.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"r2.d.ts","sourceRoot":"","sources":["../../src/providers/r2.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAiB,eAAe,EAAoC,MAAM,aAAa,CAAA;AAEnG;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CACD,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,WAAW,GAAG,eAAe,GAAG,IAAI,GAAG,MAAM,GAAG,cAAc,GAAG,IAAI,EAC5E,OAAO,CAAC,EAAE,gBAAgB,GACzB,OAAO,CAAC,OAAO,CAAC,CAAA;IACnB,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC7C,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC,CAAA;CAC/C;AAED,MAAM,WAAW,gBAAgB;IAC/B,YAAY,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,MAAM,CAAA;KAAE,CAAA;IACvC,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CACxC;AAED,MAAM,WAAW,YAAY;IAC3B,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,CAAA;CACpC;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC,6DAA6D;IAC7D,MAAM,EAAE,YAAY,CAAA;IACpB;;;;OAIG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB;;;;;;OAMG;IACH,MAAM,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,GAAG,MAAM,CAAA;IACrE,0CAA0C;IAC1C,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,+DAA+D;IAC/D,WAAW,CAAC,EAAE,MAAM,MAAM,CAAA;CAC3B;AAED;;;;GAIG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,iBAAiB,GAAG,eAAe,CAuC5E"}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
/**
 * Create a Cloudflare R2 storage provider bound to an R2 bucket binding.
 * The R2 binding handles authentication transparently at the Worker
 * runtime boundary, so no credentials are required at this layer.
 *
 * `signedUrl` delegates to `options.signer` when provided (R2 bindings
 * cannot presign directly); otherwise it returns `${publicBaseUrl}${key}`
 * or, with no base URL configured, the bare key.
 */
export function createR2Provider(options) {
    const name = options.name ?? "r2";
    const publicBaseUrl = options.publicBaseUrl ?? "";
    const generateKey = options.generateKey ??
        (() => {
            const g = globalThis;
            // Fall back to timestamp+random where randomUUID is unavailable.
            return g.crypto?.randomUUID?.() ?? `${Date.now()}-${Math.random().toString(36).slice(2)}`;
        });
    async function upload(body, opts = {}) {
        const key = opts.key ?? generateKey();
        const putOptions = {};
        if (opts.contentType !== undefined) {
            putOptions.httpMetadata = { contentType: opts.contentType };
        }
        if (opts.metadata !== undefined) {
            putOptions.customMetadata = opts.metadata;
        }
        // Fixed: the former `toPutBody` helper had three branches that all
        // returned the body unchanged (dead conditionals). R2 `put` accepts
        // ArrayBuffer, views, blobs, strings and streams directly, so the
        // body is passed straight through.
        await options.bucket.put(key, body, putOptions);
        return { key, url: publicBaseUrl ? `${publicBaseUrl}${key}` : "" };
    }
    return {
        name,
        upload,
        async delete(key) {
            await options.bucket.delete(key);
        },
        async signedUrl(key, expiresIn) {
            if (options.signer)
                return options.signer(key, expiresIn);
            return publicBaseUrl ? `${publicBaseUrl}${key}` : key;
        },
        async get(key) {
            const obj = await options.bucket.get(key);
            if (!obj)
                return null;
            return obj.arrayBuffer();
        },
    };
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import type { StorageProvider } from "../types.js";
|
|
2
|
+
/**
|
|
3
|
+
* Fetch shape used by the S3 provider. Matches the global `fetch` and
|
|
4
|
+
* Cloudflare Workers `fetch`. Tests can stub this.
|
|
5
|
+
*/
|
|
6
|
+
export type S3Fetch = (input: string, init: {
|
|
7
|
+
method: string;
|
|
8
|
+
headers: Record<string, string>;
|
|
9
|
+
body?: Uint8Array;
|
|
10
|
+
}) => Promise<{
|
|
11
|
+
ok: boolean;
|
|
12
|
+
status: number;
|
|
13
|
+
arrayBuffer: () => Promise<ArrayBuffer>;
|
|
14
|
+
text: () => Promise<string>;
|
|
15
|
+
}>;
|
|
16
|
+
/**
|
|
17
|
+
* Options for {@link createS3Provider}.
|
|
18
|
+
*/
|
|
19
|
+
export interface S3ProviderOptions {
|
|
20
|
+
/** AWS access key id. */
|
|
21
|
+
accessKeyId: string;
|
|
22
|
+
/** AWS secret access key. */
|
|
23
|
+
secretAccessKey: string;
|
|
24
|
+
/** Optional session token for temporary credentials. */
|
|
25
|
+
sessionToken?: string;
|
|
26
|
+
/** S3 region (e.g. `"us-east-1"`). */
|
|
27
|
+
region: string;
|
|
28
|
+
/** S3 bucket name. */
|
|
29
|
+
bucket: string;
|
|
30
|
+
/**
|
|
31
|
+
* Endpoint URL override. Defaults to the public AWS S3 endpoint for the
|
|
32
|
+
* region (`https://s3.<region>.amazonaws.com`). Set this for S3-compatible
|
|
33
|
+
* services (MinIO, Backblaze B2, DigitalOcean Spaces, Wasabi, R2 S3 API).
|
|
34
|
+
*/
|
|
35
|
+
endpoint?: string;
|
|
36
|
+
/**
|
|
37
|
+
* When `true`, put the bucket in the URL path rather than the hostname
|
|
38
|
+
* subdomain. Defaults to `true` to stay compatible with the widest set
|
|
39
|
+
* of S3-compatible services. Set to `false` to use
|
|
40
|
+
* `https://<bucket>.s3.<region>.amazonaws.com` virtual-hosted style.
|
|
41
|
+
*/
|
|
42
|
+
forcePathStyle?: boolean;
|
|
43
|
+
/** Base URL used for the public `url` field returned from `upload`. */
|
|
44
|
+
publicBaseUrl?: string;
|
|
45
|
+
/** Override `fetch` (e.g. in tests). Defaults to global `fetch`. */
|
|
46
|
+
fetch?: S3Fetch;
|
|
47
|
+
/** Provider name (defaults to `"s3"`). */
|
|
48
|
+
name?: string;
|
|
49
|
+
/** Custom key generator; defaults to `crypto.randomUUID()`. */
|
|
50
|
+
generateKey?: () => string;
|
|
51
|
+
}
|
|
52
|
+
/**
|
|
53
|
+
* Create an S3 / S3-compatible storage provider. Uses Web Crypto to sign
|
|
54
|
+
* requests with AWS SigV4, so no AWS SDK dependency is required.
|
|
55
|
+
*/
|
|
56
|
+
export declare function createS3Provider(options: S3ProviderOptions): StorageProvider;
|
|
57
|
+
//# sourceMappingURL=s3.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/providers/s3.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAiB,eAAe,EAAoC,MAAM,aAAa,CAAA;AAEnG;;;GAGG;AACH,MAAM,MAAM,OAAO,GAAG,CACpB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE;IACJ,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC/B,IAAI,CAAC,EAAE,UAAU,CAAA;CAClB,KACE,OAAO,CAAC;IACX,EAAE,EAAE,OAAO,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;IACd,WAAW,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAA;IACvC,IAAI,EAAE,MAAM,OAAO,CAAC,MAAM,CAAC,CAAA;CAC5B,CAAC,CAAA;AAEF;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC,yBAAyB;IACzB,WAAW,EAAE,MAAM,CAAA;IACnB,6BAA6B;IAC7B,eAAe,EAAE,MAAM,CAAA;IACvB,wDAAwD;IACxD,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,sCAAsC;IACtC,MAAM,EAAE,MAAM,CAAA;IACd,sBAAsB;IACtB,MAAM,EAAE,MAAM,CAAA;IACd;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB;;;;;OAKG;IACH,cAAc,CAAC,EAAE,OAAO,CAAA;IACxB,uEAAuE;IACvE,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,oEAAoE;IACpE,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,0CAA0C;IAC1C,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,+DAA+D;IAC/D,WAAW,CAAC,EAAE,MAAM,MAAM,CAAA;CAC3B;AAED;;;GAGG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,iBAAiB,GAAG,eAAe,CAsH5E"}
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import { presignUrl, signRequest } from "../lib/sigv4.js";
/**
 * Create an S3 / S3-compatible storage provider. Uses Web Crypto to sign
 * requests with AWS SigV4, so no AWS SDK dependency is required.
 *
 * Supports path-style addressing (the default, widest compatibility) and
 * virtual-hosted-style, a custom `endpoint` for S3-compatible services,
 * and an injectable `fetch` for tests.
 */
export function createS3Provider(options) {
    const name = options.name ?? "s3";
    const forcePathStyle = options.forcePathStyle ?? true;
    const endpoint = options.endpoint ??
        (forcePathStyle
            ? `https://s3.${options.region}.amazonaws.com`
            : `https://${options.bucket}.s3.${options.region}.amazonaws.com`);
    const publicBaseUrl = options.publicBaseUrl ?? "";
    const fetchImpl = options.fetch ?? globalThis.fetch;
    const credentials = {
        accessKeyId: options.accessKeyId,
        secretAccessKey: options.secretAccessKey,
    };
    if (options.sessionToken !== undefined)
        credentials.sessionToken = options.sessionToken;
    const generateKey = options.generateKey ??
        (() => {
            const g = globalThis;
            // Fall back to timestamp+random where randomUUID is unavailable.
            return g.crypto?.randomUUID?.() ?? `${Date.now()}-${Math.random().toString(36).slice(2)}`;
        });
    // Shared SigV4 parameters spread into every signing call.
    const signingContext = { credentials, region: options.region, service: "s3" };
    /** Return the fetch implementation or fail with a uniform error. */
    function requireFetch() {
        if (!fetchImpl)
            throw new Error("S3 provider requires a fetch implementation");
        return fetchImpl;
    }
    /** Build the object URL for `key` under the configured addressing style. */
    function buildUrl(key) {
        const encoded = encodeKey(key);
        return forcePathStyle
            ? `${endpoint}/${encodeURIComponent(options.bucket)}/${encoded}`
            : `${endpoint}/${encoded}`;
    }
    /** Read the error body (best effort) and throw a labeled failure. */
    async function raiseHttpError(label, response) {
        const text = await response.text().catch(() => "");
        throw new Error(`S3 ${label} failed (${response.status}): ${text}`);
    }
    async function upload(body, opts = {}) {
        const doFetch = requireFetch();
        const key = opts.key ?? generateKey();
        const bytes = await toBytes(body);
        const url = buildUrl(key);
        const headers = {};
        if (opts.contentType)
            headers["content-type"] = opts.contentType;
        for (const [metaKey, metaValue] of Object.entries(opts.metadata ?? {})) {
            headers[`x-amz-meta-${metaKey.toLowerCase()}`] = metaValue;
        }
        const signed = await signRequest({
            method: "PUT",
            url,
            headers,
            body: bytes,
            ...signingContext,
        });
        const response = await doFetch(url, {
            method: "PUT",
            headers: signed.headers,
            body: bytes,
        });
        if (!response.ok)
            await raiseHttpError("upload", response);
        return { key, url: publicBaseUrl ? `${publicBaseUrl}${key}` : "" };
    }
    return {
        name,
        upload,
        async delete(key) {
            const doFetch = requireFetch();
            const url = buildUrl(key);
            const signed = await signRequest({ method: "DELETE", url, ...signingContext });
            const response = await doFetch(url, {
                method: "DELETE",
                headers: signed.headers,
            });
            // S3 returns 204 on successful delete, 404 on missing — treat both as success.
            if (!response.ok && response.status !== 404)
                await raiseHttpError("delete", response);
        },
        async signedUrl(key, expiresIn) {
            return presignUrl({
                method: "GET",
                url: buildUrl(key),
                expiresIn,
                ...signingContext,
            });
        },
        async get(key) {
            const doFetch = requireFetch();
            const url = buildUrl(key);
            const signed = await signRequest({ method: "GET", url, ...signingContext });
            const response = await doFetch(url, {
                method: "GET",
                headers: signed.headers,
            });
            if (response.status === 404)
                return null;
            if (!response.ok)
                await raiseHttpError("get", response);
            return response.arrayBuffer();
        },
    };
}
|
|
125
|
+
/**
 * Percent-encode an object key for use in a URL path, preserving `/` as
 * the segment separator. Beyond encodeURIComponent, also escapes the
 * RFC 3986 sub-delims it leaves alone (! ' ( ) *), which S3's canonical
 * URI encoding requires.
 */
function encodeKey(key) {
    const encodeSegment = (segment) =>
        encodeURIComponent(segment).replace(/[!'()*]/g, (ch) => "%" + ch.charCodeAt(0).toString(16).toUpperCase());
    return key.split("/").map(encodeSegment).join("/");
}
|
|
131
|
+
/**
 * Normalize an upload body to a Uint8Array.
 *
 * Uint8Array values are returned as-is (no copy); ArrayBuffers are
 * wrapped; anything else (e.g. Blob) is read via its arrayBuffer()
 * method.
 */
async function toBytes(body) {
    if (body instanceof Uint8Array)
        return body;
    if (body instanceof ArrayBuffer)
        return new Uint8Array(body);
    return new Uint8Array(await body.arrayBuffer());
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { StorageProvider } from "./types.js";
/**
 * Error type for storage failures (e.g. a requested provider cannot be
 * found). `name` is set to `"StorageError"` so instances are
 * distinguishable from plain `Error` in logs and catch blocks.
 */
export declare class StorageError extends Error {
    constructor(message: string);
}
/**
 * Convenience wrapper exposing a single provider. Most deployments use
 * exactly one storage backend at a time; for those cases the service is
 * just a named wrapper that delegates every StorageProvider method.
 */
export interface StorageService extends StorageProvider {
    /** The wrapped provider (also reachable through the delegated methods). */
    readonly provider: StorageProvider;
}
/**
 * Create a storage service that delegates all calls to the given provider.
 */
export declare function createStorageService(provider: StorageProvider): StorageService;
//# sourceMappingURL=service.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"service.d.ts","sourceRoot":"","sources":["../src/service.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,YAAY,CAAA;AAEjD;;GAEG;AACH,qBAAa,YAAa,SAAQ,KAAK;gBACzB,OAAO,EAAE,MAAM;CAI5B;AAED;;;;GAIG;AACH,MAAM,WAAW,cAAe,SAAQ,eAAe;IACrD,4BAA4B;IAC5B,QAAQ,CAAC,QAAQ,EAAE,eAAe,CAAA;CACnC;AAED;;GAEG;AACH,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,eAAe,GAAG,cAAc,CAS9E"}
|
package/dist/service.js
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * Error type for storage failures (e.g. a requested provider cannot be
 * found). Sets `name` to "StorageError" so instances are distinguishable
 * from plain Error in logs and catch blocks.
 */
export class StorageError extends Error {
    constructor(message) {
        super(message);
        this.name = "StorageError";
    }
}
|
|
10
|
+
/**
 * Create a storage service that delegates all calls to the given provider.
 *
 * The returned object carries the provider's `name`, a `provider`
 * back-reference, and each StorageProvider method bound to the provider
 * so the methods can be passed around detached.
 */
export function createStorageService(provider) {
    const service = {
        provider,
        name: provider.name,
    };
    for (const method of ["upload", "delete", "signedUrl", "get"]) {
        service[method] = provider[method].bind(provider);
    }
    return service;
}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
 * Accepted body shapes for uploads. Providers normalize to `Uint8Array`
 * or pass through to their native API.
 */
export type StorageUploadBody = ArrayBuffer | Uint8Array | Blob;
/**
 * Options controlling an upload.
 */
export interface UploadOptions {
    /**
     * Override the object key. When omitted, providers generate a random
     * key (typically `${randomUUID()}` — UUID v4 where available).
     */
    key?: string;
    /** MIME content type (e.g. `"image/png"`). */
    contentType?: string;
    /**
     * Custom metadata; persisted by providers that support it (R2, S3).
     * Note: the S3 provider lower-cases metadata keys when mapping them to
     * `x-amz-meta-*` headers.
     */
    metadata?: Record<string, string>;
}
/**
 * Result of a successful upload.
 */
export interface StorageObject {
    /** Object key inside the bucket/store. */
    key: string;
    /**
     * Public URL for the object when the provider exposes one. Empty string
     * when the object is private and can only be accessed via `signedUrl`.
     */
    url: string;
}
/**
 * Pluggable object storage provider.
 *
 * Built-in implementations:
 * - `local` — in-memory, for dev and tests
 * - `r2` — Cloudflare R2 via the workers binding
 * - `s3` — Amazon S3 / S3-compatible via SigV4
 */
export interface StorageProvider {
    /** Unique provider name (e.g. `"r2"`, `"s3"`, `"local"`). */
    readonly name: string;
    /** Upload an object. */
    upload(body: StorageUploadBody, options?: UploadOptions): Promise<StorageObject>;
    /** Delete an object by key. No-op if the key does not exist. */
    delete(key: string): Promise<void>;
    /**
     * Produce a time-limited URL that grants GET access to the object.
     * `expiresIn` is in seconds.
     */
    signedUrl(key: string, expiresIn: number): Promise<string>;
    /**
     * Fetch an object's bytes. Returns `null` when the object is absent.
     */
    get(key: string): Promise<ArrayBuffer | null>;
}
//# sourceMappingURL=types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,MAAM,iBAAiB,GAAG,WAAW,GAAG,UAAU,GAAG,IAAI,CAAA;AAE/D;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,8CAA8C;IAC9C,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,wEAAwE;IACxE,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAClC;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,0CAA0C;IAC1C,GAAG,EAAE,MAAM,CAAA;IACX;;;OAGG;IACH,GAAG,EAAE,MAAM,CAAA;CACZ;AAED;;;;;;;GAOG;AACH,MAAM,WAAW,eAAe;IAC9B,6DAA6D;IAC7D,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAA;IACrB,wBAAwB;IACxB,MAAM,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,EAAE,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC,CAAA;IAChF,gEAAgE;IAChE,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAClC;;;OAGG;IACH,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IAC1D;;OAEG;IACH,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC,CAAA;CAC9C"}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty: types.ts only declares types, so nothing is emitted
// at runtime beyond this module marker.
export {};
|
package/package.json
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@voyantjs/storage",
|
|
3
|
+
"version": "0.19.0",
|
|
4
|
+
"license": "Apache-2.0",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"exports": {
|
|
7
|
+
".": "./src/index.ts",
|
|
8
|
+
"./types": "./src/types.ts",
|
|
9
|
+
"./service": "./src/service.ts",
|
|
10
|
+
"./providers/local": "./src/providers/local.ts",
|
|
11
|
+
"./providers/r2": "./src/providers/r2.ts",
|
|
12
|
+
"./providers/s3": "./src/providers/s3.ts",
|
|
13
|
+
"./lib/sigv4": "./src/lib/sigv4.ts"
|
|
14
|
+
},
|
|
15
|
+
"scripts": {
|
|
16
|
+
"typecheck": "tsc --noEmit",
|
|
17
|
+
"lint": "biome check src/",
|
|
18
|
+
"test": "vitest run",
|
|
19
|
+
"build": "tsc -p tsconfig.json",
|
|
20
|
+
"clean": "rm -rf dist",
|
|
21
|
+
"prepack": "pnpm run build"
|
|
22
|
+
},
|
|
23
|
+
"files": [
|
|
24
|
+
"dist"
|
|
25
|
+
],
|
|
26
|
+
"publishConfig": {
|
|
27
|
+
"access": "public",
|
|
28
|
+
"exports": {
|
|
29
|
+
".": {
|
|
30
|
+
"types": "./dist/index.d.ts",
|
|
31
|
+
"import": "./dist/index.js",
|
|
32
|
+
"default": "./dist/index.js"
|
|
33
|
+
},
|
|
34
|
+
"./types": {
|
|
35
|
+
"types": "./dist/types.d.ts",
|
|
36
|
+
"import": "./dist/types.js",
|
|
37
|
+
"default": "./dist/types.js"
|
|
38
|
+
},
|
|
39
|
+
"./service": {
|
|
40
|
+
"types": "./dist/service.d.ts",
|
|
41
|
+
"import": "./dist/service.js",
|
|
42
|
+
"default": "./dist/service.js"
|
|
43
|
+
},
|
|
44
|
+
"./providers/local": {
|
|
45
|
+
"types": "./dist/providers/local.d.ts",
|
|
46
|
+
"import": "./dist/providers/local.js",
|
|
47
|
+
"default": "./dist/providers/local.js"
|
|
48
|
+
},
|
|
49
|
+
"./providers/r2": {
|
|
50
|
+
"types": "./dist/providers/r2.d.ts",
|
|
51
|
+
"import": "./dist/providers/r2.js",
|
|
52
|
+
"default": "./dist/providers/r2.js"
|
|
53
|
+
},
|
|
54
|
+
"./providers/s3": {
|
|
55
|
+
"types": "./dist/providers/s3.d.ts",
|
|
56
|
+
"import": "./dist/providers/s3.js",
|
|
57
|
+
"default": "./dist/providers/s3.js"
|
|
58
|
+
},
|
|
59
|
+
"./lib/sigv4": {
|
|
60
|
+
"types": "./dist/lib/sigv4.d.ts",
|
|
61
|
+
"import": "./dist/lib/sigv4.js",
|
|
62
|
+
"default": "./dist/lib/sigv4.js"
|
|
63
|
+
}
|
|
64
|
+
},
|
|
65
|
+
"main": "./dist/index.js",
|
|
66
|
+
"types": "./dist/index.d.ts"
|
|
67
|
+
},
|
|
68
|
+
"dependencies": {},
|
|
69
|
+
"devDependencies": {
|
|
70
|
+
"@voyantjs/voyant-typescript-config": "workspace:*",
|
|
71
|
+
"typescript": "^6.0.2",
|
|
72
|
+
"vitest": "^4.1.2"
|
|
73
|
+
},
|
|
74
|
+
"repository": {
|
|
75
|
+
"type": "git",
|
|
76
|
+
"url": "https://github.com/voyantjs/voyant.git",
|
|
77
|
+
"directory": "packages/storage"
|
|
78
|
+
}
|
|
79
|
+
}
|