@remix-run/file-storage-s3 0.0.0 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +46 -2
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1 -0
- package/dist/lib/s3.d.ts +47 -0
- package/dist/lib/s3.d.ts.map +1 -0
- package/dist/lib/s3.js +292 -0
- package/package.json +40 -6
- package/src/index.ts +1 -0
- package/src/lib/s3.ts +411 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Shopify Inc.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
CHANGED
|
@@ -1,3 +1,47 @@
|
|
|
1
|
-
#
|
|
1
|
+
# file-storage-s3
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
S3 backend for [`remix/file-storage`](https://github.com/remix-run/remix/tree/main/packages/file-storage).
|
|
4
|
+
Use this package when you want the `FileStorage` API backed by AWS S3 or an S3-compatible provider.
|
|
5
|
+
|
|
6
|
+
## Features
|
|
7
|
+
|
|
8
|
+
- **S3-Compatible API** - Works with AWS S3 and S3-compatible APIs (e.g. MinIO, LocalStack)
|
|
9
|
+
- **Metadata Preservation** - Preserves `File` metadata (`name`, `type`, `lastModified`)
|
|
10
|
+
- **Runtime-Agnostic Signing** - Uses [`aws4fetch`](https://github.com/mhart/aws4fetch) for SigV4 signing
|
|
11
|
+
|
|
12
|
+
## Installation
|
|
13
|
+
|
|
14
|
+
```sh
|
|
15
|
+
npm i remix
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Usage
|
|
19
|
+
|
|
20
|
+
```ts
|
|
21
|
+
import { createS3FileStorage } from 'remix/file-storage-s3'
|
|
22
|
+
|
|
23
|
+
let storage = createS3FileStorage({
|
|
24
|
+
accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
|
|
25
|
+
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
|
|
26
|
+
bucket: 'my-app-uploads',
|
|
27
|
+
region: 'us-east-1',
|
|
28
|
+
})
|
|
29
|
+
|
|
30
|
+
await storage.set(
|
|
31
|
+
'uploads/hello.txt',
|
|
32
|
+
new File(['hello world'], 'hello.txt', { type: 'text/plain' }),
|
|
33
|
+
)
|
|
34
|
+
let file = await storage.get('uploads/hello.txt')
|
|
35
|
+
await storage.remove('uploads/hello.txt')
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
For S3-compatible providers such as MinIO and LocalStack, set `endpoint` and `forcePathStyle: true`.
|
|
39
|
+
|
|
40
|
+
## Related Packages
|
|
41
|
+
|
|
42
|
+
- [`file-storage`](https://github.com/remix-run/remix/tree/main/packages/file-storage) - Core `FileStorage` interface and filesystem/memory backends
|
|
43
|
+
- [`form-data-parser`](https://github.com/remix-run/remix/tree/main/packages/form-data-parser) - Parses `multipart/form-data` uploads into `FileUpload` objects
|
|
44
|
+
|
|
45
|
+
## License
|
|
46
|
+
|
|
47
|
+
See [LICENSE](https://github.com/remix-run/remix/blob/main/LICENSE)
|
package/dist/index.d.ts.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,oBAAoB,EAAE,mBAAmB,EAAE,MAAM,aAAa,CAAA"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { createS3FileStorage } from "./lib/s3.js";
|
package/dist/lib/s3.d.ts
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { FileStorage } from '@remix-run/file-storage';
|
|
2
|
+
export interface S3FileStorageOptions {
|
|
3
|
+
/**
|
|
4
|
+
* AWS access key ID used to sign S3 requests.
|
|
5
|
+
*/
|
|
6
|
+
accessKeyId: string;
|
|
7
|
+
/**
|
|
8
|
+
* AWS secret access key used to sign S3 requests.
|
|
9
|
+
*/
|
|
10
|
+
secretAccessKey: string;
|
|
11
|
+
/**
|
|
12
|
+
* Bucket name used for all file storage operations.
|
|
13
|
+
*/
|
|
14
|
+
bucket: string;
|
|
15
|
+
/**
|
|
16
|
+
* AWS region for request signing.
|
|
17
|
+
*/
|
|
18
|
+
region: string;
|
|
19
|
+
/**
|
|
20
|
+
* Custom S3-compatible endpoint URL. Defaults to AWS S3 for the given region.
|
|
21
|
+
*/
|
|
22
|
+
endpoint?: string;
|
|
23
|
+
/**
|
|
24
|
+
* Whether to use path-style bucket URLs (`/bucket/key`). Defaults to `true` when `endpoint` is
|
|
25
|
+
* provided and `false` otherwise.
|
|
26
|
+
*/
|
|
27
|
+
forcePathStyle?: boolean;
|
|
28
|
+
/**
|
|
29
|
+
* Optional session token for temporary credentials.
|
|
30
|
+
*/
|
|
31
|
+
sessionToken?: string;
|
|
32
|
+
/**
|
|
33
|
+
* Optional fetch implementation.
|
|
34
|
+
*/
|
|
35
|
+
fetch?: typeof globalThis.fetch;
|
|
36
|
+
}
|
|
37
|
+
/**
|
|
38
|
+
* Creates an S3-backed implementation of `FileStorage`.
|
|
39
|
+
*
|
|
40
|
+
* This works with AWS S3 and S3-compatible providers (for example MinIO or LocalStack) by
|
|
41
|
+
* overriding the `endpoint` option.
|
|
42
|
+
*
|
|
43
|
+
* @param options Configuration for the S3 backend
|
|
44
|
+
* @returns A `FileStorage` implementation backed by S3
|
|
45
|
+
*/
|
|
46
|
+
export declare function createS3FileStorage(options: S3FileStorageOptions): FileStorage;
|
|
47
|
+
//# sourceMappingURL=s3.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/lib/s3.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAgB,WAAW,EAA2B,MAAM,yBAAyB,CAAA;AAUjG,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAA;IACnB;;OAEG;IACH,eAAe,EAAE,MAAM,CAAA;IACvB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAA;IACd;;OAEG;IACH,MAAM,EAAE,MAAM,CAAA;IACd;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB;;;OAGG;IACH,cAAc,CAAC,EAAE,OAAO,CAAA;IACxB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,UAAU,CAAC,KAAK,CAAA;CAChC;AAED;;;;;;;;GAQG;AACH,wBAAgB,mBAAmB,CAAC,OAAO,EAAE,oBAAoB,GAAG,WAAW,CAgL9E"}
|
package/dist/lib/s3.js
ADDED
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
import { AwsClient } from 'aws4fetch';
|
|
2
|
+
const CONTENTS_PATTERN = /<Contents>([\s\S]*?)<\/Contents>/g;
|
|
3
|
+
/**
|
|
4
|
+
* Creates an S3-backed implementation of `FileStorage`.
|
|
5
|
+
*
|
|
6
|
+
* This works with AWS S3 and S3-compatible providers (for example MinIO or LocalStack) by
|
|
7
|
+
* overriding the `endpoint` option.
|
|
8
|
+
*
|
|
9
|
+
* @param options Configuration for the S3 backend
|
|
10
|
+
* @returns A `FileStorage` implementation backed by S3
|
|
11
|
+
*/
|
|
12
|
+
export function createS3FileStorage(options) {
|
|
13
|
+
let endpoint = new URL(options.endpoint ?? `https://s3.${options.region}.amazonaws.com`);
|
|
14
|
+
let forcePathStyle = options.forcePathStyle ?? options.endpoint != null;
|
|
15
|
+
let aws = new AwsClient({
|
|
16
|
+
accessKeyId: options.accessKeyId,
|
|
17
|
+
secretAccessKey: options.secretAccessKey,
|
|
18
|
+
sessionToken: options.sessionToken,
|
|
19
|
+
service: 's3',
|
|
20
|
+
region: options.region,
|
|
21
|
+
});
|
|
22
|
+
async function s3Fetch(url, init) {
|
|
23
|
+
let request = await aws.sign(url, init);
|
|
24
|
+
if (options.fetch != null) {
|
|
25
|
+
return options.fetch(request);
|
|
26
|
+
}
|
|
27
|
+
return fetch(request);
|
|
28
|
+
}
|
|
29
|
+
function getBucketUrl() {
|
|
30
|
+
return createBucketUrl(endpoint, options.bucket, forcePathStyle);
|
|
31
|
+
}
|
|
32
|
+
function getObjectUrl(key) {
|
|
33
|
+
return createObjectUrl(endpoint, options.bucket, forcePathStyle, key);
|
|
34
|
+
}
|
|
35
|
+
async function putFile(key, file) {
|
|
36
|
+
let body = await file.arrayBuffer();
|
|
37
|
+
let headers = new Headers();
|
|
38
|
+
if (file.type !== '') {
|
|
39
|
+
headers.set('content-type', file.type);
|
|
40
|
+
}
|
|
41
|
+
headers.set('x-amz-meta-file-name', encodeMetadataValue(file.name));
|
|
42
|
+
headers.set('x-amz-meta-file-last-modified', String(file.lastModified));
|
|
43
|
+
let response = await s3Fetch(getObjectUrl(key), {
|
|
44
|
+
method: 'PUT',
|
|
45
|
+
headers,
|
|
46
|
+
body,
|
|
47
|
+
});
|
|
48
|
+
await assertOk(response, `PUT "${key}"`);
|
|
49
|
+
return new File([body], file.name, {
|
|
50
|
+
lastModified: file.lastModified,
|
|
51
|
+
type: file.type,
|
|
52
|
+
});
|
|
53
|
+
}
|
|
54
|
+
async function getFileMetadata(object) {
|
|
55
|
+
let response = await s3Fetch(getObjectUrl(object.key), { method: 'HEAD' });
|
|
56
|
+
if (response.status === 404) {
|
|
57
|
+
return {
|
|
58
|
+
key: object.key,
|
|
59
|
+
lastModified: object.lastModified,
|
|
60
|
+
name: getDefaultFileName(object.key),
|
|
61
|
+
size: object.size,
|
|
62
|
+
type: '',
|
|
63
|
+
};
|
|
64
|
+
}
|
|
65
|
+
await assertOk(response, `HEAD "${object.key}"`);
|
|
66
|
+
return {
|
|
67
|
+
key: object.key,
|
|
68
|
+
lastModified: parseEpochMillis(response.headers.get('x-amz-meta-file-last-modified')) ??
|
|
69
|
+
object.lastModified,
|
|
70
|
+
name: decodeMetadataValue(response.headers.get('x-amz-meta-file-name')) ??
|
|
71
|
+
getDefaultFileName(object.key),
|
|
72
|
+
size: parseInteger(response.headers.get('content-length')) ?? object.size,
|
|
73
|
+
type: response.headers.get('content-type') ?? '',
|
|
74
|
+
};
|
|
75
|
+
}
|
|
76
|
+
return {
|
|
77
|
+
async get(key) {
|
|
78
|
+
let response = await s3Fetch(getObjectUrl(key), { method: 'GET' });
|
|
79
|
+
if (response.status === 404) {
|
|
80
|
+
return null;
|
|
81
|
+
}
|
|
82
|
+
await assertOk(response, `GET "${key}"`);
|
|
83
|
+
let body = await response.arrayBuffer();
|
|
84
|
+
return new File([body], decodeMetadataValue(response.headers.get('x-amz-meta-file-name')) ??
|
|
85
|
+
getDefaultFileName(key), {
|
|
86
|
+
lastModified: parseEpochMillis(response.headers.get('x-amz-meta-file-last-modified')) ??
|
|
87
|
+
parseHttpDate(response.headers.get('last-modified')) ??
|
|
88
|
+
0,
|
|
89
|
+
type: response.headers.get('content-type') ?? '',
|
|
90
|
+
});
|
|
91
|
+
},
|
|
92
|
+
async has(key) {
|
|
93
|
+
let response = await s3Fetch(getObjectUrl(key), { method: 'HEAD' });
|
|
94
|
+
if (response.status === 404) {
|
|
95
|
+
return false;
|
|
96
|
+
}
|
|
97
|
+
await assertOk(response, `HEAD "${key}"`);
|
|
98
|
+
return true;
|
|
99
|
+
},
|
|
100
|
+
async list(options) {
|
|
101
|
+
let { cursor, includeMetadata = false, limit = 32, prefix } = options ?? {};
|
|
102
|
+
if (limit <= 0) {
|
|
103
|
+
return {
|
|
104
|
+
files: [],
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
let url = getBucketUrl();
|
|
108
|
+
url.searchParams.set('encoding-type', 'url');
|
|
109
|
+
url.searchParams.set('list-type', '2');
|
|
110
|
+
url.searchParams.set('max-keys', String(limit));
|
|
111
|
+
if (cursor !== undefined) {
|
|
112
|
+
url.searchParams.set('continuation-token', cursor);
|
|
113
|
+
}
|
|
114
|
+
if (prefix !== undefined) {
|
|
115
|
+
url.searchParams.set('prefix', prefix);
|
|
116
|
+
}
|
|
117
|
+
let response = await s3Fetch(url, { method: 'GET' });
|
|
118
|
+
await assertOk(response, 'LIST');
|
|
119
|
+
let xml = await response.text();
|
|
120
|
+
let objects = parseListedObjects(xml);
|
|
121
|
+
let nextCursor = parseNextCursor(xml);
|
|
122
|
+
if (!includeMetadata) {
|
|
123
|
+
return {
|
|
124
|
+
cursor: nextCursor,
|
|
125
|
+
files: objects.map((object) => ({ key: object.key })),
|
|
126
|
+
};
|
|
127
|
+
}
|
|
128
|
+
let files = await Promise.all(objects.map((object) => getFileMetadata(object)));
|
|
129
|
+
return {
|
|
130
|
+
cursor: nextCursor,
|
|
131
|
+
files: files,
|
|
132
|
+
};
|
|
133
|
+
},
|
|
134
|
+
put(key, file) {
|
|
135
|
+
return putFile(key, file);
|
|
136
|
+
},
|
|
137
|
+
async remove(key) {
|
|
138
|
+
let response = await s3Fetch(getObjectUrl(key), { method: 'DELETE' });
|
|
139
|
+
if (response.status === 404) {
|
|
140
|
+
return;
|
|
141
|
+
}
|
|
142
|
+
await assertOk(response, `DELETE "${key}"`);
|
|
143
|
+
},
|
|
144
|
+
async set(key, file) {
|
|
145
|
+
await putFile(key, file);
|
|
146
|
+
},
|
|
147
|
+
};
|
|
148
|
+
}
|
|
149
|
+
function createBucketUrl(endpoint, bucket, forcePathStyle) {
|
|
150
|
+
let url = new URL(endpoint.toString());
|
|
151
|
+
if (forcePathStyle) {
|
|
152
|
+
url.pathname = joinPath(endpoint.pathname, encodeURIComponent(bucket));
|
|
153
|
+
}
|
|
154
|
+
else {
|
|
155
|
+
url.hostname = `${bucket}.${endpoint.hostname}`;
|
|
156
|
+
url.pathname = joinPath(endpoint.pathname);
|
|
157
|
+
}
|
|
158
|
+
return url;
|
|
159
|
+
}
|
|
160
|
+
function createObjectUrl(endpoint, bucket, forcePathStyle, key) {
|
|
161
|
+
let url = new URL(endpoint.toString());
|
|
162
|
+
if (forcePathStyle) {
|
|
163
|
+
url.pathname = joinPath(endpoint.pathname, encodeURIComponent(bucket), encodeS3Key(key));
|
|
164
|
+
}
|
|
165
|
+
else {
|
|
166
|
+
url.hostname = `${bucket}.${endpoint.hostname}`;
|
|
167
|
+
url.pathname = joinPath(endpoint.pathname, encodeS3Key(key));
|
|
168
|
+
}
|
|
169
|
+
return url;
|
|
170
|
+
}
|
|
171
|
+
function encodeS3Key(key) {
|
|
172
|
+
return key
|
|
173
|
+
.split('/')
|
|
174
|
+
.map((segment) => encodeURIComponent(segment))
|
|
175
|
+
.join('/');
|
|
176
|
+
}
|
|
177
|
+
function encodeMetadataValue(value) {
|
|
178
|
+
return encodeURIComponent(value);
|
|
179
|
+
}
|
|
180
|
+
function decodeMetadataValue(value) {
|
|
181
|
+
if (value == null || value === '') {
|
|
182
|
+
return undefined;
|
|
183
|
+
}
|
|
184
|
+
try {
|
|
185
|
+
return decodeURIComponent(value);
|
|
186
|
+
}
|
|
187
|
+
catch {
|
|
188
|
+
return value;
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
function getDefaultFileName(key) {
|
|
192
|
+
let lastSlashIndex = key.lastIndexOf('/');
|
|
193
|
+
let fileName = lastSlashIndex >= 0 ? key.slice(lastSlashIndex + 1) : key;
|
|
194
|
+
return fileName === '' ? key : fileName;
|
|
195
|
+
}
|
|
196
|
+
function parseListedObjects(xml) {
|
|
197
|
+
CONTENTS_PATTERN.lastIndex = 0;
|
|
198
|
+
let objects = [];
|
|
199
|
+
for (let match of xml.matchAll(CONTENTS_PATTERN)) {
|
|
200
|
+
let entry = match[1] ?? '';
|
|
201
|
+
let encodedKey = readXmlTag(entry, 'Key');
|
|
202
|
+
if (encodedKey == null) {
|
|
203
|
+
continue;
|
|
204
|
+
}
|
|
205
|
+
let key = decodeS3Key(encodedKey);
|
|
206
|
+
let size = parseInteger(readXmlTag(entry, 'Size')) ?? 0;
|
|
207
|
+
let lastModified = parseHttpDate(readXmlTag(entry, 'LastModified')) ?? 0;
|
|
208
|
+
objects.push({
|
|
209
|
+
key,
|
|
210
|
+
lastModified,
|
|
211
|
+
size,
|
|
212
|
+
});
|
|
213
|
+
}
|
|
214
|
+
return objects;
|
|
215
|
+
}
|
|
216
|
+
function parseNextCursor(xml) {
|
|
217
|
+
if (readXmlTag(xml, 'IsTruncated') !== 'true') {
|
|
218
|
+
return undefined;
|
|
219
|
+
}
|
|
220
|
+
return readXmlTag(xml, 'NextContinuationToken');
|
|
221
|
+
}
|
|
222
|
+
function decodeS3Key(value) {
|
|
223
|
+
try {
|
|
224
|
+
return decodeURIComponent(value);
|
|
225
|
+
}
|
|
226
|
+
catch {
|
|
227
|
+
return value;
|
|
228
|
+
}
|
|
229
|
+
}
|
|
230
|
+
function readXmlTag(xml, tagName) {
|
|
231
|
+
let pattern = new RegExp(`<${tagName}>([\\s\\S]*?)</${tagName}>`);
|
|
232
|
+
let match = xml.match(pattern);
|
|
233
|
+
if (match == null || match[1] == null) {
|
|
234
|
+
return undefined;
|
|
235
|
+
}
|
|
236
|
+
return decodeXmlEntities(match[1]);
|
|
237
|
+
}
|
|
238
|
+
// Decodes the predefined XML character entities (plus `&#39;`) found in S3
// XML response text. `&amp;` is decoded LAST so a double-escaped sequence
// like `&amp;lt;` yields the literal `&lt;` rather than `<`.
function decodeXmlEntities(value) {
    return value
        .replace(/&lt;/g, '<')
        .replace(/&gt;/g, '>')
        .replace(/&quot;/g, '"')
        .replace(/&apos;/g, "'")
        .replace(/&#39;/g, "'")
        .replace(/&amp;/g, '&');
}
|
|
247
|
+
function parseInteger(value) {
|
|
248
|
+
if (value == null || value === '') {
|
|
249
|
+
return undefined;
|
|
250
|
+
}
|
|
251
|
+
let parsed = Number(value);
|
|
252
|
+
return Number.isFinite(parsed) ? parsed : undefined;
|
|
253
|
+
}
|
|
254
|
+
function parseEpochMillis(value) {
|
|
255
|
+
let parsed = parseInteger(value);
|
|
256
|
+
return parsed != null ? parsed : undefined;
|
|
257
|
+
}
|
|
258
|
+
function parseHttpDate(value) {
|
|
259
|
+
if (value == null || value === '') {
|
|
260
|
+
return undefined;
|
|
261
|
+
}
|
|
262
|
+
let parsed = Date.parse(value);
|
|
263
|
+
return Number.isFinite(parsed) ? parsed : undefined;
|
|
264
|
+
}
|
|
265
|
+
function joinPath(basePath, ...parts) {
|
|
266
|
+
let normalizedBasePath = basePath.replace(/\/+$/g, '');
|
|
267
|
+
let normalizedParts = parts
|
|
268
|
+
.filter((part) => part != null && part !== '')
|
|
269
|
+
.map((part) => part.replace(/^\/+|\/+$/g, ''));
|
|
270
|
+
let joined = [normalizedBasePath, ...normalizedParts].filter((part) => part !== '').join('/');
|
|
271
|
+
if (joined === '') {
|
|
272
|
+
return '/';
|
|
273
|
+
}
|
|
274
|
+
return joined.startsWith('/') ? joined : `/${joined}`;
|
|
275
|
+
}
|
|
276
|
+
async function assertOk(response, operation) {
|
|
277
|
+
if (response.ok) {
|
|
278
|
+
return;
|
|
279
|
+
}
|
|
280
|
+
let message = `${response.status} ${response.statusText}`;
|
|
281
|
+
try {
|
|
282
|
+
let body = await response.text();
|
|
283
|
+
let s3Message = readXmlTag(body, 'Message');
|
|
284
|
+
if (s3Message != null && s3Message !== '') {
|
|
285
|
+
message = `${message} (${s3Message})`;
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
catch {
|
|
289
|
+
// Ignore body parse errors and keep the status-only message.
|
|
290
|
+
}
|
|
291
|
+
throw new Error(`S3 request failed for ${operation}: ${message}`);
|
|
292
|
+
}
|
package/package.json
CHANGED
|
@@ -1,14 +1,48 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@remix-run/file-storage-s3",
|
|
3
|
-
"version": "0.
|
|
4
|
-
"description": "
|
|
5
|
-
"
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "S3 backend for @remix-run/file-storage",
|
|
5
|
+
"author": "Michael Jackson <mjijackson@gmail.com>",
|
|
6
6
|
"repository": {
|
|
7
7
|
"type": "git",
|
|
8
8
|
"url": "git+https://github.com/remix-run/remix.git",
|
|
9
9
|
"directory": "packages/file-storage-s3"
|
|
10
10
|
},
|
|
11
|
-
"
|
|
12
|
-
|
|
11
|
+
"homepage": "https://github.com/remix-run/remix/tree/main/packages/file-storage-s3#readme",
|
|
12
|
+
"license": "MIT",
|
|
13
|
+
"files": [
|
|
14
|
+
"LICENSE",
|
|
15
|
+
"README.md",
|
|
16
|
+
"dist",
|
|
17
|
+
"src",
|
|
18
|
+
"!src/**/*.test.ts"
|
|
19
|
+
],
|
|
20
|
+
"type": "module",
|
|
21
|
+
"exports": {
|
|
22
|
+
".": {
|
|
23
|
+
"types": "./dist/index.d.ts",
|
|
24
|
+
"default": "./dist/index.js"
|
|
25
|
+
},
|
|
26
|
+
"./package.json": "./package.json"
|
|
27
|
+
},
|
|
28
|
+
"dependencies": {
|
|
29
|
+
"aws4fetch": "^1.0.20",
|
|
30
|
+
"@remix-run/file-storage": "^0.13.3"
|
|
31
|
+
},
|
|
32
|
+
"devDependencies": {
|
|
33
|
+
"@types/node": "^24.6.0",
|
|
34
|
+
"@typescript/native-preview": "7.0.0-dev.20251125.1"
|
|
35
|
+
},
|
|
36
|
+
"keywords": [
|
|
37
|
+
"file",
|
|
38
|
+
"storage",
|
|
39
|
+
"s3",
|
|
40
|
+
"aws"
|
|
41
|
+
],
|
|
42
|
+
"scripts": {
|
|
43
|
+
"build": "tsgo -p tsconfig.build.json",
|
|
44
|
+
"clean": "git clean -fdX",
|
|
45
|
+
"test": "node --disable-warning=ExperimentalWarning --test",
|
|
46
|
+
"typecheck": "tsgo --noEmit"
|
|
13
47
|
}
|
|
14
|
-
}
|
|
48
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { type S3FileStorageOptions, createS3FileStorage } from './lib/s3.ts'
|
package/src/lib/s3.ts
ADDED
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
import { AwsClient } from 'aws4fetch'

import type { FileMetadata, FileStorage, ListOptions, ListResult } from '@remix-run/file-storage'

// Matches each <Contents>…</Contents> entry in a ListObjectsV2 XML response.
// The /g flag is required by String.prototype.matchAll.
const CONTENTS_PATTERN = /<Contents>([\s\S]*?)<\/Contents>/g

// One object entry parsed out of a ListObjectsV2 response body.
type ListedObject = {
  key: string
  // Epoch milliseconds parsed from <LastModified>; 0 when missing/unparsable.
  lastModified: number
  // Object size in bytes parsed from <Size>; 0 when missing/unparsable.
  size: number
}

export interface S3FileStorageOptions {
  /**
   * AWS access key ID used to sign S3 requests.
   */
  accessKeyId: string
  /**
   * AWS secret access key used to sign S3 requests.
   */
  secretAccessKey: string
  /**
   * Bucket name used for all file storage operations.
   */
  bucket: string
  /**
   * AWS region for request signing.
   */
  region: string
  /**
   * Custom S3-compatible endpoint URL. Defaults to AWS S3 for the given region.
   */
  endpoint?: string
  /**
   * Whether to use path-style bucket URLs (`/bucket/key`). Defaults to `true` when `endpoint` is
   * provided and `false` otherwise.
   */
  forcePathStyle?: boolean
  /**
   * Optional session token for temporary credentials.
   */
  sessionToken?: string
  /**
   * Optional fetch implementation.
   */
  fetch?: typeof globalThis.fetch
}
|
|
49
|
+
/**
 * Creates an S3-backed implementation of `FileStorage`.
 *
 * This works with AWS S3 and S3-compatible providers (for example MinIO or LocalStack) by
 * overriding the `endpoint` option.
 *
 * @param options Configuration for the S3 backend
 * @returns A `FileStorage` implementation backed by S3
 */
export function createS3FileStorage(options: S3FileStorageOptions): FileStorage {
  let endpoint = new URL(options.endpoint ?? `https://s3.${options.region}.amazonaws.com`)
  // Path-style defaults on for custom endpoints (MinIO/LocalStack typically
  // lack wildcard DNS for virtual-hosted buckets) and off for real AWS.
  let forcePathStyle = options.forcePathStyle ?? options.endpoint != null

  let aws = new AwsClient({
    accessKeyId: options.accessKeyId,
    secretAccessKey: options.secretAccessKey,
    sessionToken: options.sessionToken,
    service: 's3',
    region: options.region,
  })

  // Signs the request (SigV4 via aws4fetch) and dispatches it with the
  // caller-supplied fetch when given, otherwise the global fetch.
  async function s3Fetch(url: URL, init?: RequestInit): Promise<Response> {
    let request = await aws.sign(url, init)

    if (options.fetch != null) {
      return options.fetch(request)
    }

    return fetch(request)
  }

  function getBucketUrl(): URL {
    return createBucketUrl(endpoint, options.bucket, forcePathStyle)
  }

  function getObjectUrl(key: string): URL {
    return createObjectUrl(endpoint, options.bucket, forcePathStyle, key)
  }

  // Uploads `file` under `key`, persisting `name` and `lastModified` in
  // x-amz-meta-* headers so get()/list() can rebuild File metadata later.
  // NOTE(review): buffers the entire file in memory via arrayBuffer() —
  // large uploads are not streamed.
  async function putFile(key: string, file: File): Promise<File> {
    let body = await file.arrayBuffer()
    let headers = new Headers()

    if (file.type !== '') {
      headers.set('content-type', file.type)
    }

    // File names may contain non-ASCII characters; user metadata headers
    // must be ASCII, so the name is percent-encoded.
    headers.set('x-amz-meta-file-name', encodeMetadataValue(file.name))
    headers.set('x-amz-meta-file-last-modified', String(file.lastModified))

    let response = await s3Fetch(getObjectUrl(key), {
      method: 'PUT',
      headers,
      body,
    })
    await assertOk(response, `PUT "${key}"`)

    // Return a File equivalent to what was stored (body already in memory).
    return new File([body], file.name, {
      lastModified: file.lastModified,
      type: file.type,
    })
  }

  // HEADs a listed object for full metadata, falling back to the values the
  // LIST response provided when a header is missing or the object vanished
  // (404) between LIST and HEAD.
  async function getFileMetadata(object: ListedObject): Promise<FileMetadata> {
    let response = await s3Fetch(getObjectUrl(object.key), { method: 'HEAD' })

    if (response.status === 404) {
      // Object was deleted after the LIST; use the LIST entry's values.
      return {
        key: object.key,
        lastModified: object.lastModified,
        name: getDefaultFileName(object.key),
        size: object.size,
        type: '',
      }
    }

    await assertOk(response, `HEAD "${object.key}"`)

    return {
      key: object.key,
      lastModified:
        parseEpochMillis(response.headers.get('x-amz-meta-file-last-modified')) ??
        object.lastModified,
      name:
        decodeMetadataValue(response.headers.get('x-amz-meta-file-name')) ??
        getDefaultFileName(object.key),
      size: parseInteger(response.headers.get('content-length')) ?? object.size,
      type: response.headers.get('content-type') ?? '',
    }
  }

  return {
    // Downloads the object and rebuilds a File from stored metadata;
    // resolves null when the key does not exist.
    async get(key: string): Promise<File | null> {
      let response = await s3Fetch(getObjectUrl(key), { method: 'GET' })

      if (response.status === 404) {
        return null
      }

      await assertOk(response, `GET "${key}"`)

      let body = await response.arrayBuffer()

      return new File(
        [body],
        decodeMetadataValue(response.headers.get('x-amz-meta-file-name')) ??
          getDefaultFileName(key),
        {
          // Prefer our own metadata header, then the HTTP Last-Modified
          // date, then epoch 0.
          lastModified:
            parseEpochMillis(response.headers.get('x-amz-meta-file-last-modified')) ??
            parseHttpDate(response.headers.get('last-modified')) ??
            0,
          type: response.headers.get('content-type') ?? '',
        },
      )
    },
    // Existence check via HEAD; 404 means "no", any other error throws.
    async has(key: string): Promise<boolean> {
      let response = await s3Fetch(getObjectUrl(key), { method: 'HEAD' })

      if (response.status === 404) {
        return false
      }

      await assertOk(response, `HEAD "${key}"`)

      return true
    },
    // Lists keys via ListObjectsV2. When includeMetadata is set, each entry
    // is enriched with a parallel HEAD request per object.
    async list<opts extends ListOptions>(options?: opts): Promise<ListResult<opts>> {
      let { cursor, includeMetadata = false, limit = 32, prefix } = options ?? {}

      // A non-positive limit short-circuits without a network round trip.
      if (limit <= 0) {
        return {
          files: [] as ListResult<opts>['files'],
        }
      }

      let url = getBucketUrl()
      url.searchParams.set('encoding-type', 'url')
      url.searchParams.set('list-type', '2')
      url.searchParams.set('max-keys', String(limit))

      if (cursor !== undefined) {
        url.searchParams.set('continuation-token', cursor)
      }
      if (prefix !== undefined) {
        url.searchParams.set('prefix', prefix)
      }

      let response = await s3Fetch(url, { method: 'GET' })
      await assertOk(response, 'LIST')

      let xml = await response.text()
      let objects = parseListedObjects(xml)
      let nextCursor = parseNextCursor(xml)

      if (!includeMetadata) {
        return {
          cursor: nextCursor,
          files: objects.map((object) => ({ key: object.key })) as ListResult<opts>['files'],
        }
      }

      // One HEAD per object, in parallel.
      let files = await Promise.all(objects.map((object) => getFileMetadata(object)))

      return {
        cursor: nextCursor,
        files: files as ListResult<opts>['files'],
      }
    },
    put(key: string, file: File): Promise<File> {
      return putFile(key, file)
    },
    // Removing a missing key is a no-op, matching FileStorage semantics.
    async remove(key: string): Promise<void> {
      let response = await s3Fetch(getObjectUrl(key), { method: 'DELETE' })

      if (response.status === 404) {
        return
      }

      await assertOk(response, `DELETE "${key}"`)
    },
    async set(key: string, file: File): Promise<void> {
      await putFile(key, file)
    },
  }
}
|
|
235
|
+
|
|
236
|
+
function createBucketUrl(endpoint: URL, bucket: string, forcePathStyle: boolean): URL {
|
|
237
|
+
let url = new URL(endpoint.toString())
|
|
238
|
+
|
|
239
|
+
if (forcePathStyle) {
|
|
240
|
+
url.pathname = joinPath(endpoint.pathname, encodeURIComponent(bucket))
|
|
241
|
+
} else {
|
|
242
|
+
url.hostname = `${bucket}.${endpoint.hostname}`
|
|
243
|
+
url.pathname = joinPath(endpoint.pathname)
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
return url
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
function createObjectUrl(endpoint: URL, bucket: string, forcePathStyle: boolean, key: string): URL {
|
|
250
|
+
let url = new URL(endpoint.toString())
|
|
251
|
+
|
|
252
|
+
if (forcePathStyle) {
|
|
253
|
+
url.pathname = joinPath(endpoint.pathname, encodeURIComponent(bucket), encodeS3Key(key))
|
|
254
|
+
} else {
|
|
255
|
+
url.hostname = `${bucket}.${endpoint.hostname}`
|
|
256
|
+
url.pathname = joinPath(endpoint.pathname, encodeS3Key(key))
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
return url
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
function encodeS3Key(key: string): string {
|
|
263
|
+
return key
|
|
264
|
+
.split('/')
|
|
265
|
+
.map((segment) => encodeURIComponent(segment))
|
|
266
|
+
.join('/')
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
function encodeMetadataValue(value: string): string {
|
|
270
|
+
return encodeURIComponent(value)
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
function decodeMetadataValue(value: string | null): string | undefined {
|
|
274
|
+
if (value == null || value === '') {
|
|
275
|
+
return undefined
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
try {
|
|
279
|
+
return decodeURIComponent(value)
|
|
280
|
+
} catch {
|
|
281
|
+
return value
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
function getDefaultFileName(key: string): string {
|
|
286
|
+
let lastSlashIndex = key.lastIndexOf('/')
|
|
287
|
+
let fileName = lastSlashIndex >= 0 ? key.slice(lastSlashIndex + 1) : key
|
|
288
|
+
return fileName === '' ? key : fileName
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
function parseListedObjects(xml: string): ListedObject[] {
|
|
292
|
+
CONTENTS_PATTERN.lastIndex = 0
|
|
293
|
+
|
|
294
|
+
let objects: ListedObject[] = []
|
|
295
|
+
|
|
296
|
+
for (let match of xml.matchAll(CONTENTS_PATTERN)) {
|
|
297
|
+
let entry = match[1] ?? ''
|
|
298
|
+
let encodedKey = readXmlTag(entry, 'Key')
|
|
299
|
+
|
|
300
|
+
if (encodedKey == null) {
|
|
301
|
+
continue
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
let key = decodeS3Key(encodedKey)
|
|
305
|
+
let size = parseInteger(readXmlTag(entry, 'Size')) ?? 0
|
|
306
|
+
let lastModified = parseHttpDate(readXmlTag(entry, 'LastModified')) ?? 0
|
|
307
|
+
|
|
308
|
+
objects.push({
|
|
309
|
+
key,
|
|
310
|
+
lastModified,
|
|
311
|
+
size,
|
|
312
|
+
})
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
return objects
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
function parseNextCursor(xml: string): string | undefined {
|
|
319
|
+
if (readXmlTag(xml, 'IsTruncated') !== 'true') {
|
|
320
|
+
return undefined
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
return readXmlTag(xml, 'NextContinuationToken')
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
function decodeS3Key(value: string): string {
|
|
327
|
+
try {
|
|
328
|
+
return decodeURIComponent(value)
|
|
329
|
+
} catch {
|
|
330
|
+
return value
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
function readXmlTag(xml: string, tagName: string): string | undefined {
|
|
335
|
+
let pattern = new RegExp(`<${tagName}>([\\s\\S]*?)</${tagName}>`)
|
|
336
|
+
let match = xml.match(pattern)
|
|
337
|
+
|
|
338
|
+
if (match == null || match[1] == null) {
|
|
339
|
+
return undefined
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
return decodeXmlEntities(match[1])
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
function decodeXmlEntities(value: string): string {
|
|
346
|
+
return value
|
|
347
|
+
.replace(/&/g, '&')
|
|
348
|
+
.replace(/</g, '<')
|
|
349
|
+
.replace(/>/g, '>')
|
|
350
|
+
.replace(/"/g, '"')
|
|
351
|
+
.replace(/'/g, "'")
|
|
352
|
+
.replace(/'/g, "'")
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
function parseInteger(value: string | null | undefined): number | undefined {
|
|
356
|
+
if (value == null || value === '') {
|
|
357
|
+
return undefined
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
let parsed = Number(value)
|
|
361
|
+
return Number.isFinite(parsed) ? parsed : undefined
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
function parseEpochMillis(value: string | null): number | undefined {
|
|
365
|
+
let parsed = parseInteger(value)
|
|
366
|
+
return parsed != null ? parsed : undefined
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
function parseHttpDate(value: string | null | undefined): number | undefined {
|
|
370
|
+
if (value == null || value === '') {
|
|
371
|
+
return undefined
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
let parsed = Date.parse(value)
|
|
375
|
+
return Number.isFinite(parsed) ? parsed : undefined
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
function joinPath(basePath: string, ...parts: (string | undefined)[]): string {
|
|
379
|
+
let normalizedBasePath = basePath.replace(/\/+$/g, '')
|
|
380
|
+
let normalizedParts = parts
|
|
381
|
+
.filter((part): part is string => part != null && part !== '')
|
|
382
|
+
.map((part) => part.replace(/^\/+|\/+$/g, ''))
|
|
383
|
+
|
|
384
|
+
let joined = [normalizedBasePath, ...normalizedParts].filter((part) => part !== '').join('/')
|
|
385
|
+
if (joined === '') {
|
|
386
|
+
return '/'
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
return joined.startsWith('/') ? joined : `/${joined}`
|
|
390
|
+
}
|
|
391
|
+
|
|
392
|
+
async function assertOk(response: Response, operation: string): Promise<void> {
|
|
393
|
+
if (response.ok) {
|
|
394
|
+
return
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
let message = `${response.status} ${response.statusText}`
|
|
398
|
+
|
|
399
|
+
try {
|
|
400
|
+
let body = await response.text()
|
|
401
|
+
let s3Message = readXmlTag(body, 'Message')
|
|
402
|
+
|
|
403
|
+
if (s3Message != null && s3Message !== '') {
|
|
404
|
+
message = `${message} (${s3Message})`
|
|
405
|
+
}
|
|
406
|
+
} catch {
|
|
407
|
+
// Ignore body parse errors and keep the status-only message.
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
throw new Error(`S3 request failed for ${operation}: ${message}`)
|
|
411
|
+
}
|