@constructive-io/s3-streamer 2.8.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +23 -0
- package/README.md +272 -0
- package/esm/index.js +6 -0
- package/esm/s3.js +22 -0
- package/esm/streamer.js +31 -0
- package/esm/utils.js +91 -0
- package/index.d.ts +6 -0
- package/index.js +26 -0
- package/package.json +52 -0
- package/s3.d.ts +9 -0
- package/s3.js +25 -0
- package/streamer.d.ts +23 -0
- package/streamer.js +38 -0
- package/utils.d.ts +38 -0
- package/utils.js +100 -0
package/LICENSE
ADDED
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2025 Dan Lynch <pyramation@gmail.com>
+Copyright (c) 2025 Constructive <developers@constructive.io>
+Copyright (c) 2020-present, Interweb, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,272 @@
+# s3-streamer
+
+<p align="center" width="100%">
+  <img height="250" src="https://raw.githubusercontent.com/constructive-io/constructive/refs/heads/main/assets/outline-logo.svg" />
+</p>
+
+<p align="center" width="100%">
+  <a href="https://github.com/constructive-io/constructive/actions/workflows/run-tests.yaml">
+    <img height="20" src="https://github.com/constructive-io/constructive/actions/workflows/run-tests.yaml/badge.svg" />
+  </a>
+  <a href="https://github.com/constructive-io/constructive/blob/main/LICENSE"><img height="20" src="https://img.shields.io/badge/license-MIT-blue.svg"/></a>
+  <a href="https://www.npmjs.com/package/@constructive-io/s3-streamer"><img height="20" src="https://img.shields.io/github/package-json/v/constructive-io/constructive?filename=streaming%2Fs3-streamer%2Fpackage.json"/></a>
+</p>
+
+Stream uploads to S3 with automatic content-type detection, ETag generation, and metadata extraction. Built on AWS SDK v3 for optimal performance and smaller bundle sizes.
+
+## Features
+
+- 🚀 **Streaming uploads** - Memory-efficient streaming directly to S3
+- 🔍 **Automatic content-type detection** - Uses magic bytes to detect file types
+- 🏷️ **Metadata extraction** - Generates ETags, SHA hashes, and UUIDs for uploaded content
+- 📦 **AWS SDK v3** - Modern, modular SDK with better tree-shaking
+- 🔧 **MinIO compatible** - Works with S3-compatible storage services
+- 💪 **TypeScript support** - Full type definitions included
+
+## Installation
+
+```sh
+npm install @constructive-io/s3-streamer
+```
+
+## Quick Start
+
+Stream an upload to S3:
+
+```js
+import Streamer from '@constructive-io/s3-streamer';
+import { createReadStream } from 'fs';
+
+const streamer = new Streamer(opts); // opts: see Configuration below
+const readStream = createReadStream(filename);
+const results = await streamer.upload({
+  readStream,
+  filename,
+  bucket,
+  key
+});
+```
+
+## Response Format
+
+The upload methods return a detailed payload with upload results and file metadata:
+
+```js
+{
+  upload: {
+    ETag: '"952fd44d14cee87882239b707231609d"',
+    Location: 'http://localhost:9000/constructive/db1/assets/.gitignore',
+    Key: 'db1/assets/.gitignore',
+    Bucket: 'constructive'
+  },
+  magic: {
+    type: 'text/plain',
+    charset: 'us-ascii'
+  },
+  contentType: 'text/plain',
+  contents: {
+    uuid: '278aee01-1404-5725-8f0e-7044c9c16397',
+    sha: '7d65523f2a5afb69d76824dd1dfa62a34faa3197',
+    etag: '952fd44d14cee87882239b707231609d'
+  }
+}
+```
+
+### Response Fields
+
+- **upload**: S3 upload response
+  - `ETag`: S3 ETag of the uploaded object
+  - `Location`: Full URL to the uploaded object
+  - `Key`: S3 object key
+  - `Bucket`: Bucket name where the object was uploaded
+- **magic**: File type detection results
+  - `type`: MIME type detected from file content
+  - `charset`: Character encoding (for text files)
+- **contentType**: Final content-type used for upload
+- **contents**: File metadata
+  - `uuid`: Deterministic UUID based on file content
+  - `sha`: SHA-1 hash of file content
+  - `etag`: Computed ETag (matches the S3 ETag for single-part uploads)
+
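Because `contents.etag` is computed client-side with the same algorithm S3 uses for single-part objects, it can double as an integrity check against the `ETag` S3 returns. A minimal sketch based on the response shape above (not an API of this package); note that S3 wraps its `ETag` in literal double quotes:

```js
const results = await streamer.upload({ readStream, filename, bucket, key });

// S3 returns the ETag wrapped in quotes, e.g. '"952f...609d"'.
const remoteEtag = (results.upload.ETag || '').replace(/"/g, '');

// Only meaningful for single-part uploads; multipart ETags use another scheme.
if (remoteEtag !== results.contents.etag) {
  throw new Error(`ETag mismatch: ${remoteEtag} !== ${results.contents.etag}`);
}
```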
+## Functional Utils
+
+If you don't want to use the `Streamer` class, you can use the utils directly:
+
+```js
+import { getClient, upload } from '@constructive-io/s3-streamer';
+import { createReadStream } from 'fs';
+
+const client = getClient(opts);
+const readStream = createReadStream(filename);
+const results = await upload({
+  client,
+  readStream,
+  filename,
+  bucket,
+  key
+});
+```
+
+## Configuration
+
+### AWS S3 Production
+
+```js
+const streamer = new Streamer({
+  defaultBucket: 'my-bucket',
+  awsRegion: 'us-east-1',
+  awsSecretKey: process.env.AWS_SECRET_ACCESS_KEY,
+  awsAccessKey: process.env.AWS_ACCESS_KEY_ID
+});
+```
+
+### MinIO / S3-Compatible Storage
+
+```js
+const streamer = new Streamer({
+  defaultBucket: 'my-bucket',
+  awsRegion: 'us-east-1',
+  awsSecretKey: 'minio-secret',
+  awsAccessKey: 'minio-access',
+  minioEndpoint: 'http://localhost:9000'
+});
+```
+
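Since the only difference between the two configurations is the optional `minioEndpoint`, one convenient pattern is to drive it from the environment so the same code targets AWS in production and MinIO locally. A sketch; `S3_BUCKET` and `S3_ENDPOINT` are hypothetical variable names, not ones this package reads:

```js
const streamer = new Streamer({
  defaultBucket: process.env.S3_BUCKET,
  awsRegion: process.env.AWS_REGION || 'us-east-1',
  awsSecretKey: process.env.AWS_SECRET_ACCESS_KEY,
  awsAccessKey: process.env.AWS_ACCESS_KEY_ID,
  // Unset in production; e.g. http://localhost:9000 for local MinIO.
  ...(process.env.S3_ENDPOINT ? { minioEndpoint: process.env.S3_ENDPOINT } : {})
});
```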
+## API Reference
+
+### Streamer Class
+
+#### Constructor Options
+
+```typescript
+interface StreamerOptions {
+  awsRegion: string;      // AWS region (e.g., 'us-east-1')
+  awsSecretKey: string;   // AWS secret access key
+  awsAccessKey: string;   // AWS access key ID
+  minioEndpoint?: string; // Optional: MinIO/S3-compatible endpoint
+  defaultBucket: string;  // Default bucket for uploads
+}
+```
+
+#### Methods
+
+##### `upload(params)`
+
+Uploads a file stream to S3 with automatic content-type detection and metadata extraction.
+
+```typescript
+interface UploadParams {
+  readStream: ReadStream; // Node.js readable stream
+  filename: string;       // Original filename (used for content-type detection)
+  key: string;            // S3 object key (path in bucket)
+  bucket?: string;        // Optional: Override default bucket
+}
+```
+
+##### `destroy()`
+
+Cleans up the S3 client connections. Should be called when done with the streamer instance.
+
+```js
+streamer.destroy();
+```
+
+### Functional API
+
+If you prefer functional programming over classes:
+
+```js
+import { getClient, upload } from '@constructive-io/s3-streamer';
+import { createReadStream } from 'fs';
+
+// Create S3 client
+const client = getClient({
+  awsRegion: 'us-east-1',
+  awsSecretKey: process.env.AWS_SECRET_ACCESS_KEY,
+  awsAccessKey: process.env.AWS_ACCESS_KEY_ID,
+  minioEndpoint: 'http://localhost:9000' // optional
+});
+
+// Upload file
+const results = await upload({
+  client,
+  readStream: createReadStream('file.pdf'),
+  filename: 'file.pdf',
+  bucket: 'my-bucket',
+  key: 'uploads/file.pdf'
+});
+
+// Clean up when done
+client.destroy();
+```
+
+---
+
+## Education and Tutorials
+
+1. 🚀 [Quickstart: Getting Up and Running](https://constructive.io/learn/quickstart)
+   Get started with modular databases in minutes. Install prerequisites and deploy your first module.
+
+2. 📦 [Modular PostgreSQL Development with Database Packages](https://constructive.io/learn/modular-postgres)
+   Learn to organize PostgreSQL projects with pgpm workspaces and reusable database modules.
+
+3. ✏️ [Authoring Database Changes](https://constructive.io/learn/authoring-database-changes)
+   Master the workflow for adding, organizing, and managing database changes with pgpm.
+
+4. 🧪 [End-to-End PostgreSQL Testing with TypeScript](https://constructive.io/learn/e2e-postgres-testing)
+   Master end-to-end PostgreSQL testing with ephemeral databases, RLS testing, and CI/CD automation.
+
+5. ⚡ [Supabase Testing](https://constructive.io/learn/supabase)
+   Use TypeScript-first tools to test Supabase projects with realistic RLS, policies, and auth contexts.
+
+6. 💧 [Drizzle ORM Testing](https://constructive.io/learn/drizzle-testing)
+   Run full-stack tests with Drizzle ORM, including database setup, teardown, and RLS enforcement.
+
+7. 🔧 [Troubleshooting](https://constructive.io/learn/troubleshooting)
+   Common issues and solutions for pgpm, PostgreSQL, and testing.
+
+## Related Constructive Tooling
+
+### 🧪 Testing
+
+* [pgsql-test](https://github.com/constructive-io/constructive/tree/main/postgres/pgsql-test): **📊 Isolated testing environments** with per-test transaction rollbacks—ideal for integration tests, complex migrations, and RLS simulation.
+* [supabase-test](https://github.com/constructive-io/constructive/tree/main/postgres/supabase-test): **🧪 Supabase-native test harness** preconfigured for the local Supabase stack—per-test rollbacks, JWT/role context helpers, and CI/GitHub Actions ready.
+* [graphile-test](https://github.com/constructive-io/constructive/tree/main/graphile/graphile-test): **🔐 Authentication mocking** for Graphile-focused test helpers and emulating row-level security contexts.
+* [pg-query-context](https://github.com/constructive-io/constructive/tree/main/postgres/pg-query-context): **🔒 Session context injection** to add session-local context (e.g., `SET LOCAL`) into queries—ideal for setting `role`, `jwt.claims`, and other session settings.
+
+### 🧠 Parsing & AST
+
+* [pgsql-parser](https://www.npmjs.com/package/pgsql-parser): **🔄 SQL conversion engine** that interprets and converts PostgreSQL syntax.
+* [libpg-query-node](https://www.npmjs.com/package/libpg-query): **🌉 Node.js bindings** for `libpg_query`, converting SQL into parse trees.
+* [pg-proto-parser](https://www.npmjs.com/package/pg-proto-parser): **📦 Protobuf parser** for parsing PostgreSQL Protocol Buffers definitions to generate TypeScript interfaces, utility functions, and JSON mappings for enums.
+* [@pgsql/enums](https://www.npmjs.com/package/@pgsql/enums): **🏷️ TypeScript enums** for PostgreSQL AST for safe and ergonomic parsing logic.
+* [@pgsql/types](https://www.npmjs.com/package/@pgsql/types): **📝 Type definitions** for PostgreSQL AST nodes in TypeScript.
+* [@pgsql/utils](https://www.npmjs.com/package/@pgsql/utils): **🛠️ AST utilities** for constructing and transforming PostgreSQL syntax trees.
+* [pg-ast](https://www.npmjs.com/package/pg-ast): **🔍 Low-level AST tools** and transformations for Postgres query structures.
+
+### 🚀 API & Dev Tools
+
+* [@constructive-io/graphql-server](https://github.com/constructive-io/constructive/tree/main/graphql/server): **⚡ Express-based API server** powered by PostGraphile to expose a secure, scalable GraphQL API over your Postgres database.
+* [@constructive-io/graphql-explorer](https://github.com/constructive-io/constructive/tree/main/graphql/explorer): **🔎 Visual API explorer** with GraphiQL for browsing across all databases and schemas—useful for debugging, documentation, and API prototyping.
+
+### 🔁 Streaming & Uploads
+
+* [etag-hash](https://github.com/constructive-io/constructive/tree/main/streaming/etag-hash): **🏷️ S3-compatible ETags** created by streaming and hashing file uploads in chunks.
+* [etag-stream](https://github.com/constructive-io/constructive/tree/main/streaming/etag-stream): **🔄 ETag computation** via Node stream transformer during upload or transfer.
+* [uuid-hash](https://github.com/constructive-io/constructive/tree/main/streaming/uuid-hash): **🆔 Deterministic UUIDs** generated from hashed content, great for deduplication and asset referencing.
+* [uuid-stream](https://github.com/constructive-io/constructive/tree/main/streaming/uuid-stream): **🌊 Streaming UUID generation** based on piped file content—ideal for upload pipelines.
+* [@constructive-io/s3-streamer](https://github.com/constructive-io/constructive/tree/main/streaming/s3-streamer): **📤 Direct S3 streaming** for large files with support for metadata injection and content validation.
+* [@constructive-io/upload-names](https://github.com/constructive-io/constructive/tree/main/streaming/upload-names): **📂 Collision-resistant filenames** utility for structured and unique file names for uploads.
+
+### 🧰 CLI & Codegen
+
+* [pgpm](https://github.com/constructive-io/constructive/tree/main/pgpm/pgpm): **🖥️ PostgreSQL Package Manager** for modular Postgres development. Works with database workspaces, scaffolding, migrations, seeding, and installing database packages.
+* [@constructive-io/cli](https://github.com/constructive-io/constructive/tree/main/packages/cli): **🖥️ Command-line toolkit** for managing Constructive projects—supports database scaffolding, migrations, seeding, code generation, and automation.
+* [@constructive-io/graphql-codegen](https://github.com/constructive-io/constructive/tree/main/graphql/codegen): **✨ GraphQL code generation** (types, operations, SDK) from schema/endpoint introspection.
+* [@constructive-io/query-builder](https://github.com/constructive-io/constructive/tree/main/packages/query-builder): **🏗️ SQL constructor** providing a robust TypeScript-based query builder for dynamic generation of `SELECT`, `INSERT`, `UPDATE`, `DELETE`, and stored procedure calls—supports advanced SQL features like `JOIN`, `GROUP BY`, and schema-qualified queries.
+* [@constructive-io/graphql-query](https://github.com/constructive-io/constructive/tree/main/graphql/query): **🧩 Fluent GraphQL builder** for PostGraphile schemas. ⚡ Schema-aware via introspection, 🧩 composable and ergonomic for building deeply nested queries.
+
+## Credits
+
+**🛠 Built by the [Constructive](https://constructive.io) team — creators of modular Postgres tooling for secure, composable backends. If you like our work, contribute on [GitHub](https://github.com/constructive-io).**
+
+## Disclaimer
+
+AS DESCRIBED IN THE LICENSES, THE SOFTWARE IS PROVIDED "AS IS", AT YOUR OWN RISK, AND WITHOUT WARRANTIES OF ANY KIND.
+
+No developer or entity involved in creating this software will be liable for any claims or damages whatsoever associated with your use, inability to use, or your interaction with other users of the code, including any direct, indirect, incidental, special, exemplary, punitive or consequential damages, or loss of profits, cryptocurrencies, tokens, or anything else of value.
package/esm/index.js
ADDED
package/esm/s3.js
ADDED
@@ -0,0 +1,22 @@
+import { S3Client } from '@aws-sdk/client-s3';
+export default function getS3(opts) {
+    const isMinio = Boolean(opts.minioEndpoint);
+    const awsConfig = {
+        region: opts.awsRegion,
+        ...(opts.awsAccessKey && opts.awsSecretKey
+            ? {
+                credentials: {
+                    accessKeyId: opts.awsAccessKey,
+                    secretAccessKey: opts.awsSecretKey,
+                },
+            }
+            : {}),
+        ...(isMinio
+            ? {
+                endpoint: opts.minioEndpoint,
+                forcePathStyle: true,
+            }
+            : {}),
+    };
+    return new S3Client(awsConfig);
+}
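Two behaviors in `getS3` are worth spelling out. Explicit credentials are attached only when *both* keys are present; otherwise the conditional spread adds no `credentials` entry and the `S3Client` falls back to the AWS SDK's default credential provider chain. And `forcePathStyle: true` is applied only for MinIO-style endpoints, which generally lack virtual-hosted bucket addressing. A sketch of the fallback path, using the package's exported `getClient` (note the published TypeScript types declare both keys as required, so this is a plain-JavaScript pattern):

```js
import { getClient } from '@constructive-io/s3-streamer';

// No awsAccessKey/awsSecretKey passed: the S3Client resolves credentials via
// the SDK's default provider chain (AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY
// env vars, shared config files, or IAM instance roles).
const client = getClient({ awsRegion: 'us-east-1' });
```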
package/esm/streamer.js
ADDED
@@ -0,0 +1,31 @@
+import getS3 from './s3';
+import { upload as streamUpload } from './utils';
+export class Streamer {
+    s3;
+    defaultBucket;
+    constructor({ awsRegion, awsSecretKey, awsAccessKey, minioEndpoint, defaultBucket }) {
+        this.s3 = getS3({
+            awsRegion,
+            awsSecretKey,
+            awsAccessKey,
+            minioEndpoint
+        });
+        this.defaultBucket = defaultBucket;
+    }
+    async upload({ readStream, filename, key, bucket = this.defaultBucket }) {
+        if (!bucket) {
+            throw new Error('Bucket is required');
+        }
+        return await streamUpload({
+            client: this.s3,
+            readStream,
+            filename,
+            key,
+            bucket
+        });
+    }
+    destroy() {
+        this.s3.destroy();
+    }
+}
+export default Streamer;
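Since `bucket` defaults to `defaultBucket` (throwing only when neither is set) and `destroy()` releases the underlying client, a common usage shape is a `try`/`finally`. A sketch, not from the package docs:

```js
import Streamer from '@constructive-io/s3-streamer';
import { createReadStream } from 'fs';

const streamer = new Streamer({ awsRegion: 'us-east-1', defaultBucket: 'my-bucket' });
try {
  // bucket is omitted here, so defaultBucket above is used.
  await streamer.upload({
    readStream: createReadStream('notes.txt'),
    filename: 'notes.txt',
    key: 'uploads/notes.txt'
  });
} finally {
  streamer.destroy(); // release sockets held by the S3Client
}
```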
package/esm/utils.js
ADDED
@@ -0,0 +1,91 @@
+import { Upload } from '@aws-sdk/lib-storage';
+import { ContentStream, streamContentType } from '@constructive-io/content-type-stream';
+import stream from 'stream';
+export const uploadFromStream = ({ client, key, contentType, bucket }) => {
+    const pass = new stream.PassThrough();
+    const upload = new Upload({
+        client,
+        params: {
+            Body: pass,
+            Key: key,
+            ContentType: contentType,
+            Bucket: bucket
+        },
+    });
+    upload.done()
+        .then((data) => {
+            // Transform to match v2 response format
+            const result = {
+                Location: data.Location || `https://${bucket}.s3.amazonaws.com/${key}`,
+                ETag: data.ETag,
+                Bucket: bucket,
+                Key: key
+            };
+            pass.emit('upload', result);
+        })
+        .catch((err) => {
+            pass.emit('error', err);
+        });
+    return pass;
+};
+export const asyncUpload = ({ client, key, contentType, readStream, magic, bucket }) => {
+    return new Promise((resolve, reject) => {
+        // upload stream
+        let upload;
+        const uploadStream = uploadFromStream({
+            client,
+            key,
+            contentType,
+            bucket
+        });
+        // content stream
+        let contents;
+        const contentStream = new ContentStream();
+        const tryResolve = () => {
+            if (contents && upload) {
+                resolve({
+                    upload,
+                    magic,
+                    contentType,
+                    contents
+                });
+            }
+        };
+        contentStream
+            .on('contents', function (results) {
+                contents = results;
+                tryResolve();
+            })
+            .on('error', (error) => {
+                reject(error);
+            });
+        uploadStream
+            .on('upload', (results) => {
+                upload = results;
+                tryResolve();
+            })
+            .on('error', (error) => {
+                reject(error);
+            });
+        // Ensure proper cleanup on stream end
+        uploadStream.on('finish', () => {
+            readStream.destroy();
+        });
+        readStream.pipe(contentStream);
+        contentStream.pipe(uploadStream);
+    });
+};
+export const upload = async ({ client, readStream, filename, key, bucket }) => {
+    const { stream: newStream, magic, contentType } = await streamContentType({
+        readStream,
+        filename
+    });
+    return await asyncUpload({
+        client,
+        key,
+        contentType,
+        readStream: newStream,
+        magic,
+        bucket
+    });
+};
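The pattern in `uploadFromStream` deserves a note: it returns a `PassThrough` you pipe into, while the S3 `Upload` drains it in the background, and completion is signalled by emitting a custom `'upload'` event on that same stream. If you use it directly (a sketch; normally you would call `upload`, which also wires up content-type detection and metadata):

```js
import { createReadStream } from 'fs';
import { getClient, uploadFromStream } from '@constructive-io/s3-streamer';

const client = getClient({ awsRegion: 'us-east-1' });
const pass = uploadFromStream({
  client,
  key: 'uploads/raw.bin',
  contentType: 'application/octet-stream',
  bucket: 'my-bucket'
});

// 'upload' is a custom event emitted by this package, not a Node stream event.
pass.on('upload', (result) => console.log('done:', result.Location));
pass.on('error', (err) => console.error('upload failed:', err));

createReadStream('raw.bin').pipe(pass);
```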
package/index.d.ts
ADDED
package/index.js
ADDED
@@ -0,0 +1,26 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Streamer = exports.getClient = void 0;
+const s3_1 = __importDefault(require("./s3"));
+exports.getClient = s3_1.default;
+const streamer_1 = __importDefault(require("./streamer"));
+exports.Streamer = streamer_1.default;
+__exportStar(require("./utils"), exports);
+exports.default = streamer_1.default;
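The compiled entry point re-exports `getClient` (the `s3.js` default), the `Streamer` class (also the default export), and everything from `utils.js`, so CommonJS consumers can require it directly:

```js
// CommonJS usage of the compiled entry point.
const Streamer = require('@constructive-io/s3-streamer').default;
const { getClient, upload, asyncUpload, uploadFromStream } = require('@constructive-io/s3-streamer');
```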
package/package.json
ADDED
@@ -0,0 +1,52 @@
+{
+  "name": "@constructive-io/s3-streamer",
+  "version": "2.8.5",
+  "author": "Constructive <developers@constructive.io>",
+  "description": "stream files to s3",
+  "main": "index.js",
+  "module": "esm/index.js",
+  "types": "index.d.ts",
+  "homepage": "https://github.com/constructive-io/constructive",
+  "license": "MIT",
+  "publishConfig": {
+    "access": "public",
+    "directory": "dist"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/constructive-io/constructive"
+  },
+  "bugs": {
+    "url": "https://github.com/constructive-io/constructive/issues"
+  },
+  "scripts": {
+    "clean": "makage clean",
+    "prepack": "npm run build",
+    "build": "makage build",
+    "build:dev": "makage build --dev",
+    "lint": "eslint . --fix",
+    "test": "jest --passWithNoTests",
+    "test:watch": "jest --watch"
+  },
+  "devDependencies": {
+    "@constructive-io/s3-utils": "^2.3.5",
+    "@pgpmjs/env": "^2.8.4",
+    "glob": "^13.0.0",
+    "makage": "^0.1.8"
+  },
+  "dependencies": {
+    "@aws-sdk/client-s3": "^3.952.0",
+    "@aws-sdk/lib-storage": "^3.940.0",
+    "@constructive-io/content-type-stream": "^2.4.3",
+    "@pgpmjs/types": "^2.12.4"
+  },
+  "keywords": [
+    "s3",
+    "stream",
+    "upload",
+    "aws",
+    "minio",
+    "constructive"
+  ],
+  "gitHead": "22cfe32e994e26a6490e04e28bab26d1e7e6345c"
+}
package/s3.d.ts
ADDED
package/s3.js
ADDED
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = getS3;
+const client_s3_1 = require("@aws-sdk/client-s3");
+function getS3(opts) {
+    const isMinio = Boolean(opts.minioEndpoint);
+    const awsConfig = {
+        region: opts.awsRegion,
+        ...(opts.awsAccessKey && opts.awsSecretKey
+            ? {
+                credentials: {
+                    accessKeyId: opts.awsAccessKey,
+                    secretAccessKey: opts.awsSecretKey,
+                },
+            }
+            : {}),
+        ...(isMinio
+            ? {
+                endpoint: opts.minioEndpoint,
+                forcePathStyle: true,
+            }
+            : {}),
+    };
+    return new client_s3_1.S3Client(awsConfig);
+}
package/streamer.d.ts
ADDED
@@ -0,0 +1,23 @@
+import { ReadStream } from 'fs';
+import { type AsyncUploadResult } from './utils';
+interface StreamerOptions {
+    awsRegion: string;
+    awsSecretKey: string;
+    awsAccessKey: string;
+    minioEndpoint?: string;
+    defaultBucket: string;
+}
+interface UploadParams {
+    readStream: ReadStream;
+    filename: string;
+    key: string;
+    bucket?: string;
+}
+export declare class Streamer {
+    private s3;
+    private defaultBucket?;
+    constructor({ awsRegion, awsSecretKey, awsAccessKey, minioEndpoint, defaultBucket }: StreamerOptions);
+    upload({ readStream, filename, key, bucket }: UploadParams): Promise<AsyncUploadResult>;
+    destroy(): void;
+}
+export default Streamer;
package/streamer.js
ADDED
@@ -0,0 +1,38 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Streamer = void 0;
+const s3_1 = __importDefault(require("./s3"));
+const utils_1 = require("./utils");
+class Streamer {
+    s3;
+    defaultBucket;
+    constructor({ awsRegion, awsSecretKey, awsAccessKey, minioEndpoint, defaultBucket }) {
+        this.s3 = (0, s3_1.default)({
+            awsRegion,
+            awsSecretKey,
+            awsAccessKey,
+            minioEndpoint
+        });
+        this.defaultBucket = defaultBucket;
+    }
+    async upload({ readStream, filename, key, bucket = this.defaultBucket }) {
+        if (!bucket) {
+            throw new Error('Bucket is required');
+        }
+        return await (0, utils_1.upload)({
+            client: this.s3,
+            readStream,
+            filename,
+            key,
+            bucket
+        });
+    }
+    destroy() {
+        this.s3.destroy();
+    }
+}
+exports.Streamer = Streamer;
+exports.default = Streamer;
package/utils.d.ts
ADDED
@@ -0,0 +1,38 @@
+import { S3Client } from '@aws-sdk/client-s3';
+import { PassThrough, Readable } from 'stream';
+export interface UploadParams {
+    client: S3Client;
+    key: string;
+    contentType: string;
+    bucket: string;
+}
+export interface AsyncUploadParams extends UploadParams {
+    readStream: Readable;
+    magic: {
+        charset: string;
+    };
+}
+export interface UploadWithFilenameParams {
+    client: S3Client;
+    readStream: Readable;
+    filename: string;
+    key: string;
+    bucket: string;
+}
+export interface UploadResult {
+    Location: string;
+    ETag?: string;
+    Bucket?: string;
+    Key?: string;
+}
+export interface AsyncUploadResult {
+    upload: UploadResult;
+    magic: {
+        charset: string;
+    };
+    contentType: string;
+    contents: unknown;
+}
+export declare const uploadFromStream: ({ client, key, contentType, bucket }: UploadParams) => PassThrough;
+export declare const asyncUpload: ({ client, key, contentType, readStream, magic, bucket }: AsyncUploadParams) => Promise<AsyncUploadResult>;
+export declare const upload: ({ client, readStream, filename, key, bucket }: UploadWithFilenameParams) => Promise<AsyncUploadResult>;
package/utils.js
ADDED
@@ -0,0 +1,100 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.upload = exports.asyncUpload = exports.uploadFromStream = void 0;
+const lib_storage_1 = require("@aws-sdk/lib-storage");
+const content_type_stream_1 = require("@constructive-io/content-type-stream");
+const stream_1 = __importDefault(require("stream"));
+const uploadFromStream = ({ client, key, contentType, bucket }) => {
+    const pass = new stream_1.default.PassThrough();
+    const upload = new lib_storage_1.Upload({
+        client,
+        params: {
+            Body: pass,
+            Key: key,
+            ContentType: contentType,
+            Bucket: bucket
+        },
+    });
+    upload.done()
+        .then((data) => {
+            // Transform to match v2 response format
+            const result = {
+                Location: data.Location || `https://${bucket}.s3.amazonaws.com/${key}`,
+                ETag: data.ETag,
+                Bucket: bucket,
+                Key: key
+            };
+            pass.emit('upload', result);
+        })
+        .catch((err) => {
+            pass.emit('error', err);
+        });
+    return pass;
+};
+exports.uploadFromStream = uploadFromStream;
+const asyncUpload = ({ client, key, contentType, readStream, magic, bucket }) => {
+    return new Promise((resolve, reject) => {
+        // upload stream
+        let upload;
+        const uploadStream = (0, exports.uploadFromStream)({
+            client,
+            key,
+            contentType,
+            bucket
+        });
+        // content stream
+        let contents;
+        const contentStream = new content_type_stream_1.ContentStream();
+        const tryResolve = () => {
+            if (contents && upload) {
+                resolve({
+                    upload,
+                    magic,
+                    contentType,
+                    contents
+                });
+            }
+        };
+        contentStream
+            .on('contents', function (results) {
+                contents = results;
+                tryResolve();
+            })
+            .on('error', (error) => {
+                reject(error);
+            });
+        uploadStream
+            .on('upload', (results) => {
+                upload = results;
+                tryResolve();
+            })
+            .on('error', (error) => {
+                reject(error);
+            });
+        // Ensure proper cleanup on stream end
+        uploadStream.on('finish', () => {
+            readStream.destroy();
+        });
+        readStream.pipe(contentStream);
+        contentStream.pipe(uploadStream);
+    });
+};
+exports.asyncUpload = asyncUpload;
+const upload = async ({ client, readStream, filename, key, bucket }) => {
+    const { stream: newStream, magic, contentType } = await (0, content_type_stream_1.streamContentType)({
+        readStream,
+        filename
+    });
+    return await (0, exports.asyncUpload)({
+        client,
+        key,
+        contentType,
+        readStream: newStream,
+        magic,
+        bucket
+    });
+};
+exports.upload = upload;