@alepha/bucket-s3 0.15.0 → 0.15.2
This diff compares publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +12 -3
- package/dist/index.d.ts +35 -27
- package/dist/index.js +12 -4
- package/dist/index.js.map +1 -1
- package/package.json +7 -7
- package/src/index.ts +11 -3
- package/src/providers/S3FileStorageProvider.ts +1 -1
package/README.md
CHANGED

@@ -12,9 +12,18 @@ npm install alepha

 ## Module

-
-
-
+| type | quality | stability |
+|------|---------|-----------|
+| backend | standard | stable |
+
+S3-compatible file storage provider.
+
+**Features:**
+- AWS S3 compatibility
+- Cloudflare R2 compatibility
+- MinIO compatibility
+- DigitalOcean Spaces compatibility
+- Any S3-compatible backend

 ## API Reference

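For orientation (not part of the package diff itself): the backends listed in the new README are selected purely through environment variables. A minimal sketch of how they might be set is shown below; the variable names come from the `envSchema` visible in the `index.d.ts` diff that follows, while all endpoints and credential values are placeholders.

```ts
// Illustrative only — variable names are from the package's envSchema; values are placeholders.

// AWS S3 (default): no endpoint needed, region falls back to "us-east-1".
process.env.S3_ACCESS_KEY_ID = "AKIA...";        // placeholder
process.env.S3_SECRET_ACCESS_KEY = "secret";     // placeholder

// Cloudflare R2: custom endpoint, region "auto".
process.env.S3_ENDPOINT = "https://<account-id>.r2.cloudflarestorage.com";
process.env.S3_REGION = "auto";

// MinIO (local): custom endpoint plus path-style URLs.
process.env.S3_ENDPOINT = "http://localhost:9000";
process.env.S3_FORCE_PATH_STYLE = "true";
```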
package/dist/index.d.ts
CHANGED

@@ -2,40 +2,40 @@ import * as alepha1 from "alepha";
 import { Alepha, FileLike, Static } from "alepha";
 import { FileStorageProvider } from "alepha/bucket";
 import { S3Client } from "@aws-sdk/client-s3";
-import { FileDetector, FileSystemProvider } from "alepha/file";
 import * as alepha_logger0 from "alepha/logger";
+import { FileDetector, FileSystemProvider } from "alepha/system";

 //#region src/providers/S3FileStorageProvider.d.ts
 declare const envSchema: alepha1.TObject<{
 /**
-
-
-
-
-
-
-
-
-
+* Custom S3 endpoint URL for S3-compatible services.
+*
+* Examples:
+* - Cloudflare R2: https://<account-id>.r2.cloudflarestorage.com
+* - MinIO: http://localhost:9000
+* - DigitalOcean Spaces: https://<region>.digitaloceanspaces.com
+*
+* Leave empty for AWS S3.
+*/
 S3_ENDPOINT: alepha1.TOptional<alepha1.TString>;
 /**
-
-
-
-
+* AWS region or "auto" for R2.
+*
+* @default "us-east-1"
+*/
 S3_REGION: alepha1.TOptional<alepha1.TString>;
 /**
-
-
+* Access key ID for S3 authentication.
+*/
 S3_ACCESS_KEY_ID: alepha1.TString;
 /**
-
-
+* Secret access key for S3 authentication.
+*/
 S3_SECRET_ACCESS_KEY: alepha1.TString;
 /**
-
-
-
+* Force path-style URLs (required for MinIO and some S3-compatible services).
+* Set to "true" to enable.
+*/
 S3_FORCE_PATH_STYLE: alepha1.TOptional<alepha1.TString>;
 }>;
 declare module "alepha" {

@@ -63,9 +63,9 @@ declare class S3FileStorageProvider implements FileStorageProvider {
 constructor();
 protected readonly onStart: alepha1.HookPrimitive<"start">;
 /**
-
-
-
+* Convert bucket name to S3-compatible format.
+* S3 bucket names must be lowercase, 3-63 characters, no underscores.
+*/
 convertName(name: string): string;
 protected createId(mimeType: string): string;
 upload(bucketName: string, file: FileLike, fileId?: string): Promise<string>;

@@ -77,11 +77,19 @@ declare class S3FileStorageProvider implements FileStorageProvider {
 //#endregion
 //#region src/index.d.ts
 /**
-*
+* | type | quality | stability |
+* |------|---------|-----------|
+* | backend | standard | stable |
 *
-*
+* S3-compatible file storage provider.
+*
+* **Features:**
+* - AWS S3 compatibility
+* - Cloudflare R2 compatibility
+* - MinIO compatibility
+* - DigitalOcean Spaces compatibility
+* - Any S3-compatible backend
 *
-* @see {@link S3FileStorageProvider}
 * @module alepha.bucket.s3
 */
 declare const AlephaBucketS3: alepha1.Service<alepha1.Module>;
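As a rough sketch of how the documented variables map onto the AWS SDK — an approximation based on the env schema above and the public `S3Client` options from `@aws-sdk/client-s3`, not the package's own constructor:

```ts
import { S3Client } from "@aws-sdk/client-s3";

// Sketch only: variable names come from the envSchema documented above.
const client = new S3Client({
  endpoint: process.env.S3_ENDPOINT || undefined,       // empty => AWS S3 default endpoint
  region: process.env.S3_REGION || "us-east-1",          // "auto" for Cloudflare R2
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY_ID ?? "",
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY ?? "",
  },
  forcePathStyle: process.env.S3_FORCE_PATH_STYLE === "true", // needed for MinIO
});
```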
package/dist/index.js
CHANGED

@@ -1,8 +1,8 @@
 import { $env, $hook, $inject, $module, Alepha, AlephaError, t } from "alepha";
 import { $bucket, AlephaBucket, FileNotFoundError, FileStorageProvider } from "alepha/bucket";
 import { CreateBucketCommand, DeleteObjectCommand, GetObjectCommand, HeadBucketCommand, HeadObjectCommand, PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
-import { FileDetector, FileSystemProvider } from "alepha/file";
 import { $logger } from "alepha/logger";
+import { FileDetector, FileSystemProvider } from "alepha/system";

 //#region src/providers/S3FileStorageProvider.ts
 const envSchema = t.object({

@@ -158,11 +158,19 @@ var S3FileStorageProvider = class {
 //#endregion
 //#region src/index.ts
 /**
-*
+* | type | quality | stability |
+* |------|---------|-----------|
+* | backend | standard | stable |
 *
-*
+* S3-compatible file storage provider.
+*
+* **Features:**
+* - AWS S3 compatibility
+* - Cloudflare R2 compatibility
+* - MinIO compatibility
+* - DigitalOcean Spaces compatibility
+* - Any S3-compatible backend
 *
-* @see {@link S3FileStorageProvider}
 * @module alepha.bucket.s3
 */
 const AlephaBucketS3 = $module({
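The `convertName` contract documented in the typings (lowercase, 3-63 characters, no underscores) can be illustrated with a hypothetical standalone helper; the provider's actual method may differ in details:

```ts
// Hypothetical illustration of the documented bucket-name constraint
// (S3 bucket names must be lowercase, 3-63 characters, no underscores).
function toS3BucketName(name: string): string {
  return name.replaceAll("/", "-").replaceAll("_", "-").toLowerCase();
}

toS3BucketName("user_uploads"); // "user-uploads"
toS3BucketName("App/Avatars");  // "app-avatars"
```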
package/dist/index.js.map
CHANGED
package/package.json
CHANGED

@@ -13,7 +13,7 @@
 "minio"
 ],
 "author": "Nicolas Foures",
-"version": "0.15.0",
+"version": "0.15.2",
 "type": "module",
 "engines": {
 "node": ">=22.0.0"

@@ -26,17 +26,17 @@
 "src"
 ],
 "dependencies": {
-"@aws-sdk/client-s3": "^3.
+"@aws-sdk/client-s3": "^3.978.0"
 },
 "devDependencies": {
-"@biomejs/biome": "^2.3.
-"alepha": "0.15.
-"tsdown": "^0.20.
+"@biomejs/biome": "^2.3.13",
+"alepha": "0.15.2",
+"tsdown": "^0.20.1",
 "typescript": "^5.9.3",
-"vitest": "^4.0.
+"vitest": "^4.0.18"
 },
 "peerDependencies": {
-"alepha": "0.15.
+"alepha": "0.15.2"
 },
 "scripts": {
 "lint": "alepha lint",
package/src/index.ts
CHANGED

@@ -7,11 +7,19 @@ export * from "./providers/S3FileStorageProvider.ts";
 // ---------------------------------------------------------------------------------------------------------------------

 /**
-*
+* | type | quality | stability |
+* |------|---------|-----------|
+* | backend | standard | stable |
 *
-*
+* S3-compatible file storage provider.
+*
+* **Features:**
+* - AWS S3 compatibility
+* - Cloudflare R2 compatibility
+* - MinIO compatibility
+* - DigitalOcean Spaces compatibility
+* - Any S3-compatible backend
 *
-* @see {@link S3FileStorageProvider}
 * @module alepha.bucket.s3
 */
 export const AlephaBucketS3 = $module({

package/src/providers/S3FileStorageProvider.ts
CHANGED

@@ -24,8 +24,8 @@ import {
 FileNotFoundError,
 type FileStorageProvider,
 } from "alepha/bucket";
-import { FileDetector, FileSystemProvider } from "alepha/file";
 import { $logger } from "alepha/logger";
+import { FileDetector, FileSystemProvider } from "alepha/system";

 const envSchema = t.object({
 /**