@alepha/bucket-azure 0.7.7 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +22 -23
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +17 -20
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.ts +17 -20
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +24 -25
- package/dist/index.js.map +1 -1
- package/package.json +5 -5
- package/src/index.ts +2 -1
- package/src/providers/AzureFileStorageProvider.ts +27 -18
package/dist/index.cjs
CHANGED
@@ -29,15 +29,12 @@ const __alepha_file = __toESM(require("@alepha/file"));
  const __azure_storage_blob = __toESM(require("@azure/storage-blob"));

  //#region src/providers/AzureFileStorageProvider.ts
- const envSchema = __alepha_core.t.object({ AZ_STORAGE_CONNECTION_STRING: __alepha_core.t.string({
- size: "long",
- default: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
- }) });
+ const envSchema = __alepha_core.t.object({ AZ_STORAGE_CONNECTION_STRING: __alepha_core.t.string({ size: "long" }) });
  var AzureFileStorageProvider = class {
  log = (0, __alepha_core.$logger)();
- env = (0, __alepha_core.$
-
-
+ env = (0, __alepha_core.$inject)(envSchema);
+ bucket = (0, __alepha_core.$inject)(__alepha_bucket.BucketDescriptorProvider);
+ time = (0, __alepha_core.$inject)(__alepha_datetime.DateTimeProvider);
  containers = {};
  blobServiceClient;
  options = {};
@@ -53,20 +50,20 @@ var AzureFileStorageProvider = class {
  this.containers[containerName] = container;
  return container;
  }
- async upload(bucketName, file
+ async upload(bucketName, file, fileId) {
  fileId ??= this.createId();
  const block = this.getBlock(bucketName, fileId);
  const metadata = {
- name: file
- type: file
+ name: file.name,
+ type: file.type
  };
- if (file
+ if (file.filepath) await block.uploadFile(file.filepath, {
  metadata,
- blobHTTPHeaders: { blobContentType: file
+ blobHTTPHeaders: { blobContentType: file.type }
  });
- else if (file
+ else if (file.size > 0) await block.uploadData(await file.arrayBuffer(), {
  metadata,
- blobHTTPHeaders: { blobContentType: file
+ blobHTTPHeaders: { blobContentType: file.type }
  });
  else throw new Error("Raw stream upload is not supported yet");
  return fileId;
@@ -78,7 +75,7 @@ var AzureFileStorageProvider = class {
  throw error;
  });
  if (!blob.readableStreamBody) throw new __alepha_bucket.FileNotFoundError("File not found - empty stream body");
- return (0, __alepha_file.
+ return (0, __alepha_file.createFile)(blob.readableStreamBody, blob.metadata);
  }
  async exists(bucketName, fileId) {
  return await this.getBlock(bucketName, fileId).exists();
@@ -96,9 +93,9 @@ var AzureFileStorageProvider = class {
  return this.containers[container].getBlockBlobClient(fileId);
  }
  onStart = (0, __alepha_core.$hook)({
-
+ on: "start",
  handler: async () => {
- for (const bucket of this.
+ for (const bucket of this.bucket.getBuckets()) {
  const containerName = bucket.name.replaceAll("/", "-").toLowerCase();
  this.log.debug(`Prepare container ${containerName}...`);
  if (!this.containers[containerName]) this.containers[containerName] = await this.createContainerClient(containerName);
@@ -108,7 +105,7 @@ var AzureFileStorageProvider = class {
  });
  async createContainerClient(name) {
  const container = this.blobServiceClient.getContainerClient(name);
- await this.
+ await this.time.deadline((abortSignal) => container.createIfNotExists({ abortSignal }), [5, "seconds"]);
  return container;
  }
  createId() {
@@ -128,11 +125,13 @@ var AzureFileStorageProvider = class {
  */
  var AlephaBucketAzure = class {
  name = "alepha.bucket.azure";
- $services = (alepha) =>
-
-
-
-
+ $services = (alepha) => {
+ alepha.with({
+ provide: __alepha_bucket.FileStorageProvider,
+ use: AzureFileStorageProvider,
+ optional: true
+ }).with(__alepha_bucket.AlephaBucket);
+ };
  };

  //#endregion
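A practical consequence of the first hunk above: AZ_STORAGE_CONNECTION_STRING no longer defaults to the Azurite development account, so the value must now come from the environment. A minimal sketch for local development, assuming the variable is set before the Alepha application starts; the string is simply the old default removed in 0.8.0 (the well-known Azurite account), and any real Azure Storage connection string works the same way.

// Local development sketch: restore the removed Azurite default explicitly.
// Assumption: this runs before the Alepha modules read their env schema.
process.env.AZ_STORAGE_CONNECTION_STRING ??=
  "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;" +
  "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;" +
  "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;";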
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","names":["BucketDescriptorProvider","DateTimeProvider","containerName: string","bucketName: string","file: FileLike","fileId?: string","
+
{"version":3,"file":"index.cjs","names":["envSchema: TObject<{\n\tAZ_STORAGE_CONNECTION_STRING: TString;\n}>","BucketDescriptorProvider","DateTimeProvider","containerName: string","bucketName: string","file: FileLike","fileId?: string","fileId: string","FileNotFoundError","container: string","name: string","alepha: Alepha","FileStorageProvider","AlephaBucket"],"sources":["../src/providers/AzureFileStorageProvider.ts","../src/index.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport {\n\tBucketDescriptorProvider,\n\tFileNotFoundError,\n\ttype FileStorageProvider,\n} from \"@alepha/bucket\";\nimport {\n\t$hook,\n\t$inject,\n\t$logger,\n\ttype FileLike,\n\ttype HookDescriptor,\n\ttype Logger,\n\ttype Static,\n\ttype TObject,\n\ttype TString,\n\tt,\n} from \"@alepha/core\";\nimport { DateTimeProvider } from \"@alepha/datetime\";\nimport { createFile } from \"@alepha/file\";\nimport {\n\tBlobServiceClient,\n\ttype BlockBlobClient,\n\ttype ContainerClient,\n\ttype StoragePipelineOptions,\n} from \"@azure/storage-blob\";\n\nconst envSchema: TObject<{\n\tAZ_STORAGE_CONNECTION_STRING: TString;\n}> = t.object({\n\tAZ_STORAGE_CONNECTION_STRING: t.string({\n\t\tsize: \"long\",\n\t}),\n});\n\ndeclare module \"@alepha/core\" {\n\tinterface Env extends Partial<Static<typeof envSchema>> {}\n}\n\nexport class AzureFileStorageProvider implements FileStorageProvider {\n\tprotected readonly log: Logger = $logger();\n\tprotected readonly env: Static<typeof envSchema> = $inject(envSchema);\n\tprotected readonly bucket: BucketDescriptorProvider = $inject(\n\t\tBucketDescriptorProvider,\n\t);\n\tprotected readonly time: DateTimeProvider = $inject(DateTimeProvider);\n\tprotected readonly containers: Record<string, ContainerClient> = {};\n\tprotected readonly blobServiceClient: BlobServiceClient;\n\tprotected readonly options: StoragePipelineOptions = {};\n\n\tconstructor() {\n\t\tthis.blobServiceClient = BlobServiceClient.fromConnectionString(\n\t\t\tthis.env.AZ_STORAGE_CONNECTION_STRING,\n\t\t\tthis.storagePipelineOptions(),\n\t\t);\n\t}\n\n\tpublic storagePipelineOptions(): StoragePipelineOptions {\n\t\treturn {};\n\t}\n\n\tpublic async createContainer(\n\t\tcontainerName: string,\n\t): Promise<ContainerClient> {\n\t\tif (this.containers[containerName]) {\n\t\t\treturn this.containers[containerName];\n\t\t}\n\t\tconst container = await this.createContainerClient(containerName);\n\t\tthis.containers[containerName] = container;\n\t\treturn container;\n\t}\n\n\tpublic async upload(\n\t\tbucketName: string,\n\t\tfile: FileLike,\n\t\tfileId?: string,\n\t): Promise<string> {\n\t\tfileId ??= this.createId();\n\t\tconst block = this.getBlock(bucketName, fileId);\n\n\t\tconst metadata = {\n\t\t\tname: file.name,\n\t\t\ttype: file.type,\n\t\t};\n\n\t\tif (file.filepath) {\n\t\t\tawait block.uploadFile(file.filepath, {\n\t\t\t\tmetadata,\n\t\t\t\tblobHTTPHeaders: {\n\t\t\t\t\tblobContentType: file.type,\n\t\t\t\t},\n\t\t\t});\n\t\t} else if (file.size > 0) {\n\t\t\tawait block.uploadData(await file.arrayBuffer(), {\n\t\t\t\tmetadata,\n\t\t\t\tblobHTTPHeaders: {\n\t\t\t\t\tblobContentType: file.type,\n\t\t\t\t},\n\t\t\t});\n\t\t} else {\n\t\t\tthrow new Error(\"Raw stream upload is not supported yet\");\n\t\t}\n\n\t\treturn fileId;\n\t}\n\n\tpublic async download(bucketName: string, fileId: string): Promise<FileLike> {\n\t\tconst block = this.getBlock(bucketName, fileId);\n\n\t\tconst blob = await block.download().catch((error) => {\n\t\t\tif (error instanceof Error) {\n\t\t\t\tthrow new 
FileNotFoundError(\"Error downloading file\", { cause: error });\n\t\t\t}\n\n\t\t\tthrow error;\n\t\t});\n\n\t\tif (!blob.readableStreamBody) {\n\t\t\tthrow new FileNotFoundError(\"File not found - empty stream body\");\n\t\t}\n\n\t\treturn createFile(blob.readableStreamBody, blob.metadata);\n\t}\n\n\tpublic async exists(bucketName: string, fileId: string): Promise<boolean> {\n\t\treturn await this.getBlock(bucketName, fileId).exists();\n\t}\n\n\tpublic async delete(bucketName: string, fileId: string): Promise<void> {\n\t\ttry {\n\t\t\tawait this.getBlock(bucketName, fileId).delete();\n\t\t} catch (error) {\n\t\t\tif (error instanceof Error) {\n\t\t\t\tthrow new FileNotFoundError(\"Error deleting file\", { cause: error });\n\t\t\t}\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\tpublic getBlock(container: string, fileId: string): BlockBlobClient {\n\t\tif (!this.containers[container]) {\n\t\t\tthrow new FileNotFoundError(\n\t\t\t\t`File '${fileId}' not found - container '${container}' does not exists`,\n\t\t\t);\n\t\t}\n\n\t\treturn this.containers[container].getBlockBlobClient(fileId);\n\t}\n\n\tpublic readonly onStart: HookDescriptor<\"start\"> = $hook({\n\t\ton: \"start\",\n\t\thandler: async () => {\n\t\t\tfor (const bucket of this.bucket.getBuckets()) {\n\t\t\t\tconst containerName = bucket.name.replaceAll(\"/\", \"-\").toLowerCase();\n\t\t\t\tthis.log.debug(`Prepare container ${containerName}...`);\n\n\t\t\t\tif (!this.containers[containerName]) {\n\t\t\t\t\tthis.containers[containerName] =\n\t\t\t\t\t\tawait this.createContainerClient(containerName);\n\t\t\t\t}\n\n\t\t\t\tthis.log.info(`Container ${bucket} OK`);\n\t\t\t}\n\t\t},\n\t});\n\n\tprotected async createContainerClient(\n\t\tname: string,\n\t): Promise<ContainerClient> {\n\t\tconst container = this.blobServiceClient.getContainerClient(name);\n\n\t\tawait this.time.deadline(\n\t\t\t(abortSignal) => container.createIfNotExists({ abortSignal }),\n\t\t\t[5, \"seconds\"],\n\t\t);\n\n\t\treturn container;\n\t}\n\n\tprotected createId(): string {\n\t\treturn randomUUID();\n\t}\n}\n","import { AlephaBucket, FileStorageProvider } from \"@alepha/bucket\";\nimport type { Alepha, Module } from \"@alepha/core\";\nimport { AzureFileStorageProvider } from \"./providers/AzureFileStorageProvider.ts\";\n\nexport * from \"./providers/AzureFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Alepha Bucket Azure Module\n *\n * Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.\n *\n * @see {@link AzureFileStorageProvider}\n * @module alepha.bucket.azure\n */\nexport class AlephaBucketAzure implements Module {\n\tpublic readonly name = \"alepha.bucket.azure\";\n\tpublic readonly $services = (alepha: Alepha): void => {\n\t\talepha\n\t\t\t.with({\n\t\t\t\tprovide: FileStorageProvider,\n\t\t\t\tuse: AzureFileStorageProvider,\n\t\t\t\toptional: 
true,\n\t\t\t})\n\t\t\t.with(AlephaBucket);\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BA,MAAMA,YAED,gBAAE,OAAO,EACb,8BAA8B,gBAAE,OAAO,EACtC,MAAM,OACN,EAAC,CACF,EAAC;AAMF,IAAa,2BAAb,MAAqE;CACpE,AAAmB,MAAc,4BAAS;CAC1C,AAAmB,MAAgC,2BAAQ,UAAU;CACrE,AAAmB,SAAmC,2BACrDC,yCACA;CACD,AAAmB,OAAyB,2BAAQC,mCAAiB;CACrE,AAAmB,aAA8C,CAAE;CACnE,AAAmB;CACnB,AAAmB,UAAkC,CAAE;CAEvD,cAAc;AACb,OAAK,oBAAoB,uCAAkB,qBAC1C,KAAK,IAAI,8BACT,KAAK,wBAAwB,CAC7B;CACD;CAED,AAAO,yBAAiD;AACvD,SAAO,CAAE;CACT;CAED,MAAa,gBACZC,eAC2B;AAC3B,MAAI,KAAK,WAAW,eACnB,QAAO,KAAK,WAAW;EAExB,MAAM,YAAY,MAAM,KAAK,sBAAsB,cAAc;AACjE,OAAK,WAAW,iBAAiB;AACjC,SAAO;CACP;CAED,MAAa,OACZC,YACAC,MACAC,QACkB;AAClB,aAAW,KAAK,UAAU;EAC1B,MAAM,QAAQ,KAAK,SAAS,YAAY,OAAO;EAE/C,MAAM,WAAW;GAChB,MAAM,KAAK;GACX,MAAM,KAAK;EACX;AAED,MAAI,KAAK,SACR,OAAM,MAAM,WAAW,KAAK,UAAU;GACrC;GACA,iBAAiB,EAChB,iBAAiB,KAAK,KACtB;EACD,EAAC;WACQ,KAAK,OAAO,EACtB,OAAM,MAAM,WAAW,MAAM,KAAK,aAAa,EAAE;GAChD;GACA,iBAAiB,EAChB,iBAAiB,KAAK,KACtB;EACD,EAAC;MAEF,OAAM,IAAI,MAAM;AAGjB,SAAO;CACP;CAED,MAAa,SAASF,YAAoBG,QAAmC;EAC5E,MAAM,QAAQ,KAAK,SAAS,YAAY,OAAO;EAE/C,MAAM,OAAO,MAAM,MAAM,UAAU,CAAC,MAAM,CAAC,UAAU;AACpD,OAAI,iBAAiB,MACpB,OAAM,IAAIC,kCAAkB,0BAA0B,EAAE,OAAO,MAAO;AAGvE,SAAM;EACN,EAAC;AAEF,OAAK,KAAK,mBACT,OAAM,IAAIA,kCAAkB;AAG7B,SAAO,8BAAW,KAAK,oBAAoB,KAAK,SAAS;CACzD;CAED,MAAa,OAAOJ,YAAoBG,QAAkC;AACzE,SAAO,MAAM,KAAK,SAAS,YAAY,OAAO,CAAC,QAAQ;CACvD;CAED,MAAa,OAAOH,YAAoBG,QAA+B;AACtE,MAAI;AACH,SAAM,KAAK,SAAS,YAAY,OAAO,CAAC,QAAQ;EAChD,SAAQ,OAAO;AACf,OAAI,iBAAiB,MACpB,OAAM,IAAIC,kCAAkB,uBAAuB,EAAE,OAAO,MAAO;AAEpE,SAAM;EACN;CACD;CAED,AAAO,SAASC,WAAmBF,QAAiC;AACnE,OAAK,KAAK,WAAW,WACpB,OAAM,IAAIC,mCACR,QAAQ,OAAO,2BAA2B,UAAU;AAIvD,SAAO,KAAK,WAAW,WAAW,mBAAmB,OAAO;CAC5D;CAED,AAAgB,UAAmC,yBAAM;EACxD,IAAI;EACJ,SAAS,YAAY;AACpB,QAAK,MAAM,UAAU,KAAK,OAAO,YAAY,EAAE;IAC9C,MAAM,gBAAgB,OAAO,KAAK,WAAW,KAAK,IAAI,CAAC,aAAa;AACpE,SAAK,IAAI,OAAO,oBAAoB,cAAc,KAAK;AAEvD,SAAK,KAAK,WAAW,eACpB,MAAK,WAAW,iBACf,MAAM,KAAK,sBAAsB,cAAc;AAGjD,SAAK,IAAI,MAAM,YAAY,OAAO,KAAK;GACvC;EACD;CACD,EAAC;CAEF,MAAgB,sBACfE,MAC2B;EAC3B,MAAM,YAAY,KAAK,kBAAkB,mBAAmB,KAAK;AAEjE,QAAM,KAAK,KAAK,SACf,CAAC,gBAAgB,UAAU,kBAAkB,EAAE,YAAa,EAAC,EAC7D,CAAC,GAAG,SAAU,EACd;AAED,SAAO;CACP;CAED,AAAU,WAAmB;AAC5B,SAAO,6BAAY;CACnB;AACD;;;;;;;;;;;;ACtKD,IAAa,oBAAb,MAAiD;CAChD,AAAgB,OAAO;CACvB,AAAgB,YAAY,CAACC,WAAyB;AACrD,SACE,KAAK;GACL,SAASC;GACT,KAAK;GACL,UAAU;EACV,EAAC,CACD,KAAKC,6BAAa;CACpB;AACD"}
package/dist/index.d.cts
CHANGED
@@ -1,24 +1,20 @@
- import
- import { Alepha, FileLike, Module, Static } from "@alepha/core";
- import * as _sinclair_typebox0 from "@sinclair/typebox";
+ import { Alepha, FileLike, HookDescriptor, Logger, Module, Static, TObject, TString } from "@alepha/core";
  import { BucketDescriptorProvider, FileStorageProvider } from "@alepha/bucket";
  import { DateTimeProvider } from "@alepha/datetime";
  import { BlobServiceClient, BlockBlobClient, ContainerClient, StoragePipelineOptions } from "@azure/storage-blob";

  //#region src/providers/AzureFileStorageProvider.d.ts
- declare const envSchema:
- AZ_STORAGE_CONNECTION_STRING:
+ declare const envSchema: TObject<{
+ AZ_STORAGE_CONNECTION_STRING: TString;
  }>;
  declare module "@alepha/core" {
  interface Env extends Partial<Static<typeof envSchema>> {}
  }
  declare class AzureFileStorageProvider implements FileStorageProvider {
- protected readonly log:
- protected readonly env:
-
-
- protected readonly bucketProvider: BucketDescriptorProvider;
- protected readonly dateTimeProvider: DateTimeProvider;
+ protected readonly log: Logger;
+ protected readonly env: Static<typeof envSchema>;
+ protected readonly bucket: BucketDescriptorProvider;
+ protected readonly time: DateTimeProvider;
  protected readonly containers: Record<string, ContainerClient>;
  protected readonly blobServiceClient: BlobServiceClient;
  protected readonly options: StoragePipelineOptions;
@@ -30,23 +26,24 @@ declare class AzureFileStorageProvider implements FileStorageProvider {
  exists(bucketName: string, fileId: string): Promise<boolean>;
  delete(bucketName: string, fileId: string): Promise<void>;
  getBlock(container: string, fileId: string): BlockBlobClient;
- readonly onStart:
+ readonly onStart: HookDescriptor<"start">;
  protected createContainerClient(name: string): Promise<ContainerClient>;
  protected createId(): string;
  }
  //#endregion
  //#region src/index.d.ts
+ // ---------------------------------------------------------------------------------------------------------------------
  /**
-
-
-
-
-
-
-
+ * Alepha Bucket Azure Module
+ *
+ * Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.
+ *
+ * @see {@link AzureFileStorageProvider}
+ * @module alepha.bucket.azure
+ */
  declare class AlephaBucketAzure implements Module {
  readonly name = "alepha.bucket.azure";
- readonly $services: (alepha: Alepha) =>
+ readonly $services: (alepha: Alepha) => void;
  }
  //#endregion
  export { AlephaBucketAzure, AzureFileStorageProvider };

package/dist/index.d.cts.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"index.d.cts","names":["envSchema: TObject<{\n\tAZ_STORAGE_CONNECTION_STRING: TString;\n}>","containerName: string","bucketName: string","file: FileLike","fileId?: string","fileId: string","container: string","name: string","alepha: Alepha"],"sources":["../src/providers/AzureFileStorageProvider.ts","../src/index.ts"],"sourcesContent":[],"mappings":";;;;;;cA2BMA,WAAW;gCACc;AAHF,CAAA,CAAA;eAI5B,cAAA,CAAA;EAAA,UAD8B,GAAA,SAQR,OARQ,CAQA,MARA,CAAA,OAQc,SARd,CAAA,CAAA,CAAA,CAAA;;AADd,cAYJ,wBAAA,YAAoC,mBAZhC,CAAA;EAAA,mBAAA,GAAA,EAaQ,MAbR;EAAA,mBAAA,GAAA,EAcQ,MAdR,CAAA,OAcsB,SAdtB,CAAA;EAAA,mBAS4B,MAAA,EAMjB,wBANiB;EAAA,mBAAd,IAAA,EASL,gBATK;EAAA,mBAAR,UAAA,EAUS,MAVT,CAAA,MAAA,EAUwB,eAVxB,CAAA;EAAA,mBAAA,iBAAA,EAWgB,iBAXhB;EAAA,mBAAA,OAAA,EAYM,sBAZN;EAGvB,WAAa,CAAA;EAAA,sBAAA,CAAA,CAAA,EAkBqB,sBAlBrB;EAAA,eACY,CAAA,aAAA,EAAA,MAAA,CAAA,EAuBrB,OAvBqB,CAuBb,eAvBa,CAAA;EAAA,MACc,CAAA,UAAA,EAAA,MAAA,EAAA,IAAA,EAiC/B,QAjC+B,EAAA,MAAA,CAAA,EAAA,MAAA,CAAA,EAmCnC,OAnCmC,CAAA,MAAA,CAAA;EAAA,QAAd,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAiEmC,OAjEnC,CAiE2C,QAjE3C,CAAA;EAAA,MACG,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAkF8B,OAlF9B,CAAA,OAAA,CAAA;EAAA,MAGF,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAmFgC,OAnFhC,CAAA,IAAA,CAAA;EAAA,QACqB,CAAA,SAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EA6FM,eA7FN;EAAA,SAAf,OAAA,EAuGN,cAvGM,CAAA,OAAA,CAAA;EAAA,UACO,qBAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAyHnC,OAzHmC,CAyH3B,eAzH2B,CAAA;EAAA,UACV,QAAA,CAAA,CAAA,EAAA,MAAA;;;;;;;;AAvBA;;;;AAEZ;AAAA,cCXJ,iBAAA,YAA6B,MDWzB,CAAA;EAAA,SAAA,IAAA,GAAA,qBAAA;EAAA,SAS4B,SAAA,EAAA,CAAA,MAAA,EClBP,MDkBO,EAAA,GAAA,IAAA"}
package/dist/index.d.ts
CHANGED
@@ -1,24 +1,20 @@
  import { BucketDescriptorProvider, FileStorageProvider } from "@alepha/bucket";
- import
- import { Alepha, FileLike, Module, Static } from "@alepha/core";
+ import { Alepha, FileLike, HookDescriptor, Logger, Module, Static, TObject, TString } from "@alepha/core";
  import { DateTimeProvider } from "@alepha/datetime";
  import { BlobServiceClient, BlockBlobClient, ContainerClient, StoragePipelineOptions } from "@azure/storage-blob";
- import * as _sinclair_typebox0 from "@sinclair/typebox";

  //#region src/providers/AzureFileStorageProvider.d.ts
- declare const envSchema:
- AZ_STORAGE_CONNECTION_STRING:
+ declare const envSchema: TObject<{
+ AZ_STORAGE_CONNECTION_STRING: TString;
  }>;
  declare module "@alepha/core" {
  interface Env extends Partial<Static<typeof envSchema>> {}
  }
  declare class AzureFileStorageProvider implements FileStorageProvider {
- protected readonly log:
- protected readonly env:
-
-
- protected readonly bucketProvider: BucketDescriptorProvider;
- protected readonly dateTimeProvider: DateTimeProvider;
+ protected readonly log: Logger;
+ protected readonly env: Static<typeof envSchema>;
+ protected readonly bucket: BucketDescriptorProvider;
+ protected readonly time: DateTimeProvider;
  protected readonly containers: Record<string, ContainerClient>;
  protected readonly blobServiceClient: BlobServiceClient;
  protected readonly options: StoragePipelineOptions;
@@ -30,23 +26,24 @@ declare class AzureFileStorageProvider implements FileStorageProvider {
  exists(bucketName: string, fileId: string): Promise<boolean>;
  delete(bucketName: string, fileId: string): Promise<void>;
  getBlock(container: string, fileId: string): BlockBlobClient;
- readonly onStart:
+ readonly onStart: HookDescriptor<"start">;
  protected createContainerClient(name: string): Promise<ContainerClient>;
  protected createId(): string;
  }
  //#endregion
  //#region src/index.d.ts
+ // ---------------------------------------------------------------------------------------------------------------------
  /**
-
-
-
-
-
-
-
+ * Alepha Bucket Azure Module
+ *
+ * Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.
+ *
+ * @see {@link AzureFileStorageProvider}
+ * @module alepha.bucket.azure
+ */
  declare class AlephaBucketAzure implements Module {
  readonly name = "alepha.bucket.azure";
- readonly $services: (alepha: Alepha) =>
+ readonly $services: (alepha: Alepha) => void;
  }
  //#endregion
  export { AlephaBucketAzure, AzureFileStorageProvider };

package/dist/index.d.ts.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"index.d.ts","names":["envSchema: TObject<{\n\tAZ_STORAGE_CONNECTION_STRING: TString;\n}>","containerName: string","bucketName: string","file: FileLike","fileId?: string","fileId: string","container: string","name: string","alepha: Alepha"],"sources":["../src/providers/AzureFileStorageProvider.ts","../src/index.ts"],"sourcesContent":[],"mappings":";;;;;;cA2BMA,WAAW;gCACc;AAHF,CAAA,CAAA;eAI5B,cAAA,CAAA;EAAA,UAD8B,GAAA,SAQR,OARQ,CAQA,MARA,CAAA,OAQc,SARd,CAAA,CAAA,CAAA,CAAA;;AADd,cAYJ,wBAAA,YAAoC,mBAZhC,CAAA;EAAA,mBAAA,GAAA,EAaQ,MAbR;EAAA,mBAAA,GAAA,EAcQ,MAdR,CAAA,OAcsB,SAdtB,CAAA;EAAA,mBAS4B,MAAA,EAMjB,wBANiB;EAAA,mBAAd,IAAA,EASL,gBATK;EAAA,mBAAR,UAAA,EAUS,MAVT,CAAA,MAAA,EAUwB,eAVxB,CAAA;EAAA,mBAAA,iBAAA,EAWgB,iBAXhB;EAAA,mBAAA,OAAA,EAYM,sBAZN;EAGvB,WAAa,CAAA;EAAA,sBAAA,CAAA,CAAA,EAkBqB,sBAlBrB;EAAA,eACY,CAAA,aAAA,EAAA,MAAA,CAAA,EAuBrB,OAvBqB,CAuBb,eAvBa,CAAA;EAAA,MACc,CAAA,UAAA,EAAA,MAAA,EAAA,IAAA,EAiC/B,QAjC+B,EAAA,MAAA,CAAA,EAAA,MAAA,CAAA,EAmCnC,OAnCmC,CAAA,MAAA,CAAA;EAAA,QAAd,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAiEmC,OAjEnC,CAiE2C,QAjE3C,CAAA;EAAA,MACG,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAkF8B,OAlF9B,CAAA,OAAA,CAAA;EAAA,MAGF,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAmFgC,OAnFhC,CAAA,IAAA,CAAA;EAAA,QACqB,CAAA,SAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EA6FM,eA7FN;EAAA,SAAf,OAAA,EAuGN,cAvGM,CAAA,OAAA,CAAA;EAAA,UACO,qBAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAyHnC,OAzHmC,CAyH3B,eAzH2B,CAAA;EAAA,UACV,QAAA,CAAA,CAAA,EAAA,MAAA;;;;;;;;AAvBA;;;;AAEZ;AAAA,cCXJ,iBAAA,YAA6B,MDWzB,CAAA;EAAA,SAAA,IAAA,GAAA,qBAAA;EAAA,SAS4B,SAAA,EAAA,CAAA,MAAA,EClBP,MDkBO,EAAA,GAAA,IAAA"}
package/dist/index.js
CHANGED
@@ -1,20 +1,17 @@
  import { AlephaBucket, BucketDescriptorProvider, FileNotFoundError, FileStorageProvider } from "@alepha/bucket";
  import { randomUUID } from "node:crypto";
- import { $
+ import { $hook, $inject, $logger, t } from "@alepha/core";
  import { DateTimeProvider } from "@alepha/datetime";
- import {
+ import { createFile } from "@alepha/file";
  import { BlobServiceClient } from "@azure/storage-blob";

  //#region src/providers/AzureFileStorageProvider.ts
- const envSchema = t.object({ AZ_STORAGE_CONNECTION_STRING: t.string({
- size: "long",
- default: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
- }) });
+ const envSchema = t.object({ AZ_STORAGE_CONNECTION_STRING: t.string({ size: "long" }) });
  var AzureFileStorageProvider = class {
  log = $logger();
- env = $
-
-
+ env = $inject(envSchema);
+ bucket = $inject(BucketDescriptorProvider);
+ time = $inject(DateTimeProvider);
  containers = {};
  blobServiceClient;
  options = {};
@@ -30,20 +27,20 @@ var AzureFileStorageProvider = class {
  this.containers[containerName] = container;
  return container;
  }
- async upload(bucketName, file
+ async upload(bucketName, file, fileId) {
  fileId ??= this.createId();
  const block = this.getBlock(bucketName, fileId);
  const metadata = {
- name: file
- type: file
+ name: file.name,
+ type: file.type
  };
- if (file
+ if (file.filepath) await block.uploadFile(file.filepath, {
  metadata,
- blobHTTPHeaders: { blobContentType: file
+ blobHTTPHeaders: { blobContentType: file.type }
  });
- else if (file
+ else if (file.size > 0) await block.uploadData(await file.arrayBuffer(), {
  metadata,
- blobHTTPHeaders: { blobContentType: file
+ blobHTTPHeaders: { blobContentType: file.type }
  });
  else throw new Error("Raw stream upload is not supported yet");
  return fileId;
@@ -55,7 +52,7 @@ var AzureFileStorageProvider = class {
  throw error;
  });
  if (!blob.readableStreamBody) throw new FileNotFoundError("File not found - empty stream body");
- return
+ return createFile(blob.readableStreamBody, blob.metadata);
  }
  async exists(bucketName, fileId) {
  return await this.getBlock(bucketName, fileId).exists();
@@ -73,9 +70,9 @@ var AzureFileStorageProvider = class {
  return this.containers[container].getBlockBlobClient(fileId);
  }
  onStart = $hook({
-
+ on: "start",
  handler: async () => {
- for (const bucket of this.
+ for (const bucket of this.bucket.getBuckets()) {
  const containerName = bucket.name.replaceAll("/", "-").toLowerCase();
  this.log.debug(`Prepare container ${containerName}...`);
  if (!this.containers[containerName]) this.containers[containerName] = await this.createContainerClient(containerName);
@@ -85,7 +82,7 @@ var AzureFileStorageProvider = class {
  });
  async createContainerClient(name) {
  const container = this.blobServiceClient.getContainerClient(name);
- await this.
+ await this.time.deadline((abortSignal) => container.createIfNotExists({ abortSignal }), [5, "seconds"]);
  return container;
  }
  createId() {
@@ -105,11 +102,13 @@ var AzureFileStorageProvider = class {
  */
  var AlephaBucketAzure = class {
  name = "alepha.bucket.azure";
- $services = (alepha) =>
-
-
-
-
+ $services = (alepha) => {
+ alepha.with({
+ provide: FileStorageProvider,
+ use: AzureFileStorageProvider,
+ optional: true
+ }).with(AlephaBucket);
+ };
  };

  //#endregion
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","names":["containerName: string","bucketName: string","file: FileLike","fileId?: string","
+
{"version":3,"file":"index.js","names":["envSchema: TObject<{\n\tAZ_STORAGE_CONNECTION_STRING: TString;\n}>","containerName: string","bucketName: string","file: FileLike","fileId?: string","fileId: string","container: string","name: string","alepha: Alepha"],"sources":["../src/providers/AzureFileStorageProvider.ts","../src/index.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport {\n\tBucketDescriptorProvider,\n\tFileNotFoundError,\n\ttype FileStorageProvider,\n} from \"@alepha/bucket\";\nimport {\n\t$hook,\n\t$inject,\n\t$logger,\n\ttype FileLike,\n\ttype HookDescriptor,\n\ttype Logger,\n\ttype Static,\n\ttype TObject,\n\ttype TString,\n\tt,\n} from \"@alepha/core\";\nimport { DateTimeProvider } from \"@alepha/datetime\";\nimport { createFile } from \"@alepha/file\";\nimport {\n\tBlobServiceClient,\n\ttype BlockBlobClient,\n\ttype ContainerClient,\n\ttype StoragePipelineOptions,\n} from \"@azure/storage-blob\";\n\nconst envSchema: TObject<{\n\tAZ_STORAGE_CONNECTION_STRING: TString;\n}> = t.object({\n\tAZ_STORAGE_CONNECTION_STRING: t.string({\n\t\tsize: \"long\",\n\t}),\n});\n\ndeclare module \"@alepha/core\" {\n\tinterface Env extends Partial<Static<typeof envSchema>> {}\n}\n\nexport class AzureFileStorageProvider implements FileStorageProvider {\n\tprotected readonly log: Logger = $logger();\n\tprotected readonly env: Static<typeof envSchema> = $inject(envSchema);\n\tprotected readonly bucket: BucketDescriptorProvider = $inject(\n\t\tBucketDescriptorProvider,\n\t);\n\tprotected readonly time: DateTimeProvider = $inject(DateTimeProvider);\n\tprotected readonly containers: Record<string, ContainerClient> = {};\n\tprotected readonly blobServiceClient: BlobServiceClient;\n\tprotected readonly options: StoragePipelineOptions = {};\n\n\tconstructor() {\n\t\tthis.blobServiceClient = BlobServiceClient.fromConnectionString(\n\t\t\tthis.env.AZ_STORAGE_CONNECTION_STRING,\n\t\t\tthis.storagePipelineOptions(),\n\t\t);\n\t}\n\n\tpublic storagePipelineOptions(): StoragePipelineOptions {\n\t\treturn {};\n\t}\n\n\tpublic async createContainer(\n\t\tcontainerName: string,\n\t): Promise<ContainerClient> {\n\t\tif (this.containers[containerName]) {\n\t\t\treturn this.containers[containerName];\n\t\t}\n\t\tconst container = await this.createContainerClient(containerName);\n\t\tthis.containers[containerName] = container;\n\t\treturn container;\n\t}\n\n\tpublic async upload(\n\t\tbucketName: string,\n\t\tfile: FileLike,\n\t\tfileId?: string,\n\t): Promise<string> {\n\t\tfileId ??= this.createId();\n\t\tconst block = this.getBlock(bucketName, fileId);\n\n\t\tconst metadata = {\n\t\t\tname: file.name,\n\t\t\ttype: file.type,\n\t\t};\n\n\t\tif (file.filepath) {\n\t\t\tawait block.uploadFile(file.filepath, {\n\t\t\t\tmetadata,\n\t\t\t\tblobHTTPHeaders: {\n\t\t\t\t\tblobContentType: file.type,\n\t\t\t\t},\n\t\t\t});\n\t\t} else if (file.size > 0) {\n\t\t\tawait block.uploadData(await file.arrayBuffer(), {\n\t\t\t\tmetadata,\n\t\t\t\tblobHTTPHeaders: {\n\t\t\t\t\tblobContentType: file.type,\n\t\t\t\t},\n\t\t\t});\n\t\t} else {\n\t\t\tthrow new Error(\"Raw stream upload is not supported yet\");\n\t\t}\n\n\t\treturn fileId;\n\t}\n\n\tpublic async download(bucketName: string, fileId: string): Promise<FileLike> {\n\t\tconst block = this.getBlock(bucketName, fileId);\n\n\t\tconst blob = await block.download().catch((error) => {\n\t\t\tif (error instanceof Error) {\n\t\t\t\tthrow new FileNotFoundError(\"Error downloading file\", { cause: error });\n\t\t\t}\n\n\t\t\tthrow error;\n\t\t});\n\n\t\tif 
(!blob.readableStreamBody) {\n\t\t\tthrow new FileNotFoundError(\"File not found - empty stream body\");\n\t\t}\n\n\t\treturn createFile(blob.readableStreamBody, blob.metadata);\n\t}\n\n\tpublic async exists(bucketName: string, fileId: string): Promise<boolean> {\n\t\treturn await this.getBlock(bucketName, fileId).exists();\n\t}\n\n\tpublic async delete(bucketName: string, fileId: string): Promise<void> {\n\t\ttry {\n\t\t\tawait this.getBlock(bucketName, fileId).delete();\n\t\t} catch (error) {\n\t\t\tif (error instanceof Error) {\n\t\t\t\tthrow new FileNotFoundError(\"Error deleting file\", { cause: error });\n\t\t\t}\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\tpublic getBlock(container: string, fileId: string): BlockBlobClient {\n\t\tif (!this.containers[container]) {\n\t\t\tthrow new FileNotFoundError(\n\t\t\t\t`File '${fileId}' not found - container '${container}' does not exists`,\n\t\t\t);\n\t\t}\n\n\t\treturn this.containers[container].getBlockBlobClient(fileId);\n\t}\n\n\tpublic readonly onStart: HookDescriptor<\"start\"> = $hook({\n\t\ton: \"start\",\n\t\thandler: async () => {\n\t\t\tfor (const bucket of this.bucket.getBuckets()) {\n\t\t\t\tconst containerName = bucket.name.replaceAll(\"/\", \"-\").toLowerCase();\n\t\t\t\tthis.log.debug(`Prepare container ${containerName}...`);\n\n\t\t\t\tif (!this.containers[containerName]) {\n\t\t\t\t\tthis.containers[containerName] =\n\t\t\t\t\t\tawait this.createContainerClient(containerName);\n\t\t\t\t}\n\n\t\t\t\tthis.log.info(`Container ${bucket} OK`);\n\t\t\t}\n\t\t},\n\t});\n\n\tprotected async createContainerClient(\n\t\tname: string,\n\t): Promise<ContainerClient> {\n\t\tconst container = this.blobServiceClient.getContainerClient(name);\n\n\t\tawait this.time.deadline(\n\t\t\t(abortSignal) => container.createIfNotExists({ abortSignal }),\n\t\t\t[5, \"seconds\"],\n\t\t);\n\n\t\treturn container;\n\t}\n\n\tprotected createId(): string {\n\t\treturn randomUUID();\n\t}\n}\n","import { AlephaBucket, FileStorageProvider } from \"@alepha/bucket\";\nimport type { Alepha, Module } from \"@alepha/core\";\nimport { AzureFileStorageProvider } from \"./providers/AzureFileStorageProvider.ts\";\n\nexport * from \"./providers/AzureFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Alepha Bucket Azure Module\n *\n * Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.\n *\n * @see {@link AzureFileStorageProvider}\n * @module alepha.bucket.azure\n */\nexport class AlephaBucketAzure implements Module {\n\tpublic readonly name = \"alepha.bucket.azure\";\n\tpublic readonly $services = (alepha: Alepha): void => {\n\t\talepha\n\t\t\t.with({\n\t\t\t\tprovide: FileStorageProvider,\n\t\t\t\tuse: AzureFileStorageProvider,\n\t\t\t\toptional: 
true,\n\t\t\t})\n\t\t\t.with(AlephaBucket);\n\t};\n}\n"],"mappings":";;;;;;;;AA2BA,MAAMA,YAED,EAAE,OAAO,EACb,8BAA8B,EAAE,OAAO,EACtC,MAAM,OACN,EAAC,CACF,EAAC;AAMF,IAAa,2BAAb,MAAqE;CACpE,AAAmB,MAAc,SAAS;CAC1C,AAAmB,MAAgC,QAAQ,UAAU;CACrE,AAAmB,SAAmC,QACrD,yBACA;CACD,AAAmB,OAAyB,QAAQ,iBAAiB;CACrE,AAAmB,aAA8C,CAAE;CACnE,AAAmB;CACnB,AAAmB,UAAkC,CAAE;CAEvD,cAAc;AACb,OAAK,oBAAoB,kBAAkB,qBAC1C,KAAK,IAAI,8BACT,KAAK,wBAAwB,CAC7B;CACD;CAED,AAAO,yBAAiD;AACvD,SAAO,CAAE;CACT;CAED,MAAa,gBACZC,eAC2B;AAC3B,MAAI,KAAK,WAAW,eACnB,QAAO,KAAK,WAAW;EAExB,MAAM,YAAY,MAAM,KAAK,sBAAsB,cAAc;AACjE,OAAK,WAAW,iBAAiB;AACjC,SAAO;CACP;CAED,MAAa,OACZC,YACAC,MACAC,QACkB;AAClB,aAAW,KAAK,UAAU;EAC1B,MAAM,QAAQ,KAAK,SAAS,YAAY,OAAO;EAE/C,MAAM,WAAW;GAChB,MAAM,KAAK;GACX,MAAM,KAAK;EACX;AAED,MAAI,KAAK,SACR,OAAM,MAAM,WAAW,KAAK,UAAU;GACrC;GACA,iBAAiB,EAChB,iBAAiB,KAAK,KACtB;EACD,EAAC;WACQ,KAAK,OAAO,EACtB,OAAM,MAAM,WAAW,MAAM,KAAK,aAAa,EAAE;GAChD;GACA,iBAAiB,EAChB,iBAAiB,KAAK,KACtB;EACD,EAAC;MAEF,OAAM,IAAI,MAAM;AAGjB,SAAO;CACP;CAED,MAAa,SAASF,YAAoBG,QAAmC;EAC5E,MAAM,QAAQ,KAAK,SAAS,YAAY,OAAO;EAE/C,MAAM,OAAO,MAAM,MAAM,UAAU,CAAC,MAAM,CAAC,UAAU;AACpD,OAAI,iBAAiB,MACpB,OAAM,IAAI,kBAAkB,0BAA0B,EAAE,OAAO,MAAO;AAGvE,SAAM;EACN,EAAC;AAEF,OAAK,KAAK,mBACT,OAAM,IAAI,kBAAkB;AAG7B,SAAO,WAAW,KAAK,oBAAoB,KAAK,SAAS;CACzD;CAED,MAAa,OAAOH,YAAoBG,QAAkC;AACzE,SAAO,MAAM,KAAK,SAAS,YAAY,OAAO,CAAC,QAAQ;CACvD;CAED,MAAa,OAAOH,YAAoBG,QAA+B;AACtE,MAAI;AACH,SAAM,KAAK,SAAS,YAAY,OAAO,CAAC,QAAQ;EAChD,SAAQ,OAAO;AACf,OAAI,iBAAiB,MACpB,OAAM,IAAI,kBAAkB,uBAAuB,EAAE,OAAO,MAAO;AAEpE,SAAM;EACN;CACD;CAED,AAAO,SAASC,WAAmBD,QAAiC;AACnE,OAAK,KAAK,WAAW,WACpB,OAAM,IAAI,mBACR,QAAQ,OAAO,2BAA2B,UAAU;AAIvD,SAAO,KAAK,WAAW,WAAW,mBAAmB,OAAO;CAC5D;CAED,AAAgB,UAAmC,MAAM;EACxD,IAAI;EACJ,SAAS,YAAY;AACpB,QAAK,MAAM,UAAU,KAAK,OAAO,YAAY,EAAE;IAC9C,MAAM,gBAAgB,OAAO,KAAK,WAAW,KAAK,IAAI,CAAC,aAAa;AACpE,SAAK,IAAI,OAAO,oBAAoB,cAAc,KAAK;AAEvD,SAAK,KAAK,WAAW,eACpB,MAAK,WAAW,iBACf,MAAM,KAAK,sBAAsB,cAAc;AAGjD,SAAK,IAAI,MAAM,YAAY,OAAO,KAAK;GACvC;EACD;CACD,EAAC;CAEF,MAAgB,sBACfE,MAC2B;EAC3B,MAAM,YAAY,KAAK,kBAAkB,mBAAmB,KAAK;AAEjE,QAAM,KAAK,KAAK,SACf,CAAC,gBAAgB,UAAU,kBAAkB,EAAE,YAAa,EAAC,EAC7D,CAAC,GAAG,SAAU,EACd;AAED,SAAO;CACP;CAED,AAAU,WAAmB;AAC5B,SAAO,YAAY;CACnB;AACD;;;;;;;;;;;;ACtKD,IAAa,oBAAb,MAAiD;CAChD,AAAgB,OAAO;CACvB,AAAgB,YAAY,CAACC,WAAyB;AACrD,SACE,KAAK;GACL,SAAS;GACT,KAAK;GACL,UAAU;EACV,EAAC,CACD,KAAK,aAAa;CACpB;AACD"}
package/package.json
CHANGED
@@ -10,7 +10,7 @@
  "storage-blob"
  ],
  "author": "Feunard",
- "version": "0.
+ "version": "0.8.0",
  "type": "module",
  "engines": {
  "node": ">=22.0.0"
@@ -23,10 +23,10 @@
  "src"
  ],
  "dependencies": {
- "@alepha/bucket": "0.
- "@alepha/core": "0.
- "@alepha/datetime": "0.
- "@alepha/file": "0.
+ "@alepha/bucket": "0.8.0",
+ "@alepha/core": "0.8.0",
+ "@alepha/datetime": "0.8.0",
+ "@alepha/file": "0.8.0",
  "@azure/storage-blob": "^12.27.0"
  },
  "devDependencies": {
package/src/index.ts
CHANGED
@@ -16,7 +16,7 @@ export * from "./providers/AzureFileStorageProvider.ts";
  */
  export class AlephaBucketAzure implements Module {
  public readonly name = "alepha.bucket.azure";
- public readonly $services = (alepha: Alepha) =>
+ public readonly $services = (alepha: Alepha): void => {
  alepha
  .with({
  provide: FileStorageProvider,
@@ -24,4 +24,5 @@ export class AlephaBucketAzure implements Module {
  optional: true,
  })
  .with(AlephaBucket);
+ };
  }
package/src/providers/AzureFileStorageProvider.ts
CHANGED

@@ -4,10 +4,20 @@ import {
  FileNotFoundError,
  type FileStorageProvider,
  } from "@alepha/bucket";
- import
-
+ import {
+ $hook,
+ $inject,
+ $logger,
+ type FileLike,
+ type HookDescriptor,
+ type Logger,
+ type Static,
+ type TObject,
+ type TString,
+ t,
+ } from "@alepha/core";
  import { DateTimeProvider } from "@alepha/datetime";
- import {
+ import { createFile } from "@alepha/file";
  import {
  BlobServiceClient,
  type BlockBlobClient,
@@ -15,14 +25,11 @@ import {
  type StoragePipelineOptions,
  } from "@azure/storage-blob";

- const envSchema
+ const envSchema: TObject<{
+ AZ_STORAGE_CONNECTION_STRING: TString;
+ }> = t.object({
  AZ_STORAGE_CONNECTION_STRING: t.string({
  size: "long",
- default:
- "DefaultEndpointsProtocol=http;" +
- "AccountName=devstoreaccount1;" +
- "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;" +
- "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
  }),
  });

@@ -31,10 +38,12 @@ declare module "@alepha/core" {
  }

  export class AzureFileStorageProvider implements FileStorageProvider {
- protected readonly log = $logger();
- protected readonly env = $
- protected readonly
-
+ protected readonly log: Logger = $logger();
+ protected readonly env: Static<typeof envSchema> = $inject(envSchema);
+ protected readonly bucket: BucketDescriptorProvider = $inject(
+ BucketDescriptorProvider,
+ );
+ protected readonly time: DateTimeProvider = $inject(DateTimeProvider);
  protected readonly containers: Record<string, ContainerClient> = {};
  protected readonly blobServiceClient: BlobServiceClient;
  protected readonly options: StoragePipelineOptions = {};
@@ -110,7 +119,7 @@ export class AzureFileStorageProvider implements FileStorageProvider {
  throw new FileNotFoundError("File not found - empty stream body");
  }

- return
+ return createFile(blob.readableStreamBody, blob.metadata);
  }

  public async exists(bucketName: string, fileId: string): Promise<boolean> {
@@ -138,10 +147,10 @@ export class AzureFileStorageProvider implements FileStorageProvider {
  return this.containers[container].getBlockBlobClient(fileId);
  }

- public readonly onStart = $hook({
-
+ public readonly onStart: HookDescriptor<"start"> = $hook({
+ on: "start",
  handler: async () => {
- for (const bucket of this.
+ for (const bucket of this.bucket.getBuckets()) {
  const containerName = bucket.name.replaceAll("/", "-").toLowerCase();
  this.log.debug(`Prepare container ${containerName}...`);

@@ -160,7 +169,7 @@ export class AzureFileStorageProvider implements FileStorageProvider {
  ): Promise<ContainerClient> {
  const container = this.blobServiceClient.getContainerClient(name);

- await this.
+ await this.time.deadline(
  (abortSignal) => container.createIfNotExists({ abortSignal }),
  [5, "seconds"],
  );
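The source embedded in the updated source maps also shows that the provider builds its BlobServiceClient from storagePipelineOptions(), which returns an empty object by default. A short sketch of customizing the pipeline by subclassing; the subclass name and retry values are illustrative only and not part of this package.

import { AzureFileStorageProvider } from "@alepha/bucket-azure";
import type { StoragePipelineOptions } from "@azure/storage-blob";

// Hypothetical subclass: only illustrates the storagePipelineOptions() override point.
class RetryingAzureFileStorageProvider extends AzureFileStorageProvider {
  public override storagePipelineOptions(): StoragePipelineOptions {
    // retryOptions is a standard @azure/storage-blob pipeline setting; the values are examples.
    return { retryOptions: { maxTries: 5 } };
  }
}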