@alepha/bucket-azure 0.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +21 -0
- package/dist/index.cjs +143 -0
- package/dist/index.d.ts +103 -0
- package/dist/index.js +140 -0
- package/package.json +49 -0
- package/src/index.ts +22 -0
- package/src/providers/AzureFileStorageProvider.ts +174 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Feunard
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
# alepha/bucket/azure
|
|
2
|
+
|
|
3
|
+
```ts
|
|
4
|
+
import { $bucket } from "alepha/bucket";
|
|
5
|
+
import { AzureBucketModule } from "alepha/bucket/azure";
|
|
6
|
+
import { Alepha, run } from "alepha";
|
|
7
|
+
|
|
8
|
+
class App {
|
|
9
|
+
images = $bucket()
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
const alepha = Alepha.create({
|
|
13
|
+
env: {
|
|
14
|
+
AZ_STORAGE_CONNECTION_STRING: "",
|
|
15
|
+
},
|
|
16
|
+
})
|
|
17
|
+
.with(AzureBucketModule)
|
|
18
|
+
.with(App);
|
|
19
|
+
|
|
20
|
+
run(alepha);
|
|
21
|
+
```
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var bucket = require('@alepha/bucket');
|
|
4
|
+
var core = require('@alepha/core');
|
|
5
|
+
var node_crypto = require('node:crypto');
|
|
6
|
+
var datetime = require('@alepha/datetime');
|
|
7
|
+
var file = require('@alepha/file');
|
|
8
|
+
var storageBlob = require('@azure/storage-blob');
|
|
9
|
+
|
|
10
|
+
const envSchema = core.t.object({
|
|
11
|
+
AZ_STORAGE_CONNECTION_STRING: core.t.string({
|
|
12
|
+
size: "long",
|
|
13
|
+
default: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
|
|
14
|
+
})
|
|
15
|
+
});
|
|
16
|
+
/**
 * Azure Blob Storage implementation of the Alepha FileStorageProvider.
 *
 * Each declared bucket is backed by one Azure container; the container name
 * is derived from the bucket name by replacing "/" with "-" and lower-casing
 * (Azure container names reject slashes and uppercase letters). Container
 * clients are created on the "start" hook and cached in `containers`.
 */
class AzureFileStorageProvider {
  log = core.$logger();
  env = core.$env(envSchema);
  bucketProvider = core.$inject(bucket.BucketDescriptorProvider);
  dateTimeProvider = core.$inject(datetime.DateTimeProvider);
  // Cache of ContainerClient instances, keyed by container name.
  containers = {};
  blobServiceClient;
  options = {};
  constructor() {
    this.blobServiceClient = storageBlob.BlobServiceClient.fromConnectionString(
      this.env.AZ_STORAGE_CONNECTION_STRING,
      this.storagePipelineOptions()
    );
  }
  // Override point for custom pipeline options (retry policy, proxy, ...).
  storagePipelineOptions() {
    return {};
  }
  // Returns the cached container client, creating the container on first use.
  async createContainer(containerName) {
    if (this.containers[containerName]) {
      return this.containers[containerName];
    }
    const container = await this.createContainerClient(containerName);
    this.containers[containerName] = container;
    return container;
  }
  /**
   * Uploads a file as a block blob and returns its id.
   * Generates a random UUID when no fileId is provided. The file name/type
   * are stored as blob metadata so downloads can rebuild a FileLike with the
   * same attributes.
   */
  async upload(bucketName, file2, fileId) {
    fileId ??= this.createId();
    const block = this.getBlock(bucketName, fileId);
    const metadata = {
      name: file2.name,
      type: file2.type
    };
    if (file2.filepath) {
      await block.uploadFile(file2.filepath, {
        metadata,
        blobHTTPHeaders: {
          blobContentType: file2.type
        }
      });
    } else if (file2.size > 0) {
      await block.uploadData(await file2.arrayBuffer(), {
        metadata,
        blobHTTPHeaders: {
          blobContentType: file2.type
        }
      });
    } else {
      throw new Error("Raw stream upload is not supported yet");
    }
    return fileId;
  }
  // Downloads a blob as a FileLike built from the response stream + metadata.
  // NOTE(review): every SDK failure (auth, network, ...) is surfaced as
  // FileNotFoundError here, not only 404s.
  async download(bucketName, fileId) {
    const block = this.getBlock(bucketName, fileId);
    const blob = await block.download().catch((error) => {
      if (error instanceof Error) {
        throw new bucket.FileNotFoundError("Error downloading file", { cause: error });
      }
      throw error;
    });
    if (!blob.readableStreamBody) {
      throw new bucket.FileNotFoundError("File not found - empty stream body");
    }
    return file.file(blob.readableStreamBody, blob.metadata);
  }
  async exists(bucketName, fileId) {
    return await this.getBlock(bucketName, fileId).exists();
  }
  async delete(bucketName, fileId) {
    try {
      await this.getBlock(bucketName, fileId).delete();
    } catch (error) {
      if (error instanceof Error) {
        throw new bucket.FileNotFoundError("Error deleting file", { cause: error });
      }
      throw error;
    }
  }
  // Resolves a BlockBlobClient; the container must have been prepared first
  // (by the "start" hook or createContainer), otherwise FileNotFoundError.
  getBlock(container, fileId) {
    if (!this.containers[container]) {
      throw new bucket.FileNotFoundError(
        `File '${fileId}' not found - container '${container}' does not exist`
      );
    }
    return this.containers[container].getBlockBlobClient(fileId);
  }
  onStart = core.$hook({
    name: "start",
    handler: async () => {
      // Loop variable renamed from `bucket`, which shadowed the required
      // `@alepha/bucket` module binding in this file.
      for (const descriptor of this.bucketProvider.getBuckets()) {
        const containerName = descriptor.name.replaceAll("/", "-").toLowerCase();
        this.log.debug(`Prepare container ${containerName}...`);
        if (!this.containers[containerName]) {
          this.containers[containerName] = await this.createContainerClient(containerName);
        }
        // Fix: previously interpolated the descriptor object itself, which
        // logged "Container [object Object] OK".
        this.log.info(`Container ${containerName} OK`);
      }
    }
  });
  // Creates (if needed) and returns a container client, bounded by a 5 second
  // deadline so startup cannot hang on an unreachable storage endpoint.
  async createContainerClient(name) {
    const container = this.blobServiceClient.getContainerClient(name);
    await this.dateTimeProvider.deadline(
      (abortSignal) => container.createIfNotExists({ abortSignal }),
      { seconds: 5 }
    );
    return container;
  }
  createId() {
    return node_crypto.randomUUID();
  }
}
|
|
126
|
+
|
|
127
|
+
/**
 * Alepha module wiring Azure Blob Storage into the bucket system:
 * binds AzureFileStorageProvider as the default FileStorageProvider
 * implementation and registers the base BucketModule.
 */
class AzureBucketModule {
  name = "alepha/bucket/azure";
  alepha = core.$inject(core.Alepha);
  constructor() {
    const azureStorageBinding = {
      default: true,
      provide: bucket.FileStorageProvider,
      use: AzureFileStorageProvider
    };
    this.alepha.with(azureStorageBinding, bucket.BucketModule);
  }
}
|
|
141
|
+
|
|
142
|
+
exports.AzureBucketModule = AzureBucketModule;
|
|
143
|
+
exports.AzureFileStorageProvider = AzureFileStorageProvider;
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
// Type declarations for @alepha/bucket-azure (bundled by the build tool).
// NOTE(review): the TypeBox-style helper types below (TString, TSchema, ...)
// appear to be copies inlined into this declaration file by the bundler —
// they are not part of this package's public surface.
import * as _alepha_core from '@alepha/core';
import { Static, FileLike, Module, Alepha } from '@alepha/core';
import { FileStorageProvider, BucketDescriptorProvider } from '@alepha/bucket';
import { DateTimeProvider } from '@alepha/datetime';
import { ContainerClient, BlobServiceClient, StoragePipelineOptions, BlockBlobClient } from '@azure/storage-blob';

/** Symbol key applied to readonly types */
declare const ReadonlyKind: unique symbol;
/** Symbol key applied to optional types */
declare const OptionalKind: unique symbol;
/** Symbol key applied to types */
declare const Hint: unique symbol;
/** Symbol key applied to types */
declare const Kind: unique symbol;

type StringFormatOption = 'date-time' | 'time' | 'date' | 'email' | 'idn-email' | 'hostname' | 'idn-hostname' | 'ipv4' | 'ipv6' | 'uri' | 'uri-reference' | 'iri' | 'uuid' | 'iri-reference' | 'uri-template' | 'json-pointer' | 'relative-json-pointer' | 'regex' | ({} & string);
type StringContentEncodingOption = '7bit' | '8bit' | 'binary' | 'quoted-printable' | 'base64' | ({} & string);
interface StringOptions extends SchemaOptions {
  /** The maximum string length */
  maxLength?: number;
  /** The minimum string length */
  minLength?: number;
  /** A regular expression pattern this string should match */
  pattern?: string;
  /** A format this string should match */
  format?: StringFormatOption;
  /** The content encoding for this string */
  contentEncoding?: StringContentEncodingOption;
  /** The content media type for this string */
  contentMediaType?: string;
}
interface TString extends TSchema, StringOptions {
  [Kind]: 'String';
  static: string;
  type: 'string';
}

interface SchemaOptions {
  $schema?: string;
  /** Id for this schema */
  $id?: string;
  /** Title of this schema */
  title?: string;
  /** Description of this schema */
  description?: string;
  /** Default value for this schema */
  default?: any;
  /** Example values matching this schema */
  examples?: any;
  /** Optional annotation for readOnly */
  readOnly?: boolean;
  /** Optional annotation for writeOnly */
  writeOnly?: boolean;
  [prop: string]: any;
}
interface TKind {
  [Kind]: string;
}
interface TSchema extends TKind, SchemaOptions {
  [ReadonlyKind]?: string;
  [OptionalKind]?: string;
  [Hint]?: string;
  params: unknown[];
  static: unknown;
}

// Environment schema consumed by the provider (AZ_STORAGE_CONNECTION_STRING).
declare const envSchema: _alepha_core.TObject<{
  AZ_STORAGE_CONNECTION_STRING: TString;
}>;
// Merges the provider's env keys into Alepha's global Env type.
declare module "@alepha/core" {
  interface Env extends Partial<Static<typeof envSchema>> {
  }
}
// Azure Blob Storage implementation of FileStorageProvider.
declare class AzureFileStorageProvider implements FileStorageProvider {
  protected readonly log: _alepha_core.Logger;
  protected readonly env: {
    AZ_STORAGE_CONNECTION_STRING: string;
  };
  protected readonly bucketProvider: BucketDescriptorProvider;
  protected readonly dateTimeProvider: DateTimeProvider;
  protected readonly containers: Record<string, ContainerClient>;
  protected readonly blobServiceClient: BlobServiceClient;
  protected readonly options: StoragePipelineOptions;
  constructor();
  storagePipelineOptions(): StoragePipelineOptions;
  createContainer(containerName: string): Promise<ContainerClient>;
  upload(bucketName: string, file: FileLike, fileId?: string): Promise<string>;
  download(bucketName: string, fileId: string): Promise<FileLike>;
  exists(bucketName: string, fileId: string): Promise<boolean>;
  delete(bucketName: string, fileId: string): Promise<void>;
  getBlock(container: string, fileId: string): BlockBlobClient;
  readonly onStart: _alepha_core.HookDescriptor<"start">;
  protected createContainerClient(name: string): Promise<ContainerClient>;
  protected createId(): string;
}

// Module that registers AzureFileStorageProvider as the default storage backend.
declare class AzureBucketModule implements Module {
  readonly name = "alepha/bucket/azure";
  protected readonly alepha: Alepha;
  constructor();
}

export { AzureBucketModule, AzureFileStorageProvider };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import { BucketDescriptorProvider, FileNotFoundError, FileStorageProvider, BucketModule } from '@alepha/bucket';
|
|
2
|
+
import { t, $logger, $env, $inject, $hook, Alepha } from '@alepha/core';
|
|
3
|
+
import { randomUUID } from 'node:crypto';
|
|
4
|
+
import { DateTimeProvider } from '@alepha/datetime';
|
|
5
|
+
import { file } from '@alepha/file';
|
|
6
|
+
import { BlobServiceClient } from '@azure/storage-blob';
|
|
7
|
+
|
|
8
|
+
// Environment schema for the Azure bucket provider.
// NOTE(review): the default value is the well-known public Azurite (local
// storage emulator) developer connection string, not a leaked credential;
// production deployments must set AZ_STORAGE_CONNECTION_STRING explicitly.
const envSchema = t.object({
  AZ_STORAGE_CONNECTION_STRING: t.string({
    size: "long",
    default: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
  })
});
|
|
14
|
+
/**
 * Azure Blob Storage implementation of the Alepha FileStorageProvider.
 *
 * Each declared bucket is backed by one Azure container; the container name
 * is derived from the bucket name by replacing "/" with "-" and lower-casing
 * (Azure container names reject slashes and uppercase letters). Container
 * clients are created on the "start" hook and cached in `containers`.
 */
class AzureFileStorageProvider {
  log = $logger();
  env = $env(envSchema);
  bucketProvider = $inject(BucketDescriptorProvider);
  dateTimeProvider = $inject(DateTimeProvider);
  // Cache of ContainerClient instances, keyed by container name.
  containers = {};
  blobServiceClient;
  options = {};
  constructor() {
    this.blobServiceClient = BlobServiceClient.fromConnectionString(
      this.env.AZ_STORAGE_CONNECTION_STRING,
      this.storagePipelineOptions()
    );
  }
  // Override point for custom pipeline options (retry policy, proxy, ...).
  storagePipelineOptions() {
    return {};
  }
  // Returns the cached container client, creating the container on first use.
  async createContainer(containerName) {
    if (this.containers[containerName]) {
      return this.containers[containerName];
    }
    const container = await this.createContainerClient(containerName);
    this.containers[containerName] = container;
    return container;
  }
  /**
   * Uploads a file as a block blob and returns its id.
   * Generates a random UUID when no fileId is provided. The file name/type
   * are stored as blob metadata so downloads can rebuild a FileLike with the
   * same attributes.
   */
  async upload(bucketName, file2, fileId) {
    fileId ??= this.createId();
    const block = this.getBlock(bucketName, fileId);
    const metadata = {
      name: file2.name,
      type: file2.type
    };
    if (file2.filepath) {
      await block.uploadFile(file2.filepath, {
        metadata,
        blobHTTPHeaders: {
          blobContentType: file2.type
        }
      });
    } else if (file2.size > 0) {
      await block.uploadData(await file2.arrayBuffer(), {
        metadata,
        blobHTTPHeaders: {
          blobContentType: file2.type
        }
      });
    } else {
      throw new Error("Raw stream upload is not supported yet");
    }
    return fileId;
  }
  // Downloads a blob as a FileLike built from the response stream + metadata.
  // NOTE(review): every SDK failure (auth, network, ...) is surfaced as
  // FileNotFoundError here, not only 404s.
  async download(bucketName, fileId) {
    const block = this.getBlock(bucketName, fileId);
    const blob = await block.download().catch((error) => {
      if (error instanceof Error) {
        throw new FileNotFoundError("Error downloading file", { cause: error });
      }
      throw error;
    });
    if (!blob.readableStreamBody) {
      throw new FileNotFoundError("File not found - empty stream body");
    }
    return file(blob.readableStreamBody, blob.metadata);
  }
  async exists(bucketName, fileId) {
    return await this.getBlock(bucketName, fileId).exists();
  }
  async delete(bucketName, fileId) {
    try {
      await this.getBlock(bucketName, fileId).delete();
    } catch (error) {
      if (error instanceof Error) {
        throw new FileNotFoundError("Error deleting file", { cause: error });
      }
      throw error;
    }
  }
  // Resolves a BlockBlobClient; the container must have been prepared first
  // (by the "start" hook or createContainer), otherwise FileNotFoundError.
  getBlock(container, fileId) {
    if (!this.containers[container]) {
      throw new FileNotFoundError(
        `File '${fileId}' not found - container '${container}' does not exist`
      );
    }
    return this.containers[container].getBlockBlobClient(fileId);
  }
  onStart = $hook({
    name: "start",
    handler: async () => {
      // Loop variable renamed from `bucket` for clarity (it is a bucket
      // descriptor, not a bucket).
      for (const descriptor of this.bucketProvider.getBuckets()) {
        const containerName = descriptor.name.replaceAll("/", "-").toLowerCase();
        this.log.debug(`Prepare container ${containerName}...`);
        if (!this.containers[containerName]) {
          this.containers[containerName] = await this.createContainerClient(containerName);
        }
        // Fix: previously interpolated the descriptor object itself, which
        // logged "Container [object Object] OK".
        this.log.info(`Container ${containerName} OK`);
      }
    }
  });
  // Creates (if needed) and returns a container client, bounded by a 5 second
  // deadline so startup cannot hang on an unreachable storage endpoint.
  async createContainerClient(name) {
    const container = this.blobServiceClient.getContainerClient(name);
    await this.dateTimeProvider.deadline(
      (abortSignal) => container.createIfNotExists({ abortSignal }),
      { seconds: 5 }
    );
    return container;
  }
  createId() {
    return randomUUID();
  }
}
|
|
124
|
+
|
|
125
|
+
/**
 * Alepha module wiring Azure Blob Storage into the bucket system:
 * binds AzureFileStorageProvider as the default FileStorageProvider
 * implementation and registers the base BucketModule.
 */
class AzureBucketModule {
  name = "alepha/bucket/azure";
  alepha = $inject(Alepha);
  constructor() {
    const azureStorageBinding = {
      default: true,
      provide: FileStorageProvider,
      use: AzureFileStorageProvider
    };
    this.alepha.with(azureStorageBinding, BucketModule);
  }
}
|
|
139
|
+
|
|
140
|
+
export { AzureBucketModule, AzureFileStorageProvider };
|
package/package.json
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@alepha/bucket-azure",
|
|
3
|
+
"keywords": [
|
|
4
|
+
"alepha",
|
|
5
|
+
"bucket",
|
|
6
|
+
"file-storage",
|
|
7
|
+
"file",
|
|
8
|
+
"azure",
|
|
9
|
+
"storage-blob"
|
|
10
|
+
],
|
|
11
|
+
"author": "Feunard",
|
|
12
|
+
"version": "0.7.1",
|
|
13
|
+
"type": "module",
|
|
14
|
+
"license": "MIT",
|
|
15
|
+
"main": "./dist/index.js",
|
|
16
|
+
"types": "./dist/index.d.ts",
|
|
17
|
+
"files": [
|
|
18
|
+
"dist",
|
|
19
|
+
"src"
|
|
20
|
+
],
|
|
21
|
+
"dependencies": {
|
|
22
|
+
"@alepha/bucket": "0.7.1",
|
|
23
|
+
"@alepha/core": "0.7.1",
|
|
24
|
+
"@alepha/datetime": "0.7.1",
|
|
25
|
+
"@alepha/file": "0.7.1",
|
|
26
|
+
"@azure/storage-blob": "^12.27.0"
|
|
27
|
+
},
|
|
28
|
+
"devDependencies": {
|
|
29
|
+
"pkgroll": "^2.13.1",
|
|
30
|
+
"vitest": "^3.2.4"
|
|
31
|
+
},
|
|
32
|
+
"scripts": {
|
|
33
|
+
"test": "vitest run",
|
|
34
|
+
"build": "pkgroll --clean-dist"
|
|
35
|
+
},
|
|
36
|
+
"repository": {
|
|
37
|
+
"type": "git",
|
|
38
|
+
"url": "git+https://github.com/feunard/alepha.git"
|
|
39
|
+
},
|
|
40
|
+
"homepage": "https://github.com/feunard/alepha",
|
|
41
|
+
"module": "./dist/index.js",
|
|
42
|
+
"exports": {
|
|
43
|
+
".": {
|
|
44
|
+
"types": "./dist/index.d.ts",
|
|
45
|
+
"import": "./dist/index.js",
|
|
46
|
+
"require": "./dist/index.cjs"
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { BucketModule, FileStorageProvider } from "@alepha/bucket";
|
|
2
|
+
import { $inject, Alepha, type Module } from "@alepha/core";
|
|
3
|
+
import { AzureFileStorageProvider } from "./providers/AzureFileStorageProvider.ts";
|
|
4
|
+
|
|
5
|
+
export * from "./providers/AzureFileStorageProvider.ts";
|
|
6
|
+
|
|
7
|
+
export class AzureBucketModule implements Module {
|
|
8
|
+
public readonly name = "alepha/bucket/azure";
|
|
9
|
+
|
|
10
|
+
protected readonly alepha = $inject(Alepha);
|
|
11
|
+
|
|
12
|
+
constructor() {
|
|
13
|
+
this.alepha.with(
|
|
14
|
+
{
|
|
15
|
+
default: true,
|
|
16
|
+
provide: FileStorageProvider,
|
|
17
|
+
use: AzureFileStorageProvider,
|
|
18
|
+
},
|
|
19
|
+
BucketModule,
|
|
20
|
+
);
|
|
21
|
+
}
|
|
22
|
+
}
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
import { randomUUID } from "node:crypto";
|
|
2
|
+
import {
|
|
3
|
+
BucketDescriptorProvider,
|
|
4
|
+
FileNotFoundError,
|
|
5
|
+
type FileStorageProvider,
|
|
6
|
+
} from "@alepha/bucket";
|
|
7
|
+
import type { Static } from "@alepha/core";
|
|
8
|
+
import { $env, $hook, $inject, $logger, type FileLike, t } from "@alepha/core";
|
|
9
|
+
import { DateTimeProvider } from "@alepha/datetime";
|
|
10
|
+
import { file } from "@alepha/file";
|
|
11
|
+
import {
|
|
12
|
+
BlobServiceClient,
|
|
13
|
+
type BlockBlobClient,
|
|
14
|
+
type ContainerClient,
|
|
15
|
+
type StoragePipelineOptions,
|
|
16
|
+
} from "@azure/storage-blob";
|
|
17
|
+
|
|
18
|
+
// Environment schema for the Azure bucket provider.
// NOTE(review): the default value is the well-known public Azurite (local
// storage emulator) developer connection string, not a leaked credential;
// production deployments must set AZ_STORAGE_CONNECTION_STRING explicitly.
const envSchema = t.object({
  AZ_STORAGE_CONNECTION_STRING: t.string({
    size: "long",
    default:
      "DefaultEndpointsProtocol=http;" +
      "AccountName=devstoreaccount1;" +
      "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;" +
      "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
  }),
});

// Merge the provider's env keys into Alepha's global Env type so `$env`
// consumers get typed access to AZ_STORAGE_CONNECTION_STRING.
declare module "@alepha/core" {
  interface Env extends Partial<Static<typeof envSchema>> {}
}
|
|
32
|
+
|
|
33
|
+
export class AzureFileStorageProvider implements FileStorageProvider {
|
|
34
|
+
protected readonly log = $logger();
|
|
35
|
+
protected readonly env = $env(envSchema);
|
|
36
|
+
protected readonly bucketProvider = $inject(BucketDescriptorProvider);
|
|
37
|
+
protected readonly dateTimeProvider = $inject(DateTimeProvider);
|
|
38
|
+
protected readonly containers: Record<string, ContainerClient> = {};
|
|
39
|
+
protected readonly blobServiceClient: BlobServiceClient;
|
|
40
|
+
protected readonly options: StoragePipelineOptions = {};
|
|
41
|
+
|
|
42
|
+
constructor() {
|
|
43
|
+
this.blobServiceClient = BlobServiceClient.fromConnectionString(
|
|
44
|
+
this.env.AZ_STORAGE_CONNECTION_STRING,
|
|
45
|
+
this.storagePipelineOptions(),
|
|
46
|
+
);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
public storagePipelineOptions(): StoragePipelineOptions {
|
|
50
|
+
return {};
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
public async createContainer(
|
|
54
|
+
containerName: string,
|
|
55
|
+
): Promise<ContainerClient> {
|
|
56
|
+
if (this.containers[containerName]) {
|
|
57
|
+
return this.containers[containerName];
|
|
58
|
+
}
|
|
59
|
+
const container = await this.createContainerClient(containerName);
|
|
60
|
+
this.containers[containerName] = container;
|
|
61
|
+
return container;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
public async upload(
|
|
65
|
+
bucketName: string,
|
|
66
|
+
file: FileLike,
|
|
67
|
+
fileId?: string,
|
|
68
|
+
): Promise<string> {
|
|
69
|
+
fileId ??= this.createId();
|
|
70
|
+
const block = this.getBlock(bucketName, fileId);
|
|
71
|
+
|
|
72
|
+
const metadata = {
|
|
73
|
+
name: file.name,
|
|
74
|
+
type: file.type,
|
|
75
|
+
};
|
|
76
|
+
|
|
77
|
+
if (file.filepath) {
|
|
78
|
+
await block.uploadFile(file.filepath, {
|
|
79
|
+
metadata,
|
|
80
|
+
blobHTTPHeaders: {
|
|
81
|
+
blobContentType: file.type,
|
|
82
|
+
},
|
|
83
|
+
});
|
|
84
|
+
} else if (file.size > 0) {
|
|
85
|
+
await block.uploadData(await file.arrayBuffer(), {
|
|
86
|
+
metadata,
|
|
87
|
+
blobHTTPHeaders: {
|
|
88
|
+
blobContentType: file.type,
|
|
89
|
+
},
|
|
90
|
+
});
|
|
91
|
+
} else {
|
|
92
|
+
throw new Error("Raw stream upload is not supported yet");
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
return fileId;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
public async download(bucketName: string, fileId: string): Promise<FileLike> {
|
|
99
|
+
const block = this.getBlock(bucketName, fileId);
|
|
100
|
+
|
|
101
|
+
const blob = await block.download().catch((error) => {
|
|
102
|
+
if (error instanceof Error) {
|
|
103
|
+
throw new FileNotFoundError("Error downloading file", { cause: error });
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
throw error;
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
if (!blob.readableStreamBody) {
|
|
110
|
+
throw new FileNotFoundError("File not found - empty stream body");
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
return file(blob.readableStreamBody, blob.metadata);
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
public async exists(bucketName: string, fileId: string): Promise<boolean> {
|
|
117
|
+
return await this.getBlock(bucketName, fileId).exists();
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
public async delete(bucketName: string, fileId: string): Promise<void> {
|
|
121
|
+
try {
|
|
122
|
+
await this.getBlock(bucketName, fileId).delete();
|
|
123
|
+
} catch (error) {
|
|
124
|
+
if (error instanceof Error) {
|
|
125
|
+
throw new FileNotFoundError("Error deleting file", { cause: error });
|
|
126
|
+
}
|
|
127
|
+
throw error;
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
public getBlock(container: string, fileId: string): BlockBlobClient {
|
|
132
|
+
if (!this.containers[container]) {
|
|
133
|
+
throw new FileNotFoundError(
|
|
134
|
+
`File '${fileId}' not found - container '${container}' does not exists`,
|
|
135
|
+
);
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
return this.containers[container].getBlockBlobClient(fileId);
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
public readonly onStart = $hook({
|
|
142
|
+
name: "start",
|
|
143
|
+
handler: async () => {
|
|
144
|
+
for (const bucket of this.bucketProvider.getBuckets()) {
|
|
145
|
+
const containerName = bucket.name.replaceAll("/", "-").toLowerCase();
|
|
146
|
+
this.log.debug(`Prepare container ${containerName}...`);
|
|
147
|
+
|
|
148
|
+
if (!this.containers[containerName]) {
|
|
149
|
+
this.containers[containerName] =
|
|
150
|
+
await this.createContainerClient(containerName);
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
this.log.info(`Container ${bucket} OK`);
|
|
154
|
+
}
|
|
155
|
+
},
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
protected async createContainerClient(
|
|
159
|
+
name: string,
|
|
160
|
+
): Promise<ContainerClient> {
|
|
161
|
+
const container = this.blobServiceClient.getContainerClient(name);
|
|
162
|
+
|
|
163
|
+
await this.dateTimeProvider.deadline(
|
|
164
|
+
(abortSignal) => container.createIfNotExists({ abortSignal }),
|
|
165
|
+
{ seconds: 5 },
|
|
166
|
+
);
|
|
167
|
+
|
|
168
|
+
return container;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
protected createId(): string {
|
|
172
|
+
return randomUUID();
|
|
173
|
+
}
|
|
174
|
+
}
|