@alepha/bucket-azure 0.11.8 → 0.11.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +126 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +52 -0
- package/dist/index.d.cts.map +1 -0
- package/package.json +9 -8
package/dist/index.cjs
ADDED
@@ -0,0 +1,126 @@
let __alepha_bucket = require("@alepha/bucket");
let __alepha_core = require("@alepha/core");
let node_crypto = require("node:crypto");
let node_stream = require("node:stream");
let __alepha_datetime = require("@alepha/datetime");
let __alepha_file = require("@alepha/file");
let __alepha_logger = require("@alepha/logger");
let __azure_storage_blob = require("@azure/storage-blob");

//#region src/providers/AzureFileStorageProvider.ts
const envSchema = __alepha_core.t.object({ AZ_STORAGE_CONNECTION_STRING: __alepha_core.t.string() });
/**
* Azure Blog Storage implementation of File Storage Provider.
*/
var AzureFileStorageProvider = class {
	log = (0, __alepha_logger.$logger)();
	env = (0, __alepha_core.$env)(envSchema);
	alepha = (0, __alepha_core.$inject)(__alepha_core.Alepha);
	time = (0, __alepha_core.$inject)(__alepha_datetime.DateTimeProvider);
	fileSystem = (0, __alepha_core.$inject)(__alepha_file.FileSystem);
	containers = {};
	blobServiceClient;
	options = {};
	constructor() {
		this.blobServiceClient = __azure_storage_blob.BlobServiceClient.fromConnectionString(this.env.AZ_STORAGE_CONNECTION_STRING, this.options);
	}
	onStart = (0, __alepha_core.$hook)({
		on: "start",
		handler: async () => {
			for (const bucket of this.alepha.descriptors(__alepha_bucket.$bucket)) {
				if (bucket.provider !== this) continue;
				const containerName = this.convertName(bucket.name);
				this.log.debug(`Prepare container '${containerName}' ...`);
				if (!this.containers[containerName]) this.containers[containerName] = await this.createContainerClient(containerName);
				this.log.info(`Container '${bucket.name}' OK`);
			}
		}
	});
	convertName(name) {
		return name.replaceAll("/", "-").toLowerCase();
	}
	async upload(bucketName, file, fileId) {
		fileId ??= this.createId();
		this.log.trace(`Uploading file '${file.name}' to bucket '${bucketName}' with id '${fileId}'...`);
		const block = this.getBlock(bucketName, fileId);
		const metadata = {
			name: file.name,
			type: file.type
		};
		if (file.filepath) await block.uploadFile(file.filepath, {
			metadata,
			blobHTTPHeaders: { blobContentType: file.type }
		});
		else if (file.size > 0) await block.uploadData(await file.arrayBuffer(), {
			metadata,
			blobHTTPHeaders: { blobContentType: file.type }
		});
		else await block.uploadStream(node_stream.Readable.from(file.stream()), file.size || void 0, 5, {
			metadata,
			blobHTTPHeaders: { blobContentType: file.type }
		});
		return fileId;
	}
	async download(bucketName, fileId) {
		this.log.trace(`Downloading file '${fileId}' from bucket '${bucketName}'...`);
		const blob = await this.getBlock(bucketName, fileId).download().catch((error) => {
			if (error instanceof Error) throw new __alepha_bucket.FileNotFoundError("Error downloading file", { cause: error });
			throw error;
		});
		if (!blob.readableStreamBody) throw new __alepha_bucket.FileNotFoundError("File not found - empty stream body");
		return this.fileSystem.createFile({
			stream: blob.readableStreamBody,
			...blob.metadata,
			size: blob.contentLength
		});
	}
	async exists(bucketName, fileId) {
		this.log.trace(`Checking existence of file '${fileId}' in bucket '${bucketName}'...`);
		return await this.getBlock(bucketName, fileId).exists();
	}
	async delete(bucketName, fileId) {
		this.log.trace(`Deleting file '${fileId}' from bucket '${bucketName}'...`);
		try {
			await this.getBlock(bucketName, fileId).delete();
		} catch (error) {
			if (error instanceof Error) throw new __alepha_bucket.FileNotFoundError("Error deleting file", { cause: error });
			throw error;
		}
	}
	getBlock(container, fileId) {
		const containerName = this.convertName(container);
		if (!this.containers[containerName]) throw new __alepha_bucket.FileNotFoundError(`File '${fileId}' not found - container '${container}' does not exists`);
		return this.containers[containerName].getBlockBlobClient(fileId);
	}
	async createContainerClient(name) {
		const container = this.blobServiceClient.getContainerClient(name);
		await this.time.deadline((abortSignal) => container.createIfNotExists({ abortSignal }), [5, "seconds"]);
		return container;
	}
	createId() {
		return (0, node_crypto.randomUUID)();
	}
};

//#endregion
//#region src/index.ts
/**
* Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.
*
* @see {@link AzureFileStorageProvider}
* @module alepha.bucket.azure
*/
const AlephaBucketAzure = (0, __alepha_core.$module)({
	name: "alepha.bucket.azure",
	services: [AzureFileStorageProvider],
	register: (alepha) => alepha.with({
		optional: true,
		provide: __alepha_bucket.FileStorageProvider,
		use: AzureFileStorageProvider
	}).with(__alepha_bucket.AlephaBucket)
});

//#endregion
exports.AlephaBucketAzure = AlephaBucketAzure;
exports.AzureFileStorageProvider = AzureFileStorageProvider;
//# sourceMappingURL=index.cjs.map
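The bundle above derives Azure container names from bucket names: convertName replaces "/" with "-" and lowercases the result (Azure container names cannot contain slashes or uppercase letters), and the "start" hook then creates one container per $bucket descriptor. A minimal standalone sketch of that naming rule, assuming nothing beyond what the bundle shows; the convertName constant below is a local copy for illustration, not an export of this package:

// Mirrors AzureFileStorageProvider.convertName from dist/index.cjs above.
const convertName = (name: string): string => name.replaceAll("/", "-").toLowerCase();

console.log(convertName("Invoices/2024")); // "invoices-2024"
console.log(convertName("user-uploads")); // "user-uploads" (already a valid container name)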
package/dist/index.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.cjs","names":["t","Alepha","DateTimeProvider","FileSystem","BlobServiceClient","$bucket","Readable","FileNotFoundError","FileStorageProvider","AlephaBucket"],"sources":["../src/providers/AzureFileStorageProvider.ts","../src/index.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport { Readable } from \"node:stream\";\nimport {\n $bucket,\n FileNotFoundError,\n type FileStorageProvider,\n} from \"@alepha/bucket\";\nimport {\n $env,\n $hook,\n $inject,\n Alepha,\n type FileLike,\n type Static,\n t,\n} from \"@alepha/core\";\nimport { DateTimeProvider } from \"@alepha/datetime\";\nimport { FileSystem } from \"@alepha/file\";\nimport { $logger } from \"@alepha/logger\";\nimport {\n BlobServiceClient,\n type BlockBlobClient,\n type ContainerClient,\n type StoragePipelineOptions,\n} from \"@azure/storage-blob\";\n\nconst envSchema = t.object({\n AZ_STORAGE_CONNECTION_STRING: t.string(),\n});\n\ndeclare module \"@alepha/core\" {\n interface Env extends Partial<Static<typeof envSchema>> {}\n}\n\n/**\n * Azure Blog Storage implementation of File Storage Provider.\n */\nexport class AzureFileStorageProvider implements FileStorageProvider {\n protected readonly log = $logger();\n protected readonly env = $env(envSchema);\n protected readonly alepha = $inject(Alepha);\n protected readonly time = $inject(DateTimeProvider);\n protected readonly fileSystem = $inject(FileSystem);\n protected readonly containers: Record<string, ContainerClient> = {};\n protected readonly blobServiceClient: BlobServiceClient;\n\n public readonly options: StoragePipelineOptions = {};\n\n constructor() {\n this.blobServiceClient = BlobServiceClient.fromConnectionString(\n this.env.AZ_STORAGE_CONNECTION_STRING,\n this.options,\n );\n }\n\n protected readonly onStart = $hook({\n on: \"start\",\n handler: async () => {\n for (const bucket of this.alepha.descriptors($bucket)) {\n if (bucket.provider !== this) {\n continue;\n }\n\n const containerName = this.convertName(bucket.name);\n\n this.log.debug(`Prepare container '${containerName}' ...`);\n\n if (!this.containers[containerName]) {\n this.containers[containerName] =\n await this.createContainerClient(containerName);\n }\n\n this.log.info(`Container '${bucket.name}' OK`);\n }\n },\n });\n\n public convertName(name: string): string {\n // Azure Blob Storage does not allow uppercase letters in container names\n return name.replaceAll(\"/\", \"-\").toLowerCase();\n }\n\n public async upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string> {\n fileId ??= this.createId();\n\n this.log.trace(\n `Uploading file '${file.name}' to bucket '${bucketName}' with id '${fileId}'...`,\n );\n\n const block = this.getBlock(bucketName, fileId);\n\n const metadata = {\n name: file.name,\n type: file.type,\n };\n\n if (file.filepath) {\n await block.uploadFile(file.filepath, {\n metadata,\n blobHTTPHeaders: {\n blobContentType: file.type,\n },\n });\n } else if (file.size > 0) {\n await block.uploadData(await file.arrayBuffer(), {\n metadata,\n blobHTTPHeaders: {\n blobContentType: file.type,\n },\n });\n } else {\n await block.uploadStream(\n Readable.from(file.stream()),\n file.size || undefined,\n 5,\n {\n metadata,\n blobHTTPHeaders: {\n blobContentType: file.type,\n },\n },\n );\n }\n\n return fileId;\n }\n\n public async download(bucketName: string, fileId: string): Promise<FileLike> {\n this.log.trace(\n `Downloading file '${fileId}' from bucket '${bucketName}'...`,\n );\n const block = 
this.getBlock(bucketName, fileId);\n\n const blob = await block.download().catch((error) => {\n if (error instanceof Error) {\n throw new FileNotFoundError(\"Error downloading file\", { cause: error });\n }\n\n throw error;\n });\n\n if (!blob.readableStreamBody) {\n throw new FileNotFoundError(\"File not found - empty stream body\");\n }\n\n return this.fileSystem.createFile({\n stream: blob.readableStreamBody,\n ...blob.metadata,\n size: blob.contentLength,\n });\n }\n\n public async exists(bucketName: string, fileId: string): Promise<boolean> {\n this.log.trace(\n `Checking existence of file '${fileId}' in bucket '${bucketName}'...`,\n );\n return await this.getBlock(bucketName, fileId).exists();\n }\n\n public async delete(bucketName: string, fileId: string): Promise<void> {\n this.log.trace(`Deleting file '${fileId}' from bucket '${bucketName}'...`);\n try {\n await this.getBlock(bucketName, fileId).delete();\n } catch (error) {\n if (error instanceof Error) {\n throw new FileNotFoundError(\"Error deleting file\", { cause: error });\n }\n throw error;\n }\n }\n\n public getBlock(container: string, fileId: string): BlockBlobClient {\n const containerName = this.convertName(container);\n\n if (!this.containers[containerName]) {\n throw new FileNotFoundError(\n `File '${fileId}' not found - container '${container}' does not exists`,\n );\n }\n\n return this.containers[containerName].getBlockBlobClient(fileId);\n }\n\n protected async createContainerClient(\n name: string,\n ): Promise<ContainerClient> {\n const container = this.blobServiceClient.getContainerClient(name);\n\n await this.time.deadline(\n (abortSignal) => container.createIfNotExists({ abortSignal }),\n [5, \"seconds\"],\n );\n\n return container;\n }\n\n protected createId(): string {\n return randomUUID();\n }\n}\n","import { AlephaBucket, FileStorageProvider } from \"@alepha/bucket\";\nimport { $module } from \"@alepha/core\";\nimport { AzureFileStorageProvider } from \"./providers/AzureFileStorageProvider.ts\";\n\nexport * from \"./providers/AzureFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.\n *\n * @see {@link AzureFileStorageProvider}\n * @module alepha.bucket.azure\n */\nexport const AlephaBucketAzure = $module({\n name: \"alepha.bucket.azure\",\n services: [AzureFileStorageProvider],\n register: (alepha) =>\n alepha\n .with({\n optional: true,\n provide: FileStorageProvider,\n use: AzureFileStorageProvider,\n })\n 
.with(AlephaBucket),\n});\n"],"mappings":";;;;;;;;;;AA0BA,MAAM,YAAYA,gBAAE,OAAO,EACzB,8BAA8BA,gBAAE,QAAQ,EACzC,CAAC;;;;AASF,IAAa,2BAAb,MAAqE;CACnE,AAAmB,oCAAe;CAClC,AAAmB,8BAAW,UAAU;CACxC,AAAmB,oCAAiBC,qBAAO;CAC3C,AAAmB,kCAAeC,mCAAiB;CACnD,AAAmB,wCAAqBC,yBAAW;CACnD,AAAmB,aAA8C,EAAE;CACnE,AAAmB;CAEnB,AAAgB,UAAkC,EAAE;CAEpD,cAAc;AACZ,OAAK,oBAAoBC,uCAAkB,qBACzC,KAAK,IAAI,8BACT,KAAK,QACN;;CAGH,AAAmB,mCAAgB;EACjC,IAAI;EACJ,SAAS,YAAY;AACnB,QAAK,MAAM,UAAU,KAAK,OAAO,YAAYC,wBAAQ,EAAE;AACrD,QAAI,OAAO,aAAa,KACtB;IAGF,MAAM,gBAAgB,KAAK,YAAY,OAAO,KAAK;AAEnD,SAAK,IAAI,MAAM,sBAAsB,cAAc,OAAO;AAE1D,QAAI,CAAC,KAAK,WAAW,eACnB,MAAK,WAAW,iBACd,MAAM,KAAK,sBAAsB,cAAc;AAGnD,SAAK,IAAI,KAAK,cAAc,OAAO,KAAK,MAAM;;;EAGnD,CAAC;CAEF,AAAO,YAAY,MAAsB;AAEvC,SAAO,KAAK,WAAW,KAAK,IAAI,CAAC,aAAa;;CAGhD,MAAa,OACX,YACA,MACA,QACiB;AACjB,aAAW,KAAK,UAAU;AAE1B,OAAK,IAAI,MACP,mBAAmB,KAAK,KAAK,eAAe,WAAW,aAAa,OAAO,MAC5E;EAED,MAAM,QAAQ,KAAK,SAAS,YAAY,OAAO;EAE/C,MAAM,WAAW;GACf,MAAM,KAAK;GACX,MAAM,KAAK;GACZ;AAED,MAAI,KAAK,SACP,OAAM,MAAM,WAAW,KAAK,UAAU;GACpC;GACA,iBAAiB,EACf,iBAAiB,KAAK,MACvB;GACF,CAAC;WACO,KAAK,OAAO,EACrB,OAAM,MAAM,WAAW,MAAM,KAAK,aAAa,EAAE;GAC/C;GACA,iBAAiB,EACf,iBAAiB,KAAK,MACvB;GACF,CAAC;MAEF,OAAM,MAAM,aACVC,qBAAS,KAAK,KAAK,QAAQ,CAAC,EAC5B,KAAK,QAAQ,QACb,GACA;GACE;GACA,iBAAiB,EACf,iBAAiB,KAAK,MACvB;GACF,CACF;AAGH,SAAO;;CAGT,MAAa,SAAS,YAAoB,QAAmC;AAC3E,OAAK,IAAI,MACP,qBAAqB,OAAO,iBAAiB,WAAW,MACzD;EAGD,MAAM,OAAO,MAFC,KAAK,SAAS,YAAY,OAAO,CAEtB,UAAU,CAAC,OAAO,UAAU;AACnD,OAAI,iBAAiB,MACnB,OAAM,IAAIC,kCAAkB,0BAA0B,EAAE,OAAO,OAAO,CAAC;AAGzE,SAAM;IACN;AAEF,MAAI,CAAC,KAAK,mBACR,OAAM,IAAIA,kCAAkB,qCAAqC;AAGnE,SAAO,KAAK,WAAW,WAAW;GAChC,QAAQ,KAAK;GACb,GAAG,KAAK;GACR,MAAM,KAAK;GACZ,CAAC;;CAGJ,MAAa,OAAO,YAAoB,QAAkC;AACxE,OAAK,IAAI,MACP,+BAA+B,OAAO,eAAe,WAAW,MACjE;AACD,SAAO,MAAM,KAAK,SAAS,YAAY,OAAO,CAAC,QAAQ;;CAGzD,MAAa,OAAO,YAAoB,QAA+B;AACrE,OAAK,IAAI,MAAM,kBAAkB,OAAO,iBAAiB,WAAW,MAAM;AAC1E,MAAI;AACF,SAAM,KAAK,SAAS,YAAY,OAAO,CAAC,QAAQ;WACzC,OAAO;AACd,OAAI,iBAAiB,MACnB,OAAM,IAAIA,kCAAkB,uBAAuB,EAAE,OAAO,OAAO,CAAC;AAEtE,SAAM;;;CAIV,AAAO,SAAS,WAAmB,QAAiC;EAClE,MAAM,gBAAgB,KAAK,YAAY,UAAU;AAEjD,MAAI,CAAC,KAAK,WAAW,eACnB,OAAM,IAAIA,kCACR,SAAS,OAAO,2BAA2B,UAAU,mBACtD;AAGH,SAAO,KAAK,WAAW,eAAe,mBAAmB,OAAO;;CAGlE,MAAgB,sBACd,MAC0B;EAC1B,MAAM,YAAY,KAAK,kBAAkB,mBAAmB,KAAK;AAEjE,QAAM,KAAK,KAAK,UACb,gBAAgB,UAAU,kBAAkB,EAAE,aAAa,CAAC,EAC7D,CAAC,GAAG,UAAU,CACf;AAED,SAAO;;CAGT,AAAU,WAAmB;AAC3B,sCAAmB;;;;;;;;;;;;AC3LvB,MAAa,+CAA4B;CACvC,MAAM;CACN,UAAU,CAAC,yBAAyB;CACpC,WAAW,WACT,OACG,KAAK;EACJ,UAAU;EACV,SAASC;EACT,KAAK;EACN,CAAC,CACD,KAAKC,6BAAa;CACxB,CAAC"}
package/dist/index.d.cts
ADDED
@@ -0,0 +1,52 @@
import * as _alepha_core1 from "@alepha/core";
import { Alepha, FileLike, Static } from "@alepha/core";
import * as _alepha_logger0 from "@alepha/logger";
import { FileStorageProvider } from "@alepha/bucket";
import { DateTimeProvider } from "@alepha/datetime";
import { FileSystem } from "@alepha/file";
import { BlobServiceClient, BlockBlobClient, ContainerClient, StoragePipelineOptions } from "@azure/storage-blob";

//#region src/providers/AzureFileStorageProvider.d.ts
declare const envSchema: _alepha_core1.TObject<{
	AZ_STORAGE_CONNECTION_STRING: _alepha_core1.TString;
}>;
declare module "@alepha/core" {
	interface Env extends Partial<Static<typeof envSchema>> {}
}
/**
* Azure Blog Storage implementation of File Storage Provider.
*/
declare class AzureFileStorageProvider implements FileStorageProvider {
	protected readonly log: _alepha_logger0.Logger;
	protected readonly env: {
		AZ_STORAGE_CONNECTION_STRING: string;
	};
	protected readonly alepha: Alepha;
	protected readonly time: DateTimeProvider;
	protected readonly fileSystem: FileSystem;
	protected readonly containers: Record<string, ContainerClient>;
	protected readonly blobServiceClient: BlobServiceClient;
	readonly options: StoragePipelineOptions;
	constructor();
	protected readonly onStart: _alepha_core1.HookDescriptor<"start">;
	convertName(name: string): string;
	upload(bucketName: string, file: FileLike, fileId?: string): Promise<string>;
	download(bucketName: string, fileId: string): Promise<FileLike>;
	exists(bucketName: string, fileId: string): Promise<boolean>;
	delete(bucketName: string, fileId: string): Promise<void>;
	getBlock(container: string, fileId: string): BlockBlobClient;
	protected createContainerClient(name: string): Promise<ContainerClient>;
	protected createId(): string;
}
//#endregion
//#region src/index.d.ts
/**
* Plugin for Alepha Bucket that provides Azure Blob Storage capabilities.
*
* @see {@link AzureFileStorageProvider}
* @module alepha.bucket.azure
*/
declare const AlephaBucketAzure: _alepha_core1.Service<_alepha_core1.Module>;
//#endregion
export { AlephaBucketAzure, AzureFileStorageProvider };
//# sourceMappingURL=index.d.cts.map
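The declaration file exposes the provider's public surface: convertName, upload, download, exists, delete, and getBlock. A rough consumer sketch, assuming the provider can be injected with @alepha/core's $inject the same way the provider injects its own dependencies; the ReportService class and the "reports" bucket are hypothetical, and the matching container must already have been prepared through a $bucket descriptor at startup, otherwise getBlock/download throw FileNotFoundError:

import { $inject } from "@alepha/core";
import type { FileLike } from "@alepha/core";
import { AzureFileStorageProvider } from "@alepha/bucket-azure";

// Hypothetical service; the injection style mirrors the provider's own fields above.
class ReportService {
	protected readonly storage = $inject(AzureFileStorageProvider);

	async save(file: FileLike): Promise<string> {
		// upload(bucketName, file, fileId?) returns the file id (a random UUID when omitted).
		return this.storage.upload("reports", file);
	}

	async load(fileId: string): Promise<FileLike> {
		// Throws FileNotFoundError if the blob or its container is missing.
		return this.storage.download("reports", fileId);
	}
}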
package/dist/index.d.cts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.cts","names":[],"sources":["../src/providers/AzureFileStorageProvider.ts","../src/index.ts"],"sourcesContent":[],"mappings":";;;;;;;;;cA0BM,WAEJ,aAAA,CAFa;gCAEb,aAAA,CAAA;;;EAFI,UAAA,GAEJ,SAGsB,OAHtB,CAG8B,MAH9B,CAAA,OAG4C,SAH5C,CAAA,CAAA,CAAA,CAFa;;;;;AAKS,cAMX,wBAAA,YAAoC,mBANzB,CAAA;EAAO,mBAAA,GAAA,EAMO,eAAA,CACd,MAPO;EAAA,mBAAA,GAAA,EAAA;IAAA,4BAAA,EAAA,MAAA;EAMlB,CAAA;EAAyB,mBACd,MAAA,EAEG,MAFH;EAEG,mBAAA,IAAA,EACF,gBADE;EACF,mBAAA,UAAA,EACM,UADN;EACM,mBAAA,UAAA,EACE,MADF,CAAA,MAAA,EACiB,eADjB,CAAA;EACiB,mBAAA,iBAAA,EACR,iBADQ;EAAf,SAAA,OAAA,EAGN,sBAHM;EACO,WAAA,CAAA;EAEb,mBAAA,OAAA,EAAsB,aAAA,CASrB,cATD,CAAA,OAAA,CAAA;EAAsB,WASrB,CAAA,IAAA,EAAA,MAAA,CAAA,EAAA,MAAA;EA6BlB,MAAA,CAAA,UAAA,EAAA,MAAA,EAAA,IAAA,EAAA,QAAA,EAAA,MAAA,CAAA,EAAA,MAAA,CAAA,EAEL,OAFK,CAAA,MAAA,CAAA;EAEL,QAAA,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EA6CwD,OA7CxD,CA6CgE,QA7ChE,CAAA;EA6CgE,MAAA,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAyBV,OAzBU,CAAA,OAAA,CAAA;EAAR,MAAA,CAAA,UAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAgCF,OAhCE,CAAA,IAAA,CAAA;EAyBF,QAAA,CAAA,SAAA,EAAA,MAAA,EAAA,MAAA,EAAA,MAAA,CAAA,EAmBL,eAnBK;EAOA,UAAA,qBAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EA0BtD,OA1BsD,CA0B9C,eA1B8C,CAAA;EAYL,UAAA,QAAA,CAAA,CAAA,EAAA,MAAA;;;;;;;;;;cCjKzC,mBAAiB,aAAA,CAAA,QAW5B,aAAA,CAX4B,MAAA"}
package/package.json
CHANGED
@@ -10,7 +10,7 @@
     "storage-blob"
   ],
   "author": "Feunard",
-  "version": "0.11.8",
+  "version": "0.11.10",
   "type": "module",
   "engines": {
     "node": ">=22.0.0"
@@ -23,18 +23,18 @@
     "src"
   ],
   "dependencies": {
-    "@alepha/bucket": "0.11.
-    "@alepha/core": "0.11.
-    "@alepha/datetime": "0.11.
-    "@alepha/file": "0.11.
-    "@alepha/logger": "0.11.
+    "@alepha/bucket": "0.11.10",
+    "@alepha/core": "0.11.10",
+    "@alepha/datetime": "0.11.10",
+    "@alepha/file": "0.11.10",
+    "@alepha/logger": "0.11.10",
     "@azure/storage-blob": "^12.29.1"
   },
   "devDependencies": {
     "@biomejs/biome": "^2.3.5",
     "tsdown": "^0.16.4",
     "typescript": "^5.9.3",
-    "vitest": "^4.0.
+    "vitest": "^4.0.9"
   },
   "scripts": {
     "test": "vitest run",
@@ -51,7 +51,8 @@
   "exports": {
     ".": {
       "types": "./dist/index.d.ts",
-      "import": "./dist/index.js"
+      "import": "./dist/index.js",
+      "require": "./dist/index.cjs"
     }
   }
 }
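The added "require" condition is what makes the new dist/index.cjs reachable: the ESM entry stays the same, while CommonJS consumers can now resolve the package without ESM interop. A minimal sketch of what that enables, assuming the package is installed in a CommonJS Node or TypeScript project:

// The added "require": "./dist/index.cjs" condition maps this call to the bundle shown earlier.
const { AlephaBucketAzure, AzureFileStorageProvider } = require("@alepha/bucket-azure");

// Both names match the exports at the bottom of dist/index.cjs.
console.log(typeof AzureFileStorageProvider); // "function" (it is a class)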