@alepha/bucket-vercel 0.14.2 → 0.14.3
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry; it is provided for informational purposes only.
package/dist/index.d.ts
CHANGED
@@ -460,6 +460,7 @@ declare class EventManager {
     protected events: Record<string, Array<Hook>>;
     constructor(logFn?: () => LoggerInterface | undefined);
     protected get log(): LoggerInterface | undefined;
+    clear(): void;
     /**
      * Registers a hook for the specified event.
      */
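The added `clear()` method drops every hook registered on an `EventManager`, which is useful for resetting shared event state between runs. A minimal sketch of the intended call, assuming `EventManager` is importable from `alepha` (the export path is an assumption; only the constructor and `clear()` signatures come from the declaration above):

```ts
import { EventManager } from "alepha"; // assumption: actual export path may differ

// Constructor signature per the .d.ts above; the logger factory argument is optional.
const events = new EventManager();

// New in 0.14.3: unregister every hook in one call,
// e.g. between test cases that share a container.
events.clear();
```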
@@ -992,6 +993,34 @@ interface State {
     "alepha.logger"?: LoggerInterface;
     /**
      * If defined, the Alepha container will only register this service and its dependencies.
+     *
+     * @example
+     * ```ts
+     * class MigrateCmd {
+     *   db = $inject(DatabaseProvider);
+     *   alepha = $inject(Alepha);
+     *   env = $env(
+     *     t.object({
+     *       MIGRATE: t.optional(t.boolean()),
+     *     }),
+     *   );
+     *
+     *   constructor() {
+     *     if (this.env.MIGRATE) {
+     *       this.alepha.set("alepha.target", MigrateCmd);
+     *     }
+     *   }
+     *
+     *   ready = $hook({
+     *     on: "ready",
+     *     handler: async () => {
+     *       if (this.env.MIGRATE) {
+     *         await this.db.migrate();
+     *       }
+     *     },
+     *   });
+     * }
+     * ```
     */
     "alepha.target"?: Service;
     /**
@@ -1539,6 +1568,13 @@ interface LsOptions {
  * FileSystem interface providing utilities for working with files.
  */
 declare abstract class FileSystemProvider {
+    /**
+     * Joins multiple path segments into a single path.
+     *
+     * @param paths - The path segments to join
+     * @returns The joined path
+     */
+    abstract join(...paths: string[]): string;
     /**
      * Creates a FileLike object from various sources.
      *
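`FileSystemProvider` now requires a `join()` implementation, delegating path assembly to whichever backend the provider wraps. A hedged sketch of what a concrete implementation might look like; the class name is illustrative and the other abstract members of `FileSystemProvider` are omitted:

```ts
import path from "node:path";

// Illustrative only: a Node-backed provider can delegate join() to node:path,
// while a blob-backed provider might normalize to "/"-separated object keys instead.
class NodePathJoinSketch {
  join(...paths: string[]): string {
    return path.join(...paths);
  }
}

const fs = new NodePathJoinSketch();
fs.join("buckets", "images", "avatar.png"); // => "buckets/images/avatar.png"
```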
package/package.json
CHANGED
@@ -10,7 +10,7 @@
     "blob"
   ],
   "author": "Nicolas Foures",
-  "version": "0.14.2",
+  "version": "0.14.3",
   "type": "module",
   "engines": {
     "node": ">=22.0.0"
@@ -27,12 +27,12 @@
   },
   "devDependencies": {
     "@types/node": "^25.0.3",
-    "alepha": "0.14.2",
-    "tsdown": "^0.19.0-beta.
+    "alepha": "0.14.3",
+    "tsdown": "^0.19.0-beta.5",
     "vitest": "^4.0.16"
   },
   "peerDependencies": {
-    "alepha": "0.14.2"
+    "alepha": "0.14.3"
   },
   "scripts": {
     "lint": "alepha lint",
@@ -0,0 +1,87 @@
+import { vi } from "vitest";
+
+export class MockVercelBlobApi {
+  mockStorage = new Map<string, any>();
+
+  put = vi.fn(async (pathname: string, body: any, options: any = {}) => {
+    // Handle ReadableStream from file.stream()
+    let data: Buffer;
+
+    if (body && typeof body.getReader === "function") {
+      // It's a Web ReadableStream
+      const reader = body.getReader();
+      const chunks: Uint8Array[] = [];
+
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) break;
+        chunks.push(value);
+      }
+
+      // Combine all chunks into a single buffer
+      const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+      const combined = new Uint8Array(totalLength);
+      let offset = 0;
+      for (const chunk of chunks) {
+        combined.set(chunk, offset);
+        offset += chunk.length;
+      }
+      data = Buffer.from(combined);
+    } else if (body && body.constructor?.name === "Readable") {
+      // It's a Node.js Readable stream
+      const chunks: Buffer[] = [];
+
+      for await (const chunk of body) {
+        chunks.push(Buffer.from(chunk));
+      }
+
+      data = Buffer.concat(chunks);
+    } else if (Buffer.isBuffer(body)) {
+      data = body;
+    } else if (body instanceof ArrayBuffer) {
+      data = Buffer.from(body);
+    } else {
+      data = Buffer.from(String(body));
+    }
+
+    const blob = {
+      pathname,
+      data,
+      contentType: options.contentType || "application/octet-stream",
+      size: data.length,
+      uploadedAt: new Date(),
+      url: `https://mock-blob.vercel-storage.com${pathname}`,
+    };
+
+    this.mockStorage.set(pathname, blob);
+
+    return {
+      url: blob.url,
+      pathname,
+      size: blob.size,
+      uploadedAt: blob.uploadedAt.toISOString(),
+      contentType: blob.contentType,
+    } as any;
+  });
+
+  head = vi.fn(async (pathname: string, options: any = {}) => {
+    const blob = this.mockStorage.get(pathname);
+
+    if (!blob) {
+      return null;
+    }
+
+    return {
+      url: blob.url,
+      pathname,
+      size: blob.size,
+      uploadedAt: blob.uploadedAt.toISOString(),
+      contentType: blob.contentType,
+    } as any;
+  });
+
+  del = vi.fn(async (pathname: string | string[], options: any = {}) => {
+    const existed = this.mockStorage.delete(String(pathname));
+    return { success: existed } as any;
+  });
+}
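The new MockVercelBlobApi.ts above (its name comes from the import in the spec file below) backs each blob operation with an in-memory `Map` and wraps `put`, `head`, and `del` in `vi.fn`, so tests can assert on calls as well as on stored state. A small sketch under that assumption, with a hypothetical import path:

```ts
import { expect, test } from "vitest";
import { MockVercelBlobApi } from "./MockVercelBlobApi.ts"; // path is assumed

test("put() records the blob in mockStorage", async () => {
  const api = new MockVercelBlobApi();
  await api.put("/images/a.png", Buffer.from("png-bytes"), {
    contentType: "image/png",
  });

  // put is a vi.fn, so call-based assertions work alongside state checks.
  expect(api.put).toHaveBeenCalledTimes(1);
  expect(api.mockStorage.get("/images/a.png")?.contentType).toBe("image/png");
});
```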
@@ -0,0 +1,161 @@
+import { del } from "@vercel/blob";
+import { Alepha } from "alepha";
+import { afterAll, afterEach, beforeEach, describe, test, vi } from "vitest";
+import {
+  TEST_DOCUMENTS_BUCKET,
+  TEST_IMAGES_BUCKET,
+  TestApp,
+  testDeleteFile,
+  testDeleteNonExistentFile,
+  testDownloadAndMetadata,
+  testFileExistence,
+  testFileStream,
+  testNonExistentFile,
+  testNonExistentFileError,
+  testUploadAndExistence,
+  testUploadIntoBuckets,
+} from "../../../alepha/src/bucket/__tests__/shared.ts";
+import { AlephaBucketVercel, VercelFileStorageProvider } from "../index.ts";
+import { MockVercelBlobApi } from "./MockVercelBlobApi.ts";
+import { VercelBlobApi } from "./VercelBlobProvider.ts";
+
+const withMock =
+  process.env.BLOB_READ_WRITE_TOKEN === "vercel_blob_rw_mock_token_123456789";
+
+const alepha = Alepha.create();
+
+if (withMock) {
+  alepha.with({
+    provide: VercelBlobApi,
+    use: MockVercelBlobApi,
+  });
+
+  // Mock fetch to return blob data
+  const originalFetch = globalThis.fetch;
+  (globalThis as any).fetch = vi.fn(
+    async (input: RequestInfo | URL, init?: RequestInit) => {
+      const url = typeof input === "string" ? input : input.toString();
+
+      if (url.startsWith("https://mock-blob.vercel-storage.com")) {
+        const __mockStorage = alepha.inject(MockVercelBlobApi).mockStorage;
+        const pathname = url.replace(
+          "https://mock-blob.vercel-storage.com",
+          "",
+        );
+        const blob = __mockStorage.get(pathname);
+
+        if (!blob) {
+          return new Response(null, { status: 404, statusText: "Not Found" });
+        }
+
+        const stream = new ReadableStream({
+          start(controller) {
+            controller.enqueue(new Uint8Array(blob.data));
+            controller.close();
+          },
+        });
+
+        return new Response(stream, {
+          status: 200,
+          headers: {
+            "Content-Type": blob.contentType,
+            "Content-Length": blob.size.toString(),
+          },
+        });
+      }
+
+      // For non-mock URLs, use original fetch if available
+      if (originalFetch) {
+        return originalFetch(input, init);
+      }
+
+      throw new Error("fetch is not available in this environment");
+    },
+  );
+}
+
+alepha.with(AlephaBucketVercel).with(TestApp);
+const provider = alepha.inject(VercelFileStorageProvider);
+
+describe("VercelFileStorageProvider", () => {
+  const uploadedFiles: string[] = [];
+
+  const cleanup = async () => {
+    if (!withMock && uploadedFiles.length > 0) {
+      try {
+        await del(uploadedFiles, {
+          token: process.env.BLOB_READ_WRITE_TOKEN,
+        });
+      } catch (error) {
+        // Ignore cleanup errors
+      }
+    }
+    uploadedFiles.length = 0;
+  };
+
+  beforeEach(async () => {
+    if (withMock) {
+      alepha.inject(MockVercelBlobApi).mockStorage.clear();
+    }
+  });
+
+  afterEach(async () => {
+    if (withMock) {
+      alepha.inject(MockVercelBlobApi).mockStorage.clear();
+    }
+    await cleanup();
+  });
+
+  afterAll(cleanup);
+
+  const trackFileId = (bucketName: string, fileId: string) => {
+    const storeName = provider.convertName(bucketName);
+    uploadedFiles.push(`${storeName}/${fileId}`);
+  };
+
+  test("should upload a file and return a fileId", async () => {
+    const fileId = await testUploadAndExistence(provider);
+    trackFileId(TEST_IMAGES_BUCKET, fileId);
+  });
+
+  test("should download a file and restore its metadata", async () => {
+    const fileId = await testDownloadAndMetadata(provider);
+    trackFileId(TEST_IMAGES_BUCKET, fileId);
+  });
+
+  test("exists() should return false for a non-existent file", async () => {
+    await testNonExistentFile(provider);
+  });
+
+  test("exists() should return true for an existing file", async () => {
+    const fileId = await testFileExistence(provider);
+    trackFileId(TEST_IMAGES_BUCKET, fileId);
+  });
+
+  test("should delete a file", async () => {
+    await testDeleteFile(provider);
+  });
+
+  test("delete() should not throw for a non-existent file", async () => {
+    await testDeleteNonExistentFile(provider);
+  });
+
+  test("download() should throw FileNotFoundError for a non-existent file", async () => {
+    await testNonExistentFileError(provider);
+  });
+
+  test("should handle uploading to different buckets", async () => {
+    const { docId, imgId } = await testUploadIntoBuckets(provider);
+    trackFileId(TEST_DOCUMENTS_BUCKET, docId);
+    trackFileId(TEST_IMAGES_BUCKET, imgId);
+  });
+
+  test("should handle empty files correctly", async () => {
+    // await testEmptyFiles(provider);
+  });
+
+  test("should be able to upload, stream with metadata", async () => {
+    const fileId = await testFileStream(provider);
+    trackFileId(TEST_IMAGES_BUCKET, fileId);
+  });
+});