@donkeylabs/server 2.0.19 → 2.0.21
This diff compares publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
- package/docs/caching-strategies.md +677 -0
- package/docs/dev-experience.md +656 -0
- package/docs/hot-reload-limitations.md +166 -0
- package/docs/load-testing.md +974 -0
- package/docs/plugin-registry-design.md +1064 -0
- package/docs/production.md +1229 -0
- package/docs/workflows.md +90 -3
- package/package.json +1 -1
- package/src/admin/routes.ts +153 -0
- package/src/core/cron.ts +90 -9
- package/src/core/index.ts +31 -0
- package/src/core/job-adapter-kysely.ts +176 -73
- package/src/core/job-adapter-sqlite.ts +10 -0
- package/src/core/jobs.ts +112 -17
- package/src/core/migrations/workflows/002_add_metadata_column.ts +28 -0
- package/src/core/process-adapter-kysely.ts +62 -21
- package/src/core/storage-adapter-local.test.ts +199 -0
- package/src/core/storage.test.ts +197 -0
- package/src/core/workflow-adapter-kysely.ts +66 -19
- package/src/core/workflow-executor.ts +239 -0
- package/src/core/workflow-proxy.ts +238 -0
- package/src/core/workflow-socket.ts +449 -0
- package/src/core/workflow-state-machine.ts +593 -0
- package/src/core/workflows.test.ts +758 -0
- package/src/core/workflows.ts +705 -595
- package/src/core.ts +17 -6
- package/src/index.ts +14 -0
- package/src/testing/database.test.ts +263 -0
- package/src/testing/database.ts +173 -0
- package/src/testing/e2e.test.ts +189 -0
- package/src/testing/e2e.ts +272 -0
- package/src/testing/index.ts +18 -0
--- a/package/src/core/process-adapter-kysely.ts
+++ b/package/src/core/process-adapter-kysely.ts
@@ -43,6 +43,7 @@ export class KyselyProcessAdapter implements ProcessAdapter {
   private db: Kysely<Database>;
   private cleanupTimer?: ReturnType<typeof setInterval>;
   private cleanupDays: number;
+  private stopped = false;
 
   constructor(db: Kysely<any>, config: KyselyProcessAdapterConfig = {}) {
     this.db = db as Kysely<Database>;
@@ -57,7 +58,16 @@ export class KyselyProcessAdapter implements ProcessAdapter {
     }
   }
 
+  /** Check if adapter is stopped (for safe database access) */
+  private checkStopped(): boolean {
+    return this.stopped;
+  }
+
   async create(process: Omit<ManagedProcess, "id">): Promise<ManagedProcess> {
+    if (this.checkStopped()) {
+      throw new Error("ProcessAdapter has been stopped");
+    }
+
     const id = `proc_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`;
 
     await this.db
@@ -85,17 +95,27 @@ export class KyselyProcessAdapter implements ProcessAdapter {
   }
 
   async get(processId: string): Promise<ManagedProcess | null> {
-    const row = await this.db
-      .selectFrom("__donkeylabs_processes__")
-      .selectAll()
-      .where("id", "=", processId)
-      .executeTakeFirst();
+    if (this.checkStopped()) return null;
 
-    if (!row) return null;
-    return this.rowToProcess(row);
+    try {
+      const row = await this.db
+        .selectFrom("__donkeylabs_processes__")
+        .selectAll()
+        .where("id", "=", processId)
+        .executeTakeFirst();
+
+      if (!row) return null;
+      return this.rowToProcess(row);
+    } catch (err: any) {
+      // Silently ignore errors if adapter was stopped during query
+      if (this.stopped && err?.message?.includes("destroyed")) return null;
+      throw err;
+    }
   }
 
   async update(processId: string, updates: Partial<ManagedProcess>): Promise<void> {
+    if (this.checkStopped()) return;
+
     const updateData: Partial<ProcessesTable> = {};
 
     if (updates.pid !== undefined) {
@@ -137,14 +157,22 @@ export class KyselyProcessAdapter implements ProcessAdapter {
 
     if (Object.keys(updateData).length === 0) return;
 
-    await this.db
-      .updateTable("__donkeylabs_processes__")
-      .set(updateData)
-      .where("id", "=", processId)
-      .execute();
+    try {
+      await this.db
+        .updateTable("__donkeylabs_processes__")
+        .set(updateData)
+        .where("id", "=", processId)
+        .execute();
+    } catch (err: any) {
+      // Silently ignore errors if adapter was stopped during query
+      if (this.stopped && err?.message?.includes("destroyed")) return;
+      throw err;
+    }
   }
 
   async delete(processId: string): Promise<boolean> {
+    if (this.checkStopped()) return false;
+
     // Check if exists first since BunSqliteDialect doesn't report numDeletedRows properly
     const exists = await this.db
       .selectFrom("__donkeylabs_processes__")
@@ -163,6 +191,8 @@ export class KyselyProcessAdapter implements ProcessAdapter {
   }
 
   async getByName(name: string): Promise<ManagedProcess[]> {
+    if (this.checkStopped()) return [];
+
     const rows = await this.db
       .selectFrom("__donkeylabs_processes__")
       .selectAll()
@@ -174,18 +204,28 @@ export class KyselyProcessAdapter implements ProcessAdapter {
   }
 
   async getRunning(): Promise<ManagedProcess[]> {
-    const rows = await this.db
-      .selectFrom("__donkeylabs_processes__")
-      .selectAll()
-      .where((eb) =>
-        eb.or([eb("status", "=", "running"), eb("status", "=", "spawning")])
-      )
-      .execute();
+    if (this.checkStopped()) return [];
 
-    return rows.map((r) => this.rowToProcess(r));
+    try {
+      const rows = await this.db
+        .selectFrom("__donkeylabs_processes__")
+        .selectAll()
+        .where((eb) =>
+          eb.or([eb("status", "=", "running"), eb("status", "=", "spawning")])
+        )
+        .execute();
+
+      return rows.map((r) => this.rowToProcess(r));
+    } catch (err: any) {
+      // Silently ignore errors if adapter was stopped during query
+      if (this.stopped && err?.message?.includes("destroyed")) return [];
+      throw err;
+    }
   }
 
   async getOrphaned(): Promise<ManagedProcess[]> {
+    if (this.checkStopped()) return [];
+
     const rows = await this.db
       .selectFrom("__donkeylabs_processes__")
       .selectAll()
@@ -223,7 +263,7 @@ export class KyselyProcessAdapter implements ProcessAdapter {
 
   /** Clean up old stopped/crashed processes */
   private async cleanup(): Promise<void> {
-    if (this.cleanupDays <= 0) return;
+    if (this.cleanupDays <= 0 || this.checkStopped()) return;
 
     try {
       const cutoff = new Date();
@@ -252,6 +292,7 @@ export class KyselyProcessAdapter implements ProcessAdapter {
 
   /** Stop the adapter and cleanup timer */
   stop(): void {
+    this.stopped = true;
    if (this.cleanupTimer) {
      clearInterval(this.cleanupTimer);
      this.cleanupTimer = undefined;
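Note: every hunk above serves one shutdown guard. `stop()` now flips a `stopped` flag before clearing the cleanup timer; after that, reads short-circuit with empty results, `update` and `delete` become no-ops, `create` throws, and queries already in flight swallow only the "destroyed" error raised when the underlying Kysely connection is torn down mid-query. A minimal sketch of the pattern in isolation (the `Repo` class and its `query` callback are hypothetical; only the guard and catch logic mirror the diff):

```ts
// Hypothetical sketch of the stop-guard pattern above; not the package's API.
class Repo<T> {
  private stopped = false;

  constructor(private query: (id: string) => Promise<T | undefined>) {}

  async get(id: string): Promise<T | null> {
    if (this.stopped) return null; // short-circuit once stop() was called
    try {
      return (await this.query(id)) ?? null;
    } catch (err: any) {
      // A query may already be in flight when stop() lands: swallow only
      // the torn-down-connection failure, re-throw everything else.
      if (this.stopped && err?.message?.includes("destroyed")) return null;
      throw err;
    }
  }

  stop(): void {
    this.stopped = true; // flipped before any timers are cleared
  }
}
```

Setting the flag before clearing the timer matters: a cleanup pass scheduled in the same tick hits `checkStopped()` and bails instead of querying a dead connection.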
--- /dev/null
+++ b/package/src/core/storage-adapter-local.test.ts
@@ -0,0 +1,199 @@
+import { describe, it, expect, beforeEach, afterEach } from "bun:test";
+import { rm, mkdir, readFile, writeFile } from "node:fs/promises";
+import { join } from "node:path";
+import { existsSync } from "node:fs";
+import { createStorage } from "./storage";
+
+const TEST_DIR = "/tmp/donkeylabs-storage-test";
+
+describe("LocalStorageAdapter", () => {
+  beforeEach(async () => {
+    // Clean up test directory
+    if (existsSync(TEST_DIR)) {
+      await rm(TEST_DIR, { recursive: true });
+    }
+    await mkdir(TEST_DIR, { recursive: true });
+  });
+
+  afterEach(async () => {
+    // Clean up test directory
+    if (existsSync(TEST_DIR)) {
+      await rm(TEST_DIR, { recursive: true });
+    }
+  });
+
+  it("should upload and download a file", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+      baseUrl: "/files",
+    });
+
+    const result = await storage.upload({
+      key: "test/hello.txt",
+      body: "Hello, Local Storage!",
+      contentType: "text/plain",
+      visibility: "public",
+    });
+
+    expect(result.key).toBe("test/hello.txt");
+    expect(result.size).toBe(21);
+    expect(result.url).toBe("/files/test/hello.txt");
+
+    // Verify file exists on disk
+    const filePath = join(TEST_DIR, "test/hello.txt");
+    expect(existsSync(filePath)).toBe(true);
+    const content = await readFile(filePath, "utf-8");
+    expect(content).toBe("Hello, Local Storage!");
+
+    // Download the file
+    const downloaded = await storage.download("test/hello.txt");
+    expect(downloaded).not.toBeNull();
+
+    const reader = downloaded!.body.getReader();
+    const chunks: Uint8Array[] = [];
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      chunks.push(value);
+    }
+    const text = new TextDecoder().decode(Buffer.concat(chunks));
+    expect(text).toBe("Hello, Local Storage!");
+  });
+
+  it("should store and retrieve metadata", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+    });
+
+    await storage.upload({
+      key: "with-meta.txt",
+      body: "test",
+      contentType: "text/plain",
+      metadata: { author: "tester", version: "1.0" },
+      visibility: "private",
+    });
+
+    const head = await storage.head("with-meta.txt");
+    expect(head).not.toBeNull();
+    expect(head!.contentType).toBe("text/plain");
+    expect(head!.metadata).toEqual({ author: "tester", version: "1.0" });
+    expect(head!.visibility).toBe("private");
+
+    // Verify metadata file exists
+    const metaPath = join(TEST_DIR, ".with-meta.txt.meta.json");
+    expect(existsSync(metaPath)).toBe(true);
+  });
+
+  it("should delete files and metadata", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+    });
+
+    await storage.upload({
+      key: "to-delete.txt",
+      body: "delete me",
+      metadata: { temp: "true" },
+    });
+
+    const filePath = join(TEST_DIR, "to-delete.txt");
+    const metaPath = join(TEST_DIR, ".to-delete.txt.meta.json");
+
+    expect(existsSync(filePath)).toBe(true);
+    expect(existsSync(metaPath)).toBe(true);
+
+    await storage.delete("to-delete.txt");
+
+    expect(existsSync(filePath)).toBe(false);
+    expect(existsSync(metaPath)).toBe(false);
+  });
+
+  it("should list files", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+    });
+
+    await storage.upload({ key: "a/1.txt", body: "a1" });
+    await storage.upload({ key: "a/2.txt", body: "a2" });
+    await storage.upload({ key: "b/1.txt", body: "b1" });
+    await storage.upload({ key: "root.txt", body: "root" });
+
+    const allFiles = await storage.list();
+    // Filter out metadata files from count
+    const dataFiles = allFiles.files.filter((f) => !f.key.includes(".meta.json"));
+    expect(dataFiles.length).toBe(4);
+  });
+
+  it("should copy files", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+    });
+
+    await storage.upload({
+      key: "original.txt",
+      body: "Original",
+      contentType: "text/plain",
+    });
+
+    await storage.copy({
+      source: "original.txt",
+      destination: "copy.txt",
+    });
+
+    expect(existsSync(join(TEST_DIR, "original.txt"))).toBe(true);
+    expect(existsSync(join(TEST_DIR, "copy.txt"))).toBe(true);
+
+    const copyContent = await readFile(join(TEST_DIR, "copy.txt"), "utf-8");
+    expect(copyContent).toBe("Original");
+  });
+
+  it("should move files", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+    });
+
+    await storage.upload({
+      key: "to-move.txt",
+      body: "Moving",
+    });
+
+    await storage.move("to-move.txt", "moved.txt");
+
+    expect(existsSync(join(TEST_DIR, "to-move.txt"))).toBe(false);
+    expect(existsSync(join(TEST_DIR, "moved.txt"))).toBe(true);
+  });
+
+  it("should handle various body types", async () => {
+    const storage = createStorage({
+      provider: "local",
+      directory: TEST_DIR,
+    });
+
+    // String body
+    await storage.upload({
+      key: "string.txt",
+      body: "string content",
+    });
+
+    // Buffer body
+    await storage.upload({
+      key: "buffer.txt",
+      body: Buffer.from("buffer content"),
+    });
+
+    // Uint8Array body
+    await storage.upload({
+      key: "uint8.txt",
+      body: new TextEncoder().encode("uint8 content"),
+    });
+
+    expect(await readFile(join(TEST_DIR, "string.txt"), "utf-8")).toBe("string content");
+    expect(await readFile(join(TEST_DIR, "buffer.txt"), "utf-8")).toBe("buffer content");
+    expect(await readFile(join(TEST_DIR, "uint8.txt"), "utf-8")).toBe("uint8 content");
+  });
+});
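Note: two behaviors pinned down by the tests above are worth flagging: per-file metadata is persisted as a dot-prefixed JSON sidecar next to the data file (`.<name>.meta.json`), and `delete` removes the data file and its sidecar together. A usage sketch of the API as the tests exercise it; the package-root import path is an assumption (the tests import the internal `./storage` module):

```ts
// Sketch based on the options the tests exercise; the root export path
// for createStorage is assumed, not confirmed by this diff.
import { createStorage } from "@donkeylabs/server";

const storage = createStorage({
  provider: "local",
  directory: "/var/data/uploads",
  baseUrl: "/files", // public URL becomes `${baseUrl}/${key}`
});

const uploaded = await storage.upload({
  key: "report.txt",
  body: "quarterly numbers", // string | Buffer | Uint8Array all accepted
  contentType: "text/plain",
  metadata: { owner: "ops" }, // persisted to the .report.txt.meta.json sidecar
  visibility: "private",
});

console.log(uploaded.url, uploaded.size); // "/files/report.txt", 17
```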
--- /dev/null
+++ b/package/src/core/storage.test.ts
@@ -0,0 +1,197 @@
+import { describe, it, expect } from "bun:test";
+import { createStorage, MemoryStorageAdapter } from "./storage";
+
+describe("Storage Service", () => {
+  describe("MemoryStorageAdapter", () => {
+    it("should upload and download a file", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      const result = await storage.upload({
+        key: "test/hello.txt",
+        body: "Hello, World!",
+        contentType: "text/plain",
+        visibility: "public",
+      });
+
+      expect(result.key).toBe("test/hello.txt");
+      expect(result.size).toBe(13);
+      expect(result.url).toBe("memory://test/hello.txt");
+
+      const downloaded = await storage.download("test/hello.txt");
+      expect(downloaded).not.toBeNull();
+      expect(downloaded!.contentType).toBe("text/plain");
+
+      // Read stream content
+      const reader = downloaded!.body.getReader();
+      const { value } = await reader.read();
+      const text = new TextDecoder().decode(value);
+      expect(text).toBe("Hello, World!");
+    });
+
+    it("should return null for non-existent file", async () => {
+      const storage = createStorage({ provider: "memory" });
+      const result = await storage.download("nonexistent.txt");
+      expect(result).toBeNull();
+    });
+
+    it("should check file existence", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({
+        key: "exists.txt",
+        body: "test",
+      });
+
+      expect(await storage.exists("exists.txt")).toBe(true);
+      expect(await storage.exists("doesnt-exist.txt")).toBe(false);
+    });
+
+    it("should delete a file", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({
+        key: "to-delete.txt",
+        body: "delete me",
+      });
+
+      expect(await storage.delete("to-delete.txt")).toBe(true);
+      expect(await storage.exists("to-delete.txt")).toBe(false);
+      expect(await storage.delete("to-delete.txt")).toBe(false); // Already deleted
+    });
+
+    it("should list files with prefix", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({ key: "users/1/avatar.png", body: "img" });
+      await storage.upload({ key: "users/1/banner.png", body: "img" });
+      await storage.upload({ key: "users/2/avatar.png", body: "img" });
+      await storage.upload({ key: "products/1.jpg", body: "img" });
+
+      const usersResult = await storage.list({ prefix: "users/" });
+      expect(usersResult.files.length).toBe(3);
+
+      const user1Result = await storage.list({ prefix: "users/1/" });
+      expect(user1Result.files.length).toBe(2);
+
+      const allResult = await storage.list();
+      expect(allResult.files.length).toBe(4);
+    });
+
+    it("should list with delimiter for directory-like listing", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({ key: "a/1.txt", body: "a" });
+      await storage.upload({ key: "a/2.txt", body: "a" });
+      await storage.upload({ key: "b/1.txt", body: "b" });
+      await storage.upload({ key: "root.txt", body: "root" });
+
+      const result = await storage.list({ delimiter: "/" });
+      expect(result.files.length).toBe(1); // root.txt only
+      expect(result.files[0].key).toBe("root.txt");
+      expect(result.prefixes).toContain("a/");
+      expect(result.prefixes).toContain("b/");
+    });
+
+    it("should get file metadata with head", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({
+        key: "meta-test.txt",
+        body: "Hello",
+        contentType: "text/plain",
+        metadata: { author: "test" },
+        visibility: "private",
+      });
+
+      const head = await storage.head("meta-test.txt");
+      expect(head).not.toBeNull();
+      expect(head!.key).toBe("meta-test.txt");
+      expect(head!.size).toBe(5);
+      expect(head!.contentType).toBe("text/plain");
+      expect(head!.metadata).toEqual({ author: "test" });
+      expect(head!.visibility).toBe("private");
+    });
+
+    it("should copy a file", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({
+        key: "original.txt",
+        body: "Original content",
+        contentType: "text/plain",
+      });
+
+      const result = await storage.copy({
+        source: "original.txt",
+        destination: "copy.txt",
+      });
+
+      expect(result.key).toBe("copy.txt");
+      expect(await storage.exists("original.txt")).toBe(true);
+      expect(await storage.exists("copy.txt")).toBe(true);
+    });
+
+    it("should move a file", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({
+        key: "to-move.txt",
+        body: "Moving",
+      });
+
+      const result = await storage.move("to-move.txt", "moved.txt");
+
+      expect(result.key).toBe("moved.txt");
+      expect(await storage.exists("to-move.txt")).toBe(false);
+      expect(await storage.exists("moved.txt")).toBe(true);
+    });
+
+    it("should delete multiple files", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      await storage.upload({ key: "batch/1.txt", body: "1" });
+      await storage.upload({ key: "batch/2.txt", body: "2" });
+      await storage.upload({ key: "batch/3.txt", body: "3" });
+
+      const result = await storage.deleteMany([
+        "batch/1.txt",
+        "batch/2.txt",
+        "batch/nonexistent.txt",
+      ]);
+
+      expect(result.deleted).toContain("batch/1.txt");
+      expect(result.deleted).toContain("batch/2.txt");
+      expect(result.errors).toContain("batch/nonexistent.txt");
+      expect(await storage.exists("batch/3.txt")).toBe(true);
+    });
+
+    it("should handle pagination", async () => {
+      const storage = createStorage({ provider: "memory" });
+
+      // Create 10 files
+      for (let i = 0; i < 10; i++) {
+        await storage.upload({ key: `page/${i}.txt`, body: String(i) });
+      }
+
+      // Get first page
+      const page1 = await storage.list({ prefix: "page/", limit: 3 });
+      expect(page1.files.length).toBe(3);
+      expect(page1.hasMore).toBe(true);
+      expect(page1.cursor).not.toBeNull();
+
+      // Get second page
+      const page2 = await storage.list({
+        prefix: "page/",
+        limit: 3,
+        cursor: page1.cursor!,
+      });
+      expect(page2.files.length).toBe(3);
+      expect(page2.hasMore).toBe(true);
+
+      // Ensure no duplicates
+      const page1Keys = page1.files.map((f) => f.key);
+      const page2Keys = page2.files.map((f) => f.key);
+      expect(page1Keys.some((k) => page2Keys.includes(k))).toBe(false);
+    });
+  });
+});
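Note: the pagination test fixes the listing contract: `list` resolves to `{ files, prefixes, cursor, hasMore }`, and passing a page's `cursor` back yields non-overlapping results. A sketch of draining a prefix under that contract; `listAll` is a hypothetical helper, not part of the package, and the import path is assumed as above:

```ts
import { createStorage } from "@donkeylabs/server"; // export path assumed

const storage = createStorage({ provider: "memory" });

// Hypothetical helper: collect every key under a prefix by following
// the cursor/hasMore contract the pagination test asserts.
async function listAll(prefix: string): Promise<string[]> {
  const keys: string[] = [];
  let cursor: string | undefined;
  do {
    const page = await storage.list({ prefix, limit: 100, cursor });
    keys.push(...page.files.map((f) => f.key));
    cursor = page.hasMore ? page.cursor! : undefined;
  } while (cursor !== undefined);
  return keys;
}
```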