opencode-swarm-plugin 0.17.0 → 0.18.0
- package/.beads/issues.jsonl +24 -24
- package/dist/index.js +302 -47
- package/dist/plugin.js +302 -47
- package/examples/commands/swarm.md +59 -0
- package/global-skills/swarm-coordination/SKILL.md +70 -0
- package/package.json +1 -1
- package/src/learning.ts +106 -0
- package/src/rate-limiter.ts +48 -4
- package/src/streams/index.ts +29 -0
- package/src/streams/projections.ts +15 -0
- package/src/streams/store.ts +141 -72
- package/src/swarm.ts +294 -57
package/src/learning.ts
CHANGED
@@ -956,6 +956,112 @@ export class ErrorAccumulator {
   }
 }
 
+// ============================================================================
+// Semantic Memory Integration Helpers
+// ============================================================================
+
+/**
+ * Format memory store instruction for successful task completion
+ *
+ * @param beadId - Bead ID that completed
+ * @param summary - Completion summary
+ * @param filesTouched - Files modified
+ * @param strategy - Decomposition strategy used (if applicable)
+ * @returns Memory store instruction object
+ */
+export function formatMemoryStoreOnSuccess(
+  beadId: string,
+  summary: string,
+  filesTouched: string[],
+  strategy?: DecompositionStrategy,
+): {
+  information: string;
+  metadata: string;
+  instruction: string;
+} {
+  const strategyInfo = strategy ? ` using ${strategy} strategy` : "";
+
+  return {
+    information: `Task "${beadId}" completed successfully${strategyInfo}.
+Key insight: ${summary}
+Files touched: ${filesTouched.join(", ") || "none"}`,
+    metadata: `swarm, success, ${beadId}, ${strategy || "completion"}`,
+    instruction:
+      "Store this successful completion in semantic-memory for future reference",
+  };
+}
+
+/**
+ * Format memory store instruction for architectural problems (3-strike)
+ *
+ * @param beadId - Bead ID that struck out
+ * @param failures - Array of failure attempts
+ * @returns Memory store instruction object
+ */
+export function formatMemoryStoreOn3Strike(
+  beadId: string,
+  failures: Array<{ attempt: string; reason: string }>,
+): {
+  information: string;
+  metadata: string;
+  instruction: string;
+} {
+  const failuresList = failures
+    .map((f, i) => `${i + 1}. ${f.attempt} - Failed: ${f.reason}`)
+    .join("\n");
+
+  return {
+    information: `Architecture problem detected in ${beadId}: Task failed after 3 attempts.
+Attempts:
+${failuresList}
+
+This indicates a structural issue requiring human decision, not another fix attempt.`,
+    metadata: `architecture, 3-strike, ${beadId}, failure`,
+    instruction:
+      "Store this architectural problem in semantic-memory to avoid similar patterns in future",
+  };
+}
+
+/**
+ * Format memory query instruction for task decomposition
+ *
+ * @param task - Task description
+ * @param limit - Max results to return
+ * @returns Memory query instruction object
+ */
+export function formatMemoryQueryForDecomposition(
+  task: string,
+  limit: number = 3,
+): {
+  query: string;
+  limit: number;
+  instruction: string;
+} {
+  return {
+    query: task,
+    limit,
+    instruction:
+      "Query semantic-memory for relevant past learnings about similar tasks before decomposition",
+  };
+}
+
+/**
+ * Format memory validation hint when CASS history helped
+ *
+ * @param beadId - Bead ID that benefited from CASS
+ * @returns Memory validation hint
+ */
+export function formatMemoryValidationHint(beadId: string): {
+  instruction: string;
+  context: string;
+} {
+  return {
+    instruction:
+      "If any semantic-memory entries helped with this task, validate them to reset decay timer",
+    context: `Task ${beadId} completed successfully with assistance from past learnings`,
+  };
+}
+
 // ============================================================================
 // Exports
 // ============================================================================
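For orientation, a minimal sketch of how these helpers are meant to be consumed (the bead ID, summary, and file list below are hypothetical; the actual call sites are presumably wired up in swarm.ts, which also changed in this release):

```ts
import {
  formatMemoryQueryForDecomposition,
  formatMemoryStoreOnSuccess,
} from "./learning";

// Before decomposing, ask semantic-memory for prior art on similar tasks.
const query = formatMemoryQueryForDecomposition("add retry logic to rate limiter");
// -> { query: "add retry logic to rate limiter", limit: 3, instruction: "Query semantic-memory ..." }

// After a bead completes, describe what should be stored.
const store = formatMemoryStoreOnSuccess(
  "bead-123", // hypothetical bead ID
  "Batched SQLite deletes keep recordRequest from blocking",
  ["src/rate-limiter.ts"],
);
console.log(store.metadata); // "swarm, success, bead-123, completion"
```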
package/src/rate-limiter.ts
CHANGED
@@ -35,12 +35,15 @@ import { homedir } from "node:os";
 // SQLite is optional - only available in Bun runtime
 // We use dynamic import to avoid breaking Node.js environments
 interface BunDatabase {
-  run(
+  run(
+    sql: string,
+    params?: unknown[],
+  ): { changes: number; lastInsertRowid: number };
   query<T>(sql: string): {
     get(...params: unknown[]): T | null;
   };
   prepare(sql: string): {
-    run(...params: unknown[]):
+    run(...params: unknown[]): { changes: number; lastInsertRowid: number };
   };
   close(): void;
 }
@@ -453,6 +456,47 @@ export class SqliteRateLimiter implements RateLimiter {
     return { allowed, remaining, resetAt };
   }
 
+  /**
+   * Clean up old rate limit entries in bounded batches
+   *
+   * Limits cleanup to prevent blocking recordRequest on large datasets:
+   * - BATCH_SIZE: 1000 rows per iteration
+   * - MAX_BATCHES: 10 (max 10k rows per cleanup invocation)
+   *
+   * Stops early if fewer than BATCH_SIZE rows deleted (no more to clean).
+   */
+  private cleanup(): void {
+    const BATCH_SIZE = 1000;
+    const MAX_BATCHES = 10;
+    const cutoff = Date.now() - 7_200_000; // 2 hours
+
+    let totalDeleted = 0;
+
+    // Run bounded batches
+    for (let i = 0; i < MAX_BATCHES; i++) {
+      const result = this.db.run(
+        `DELETE FROM rate_limits
+         WHERE rowid IN (
+           SELECT rowid FROM rate_limits
+           WHERE timestamp < ?
+           LIMIT ?
+         )`,
+        [cutoff, BATCH_SIZE],
+      );
+
+      totalDeleted += result.changes;
+
+      // Stop if we deleted less than batch size (no more to delete)
+      if (result.changes < BATCH_SIZE) break;
+    }
+
+    if (totalDeleted > 0) {
+      console.log("[RateLimiter] Cleanup completed:", {
+        deletedRows: totalDeleted,
+      });
+    }
+  }
+
   async recordRequest(agentName: string, endpoint: string): Promise<void> {
     const now = Date.now();
 
@@ -465,9 +509,9 @@ export class SqliteRateLimiter implements RateLimiter {
     stmt.run(agentName, endpoint, "hour", now);
 
     // Opportunistic cleanup of old entries (1% chance to avoid overhead)
+    // Now bounded to prevent blocking on large datasets
     if (Math.random() < 0.01) {
-
-      this.db.run(`DELETE FROM rate_limits WHERE timestamp < ?`, [cutoff]);
+      this.cleanup();
     }
   }
 
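Back-of-the-envelope bounds for the new cleanup, using the constants from the diff above (illustrative arithmetic only):

```ts
const BATCH_SIZE = 1000;
const MAX_BATCHES = 10;
const TRIGGER_PROBABILITY = 0.01; // 1% chance per recordRequest

// At most 10,000 rows are deleted per cleanup() call, so a single
// recordRequest can no longer trigger an unbounded DELETE.
const maxRowsPerCleanup = BATCH_SIZE * MAX_BATCHES; // 10_000

// Amortized over many requests, expected cleanup work per recordRequest
// stays around 100 rows even in the worst case.
const expectedRowsPerRequest = maxRowsPerCleanup * TRIGGER_PROBABILITY; // ≈ 100
```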
package/src/streams/index.ts
CHANGED
@@ -12,6 +12,35 @@ import { existsSync, mkdirSync, appendFileSync } from "node:fs";
 import { join } from "node:path";
 import { homedir } from "node:os";
 
+// ============================================================================
+// Query Timeout Wrapper
+// ============================================================================
+
+const DEFAULT_QUERY_TIMEOUT_MS = 30000; // 30 seconds
+
+/**
+ * Wrap a promise with a timeout
+ *
+ * @param promise - The promise to wrap
+ * @param ms - Timeout in milliseconds
+ * @param operation - Operation name for error message
+ * @returns The result of the promise
+ * @throws Error if timeout is reached
+ */
+export async function withTimeout<T>(
+  promise: Promise<T>,
+  ms: number,
+  operation: string,
+): Promise<T> {
+  const timeout = new Promise<never>((_, reject) =>
+    setTimeout(
+      () => reject(new Error(`${operation} timed out after ${ms}ms`)),
+      ms,
+    ),
+  );
+  return Promise.race([promise, timeout]);
+}
+
 // ============================================================================
 // Debug Logging
 // ============================================================================
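A usage sketch for the new wrapper (the db object and query here are stand-ins, not APIs from this package):

```ts
import { withTimeout } from "./streams";

// Guard a potentially slow query; rejects with
// "load inbox timed out after 30000ms" if it never settles.
async function loadInbox(db: { query(sql: string): Promise<unknown[]> }) {
  return withTimeout(
    db.query("SELECT * FROM messages ORDER BY created_at DESC"),
    30_000, // matches DEFAULT_QUERY_TIMEOUT_MS above
    "load inbox",
  );
}
```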
package/src/streams/projections.ts
CHANGED
@@ -275,6 +275,13 @@ export async function checkConflicts(
     // Check each requested path against the reservation pattern
     for (const path of paths) {
       if (pathMatches(path, reservation.path_pattern)) {
+        console.warn("[SwarmMail] Conflict detected", {
+          path,
+          holder: reservation.agent_name,
+          pattern: reservation.path_pattern,
+          requestedBy: agentName,
+        });
+
         conflicts.push({
           path,
           holder: reservation.agent_name,
@@ -285,6 +292,14 @@ export async function checkConflicts(
     }
   }
 
+  if (conflicts.length > 0) {
+    console.warn("[SwarmMail] Total conflicts detected", {
+      count: conflicts.length,
+      requestedBy: agentName,
+      paths,
+    });
+  }
+
   return conflicts;
 }
 
package/src/streams/store.ts
CHANGED
@@ -35,6 +35,12 @@ export async function appendEvent(
   // Extract common fields
   const { type, project_key, timestamp, ...rest } = event;
 
+  console.log("[SwarmMail] Appending event", {
+    type,
+    projectKey: project_key,
+    timestamp,
+  });
+
   // Insert event
   const result = await db.query<{ id: number; sequence: number }>(
     `INSERT INTO events (type, project_key, timestamp, data)
@@ -49,7 +55,15 @@
   }
   const { id, sequence } = row;
 
+  console.log("[SwarmMail] Event appended", {
+    type,
+    id,
+    sequence,
+    projectKey: project_key,
+  });
+
   // Update materialized views based on event type
+  console.debug("[SwarmMail] Updating materialized views", { type, id });
   await updateMaterializedViews(db, { ...event, id, sequence });
 
   return { ...event, id, sequence };
@@ -88,9 +102,14 @@ export async function appendEvents(
       results.push(enrichedEvent);
     }
     await db.exec("COMMIT");
-  } catch (
-
-
+  } catch (e) {
+    // FIX: Log rollback failures (connection lost, etc)
+    try {
+      await db.exec("ROLLBACK");
+    } catch (rollbackError) {
+      console.error("[SwarmMail] ROLLBACK failed:", rollbackError);
+    }
+    throw e;
   }
 
   return results;
@@ -285,60 +304,69 @@
   db: Awaited<ReturnType<typeof getDatabase>>,
   event: AgentEvent & { id: number; sequence: number },
 ): Promise<void> {
-  … (54 removed lines not rendered in the source diff)
+  try {
+    switch (event.type) {
+      case "agent_registered":
+        await handleAgentRegistered(
+          db,
+          event as AgentRegisteredEvent & { id: number; sequence: number },
+        );
+        break;
+
+      case "agent_active":
+        await db.query(
+          `UPDATE agents SET last_active_at = $1 WHERE project_key = $2 AND name = $3`,
+          [event.timestamp, event.project_key, event.agent_name],
+        );
+        break;
+
+      case "message_sent":
+        await handleMessageSent(
+          db,
+          event as MessageSentEvent & { id: number; sequence: number },
+        );
+        break;
+
+      case "message_read":
+        await db.query(
+          `UPDATE message_recipients SET read_at = $1 WHERE message_id = $2 AND agent_name = $3`,
+          [event.timestamp, event.message_id, event.agent_name],
+        );
+        break;
+
+      case "message_acked":
+        await db.query(
+          `UPDATE message_recipients SET acked_at = $1 WHERE message_id = $2 AND agent_name = $3`,
+          [event.timestamp, event.message_id, event.agent_name],
+        );
+        break;
+
+      case "file_reserved":
+        await handleFileReserved(
+          db,
+          event as FileReservedEvent & { id: number; sequence: number },
+        );
+        break;
+
+      case "file_released":
+        await handleFileReleased(db, event);
+        break;
+
+      // Task events don't need materialized views (query events directly)
+      case "task_started":
+      case "task_progress":
+      case "task_completed":
+      case "task_blocked":
+        // No-op for now - could add task tracking table later
+        break;
+    }
+  } catch (error) {
+    console.error("[SwarmMail] Failed to update materialized views", {
+      eventType: event.type,
+      eventId: event.id,
+      error,
+    });
+    throw error;
   }
 }
 
@@ -369,6 +397,13 @@
   db: Awaited<ReturnType<typeof getDatabase>>,
   event: MessageSentEvent & { id: number; sequence: number },
 ): Promise<void> {
+  console.log("[SwarmMail] Handling message sent event", {
+    from: event.from_agent,
+    to: event.to_agents,
+    subject: event.subject,
+    projectKey: event.project_key,
+  });
+
   // Insert message
   const result = await db.query<{ id: number }>(
     `INSERT INTO messages (project_key, from_agent, subject, body, thread_id, importance, ack_required, created_at)
@@ -392,14 +427,22 @@
   }
   const messageId = msgRow.id;
 
-  //
-
+  // FIX: Bulk insert recipients to avoid N+1 queries
+  if (event.to_agents.length > 0) {
+    const values = event.to_agents.map((_, i) => `($1, $${i + 2})`).join(", ");
+    const params = [messageId, ...event.to_agents];
+
     await db.query(
       `INSERT INTO message_recipients (message_id, agent_name)
-      VALUES
+       VALUES ${values}
        ON CONFLICT DO NOTHING`,
-
+      params,
     );
+
+    console.log("[SwarmMail] Message recipients inserted", {
+      messageId,
+      recipientCount: event.to_agents.length,
+    });
   }
 }
 
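For clarity, the placeholder expansion the recipient bulk insert produces for a hypothetical three-recipient message:

```ts
// Hypothetical inputs, mirroring the mapping in the hunk above.
const messageId = 42;
const toAgents = ["alice", "bob", "carol"];

const values = toAgents.map((_, i) => `($1, $${i + 2})`).join(", ");
const params = [messageId, ...toAgents];

console.log(values); // ($1, $2), ($1, $3), ($1, $4)
console.log(params); // [42, "alice", "bob", "carol"]
// One INSERT with three VALUES tuples replaces three separate round trips.
```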
@@ -407,20 +450,46 @@
   db: Awaited<ReturnType<typeof getDatabase>>,
   event: FileReservedEvent & { id: number; sequence: number },
 ): Promise<void> {
-
+  console.log("[SwarmMail] Handling file reservation event", {
+    agent: event.agent_name,
+    paths: event.paths,
+    exclusive: event.exclusive,
+    projectKey: event.project_key,
+  });
+
+  // FIX: Bulk insert reservations to avoid N+1 queries
+  if (event.paths.length > 0) {
+    // Each path gets its own VALUES clause with placeholders:
+    // ($1=project_key, $2=agent_name, $3=path1, $4=exclusive, $5=reason, $6=created_at, $7=expires_at)
+    // ($1=project_key, $2=agent_name, $8=path2, $4=exclusive, $5=reason, $6=created_at, $7=expires_at)
+    // etc.
+    const values = event.paths
+      .map(
+        (_, i) =>
+          `($1, $2, $${i + 3}, $${event.paths.length + 3}, $${event.paths.length + 4}, $${event.paths.length + 5}, $${event.paths.length + 6})`,
+      )
+      .join(", ");
+
+    const params = [
+      event.project_key, // $1
+      event.agent_name, // $2
+      ...event.paths, // $3, $4, ... (one per path)
+      event.exclusive, // $N+3
+      event.reason || null, // $N+4
+      event.timestamp, // $N+5
+      event.expires_at, // $N+6
+    ];
+
     await db.query(
       `INSERT INTO reservations (project_key, agent_name, path_pattern, exclusive, reason, created_at, expires_at)
-      VALUES
-
-      event.project_key,
-      event.agent_name,
-      path,
-      event.exclusive,
-      event.reason || null,
-      event.timestamp,
-      event.expires_at,
-      ],
+       VALUES ${values}`,
+      params,
     );
+
+    console.log("[SwarmMail] File reservations inserted", {
+      agent: event.agent_name,
+      reservationCount: event.paths.length,
+    });
   }
 }
 
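The reservation placeholder numbering is easier to see with a concrete (hypothetical) two-path example; note that the shared fields sit after all of the path placeholders, so for two paths exclusive is $5 rather than the $4 suggested by the second line of the inline comment:

```ts
// Hypothetical inputs, mirroring the mapping in the hunk above.
const paths = ["src/a.ts", "src/b.ts"]; // N = 2 paths

const values = paths
  .map(
    (_, i) =>
      `($1, $2, $${i + 3}, $${paths.length + 3}, $${paths.length + 4}, $${paths.length + 5}, $${paths.length + 6})`,
  )
  .join(", ");

console.log(values);
// ($1, $2, $3, $5, $6, $7, $8), ($1, $2, $4, $5, $6, $7, $8)
// $3/$4 = the two paths; $5 = exclusive, $6 = reason, $7 = created_at, $8 = expires_at
```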