opencode-swarm-plugin 0.2.0 → 0.3.0

This diff represents the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -24,7 +24,7 @@
24
24
  {"id":"opencode-swarm-plugin-1gtm","title":"Update test bead","description":"Original description","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:11:39.569462-08:00","updated_at":"2025-12-08T08:11:42.010151-08:00","closed_at":"2025-12-08T08:11:42.010151-08:00"}
25
25
  {"id":"opencode-swarm-plugin-1i8","title":"Test bead","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:23:49.873224-08:00","updated_at":"2025-12-07T19:24:13.408304-08:00","closed_at":"2025-12-07T19:24:13.408304-08:00"}
26
26
  {"id":"opencode-swarm-plugin-1j0t","title":"Bead to close","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T07:46:49.618076-08:00","updated_at":"2025-12-08T07:46:49.644609-08:00","closed_at":"2025-12-08T07:46:49.644609-08:00"}
27
- {"id":"opencode-swarm-plugin-1li","title":"Test parent","description":"","status":"open","priority":2,"issue_type":"epic","created_at":"2025-12-07T19:35:34.909384-08:00","updated_at":"2025-12-07T19:35:34.909384-08:00"}
27
+ {"id":"opencode-swarm-plugin-1li","title":"Test parent","description":"","status":"closed","priority":2,"issue_type":"epic","created_at":"2025-12-07T19:35:34.909384-08:00","updated_at":"2025-12-08T08:35:24.648164-08:00","closed_at":"2025-12-08T08:35:24.648164-08:00"}
28
28
  {"id":"opencode-swarm-plugin-1li.1","title":"Test child","description":"","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-07T19:35:39.491063-08:00","updated_at":"2025-12-07T19:35:39.491063-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-1li.1","depends_on_id":"opencode-swarm-plugin-1li","type":"parent-child","created_at":"2025-12-07T19:35:39.491364-08:00","created_by":"daemon"}]}
29
29
  {"id":"opencode-swarm-plugin-1lkv","title":"Query test bead","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:11:39.223436-08:00","updated_at":"2025-12-08T08:11:41.799207-08:00","closed_at":"2025-12-08T08:11:41.799207-08:00"}
30
30
  {"id":"opencode-swarm-plugin-1mre","title":"Test bead minimal","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T07:49:09.126035-08:00","updated_at":"2025-12-08T07:49:11.799933-08:00","closed_at":"2025-12-08T07:49:11.799933-08:00"}
@@ -67,6 +67,11 @@
67
67
  {"id":"opencode-swarm-plugin-37az","title":"Thread link test bead","description":"[thread:test-thread-123]","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:05:34.247814-08:00","updated_at":"2025-12-08T08:05:36.051352-08:00","closed_at":"2025-12-08T08:05:36.051352-08:00"}
68
68
  {"id":"opencode-swarm-plugin-3c0h","title":"Limit test bead 3","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:57:06.042676-08:00","updated_at":"2025-12-07T19:57:08.336974-08:00","closed_at":"2025-12-07T19:57:08.336974-08:00"}
69
69
  {"id":"opencode-swarm-plugin-3e3o","title":"Thread link test bead","description":"Important context here\n\n[thread:test-thread-789]","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T07:46:50.71229-08:00","updated_at":"2025-12-08T07:46:52.478079-08:00","closed_at":"2025-12-08T07:46:52.478079-08:00"}
70
+ {"id":"opencode-swarm-plugin-3fa7","title":"Add rate limiting to Agent Mail","description":"Implement per-agent rate limiting with Redis backend to prevent abuse","status":"closed","priority":2,"issue_type":"epic","created_at":"2025-12-08T08:28:07.547069-08:00","updated_at":"2025-12-08T08:29:09.032995-08:00","closed_at":"2025-12-08T08:29:09.032995-08:00"}
71
+ {"id":"opencode-swarm-plugin-3fa7.1","title":"Add Redis dependency and connection","description":"Add redis-py to requirements.txt and create Redis connection helper","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:28:14.489575-08:00","updated_at":"2025-12-08T08:28:54.193001-08:00","closed_at":"2025-12-08T08:28:54.193001-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-3fa7.1","depends_on_id":"opencode-swarm-plugin-3fa7","type":"parent-child","created_at":"2025-12-08T08:28:14.489983-08:00","created_by":"daemon"}]}
72
+ {"id":"opencode-swarm-plugin-3fa7.2","title":"Implement rate limiter class","description":"Create RateLimiter class using Redis sorted sets for sliding window rate limiting","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:28:14.527429-08:00","updated_at":"2025-12-08T08:29:08.941471-08:00","closed_at":"2025-12-08T08:29:08.941471-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-3fa7.2","depends_on_id":"opencode-swarm-plugin-3fa7","type":"parent-child","created_at":"2025-12-08T08:28:14.527749-08:00","created_by":"daemon"}]}
73
+ {"id":"opencode-swarm-plugin-3fa7.3","title":"Integrate rate limiting into API endpoints","description":"Add rate limiting middleware to send_message and other high-traffic endpoints","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:28:14.563509-08:00","updated_at":"2025-12-08T08:29:08.973236-08:00","closed_at":"2025-12-08T08:29:08.973236-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-3fa7.3","depends_on_id":"opencode-swarm-plugin-3fa7","type":"parent-child","created_at":"2025-12-08T08:28:14.563815-08:00","created_by":"daemon"}]}
74
+ {"id":"opencode-swarm-plugin-3fa7.4","title":"Add rate limit configuration and tests","description":"Add rate limit config to docker-compose.yml and create integration tests","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:28:14.601283-08:00","updated_at":"2025-12-08T08:29:09.002331-08:00","closed_at":"2025-12-08T08:29:09.002331-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-3fa7.4","depends_on_id":"opencode-swarm-plugin-3fa7","type":"parent-child","created_at":"2025-12-08T08:28:14.601636-08:00","created_by":"daemon"}]}
70
75
  {"id":"opencode-swarm-plugin-3fw","title":"Integration test epic","description":"Testing epic creation","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-07T19:37:34.859822-08:00","updated_at":"2025-12-07T19:37:36.450884-08:00","closed_at":"2025-12-07T19:37:36.450884-08:00"}
71
76
  {"id":"opencode-swarm-plugin-3fw.1","title":"Subtask 1","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:37:34.888133-08:00","updated_at":"2025-12-07T19:37:36.473973-08:00","closed_at":"2025-12-07T19:37:36.473973-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-3fw.1","depends_on_id":"opencode-swarm-plugin-3fw","type":"parent-child","created_at":"2025-12-07T19:37:34.888439-08:00","created_by":"daemon"}]}
72
77
  {"id":"opencode-swarm-plugin-3fw.2","title":"Subtask 2","description":"","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-07T19:37:34.915829-08:00","updated_at":"2025-12-07T19:37:36.494929-08:00","closed_at":"2025-12-07T19:37:36.494929-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-3fw.2","depends_on_id":"opencode-swarm-plugin-3fw","type":"parent-child","created_at":"2025-12-07T19:37:34.916128-08:00","created_by":"daemon"}]}
@@ -157,6 +162,11 @@
157
162
  {"id":"opencode-swarm-plugin-6ep7","title":"Test bug with priority","description":"This is a critical bug","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-12-08T08:11:38.996265-08:00","updated_at":"2025-12-08T08:11:41.663466-08:00","closed_at":"2025-12-08T08:11:41.663466-08:00"}
158
163
  {"id":"opencode-swarm-plugin-6jd","title":"Test bead minimal","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:37:33.768675-08:00","updated_at":"2025-12-07T19:37:35.990589-08:00","closed_at":"2025-12-07T19:37:35.990589-08:00"}
159
164
  {"id":"opencode-swarm-plugin-6k55","title":"Test bug with priority","description":"This is a critical bug","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-12-07T19:57:05.655726-08:00","updated_at":"2025-12-07T19:57:08.099897-08:00","closed_at":"2025-12-07T19:57:08.099897-08:00"}
165
+ {"id":"opencode-swarm-plugin-6ocg","title":"Add rate limiting to Agent Mail server","description":"Implement per-agent rate limiting with Redis primary and SQLite fallback. Prevents abuse and runaway agents.","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-08T08:34:33.28449-08:00","updated_at":"2025-12-08T08:35:41.856259-08:00","closed_at":"2025-12-08T08:35:41.856259-08:00"}
166
+ {"id":"opencode-swarm-plugin-6ocg.1","title":"Add rate limiter module with Redis/SQLite backends","description":"Create rate_limiter.py with RateLimiter class. Redis primary (sliding window with sorted sets), SQLite fallback (COUNT query). Configurable limits per endpoint.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-08T08:34:44.240115-08:00","updated_at":"2025-12-08T08:35:42.506425-08:00","closed_at":"2025-12-08T08:35:42.506425-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-6ocg.1","depends_on_id":"opencode-swarm-plugin-6ocg","type":"parent-child","created_at":"2025-12-08T08:34:44.240449-08:00","created_by":"daemon"}]}
167
+ {"id":"opencode-swarm-plugin-6ocg.2","title":"Integrate rate limiting into send_message endpoint","description":"Add rate limit check to tool_send_message. Return 429 with retry-after header when exceeded. Track by agent_id.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:34:44.278972-08:00","updated_at":"2025-12-08T08:35:43.041616-08:00","closed_at":"2025-12-08T08:35:43.041616-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-6ocg.2","depends_on_id":"opencode-swarm-plugin-6ocg","type":"parent-child","created_at":"2025-12-08T08:34:44.279328-08:00","created_by":"daemon"}]}
168
+ {"id":"opencode-swarm-plugin-6ocg.3","title":"Add rate limit configuration and Docker setup","description":"Add Redis to docker-compose.yml. Add RATE_LIMIT_* env vars. Update requirements.txt with redis-py.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:34:44.317207-08:00","updated_at":"2025-12-08T08:35:43.593244-08:00","closed_at":"2025-12-08T08:35:43.593244-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-6ocg.3","depends_on_id":"opencode-swarm-plugin-6ocg","type":"parent-child","created_at":"2025-12-08T08:34:44.317517-08:00","created_by":"daemon"}]}
169
+ {"id":"opencode-swarm-plugin-6ocg.4","title":"Add rate limiter integration tests","description":"Test rate limiting behavior: limit enforcement, Redis failover to SQLite, 429 responses, retry-after headers.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:34:44.355231-08:00","updated_at":"2025-12-08T08:35:44.087082-08:00","closed_at":"2025-12-08T08:35:44.087082-08:00","dependencies":[{"issue_id":"opencode-swarm-plugin-6ocg.4","depends_on_id":"opencode-swarm-plugin-6ocg","type":"parent-child","created_at":"2025-12-08T08:34:44.355586-08:00","created_by":"daemon"}]}
160
170
  {"id":"opencode-swarm-plugin-6oty","title":"Lifecycle test bead","description":"50% complete","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:14:30.448872-08:00","updated_at":"2025-12-08T08:14:30.527881-08:00","closed_at":"2025-12-08T08:14:30.527881-08:00"}
161
171
  {"id":"opencode-swarm-plugin-6p6h","title":"High priority ready bead","description":"","status":"closed","priority":0,"issue_type":"task","created_at":"2025-12-08T07:53:44.648855-08:00","updated_at":"2025-12-08T07:53:46.605112-08:00","closed_at":"2025-12-08T07:53:46.605112-08:00"}
162
172
  {"id":"opencode-swarm-plugin-6rs3","title":"Update test bead","description":"Original description","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:41:11.205193-08:00","updated_at":"2025-12-07T19:41:13.573642-08:00","closed_at":"2025-12-07T19:41:13.573642-08:00"}
@@ -248,7 +258,7 @@
248
258
  {"id":"opencode-swarm-plugin-a1qj","title":"Bead to start","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T07:49:10.19575-08:00","updated_at":"2025-12-08T07:49:12.301943-08:00","closed_at":"2025-12-08T07:49:12.301943-08:00"}
249
259
  {"id":"opencode-swarm-plugin-a1tr","title":"Cleanup task","description":"","status":"closed","priority":3,"issue_type":"chore","created_at":"2025-12-07T19:41:10.702003-08:00","updated_at":"2025-12-07T19:41:13.300043-08:00","closed_at":"2025-12-07T19:41:13.300043-08:00"}
250
260
  {"id":"opencode-swarm-plugin-a2p6","title":"Bead to close","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:57:06.489496-08:00","updated_at":"2025-12-07T19:57:06.515662-08:00","closed_at":"2025-12-07T19:57:06.515662-08:00"}
251
- {"id":"opencode-swarm-plugin-a2v","title":"Docker integration test infrastructure","description":"Create Docker-based integration test environment with real beads CLI and Agent Mail server for testing all plugin modules without mocks","status":"open","priority":1,"issue_type":"epic","created_at":"2025-12-07T19:23:43.835677-08:00","updated_at":"2025-12-07T19:23:43.835677-08:00"}
261
+ {"id":"opencode-swarm-plugin-a2v","title":"Docker integration test infrastructure","description":"Create Docker-based integration test environment with real beads CLI and Agent Mail server for testing all plugin modules without mocks","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-07T19:23:43.835677-08:00","updated_at":"2025-12-08T08:35:23.531219-08:00","closed_at":"2025-12-08T08:35:23.531219-08:00"}
252
262
  {"id":"opencode-swarm-plugin-a3fa","title":"Lifecycle test bead","description":"50% complete","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:05:34.668388-08:00","updated_at":"2025-12-08T08:05:34.745926-08:00","closed_at":"2025-12-08T08:05:34.745926-08:00"}
253
263
  {"id":"opencode-swarm-plugin-a76x","title":"New feature request","description":"","status":"closed","priority":1,"issue_type":"feature","created_at":"2025-12-08T08:21:26.918472-08:00","updated_at":"2025-12-08T08:21:29.744733-08:00","closed_at":"2025-12-08T08:21:29.744733-08:00"}
254
264
  {"id":"opencode-swarm-plugin-a8y","title":"Epic with file references","description":"","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-07T19:39:10.635641-08:00","updated_at":"2025-12-07T19:39:12.218959-08:00","closed_at":"2025-12-07T19:39:12.218959-08:00"}
@@ -480,7 +490,7 @@
480
490
  {"id":"opencode-swarm-plugin-k2wm","title":"Thread link test bead","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:56:44.160426-08:00","updated_at":"2025-12-07T19:56:45.501563-08:00","closed_at":"2025-12-07T19:56:45.501563-08:00"}
481
491
  {"id":"opencode-swarm-plugin-k3ji","title":"Limit test bead 4","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-07T19:57:06.077162-08:00","updated_at":"2025-12-07T19:57:08.361319-08:00","closed_at":"2025-12-07T19:57:08.361319-08:00"}
482
492
  {"id":"opencode-swarm-plugin-k48x","title":"Bead to close","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T07:49:10.083654-08:00","updated_at":"2025-12-08T07:49:10.109594-08:00","closed_at":"2025-12-08T07:49:10.109594-08:00"}
483
- {"id":"opencode-swarm-plugin-k8d","title":"Docker integration test infrastructure","description":"Create Docker-based integration test environment with real beads CLI and Agent Mail server for testing all plugin modules without mocks","status":"open","priority":1,"issue_type":"epic","created_at":"2025-12-07T19:24:22.198656-08:00","updated_at":"2025-12-07T19:24:22.198656-08:00"}
493
+ {"id":"opencode-swarm-plugin-k8d","title":"Docker integration test infrastructure","description":"Create Docker-based integration test environment with real beads CLI and Agent Mail server for testing all plugin modules without mocks","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-07T19:24:22.198656-08:00","updated_at":"2025-12-08T08:35:24.03232-08:00","closed_at":"2025-12-08T08:35:24.03232-08:00"}
484
494
  {"id":"opencode-swarm-plugin-k93","title":"High priority ready bead","description":"","status":"closed","priority":0,"issue_type":"task","created_at":"2025-12-07T19:39:10.444177-08:00","updated_at":"2025-12-07T19:39:12.109004-08:00","closed_at":"2025-12-07T19:39:12.109004-08:00"}
485
495
  {"id":"opencode-swarm-plugin-kci6","title":"Update test bead","description":"Updated description","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T07:53:44.191794-08:00","updated_at":"2025-12-08T07:53:46.477523-08:00","closed_at":"2025-12-08T07:53:46.477523-08:00"}
486
496
  {"id":"opencode-swarm-plugin-kcm4","title":"Query test bead","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-08T08:22:16.997856-08:00","updated_at":"2025-12-08T08:22:19.364091-08:00","closed_at":"2025-12-08T08:22:19.364091-08:00"}
package/README.md CHANGED
@@ -17,6 +17,8 @@ This plugin provides intelligent, self-improving tools for multi-agent workflows
17
17
  - **Anti-pattern detection** - Automatically learns what decomposition strategies fail and avoids them
18
18
  - **Pre-completion validation** - UBS bug scanning before marking tasks complete
19
19
  - **History-informed decomposition** - Queries CASS for similar past tasks to inform strategy
20
+ - **Graceful degradation** - Works with whatever tools are available, degrades features when tools missing
21
+ - **Swarm discipline** - Enforces beads tracking, aggressive planning, and agent communication
20
22
 
21
23
  ## Installation
22
24
 
@@ -96,6 +98,7 @@ bd --version
96
98
 
97
99
  | Tool | Description |
98
100
  | ------------------------------ | ------------------------------------------------------------------------ |
101
+ | `swarm_init` | Check tool availability, report degraded features |
99
102
  | `swarm_decompose` | Generate decomposition prompt, optionally queries CASS for similar tasks |
100
103
  | `swarm_validate_decomposition` | Validate decomposition response, detect instruction conflicts |
101
104
  | `swarm_status` | Get swarm status by epic ID |
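The two README additions above (graceful degradation and the new swarm_init tool) describe probing for the beads CLI and the Agent Mail server, then reporting which features are degraded when a dependency is missing. The plugin implements this in TypeScript (see the commandExists/urlReachable helpers changed below); the following is only a language-agnostic sketch of the same idea in Python, with the health URL borrowed from the deleted test harness later in this diff.

    # Sketch of the availability probe the README describes; not the plugin's code.
    import shutil
    import urllib.request

    def probe(agent_mail_url: str = "http://localhost:8765/health/liveness") -> dict:
        available = {"beads": shutil.which("bd") is not None}
        try:
            # Plain GET with a 2s timeout, mirroring this release's urlReachable
            # change (GET instead of HEAD, 2000ms instead of 1000ms).
            with urllib.request.urlopen(agent_mail_url, timeout=2) as resp:
                available["agent_mail"] = resp.status < 500
        except OSError:
            available["agent_mail"] = False
        return {
            "available": available,
            "degraded_features": [name for name, ok in available.items() if not ok],
        }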
package/dist/index.js CHANGED
@@ -12890,12 +12890,12 @@ async function commandExists(cmd) {
12890
12890
  return false;
12891
12891
  }
12892
12892
  }
12893
- async function urlReachable(url2, timeoutMs = 1000) {
12893
+ async function urlReachable(url2, timeoutMs = 2000) {
12894
12894
  try {
12895
12895
  const controller = new AbortController;
12896
12896
  const timeout = setTimeout(() => controller.abort(), timeoutMs);
12897
12897
  const response = await fetch(url2, {
12898
- method: "HEAD",
12898
+ method: "GET",
12899
12899
  signal: controller.signal
12900
12900
  });
12901
12901
  clearTimeout(timeout);
package/dist/plugin.js CHANGED
@@ -12890,12 +12890,12 @@ async function commandExists(cmd) {
12890
12890
  return false;
12891
12891
  }
12892
12892
  }
12893
- async function urlReachable(url2, timeoutMs = 1000) {
12893
+ async function urlReachable(url2, timeoutMs = 2000) {
12894
12894
  try {
12895
12895
  const controller = new AbortController;
12896
12896
  const timeout = setTimeout(() => controller.abort(), timeoutMs);
12897
12897
  const response = await fetch(url2, {
12898
- method: "HEAD",
12898
+ method: "GET",
12899
12899
  signal: controller.signal
12900
12900
  });
12901
12901
  clearTimeout(timeout);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "opencode-swarm-plugin",
3
- "version": "0.2.0",
3
+ "version": "0.3.0",
4
4
  "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -55,14 +55,15 @@ async function commandExists(cmd: string): Promise<boolean> {
55
55
  */
56
56
  async function urlReachable(
57
57
  url: string,
58
- timeoutMs: number = 1000,
58
+ timeoutMs: number = 2000,
59
59
  ): Promise<boolean> {
60
60
  try {
61
61
  const controller = new AbortController();
62
62
  const timeout = setTimeout(() => controller.abort(), timeoutMs);
63
63
 
64
+ // Use GET instead of HEAD - some servers don't support HEAD
64
65
  const response = await fetch(url, {
65
- method: "HEAD",
66
+ method: "GET",
66
67
  signal: controller.signal,
67
68
  });
68
69
 
package/Dockerfile DELETED
@@ -1,30 +0,0 @@
1
- # Test runner container for opencode-swarm-plugin integration tests
2
- FROM oven/bun:latest
3
-
4
- # Install git (required for beads) and curl (for healthchecks)
5
- RUN apt-get update && apt-get install -y \
6
- git \
7
- curl \
8
- && rm -rf /var/lib/apt/lists/*
9
-
10
- # Download bd CLI (beads issue tracker) for linux/amd64
11
- ARG BD_VERSION=0.2.8
12
- RUN curl -fsSL "https://github.com/beads-ai/beads/releases/download/v${BD_VERSION}/bd-linux-amd64" \
13
- -o /usr/local/bin/bd \
14
- && chmod +x /usr/local/bin/bd
15
-
16
- WORKDIR /app
17
-
18
- # Copy package files and install dependencies
19
- COPY package.json bun.lock* ./
20
- RUN bun install --frozen-lockfile
21
-
22
- # Copy source code
23
- COPY . .
24
-
25
- # Copy entrypoint script
26
- COPY scripts/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
27
- RUN chmod +x /usr/local/bin/docker-entrypoint.sh
28
-
29
- ENTRYPOINT ["docker-entrypoint.sh"]
30
- CMD ["bun", "run", "test:integration"]
@@ -1,23 +0,0 @@
1
- FROM python:3.11-slim
2
-
3
- WORKDIR /app
4
-
5
- # Install dependencies
6
- COPY requirements.txt .
7
- RUN pip install --no-cache-dir -r requirements.txt
8
-
9
- # Copy server code
10
- COPY server.py .
11
-
12
- # Create data directory for SQLite
13
- RUN mkdir -p /data
14
-
15
- # Expose port
16
- EXPOSE 8765
17
-
18
- # Health check
19
- HEALTHCHECK --interval=10s --timeout=3s --start-period=5s --retries=3 \
20
- CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8765/health/liveness')" || exit 1
21
-
22
- # Run server
23
- CMD ["uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8765"]
@@ -1,3 +0,0 @@
1
- fastapi>=0.109.0
2
- uvicorn[standard]>=0.27.0
3
- pydantic>=2.5.0
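package/docker/agent-mail/server.py DELETED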
@@ -1,879 +0,0 @@
1
- """
2
- Minimal Agent Mail Server for Integration Testing
3
-
4
- A lightweight MCP-compatible server that provides multi-agent coordination
5
- capabilities: messaging, file reservations, and project management.
6
-
7
- This is NOT the production Agent Mail server - it's a minimal implementation
8
- for testing the opencode-swarm-plugin MCP client.
9
- """
10
-
11
- import random
12
- import sqlite3
13
- import uuid
14
- from contextlib import contextmanager
15
- from datetime import datetime, timedelta, timezone
16
- from pathlib import Path
17
- from typing import Any
18
-
19
- from fastapi import FastAPI, HTTPException
20
- from fastapi.responses import JSONResponse
21
- from pydantic import BaseModel
22
-
23
- # =============================================================================
24
- # Configuration
25
- # =============================================================================
26
-
27
- DB_PATH = Path("/data/agentmail.db")
28
-
29
- # Agent name generation wordlists
30
- ADJECTIVES = [
31
- "Blue", "Red", "Green", "Golden", "Silver", "Crystal", "Shadow", "Bright",
32
- "Swift", "Silent", "Bold", "Calm", "Wild", "Noble", "Frost", "Storm",
33
- "Dawn", "Dusk", "Iron", "Copper", "Azure", "Crimson", "Amber", "Jade",
34
- "Coral", "Misty", "Sunny", "Lunar", "Solar", "Cosmic", "Terra", "Aqua",
35
- ]
36
-
37
- NOUNS = [
38
- "Lake", "Stone", "River", "Mountain", "Forest", "Valley", "Meadow", "Peak",
39
- "Canyon", "Desert", "Ocean", "Island", "Prairie", "Grove", "Creek", "Ridge",
40
- "Harbor", "Cliff", "Glacier", "Dune", "Marsh", "Brook", "Hill", "Plain",
41
- "Bay", "Cape", "Delta", "Fjord", "Mesa", "Plateau", "Reef", "Tundra",
42
- ]
43
-
44
- # =============================================================================
45
- # Database Setup
46
- # =============================================================================
47
-
48
- def init_db():
49
- """Initialize SQLite database with required tables."""
50
- DB_PATH.parent.mkdir(parents=True, exist_ok=True)
51
-
52
- conn = sqlite3.connect(str(DB_PATH))
53
- conn.row_factory = sqlite3.Row
54
-
55
- conn.executescript("""
56
- -- Projects table
57
- CREATE TABLE IF NOT EXISTS projects (
58
- id INTEGER PRIMARY KEY AUTOINCREMENT,
59
- slug TEXT UNIQUE NOT NULL,
60
- human_key TEXT NOT NULL,
61
- created_at TEXT NOT NULL DEFAULT (datetime('now'))
62
- );
63
-
64
- -- Agents table
65
- CREATE TABLE IF NOT EXISTS agents (
66
- id INTEGER PRIMARY KEY AUTOINCREMENT,
67
- name TEXT NOT NULL,
68
- program TEXT NOT NULL,
69
- model TEXT NOT NULL,
70
- task_description TEXT,
71
- inception_ts TEXT NOT NULL DEFAULT (datetime('now')),
72
- last_active_ts TEXT NOT NULL DEFAULT (datetime('now')),
73
- project_id INTEGER NOT NULL,
74
- FOREIGN KEY (project_id) REFERENCES projects(id),
75
- UNIQUE (name, project_id)
76
- );
77
-
78
- -- Messages table
79
- CREATE TABLE IF NOT EXISTS messages (
80
- id INTEGER PRIMARY KEY AUTOINCREMENT,
81
- project_id INTEGER NOT NULL,
82
- sender_id INTEGER NOT NULL,
83
- subject TEXT NOT NULL,
84
- body_md TEXT,
85
- thread_id TEXT,
86
- importance TEXT DEFAULT 'normal',
87
- ack_required INTEGER DEFAULT 0,
88
- kind TEXT DEFAULT 'message',
89
- created_ts TEXT NOT NULL DEFAULT (datetime('now')),
90
- FOREIGN KEY (project_id) REFERENCES projects(id),
91
- FOREIGN KEY (sender_id) REFERENCES agents(id)
92
- );
93
-
94
- -- Message recipients table (many-to-many)
95
- CREATE TABLE IF NOT EXISTS message_recipients (
96
- message_id INTEGER NOT NULL,
97
- agent_id INTEGER NOT NULL,
98
- read_at TEXT,
99
- acked_at TEXT,
100
- PRIMARY KEY (message_id, agent_id),
101
- FOREIGN KEY (message_id) REFERENCES messages(id),
102
- FOREIGN KEY (agent_id) REFERENCES agents(id)
103
- );
104
-
105
- -- File reservations table
106
- CREATE TABLE IF NOT EXISTS file_reservations (
107
- id INTEGER PRIMARY KEY AUTOINCREMENT,
108
- project_id INTEGER NOT NULL,
109
- agent_id INTEGER NOT NULL,
110
- path_pattern TEXT NOT NULL,
111
- exclusive INTEGER DEFAULT 1,
112
- reason TEXT,
113
- created_ts TEXT NOT NULL DEFAULT (datetime('now')),
114
- expires_ts TEXT NOT NULL,
115
- FOREIGN KEY (project_id) REFERENCES projects(id),
116
- FOREIGN KEY (agent_id) REFERENCES agents(id)
117
- );
118
-
119
- -- Full-text search for messages
120
- CREATE VIRTUAL TABLE IF NOT EXISTS messages_fts USING fts5(
121
- subject, body_md, content='messages', content_rowid='id'
122
- );
123
-
124
- -- Triggers to keep FTS in sync
125
- CREATE TRIGGER IF NOT EXISTS messages_ai AFTER INSERT ON messages BEGIN
126
- INSERT INTO messages_fts(rowid, subject, body_md)
127
- VALUES (new.id, new.subject, new.body_md);
128
- END;
129
-
130
- CREATE TRIGGER IF NOT EXISTS messages_ad AFTER DELETE ON messages BEGIN
131
- INSERT INTO messages_fts(messages_fts, rowid, subject, body_md)
132
- VALUES ('delete', old.id, old.subject, old.body_md);
133
- END;
134
-
135
- CREATE TRIGGER IF NOT EXISTS messages_au AFTER UPDATE ON messages BEGIN
136
- INSERT INTO messages_fts(messages_fts, rowid, subject, body_md)
137
- VALUES ('delete', old.id, old.subject, old.body_md);
138
- INSERT INTO messages_fts(rowid, subject, body_md)
139
- VALUES (new.id, new.subject, new.body_md);
140
- END;
141
- """)
142
-
143
- conn.commit()
144
- conn.close()
145
-
146
-
147
- @contextmanager
148
- def get_db():
149
- """Get database connection with row factory."""
150
- conn = sqlite3.connect(str(DB_PATH))
151
- conn.row_factory = sqlite3.Row
152
- try:
153
- yield conn
154
- conn.commit()
155
- finally:
156
- conn.close()
157
-
158
-
159
- def generate_agent_name() -> str:
160
- """Generate a random adjective+noun agent name."""
161
- return f"{random.choice(ADJECTIVES)}{random.choice(NOUNS)}"
162
-
163
-
164
- def generate_slug(human_key: str) -> str:
165
- """Generate a URL-safe slug from a human key."""
166
- # Simple slug: replace path separators and special chars
167
- slug = human_key.replace("/", "_").replace("\\", "_").replace(" ", "_")
168
- slug = "".join(c for c in slug if c.isalnum() or c == "_")
169
- return slug.lower()[:64]
170
-
171
-
172
- def now_iso() -> str:
173
- """Get current time in ISO format."""
174
- return datetime.now(timezone.utc).isoformat()
175
-
176
-
177
- # =============================================================================
178
- # FastAPI App
179
- # =============================================================================
180
-
181
- app = FastAPI(title="Agent Mail Test Server", version="0.1.0")
182
-
183
-
184
- @app.on_event("startup")
185
- async def startup():
186
- """Initialize database on startup."""
187
- init_db()
188
-
189
-
190
- # =============================================================================
191
- # Health Endpoints
192
- # =============================================================================
193
-
194
- @app.get("/health/liveness")
195
- async def health_liveness():
196
- """Liveness check for container health."""
197
- return {"status": "ok", "timestamp": now_iso()}
198
-
199
-
200
- @app.get("/health/readiness")
201
- async def health_readiness():
202
- """Readiness check - verify database is accessible."""
203
- try:
204
- with get_db() as conn:
205
- conn.execute("SELECT 1")
206
- return {"status": "ready", "timestamp": now_iso()}
207
- except Exception as e:
208
- raise HTTPException(status_code=503, detail=str(e))
209
-
210
-
211
- # =============================================================================
212
- # MCP JSON-RPC Endpoint
213
- # =============================================================================
214
-
215
- class MCPRequest(BaseModel):
216
- """MCP JSON-RPC request format."""
217
- jsonrpc: str = "2.0"
218
- id: str
219
- method: str
220
- params: dict[str, Any] = {}
221
-
222
-
223
- class MCPError(BaseModel):
224
- """MCP JSON-RPC error format."""
225
- code: int
226
- message: str
227
- data: Any = None
228
-
229
-
230
- class MCPResponse(BaseModel):
231
- """MCP JSON-RPC response format."""
232
- jsonrpc: str = "2.0"
233
- id: str
234
- result: Any = None
235
- error: MCPError | None = None
236
-
237
-
238
- @app.post("/mcp/")
239
- async def mcp_endpoint(request: MCPRequest):
240
- """
241
- MCP JSON-RPC endpoint.
242
-
243
- Handles tools/call method for Agent Mail operations.
244
- """
245
- if request.method != "tools/call":
246
- return MCPResponse(
247
- id=request.id,
248
- error=MCPError(
249
- code=-32601,
250
- message=f"Method not found: {request.method}",
251
- )
252
- )
253
-
254
- tool_name = request.params.get("name", "")
255
- arguments = request.params.get("arguments", {})
256
-
257
- try:
258
- result = await dispatch_tool(tool_name, arguments)
259
- return MCPResponse(id=request.id, result=result)
260
- except ValueError as e:
261
- return MCPResponse(
262
- id=request.id,
263
- error=MCPError(code=-32602, message=str(e))
264
- )
265
- except Exception as e:
266
- return MCPResponse(
267
- id=request.id,
268
- error=MCPError(code=-32000, message=str(e))
269
- )
270
-
271
-
272
- # =============================================================================
273
- # Tool Dispatcher
274
- # =============================================================================
275
-
276
- async def dispatch_tool(name: str, args: dict[str, Any]) -> Any:
277
- """Dispatch tool call to appropriate handler."""
278
- tools = {
279
- "ensure_project": tool_ensure_project,
280
- "register_agent": tool_register_agent,
281
- "send_message": tool_send_message,
282
- "fetch_inbox": tool_fetch_inbox,
283
- "mark_message_read": tool_mark_message_read,
284
- "summarize_thread": tool_summarize_thread,
285
- "file_reservation_paths": tool_file_reservation_paths,
286
- "release_file_reservations": tool_release_file_reservations,
287
- "acknowledge_message": tool_acknowledge_message,
288
- "search_messages": tool_search_messages,
289
- }
290
-
291
- handler = tools.get(name)
292
- if not handler:
293
- raise ValueError(f"Unknown tool: {name}")
294
-
295
- return await handler(args)
296
-
297
-
298
- # =============================================================================
299
- # Tool Implementations
300
- # =============================================================================
301
-
302
- async def tool_ensure_project(args: dict[str, Any]) -> dict:
303
- """Create or get a project by human_key."""
304
- human_key = args.get("human_key")
305
- if not human_key:
306
- raise ValueError("human_key is required")
307
-
308
- slug = generate_slug(human_key)
309
-
310
- with get_db() as conn:
311
- # Try to find existing project
312
- row = conn.execute(
313
- "SELECT * FROM projects WHERE human_key = ?",
314
- (human_key,)
315
- ).fetchone()
316
-
317
- if row:
318
- return dict(row)
319
-
320
- # Create new project
321
- cursor = conn.execute(
322
- "INSERT INTO projects (slug, human_key, created_at) VALUES (?, ?, ?)",
323
- (slug, human_key, now_iso())
324
- )
325
- project_id = cursor.lastrowid
326
-
327
- row = conn.execute(
328
- "SELECT * FROM projects WHERE id = ?",
329
- (project_id,)
330
- ).fetchone()
331
-
332
- return dict(row)
333
-
334
-
335
- async def tool_register_agent(args: dict[str, Any]) -> dict:
336
- """Register an agent with a project."""
337
- project_key = args.get("project_key")
338
- program = args.get("program", "unknown")
339
- model = args.get("model", "unknown")
340
- name = args.get("name")
341
- task_description = args.get("task_description", "")
342
-
343
- if not project_key:
344
- raise ValueError("project_key is required")
345
-
346
- with get_db() as conn:
347
- # Get project
348
- project = conn.execute(
349
- "SELECT * FROM projects WHERE human_key = ?",
350
- (project_key,)
351
- ).fetchone()
352
-
353
- if not project:
354
- raise ValueError(f"Project not found: {project_key}")
355
-
356
- project_id = project["id"]
357
-
358
- # Generate name if not provided
359
- if not name:
360
- # Keep trying until we get a unique name
361
- for _ in range(100):
362
- name = generate_agent_name()
363
- existing = conn.execute(
364
- "SELECT id FROM agents WHERE name = ? AND project_id = ?",
365
- (name, project_id)
366
- ).fetchone()
367
- if not existing:
368
- break
369
- else:
370
- name = f"{generate_agent_name()}_{uuid.uuid4().hex[:4]}"
371
-
372
- # Check if agent already exists
373
- existing = conn.execute(
374
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
375
- (name, project_id)
376
- ).fetchone()
377
-
378
- if existing:
379
- # Update last_active_ts
380
- conn.execute(
381
- "UPDATE agents SET last_active_ts = ? WHERE id = ?",
382
- (now_iso(), existing["id"])
383
- )
384
- return dict(existing)
385
-
386
- # Create new agent
387
- now = now_iso()
388
- cursor = conn.execute(
389
- """INSERT INTO agents
390
- (name, program, model, task_description, inception_ts, last_active_ts, project_id)
391
- VALUES (?, ?, ?, ?, ?, ?, ?)""",
392
- (name, program, model, task_description, now, now, project_id)
393
- )
394
- agent_id = cursor.lastrowid
395
-
396
- row = conn.execute(
397
- "SELECT * FROM agents WHERE id = ?",
398
- (agent_id,)
399
- ).fetchone()
400
-
401
- return dict(row)
402
-
403
-
404
- async def tool_send_message(args: dict[str, Any]) -> dict:
405
- """Send a message to other agents."""
406
- project_key = args.get("project_key")
407
- sender_name = args.get("sender_name")
408
- to = args.get("to", [])
409
- subject = args.get("subject", "")
410
- body_md = args.get("body_md", "")
411
- thread_id = args.get("thread_id")
412
- importance = args.get("importance", "normal")
413
- ack_required = args.get("ack_required", False)
414
-
415
- if not project_key:
416
- raise ValueError("project_key is required")
417
- if not sender_name:
418
- raise ValueError("sender_name is required")
419
- if not to:
420
- raise ValueError("to is required (list of recipient names)")
421
-
422
- with get_db() as conn:
423
- # Get project
424
- project = conn.execute(
425
- "SELECT * FROM projects WHERE human_key = ?",
426
- (project_key,)
427
- ).fetchone()
428
- if not project:
429
- raise ValueError(f"Project not found: {project_key}")
430
-
431
- project_id = project["id"]
432
-
433
- # Get sender agent
434
- sender = conn.execute(
435
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
436
- (sender_name, project_id)
437
- ).fetchone()
438
- if not sender:
439
- raise ValueError(f"Sender agent not found: {sender_name}")
440
-
441
- # Create message
442
- cursor = conn.execute(
443
- """INSERT INTO messages
444
- (project_id, sender_id, subject, body_md, thread_id, importance, ack_required, created_ts)
445
- VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
446
- (project_id, sender["id"], subject, body_md, thread_id, importance,
447
- 1 if ack_required else 0, now_iso())
448
- )
449
- message_id = cursor.lastrowid
450
-
451
- # Add recipients
452
- for recipient_name in to:
453
- recipient = conn.execute(
454
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
455
- (recipient_name, project_id)
456
- ).fetchone()
457
-
458
- if recipient:
459
- conn.execute(
460
- "INSERT INTO message_recipients (message_id, agent_id) VALUES (?, ?)",
461
- (message_id, recipient["id"])
462
- )
463
-
464
- return {
465
- "id": message_id,
466
- "subject": subject,
467
- "sent_to": to,
468
- "created_ts": now_iso(),
469
- }
470
-
471
-
472
- async def tool_fetch_inbox(args: dict[str, Any]) -> list[dict]:
473
- """Fetch inbox messages for an agent."""
474
- project_key = args.get("project_key")
475
- agent_name = args.get("agent_name")
476
- limit = args.get("limit", 10)
477
- include_bodies = args.get("include_bodies", False)
478
- urgent_only = args.get("urgent_only", False)
479
- since_ts = args.get("since_ts")
480
-
481
- if not project_key:
482
- raise ValueError("project_key is required")
483
- if not agent_name:
484
- raise ValueError("agent_name is required")
485
-
486
- with get_db() as conn:
487
- # Get project and agent
488
- project = conn.execute(
489
- "SELECT * FROM projects WHERE human_key = ?",
490
- (project_key,)
491
- ).fetchone()
492
- if not project:
493
- raise ValueError(f"Project not found: {project_key}")
494
-
495
- agent = conn.execute(
496
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
497
- (agent_name, project["id"])
498
- ).fetchone()
499
- if not agent:
500
- raise ValueError(f"Agent not found: {agent_name}")
501
-
502
- # Build query
503
- query = """
504
- SELECT m.*, a.name as from_name
505
- FROM messages m
506
- JOIN message_recipients mr ON m.id = mr.message_id
507
- JOIN agents a ON m.sender_id = a.id
508
- WHERE mr.agent_id = ?
509
- """
510
- params: list[Any] = [agent["id"]]
511
-
512
- if urgent_only:
513
- query += " AND m.importance = 'urgent'"
514
-
515
- if since_ts:
516
- query += " AND m.created_ts > ?"
517
- params.append(since_ts)
518
-
519
- query += " ORDER BY m.created_ts DESC LIMIT ?"
520
- params.append(limit)
521
-
522
- rows = conn.execute(query, params).fetchall()
523
-
524
- messages = []
525
- for row in rows:
526
- msg = {
527
- "id": row["id"],
528
- "subject": row["subject"],
529
- "from": row["from_name"],
530
- "created_ts": row["created_ts"],
531
- "importance": row["importance"],
532
- "ack_required": bool(row["ack_required"]),
533
- "thread_id": row["thread_id"],
534
- "kind": row["kind"],
535
- }
536
- if include_bodies:
537
- msg["body_md"] = row["body_md"]
538
- messages.append(msg)
539
-
540
- return messages
541
-
542
-
543
- async def tool_mark_message_read(args: dict[str, Any]) -> dict:
544
- """Mark a message as read."""
545
- project_key = args.get("project_key")
546
- agent_name = args.get("agent_name")
547
- message_id = args.get("message_id")
548
-
549
- if not all([project_key, agent_name, message_id]):
550
- raise ValueError("project_key, agent_name, and message_id are required")
551
-
552
- with get_db() as conn:
553
- # Get agent
554
- project = conn.execute(
555
- "SELECT * FROM projects WHERE human_key = ?",
556
- (project_key,)
557
- ).fetchone()
558
- if not project:
559
- raise ValueError(f"Project not found: {project_key}")
560
-
561
- agent = conn.execute(
562
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
563
- (agent_name, project["id"])
564
- ).fetchone()
565
- if not agent:
566
- raise ValueError(f"Agent not found: {agent_name}")
567
-
568
- # Update read timestamp
569
- conn.execute(
570
- """UPDATE message_recipients
571
- SET read_at = ?
572
- WHERE message_id = ? AND agent_id = ?""",
573
- (now_iso(), message_id, agent["id"])
574
- )
575
-
576
- return {"message_id": message_id, "read_at": now_iso()}
577
-
578
-
579
- async def tool_summarize_thread(args: dict[str, Any]) -> dict:
580
- """Summarize a message thread."""
581
- project_key = args.get("project_key")
582
- thread_id = args.get("thread_id")
583
- include_examples = args.get("include_examples", False)
584
-
585
- if not project_key:
586
- raise ValueError("project_key is required")
587
- if not thread_id:
588
- raise ValueError("thread_id is required")
589
-
590
- with get_db() as conn:
591
- # Get project
592
- project = conn.execute(
593
- "SELECT * FROM projects WHERE human_key = ?",
594
- (project_key,)
595
- ).fetchone()
596
- if not project:
597
- raise ValueError(f"Project not found: {project_key}")
598
-
599
- # Get messages in thread
600
- rows = conn.execute(
601
- """SELECT m.*, a.name as from_name
602
- FROM messages m
603
- JOIN agents a ON m.sender_id = a.id
604
- WHERE m.thread_id = ? AND m.project_id = ?
605
- ORDER BY m.created_ts ASC""",
606
- (thread_id, project["id"])
607
- ).fetchall()
608
-
609
- # Build summary
610
- participants = list(set(row["from_name"] for row in rows))
611
-
612
- # Simple key points extraction (just use subjects for now)
613
- key_points = [row["subject"] for row in rows[:5]]
614
-
615
- # Action items (messages with "urgent" importance)
616
- action_items = [
617
- row["subject"] for row in rows
618
- if row["importance"] == "urgent"
619
- ]
620
-
621
- result = {
622
- "thread_id": thread_id,
623
- "summary": {
624
- "participants": participants,
625
- "key_points": key_points,
626
- "action_items": action_items,
627
- "total_messages": len(rows),
628
- }
629
- }
630
-
631
- if include_examples and rows:
632
- examples = []
633
- for row in rows[:3]:
634
- examples.append({
635
- "id": row["id"],
636
- "subject": row["subject"],
637
- "from": row["from_name"],
638
- "body_md": row["body_md"],
639
- })
640
- result["examples"] = examples
641
-
642
- return result
643
-
644
-
645
- async def tool_file_reservation_paths(args: dict[str, Any]) -> dict:
646
- """Reserve file paths for exclusive editing."""
647
- project_key = args.get("project_key")
648
- agent_name = args.get("agent_name")
649
- paths = args.get("paths", [])
650
- ttl_seconds = args.get("ttl_seconds", 3600)
651
- exclusive = args.get("exclusive", True)
652
- reason = args.get("reason", "")
653
-
654
- if not project_key:
655
- raise ValueError("project_key is required")
656
- if not agent_name:
657
- raise ValueError("agent_name is required")
658
- if not paths:
659
- raise ValueError("paths is required (list of path patterns)")
660
-
661
- with get_db() as conn:
662
- # Get project and agent
663
- project = conn.execute(
664
- "SELECT * FROM projects WHERE human_key = ?",
665
- (project_key,)
666
- ).fetchone()
667
- if not project:
668
- raise ValueError(f"Project not found: {project_key}")
669
-
670
- agent = conn.execute(
671
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
672
- (agent_name, project["id"])
673
- ).fetchone()
674
- if not agent:
675
- raise ValueError(f"Agent not found: {agent_name}")
676
-
677
- project_id = project["id"]
678
- agent_id = agent["id"]
679
-
680
- # Check for conflicts with existing reservations
681
- conflicts = []
682
- granted = []
683
- now = now_iso()
684
- expires = (datetime.now(timezone.utc) + timedelta(seconds=ttl_seconds)).isoformat()
685
-
686
- # Clean up expired reservations first
687
- conn.execute(
688
- "DELETE FROM file_reservations WHERE expires_ts < ?",
689
- (now,)
690
- )
691
-
692
- for path in paths:
693
- # Check for conflicting exclusive reservations
694
- # Simple matching: exact match or glob patterns
695
- conflicting = conn.execute(
696
- """SELECT fr.*, a.name as holder_name
697
- FROM file_reservations fr
698
- JOIN agents a ON fr.agent_id = a.id
699
- WHERE fr.project_id = ?
700
- AND fr.agent_id != ?
701
- AND fr.exclusive = 1
702
- AND (fr.path_pattern = ? OR fr.path_pattern LIKE ? OR ? LIKE fr.path_pattern)""",
703
- (project_id, agent_id, path, path.replace("*", "%"), path.replace("*", "%"))
704
- ).fetchall()
705
-
706
- if conflicting:
707
- conflicts.append({
708
- "path": path,
709
- "holders": [r["holder_name"] for r in conflicting],
710
- })
711
- else:
712
- # Grant the reservation
713
- cursor = conn.execute(
714
- """INSERT INTO file_reservations
715
- (project_id, agent_id, path_pattern, exclusive, reason, created_ts, expires_ts)
716
- VALUES (?, ?, ?, ?, ?, ?, ?)""",
717
- (project_id, agent_id, path, 1 if exclusive else 0, reason, now, expires)
718
- )
719
- granted.append({
720
- "id": cursor.lastrowid,
721
- "path_pattern": path,
722
- "exclusive": exclusive,
723
- "reason": reason,
724
- "expires_ts": expires,
725
- })
726
-
727
- return {
728
- "granted": granted,
729
- "conflicts": conflicts,
730
- }
731
-
732
-
733
- async def tool_release_file_reservations(args: dict[str, Any]) -> dict:
734
- """Release file reservations."""
735
- project_key = args.get("project_key")
736
- agent_name = args.get("agent_name")
737
- paths = args.get("paths")
738
- file_reservation_ids = args.get("file_reservation_ids")
739
-
740
- if not project_key:
741
- raise ValueError("project_key is required")
742
- if not agent_name:
743
- raise ValueError("agent_name is required")
744
-
745
- with get_db() as conn:
746
- # Get project and agent
747
- project = conn.execute(
748
- "SELECT * FROM projects WHERE human_key = ?",
749
- (project_key,)
750
- ).fetchone()
751
- if not project:
752
- raise ValueError(f"Project not found: {project_key}")
753
-
754
- agent = conn.execute(
755
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
756
- (agent_name, project["id"])
757
- ).fetchone()
758
- if not agent:
759
- raise ValueError(f"Agent not found: {agent_name}")
760
-
761
- # Build delete query
762
- if file_reservation_ids:
763
- # Delete by IDs
764
- placeholders = ",".join("?" * len(file_reservation_ids))
765
- cursor = conn.execute(
766
- f"""DELETE FROM file_reservations
767
- WHERE id IN ({placeholders}) AND agent_id = ?""",
768
- (*file_reservation_ids, agent["id"])
769
- )
770
- elif paths:
771
- # Delete by paths
772
- placeholders = ",".join("?" * len(paths))
773
- cursor = conn.execute(
774
- f"""DELETE FROM file_reservations
775
- WHERE path_pattern IN ({placeholders}) AND agent_id = ?""",
776
- (*paths, agent["id"])
777
- )
778
- else:
779
- # Delete all for this agent
780
- cursor = conn.execute(
781
- "DELETE FROM file_reservations WHERE agent_id = ?",
782
- (agent["id"],)
783
- )
784
-
785
- return {
786
- "released": cursor.rowcount,
787
- "released_at": now_iso(),
788
- }
789
-
790
-
791
- async def tool_acknowledge_message(args: dict[str, Any]) -> dict:
792
- """Acknowledge a message."""
793
- project_key = args.get("project_key")
794
- agent_name = args.get("agent_name")
795
- message_id = args.get("message_id")
796
-
797
- if not all([project_key, agent_name, message_id]):
798
- raise ValueError("project_key, agent_name, and message_id are required")
799
-
800
- with get_db() as conn:
801
- # Get agent
802
- project = conn.execute(
803
- "SELECT * FROM projects WHERE human_key = ?",
804
- (project_key,)
805
- ).fetchone()
806
- if not project:
807
- raise ValueError(f"Project not found: {project_key}")
808
-
809
- agent = conn.execute(
810
- "SELECT * FROM agents WHERE name = ? AND project_id = ?",
811
- (agent_name, project["id"])
812
- ).fetchone()
813
- if not agent:
814
- raise ValueError(f"Agent not found: {agent_name}")
815
-
816
- # Update ack timestamp
817
- now = now_iso()
818
- conn.execute(
819
- """UPDATE message_recipients
820
- SET acked_at = ?
821
- WHERE message_id = ? AND agent_id = ?""",
822
- (now, message_id, agent["id"])
823
- )
824
-
825
- return {"message_id": message_id, "acked_at": now}
826
-
827
-
828
- async def tool_search_messages(args: dict[str, Any]) -> list[dict]:
829
- """Search messages using FTS5."""
830
- project_key = args.get("project_key")
831
- query = args.get("query", "")
832
- limit = args.get("limit", 20)
833
-
834
- if not project_key:
835
- raise ValueError("project_key is required")
836
- if not query:
837
- raise ValueError("query is required")
838
-
839
- with get_db() as conn:
840
- # Get project
841
- project = conn.execute(
842
- "SELECT * FROM projects WHERE human_key = ?",
843
- (project_key,)
844
- ).fetchone()
845
- if not project:
846
- raise ValueError(f"Project not found: {project_key}")
847
-
848
- # Search using FTS5
849
- rows = conn.execute(
850
- """SELECT m.*, a.name as from_name
851
- FROM messages m
852
- JOIN messages_fts fts ON m.id = fts.rowid
853
- JOIN agents a ON m.sender_id = a.id
854
- WHERE m.project_id = ? AND messages_fts MATCH ?
855
- ORDER BY rank
856
- LIMIT ?""",
857
- (project["id"], query, limit)
858
- ).fetchall()
859
-
860
- return [
861
- {
862
- "id": row["id"],
863
- "subject": row["subject"],
864
- "from": row["from_name"],
865
- "created_ts": row["created_ts"],
866
- "importance": row["importance"],
867
- "thread_id": row["thread_id"],
868
- }
869
- for row in rows
870
- ]
871
-
872
-
873
- # =============================================================================
874
- # Main
875
- # =============================================================================
876
-
877
- if __name__ == "__main__":
878
- import uvicorn
879
- uvicorn.run(app, host="0.0.0.0", port=8765)
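The deleted test server above exposes everything through a single JSON-RPC endpoint (POST /mcp/) that only accepts the tools/call method. For reference, a minimal sketch of how a test client could drive it, assuming the server is running locally on port 8765 as in the Docker setup below; this is not the plugin's actual MCP client, and the project key is a placeholder.

    # Minimal client sketch for the deleted test server above; tool names and
    # argument keys come from its dispatch_tool/tool_* handlers.
    import json
    import urllib.request

    def call_tool(name: str, arguments: dict, url: str = "http://localhost:8765/mcp/") -> dict:
        payload = {
            "jsonrpc": "2.0",
            "id": "1",
            "method": "tools/call",
            "params": {"name": name, "arguments": arguments},
        }
        req = urllib.request.Request(
            url,
            data=json.dumps(payload).encode(),
            headers={"Content-Type": "application/json"},
        )
        with urllib.request.urlopen(req, timeout=5) as resp:
            return json.loads(resp.read())

    # Placeholder project key; ensure_project creates it, register_agent joins it.
    project = call_tool("ensure_project", {"human_key": "/app"})
    agent = call_tool("register_agent", {"project_key": "/app", "program": "test", "model": "test"})

package/docker-compose.yml DELETED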
@@ -1,45 +0,0 @@
1
- # Docker Compose for opencode-swarm-plugin integration tests
2
- # Services: agent-mail (FastAPI) + test-runner (Bun)
3
-
4
- services:
5
- agent-mail:
6
- build:
7
- context: ./docker/agent-mail
8
- dockerfile: Dockerfile
9
- ports:
10
- - "8765:8765"
11
- environment:
12
- - AGENT_MAIL_HOST=0.0.0.0
13
- - AGENT_MAIL_PORT=8765
14
- healthcheck:
15
- test: ["CMD", "curl", "-f", "http://localhost:8765/health/liveness"]
16
- interval: 5s
17
- timeout: 3s
18
- retries: 10
19
- start_period: 5s
20
- networks:
21
- - test-network
22
-
23
- test-runner:
24
- build:
25
- context: .
26
- dockerfile: Dockerfile
27
- depends_on:
28
- agent-mail:
29
- condition: service_healthy
30
- environment:
31
- - AGENT_MAIL_URL=http://agent-mail:8765
32
- - GIT_AUTHOR_NAME=Test Runner
33
- - GIT_AUTHOR_EMAIL=test@example.com
34
- - GIT_COMMITTER_NAME=Test Runner
35
- - GIT_COMMITTER_EMAIL=test@example.com
36
- volumes:
37
- # Mount source for faster iteration (optional - comment out for clean builds)
38
- - ./src:/app/src:ro
39
- - ./tests:/app/tests:ro
40
- networks:
41
- - test-network
42
-
43
- networks:
44
- test-network:
45
- driver: bridge
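package/scripts/docker-entrypoint.sh DELETED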
@@ -1,54 +0,0 @@
1
- #!/bin/bash
2
- set -e
3
-
4
- # Colors for output
5
- RED='\033[0;31m'
6
- GREEN='\033[0;32m'
7
- YELLOW='\033[1;33m'
8
- NC='\033[0m' # No Color
9
-
10
- log() { echo -e "${GREEN}[entrypoint]${NC} $1"; }
11
- warn() { echo -e "${YELLOW}[entrypoint]${NC} $1"; }
12
- error() { echo -e "${RED}[entrypoint]${NC} $1"; }
13
-
14
- # Initialize git repo if not present (beads requires git)
15
- if [ ! -d ".git" ]; then
16
- log "Initializing git repository..."
17
- git init
18
- git config user.email "test@example.com"
19
- git config user.name "Test Runner"
20
- git add -A
21
- git commit -m "Initial commit for test environment" --allow-empty
22
- fi
23
-
24
- # Initialize beads if not present
25
- if [ ! -d ".beads" ]; then
26
- log "Initializing beads..."
27
- bd init --name "test-project" || warn "beads init failed (may already exist)"
28
- fi
29
-
30
- # Wait for agent-mail to be healthy
31
- AGENT_MAIL_URL="${AGENT_MAIL_URL:-http://agent-mail:8765}"
32
- MAX_RETRIES=30
33
- RETRY_COUNT=0
34
-
35
- log "Waiting for agent-mail at ${AGENT_MAIL_URL}..."
36
-
37
- while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
38
- if curl -sf "${AGENT_MAIL_URL}/health/liveness" > /dev/null 2>&1; then
39
- log "agent-mail is healthy!"
40
- break
41
- fi
42
- RETRY_COUNT=$((RETRY_COUNT + 1))
43
- if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
44
- error "agent-mail failed to become healthy after ${MAX_RETRIES} attempts"
45
- exit 1
46
- fi
47
- warn "Waiting for agent-mail... (attempt ${RETRY_COUNT}/${MAX_RETRIES})"
48
- sleep 1
49
- done
50
-
51
- log "Starting integration tests..."
52
-
53
- # Execute the command passed to the container
54
- exec "$@"