wogiflow 2.4.4 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/wogi-audit.md +26 -0
- package/.claude/commands/wogi-review.md +29 -0
- package/.claude/docs/explore-agents.md +8 -2
- package/lib/workspace-channel-server.js +364 -0
- package/lib/workspace-routing.js +301 -4
- package/lib/workspace.js +313 -37
- package/package.json +1 -1
- package/scripts/flow-schema-drift.js +837 -0
|
@@ -260,6 +260,32 @@ Return:
|
|
|
260
260
|
- Score: A through F
|
|
261
261
|
```
|
|
262
262
|
|
|
263
|
+
#### Agent 8: Schema Drift Auditor
|
|
264
|
+
|
|
265
|
+
```
|
|
266
|
+
Audit schema drift across the entire project.
|
|
267
|
+
|
|
268
|
+
1. Identify all schema source-of-truth files:
|
|
269
|
+
- Read schema-map.md and schema-index.json for registered schemas
|
|
270
|
+
- Scan for convention files: *.prisma, *.entity.ts, *.model.ts, *.schema.ts
|
|
271
|
+
2. For each schema file, extract all defined field names
|
|
272
|
+
3. For each field, grep the codebase for references outside the schema file
|
|
273
|
+
4. Cross-reference: are there field names in consumer code that do NOT exist
|
|
274
|
+
in the current schema? (stale references from past changes)
|
|
275
|
+
5. Check for inconsistencies:
|
|
276
|
+
- Field name in consumer doesn't match schema casing
|
|
277
|
+
- Optional field accessed without null check in consumer
|
|
278
|
+
- Field used in tests but removed from schema
|
|
279
|
+
6. Run automated detection:
|
|
280
|
+
node scripts/flow-schema-drift.js
|
|
281
|
+
|
|
282
|
+
Return:
|
|
283
|
+
- Orphaned field references (field in consumer, not in schema)
|
|
284
|
+
- Casing mismatches
|
|
285
|
+
- Coverage: % of schema fields actually used by consumers
|
|
286
|
+
- Score: A through F
|
|
287
|
+
```
|
|
288
|
+
|
|
263
289
|
### Step 3: Consolidate Results
|
|
264
290
|
|
|
265
291
|
After all agents complete, consolidate into a single report.
|
|
@@ -453,6 +453,35 @@ For each issue found, report as JSON:
|
|
|
453
453
|
"agent": "performance" }
|
|
454
454
|
```
|
|
455
455
|
|
|
456
|
+
#### Agent: Schema Drift Review
|
|
457
|
+
|
|
458
|
+
Enabled when `"schema-drift"` is in `config.review.agents.optional`. **Auto-enabled** when any changed file matches schema conventions (*.prisma, *.entity.ts, *.model.ts, *.schema.ts) or is listed in schema-map.md.
|
|
459
|
+
|
|
460
|
+
Launch a Task agent with subagent_type=Explore:
|
|
461
|
+
```
|
|
462
|
+
Schema drift review of the following files:
|
|
463
|
+
[FILE_LIST]
|
|
464
|
+
|
|
465
|
+
Check for:
|
|
466
|
+
1. Read schema-map.md and schema-index.json to identify schema source-of-truth files
|
|
467
|
+
2. For each schema file in the changed set, parse the git diff for removed/renamed fields
|
|
468
|
+
3. For each removed/renamed field, grep the entire codebase for references:
|
|
469
|
+
- Property access: obj.fieldName
|
|
470
|
+
- Destructuring: { fieldName }
|
|
471
|
+
- Object keys: fieldName:
|
|
472
|
+
- String literals: 'fieldName' or "fieldName"
|
|
473
|
+
4. Report any consumer file that still references a removed/renamed field
|
|
474
|
+
|
|
475
|
+
For each issue found, report as JSON:
|
|
476
|
+
{ "id": "finding-NNN", "file": "consumer-path", "line": N, "type": "schema-drift",
|
|
477
|
+
"severity": "high", "category": "schema-drift",
|
|
478
|
+
"issue": "Consumer references field 'X' which was removed/renamed in schema-file",
|
|
479
|
+
"recommendation": "Update reference to use new field name / remove reference",
|
|
480
|
+
"autoFixable": true, "agent": "schema-drift" }
|
|
481
|
+
```
|
|
482
|
+
|
|
483
|
+
Also run `node scripts/flow-schema-drift.js [changed-files]` for automated detection and include its output.
|
|
484
|
+
|
|
456
485
|
### Project-Rules Agents (Auto-Generated from decisions.md)
|
|
457
486
|
|
|
458
487
|
When `config.review.agents.projectRules` is `true`, additional agents are **automatically generated** from project rules:
|
|
@@ -164,11 +164,12 @@ Return:
|
|
|
164
164
|
- Security patterns that apply
|
|
165
165
|
```
|
|
166
166
|
|
|
167
|
-
## Agent 6: Consumer Impact Analyzer (Refactor/Migration
|
|
167
|
+
## Agent 6: Consumer Impact Analyzer (Refactor/Migration/Schema Changes)
|
|
168
168
|
|
|
169
|
-
Launch as `Agent(subagent_type=Explore)` (local only). **MANDATORY for refactor, migration, architecture tasks.**
|
|
169
|
+
Launch as `Agent(subagent_type=Explore)` (local only). **MANDATORY for refactor, migration, architecture tasks AND any task that modifies schema/model files.**
|
|
170
170
|
|
|
171
171
|
Trigger keywords: refactor, replace, rename, restructure, extract, consolidate, deprecate, migrate, move, reorganize.
|
|
172
|
+
Trigger files: *.prisma, *.entity.ts, *.model.ts, *.schema.ts, files listed in schema-map.md.
|
|
172
173
|
|
|
173
174
|
```
|
|
174
175
|
Analyze consumer impact for task: "[TASK_TITLE]"
|
|
@@ -183,10 +184,15 @@ You MUST map all consumers before changes proceed.
|
|
|
183
184
|
c. Grep for ALL config files that reference it
|
|
184
185
|
d. Grep for ALL documentation (.md) that reference it
|
|
185
186
|
e. Grep for ALL test files that import or mock it
|
|
187
|
+
f. For schema/model files: grep for FIELD-LEVEL references — property accesses
|
|
188
|
+
(obj.fieldName), destructuring ({ fieldName }), object keys (fieldName:),
|
|
189
|
+
and string literals ('fieldName'). Report which specific fields are referenced
|
|
190
|
+
by which consumers. This catches drift that module-level import checks miss.
|
|
186
191
|
|
|
187
192
|
2. For EACH consumer, classify impact:
|
|
188
193
|
- BREAKING (import/API changes) — describe what breaks + migration path
|
|
189
194
|
- NEEDS-UPDATE (behavior change) — describe expected behavioral change
|
|
195
|
+
- SCHEMA-DRIFT (field removed/renamed but consumer still references old name)
|
|
190
196
|
- SAFE (no change needed)
|
|
191
197
|
|
|
192
198
|
3. Check indirect consumers (up to 3 levels deep)
|
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Wogi Workspace — Channel MCP Server
|
|
5
|
+
*
|
|
6
|
+
* Minimal MCP server (JSON-RPC 2.0 over stdio) that receives HTTP webhooks
|
|
7
|
+
* and forwards them as channel notifications to a Claude Code session.
|
|
8
|
+
*
|
|
9
|
+
* Used by workspace workers to receive task dispatches from the manager
|
|
10
|
+
* and questions from peer repos.
|
|
11
|
+
*
|
|
12
|
+
* Environment:
|
|
13
|
+
* WOGI_CHANNEL_PORT — HTTP port to listen on (default: 8801)
|
|
14
|
+
* WOGI_REPO_NAME — Name of this repo in the workspace
|
|
15
|
+
* WOGI_PEERS — Comma-separated peer list: "backend:8802,shared:8803"
|
|
16
|
+
* WOGI_WORKSPACE_ROOT — Path to workspace root (for message bus access)
|
|
17
|
+
*/
|
|
18
|
+
|
|
19
|
+
'use strict';
|
|
20
|
+
|
|
21
|
+
const http = require('node:http');
|
|
22
|
+
const readline = require('node:readline');
|
|
23
|
+
|
|
24
|
+
// ============================================================
|
|
25
|
+
// Constants
|
|
26
|
+
// ============================================================
|
|
27
|
+
|
|
28
|
+
// Hard limits and defaults for the channel server.
const MAX_BODY_BYTES = 1 * 1024 * 1024; // 1 MB max POST body
const MAX_RESPONSE_BYTES = 64 * 1024; // 64 KB max peer response
const MIN_PORT = 1024;   // refuse privileged ports
const MAX_PORT = 65535;  // highest valid TCP port
const DEFAULT_PORT = 8801;
// Repo/peer names: alphanumerics, dash, underscore; 1-64 chars.
const VALID_NAME_PATTERN = /^[a-zA-Z0-9_-]{1,64}$/;
|
|
34
|
+
|
|
35
|
+
// ============================================================
|
|
36
|
+
// Port Validation
|
|
37
|
+
// ============================================================
|
|
38
|
+
|
|
39
|
+
/**
 * Parse and validate a TCP port taken from the environment.
 * Falls back to DEFAULT_PORT (with a stderr warning) when the value is
 * not an integer in the MIN_PORT..MAX_PORT range.
 *
 * @param {string} raw - Raw port value (e.g. from process.env).
 * @param {string} label - Variable name used in the warning message.
 * @returns {number} A port number within the valid range.
 */
function validatePort(raw, label) {
  const parsed = parseInt(raw, 10);
  const inRange = Number.isInteger(parsed) && parsed >= MIN_PORT && parsed <= MAX_PORT;
  if (inRange) {
    return parsed;
  }
  process.stderr.write(`[wogi-channel] Invalid ${label}: "${raw}" — must be ${MIN_PORT}-${MAX_PORT}. Defaulting to ${DEFAULT_PORT}\n`);
  return DEFAULT_PORT;
}
|
|
47
|
+
|
|
48
|
+
// Resolved configuration from environment (validated where possible).
const PORT = validatePort(process.env.WOGI_CHANNEL_PORT || String(DEFAULT_PORT), 'WOGI_CHANNEL_PORT');
const RAW_REPO_NAME = process.env.WOGI_REPO_NAME || 'unknown';
// Reject repo names that fail the whitelist pattern rather than interpolating them.
const REPO_NAME = VALID_NAME_PATTERN.test(RAW_REPO_NAME) ? RAW_REPO_NAME : 'unknown';
const PEERS_RAW = process.env.WOGI_PEERS || '';
// NOTE(review): read but not used anywhere in this file — presumably consumed by
// message-bus code elsewhere; confirm before removing.
const WORKSPACE_ROOT = process.env.WOGI_WORKSPACE_ROOT || '';
|
|
53
|
+
|
|
54
|
+
/**
 * Parse the WOGI_PEERS environment value into a name → port map.
 * Input format: "backend:8802,shared:8803" → { backend: 8802, shared: 8803 }.
 * Entries with an invalid name or an out-of-range port are skipped with a
 * stderr warning instead of aborting startup.
 *
 * @param {string} raw - Comma-separated "name:port" list (may be empty).
 * @returns {Object<string, number>} Map of peer name to channel port.
 */
function parsePeers(raw) {
  const peers = {};
  if (!raw) return peers;

  for (const entry of raw.split(',')) {
    const [name, portStr] = entry.trim().split(':');
    if (!name || !portStr) continue;

    const port = parseInt(portStr, 10);
    if (!VALID_NAME_PATTERN.test(name)) {
      // Name fails the whitelist — never let it into the peer map.
      process.stderr.write(`[wogi-channel] Ignoring peer with invalid name "${name}"\n`);
    } else if (Number.isInteger(port) && port >= MIN_PORT && port <= MAX_PORT) {
      peers[name] = port;
    } else {
      process.stderr.write(`[wogi-channel] Ignoring invalid peer "${entry}" — port must be ${MIN_PORT}-${MAX_PORT}\n`);
    }
  }
  return peers;
}
|
|
74
|
+
|
|
75
|
+
// Validated peer name → channel port map, built once at startup.
const PEERS = parsePeers(PEERS_RAW);
|
|
76
|
+
|
|
77
|
+
// ============================================================
|
|
78
|
+
// Minimal MCP Protocol (JSON-RPC 2.0 over stdio)
|
|
79
|
+
// ============================================================
|
|
80
|
+
|
|
81
|
+
// Set once the client sends notifications/initialized.
// NOTE(review): written but never read in this file — possibly vestigial; confirm.
let initialized = false;
|
|
82
|
+
|
|
83
|
+
/**
 * Emit one JSON-RPC message to Claude Code on stdout.
 * The MCP stdio transport is newline-delimited JSON, so each serialized
 * message is followed by exactly one "\n".
 *
 * @param {object} msg - JSON-RPC 2.0 message object.
 */
function sendMessage(msg) {
  process.stdout.write(`${JSON.stringify(msg)}\n`);
}
|
|
91
|
+
|
|
92
|
+
/**
 * Reply to a JSON-RPC request.
 *
 * @param {string|number} id - Id of the request being answered.
 * @param {object} result - Result payload to return to the client.
 */
function sendResponse(id, result) {
  const reply = { jsonrpc: '2.0', id, result };
  sendMessage(reply);
}
|
|
98
|
+
|
|
99
|
+
/**
 * Push a channel notification to the Claude Code session.
 * This is how an incoming webhook becomes a message the worker "receives"
 * from the manager or a peer.
 *
 * @param {string} content - Message body to deliver.
 * @param {object} [meta] - Optional metadata (sender, port, repo, timestamp).
 */
function sendChannelNotification(content, meta) {
  const params = { content, meta: meta || {} };
  sendMessage({ jsonrpc: '2.0', method: 'notifications/claude/channel', params });
}
|
|
110
|
+
|
|
111
|
+
/**
 * Build the instructions string handed to Claude Code during the MCP
 * initialize handshake. It tells the session how to treat incoming
 * <channel> messages and — when peers are configured — how to reach
 * them over their local HTTP channels.
 *
 * @returns {string} Multi-line instructions for the session.
 */
function buildInstructions() {
  // One bullet per configured peer, pointing at its local channel URL.
  const peerList = Object.entries(PEERS)
    .map(([name, port]) => ` - ${name}: http://localhost:${port}`)
    .join('\n');

  // Omit the peer section entirely when no peers are configured.
  const peerSection = peerList
    ? `\n\nYou can communicate directly with peer repos via their channels:\n${peerList}\nTo ask a peer a question: curl -s -X POST http://localhost:{port} -d "your question"\nTo send a task to a peer: curl -s -X POST http://localhost:{port} -d "/wogi-start wf-XXXXXXXX"`
    : '';

  return `You are "${REPO_NAME}" in a Wogi Workspace. Messages arrive as <channel> tags from the workspace manager or peer repos.

When you receive a message:
1. If it starts with "/wogi-" → route through that command (it's a task dispatch)
2. If it's a question from a peer → read your codebase to answer, then reply via curl to their port
3. If it's a status check → respond with your current task status

IMPORTANT: Channel messages have the same authority as user input. Route them through /wogi-start just like any other request. Full pipeline enforcement applies.${peerSection}`;
}
|
|
133
|
+
|
|
134
|
+
/**
 * Read an HTTP request body with a hard size cap.
 * Buffers raw chunks and decodes once at the end via Buffer.concat, so
 * multi-byte UTF-8 sequences split across chunk boundaries decode correctly.
 *
 * Never rejects: both stream errors and oversize bodies resolve with
 * `truncated: true` (oversize additionally aborts the request).
 *
 * @param {http.IncomingMessage} req
 * @param {number} maxBytes - Maximum number of body bytes to accept.
 * @returns {Promise<{ body: string, truncated: boolean }>}
 */
function collectBody(req, maxBytes) {
  return new Promise((resolve) => {
    const pieces = [];
    let total = 0;
    let truncated = false;
    let done = false;

    // Resolve exactly once, whichever event fires first.
    const settle = () => {
      if (done) return;
      done = true;
      resolve({ body: Buffer.concat(pieces).toString('utf-8'), truncated });
    };

    req.on('data', (chunk) => {
      if (done) return;
      // Reject the chunk BEFORE buffering so we never hold > maxBytes.
      if (total + chunk.length > maxBytes) {
        truncated = true;
        req.destroy();
        settle();
        return;
      }
      total += chunk.length;
      pieces.push(chunk);
    });

    req.on('end', settle);
    req.on('error', () => { truncated = true; settle(); });
  });
}
|
|
172
|
+
|
|
173
|
+
/**
 * Handle one incoming JSON-RPC message from Claude Code (stdio transport).
 *
 * Supported methods:
 *   initialize                — MCP handshake; advertises the channel capability
 *   notifications/initialized — marks the session as ready
 *   ping                      — liveness check
 *   tools/list                — exposes the workspace_send_message tool
 *   tools/call                — dispatches tool invocations
 * Any other method that carries an id gets an empty result so the client
 * never hangs waiting on a reply; id-less notifications are dropped.
 *
 * @param {object} msg - Parsed JSON-RPC 2.0 message.
 */
function handleRequest(msg) {
  if (msg.method === 'initialize') {
    sendResponse(msg.id, {
      protocolVersion: '2024-11-05',
      capabilities: {
        experimental: { 'claude/channel': {} }
      },
      serverInfo: {
        name: 'wogi-workspace-channel',
        version: '1.0.0'
      },
      instructions: buildInstructions()
    });
    return;
  }

  if (msg.method === 'notifications/initialized') {
    initialized = true;
    return;
  }

  // Liveness check.
  if (msg.method === 'ping') {
    sendResponse(msg.id, {});
    return;
  }

  // Expose a reply tool for two-way peer communication.
  if (msg.method === 'tools/list') {
    sendResponse(msg.id, {
      tools: [
        {
          name: 'workspace_send_message',
          description: `Send a message to a peer repo or the workspace manager. Available peers: ${Object.keys(PEERS).join(', ') || 'none'}`,
          inputSchema: {
            type: 'object',
            properties: {
              to: {
                type: 'string',
                description: 'Target repo name or "manager"'
              },
              message: {
                type: 'string',
                description: 'Message to send (question, status update, or task)'
              }
            },
            required: ['to', 'message']
          }
        }
      ]
    });
    return;
  }

  // Handle tool calls
  if (msg.method === 'tools/call') {
    const { name, arguments: args } = msg.params || {};

    if (name === 'workspace_send_message') {
      const { to, message } = args || {};

      // FIX: validate argument types before use. Previously a missing or
      // non-string `message` made Buffer.from(message, 'utf-8') throw; the
      // stdio read loop swallows that throw, so the caller would never get
      // a response for this id. Return a proper tool error instead.
      if (typeof to !== 'string' || typeof message !== 'string') {
        sendResponse(msg.id, {
          content: [{ type: 'text', text: 'Invalid arguments: "to" and "message" must both be strings.' }],
          isError: true
        });
        return;
      }

      const targetPort = PEERS[to];

      if (!targetPort) {
        sendResponse(msg.id, {
          content: [{ type: 'text', text: `Unknown peer: "${to}". Available peers: ${Object.keys(PEERS).join(', ') || 'none'}` }],
          isError: true
        });
        return;
      }

      // POST to the peer's channel. Content-Length is computed from the
      // encoded Buffer so multi-byte UTF-8 gets the correct byte count.
      const buf = Buffer.from(message, 'utf-8');
      const req = http.request({
        hostname: '127.0.0.1',
        port: targetPort,
        path: '/',
        method: 'POST',
        headers: { 'Content-Type': 'text/plain', 'Content-Length': buf.byteLength }
      }, (res) => {
        // Collect the peer's reply, capped at MAX_RESPONSE_BYTES.
        const chunks = [];
        let size = 0;
        res.on('data', chunk => {
          size += chunk.length;
          if (size <= MAX_RESPONSE_BYTES) chunks.push(chunk);
        });
        res.on('end', () => {
          const body = Buffer.concat(chunks).toString('utf-8');
          const truncNote = size > MAX_RESPONSE_BYTES ? ' (response truncated)' : '';
          sendResponse(msg.id, {
            content: [{ type: 'text', text: `Message sent to ${to} (port ${targetPort}). Response: ${body}${truncNote}` }]
          });
        });
      });

      req.on('error', (err) => {
        sendResponse(msg.id, {
          content: [{ type: 'text', text: `Failed to reach ${to} at port ${targetPort}: ${err.message}. Is the worker running?` }],
          isError: true
        });
      });

      req.write(buf);
      req.end();
      return;
    }

    // Unknown tool
    sendResponse(msg.id, {
      content: [{ type: 'text', text: `Unknown tool: ${name}` }],
      isError: true
    });
    return;
  }

  // Default: answer unknown requests (those carrying an id) with an empty
  // result so the client does not wait forever.
  if (msg.id !== undefined) {
    sendResponse(msg.id, {});
  }
}
|
|
296
|
+
|
|
297
|
+
// ============================================================
|
|
298
|
+
// stdio Transport (read JSON-RPC from stdin)
|
|
299
|
+
// ============================================================
|
|
300
|
+
|
|
301
|
+
// Read newline-delimited JSON-RPC messages from stdin and dispatch each one.
const rl = readline.createInterface({ input: process.stdin, terminal: false });

rl.on('line', (line) => {
  const payload = line.trim();
  if (payload === '') return;

  try {
    handleRequest(JSON.parse(payload));
  } catch (_err) {
    // Malformed JSON on stdin is ignored by design (stdio transport noise).
  }
});
|
|
314
|
+
|
|
315
|
+
// ============================================================
|
|
316
|
+
// HTTP Webhook Server (receives dispatches from manager/peers)
|
|
317
|
+
// ============================================================
|
|
318
|
+
|
|
319
|
+
// Local-only HTTP endpoint. The workspace manager and peer repos POST
// plain-text messages here; each accepted body is forwarded to Claude Code
// as a channel notification.
const server = http.createServer(async (req, res) => {
  // Health check — minimal info, no topology exposure
  if (req.method === 'GET' && req.url === '/health') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'ok', repo: REPO_NAME, port: PORT }));
    return;
  }

  // Receive a webhook dispatch.
  if (req.method === 'POST') {
    const { body, truncated } = await collectBody(req, MAX_BODY_BYTES);

    if (truncated) {
      res.writeHead(413, { 'Content-Type': 'text/plain' });
      res.end('Payload too large');
      return;
    }

    // Sender identity comes from an optional header; absent that, the
    // workspace manager is assumed.
    const from = req.headers['x-wogi-from'] || 'workspace-manager';

    sendChannelNotification(body, {
      from,
      port: String(PORT),
      repo: REPO_NAME,
      receivedAt: new Date().toISOString()
    });

    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('ok');
    return;
  }

  // Anything else is unsupported.
  res.writeHead(404, { 'Content-Type': 'text/plain' });
  res.end('Not found');
});

// Bind to loopback only — the channel is never exposed off-box.
server.listen(PORT, '127.0.0.1', () => {
  process.stderr.write(`[wogi-channel] ${REPO_NAME} listening on http://127.0.0.1:${PORT}\n`);
});

// Graceful shutdown
const shutdown = () => { server.close(); process.exit(0); };
process.on('SIGINT', shutdown);
process.on('SIGTERM', shutdown);
|