@grackle-ai/core 0.75.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/dist/adapter-config.d.ts +6 -0
- package/dist/adapter-config.d.ts.map +1 -0
- package/dist/adapter-config.js +19 -0
- package/dist/adapter-config.js.map +1 -0
- package/dist/adapter-manager.d.ts +22 -0
- package/dist/adapter-manager.d.ts.map +1 -0
- package/dist/adapter-manager.js +81 -0
- package/dist/adapter-manager.js.map +1 -0
- package/dist/auto-reconnect.d.ts +23 -0
- package/dist/auto-reconnect.d.ts.map +1 -0
- package/dist/auto-reconnect.js +164 -0
- package/dist/auto-reconnect.js.map +1 -0
- package/dist/compute-task-status.d.ts +28 -0
- package/dist/compute-task-status.d.ts.map +1 -0
- package/dist/compute-task-status.js +70 -0
- package/dist/compute-task-status.js.map +1 -0
- package/dist/credential-bundle.d.ts +12 -0
- package/dist/credential-bundle.d.ts.map +1 -0
- package/dist/credential-bundle.js +183 -0
- package/dist/credential-bundle.js.map +1 -0
- package/dist/event-bus.d.ts +37 -0
- package/dist/event-bus.d.ts.map +1 -0
- package/dist/event-bus.js +65 -0
- package/dist/event-bus.js.map +1 -0
- package/dist/event-processor.d.ts +36 -0
- package/dist/event-processor.d.ts.map +1 -0
- package/dist/event-processor.js +312 -0
- package/dist/event-processor.js.map +1 -0
- package/dist/grpc-service.d.ts +22 -0
- package/dist/grpc-service.d.ts.map +1 -0
- package/dist/grpc-service.js +1724 -0
- package/dist/grpc-service.js.map +1 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +25 -0
- package/dist/index.js.map +1 -0
- package/dist/knowledge-init.d.ts +27 -0
- package/dist/knowledge-init.d.ts.map +1 -0
- package/dist/knowledge-init.js +212 -0
- package/dist/knowledge-init.js.map +1 -0
- package/dist/lifecycle.d.ts +36 -0
- package/dist/lifecycle.d.ts.map +1 -0
- package/dist/lifecycle.js +112 -0
- package/dist/lifecycle.js.map +1 -0
- package/dist/log-writer.d.ts +32 -0
- package/dist/log-writer.d.ts.map +1 -0
- package/dist/log-writer.js +104 -0
- package/dist/log-writer.js.map +1 -0
- package/dist/logger.d.ts +4 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +10 -0
- package/dist/logger.js.map +1 -0
- package/dist/pipe-delivery.d.ts +41 -0
- package/dist/pipe-delivery.d.ts.map +1 -0
- package/dist/pipe-delivery.js +186 -0
- package/dist/pipe-delivery.js.map +1 -0
- package/dist/processor-registry.d.ts +25 -0
- package/dist/processor-registry.d.ts.map +1 -0
- package/dist/processor-registry.js +58 -0
- package/dist/processor-registry.js.map +1 -0
- package/dist/reanimate-agent.d.ts +12 -0
- package/dist/reanimate-agent.d.ts.map +1 -0
- package/dist/reanimate-agent.js +76 -0
- package/dist/reanimate-agent.js.map +1 -0
- package/dist/session-recovery.d.ts +16 -0
- package/dist/session-recovery.d.ts.map +1 -0
- package/dist/session-recovery.js +129 -0
- package/dist/session-recovery.js.map +1 -0
- package/dist/signals/sigchld.d.ts +7 -0
- package/dist/signals/sigchld.d.ts.map +1 -0
- package/dist/signals/sigchld.js +167 -0
- package/dist/signals/sigchld.js.map +1 -0
- package/dist/signals/signal-delivery.d.ts +14 -0
- package/dist/signals/signal-delivery.d.ts.map +1 -0
- package/dist/signals/signal-delivery.js +166 -0
- package/dist/signals/signal-delivery.js.map +1 -0
- package/dist/stream-hub.d.ts +14 -0
- package/dist/stream-hub.d.ts.map +1 -0
- package/dist/stream-hub.js +95 -0
- package/dist/stream-hub.js.map +1 -0
- package/dist/stream-registry.d.ts +84 -0
- package/dist/stream-registry.d.ts.map +1 -0
- package/dist/stream-registry.js +363 -0
- package/dist/stream-registry.js.map +1 -0
- package/dist/test-utils/integration-setup.d.ts +11 -0
- package/dist/test-utils/integration-setup.d.ts.map +1 -0
- package/dist/test-utils/integration-setup.js +32 -0
- package/dist/test-utils/integration-setup.js.map +1 -0
- package/dist/test-utils/mock-database.d.ts +130 -0
- package/dist/test-utils/mock-database.d.ts.map +1 -0
- package/dist/test-utils/mock-database.js +147 -0
- package/dist/test-utils/mock-database.js.map +1 -0
- package/dist/token-push.d.ts +22 -0
- package/dist/token-push.d.ts.map +1 -0
- package/dist/token-push.js +78 -0
- package/dist/token-push.js.map +1 -0
- package/dist/transcript.d.ts +5 -0
- package/dist/transcript.d.ts.map +1 -0
- package/dist/transcript.js +71 -0
- package/dist/transcript.js.map +1 -0
- package/dist/utils/exec.d.ts +17 -0
- package/dist/utils/exec.d.ts.map +1 -0
- package/dist/utils/exec.js +21 -0
- package/dist/utils/exec.js.map +1 -0
- package/dist/utils/format-gh-error.d.ts +6 -0
- package/dist/utils/format-gh-error.d.ts.map +1 -0
- package/dist/utils/format-gh-error.js +30 -0
- package/dist/utils/format-gh-error.js.map +1 -0
- package/dist/utils/network.d.ts +7 -0
- package/dist/utils/network.d.ts.map +1 -0
- package/dist/utils/network.js +21 -0
- package/dist/utils/network.js.map +1 -0
- package/dist/utils/ports.d.ts +3 -0
- package/dist/utils/ports.d.ts.map +1 -0
- package/dist/utils/ports.js +19 -0
- package/dist/utils/ports.js.map +1 -0
- package/dist/utils/sleep.d.ts +3 -0
- package/dist/utils/sleep.d.ts.map +1 -0
- package/dist/utils/sleep.js +5 -0
- package/dist/utils/sleep.js.map +1 -0
- package/dist/ws-bridge.d.ts +30 -0
- package/dist/ws-bridge.d.ts.map +1 -0
- package/dist/ws-bridge.js +372 -0
- package/dist/ws-bridge.js.map +1 -0
- package/dist/ws-broadcast.d.ts +19 -0
- package/dist/ws-broadcast.d.ts.map +1 -0
- package/dist/ws-broadcast.js +60 -0
- package/dist/ws-broadcast.js.map +1 -0
- package/package.json +57 -0
|
@@ -0,0 +1,1724 @@
|
|
|
1
|
+
import { ConnectError, Code } from "@connectrpc/connect";
|
|
2
|
+
import { create } from "@bufbuild/protobuf";
|
|
3
|
+
import { grackle, powerline } from "@grackle-ai/common";
|
|
4
|
+
import { v4 as uuid } from "uuid";
|
|
5
|
+
import { envRegistry, sessionStore, tokenStore, workspaceStore, taskStore, findingStore, personaStore, settingsStore, isAllowedSettingKey, credentialProviders, grackleHome, safeParseJsonArray, slugify } from "@grackle-ai/database";
|
|
6
|
+
import * as adapterManager from "./adapter-manager.js";
|
|
7
|
+
import { reconnectOrProvision } from "@grackle-ai/adapter-sdk";
|
|
8
|
+
import * as streamHub from "./stream-hub.js";
|
|
9
|
+
import * as tokenPush from "./token-push.js";
|
|
10
|
+
import { parseAdapterConfig } from "./adapter-config.js";
|
|
11
|
+
import { emit } from "./event-bus.js";
|
|
12
|
+
import { processEventStream } from "./event-processor.js";
|
|
13
|
+
import * as processorRegistry from "./processor-registry.js";
|
|
14
|
+
import { recoverSuspendedSessions } from "./session-recovery.js";
|
|
15
|
+
import { clearReconnectState } from "./auto-reconnect.js";
|
|
16
|
+
import { join } from "node:path";
|
|
17
|
+
import { LOGS_DIR, DEFAULT_WEB_PORT, DEFAULT_MCP_PORT, MAX_TASK_DEPTH, SESSION_STATUS, TERMINAL_SESSION_STATUSES, END_REASON, TASK_STATUS, ROOT_TASK_ID, taskStatusToEnum, taskStatusToString, workspaceStatusToEnum, claudeProviderModeToEnum, providerToggleToEnum, eventTypeToEnum, } from "@grackle-ai/common";
|
|
18
|
+
import * as logWriter from "./log-writer.js";
|
|
19
|
+
import { resolvePersona, fetchOrchestratorContext, SystemPromptBuilder, buildTaskPrompt } from "@grackle-ai/prompt";
|
|
20
|
+
import { createScopedToken, loadOrCreateApiKey, generatePairingCode } from "@grackle-ai/auth";
|
|
21
|
+
import { computeTaskStatus } from "./compute-task-status.js";
|
|
22
|
+
import { logger } from "./logger.js";
|
|
23
|
+
import { reanimateAgent } from "./reanimate-agent.js";
|
|
24
|
+
import { getKnowledgeEmbedder, isKnowledgeEnabled } from "./knowledge-init.js";
|
|
25
|
+
import { knowledgeSearch, getNode as getKnowledgeNodeById, expandNode, createNativeNode, ingest, createPassThroughChunker, listRecentNodes, } from "@grackle-ai/knowledge";
|
|
26
|
+
import { exec } from "./utils/exec.js";
|
|
27
|
+
import { formatGhError } from "./utils/format-gh-error.js";
|
|
28
|
+
import { detectLanIp } from "./utils/network.js";
|
|
29
|
+
import * as streamRegistry from "./stream-registry.js";
|
|
30
|
+
import * as pipeDelivery from "./pipe-delivery.js";
|
|
31
|
+
import { ensureAsyncDeliveryListener } from "./pipe-delivery.js";
|
|
32
|
+
import { cleanupLifecycleStream } from "./lifecycle.js";
|
|
33
|
+
/** Valid pipe mode values for SpawnRequest and StartTaskRequest ("" means no pipe). */
const VALID_PIPE_MODES = new Set(["", "sync", "async", "detach"]);
/** Timeout for `gh codespace list` in milliseconds. */
const GH_CODESPACE_LIST_TIMEOUT_MS = 30_000;
/** Timeout for `gh codespace create` in milliseconds (provisioning can be slow). */
const GH_CODESPACE_CREATE_TIMEOUT_MS = 300_000;
/** Maximum number of codespaces returned by `gh codespace list`. */
const GH_CODESPACE_LIST_LIMIT = 50;
|
|
41
|
+
/**
 * Validate pipe mode and parentSessionId. Throws ConnectError on invalid input.
 * An empty/absent pipe mode is always acceptable; "sync" and "async" modes
 * additionally require a parent session to pipe into.
 */
function validatePipeInputs(pipe, parentSessionId) {
    if (!pipe) {
        return; // No pipe requested — nothing to validate.
    }
    if (!VALID_PIPE_MODES.has(pipe)) {
        throw new ConnectError(`Invalid pipe mode: "${pipe}". Must be "sync", "async", "detach", or empty.`, Code.InvalidArgument);
    }
    if (pipe !== "detach" && !parentSessionId) {
        throw new ConnectError(`Pipe mode "${pipe}" requires parent_session_id`, Code.InvalidArgument);
    }
}
|
|
50
|
+
/**
 * Map a bind host to a dialable URL host. Wildcard addresses become loopback,
 * unless GRACKLE_DOCKER_HOST is set (DooD mode) — in that case, use that value
 * so sibling containers can reach the server by container name.
 */
export function toDialableHost(bindHost) {
    // Bare IPv6 literals must be bracketed to be usable inside a URL.
    const bracketed = (host) => (host.includes(":") ? `[${host}]` : host);
    const isWildcard = bindHost === "0.0.0.0" || bindHost === "::";
    if (!isWildcard) {
        return bracketed(bindHost);
    }
    const dockerHost = process.env.GRACKLE_DOCKER_HOST;
    if (dockerHost) {
        // Already bracketed (e.g. "[fd00::2]") — pass through unchanged.
        if (dockerHost.startsWith("[") && dockerHost.endsWith("]")) {
            return dockerHost;
        }
        return bracketed(dockerHost);
    }
    // No DooD override: dial loopback on the matching IP family.
    return bindHost === "::" ? "[::1]" : "127.0.0.1";
}
|
|
68
|
+
/** Convert an environment registry row into an Environment proto message. */
function envRowToProto(row) {
    const { id, displayName, adapterType, adapterConfig, bootstrapped, status, createdAt } = row;
    return create(grackle.EnvironmentSchema, {
        id,
        displayName,
        adapterType,
        adapterConfig,
        bootstrapped,
        status,
        // Optional columns may be absent; proto3 string fields need "" not null.
        lastSeen: row.lastSeen || "",
        envInfo: row.envInfo || "",
        createdAt,
    });
}
|
|
81
|
+
/** Convert a session database row into a Session proto message. */
function sessionRowToProto(row) {
    // Nullable DB columns are coalesced to "" because proto3 string fields
    // cannot carry null/undefined.
    const nullableFields = {
        runtimeSessionId: row.runtimeSessionId ?? "",
        logPath: row.logPath ?? "",
        suspendedAt: row.suspendedAt ?? "",
        endedAt: row.endedAt ?? "",
        error: row.error ?? "",
        endReason: row.endReason ?? "",
    };
    return create(grackle.SessionSchema, {
        id: row.id,
        environmentId: row.environmentId,
        runtime: row.runtime,
        prompt: row.prompt,
        model: row.model,
        status: row.status,
        turns: row.turns,
        startedAt: row.startedAt,
        taskId: row.taskId,
        personaId: row.personaId,
        inputTokens: row.inputTokens,
        outputTokens: row.outputTokens,
        costUsd: row.costUsd,
        ...nullableFields,
    });
}
|
|
104
|
+
/** Convert a workspace database row into a Workspace proto message. */
function workspaceRowToProto(row) {
    const {
        id, name, description, repoUrl, environmentId,
        createdAt, updatedAt, useWorktrees, worktreeBasePath, defaultPersonaId,
    } = row;
    return create(grackle.WorkspaceSchema, {
        id,
        name,
        description,
        repoUrl,
        environmentId,
        // The DB stores status as a string; the proto field is an enum.
        status: workspaceStatusToEnum(row.status),
        createdAt,
        updatedAt,
        useWorktrees,
        worktreeBasePath,
        defaultPersonaId,
    });
}
|
|
119
|
+
/**
 * Convert a task database row into a Task proto message.
 * Callers may precompute childIds / computedStatus / latestSessionId to avoid
 * per-row lookups; each falls back to a sensible default when omitted.
 */
function taskRowToProto(row, childIds, computedStatus, latestSessionId) {
    // Only hit the store when the caller did not supply the child list.
    const childTaskIds = childIds ?? taskStore.getChildren(row.id).map((child) => child.id);
    return create(grackle.TaskSchema, {
        id: row.id,
        workspaceId: row.workspaceId ?? undefined,
        title: row.title,
        description: row.description,
        // Prefer the rolled-up status computed from descendants when supplied.
        status: taskStatusToEnum(computedStatus ?? row.status),
        branch: row.branch,
        latestSessionId: latestSessionId ?? "",
        dependsOn: safeParseJsonArray(row.dependsOn),
        startedAt: row.startedAt ?? "",
        completedAt: row.completedAt ?? "",
        createdAt: row.createdAt,
        updatedAt: row.updatedAt,
        sortOrder: row.sortOrder,
        parentTaskId: row.parentTaskId,
        depth: row.depth,
        childTaskIds,
        canDecompose: row.canDecompose,
        defaultPersonaId: row.defaultPersonaId,
    });
}
|
|
141
|
+
/** Convert a finding database row into a Finding proto message. */
function findingRowToProto(row) {
    // Findings map 1:1 onto the proto except for tags, which the DB stores
    // as a JSON-encoded string array.
    const tags = safeParseJsonArray(row.tags);
    return create(grackle.FindingSchema, { ...row, tags });
}
|
|
147
|
+
/**
 * Safely parse a JSON string, returning the fallback value on failure.
 * Empty or nullish input also yields the fallback.
 */
function safeParseJson(value, fallback) {
    if (!value) {
        return fallback;
    }
    let parsed = fallback;
    try {
        parsed = JSON.parse(value);
    }
    catch {
        // Malformed JSON — keep the caller-supplied fallback.
    }
    return parsed;
}
|
|
159
|
+
/** Convert a persona database row to a Persona proto message. */
function personaRowToProto(row) {
    // Keep only string entries from an untrusted JSON-derived array field.
    const onlyStrings = (list) => (Array.isArray(list) ? list.filter((item) => typeof item === "string") : []);
    // A usable MCP server entry must at least carry a string name and command.
    // typeof null === "object" and JSON.parse can legitimately return null,
    // so the null check has to come first.
    const isUsableServer = (entry) => entry !== null &&
        typeof entry === "object" &&
        typeof entry.name === "string" &&
        typeof entry.command === "string";
    const toolConfig = safeParseJson(row.toolConfig, {});
    const rawServers = safeParseJson(row.mcpServers, []);
    const mcpServers = rawServers
        .filter(isUsableServer)
        .map((entry) => create(grackle.McpServerConfigSchema, {
        name: entry.name,
        command: entry.command,
        args: onlyStrings(entry.args),
        tools: onlyStrings(entry.tools),
    }));
    return create(grackle.PersonaSchema, {
        id: row.id,
        name: row.name,
        description: row.description,
        systemPrompt: row.systemPrompt,
        toolConfig: create(grackle.ToolConfigSchema, {
            allowedTools: onlyStrings(toolConfig.allowedTools),
            disallowedTools: onlyStrings(toolConfig.disallowedTools),
        }),
        runtime: row.runtime,
        model: row.model,
        maxTurns: row.maxTurns,
        mcpServers,
        createdAt: row.createdAt,
        updatedAt: row.updatedAt,
        // Older rows may predate these columns; default type to "agent".
        type: row.type || "agent",
        script: row.script || "",
    });
}
|
|
202
|
+
/** Convert persona MCP server configs to a JSON string for the PowerLine SpawnRequest. */
function personaMcpServersToJson(row) {
    let servers;
    try {
        servers = JSON.parse(row.mcpServers || "[]");
    }
    catch {
        // Corrupt column data: warn and behave as if no servers were configured.
        logger.warn({ personaId: row.id }, "Failed to parse persona mcpServers JSON; ignoring");
        return "";
    }
    const hasServers = Array.isArray(servers) && servers.length > 0;
    return hasServers ? buildMcpServersJson(servers) : "";
}
|
|
217
|
+
/**
 * Build a JSON string of MCP server configs for the PowerLine SpawnRequest.
 * The result is an object keyed by server name; entries with duplicate names
 * overwrite earlier ones.
 */
export function buildMcpServersJson(mcpServers) {
    const entries = mcpServers.map((server) => {
        const config = {
            command: server.command,
            args: server.args || [],
        };
        // Only emit a tools key when the server restricts its tool set.
        if (server.tools && server.tools.length > 0) {
            config.tools = server.tools;
        }
        return [server.name, config];
    });
    return JSON.stringify(Object.fromEntries(entries));
}
|
|
229
|
+
/**
 * Walk up the task parent chain and return the environmentId from the first
 * ancestor that has a session. Returns empty string if no ancestor has one.
 */
export function resolveAncestorEnvironmentId(parentTaskId) {
    let taskId = parentTaskId;
    let hops = 0;
    // Bounded by MAX_TASK_DEPTH so a malformed parent chain cannot loop forever.
    while (taskId && hops < MAX_TASK_DEPTH) {
        const session = sessionStore.getLatestSessionForTask(taskId);
        if (session?.environmentId) {
            return session.environmentId;
        }
        const task = taskStore.getTask(taskId);
        if (!task) {
            break; // Dangling parent reference — stop walking.
        }
        taskId = task.parentTaskId;
        hops += 1;
    }
    return "";
}
|
|
248
|
+
/** Register all Grackle gRPC service handlers on the given ConnectRPC router. */
|
|
249
|
+
export function registerGrackleRoutes(router) {
|
|
250
|
+
router.service(grackle.Grackle, {
|
|
251
|
+
async listEnvironments() {
|
|
252
|
+
const rows = envRegistry.listEnvironments();
|
|
253
|
+
return create(grackle.EnvironmentListSchema, {
|
|
254
|
+
environments: rows.map(envRowToProto),
|
|
255
|
+
});
|
|
256
|
+
},
|
|
257
|
+
async addEnvironment(req) {
|
|
258
|
+
if (!req.displayName || !req.adapterType) {
|
|
259
|
+
throw new ConnectError("displayName and adapterType required", Code.InvalidArgument);
|
|
260
|
+
}
|
|
261
|
+
const id = req.displayName.toLowerCase().replace(/[^a-z0-9-]/g, "-");
|
|
262
|
+
envRegistry.addEnvironment(id, req.displayName, req.adapterType, req.adapterConfig);
|
|
263
|
+
emit("environment.changed", {});
|
|
264
|
+
const row = envRegistry.getEnvironment(id);
|
|
265
|
+
return envRowToProto(row);
|
|
266
|
+
},
|
|
267
|
+
async updateEnvironment(req) {
|
|
268
|
+
if (!req.id) {
|
|
269
|
+
throw new ConnectError("id is required", Code.InvalidArgument);
|
|
270
|
+
}
|
|
271
|
+
const existing = envRegistry.getEnvironment(req.id);
|
|
272
|
+
if (!existing) {
|
|
273
|
+
throw new ConnectError(`Environment not found: ${req.id}`, Code.NotFound);
|
|
274
|
+
}
|
|
275
|
+
const displayName = req.displayName !== undefined ? req.displayName : undefined;
|
|
276
|
+
if (displayName?.trim() === "") {
|
|
277
|
+
throw new ConnectError("Environment name cannot be empty", Code.InvalidArgument);
|
|
278
|
+
}
|
|
279
|
+
let adapterConfig;
|
|
280
|
+
if (req.adapterConfig !== undefined) {
|
|
281
|
+
const raw = req.adapterConfig.trim() || "{}";
|
|
282
|
+
let parsed;
|
|
283
|
+
try {
|
|
284
|
+
parsed = JSON.parse(raw);
|
|
285
|
+
}
|
|
286
|
+
catch {
|
|
287
|
+
throw new ConnectError("adapterConfig is not valid JSON", Code.InvalidArgument);
|
|
288
|
+
}
|
|
289
|
+
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
|
|
290
|
+
throw new ConnectError("adapterConfig must be a JSON object", Code.InvalidArgument);
|
|
291
|
+
}
|
|
292
|
+
adapterConfig = raw;
|
|
293
|
+
}
|
|
294
|
+
const trimmedName = displayName !== undefined ? displayName.trim() : undefined;
|
|
295
|
+
if (trimmedName === undefined && adapterConfig === undefined) {
|
|
296
|
+
throw new ConnectError("No updatable fields provided", Code.InvalidArgument);
|
|
297
|
+
}
|
|
298
|
+
envRegistry.updateEnvironment(req.id, {
|
|
299
|
+
displayName: trimmedName,
|
|
300
|
+
adapterConfig,
|
|
301
|
+
});
|
|
302
|
+
logger.info({ environmentId: req.id, displayName: trimmedName }, "Environment updated");
|
|
303
|
+
emit("environment.changed", {});
|
|
304
|
+
const updated = envRegistry.getEnvironment(req.id);
|
|
305
|
+
return envRowToProto(updated);
|
|
306
|
+
},
|
|
307
|
+
async removeEnvironment(req) {
|
|
308
|
+
// Block deletion if workspaces still reference this environment
|
|
309
|
+
const wsCount = workspaceStore.countWorkspacesByEnvironment(req.id);
|
|
310
|
+
if (wsCount > 0) {
|
|
311
|
+
throw new ConnectError(`Cannot remove environment: ${wsCount} active workspace(s) still reference it. Archive or reparent them first.`, Code.FailedPrecondition);
|
|
312
|
+
}
|
|
313
|
+
// Stop auto-reconnect attempts for this environment
|
|
314
|
+
clearReconnectState(req.id);
|
|
315
|
+
// Disconnect the adapter if currently connected
|
|
316
|
+
const env = envRegistry.getEnvironment(req.id);
|
|
317
|
+
if (env) {
|
|
318
|
+
const adapter = adapterManager.getAdapter(env.adapterType);
|
|
319
|
+
if (adapter) {
|
|
320
|
+
try {
|
|
321
|
+
await adapter.disconnect(req.id);
|
|
322
|
+
}
|
|
323
|
+
catch {
|
|
324
|
+
/* best-effort */
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
adapterManager.removeConnection(req.id);
|
|
329
|
+
// Delete sessions referencing this environment (FK constraint)
|
|
330
|
+
sessionStore.deleteByEnvironment(req.id);
|
|
331
|
+
envRegistry.removeEnvironment(req.id);
|
|
332
|
+
emit("environment.changed", {});
|
|
333
|
+
emit("environment.removed", { environmentId: req.id });
|
|
334
|
+
return create(grackle.EmptySchema, {});
|
|
335
|
+
},
|
|
336
|
+
async *provisionEnvironment(req) {
|
|
337
|
+
// Manual provision overrides auto-reconnect
|
|
338
|
+
clearReconnectState(req.id);
|
|
339
|
+
const env = envRegistry.getEnvironment(req.id);
|
|
340
|
+
if (!env) {
|
|
341
|
+
yield create(grackle.ProvisionEventSchema, {
|
|
342
|
+
stage: "error",
|
|
343
|
+
message: `Environment not found: ${req.id}`,
|
|
344
|
+
progress: 0,
|
|
345
|
+
});
|
|
346
|
+
return;
|
|
347
|
+
}
|
|
348
|
+
const adapter = adapterManager.getAdapter(env.adapterType);
|
|
349
|
+
if (!adapter) {
|
|
350
|
+
yield create(grackle.ProvisionEventSchema, {
|
|
351
|
+
stage: "error",
|
|
352
|
+
message: `No adapter for type: ${env.adapterType}`,
|
|
353
|
+
progress: 0,
|
|
354
|
+
});
|
|
355
|
+
return;
|
|
356
|
+
}
|
|
357
|
+
envRegistry.updateEnvironmentStatus(req.id, "connecting");
|
|
358
|
+
emit("environment.changed", {});
|
|
359
|
+
const config = parseAdapterConfig(env.adapterConfig);
|
|
360
|
+
config.defaultRuntime = env.defaultRuntime;
|
|
361
|
+
const powerlineToken = env.powerlineToken;
|
|
362
|
+
try {
|
|
363
|
+
for await (const event of reconnectOrProvision(req.id, adapter, config, powerlineToken, !!env.bootstrapped)) {
|
|
364
|
+
yield create(grackle.ProvisionEventSchema, {
|
|
365
|
+
stage: event.stage,
|
|
366
|
+
message: event.message,
|
|
367
|
+
progress: event.progress,
|
|
368
|
+
});
|
|
369
|
+
}
|
|
370
|
+
}
|
|
371
|
+
catch (err) {
|
|
372
|
+
logger.error({ environmentId: req.id, err }, "Provision/bootstrap failed");
|
|
373
|
+
const currentEnv = envRegistry.getEnvironment(req.id);
|
|
374
|
+
if (currentEnv?.status !== "connected") {
|
|
375
|
+
envRegistry.updateEnvironmentStatus(req.id, "error");
|
|
376
|
+
emit("environment.changed", {});
|
|
377
|
+
}
|
|
378
|
+
yield create(grackle.ProvisionEventSchema, {
|
|
379
|
+
stage: "error",
|
|
380
|
+
message: `Provision failed: ${err instanceof Error ? err.message : String(err)}`,
|
|
381
|
+
progress: 0,
|
|
382
|
+
});
|
|
383
|
+
return;
|
|
384
|
+
}
|
|
385
|
+
try {
|
|
386
|
+
const conn = await adapter.connect(req.id, config, powerlineToken);
|
|
387
|
+
adapterManager.setConnection(req.id, conn);
|
|
388
|
+
// Push stored tokens to newly connected environment
|
|
389
|
+
await tokenPush.pushToEnv(req.id);
|
|
390
|
+
envRegistry.updateEnvironmentStatus(req.id, "connected");
|
|
391
|
+
envRegistry.markBootstrapped(req.id);
|
|
392
|
+
emit("environment.changed", {});
|
|
393
|
+
// Auto-recover suspended sessions (fire-and-forget)
|
|
394
|
+
recoverSuspendedSessions(req.id, conn).catch((err) => {
|
|
395
|
+
logger.error({ environmentId: req.id, err }, "Session recovery failed");
|
|
396
|
+
});
|
|
397
|
+
}
|
|
398
|
+
catch (err) {
|
|
399
|
+
// adapter.connect() actually failed
|
|
400
|
+
envRegistry.updateEnvironmentStatus(req.id, "error");
|
|
401
|
+
emit("environment.changed", {});
|
|
402
|
+
yield create(grackle.ProvisionEventSchema, {
|
|
403
|
+
stage: "error",
|
|
404
|
+
message: `Connection failed: ${err instanceof Error ? err.message : String(err)}`,
|
|
405
|
+
progress: 0,
|
|
406
|
+
});
|
|
407
|
+
return;
|
|
408
|
+
}
|
|
409
|
+
// Best-effort: notify client that provision completed.
|
|
410
|
+
// If the client already disconnected (e.g. fire-and-forget fetch in
|
|
411
|
+
// test helpers), the yield throws — but the environment IS connected,
|
|
412
|
+
// so we must NOT revert the status to "error".
|
|
413
|
+
try {
|
|
414
|
+
yield create(grackle.ProvisionEventSchema, {
|
|
415
|
+
stage: "ready",
|
|
416
|
+
message: "Environment connected",
|
|
417
|
+
progress: 1,
|
|
418
|
+
});
|
|
419
|
+
}
|
|
420
|
+
catch {
|
|
421
|
+
// Client disconnected after successful provision — ignore
|
|
422
|
+
}
|
|
423
|
+
},
|
|
424
|
+
async stopEnvironment(req) {
|
|
425
|
+
const env = envRegistry.getEnvironment(req.id);
|
|
426
|
+
if (!env) {
|
|
427
|
+
throw new ConnectError(`Environment not found: ${req.id}`, Code.NotFound);
|
|
428
|
+
}
|
|
429
|
+
const adapter = adapterManager.getAdapter(env.adapterType);
|
|
430
|
+
if (adapter) {
|
|
431
|
+
await adapter.stop(req.id, parseAdapterConfig(env.adapterConfig));
|
|
432
|
+
}
|
|
433
|
+
adapterManager.removeConnection(req.id);
|
|
434
|
+
envRegistry.updateEnvironmentStatus(req.id, "disconnected");
|
|
435
|
+
emit("environment.changed", {});
|
|
436
|
+
return create(grackle.EmptySchema, {});
|
|
437
|
+
},
|
|
438
|
+
async destroyEnvironment(req) {
|
|
439
|
+
const env = envRegistry.getEnvironment(req.id);
|
|
440
|
+
if (!env) {
|
|
441
|
+
throw new ConnectError(`Environment not found: ${req.id}`, Code.NotFound);
|
|
442
|
+
}
|
|
443
|
+
const adapter = adapterManager.getAdapter(env.adapterType);
|
|
444
|
+
if (adapter) {
|
|
445
|
+
await adapter.destroy(req.id, parseAdapterConfig(env.adapterConfig));
|
|
446
|
+
}
|
|
447
|
+
adapterManager.removeConnection(req.id);
|
|
448
|
+
envRegistry.updateEnvironmentStatus(req.id, "disconnected");
|
|
449
|
+
emit("environment.changed", {});
|
|
450
|
+
return create(grackle.EmptySchema, {});
|
|
451
|
+
},
|
|
452
|
+
async spawnAgent(req) {
|
|
453
|
+
if (!req.environmentId) {
|
|
454
|
+
throw new ConnectError("environment_id is required", Code.InvalidArgument);
|
|
455
|
+
}
|
|
456
|
+
const env = envRegistry.getEnvironment(req.environmentId);
|
|
457
|
+
if (!env) {
|
|
458
|
+
throw new ConnectError(`Environment not found: ${req.environmentId}`, Code.NotFound);
|
|
459
|
+
}
|
|
460
|
+
let conn = adapterManager.getConnection(req.environmentId);
|
|
461
|
+
if (!conn) {
|
|
462
|
+
// Auto-provision: attempt to reconnect/provision a disconnected environment
|
|
463
|
+
const adapter = adapterManager.getAdapter(env.adapterType);
|
|
464
|
+
if (!adapter) {
|
|
465
|
+
throw new ConnectError(`No adapter for type: ${env.adapterType}`, Code.FailedPrecondition);
|
|
466
|
+
}
|
|
467
|
+
logger.info({ environmentId: req.environmentId }, "Auto-provisioning environment for SpawnAgent");
|
|
468
|
+
envRegistry.updateEnvironmentStatus(req.environmentId, "connecting");
|
|
469
|
+
emit("environment.changed", {});
|
|
470
|
+
const config = parseAdapterConfig(env.adapterConfig);
|
|
471
|
+
config.defaultRuntime = env.defaultRuntime;
|
|
472
|
+
const powerlineToken = env.powerlineToken;
|
|
473
|
+
try {
|
|
474
|
+
for await (const provEvent of reconnectOrProvision(req.environmentId, adapter, config, powerlineToken, !!env.bootstrapped)) {
|
|
475
|
+
logger.info({ environmentId: req.environmentId, stage: provEvent.stage }, "Auto-provision progress (SpawnAgent)");
|
|
476
|
+
emit("environment.provision_progress", {
|
|
477
|
+
environmentId: req.environmentId,
|
|
478
|
+
stage: provEvent.stage,
|
|
479
|
+
message: provEvent.message,
|
|
480
|
+
progress: provEvent.progress,
|
|
481
|
+
});
|
|
482
|
+
}
|
|
483
|
+
conn = await adapter.connect(req.environmentId, config, powerlineToken);
|
|
484
|
+
adapterManager.setConnection(req.environmentId, conn);
|
|
485
|
+
await tokenPush.pushToEnv(req.environmentId);
|
|
486
|
+
envRegistry.updateEnvironmentStatus(req.environmentId, "connected");
|
|
487
|
+
envRegistry.markBootstrapped(req.environmentId);
|
|
488
|
+
emit("environment.changed", {});
|
|
489
|
+
// Auto-recover suspended sessions (fire-and-forget)
|
|
490
|
+
recoverSuspendedSessions(req.environmentId, conn).catch((err) => {
|
|
491
|
+
logger.error({ environmentId: req.environmentId, err }, "Session recovery failed");
|
|
492
|
+
});
|
|
493
|
+
logger.info({ environmentId: req.environmentId }, "Auto-provision complete (SpawnAgent)");
|
|
494
|
+
emit("environment.provision_progress", {
|
|
495
|
+
environmentId: req.environmentId,
|
|
496
|
+
stage: "ready",
|
|
497
|
+
message: "Environment connected",
|
|
498
|
+
progress: 1,
|
|
499
|
+
});
|
|
500
|
+
}
|
|
501
|
+
catch (err) {
|
|
502
|
+
logger.error({ environmentId: req.environmentId, err }, "Auto-provision failed (SpawnAgent)");
|
|
503
|
+
envRegistry.updateEnvironmentStatus(req.environmentId, "error");
|
|
504
|
+
emit("environment.changed", {});
|
|
505
|
+
throw new ConnectError(`Failed to auto-connect environment ${req.environmentId}: ${err instanceof Error ? err.message : String(err)}`, Code.FailedPrecondition);
|
|
506
|
+
}
|
|
507
|
+
}
|
|
508
|
+
// Resolve persona via cascade (request → app default)
|
|
509
|
+
let resolved;
|
|
510
|
+
try {
|
|
511
|
+
resolved = resolvePersona(req.personaId);
|
|
512
|
+
}
|
|
513
|
+
catch (err) {
|
|
514
|
+
throw new ConnectError(err.message, Code.FailedPrecondition);
|
|
515
|
+
}
|
|
516
|
+
const sessionId = uuid();
|
|
517
|
+
const { runtime, model, systemPrompt, persona } = resolved;
|
|
518
|
+
const maxTurns = req.maxTurns || resolved.maxTurns;
|
|
519
|
+
const logPath = join(grackleHome, LOGS_DIR, sessionId);
|
|
520
|
+
const builderPrompt = new SystemPromptBuilder({
|
|
521
|
+
personaPrompt: systemPrompt,
|
|
522
|
+
}).build();
|
|
523
|
+
const systemContext = req.systemContext
|
|
524
|
+
? builderPrompt + "\n\n" + req.systemContext
|
|
525
|
+
: builderPrompt;
|
|
526
|
+
// Validate pipe inputs before creating the session or spawning the child
|
|
527
|
+
validatePipeInputs(req.pipe, req.parentSessionId);
|
|
528
|
+
const pipeMode = req.pipe;
|
|
529
|
+
sessionStore.createSession(sessionId, req.environmentId, runtime, req.prompt, model, logPath, "", // taskId
|
|
530
|
+
resolved.personaId, // personaId
|
|
531
|
+
req.parentSessionId || "", // parentSessionId
|
|
532
|
+
pipeMode || "");
|
|
533
|
+
const mcpServersJson = personaMcpServersToJson(persona);
|
|
534
|
+
const mcpPort = parseInt(process.env.GRACKLE_MCP_PORT || String(DEFAULT_MCP_PORT), 10);
|
|
535
|
+
const mcpDialHost = toDialableHost(process.env.GRACKLE_HOST || "127.0.0.1");
|
|
536
|
+
const mcpUrl = `http://${mcpDialHost}:${mcpPort}/mcp`;
|
|
537
|
+
const mcpToken = createScopedToken({ sub: sessionId, pid: "", per: resolved.personaId, sid: sessionId }, loadOrCreateApiKey(grackleHome));
|
|
538
|
+
const powerlineReq = create(powerline.SpawnRequestSchema, {
|
|
539
|
+
sessionId,
|
|
540
|
+
runtime,
|
|
541
|
+
prompt: req.prompt,
|
|
542
|
+
model,
|
|
543
|
+
maxTurns,
|
|
544
|
+
branch: req.branch,
|
|
545
|
+
worktreeBasePath: req.branch
|
|
546
|
+
? (req.worktreeBasePath.trim() || process.env.GRACKLE_WORKTREE_BASE || "/workspace")
|
|
547
|
+
: "",
|
|
548
|
+
systemContext,
|
|
549
|
+
mcpServersJson,
|
|
550
|
+
mcpUrl,
|
|
551
|
+
mcpToken,
|
|
552
|
+
scriptContent: resolved.type === "script" ? resolved.script : "",
|
|
553
|
+
pipe: req.pipe,
|
|
554
|
+
});
|
|
555
|
+
// Create lifecycle stream — every session gets one. The spawner holds
|
|
556
|
+
// a lifecycle fd; when it's closed, the session auto-stops.
|
|
557
|
+
const lifecycleStream = streamRegistry.createStream(`lifecycle:${sessionId}`);
|
|
558
|
+
const spawnerId = req.parentSessionId || "__server__";
|
|
559
|
+
streamRegistry.subscribe(lifecycleStream.id, spawnerId, "rw", "detach", true);
|
|
560
|
+
streamRegistry.subscribe(lifecycleStream.id, sessionId, "rw", "detach", false);
|
|
561
|
+
// Set up IPC pipe stream (optional, on top of lifecycle stream)
|
|
562
|
+
let pipeFd = 0;
|
|
563
|
+
if (pipeMode && pipeMode !== "detach" && req.parentSessionId) {
|
|
564
|
+
const ipcStream = streamRegistry.createStream(`pipe:${sessionId}`);
|
|
565
|
+
const parentSub = streamRegistry.subscribe(ipcStream.id, req.parentSessionId, "rw", pipeMode === "sync" ? "sync" : "async", true);
|
|
566
|
+
streamRegistry.subscribe(ipcStream.id, sessionId, "rw", "async", false);
|
|
567
|
+
pipeFd = parentSub.fd;
|
|
568
|
+
if (pipeMode === "async") {
|
|
569
|
+
ensureAsyncDeliveryListener(req.parentSessionId); // parent receives child messages
|
|
570
|
+
ensureAsyncDeliveryListener(sessionId); // child receives parent messages
|
|
571
|
+
}
|
|
572
|
+
}
|
|
573
|
+
// Push fresh credentials before spawning (best-effort).
|
|
574
|
+
// For local envs, skip file tokens — the PowerLine is on the same machine.
|
|
575
|
+
await tokenPush.refreshTokensForTask(req.environmentId, runtime, env.adapterType === "local" ? { excludeFileTokens: true } : undefined);
|
|
576
|
+
processEventStream(conn.client.spawn(powerlineReq), {
|
|
577
|
+
sessionId,
|
|
578
|
+
logPath,
|
|
579
|
+
systemContext,
|
|
580
|
+
prompt: req.prompt,
|
|
581
|
+
});
|
|
582
|
+
const row = sessionStore.getSession(sessionId);
|
|
583
|
+
const proto = sessionRowToProto(row);
|
|
584
|
+
proto.pipeFd = pipeFd;
|
|
585
|
+
return proto;
|
|
586
|
+
},
|
|
587
|
+
async resumeAgent(req) {
|
|
588
|
+
const row = reanimateAgent(req.sessionId);
|
|
589
|
+
return sessionRowToProto(row);
|
|
590
|
+
},
|
|
591
|
+
// Forward user-typed text to a running session's PowerLine process.
// Validates that the session exists, has not reached a terminal status, and
// that its environment currently has an adapter connection before sending.
async sendInput(req) {
    const session = sessionStore.getSession(req.sessionId);
    if (!session) {
        throw new ConnectError(`Session not found: ${req.sessionId}`, Code.NotFound);
    }
    if (TERMINAL_SESSION_STATUSES.has(session.status)) {
        throw new ConnectError(`Session ${req.sessionId} has ended (status: ${session.status})`, Code.FailedPrecondition);
    }
    const conn = adapterManager.getConnection(session.environmentId);
    if (!conn) {
        throw new ConnectError(`Environment ${session.environmentId} not connected`, Code.FailedPrecondition);
    }
    // Persist and publish user input event so subscribers see the text in the event stream
    const userInputEvent = create(grackle.SessionEventSchema, {
        sessionId: req.sessionId,
        type: grackle.EventType.USER_INPUT,
        timestamp: new Date().toISOString(),
        content: req.text,
        raw: "",
    });
    if (session.logPath) {
        // Only sessions that were created with a log path get the event persisted.
        logWriter.writeEvent(session.logPath, userInputEvent);
    }
    streamHub.publish(userInputEvent);
    // The remote send happens AFTER the local write/publish, so stream
    // subscribers observe the input even if the remote call below rejects.
    await conn.client.sendInput(create(powerline.InputMessageSchema, {
        sessionId: req.sessionId,
        text: req.text,
    }));
    return create(grackle.EmptySchema, {});
},
|
|
621
|
+
async getUsage(req) {
|
|
622
|
+
if (!req.id) {
|
|
623
|
+
throw new ConnectError("id is required", Code.InvalidArgument);
|
|
624
|
+
}
|
|
625
|
+
switch (req.scope) {
|
|
626
|
+
case "session": {
|
|
627
|
+
const session = sessionStore.getSession(req.id);
|
|
628
|
+
if (!session) {
|
|
629
|
+
throw new ConnectError(`Session not found: ${req.id}`, Code.NotFound);
|
|
630
|
+
}
|
|
631
|
+
return create(grackle.UsageStatsSchema, {
|
|
632
|
+
inputTokens: session.inputTokens,
|
|
633
|
+
outputTokens: session.outputTokens,
|
|
634
|
+
costUsd: session.costUsd,
|
|
635
|
+
sessionCount: 1,
|
|
636
|
+
});
|
|
637
|
+
}
|
|
638
|
+
case "task": {
|
|
639
|
+
const usage = sessionStore.aggregateUsage({ taskId: req.id });
|
|
640
|
+
return create(grackle.UsageStatsSchema, usage);
|
|
641
|
+
}
|
|
642
|
+
case "task_tree": {
|
|
643
|
+
const descendants = taskStore.getDescendants(req.id);
|
|
644
|
+
const taskIds = [req.id, ...descendants.map((d) => d.id)];
|
|
645
|
+
const usage = sessionStore.aggregateUsage({ taskIds });
|
|
646
|
+
return create(grackle.UsageStatsSchema, usage);
|
|
647
|
+
}
|
|
648
|
+
case "workspace": {
|
|
649
|
+
const tasks = taskStore.listTasks(req.id);
|
|
650
|
+
const taskIds = tasks.map((t) => t.id);
|
|
651
|
+
const usage = taskIds.length > 0
|
|
652
|
+
? sessionStore.aggregateUsage({ taskIds })
|
|
653
|
+
: { inputTokens: 0, outputTokens: 0, costUsd: 0, sessionCount: 0 };
|
|
654
|
+
return create(grackle.UsageStatsSchema, usage);
|
|
655
|
+
}
|
|
656
|
+
case "environment": {
|
|
657
|
+
const usage = sessionStore.aggregateUsage({ environmentId: req.id });
|
|
658
|
+
return create(grackle.UsageStatsSchema, usage);
|
|
659
|
+
}
|
|
660
|
+
default:
|
|
661
|
+
throw new ConnectError(`Invalid usage scope: ${req.scope}`, Code.InvalidArgument);
|
|
662
|
+
}
|
|
663
|
+
},
|
|
664
|
+
// Block until a message arrives on a sync pipe subscription, then return its
// content and sender. Only valid for subscriptions with delivery mode "sync".
async waitForPipe(req) {
    const sub = streamRegistry.getSubscription(req.sessionId, req.fd);
    if (!sub) {
        throw new ConnectError(`No subscription found for session ${req.sessionId} fd ${req.fd}`, Code.NotFound);
    }
    if (sub.deliveryMode !== "sync") {
        throw new ConnectError(`Subscription fd ${req.fd} is not a sync subscription (mode: ${sub.deliveryMode})`, Code.FailedPrecondition);
    }
    // Use try/finally so the pipe stream is cleaned up even if consumeSync rejects
    // (e.g., the request is cancelled or times out) to prevent unbounded memory growth.
    let msg;
    try {
        msg = await streamRegistry.consumeSync(sub.id);
    }
    finally {
        // Tear the stream down whether or not a message was actually consumed.
        const stream = streamRegistry.getStream(sub.streamId);
        if (stream) {
            streamRegistry.deleteStream(sub.streamId);
        }
    }
    return create(grackle.WaitForPipeResponseSchema, {
        content: msg.content,
        senderSessionId: msg.senderId,
    });
},
|
|
689
|
+
// Publish a message from a session onto one of its pipe streams. Requires
// write ("w" or "rw") permission on the fd, and fails if the message was not
// delivered to any async subscriber on the stream.
async writeToFd(req) {
    const sub = streamRegistry.getSubscription(req.sessionId, req.fd);
    if (!sub) {
        throw new ConnectError(`No subscription found for session ${req.sessionId} fd ${req.fd}`, Code.NotFound);
    }
    if (sub.permission !== "w" && sub.permission !== "rw") {
        throw new ConnectError(`Subscription fd ${req.fd} does not have write permission (permission: ${sub.permission})`, Code.FailedPrecondition);
    }
    const stream = streamRegistry.getStream(sub.streamId);
    if (!stream) {
        throw new ConnectError("Stream no longer exists", Code.FailedPrecondition);
    }
    // Publish to stream — delivery is handled by async listeners registered
    // at spawn time via ensureAsyncDeliveryListener. This is the same path
    // used by publishChildCompletion for child→parent delivery.
    const msg = streamRegistry.publish(sub.streamId, req.sessionId, req.message);
    // Verify delivery to async subscribers — check if the published message
    // was marked as delivered for each async target. Sync and detach subscribers
    // are excluded (sync waits for consumeSync, detach buffers silently).
    for (const targetSub of stream.subscriptions.values()) {
        if (targetSub.sessionId === req.sessionId) {
            // Skip the sender's own subscription on this stream.
            continue;
        }
        if (targetSub.deliveryMode === "async" && !msg.deliveredTo.has(targetSub.id)) {
            throw new ConnectError("Message delivery failed — target environment may be disconnected", Code.FailedPrecondition);
        }
    }
    return create(grackle.EmptySchema, {});
},
|
|
718
|
+
// Close a pipe fd for the calling session. Refuses to close while undelivered
// messages are pending. Also unsubscribes the stream's peer ("child") sessions;
// if a child ends up STOPPED as a result, the response reports stopped=true.
async closeFd(req) {
    const sub = streamRegistry.getSubscription(req.sessionId, req.fd);
    if (!sub) {
        throw new ConnectError(`No subscription found for session ${req.sessionId} fd ${req.fd}`, Code.NotFound);
    }
    if (streamRegistry.hasUndeliveredMessages(sub.id)) {
        throw new ConnectError(`Cannot close fd ${req.fd}: undelivered messages pending. Process or consume them first.`, Code.FailedPrecondition);
    }
    const streamId = sub.streamId;
    const stream = streamRegistry.getStream(streamId);
    // Collect child sessions (inherited subscriptions, not the caller's)
    // BEFORE unsubscribing anything, since unsubscribe mutates the stream.
    const childSubs = [];
    if (stream) {
        for (const s of stream.subscriptions.values()) {
            if (s.sessionId !== req.sessionId) {
                childSubs.push({ sessionId: s.sessionId, subId: s.id });
            }
        }
    }
    // Unsubscribe the caller
    streamRegistry.unsubscribe(sub.id);
    // Also unsubscribe children — when their last subscription is removed,
    // the lifecycle manager's orphan callback auto-stops them.
    let stopped = false;
    for (const child of childSubs) {
        streamRegistry.unsubscribe(child.subId);
        // Check if the child was orphaned (auto-stopped)
        const childSession = sessionStore.getSession(child.sessionId);
        if (childSession?.status === SESSION_STATUS.STOPPED) {
            stopped = true;
        }
    }
    // Clean up async listeners for caller and any unsubscribed children
    pipeDelivery.cleanupAsyncListenerIfEmpty(req.sessionId);
    for (const child of childSubs) {
        pipeDelivery.cleanupAsyncListenerIfEmpty(child.sessionId);
    }
    return create(grackle.CloseFdResponseSchema, { stopped });
},
|
|
757
|
+
getSessionFds(req) {
|
|
758
|
+
const subs = streamRegistry.getSubscriptionsForSession(req.id);
|
|
759
|
+
const fds = subs.map((sub) => {
|
|
760
|
+
const stream = streamRegistry.getStream(sub.streamId);
|
|
761
|
+
let targetSessionId = "";
|
|
762
|
+
if (stream) {
|
|
763
|
+
for (const s of stream.subscriptions.values()) {
|
|
764
|
+
if (s.sessionId !== req.id) {
|
|
765
|
+
targetSessionId = s.sessionId;
|
|
766
|
+
break;
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
}
|
|
770
|
+
return create(grackle.FdInfoSchema, {
|
|
771
|
+
fd: sub.fd,
|
|
772
|
+
streamName: stream?.name || "",
|
|
773
|
+
permission: sub.permission,
|
|
774
|
+
deliveryMode: sub.deliveryMode,
|
|
775
|
+
owned: sub.createdBySpawn,
|
|
776
|
+
targetSessionId,
|
|
777
|
+
});
|
|
778
|
+
});
|
|
779
|
+
return create(grackle.SessionFdsSchema, { fds });
|
|
780
|
+
},
|
|
781
|
+
// Forcefully terminate a session. Marks it STOPPED/killed first, then tears
// down its lifecycle stream and any remaining pipe subscriptions.
async killAgent(req) {
    const session = sessionStore.getSession(req.id);
    if (!session) {
        throw new ConnectError(`Session not found: ${req.id}`, Code.NotFound);
    }
    // Set STOPPED + killed BEFORE closing the lifecycle FD so the orphan
    // callback sees the session is already terminal and skips. Without this,
    // the orphan callback would see IDLE → reason="completed", which is wrong
    // for an explicit kill.
    if (!TERMINAL_SESSION_STATUSES.has(session.status)) {
        sessionStore.updateSession(req.id, SESSION_STATUS.STOPPED, undefined, undefined, END_REASON.KILLED);
        // Broadcast the kill as a STATUS event so live subscribers see it.
        streamHub.publish(create(grackle.SessionEventSchema, {
            sessionId: req.id,
            type: grackle.EventType.STATUS,
            timestamp: new Date().toISOString(),
            content: END_REASON.KILLED,
            raw: "",
        }));
        if (session.taskId) {
            // Killing a task-bound session changes the task's effective status.
            const task = taskStore.getTask(session.taskId);
            if (task) {
                emit("task.updated", { taskId: task.id, workspaceId: task.workspaceId || "" });
            }
        }
    }
    // Delete the lifecycle stream — orphan callback sees session is already
    // STOPPED and skips status change, but still kills the PowerLine process.
    cleanupLifecycleStream(req.id);
    // Also close any other subscriptions (pipe streams etc.)
    const subs = streamRegistry.getSubscriptionsForSession(req.id);
    for (const sub of subs) {
        streamRegistry.unsubscribe(sub.id);
    }
    return create(grackle.EmptySchema, {});
},
|
|
816
|
+
async listSessions(req) {
|
|
817
|
+
const rows = sessionStore.listSessions(req.environmentId, req.status);
|
|
818
|
+
return create(grackle.SessionListSchema, {
|
|
819
|
+
sessions: rows.map(sessionRowToProto),
|
|
820
|
+
});
|
|
821
|
+
},
|
|
822
|
+
async getSession(req) {
|
|
823
|
+
const row = sessionStore.getSession(req.id);
|
|
824
|
+
if (!row) {
|
|
825
|
+
throw new ConnectError(`Session not found: ${req.id}`, Code.NotFound);
|
|
826
|
+
}
|
|
827
|
+
return sessionRowToProto(row);
|
|
828
|
+
},
|
|
829
|
+
async getSessionEvents(req) {
|
|
830
|
+
const session = sessionStore.getSession(req.id);
|
|
831
|
+
if (!session) {
|
|
832
|
+
throw new ConnectError(`Session not found: ${req.id}`, Code.NotFound);
|
|
833
|
+
}
|
|
834
|
+
if (!session.logPath) {
|
|
835
|
+
return create(grackle.SessionEventListSchema, {
|
|
836
|
+
sessionId: req.id,
|
|
837
|
+
events: [],
|
|
838
|
+
});
|
|
839
|
+
}
|
|
840
|
+
const entries = logWriter.readLog(session.logPath);
|
|
841
|
+
return create(grackle.SessionEventListSchema, {
|
|
842
|
+
sessionId: req.id,
|
|
843
|
+
events: entries.map((e) => create(grackle.SessionEventSchema, {
|
|
844
|
+
sessionId: e.session_id,
|
|
845
|
+
type: eventTypeToEnum(e.type),
|
|
846
|
+
timestamp: e.timestamp,
|
|
847
|
+
content: e.content,
|
|
848
|
+
raw: e.raw || "",
|
|
849
|
+
})),
|
|
850
|
+
});
|
|
851
|
+
},
|
|
852
|
+
async getTaskSessions(req) {
|
|
853
|
+
if (!req.id) {
|
|
854
|
+
throw new ConnectError("task id is required", Code.InvalidArgument);
|
|
855
|
+
}
|
|
856
|
+
const rows = sessionStore.listSessionsForTask(req.id);
|
|
857
|
+
return create(grackle.SessionListSchema, {
|
|
858
|
+
sessions: rows.map(sessionRowToProto),
|
|
859
|
+
});
|
|
860
|
+
},
|
|
861
|
+
async *streamSession(req) {
|
|
862
|
+
const stream = streamHub.createStream(req.id);
|
|
863
|
+
try {
|
|
864
|
+
for await (const event of stream) {
|
|
865
|
+
yield event;
|
|
866
|
+
}
|
|
867
|
+
}
|
|
868
|
+
finally {
|
|
869
|
+
stream.cancel();
|
|
870
|
+
}
|
|
871
|
+
},
|
|
872
|
+
async *streamAll() {
|
|
873
|
+
const stream = streamHub.createGlobalStream();
|
|
874
|
+
try {
|
|
875
|
+
for await (const event of stream) {
|
|
876
|
+
yield event;
|
|
877
|
+
}
|
|
878
|
+
}
|
|
879
|
+
finally {
|
|
880
|
+
stream.cancel();
|
|
881
|
+
}
|
|
882
|
+
},
|
|
883
|
+
async setToken(req) {
|
|
884
|
+
if (!req.name) {
|
|
885
|
+
throw new ConnectError("name is required", Code.InvalidArgument);
|
|
886
|
+
}
|
|
887
|
+
if (!req.value) {
|
|
888
|
+
throw new ConnectError("value is required", Code.InvalidArgument);
|
|
889
|
+
}
|
|
890
|
+
tokenStore.setToken({
|
|
891
|
+
name: req.name,
|
|
892
|
+
type: req.type,
|
|
893
|
+
envVar: req.envVar,
|
|
894
|
+
filePath: req.filePath,
|
|
895
|
+
value: req.value,
|
|
896
|
+
expiresAt: req.expiresAt,
|
|
897
|
+
});
|
|
898
|
+
emit("token.changed", {});
|
|
899
|
+
await tokenPush.pushToAll();
|
|
900
|
+
return create(grackle.EmptySchema, {});
|
|
901
|
+
},
|
|
902
|
+
async listTokens() {
|
|
903
|
+
const items = tokenStore.listTokens();
|
|
904
|
+
return create(grackle.TokenListSchema, {
|
|
905
|
+
tokens: items.map((t) => create(grackle.TokenInfoSchema, {
|
|
906
|
+
name: t.name,
|
|
907
|
+
type: t.type,
|
|
908
|
+
envVar: t.envVar || "",
|
|
909
|
+
filePath: t.filePath || "",
|
|
910
|
+
expiresAt: t.expiresAt || "",
|
|
911
|
+
})),
|
|
912
|
+
});
|
|
913
|
+
},
|
|
914
|
+
async deleteToken(req) {
|
|
915
|
+
if (!req.name) {
|
|
916
|
+
throw new ConnectError("name is required", Code.InvalidArgument);
|
|
917
|
+
}
|
|
918
|
+
tokenStore.deleteToken(req.name);
|
|
919
|
+
emit("token.changed", {});
|
|
920
|
+
await tokenPush.pushToAll();
|
|
921
|
+
return create(grackle.EmptySchema, {});
|
|
922
|
+
},
|
|
923
|
+
// ─── Credential Providers ─────────────────────────────────
|
|
924
|
+
async getCredentialProviders() {
|
|
925
|
+
const config = credentialProviders.getCredentialProviders();
|
|
926
|
+
return create(grackle.CredentialProviderConfigSchema, {
|
|
927
|
+
claude: claudeProviderModeToEnum(config.claude),
|
|
928
|
+
github: providerToggleToEnum(config.github),
|
|
929
|
+
copilot: providerToggleToEnum(config.copilot),
|
|
930
|
+
codex: providerToggleToEnum(config.codex),
|
|
931
|
+
goose: providerToggleToEnum(config.goose),
|
|
932
|
+
});
|
|
933
|
+
},
|
|
934
|
+
async setCredentialProvider(req) {
|
|
935
|
+
if (!credentialProviders.VALID_PROVIDERS.includes(req.provider)) {
|
|
936
|
+
throw new ConnectError(`Invalid provider: ${req.provider}. Must be one of: ${credentialProviders.VALID_PROVIDERS.join(", ")}`, Code.InvalidArgument);
|
|
937
|
+
}
|
|
938
|
+
const allowed = req.provider === "claude"
|
|
939
|
+
? credentialProviders.VALID_CLAUDE_VALUES
|
|
940
|
+
: credentialProviders.VALID_TOGGLE_VALUES;
|
|
941
|
+
if (!allowed.has(req.value)) {
|
|
942
|
+
throw new ConnectError(`Invalid value for ${req.provider}: ${req.value}. Must be one of: ${[...allowed].join(", ")}`, Code.InvalidArgument);
|
|
943
|
+
}
|
|
944
|
+
const current = credentialProviders.getCredentialProviders();
|
|
945
|
+
const updated = { ...current, [req.provider]: req.value };
|
|
946
|
+
credentialProviders.setCredentialProviders(updated);
|
|
947
|
+
emit("credential.providers_changed", updated);
|
|
948
|
+
return create(grackle.CredentialProviderConfigSchema, {
|
|
949
|
+
claude: claudeProviderModeToEnum(updated.claude),
|
|
950
|
+
github: providerToggleToEnum(updated.github),
|
|
951
|
+
copilot: providerToggleToEnum(updated.copilot),
|
|
952
|
+
codex: providerToggleToEnum(updated.codex),
|
|
953
|
+
goose: providerToggleToEnum(updated.goose),
|
|
954
|
+
});
|
|
955
|
+
},
|
|
956
|
+
// ─── Workspaces ──────────────────────────────────────────
|
|
957
|
+
async listWorkspaces(req) {
|
|
958
|
+
const rows = workspaceStore.listWorkspaces(req.environmentId || undefined);
|
|
959
|
+
return create(grackle.WorkspaceListSchema, {
|
|
960
|
+
workspaces: rows.map(workspaceRowToProto),
|
|
961
|
+
});
|
|
962
|
+
},
|
|
963
|
+
async createWorkspace(req) {
|
|
964
|
+
if (!req.name) {
|
|
965
|
+
throw new ConnectError("name is required", Code.InvalidArgument);
|
|
966
|
+
}
|
|
967
|
+
if (!req.environmentId) {
|
|
968
|
+
throw new ConnectError("environment_id is required", Code.InvalidArgument);
|
|
969
|
+
}
|
|
970
|
+
const env = envRegistry.getEnvironment(req.environmentId);
|
|
971
|
+
if (!env) {
|
|
972
|
+
throw new ConnectError(`Environment not found: ${req.environmentId}`, Code.NotFound);
|
|
973
|
+
}
|
|
974
|
+
let id = slugify(req.name) || uuid().slice(0, 8);
|
|
975
|
+
// If slug already exists (e.g. archived workspace), append a short suffix
|
|
976
|
+
if (workspaceStore.getWorkspace(id)) {
|
|
977
|
+
id = `${id}-${uuid().slice(0, 4)}`;
|
|
978
|
+
}
|
|
979
|
+
// useWorktrees defaults to true when not specified
|
|
980
|
+
const useWorktrees = req.useWorktrees ?? true;
|
|
981
|
+
workspaceStore.createWorkspace(id, req.name, req.description, req.repoUrl, req.environmentId, useWorktrees, req.worktreeBasePath ?? "", req.defaultPersonaId ?? "");
|
|
982
|
+
emit("workspace.created", { workspaceId: id });
|
|
983
|
+
const row = workspaceStore.getWorkspace(id);
|
|
984
|
+
return workspaceRowToProto(row);
|
|
985
|
+
},
|
|
986
|
+
async getWorkspace(req) {
|
|
987
|
+
const row = workspaceStore.getWorkspace(req.id);
|
|
988
|
+
if (!row)
|
|
989
|
+
throw new ConnectError(`Workspace not found: ${req.id}`, Code.NotFound);
|
|
990
|
+
return workspaceRowToProto(row);
|
|
991
|
+
},
|
|
992
|
+
// Archive a workspace and notify listeners. Archival is delegated entirely
// to the store; no existence check is performed here.
async archiveWorkspace(req) {
    workspaceStore.archiveWorkspace(req.id);
    emit("workspace.archived", { workspaceId: req.id });
    return create(grackle.EmptySchema, {});
},
|
|
997
|
+
// Partially update a workspace. Fields left undefined on the request are
// passed through as undefined, which the store treats as "leave unchanged";
// validation rejects an explicitly-empty name and a non-http(s) repo URL.
async updateWorkspace(req) {
    const existing = workspaceStore.getWorkspace(req.id);
    if (!existing) {
        throw new ConnectError(`Workspace not found: ${req.id}`, Code.NotFound);
    }
    // Only an explicitly-provided all-whitespace name is rejected;
    // an undefined name simply means "do not change it".
    if (req.name?.trim() === "") {
        throw new ConnectError("Workspace name cannot be empty", Code.InvalidArgument);
    }
    if (req.repoUrl !== undefined && req.repoUrl !== "" && !/^https?:\/\//i.test(req.repoUrl)) {
        throw new ConnectError("Repository URL must use http or https scheme", Code.InvalidArgument);
    }
    if (req.environmentId !== undefined) {
        // Re-pointing the workspace at an environment requires it to exist.
        const env = envRegistry.getEnvironment(req.environmentId);
        if (!env) {
            throw new ConnectError(`Environment not found: ${req.environmentId}`, Code.NotFound);
        }
    }
    const row = workspaceStore.updateWorkspace(req.id, {
        name: req.name !== undefined ? req.name.trim() : undefined,
        description: req.description,
        repoUrl: req.repoUrl,
        environmentId: req.environmentId,
        useWorktrees: req.useWorktrees ?? undefined,
        worktreeBasePath: req.worktreeBasePath,
        defaultPersonaId: req.defaultPersonaId,
    });
    if (!row) {
        // The row vanished between the existence check and the update.
        throw new ConnectError(`Workspace not found after update: ${req.id}`, Code.NotFound);
    }
    emit("workspace.updated", { workspaceId: req.id });
    return workspaceRowToProto(row);
},
|
|
1029
|
+
// ─── Tasks ───────────────────────────────────────────────
|
|
1030
|
+
async listTasks(req) {
|
|
1031
|
+
const rows = taskStore.listTasks(req.workspaceId || undefined, {
|
|
1032
|
+
search: req.search || undefined,
|
|
1033
|
+
status: req.status || undefined,
|
|
1034
|
+
});
|
|
1035
|
+
const childIdsMap = taskStore.buildChildIdsMap(rows);
|
|
1036
|
+
// Batch-fetch sessions for all tasks and group by taskId
|
|
1037
|
+
const taskIds = rows.map((r) => r.id);
|
|
1038
|
+
const allSessions = sessionStore.listSessionsByTaskIds(taskIds);
|
|
1039
|
+
const sessionsByTask = new Map();
|
|
1040
|
+
for (const s of allSessions) {
|
|
1041
|
+
const arr = sessionsByTask.get(s.taskId) ?? [];
|
|
1042
|
+
arr.push(s);
|
|
1043
|
+
sessionsByTask.set(s.taskId, arr);
|
|
1044
|
+
}
|
|
1045
|
+
return create(grackle.TaskListSchema, {
|
|
1046
|
+
tasks: rows.map((r) => {
|
|
1047
|
+
const taskSessions = sessionsByTask.get(r.id) ?? [];
|
|
1048
|
+
const { status, latestSessionId } = computeTaskStatus(r.status, taskSessions);
|
|
1049
|
+
return taskRowToProto(r, childIdsMap.get(r.id) ?? [], status, latestSessionId);
|
|
1050
|
+
}),
|
|
1051
|
+
});
|
|
1052
|
+
},
|
|
1053
|
+
// Create a task, optionally inside a workspace and/or under a parent task.
// Parent validation enforces decomposition rights and the MAX_TASK_DEPTH cap
// before anything is written to the store.
async createTask(req) {
    if (!req.title) {
        throw new ConnectError("title is required", Code.InvalidArgument);
    }
    // Normalize "" to undefined so a workspace lookup is only attempted
    // when a workspace id was actually provided.
    const workspaceId = req.workspaceId || undefined;
    let workspace;
    if (workspaceId) {
        workspace = workspaceStore.getWorkspace(workspaceId);
        if (!workspace)
            throw new ConnectError(`Workspace not found: ${workspaceId}`, Code.NotFound);
    }
    // Validate parent task if specified
    if (req.parentTaskId) {
        const parent = taskStore.getTask(req.parentTaskId);
        if (!parent)
            throw new ConnectError(`Parent task not found: ${req.parentTaskId}`, Code.NotFound);
        if (!parent.canDecompose) {
            throw new ConnectError(`Parent task "${parent.title}" (${req.parentTaskId}) does not have decomposition rights`, Code.FailedPrecondition);
        }
        if (parent.depth + 1 > MAX_TASK_DEPTH) {
            throw new ConnectError(`Task depth would exceed maximum of ${MAX_TASK_DEPTH}`, Code.FailedPrecondition);
        }
    }
    // Task ids are short random prefixes of a UUID, not full UUIDs.
    const id = uuid().slice(0, 8);
    taskStore.createTask(id, workspaceId, req.title, req.description, [...req.dependsOn], workspace ? slugify(workspace.name) : "", req.parentTaskId,
    // Default to false (no decomposition rights) unless explicitly granted.
    // Orchestrator/root processes that need fork() must opt in.
    req.canDecompose ?? false, req.defaultPersonaId ?? "");
    const row = taskStore.getTask(id);
    emit("task.created", { taskId: id, workspaceId: req.workspaceId });
    return taskRowToProto(row);
},
|
|
1085
|
+
async getTask(req) {
|
|
1086
|
+
const row = taskStore.getTask(req.id);
|
|
1087
|
+
if (!row)
|
|
1088
|
+
throw new ConnectError(`Task not found: ${req.id}`, Code.NotFound);
|
|
1089
|
+
const taskSessions = sessionStore.listSessionsForTask(req.id);
|
|
1090
|
+
const { status, latestSessionId } = computeTaskStatus(row.status, taskSessions);
|
|
1091
|
+
return taskRowToProto(row, undefined, status, latestSessionId);
|
|
1092
|
+
},
|
|
1093
|
+
// Update a task's fields and optionally late-bind an existing session to it.
// Empty strings on the request act as "leave unchanged" sentinels for title,
// description, and sessionId (proto3 default values).
async updateTask(req) {
    const existing = taskStore.getTask(req.id);
    if (!existing)
        throw new ConnectError(`Task not found: ${req.id}`, Code.NotFound);
    let reqStatus = existing.status;
    if (req.status !== grackle.TaskStatus.UNSPECIFIED) {
        // The root/system task's status is immutable via this RPC.
        if (req.id === ROOT_TASK_ID) {
            throw new ConnectError("Cannot change the status of the system task", Code.PermissionDenied);
        }
        const converted = taskStatusToString(req.status);
        if (!converted) {
            throw new ConnectError(`Unknown task status enum value: ${req.status}`, Code.InvalidArgument);
        }
        reqStatus = converted;
    }
    // An empty dependsOn list means "keep the existing dependencies",
    // which are stored as a JSON array string on the row.
    taskStore.updateTask(req.id, req.title !== "" ? req.title : existing.title, req.description !== "" ? req.description : existing.description, reqStatus, req.dependsOn.length > 0
        ? [...req.dependsOn]
        : safeParseJsonArray(existing.dependsOn), req.defaultPersonaId);
    // Late-bind: associate an existing session with this task
    if (req.sessionId !== "") {
        const session = sessionStore.getSession(req.sessionId);
        if (!session) {
            throw new ConnectError(`Session not found: ${req.sessionId}`, Code.NotFound);
        }
        if (TERMINAL_SESSION_STATUSES.has(session.status)) {
            throw new ConnectError(`Cannot bind terminal session ${req.sessionId} (status: ${session.status})`, Code.FailedPrecondition);
        }
        // Verify the processor exists before mutating DB state to avoid partial updates
        if (!processorRegistry.get(req.sessionId)) {
            throw new ConnectError(`No active event processor for session ${req.sessionId}`, Code.FailedPrecondition);
        }
        sessionStore.setSessionTask(req.sessionId, req.id);
        processorRegistry.lateBind(req.sessionId, req.id, existing.workspaceId || undefined);
        emit("task.started", { taskId: req.id, sessionId: req.sessionId, workspaceId: existing.workspaceId || "" });
    }
    emit("task.updated", { taskId: req.id, workspaceId: existing.workspaceId || "" });
    // Re-read the row and recompute effective status so the response
    // reflects the update that was just applied.
    const row = taskStore.getTask(req.id);
    const taskSessions = sessionStore.listSessionsForTask(req.id);
    const { status, latestSessionId } = computeTaskStatus(row.status, taskSessions);
    return taskRowToProto(row, undefined, status, latestSessionId);
},
|
|
1134
|
+
async startTask(req) {
|
|
1135
|
+
const task = taskStore.getTask(req.taskId);
|
|
1136
|
+
if (!task)
|
|
1137
|
+
throw new ConnectError(`Task not found: ${req.taskId}`, Code.NotFound);
|
|
1138
|
+
{
|
|
1139
|
+
const taskSessions = sessionStore.listSessionsForTask(req.taskId);
|
|
1140
|
+
const { status: effectiveStatus } = computeTaskStatus(task.status, taskSessions);
|
|
1141
|
+
if (req.taskId === ROOT_TASK_ID) {
|
|
1142
|
+
// Root task is always re-startable unless actively working
|
|
1143
|
+
if (effectiveStatus === TASK_STATUS.WORKING) {
|
|
1144
|
+
throw new ConnectError("System is already running", Code.FailedPrecondition);
|
|
1145
|
+
}
|
|
1146
|
+
}
|
|
1147
|
+
else if (![TASK_STATUS.NOT_STARTED, TASK_STATUS.FAILED].includes(effectiveStatus)) {
|
|
1148
|
+
throw new ConnectError(`Task ${req.taskId} cannot be started (status: ${effectiveStatus})`, Code.FailedPrecondition);
|
|
1149
|
+
}
|
|
1150
|
+
}
|
|
1151
|
+
if (!taskStore.areDependenciesMet(req.taskId)) {
|
|
1152
|
+
throw new ConnectError(`Task ${req.taskId} has unmet dependencies`, Code.FailedPrecondition);
|
|
1153
|
+
}
|
|
1154
|
+
const workspace = task.workspaceId ? workspaceStore.getWorkspace(task.workspaceId) : undefined;
|
|
1155
|
+
if (task.workspaceId && !workspace) {
|
|
1156
|
+
throw new ConnectError(`Workspace not found: ${task.workspaceId}`, Code.NotFound);
|
|
1157
|
+
}
|
|
1158
|
+
const environmentId = req.environmentId
|
|
1159
|
+
|| resolveAncestorEnvironmentId(task.parentTaskId)
|
|
1160
|
+
|| workspace?.environmentId
|
|
1161
|
+
|| "";
|
|
1162
|
+
if (!environmentId) {
|
|
1163
|
+
throw new ConnectError("No environment specified for task, ancestor, or workspace", Code.FailedPrecondition);
|
|
1164
|
+
}
|
|
1165
|
+
const conn = adapterManager.getConnection(environmentId);
|
|
1166
|
+
if (!conn)
|
|
1167
|
+
throw new ConnectError(`Environment ${environmentId} not connected`, Code.FailedPrecondition);
|
|
1168
|
+
// Resolve persona via cascade (request → task → workspace → app default)
|
|
1169
|
+
let resolved;
|
|
1170
|
+
try {
|
|
1171
|
+
resolved = resolvePersona(req.personaId, task.defaultPersonaId, workspace?.defaultPersonaId || "");
|
|
1172
|
+
}
|
|
1173
|
+
catch (err) {
|
|
1174
|
+
throw new ConnectError(err.message, Code.FailedPrecondition);
|
|
1175
|
+
}
|
|
1176
|
+
// Validate pipe inputs before creating the session
|
|
1177
|
+
validatePipeInputs(req.pipe, req.parentSessionId);
|
|
1178
|
+
const taskPipeMode = req.pipe;
|
|
1179
|
+
const env = envRegistry.getEnvironment(environmentId);
|
|
1180
|
+
const sessionId = uuid();
|
|
1181
|
+
const { runtime, model, maxTurns, systemPrompt, persona } = resolved;
|
|
1182
|
+
const logPath = join(grackleHome, LOGS_DIR, sessionId);
|
|
1183
|
+
const taskPrompt = buildTaskPrompt(task.title, task.description, req.notes);
|
|
1184
|
+
const isOrchestrator = task.canDecompose && task.depth <= 1;
|
|
1185
|
+
const orchestratorCtx = isOrchestrator
|
|
1186
|
+
? fetchOrchestratorContext(task.workspaceId || "")
|
|
1187
|
+
: undefined;
|
|
1188
|
+
const systemContext = new SystemPromptBuilder({
|
|
1189
|
+
task: { title: task.title, description: task.description, notes: req.notes || "" },
|
|
1190
|
+
taskId: task.id,
|
|
1191
|
+
canDecompose: task.canDecompose,
|
|
1192
|
+
personaPrompt: systemPrompt,
|
|
1193
|
+
taskDepth: task.depth,
|
|
1194
|
+
...orchestratorCtx,
|
|
1195
|
+
...(orchestratorCtx && { triggerMode: "fresh" }),
|
|
1196
|
+
}).build();
|
|
1197
|
+
sessionStore.createSession(sessionId, environmentId, runtime, task.title, model, logPath, task.id, resolved.personaId, req.parentSessionId || "", // parentSessionId
|
|
1198
|
+
taskPipeMode || "");
|
|
1199
|
+
emit("task.started", { taskId: task.id, sessionId, workspaceId: task.workspaceId || "" });
|
|
1200
|
+
// Re-push stored tokens + provider credentials (scoped to runtime) so they're fresh for this session.
|
|
1201
|
+
// For local envs, skip file tokens — the PowerLine is on the same machine.
|
|
1202
|
+
await tokenPush.refreshTokensForTask(environmentId, runtime, env?.adapterType === "local" ? { excludeFileTokens: true } : undefined);
|
|
1203
|
+
const mcpServersJson = personaMcpServersToJson(persona);
|
|
1204
|
+
const useWorktrees = workspace?.useWorktrees ?? false;
|
|
1205
|
+
if (!useWorktrees) {
|
|
1206
|
+
logger.warn({ taskId: task.id, workspaceId: task.workspaceId, branch: task.branch }, "Worktrees disabled for workspace — agent will work in main checkout. Concurrent tasks on the same environment may conflict.");
|
|
1207
|
+
}
|
|
1208
|
+
const taskMcpPort = parseInt(process.env.GRACKLE_MCP_PORT || String(DEFAULT_MCP_PORT), 10);
|
|
1209
|
+
const taskMcpDialHost = toDialableHost(process.env.GRACKLE_HOST || "127.0.0.1");
|
|
1210
|
+
const taskMcpUrl = `http://${taskMcpDialHost}:${taskMcpPort}/mcp`;
|
|
1211
|
+
const taskMcpToken = createScopedToken({ sub: task.id, pid: task.workspaceId || "", per: resolved.personaId, sid: sessionId }, loadOrCreateApiKey(grackleHome));
|
|
1212
|
+
const powerlineReq = create(powerline.SpawnRequestSchema, {
|
|
1213
|
+
sessionId,
|
|
1214
|
+
runtime,
|
|
1215
|
+
prompt: taskPrompt,
|
|
1216
|
+
model,
|
|
1217
|
+
maxTurns,
|
|
1218
|
+
branch: task.branch,
|
|
1219
|
+
worktreeBasePath: task.branch
|
|
1220
|
+
? (workspace?.worktreeBasePath || process.env.GRACKLE_WORKTREE_BASE || "/workspace")
|
|
1221
|
+
: "",
|
|
1222
|
+
useWorktrees,
|
|
1223
|
+
systemContext,
|
|
1224
|
+
workspaceId: task.workspaceId ?? undefined,
|
|
1225
|
+
taskId: task.id,
|
|
1226
|
+
mcpServersJson,
|
|
1227
|
+
mcpUrl: taskMcpUrl,
|
|
1228
|
+
mcpToken: taskMcpToken,
|
|
1229
|
+
scriptContent: resolved.type === "script" ? resolved.script : "",
|
|
1230
|
+
pipe: req.pipe,
|
|
1231
|
+
});
|
|
1232
|
+
// Create lifecycle stream for the task session
|
|
1233
|
+
const taskLifecycleStream = streamRegistry.createStream(`lifecycle:${sessionId}`);
|
|
1234
|
+
const taskSpawnerId = req.parentSessionId || "__server__";
|
|
1235
|
+
streamRegistry.subscribe(taskLifecycleStream.id, taskSpawnerId, "rw", "detach", true);
|
|
1236
|
+
streamRegistry.subscribe(taskLifecycleStream.id, sessionId, "rw", "detach", false);
|
|
1237
|
+
// Set up IPC pipe stream (optional)
|
|
1238
|
+
let taskPipeFd = 0;
|
|
1239
|
+
if (taskPipeMode && taskPipeMode !== "detach" && req.parentSessionId) {
|
|
1240
|
+
const ipcStream = streamRegistry.createStream(`pipe:${sessionId}`);
|
|
1241
|
+
const parentSub = streamRegistry.subscribe(ipcStream.id, req.parentSessionId, "rw", taskPipeMode === "sync" ? "sync" : "async", true);
|
|
1242
|
+
streamRegistry.subscribe(ipcStream.id, sessionId, "rw", "async", false);
|
|
1243
|
+
taskPipeFd = parentSub.fd;
|
|
1244
|
+
if (taskPipeMode === "async") {
|
|
1245
|
+
ensureAsyncDeliveryListener(req.parentSessionId); // parent receives child messages
|
|
1246
|
+
ensureAsyncDeliveryListener(sessionId); // child receives parent messages
|
|
1247
|
+
}
|
|
1248
|
+
}
|
|
1249
|
+
processEventStream(conn.client.spawn(powerlineReq), {
|
|
1250
|
+
sessionId,
|
|
1251
|
+
logPath,
|
|
1252
|
+
workspaceId: task.workspaceId ?? undefined,
|
|
1253
|
+
taskId: task.id,
|
|
1254
|
+
systemContext,
|
|
1255
|
+
prompt: taskPrompt,
|
|
1256
|
+
});
|
|
1257
|
+
const row = sessionStore.getSession(sessionId);
|
|
1258
|
+
const taskProto = sessionRowToProto(row);
|
|
1259
|
+
taskProto.pipeFd = taskPipeFd;
|
|
1260
|
+
return taskProto;
|
|
1261
|
+
},
|
|
1262
|
+
// Complete a task: persist the COMPLETE status, tear down any live sessions,
// broadcast newly unblocked sibling tasks, and return the refreshed task row.
async completeTask(req) {
    // The synthetic root task is permanent and can never be completed.
    if (req.id === ROOT_TASK_ID) {
        throw new ConnectError("Cannot complete the system task", Code.PermissionDenied);
    }
    const task = taskStore.getTask(req.id);
    if (!task)
        throw new ConnectError(`Task not found: ${req.id}`, Code.NotFound);
    // Persist the status change before touching sessions/streams.
    taskStore.markTaskComplete(task.id, TASK_STATUS.COMPLETE);
    // Close lifecycle FDs for any active sessions — cascades to STOPPED via orphan callback
    const activeSessions = sessionStore.getActiveSessionsForTask(req.id);
    for (const activeSession of activeSessions) {
        cleanupLifecycleStream(activeSession.id);
        // Drop every remaining stream subscription this session holds.
        const subs = streamRegistry.getSubscriptionsForSession(activeSession.id);
        for (const sub of subs) {
            streamRegistry.unsubscribe(sub.id);
        }
    }
    // Check for newly unblocked tasks
    if (task.workspaceId) {
        const unblocked = taskStore.checkAndUnblock(task.workspaceId);
        for (const t of unblocked) {
            // Sessionless SYSTEM event so listeners learn about each task whose
            // dependencies are now satisfied.
            streamHub.publish(create(grackle.SessionEventSchema, {
                sessionId: "",
                type: grackle.EventType.SYSTEM,
                timestamp: new Date().toISOString(),
                content: JSON.stringify({
                    type: "task_unblocked",
                    taskId: t.id,
                    title: t.title,
                }),
                raw: "",
            }));
        }
    }
    emit("task.completed", { taskId: task.id, workspaceId: task.workspaceId || "" });
    // Re-read the row so the response reflects persisted state, and fold the
    // task's sessions into the effective status reported to the caller.
    const row = taskStore.getTask(task.id);
    const taskSessions = sessionStore.listSessionsForTask(task.id);
    const { status, latestSessionId } = computeTaskStatus(row.status, taskSessions);
    return taskRowToProto(row, undefined, status, latestSessionId);
},
|
|
1302
|
+
async resumeTask(req) {
|
|
1303
|
+
const task = taskStore.getTask(req.id);
|
|
1304
|
+
if (!task)
|
|
1305
|
+
throw new ConnectError(`Task not found: ${req.id}`, Code.NotFound);
|
|
1306
|
+
const latestSession = sessionStore.getLatestSessionForTask(req.id);
|
|
1307
|
+
if (!latestSession) {
|
|
1308
|
+
throw new ConnectError(`Task ${req.id} has no sessions to resume`, Code.FailedPrecondition);
|
|
1309
|
+
}
|
|
1310
|
+
if (![SESSION_STATUS.STOPPED, SESSION_STATUS.SUSPENDED].includes(latestSession.status)) {
|
|
1311
|
+
throw new ConnectError(`Latest session ${latestSession.id} is not resumable (status: ${latestSession.status})`, Code.FailedPrecondition);
|
|
1312
|
+
}
|
|
1313
|
+
if (!latestSession.runtimeSessionId) {
|
|
1314
|
+
throw new ConnectError(`Latest session ${latestSession.id} has no runtime session ID — cannot resume`, Code.FailedPrecondition);
|
|
1315
|
+
}
|
|
1316
|
+
const conn = adapterManager.getConnection(latestSession.environmentId);
|
|
1317
|
+
if (!conn) {
|
|
1318
|
+
throw new ConnectError(`Environment ${latestSession.environmentId} not connected`, Code.FailedPrecondition);
|
|
1319
|
+
}
|
|
1320
|
+
const powerlineReq = create(powerline.ResumeRequestSchema, {
|
|
1321
|
+
sessionId: latestSession.id,
|
|
1322
|
+
runtimeSessionId: latestSession.runtimeSessionId,
|
|
1323
|
+
runtime: latestSession.runtime,
|
|
1324
|
+
});
|
|
1325
|
+
const logPath = latestSession.logPath || join(grackleHome, LOGS_DIR, latestSession.id);
|
|
1326
|
+
processEventStream(conn.client.resume(powerlineReq), {
|
|
1327
|
+
sessionId: latestSession.id,
|
|
1328
|
+
logPath,
|
|
1329
|
+
workspaceId: task.workspaceId ?? undefined,
|
|
1330
|
+
taskId: task.id,
|
|
1331
|
+
});
|
|
1332
|
+
emit("task.started", { taskId: task.id, sessionId: latestSession.id, workspaceId: task.workspaceId || "" });
|
|
1333
|
+
const row = sessionStore.getSession(latestSession.id);
|
|
1334
|
+
return sessionRowToProto(row);
|
|
1335
|
+
},
|
|
1336
|
+
// Stop a task: force-terminate its active sessions (ended as INTERRUPTED),
// then mark the task COMPLETE and return its refreshed row.
async stopTask(req) {
    const task = taskStore.getTask(req.id);
    if (!task) {
        throw new ConnectError(`Task not found: ${req.id}`, Code.NotFound);
    }
    // Terminate all active sessions for this task using the fd-closure pattern
    const activeSessions = sessionStore.getActiveSessionsForTask(req.id);
    for (const activeSession of activeSessions) {
        cleanupLifecycleStream(activeSession.id);
        const subs = streamRegistry.getSubscriptionsForSession(activeSession.id);
        for (const sub of subs) {
            streamRegistry.unsubscribe(sub.id);
        }
        // Re-read the row — the cleanup above may have already transitioned
        // it — and only touch sessions that are not in a terminal state.
        const current = sessionStore.getSession(activeSession.id);
        if (current && !TERMINAL_SESSION_STATUSES.has(current.status)) {
            sessionStore.updateSession(activeSession.id, SESSION_STATUS.STOPPED, undefined, undefined, END_REASON.INTERRUPTED);
            // Tell stream listeners the session was interrupted.
            streamHub.publish(create(grackle.SessionEventSchema, {
                sessionId: activeSession.id,
                type: grackle.EventType.STATUS,
                timestamp: new Date().toISOString(),
                content: END_REASON.INTERRUPTED,
                raw: "",
            }));
        }
    }
    // Mark task complete
    taskStore.markTaskComplete(req.id, TASK_STATUS.COMPLETE);
    // Check for newly unblocked tasks
    if (task.workspaceId) {
        taskStore.checkAndUnblock(task.workspaceId);
    }
    emit("task.completed", { taskId: task.id, workspaceId: task.workspaceId || "" });
    // Report the effective status derived from the task row plus its sessions.
    const updated = taskStore.getTask(req.id);
    const taskSessions = sessionStore.listSessionsForTask(req.id);
    const { status, latestSessionId } = computeTaskStatus(updated.status, taskSessions);
    return taskRowToProto(updated, undefined, status, latestSessionId);
},
|
|
1373
|
+
async deleteTask(req) {
|
|
1374
|
+
if (req.id === ROOT_TASK_ID) {
|
|
1375
|
+
throw new ConnectError("Cannot delete the system task", Code.PermissionDenied);
|
|
1376
|
+
}
|
|
1377
|
+
const task = taskStore.getTask(req.id);
|
|
1378
|
+
if (!task) {
|
|
1379
|
+
throw new ConnectError(`Task not found: ${req.id}`, Code.NotFound);
|
|
1380
|
+
}
|
|
1381
|
+
const children = taskStore.getChildren(req.id);
|
|
1382
|
+
if (children.length > 0) {
|
|
1383
|
+
throw new ConnectError("Cannot delete task with children. Delete children first.", Code.FailedPrecondition);
|
|
1384
|
+
}
|
|
1385
|
+
// Terminate all active sessions via lifecycle cleanup before deleting the task
|
|
1386
|
+
const activeSessions = sessionStore.getActiveSessionsForTask(req.id);
|
|
1387
|
+
for (const activeSession of activeSessions) {
|
|
1388
|
+
cleanupLifecycleStream(activeSession.id);
|
|
1389
|
+
const subs = streamRegistry.getSubscriptionsForSession(activeSession.id);
|
|
1390
|
+
for (const sub of subs) {
|
|
1391
|
+
streamRegistry.unsubscribe(sub.id);
|
|
1392
|
+
}
|
|
1393
|
+
}
|
|
1394
|
+
const changes = taskStore.deleteTask(req.id);
|
|
1395
|
+
if (changes === 0) {
|
|
1396
|
+
logger.error({ taskId: req.id }, "deleteTask returned 0 changes despite task existing");
|
|
1397
|
+
throw new ConnectError(`Failed to delete task ${req.id}: no rows affected`, Code.Internal);
|
|
1398
|
+
}
|
|
1399
|
+
emit("task.deleted", { taskId: req.id, workspaceId: task.workspaceId || "" });
|
|
1400
|
+
return create(grackle.EmptySchema, {});
|
|
1401
|
+
},
|
|
1402
|
+
// ─── Personas ───────────────────────────────────────────────
|
|
1403
|
+
async listPersonas() {
|
|
1404
|
+
const rows = personaStore.listPersonas();
|
|
1405
|
+
return create(grackle.PersonaListSchema, {
|
|
1406
|
+
personas: rows.map(personaRowToProto),
|
|
1407
|
+
});
|
|
1408
|
+
},
|
|
1409
|
+
async createPersona(req) {
|
|
1410
|
+
if (!req.name)
|
|
1411
|
+
throw new ConnectError("Persona name is required", Code.InvalidArgument);
|
|
1412
|
+
const personaType = req.type || "agent";
|
|
1413
|
+
if (personaType !== "agent" && personaType !== "script") {
|
|
1414
|
+
throw new ConnectError(`Invalid persona type: "${personaType}". Must be "agent" or "script".`, Code.InvalidArgument);
|
|
1415
|
+
}
|
|
1416
|
+
if (personaType === "script") {
|
|
1417
|
+
if (!req.script) {
|
|
1418
|
+
throw new ConnectError("Script content is required for script personas", Code.InvalidArgument);
|
|
1419
|
+
}
|
|
1420
|
+
}
|
|
1421
|
+
else {
|
|
1422
|
+
if (!req.systemPrompt) {
|
|
1423
|
+
throw new ConnectError("Persona system_prompt is required", Code.InvalidArgument);
|
|
1424
|
+
}
|
|
1425
|
+
}
|
|
1426
|
+
// Enforce unique ID and unique name
|
|
1427
|
+
let id = slugify(req.name) || uuid().slice(0, 8);
|
|
1428
|
+
if (personaStore.getPersona(id)) {
|
|
1429
|
+
id = `${id}-${uuid().slice(0, 4)}`;
|
|
1430
|
+
}
|
|
1431
|
+
if (personaStore.getPersonaByName(req.name)) {
|
|
1432
|
+
throw new ConnectError(`Persona with name "${req.name}" already exists`, Code.AlreadyExists);
|
|
1433
|
+
}
|
|
1434
|
+
const toolConfigJson = JSON.stringify({
|
|
1435
|
+
allowedTools: [...(req.toolConfig?.allowedTools || [])],
|
|
1436
|
+
disallowedTools: [...(req.toolConfig?.disallowedTools || [])],
|
|
1437
|
+
});
|
|
1438
|
+
const mcpServersJson = JSON.stringify(req.mcpServers.map((s) => ({
|
|
1439
|
+
name: s.name,
|
|
1440
|
+
command: s.command,
|
|
1441
|
+
args: [...s.args],
|
|
1442
|
+
tools: [...s.tools],
|
|
1443
|
+
})));
|
|
1444
|
+
personaStore.createPersona(id, req.name, req.description, req.systemPrompt, toolConfigJson, req.runtime, req.model, req.maxTurns, mcpServersJson, personaType, req.script);
|
|
1445
|
+
emit("persona.created", { personaId: id });
|
|
1446
|
+
const row = personaStore.getPersona(id);
|
|
1447
|
+
return personaRowToProto(row);
|
|
1448
|
+
},
|
|
1449
|
+
async getPersona(req) {
|
|
1450
|
+
const row = personaStore.getPersona(req.id);
|
|
1451
|
+
if (!row)
|
|
1452
|
+
throw new ConnectError(`Persona not found: ${req.id}`, Code.NotFound);
|
|
1453
|
+
return personaRowToProto(row);
|
|
1454
|
+
},
|
|
1455
|
+
// Partial persona update. Empty-string / zero scalar fields in the request
// mean "keep the stored value"; toolConfig and mcpServers are only replaced
// when the request supplies non-empty content.
async updatePersona(req) {
    const existing = personaStore.getPersona(req.id);
    if (!existing)
        throw new ConnectError(`Persona not found: ${req.id}`, Code.NotFound);
    // Only update toolConfig/mcpServers if the request provides non-empty values;
    // otherwise keep the existing stored value.
    const hasNewToolConfig = !!req.toolConfig &&
        (req.toolConfig.allowedTools.length > 0 ||
            req.toolConfig.disallowedTools.length > 0);
    const toolConfigJson = hasNewToolConfig
        ? JSON.stringify({
            allowedTools: [...(req.toolConfig?.allowedTools || [])],
            disallowedTools: [...(req.toolConfig?.disallowedTools || [])],
        })
        : existing.toolConfig;
    const hasNewMcpServers = Array.isArray(req.mcpServers) && req.mcpServers.length > 0;
    const mcpServersJson = hasNewMcpServers
        ? JSON.stringify(req.mcpServers.map((s) => ({
            name: s.name,
            command: s.command,
            args: [...s.args],
            tools: [...s.tools],
        })))
        : existing.mcpServers;
    // Treat empty string / 0 as "not set" and keep existing value
    const name = req.name || existing.name;
    // A rename must not collide with another persona's name.
    if (name !== existing.name && personaStore.getPersonaByName(name)) {
        throw new ConnectError(`Persona with name "${name}" already exists`, Code.AlreadyExists);
    }
    const description = req.description || existing.description;
    const systemPrompt = req.systemPrompt || existing.systemPrompt;
    const runtime = req.runtime || existing.runtime;
    const model = req.model || existing.model;
    const maxTurns = req.maxTurns === 0 ? existing.maxTurns : req.maxTurns;
    // Empty string means "keep existing", non-empty means "set to this value"
    const updatedType = req.type || existing.type;
    const updatedScript = req.script || existing.script;
    personaStore.updatePersona(req.id, name, description, systemPrompt, toolConfigJson, runtime, model, maxTurns, mcpServersJson, updatedType, updatedScript);
    emit("persona.updated", { personaId: req.id });
    // Read back the stored row so the response reflects persisted state.
    const row = personaStore.getPersona(req.id);
    return personaRowToProto(row);
},
|
|
1497
|
+
async deletePersona(req) {
|
|
1498
|
+
personaStore.deletePersona(req.id);
|
|
1499
|
+
emit("persona.deleted", { personaId: req.id });
|
|
1500
|
+
return create(grackle.EmptySchema, {});
|
|
1501
|
+
},
|
|
1502
|
+
// ─── Settings ─────────────────────────────────────────────
|
|
1503
|
+
async getSetting(req) {
|
|
1504
|
+
if (!isAllowedSettingKey(req.key)) {
|
|
1505
|
+
throw new ConnectError(`Setting key not allowed: ${req.key}`, Code.InvalidArgument);
|
|
1506
|
+
}
|
|
1507
|
+
const value = settingsStore.getSetting(req.key);
|
|
1508
|
+
return create(grackle.SettingResponseSchema, {
|
|
1509
|
+
key: req.key,
|
|
1510
|
+
value: value ?? "",
|
|
1511
|
+
});
|
|
1512
|
+
},
|
|
1513
|
+
async setSetting(req) {
|
|
1514
|
+
if (!isAllowedSettingKey(req.key)) {
|
|
1515
|
+
throw new ConnectError(`Setting key not allowed: ${req.key}`, Code.InvalidArgument);
|
|
1516
|
+
}
|
|
1517
|
+
// Validate persona exists and has required fields when setting default_persona_id
|
|
1518
|
+
if (req.key === "default_persona_id" && req.value) {
|
|
1519
|
+
const persona = personaStore.getPersona(req.value);
|
|
1520
|
+
if (!persona) {
|
|
1521
|
+
throw new ConnectError(`Persona not found: ${req.value}`, Code.NotFound);
|
|
1522
|
+
}
|
|
1523
|
+
if (!persona.runtime || !persona.model) {
|
|
1524
|
+
throw new ConnectError(`Persona "${persona.name}" must have runtime and model configured`, Code.FailedPrecondition);
|
|
1525
|
+
}
|
|
1526
|
+
}
|
|
1527
|
+
settingsStore.setSetting(req.key, req.value);
|
|
1528
|
+
emit("setting.changed", { key: req.key, value: req.value });
|
|
1529
|
+
return create(grackle.SettingResponseSchema, {
|
|
1530
|
+
key: req.key,
|
|
1531
|
+
value: req.value,
|
|
1532
|
+
});
|
|
1533
|
+
},
|
|
1534
|
+
// ─── Findings ────────────────────────────────────────────
|
|
1535
|
+
async postFinding(req) {
|
|
1536
|
+
if (!req.title) {
|
|
1537
|
+
throw new ConnectError("title is required", Code.InvalidArgument);
|
|
1538
|
+
}
|
|
1539
|
+
const id = uuid().slice(0, 8);
|
|
1540
|
+
findingStore.postFinding(id, req.workspaceId, req.taskId, req.sessionId, req.category, req.title, req.content, [...req.tags]);
|
|
1541
|
+
emit("finding.posted", { workspaceId: req.workspaceId, findingId: id });
|
|
1542
|
+
const rows = findingStore.queryFindings(req.workspaceId);
|
|
1543
|
+
const row = rows.find((r) => r.id === id);
|
|
1544
|
+
return findingRowToProto(row);
|
|
1545
|
+
},
|
|
1546
|
+
async queryFindings(req) {
|
|
1547
|
+
const rows = findingStore.queryFindings(req.workspaceId, req.categories.length > 0 ? [...req.categories] : undefined, req.tags.length > 0 ? [...req.tags] : undefined, req.limit || undefined);
|
|
1548
|
+
return create(grackle.FindingListSchema, {
|
|
1549
|
+
findings: rows.map(findingRowToProto),
|
|
1550
|
+
});
|
|
1551
|
+
},
|
|
1552
|
+
// ─── Codespaces ────────────────────────────────────────────
|
|
1553
|
+
async listCodespaces() {
|
|
1554
|
+
try {
|
|
1555
|
+
const result = await exec("gh", [
|
|
1556
|
+
"codespace",
|
|
1557
|
+
"list",
|
|
1558
|
+
"--json",
|
|
1559
|
+
"name,repository,state,gitStatus",
|
|
1560
|
+
"--limit",
|
|
1561
|
+
String(GH_CODESPACE_LIST_LIMIT),
|
|
1562
|
+
], { timeout: GH_CODESPACE_LIST_TIMEOUT_MS });
|
|
1563
|
+
const entries = JSON.parse(result.stdout || "[]");
|
|
1564
|
+
return create(grackle.CodespaceListSchema, {
|
|
1565
|
+
codespaces: entries.map((e) => create(grackle.CodespaceInfoSchema, {
|
|
1566
|
+
name: String(e.name ?? ""),
|
|
1567
|
+
repository: String(e.repository ?? ""),
|
|
1568
|
+
state: String(e.state ?? ""),
|
|
1569
|
+
gitStatus: String(e.gitStatus ?? ""),
|
|
1570
|
+
})),
|
|
1571
|
+
});
|
|
1572
|
+
}
|
|
1573
|
+
catch (err) {
|
|
1574
|
+
logger.warn({ err }, "Failed to list codespaces");
|
|
1575
|
+
return create(grackle.CodespaceListSchema, {
|
|
1576
|
+
codespaces: [],
|
|
1577
|
+
error: formatGhError(err, "list codespaces"),
|
|
1578
|
+
});
|
|
1579
|
+
}
|
|
1580
|
+
},
|
|
1581
|
+
async createCodespace(req) {
|
|
1582
|
+
if (!req.repo.trim()) {
|
|
1583
|
+
throw new ConnectError("repo is required", Code.InvalidArgument);
|
|
1584
|
+
}
|
|
1585
|
+
const trimmedRepo = req.repo.trim();
|
|
1586
|
+
const createArgs = ["codespace", "create", "--repo", trimmedRepo];
|
|
1587
|
+
if (req.machine.trim()) {
|
|
1588
|
+
createArgs.push("--machine", req.machine.trim());
|
|
1589
|
+
}
|
|
1590
|
+
try {
|
|
1591
|
+
const result = await exec("gh", createArgs, {
|
|
1592
|
+
timeout: GH_CODESPACE_CREATE_TIMEOUT_MS,
|
|
1593
|
+
});
|
|
1594
|
+
return create(grackle.CreateCodespaceResponseSchema, {
|
|
1595
|
+
name: result.stdout.trim(),
|
|
1596
|
+
repository: trimmedRepo,
|
|
1597
|
+
});
|
|
1598
|
+
}
|
|
1599
|
+
catch (err) {
|
|
1600
|
+
logger.error({ err, repo: trimmedRepo }, "Failed to create codespace");
|
|
1601
|
+
throw new ConnectError(formatGhError(err, "create codespace"), Code.Internal);
|
|
1602
|
+
}
|
|
1603
|
+
},
|
|
1604
|
+
async generatePairingCode() {
|
|
1605
|
+
const code = generatePairingCode();
|
|
1606
|
+
if (!code) {
|
|
1607
|
+
throw new ConnectError("Maximum active pairing codes reached. Wait for existing codes to expire.", Code.ResourceExhausted);
|
|
1608
|
+
}
|
|
1609
|
+
const webPort = parseInt(process.env.GRACKLE_WEB_PORT || String(DEFAULT_WEB_PORT), 10);
|
|
1610
|
+
const bindHost = process.env.GRACKLE_HOST || "127.0.0.1";
|
|
1611
|
+
const WILDCARD_ADDRESSES = new Set(["0.0.0.0", "::", "0:0:0:0:0:0:0:0"]);
|
|
1612
|
+
const pairingHost = WILDCARD_ADDRESSES.has(bindHost)
|
|
1613
|
+
? (detectLanIp() || "localhost")
|
|
1614
|
+
: (bindHost === "127.0.0.1" || bindHost === "::1" ? "localhost" : bindHost);
|
|
1615
|
+
const url = `http://${pairingHost}:${webPort}/pair?code=${code}`;
|
|
1616
|
+
return create(grackle.PairingCodeResponseSchema, { code, url });
|
|
1617
|
+
},
|
|
1618
|
+
// ── Knowledge Graph ────────────────────────────────────────
|
|
1619
|
+
async searchKnowledge(req) {
|
|
1620
|
+
const embedder = getKnowledgeEmbedder();
|
|
1621
|
+
if (!embedder) {
|
|
1622
|
+
throw new ConnectError("Knowledge graph not available", Code.Unavailable);
|
|
1623
|
+
}
|
|
1624
|
+
const results = await knowledgeSearch(req.query, embedder, {
|
|
1625
|
+
limit: req.limit || 10,
|
|
1626
|
+
workspaceId: req.workspaceId || undefined,
|
|
1627
|
+
});
|
|
1628
|
+
return create(grackle.SearchKnowledgeResponseSchema, {
|
|
1629
|
+
results: results.map((r) => create(grackle.SearchKnowledgeResultSchema, {
|
|
1630
|
+
score: r.score,
|
|
1631
|
+
node: knowledgeNodeToProto(r.node),
|
|
1632
|
+
edges: r.edges.map(knowledgeEdgeToProto),
|
|
1633
|
+
})),
|
|
1634
|
+
});
|
|
1635
|
+
},
|
|
1636
|
+
async getKnowledgeNode(req) {
|
|
1637
|
+
if (!isKnowledgeEnabled()) {
|
|
1638
|
+
throw new ConnectError("Knowledge graph not available", Code.Unavailable);
|
|
1639
|
+
}
|
|
1640
|
+
const result = await getKnowledgeNodeById(req.id);
|
|
1641
|
+
if (!result) {
|
|
1642
|
+
throw new ConnectError(`Knowledge node not found: ${req.id}`, Code.NotFound);
|
|
1643
|
+
}
|
|
1644
|
+
return create(grackle.GetKnowledgeNodeResponseSchema, {
|
|
1645
|
+
node: knowledgeNodeToProto(result.node),
|
|
1646
|
+
edges: result.edges.map(knowledgeEdgeToProto),
|
|
1647
|
+
});
|
|
1648
|
+
},
|
|
1649
|
+
async expandKnowledgeNode(req) {
|
|
1650
|
+
if (!isKnowledgeEnabled()) {
|
|
1651
|
+
throw new ConnectError("Knowledge graph not available", Code.Unavailable);
|
|
1652
|
+
}
|
|
1653
|
+
const result = await expandNode(req.id, {
|
|
1654
|
+
depth: req.depth || 1,
|
|
1655
|
+
edgeTypes: req.edgeTypes.length > 0 ? req.edgeTypes : undefined,
|
|
1656
|
+
});
|
|
1657
|
+
return create(grackle.ExpandKnowledgeNodeResponseSchema, {
|
|
1658
|
+
nodes: result.nodes.map(knowledgeNodeToProto),
|
|
1659
|
+
edges: result.edges.map(knowledgeEdgeToProto),
|
|
1660
|
+
});
|
|
1661
|
+
},
|
|
1662
|
+
async listRecentKnowledgeNodes(req) {
|
|
1663
|
+
if (!isKnowledgeEnabled()) {
|
|
1664
|
+
throw new ConnectError("Knowledge graph not available", Code.Unavailable);
|
|
1665
|
+
}
|
|
1666
|
+
const result = await listRecentNodes(req.limit || 20, req.workspaceId || undefined);
|
|
1667
|
+
return create(grackle.ListRecentKnowledgeNodesResponseSchema, {
|
|
1668
|
+
nodes: result.nodes.map(knowledgeNodeToProto),
|
|
1669
|
+
edges: result.edges.map(knowledgeEdgeToProto),
|
|
1670
|
+
});
|
|
1671
|
+
},
|
|
1672
|
+
async createKnowledgeNode(req) {
|
|
1673
|
+
const embedder = getKnowledgeEmbedder();
|
|
1674
|
+
if (!embedder) {
|
|
1675
|
+
throw new ConnectError("Knowledge graph not available", Code.Unavailable);
|
|
1676
|
+
}
|
|
1677
|
+
const chunker = createPassThroughChunker();
|
|
1678
|
+
const embedded = await ingest(req.content, chunker, embedder);
|
|
1679
|
+
if (embedded.length === 0) {
|
|
1680
|
+
throw new ConnectError("Content produced no embeddings", Code.InvalidArgument);
|
|
1681
|
+
}
|
|
1682
|
+
const id = await createNativeNode({
|
|
1683
|
+
category: (req.category || "insight"),
|
|
1684
|
+
title: req.title,
|
|
1685
|
+
content: req.content,
|
|
1686
|
+
tags: [...req.tags],
|
|
1687
|
+
embedding: embedded[0].vector,
|
|
1688
|
+
workspaceId: req.workspaceId || "",
|
|
1689
|
+
});
|
|
1690
|
+
return create(grackle.CreateKnowledgeNodeResponseSchema, { id });
|
|
1691
|
+
},
|
|
1692
|
+
});
|
|
1693
|
+
}
|
|
1694
|
+
// ---------------------------------------------------------------------------
|
|
1695
|
+
// Knowledge graph proto converters
|
|
1696
|
+
// ---------------------------------------------------------------------------
|
|
1697
|
+
/** Convert a KnowledgeNode to its proto representation. */
|
|
1698
|
+
/**
 * Convert a KnowledgeNode to its proto representation.
 * Fields specific to the other kind are blanked: reference-only fields are
 * "" for native nodes, native-only fields are ""/[] for reference nodes.
 */
function knowledgeNodeToProto(node) {
    const isReference = node.kind === "reference";
    const isNative = node.kind === "native";
    return create(grackle.KnowledgeNodeProtoSchema, {
        id: node.id,
        kind: node.kind,
        workspaceId: node.workspaceId,
        createdAt: node.createdAt,
        updatedAt: node.updatedAt,
        sourceType: isReference ? node.sourceType : "",
        sourceId: isReference ? node.sourceId : "",
        label: isReference ? node.label : "",
        category: isNative ? node.category : "",
        title: isNative ? node.title : "",
        content: isNative ? node.content : "",
        tags: isNative ? node.tags : [],
    });
}
|
|
1714
|
+
/** Convert a KnowledgeEdge to its proto representation. */
|
|
1715
|
+
/**
 * Convert a KnowledgeEdge to its proto representation.
 * Edge metadata, when present, is serialized to a JSON string.
 */
function knowledgeEdgeToProto(edge) {
    const { fromId, toId, type, metadata, createdAt } = edge;
    return create(grackle.KnowledgeEdgeProtoSchema, {
        fromId,
        toId,
        type,
        metadataJson: metadata ? JSON.stringify(metadata) : "",
        createdAt,
    });
}
|
|
1724
|
+
//# sourceMappingURL=grpc-service.js.map
|