@linkedclaw/cli 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +172 -10
- package/dist/bin.js +1921 -163
- package/dist/bin.js.map +1 -1
- package/package.json +3 -3
- package/src/arena/api.ts +154 -0
- package/src/arena/hash.ts +15 -0
- package/src/arena/types.ts +106 -0
- package/src/bin.ts +12 -2
- package/src/commands/agent.ts +264 -0
- package/src/commands/arena.ts +393 -0
- package/src/commands/converge.ts +969 -0
- package/src/commands/provider.ts +8 -8
- package/src/commands/requester.ts +64 -21
- package/src/config.ts +11 -2
- package/src/converge/api.ts +213 -0
- package/src/converge/hash.ts +35 -0
- package/src/converge/lock.ts +30 -0
- package/src/converge/staging.ts +83 -0
- package/src/converge/types.ts +91 -0
- package/src/converge/workspace.ts +92 -0
- package/src/handlers/subprocess.ts +8 -8
- package/src/types.ts +5 -5
- package/test/agent-help.test.ts +207 -0
- package/test/arena-api.test.ts +211 -0
- package/test/arena-commands.test.ts +559 -0
- package/test/arena-hash.test.ts +33 -0
- package/test/cli-help.test.ts +23 -3
- package/test/converge-accept.test.ts +206 -0
- package/test/converge-decision.test.ts +274 -0
- package/test/converge-hash.test.ts +58 -0
- package/test/converge-help.test.ts +58 -0
- package/test/converge-lock.test.ts +48 -0
- package/test/converge-review.test.ts +135 -0
- package/test/converge-run.test.ts +286 -0
- package/test/converge-staging.test.ts +161 -0
- package/test/converge-status.test.ts +141 -0
- package/test/converge-workspace.test.ts +92 -0
package/src/commands/provider.ts
CHANGED
|
@@ -6,7 +6,7 @@ import {
|
|
|
6
6
|
RelayClient,
|
|
7
7
|
type ProviderHandler,
|
|
8
8
|
} from "@linkedclaw/provider-runtime";
|
|
9
|
-
import type { CreateAgentRequest, UpdateAgentRequest,
|
|
9
|
+
import type { CreateAgentRequest, UpdateAgentRequest, GigTaskSubmitRequest } from "../types.js";
|
|
10
10
|
import type { ProviderConfig } from "../config.js";
|
|
11
11
|
import { buildContext } from "../context.js";
|
|
12
12
|
import { SubprocessHandler } from "../handlers/subprocess.js";
|
|
@@ -101,7 +101,7 @@ export function registerProviderCommands(program: Command): void {
|
|
|
101
101
|
|
|
102
102
|
const runtime = new ProviderRuntime({
|
|
103
103
|
cloud: {
|
|
104
|
-
|
|
104
|
+
gigTasks: {
|
|
105
105
|
accept: (taskId, body) => providerClient.acceptGigTask(taskId, body as unknown as Record<string, unknown>),
|
|
106
106
|
submit: (taskId, body) => providerClient.submitGigTask(taskId, body as unknown as Record<string, unknown>),
|
|
107
107
|
},
|
|
@@ -144,9 +144,9 @@ export function registerProviderCommands(program: Command): void {
|
|
|
144
144
|
|
|
145
145
|
provider
|
|
146
146
|
.command("pick <bct_id>")
|
|
147
|
-
.description("Manually accept a
|
|
147
|
+
.description("Manually accept a gig task (provider side)")
|
|
148
148
|
.requiredOption("--agent-id <agt_id>", "Which of your agents is accepting")
|
|
149
|
-
.option("--slot-key <key>", "Slot key for sliced
|
|
149
|
+
.option("--slot-key <key>", "Slot key for sliced gig tasks")
|
|
150
150
|
.option("--human", "Human-readable output")
|
|
151
151
|
.action(async (taskId: string, opts) => {
|
|
152
152
|
await runCommand(async () => {
|
|
@@ -159,12 +159,12 @@ export function registerProviderCommands(program: Command): void {
|
|
|
159
159
|
|
|
160
160
|
provider
|
|
161
161
|
.command("submit <bct_id> <result_file>")
|
|
162
|
-
.description('Submit a
|
|
162
|
+
.description('Submit a gig task result. result_file = JSON path or "-" for stdin.')
|
|
163
163
|
.option("--human", "Human-readable output")
|
|
164
164
|
.action(async (taskId: string, resultFile: string, opts) => {
|
|
165
165
|
await runCommand(async () => {
|
|
166
166
|
const raw = resultFile === "-" ? await readStdin() : readFileSync(resultFile, "utf8");
|
|
167
|
-
const body = JSON.parse(raw) as
|
|
167
|
+
const body = JSON.parse(raw) as GigTaskSubmitRequest;
|
|
168
168
|
const { providerClient } = buildContext();
|
|
169
169
|
return providerClient.submitGigTask(taskId, body as unknown as Record<string, unknown>);
|
|
170
170
|
}, { human: opts.human });
|
|
@@ -225,10 +225,10 @@ function makeHttpHandler(url: string): ProviderHandler {
|
|
|
225
225
|
error?: { code: string; message: string };
|
|
226
226
|
};
|
|
227
227
|
},
|
|
228
|
-
async
|
|
228
|
+
async onGigTaskOffer(evt) {
|
|
229
229
|
return (await postEvent(url, evt)) as { accept: boolean; slot_key?: string };
|
|
230
230
|
},
|
|
231
|
-
async
|
|
231
|
+
async onGigTaskExecute(evt) {
|
|
232
232
|
return (await postEvent(url, evt)) as { result_payload?: Record<string, unknown> };
|
|
233
233
|
},
|
|
234
234
|
};
|
|
@@ -2,10 +2,11 @@ import { Command } from "commander";
|
|
|
2
2
|
import { readFileSync } from "node:fs";
|
|
3
3
|
import { load as yamlLoad } from "js-yaml";
|
|
4
4
|
import type { ConsumerClient } from "@linkedclaw/consumer";
|
|
5
|
+
import { fetchCapabilitySchema, CapabilitySchemaError } from "@linkedclaw/consumer";
|
|
5
6
|
import type {
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
7
|
+
AcceptGigTaskRequest,
|
|
8
|
+
CreateGigTaskRequest,
|
|
9
|
+
GigTaskSubmitRequest,
|
|
9
10
|
EndSessionRequest,
|
|
10
11
|
InvokeRequest,
|
|
11
12
|
TaskManifest,
|
|
@@ -19,7 +20,7 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
19
20
|
.description("Search public agent listings by capability")
|
|
20
21
|
.option("--owner <owner>", '"me" or a "usr_..." id')
|
|
21
22
|
.option("--status <status>", "filter by status (online/offline/disabled)")
|
|
22
|
-
.option("--sort <sort>", "newest |
|
|
23
|
+
.option("--sort <sort>", "newest | trust (default)")
|
|
23
24
|
.option("--human", "Human-readable output")
|
|
24
25
|
.action(async (capability: string, opts) => {
|
|
25
26
|
await runCommand(async () => {
|
|
@@ -175,12 +176,12 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
175
176
|
}, { human: opts.human });
|
|
176
177
|
});
|
|
177
178
|
|
|
178
|
-
const
|
|
179
|
+
const gigTask = program.command("gig-task").description("Gig Task commands");
|
|
179
180
|
|
|
180
|
-
|
|
181
|
+
gigTask
|
|
181
182
|
.command("create <manifest>")
|
|
182
183
|
.description(
|
|
183
|
-
'Create a
|
|
184
|
+
'Create a gig task from a YAML/JSON manifest file. Use "-" for stdin. ' +
|
|
184
185
|
'Required fields: capability, instruction, target_providers, credits_per_provider.',
|
|
185
186
|
)
|
|
186
187
|
.option("--human", "Human-readable output")
|
|
@@ -188,14 +189,14 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
188
189
|
await runCommand(async () => {
|
|
189
190
|
const { consumer } = buildContext();
|
|
190
191
|
const raw = manifestPath === "-" ? await readStdin() : readFileSync(manifestPath, "utf8");
|
|
191
|
-
const body = parseYamlOrJson(raw) as
|
|
192
|
+
const body = parseYamlOrJson(raw) as CreateGigTaskRequest;
|
|
192
193
|
return consumer.createGigTask(body as unknown as Record<string, unknown>);
|
|
193
194
|
}, { human: opts.human });
|
|
194
195
|
});
|
|
195
196
|
|
|
196
|
-
|
|
197
|
+
gigTask
|
|
197
198
|
.command("get <bct_id>")
|
|
198
|
-
.description("Get a
|
|
199
|
+
.description("Get a gig task by id")
|
|
199
200
|
.option("--human", "Human-readable output")
|
|
200
201
|
.action(async (taskId: string, opts) => {
|
|
201
202
|
await runCommand(async () => {
|
|
@@ -204,9 +205,9 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
204
205
|
}, { human: opts.human });
|
|
205
206
|
});
|
|
206
207
|
|
|
207
|
-
|
|
208
|
+
gigTask
|
|
208
209
|
.command("list")
|
|
209
|
-
.description("List
|
|
210
|
+
.description("List gig tasks I own")
|
|
210
211
|
.option("--status <s>", "Filter by status")
|
|
211
212
|
.option("--human", "Human-readable output")
|
|
212
213
|
.action(async (opts) => {
|
|
@@ -218,9 +219,9 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
218
219
|
}, { human: opts.human });
|
|
219
220
|
});
|
|
220
221
|
|
|
221
|
-
|
|
222
|
+
gigTask
|
|
222
223
|
.command("available")
|
|
223
|
-
.description("List open
|
|
224
|
+
.description("List open gig tasks I could pick up (as provider)")
|
|
224
225
|
.option("--human", "Human-readable output")
|
|
225
226
|
.action(async (opts) => {
|
|
226
227
|
await runCommand(async () => {
|
|
@@ -229,25 +230,25 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
229
230
|
}, { human: opts.human });
|
|
230
231
|
});
|
|
231
232
|
|
|
232
|
-
|
|
233
|
+
gigTask
|
|
233
234
|
.command("accept <bct_id>")
|
|
234
|
-
.description("Accept a
|
|
235
|
+
.description("Accept a gig task (provider side) — returns a result_id")
|
|
235
236
|
.requiredOption("--agent-id <agt_id>", "Which of your agents is accepting")
|
|
236
|
-
.option("--slot-key <key>", "Slot key for sliced
|
|
237
|
+
.option("--slot-key <key>", "Slot key for sliced gig tasks")
|
|
237
238
|
.option("--human", "Human-readable output")
|
|
238
239
|
.action(async (taskId: string, opts) => {
|
|
239
240
|
await runCommand(async () => {
|
|
240
241
|
const { providerClient } = buildContext();
|
|
241
|
-
const body:
|
|
242
|
+
const body: AcceptGigTaskRequest = { agent_id: opts.agentId };
|
|
242
243
|
if (opts.slotKey !== undefined) body.slot_key = opts.slotKey;
|
|
243
244
|
return providerClient.acceptGigTask(taskId, body as unknown as Record<string, unknown>);
|
|
244
245
|
}, { human: opts.human });
|
|
245
246
|
});
|
|
246
247
|
|
|
247
|
-
|
|
248
|
+
gigTask
|
|
248
249
|
.command("submit <bct_id>")
|
|
249
250
|
.description(
|
|
250
|
-
"Submit
|
|
251
|
+
"Submit gig task result (provider side). Body must include `result_data` (string) " +
|
|
251
252
|
"and may include `result_payload` (object) and `proof` (array).",
|
|
252
253
|
)
|
|
253
254
|
.requiredOption("--body <json>", 'JSON body (or "-" to read from stdin)')
|
|
@@ -256,7 +257,7 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
256
257
|
await runCommand(async () => {
|
|
257
258
|
const { providerClient } = buildContext();
|
|
258
259
|
const raw = opts.body === "-" ? await readStdin() : opts.body;
|
|
259
|
-
const body = parseJsonOrFail(raw, "--body") as unknown as
|
|
260
|
+
const body = parseJsonOrFail(raw, "--body") as unknown as GigTaskSubmitRequest;
|
|
260
261
|
if (typeof body !== "object" || body === null || typeof (body as { result_data?: unknown }).result_data !== "string") {
|
|
261
262
|
throw new Error("--body must include a `result_data` string field");
|
|
262
263
|
}
|
|
@@ -296,6 +297,48 @@ export function registerRequesterCommands(program: Command): void {
|
|
|
296
297
|
return consumer.getBalance();
|
|
297
298
|
}, { human: opts.human });
|
|
298
299
|
});
|
|
300
|
+
|
|
301
|
+
program
|
|
302
|
+
.command("show <agent_id>")
|
|
303
|
+
.description("Show full agent listing including capabilities_meta")
|
|
304
|
+
.option("--capability <name>", "Print only this capability's meta entry")
|
|
305
|
+
.option("--human", "Human-readable output")
|
|
306
|
+
.action(async (agentId: string, opts) => {
|
|
307
|
+
await runCommand(async () => {
|
|
308
|
+
const { consumer } = buildContext();
|
|
309
|
+
const agent = await consumer.getAgent(agentId);
|
|
310
|
+
if (opts.capability) {
|
|
311
|
+
const meta = agent.capabilities_meta?.[opts.capability];
|
|
312
|
+
if (!meta) {
|
|
313
|
+
throw new Error(
|
|
314
|
+
`agent ${agentId} has no capabilities_meta entry for ${JSON.stringify(opts.capability)}`,
|
|
315
|
+
);
|
|
316
|
+
}
|
|
317
|
+
return meta;
|
|
318
|
+
}
|
|
319
|
+
return agent;
|
|
320
|
+
}, { human: opts.human });
|
|
321
|
+
});
|
|
322
|
+
|
|
323
|
+
program
|
|
324
|
+
.command("schema <agent_id>")
|
|
325
|
+
.description("Fetch + sha256-verify a capability's input JSON Schema")
|
|
326
|
+
.requiredOption("--capability <name>", "Capability name")
|
|
327
|
+
.option("--human", "Human-readable output")
|
|
328
|
+
.action(async (agentId: string, opts) => {
|
|
329
|
+
await runCommand(async () => {
|
|
330
|
+
const { consumer } = buildContext();
|
|
331
|
+
const agent = await consumer.getAgent(agentId);
|
|
332
|
+
try {
|
|
333
|
+
return await fetchCapabilitySchema(agent, opts.capability);
|
|
334
|
+
} catch (err) {
|
|
335
|
+
if (err instanceof CapabilitySchemaError) {
|
|
336
|
+
throw new Error(`schema fetch failed: ${err.message}`);
|
|
337
|
+
}
|
|
338
|
+
throw err;
|
|
339
|
+
}
|
|
340
|
+
}, { human: opts.human });
|
|
341
|
+
});
|
|
299
342
|
}
|
|
300
343
|
|
|
301
344
|
async function runHireRepl(
|
package/src/config.ts
CHANGED
|
@@ -2,9 +2,11 @@ import { readFileSync, writeFileSync, mkdirSync, existsSync, chmodSync } from "n
|
|
|
2
2
|
import { homedir } from "node:os";
|
|
3
3
|
import { join, dirname } from "node:path";
|
|
4
4
|
import { load as yamlLoad, dump as yamlDump } from "js-yaml";
|
|
5
|
+
import type { PricingModel } from "./types.js";
|
|
5
6
|
|
|
6
7
|
export const DEFAULT_CLOUD_URL = "https://api.linkedclaw.com";
|
|
7
8
|
export const DEFAULT_RELAY_URL = "wss://api.linkedclaw.com/ws";
|
|
9
|
+
export const DEFAULT_SERVICES_HOST_URL = "https://api.linkedclaw.com";
|
|
8
10
|
|
|
9
11
|
export interface SanitizeConfig {
|
|
10
12
|
scope?: { stripPatterns?: string[] };
|
|
@@ -14,17 +16,18 @@ export interface SanitizeConfig {
|
|
|
14
16
|
export interface ProviderConfig {
|
|
15
17
|
cloudUrl: string;
|
|
16
18
|
relayUrl: string;
|
|
19
|
+
servicesHostUrl?: string;
|
|
17
20
|
apiKey?: string;
|
|
18
21
|
agentId?: string;
|
|
19
22
|
agentName?: string;
|
|
20
23
|
slug?: string;
|
|
21
24
|
description?: string;
|
|
22
25
|
capabilities?: string[];
|
|
23
|
-
pricingModel?:
|
|
26
|
+
pricingModel?: PricingModel;
|
|
24
27
|
priceCredits?: number;
|
|
25
28
|
invokeTimeoutMs?: number;
|
|
26
29
|
sessionTurnTimeoutMs?: number;
|
|
27
|
-
|
|
30
|
+
gigTaskTimeoutMs?: number;
|
|
28
31
|
maxConcurrentRuns?: number;
|
|
29
32
|
perRequesterLimit?: number;
|
|
30
33
|
sanitize?: SanitizeConfig;
|
|
@@ -65,12 +68,18 @@ export function resolveConfig(overrides: Partial<ProviderConfig> = {}): Provider
|
|
|
65
68
|
overrides.cloudUrl ?? env["LINKEDCLAW_CLOUD_URL"] ?? file.cloudUrl ?? DEFAULT_CLOUD_URL;
|
|
66
69
|
const relayUrl =
|
|
67
70
|
overrides.relayUrl ?? env["LINKEDCLAW_RELAY_URL"] ?? file.relayUrl ?? DEFAULT_RELAY_URL;
|
|
71
|
+
const servicesHostUrl =
|
|
72
|
+
overrides.servicesHostUrl ??
|
|
73
|
+
env["LINKEDCLAW_SERVICES_HOST_URL"] ??
|
|
74
|
+
file.servicesHostUrl ??
|
|
75
|
+
cloudUrl;
|
|
68
76
|
const apiKey = overrides.apiKey ?? env["LINKEDCLAW_API_KEY"] ?? file.apiKey;
|
|
69
77
|
return {
|
|
70
78
|
...file,
|
|
71
79
|
...overrides,
|
|
72
80
|
cloudUrl,
|
|
73
81
|
relayUrl,
|
|
82
|
+
servicesHostUrl,
|
|
74
83
|
...(apiKey !== undefined ? { apiKey } : {}),
|
|
75
84
|
};
|
|
76
85
|
}
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
import { LinkedClawError } from "../errors.js";
|
|
2
|
+
import type {
|
|
3
|
+
AgentListing,
|
|
4
|
+
CommonsLogEvent,
|
|
5
|
+
CruxDecisionRequest,
|
|
6
|
+
CruxDecisionResponse,
|
|
7
|
+
DebateRecord,
|
|
8
|
+
MandateRecord,
|
|
9
|
+
} from "./types.js";
|
|
10
|
+
|
|
11
|
+
interface FetchError extends Error {
|
|
12
|
+
code: number;
|
|
13
|
+
body: unknown;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
function makeFetchError(code: number, body: unknown): FetchError {
|
|
17
|
+
const err = new LinkedClawError(`api_${code}`, `HTTP ${code}`) as unknown as FetchError;
|
|
18
|
+
(err as any).code = code;
|
|
19
|
+
(err as any).body = body;
|
|
20
|
+
return err;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
/**
 * Build a small typed client for the convergence-related cloud endpoints.
 *
 * @param cloudUrl Base URL of the cloud API; a trailing slash is tolerated.
 * @param apiKey   Bearer token sent on every request.
 * @returns An object of async methods, each a thin wrapper over one endpoint.
 *          Non-2xx responses surface as the FetchError built by makeFetchError.
 */
export function makeConvergeApi(cloudUrl: string, apiKey: string) {
  // Shared fetch helper: JSON in/out, bearer auth, path appended to cloudUrl.
  async function apiFetch(path: string, opts: RequestInit = {}): Promise<unknown> {
    const url = cloudUrl.replace(/\/$/, "") + path;
    const res = await fetch(url, {
      ...opts,
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`,
        // Caller-supplied headers win over the defaults above.
        ...(opts.headers ?? {}),
      },
    });
    let body: unknown;
    try {
      body = await res.json();
    } catch {
      // Non-JSON (or empty) response body: treat as null rather than failing.
      body = null;
    }
    if (!res.ok) throw makeFetchError(res.status, body);
    return body;
  }

  return {
    /** Fetch a single debate record by id. */
    async getDebate(debateId: string): Promise<DebateRecord> {
      return apiFetch(`/api/v1/debates/${debateId}`) as Promise<DebateRecord>;
    },

    /**
     * List commons-log events for `cid`, starting at `opts.offset`
     * (default 0) and returning up to `opts.limit` (default 1000) events.
     */
    async getCommonsLogEvents(
      cid: string,
      opts: { offset?: number; limit?: number } = {},
    ): Promise<{ events: CommonsLogEvent[]; next_offset: number }> {
      // The cloud serializes events with `event_type` inside `payload` only
      // (commons_logs.py:464). We hoist it onto the envelope here so callers
      // can match on `ev.event_type` without reaching into payload every time.
      // Also: server caps `limit` at 1000 — when the caller asks for more,
      // page transparently and concatenate.
      const requested = opts.limit ?? 1000;
      const PAGE = 1000;
      const offsetStart = opts.offset ?? 0;
      let collected: CommonsLogEvent[] = [];
      let cursor = offsetStart;
      while (collected.length < requested) {
        const params = new URLSearchParams();
        params.set("offset", String(cursor));
        // Never request more than one server page, or more than we still need.
        params.set("limit", String(Math.min(PAGE, requested - collected.length)));
        const page = (await apiFetch(
          `/api/v1/commons-logs/${cid}/events?${params}`,
        )) as { events: Array<Omit<CommonsLogEvent, "event_type"> & { event_type?: string }>; next_offset: number };
        const hoisted: CommonsLogEvent[] = page.events.map((e) => ({
          ...e,
          // Prefer an envelope-level event_type if the server ever adds one.
          event_type: e.event_type ?? (e.payload as { event_type?: string })?.event_type ?? "",
        }));
        collected = collected.concat(hoisted);
        // Stop when the server has nothing more (empty page or stalled cursor);
        // the stalled-cursor check also guards against an infinite loop.
        if (page.events.length === 0 || page.next_offset === cursor) break;
        cursor = page.next_offset;
      }
      return { events: collected, next_offset: cursor };
    },

    /**
     * Find the agent providing convergence_synthesizer.v1.
     * Accepts either a bare array or an `{ agents: [...] }` envelope.
     * @throws LinkedClawError("pa_not_found") when no such agent exists.
     */
    async discoverPaAgentId(): Promise<string> {
      const result = (await apiFetch(
        "/api/v1/agents?capability=convergence_synthesizer.v1",
      )) as { agents?: AgentListing[] } | AgentListing[];
      const listings: AgentListing[] = Array.isArray(result)
        ? result
        : (result as any).agents ?? [];
      if (listings.length === 0) {
        throw new LinkedClawError("pa_not_found", "No agent found with capability convergence_synthesizer.v1");
      }
      // First match wins — NOTE(review): assumes server ordering is acceptable.
      return listings[0].agent_id;
    },

    /**
     * Return an existing live generalized mandate from `principalAgentId` to
     * `delegateAgentId` covering every scope in `requiredScopes`, or null.
     */
    async findExistingMandate(
      principalAgentId: string,
      delegateAgentId: string,
      requiredScopes: string[],
    ): Promise<MandateRecord | null> {
      // Server-side GET /api/v1/mandates lists all of the caller's mandates;
      // it does not filter by agent. We filter client-side on principal_agent_id
      // so we don't reuse a mandate scoped to a different agent of the same user.
      const result = (await apiFetch(`/api/v1/mandates?kind=generalized`)) as
        | { mandates?: MandateRecord[] }
        | MandateRecord[];
      const list: MandateRecord[] = Array.isArray(result)
        ? result
        : (result as any).mandates ?? [];
      const required = new Set(requiredScopes);
      const now = Date.now();
      for (const m of list) {
        if (m.principal_agent_id !== principalAgentId) continue;
        if (m.delegate_agent_id !== delegateAgentId) continue;
        if (m.revoked_at) continue;
        // Skip mandates that have already expired (expires_at in the past).
        if (m.expires_at && new Date(m.expires_at).getTime() <= now) continue;
        // Every required scope must be present on the mandate.
        if (![...required].every((s) => m.scope.includes(s))) continue;
        return m;
      }
      return null;
    },

    /** Issue a new mandate; `expiresAt` is optional and sent only if given. */
    async issueMandate(
      principalAgentId: string,
      delegateAgentId: string,
      scopes: string[],
      expiresAt?: string,
    ): Promise<MandateRecord> {
      return apiFetch("/api/v1/mandates", {
        method: "POST",
        body: JSON.stringify({
          principal_agent_id: principalAgentId,
          delegate_agent_id: delegateAgentId,
          scope: scopes,
          ...(expiresAt ? { expires_at: expiresAt } : {}),
        }),
      }) as Promise<MandateRecord>;
    },

    /** Start a convergence run seeded from an existing debate. */
    async startRun(sourceDebateId: string): Promise<{ run_id: string; commons_log_id: string }> {
      return apiFetch("/api/v1/convergence/runs", {
        method: "POST",
        body: JSON.stringify({ source_debate_id: sourceDebateId }),
      }) as Promise<{ run_id: string; commons_log_id: string }>;
    },

    /** Fetch the current state of a convergence run. */
    async getRun(runId: string): Promise<{
      run_id: string;
      source_debate_id: string;
      agent_a_id: string;
      agent_b_id: string;
      pa_agent_id: string;
      status: string;
    }> {
      return apiFetch(`/api/v1/convergence/runs/${runId}`) as Promise<{
        run_id: string;
        source_debate_id: string;
        agent_a_id: string;
        agent_b_id: string;
        pa_agent_id: string;
        status: string;
      }>;
    },

    /** Record owner B's acceptance of the run. */
    async acceptOwnerB(runId: string): Promise<{ ok: boolean }> {
      return apiFetch(`/api/v1/convergence/runs/${runId}/owner_b_accept`, {
        method: "POST",
      }) as Promise<{ ok: boolean }>;
    },

    /** Append one event to a commons log; returns the assigned sequence number. */
    async appendCommonsLog(
      cid: string,
      eventType: string,
      payload: Record<string, unknown>,
    ): Promise<{ seq: number }> {
      return apiFetch(`/api/v1/commons-logs/${cid}/append`, {
        method: "POST",
        body: JSON.stringify({ event_type: eventType, payload }),
      }) as Promise<{ seq: number }>;
    },

    /** Accept a crux decision on a run. */
    async acceptCruxDecision(
      runId: string,
      cruxId: string,
      body: CruxDecisionRequest,
    ): Promise<CruxDecisionResponse> {
      return apiFetch(`/api/v1/convergence/runs/${runId}/cruxes/${cruxId}/accept`, {
        method: "POST",
        body: JSON.stringify(body),
      }) as Promise<CruxDecisionResponse>;
    },

    /** Reject a crux decision on a run. */
    async rejectCruxDecision(
      runId: string,
      cruxId: string,
      body: CruxDecisionRequest,
    ): Promise<CruxDecisionResponse> {
      return apiFetch(`/api/v1/convergence/runs/${runId}/cruxes/${cruxId}/reject`, {
        method: "POST",
        body: JSON.stringify(body),
      }) as Promise<CruxDecisionResponse>;
    },

    /** Attest a crux decision on a run. */
    async attestCruxDecision(
      runId: string,
      cruxId: string,
      body: CruxDecisionRequest,
    ): Promise<CruxDecisionResponse> {
      return apiFetch(`/api/v1/convergence/runs/${runId}/cruxes/${cruxId}/attest`, {
        method: "POST",
        body: JSON.stringify(body),
      }) as Promise<CruxDecisionResponse>;
    },
  };
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
|
|
3
|
+
// Encode a string as a JSON string literal with non-ASCII chars escaped as \uXXXX
|
|
4
|
+
// to match Python json.dumps(ensure_ascii=True) output.
|
|
5
|
+
function encodeString(s: string): string {
|
|
6
|
+
let out = '"';
|
|
7
|
+
for (let i = 0; i < s.length; i++) {
|
|
8
|
+
const cp = s.charCodeAt(i);
|
|
9
|
+
if (cp === 0x22) out += '\\"';
|
|
10
|
+
else if (cp === 0x5c) out += "\\\\";
|
|
11
|
+
else if (cp === 0x08) out += "\\b";
|
|
12
|
+
else if (cp === 0x09) out += "\\t";
|
|
13
|
+
else if (cp === 0x0a) out += "\\n";
|
|
14
|
+
else if (cp === 0x0c) out += "\\f";
|
|
15
|
+
else if (cp === 0x0d) out += "\\r";
|
|
16
|
+
else if (cp < 0x20 || cp > 0x7e) out += `\\u${cp.toString(16).padStart(4, "0")}`;
|
|
17
|
+
else out += s[i];
|
|
18
|
+
}
|
|
19
|
+
return out + '"';
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export function canonicalize(value: unknown): string {
|
|
23
|
+
if (value === null) return "null";
|
|
24
|
+
if (typeof value === "string") return encodeString(value);
|
|
25
|
+
if (typeof value !== "object") return JSON.stringify(value);
|
|
26
|
+
if (Array.isArray(value)) return "[" + value.map(canonicalize).join(",") + "]";
|
|
27
|
+
const keys = Object.keys(value as Record<string, unknown>).sort();
|
|
28
|
+
return "{" + keys.map((k) => encodeString(k) + ":" + canonicalize((value as any)[k])).join(",") + "}";
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export function sha256OfCanonicalJson(value: unknown): string {
|
|
32
|
+
const h = createHash("sha256");
|
|
33
|
+
h.update(canonicalize(value));
|
|
34
|
+
return "sha256:" + h.digest("hex");
|
|
35
|
+
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { closeSync, openSync, unlinkSync, writeSync } from "node:fs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { LinkedClawError } from "../errors.js";
|
|
4
|
+
|
|
5
|
+
const LOCK_FILENAME = ".lock";
|
|
6
|
+
|
|
7
|
+
export function acquireLock(stagingDir: string): () => void {
|
|
8
|
+
const path = join(stagingDir, LOCK_FILENAME);
|
|
9
|
+
let fd: number;
|
|
10
|
+
try {
|
|
11
|
+
fd = openSync(path, "wx");
|
|
12
|
+
} catch (e: any) {
|
|
13
|
+
if (e.code === "EEXIST") {
|
|
14
|
+
throw new LinkedClawError(
|
|
15
|
+
"lock_held",
|
|
16
|
+
`Lock held at ${path}. If no other run/accept is in progress, delete ${path} to recover.`,
|
|
17
|
+
);
|
|
18
|
+
}
|
|
19
|
+
throw e;
|
|
20
|
+
}
|
|
21
|
+
writeSync(fd, JSON.stringify({ pid: process.pid }));
|
|
22
|
+
closeSync(fd);
|
|
23
|
+
return () => {
|
|
24
|
+
try {
|
|
25
|
+
unlinkSync(path);
|
|
26
|
+
} catch {
|
|
27
|
+
// ignore
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, readdirSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { dirname, join } from "node:path";
|
|
4
|
+
import { load as yamlLoad, dump as yamlDump } from "js-yaml";
|
|
5
|
+
|
|
6
|
+
// YAML frontmatter carried at the top of a staged crux markdown file.
// Field semantics below are partly inferred from names — confirm against
// the converge command implementations before relying on them.
export interface StagingFrontmatter {
  debate_id: string;
  run_id: string;
  crux_id: string;
  // Chain of sub-debate ids; presumably ordered oldest-first — TODO confirm.
  sub_debate_chain: string[];
  latest_sub_debate_id: string | null;
  source_crux_map_hash: string;
  generation_id: string;
  generated_at: string;
  // Hash over the generated body; computed by computePaBodyHash ("sha256:<hex>").
  pa_body_hash: string;
  outcome: "converged" | "partial_overlap" | "needs_input" | "irreconcilable" | "already_aligned";
  bilateral_mandate_intact: boolean;
  citations_a: Array<Record<string, unknown>>;
  citations_b: Array<Record<string, unknown>>;
  mod_progress_summary: Record<string, unknown>;
  attested_by_user: boolean;
  provenance?: Record<string, unknown>;
}

// A parsed staging document: frontmatter, the user's free-text response
// (stored in YAML as `_user_response` and stripped from the frontmatter by
// parseStaging), and the markdown body after the closing `---`.
export interface StagingDoc {
  frontmatter: StagingFrontmatter;
  userResponse: string;
  body: string;
}
|
|
30
|
+
|
|
31
|
+
export function stagingPathFor(stagingDir: string, cruxId: string): string {
|
|
32
|
+
return join(stagingDir, `${cruxId}.md`);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export function listCruxFiles(stagingDir: string): string[] {
|
|
36
|
+
if (!existsSync(stagingDir)) return [];
|
|
37
|
+
return readdirSync(stagingDir).filter(
|
|
38
|
+
(f) => f.endsWith(".md") && !f.startsWith("."),
|
|
39
|
+
);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
 * Parse a staging markdown document of the form:
 *   ---\n<yaml frontmatter>\n---\n<body>
 * The YAML key `_user_response` (if a string) is extracted into
 * `userResponse` and removed from the returned frontmatter.
 *
 * @throws Error when the document does not start with "---\n", has no
 *         closing "---" line, or the frontmatter is not a YAML mapping.
 */
export function parseStaging(text: string): StagingDoc {
  if (!text.startsWith("---\n")) {
    throw new Error("Missing YAML frontmatter: document must start with ---\\n");
  }
  // Search from index 4 (past the opening "---\n") for the closing delimiter.
  const endIdx = text.indexOf("\n---\n", 4);
  if (endIdx === -1) {
    throw new Error("Malformed frontmatter: no closing ---");
  }
  const yamlText = text.slice(4, endIdx);
  // +5 skips the "\n---\n" delimiter itself (5 characters).
  const body = text.slice(endIdx + 5);
  const raw = yamlLoad(yamlText) as Record<string, unknown>;
  if (!raw || typeof raw !== "object") {
    throw new Error("Frontmatter parsed to non-object");
  }
  // Non-string (or absent) _user_response degrades to "" rather than failing.
  const userResponse = typeof raw._user_response === "string" ? raw._user_response : "";
  delete raw._user_response;
  return {
    frontmatter: raw as unknown as StagingFrontmatter,
    userResponse,
    body,
  };
}
|
|
64
|
+
|
|
65
|
+
export function dumpStaging(doc: StagingDoc): string {
|
|
66
|
+
const fmRaw: Record<string, unknown> = { ...doc.frontmatter };
|
|
67
|
+
fmRaw._user_response = doc.userResponse ?? "";
|
|
68
|
+
const yamlText = yamlDump(fmRaw, { lineWidth: -1, sortKeys: false });
|
|
69
|
+
return `---\n${yamlText}---\n${doc.body}`;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export function readStaging(path: string): StagingDoc {
|
|
73
|
+
return parseStaging(readFileSync(path, "utf8"));
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
export function writeStaging(path: string, doc: StagingDoc): void {
|
|
77
|
+
mkdirSync(dirname(path), { recursive: true });
|
|
78
|
+
writeFileSync(path, dumpStaging(doc), "utf8");
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
export function computePaBodyHash(body: string): string {
|
|
82
|
+
return "sha256:" + createHash("sha256").update(Buffer.from(body, "utf8")).digest("hex");
|
|
83
|
+
}
|