agent-relay 3.2.17 → 3.2.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/agent-relay-broker-darwin-arm64 +0 -0
- package/bin/agent-relay-broker-darwin-x64 +0 -0
- package/bin/agent-relay-broker-linux-arm64 +0 -0
- package/bin/agent-relay-broker-linux-x64 +0 -0
- package/dist/index.cjs +86 -43
- package/dist/src/cli/commands/cloud.d.ts +1 -9
- package/dist/src/cli/commands/cloud.d.ts.map +1 -1
- package/dist/src/cli/commands/cloud.js +326 -323
- package/dist/src/cli/commands/cloud.js.map +1 -1
- package/dist/src/cli/commands/connect.d.ts.map +1 -1
- package/dist/src/cli/commands/connect.js +6 -10
- package/dist/src/cli/commands/connect.js.map +1 -1
- package/package.json +16 -10
- package/packages/acp-bridge/package.json +2 -2
- package/packages/brand/README.md +36 -0
- package/packages/brand/brand.css +226 -0
- package/packages/brand/package.json +20 -0
- package/packages/cloud/dist/api-client.d.ts +33 -0
- package/packages/cloud/dist/api-client.d.ts.map +1 -0
- package/packages/cloud/dist/api-client.js +123 -0
- package/packages/cloud/dist/api-client.js.map +1 -0
- package/packages/cloud/dist/auth.d.ts +13 -0
- package/packages/cloud/dist/auth.d.ts.map +1 -0
- package/packages/cloud/dist/auth.js +248 -0
- package/packages/cloud/dist/auth.js.map +1 -0
- package/packages/cloud/dist/index.d.ts +5 -0
- package/packages/cloud/dist/index.d.ts.map +1 -0
- package/packages/cloud/dist/index.js +5 -0
- package/packages/cloud/dist/index.js.map +1 -0
- package/packages/cloud/dist/types.d.ts +73 -0
- package/packages/cloud/dist/types.d.ts.map +1 -0
- package/packages/cloud/dist/types.js +19 -0
- package/packages/cloud/dist/types.js.map +1 -0
- package/packages/cloud/dist/workflows.d.ts +34 -0
- package/packages/cloud/dist/workflows.d.ts.map +1 -0
- package/packages/cloud/dist/workflows.js +389 -0
- package/packages/cloud/dist/workflows.js.map +1 -0
- package/packages/cloud/package.json +44 -0
- package/packages/cloud/src/api-client.ts +169 -0
- package/packages/cloud/src/auth.ts +314 -0
- package/packages/cloud/src/index.ts +41 -0
- package/packages/cloud/src/types.ts +97 -0
- package/packages/cloud/src/workflows.ts +539 -0
- package/packages/cloud/tsconfig.json +21 -0
- package/packages/config/package.json +1 -1
- package/packages/hooks/package.json +4 -4
- package/packages/memory/package.json +2 -2
- package/packages/openclaw/package.json +2 -2
- package/packages/policy/package.json +2 -2
- package/packages/sdk/dist/workflows/__tests__/e2big-and-verify.test.d.ts +2 -0
- package/packages/sdk/dist/workflows/__tests__/e2big-and-verify.test.d.ts.map +1 -0
- package/packages/sdk/dist/workflows/__tests__/e2big-and-verify.test.js +62 -0
- package/packages/sdk/dist/workflows/__tests__/e2big-and-verify.test.js.map +1 -0
- package/packages/sdk/dist/workflows/runner.d.ts +4 -0
- package/packages/sdk/dist/workflows/runner.d.ts.map +1 -1
- package/packages/sdk/dist/workflows/runner.js +76 -39
- package/packages/sdk/dist/workflows/runner.js.map +1 -1
- package/packages/sdk/package.json +2 -2
- package/packages/sdk/src/__tests__/workflow-runner.test.ts +73 -2
- package/packages/sdk/src/workflows/__tests__/e2big-and-verify.test.ts +117 -0
- package/packages/sdk/src/workflows/runner.ts +105 -38
- package/packages/sdk-py/pyproject.toml +1 -1
- package/packages/sdk-swift/Sources/AgentRelaySDK/RelayObserver.swift +2 -0
- package/packages/telemetry/package.json +1 -1
- package/packages/trajectory/package.json +2 -2
- package/packages/user-directory/package.json +2 -2
- package/packages/utils/package.json +2 -2
|
@@ -0,0 +1,539 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
|
|
5
|
+
import ignore from "ignore";
|
|
6
|
+
import * as tar from "tar";
|
|
7
|
+
|
|
8
|
+
import { ensureAuthenticated, authorizedApiFetch } from "./auth.js";
|
|
9
|
+
import { defaultApiUrl, type WorkflowFileType, type RunWorkflowResponse, type WorkflowLogsResponse, type SyncPatchResponse } from "./types.js";
|
|
10
|
+
|
|
11
|
+
type ResolvedWorkflowInput = {
|
|
12
|
+
workflow: string;
|
|
13
|
+
fileType: WorkflowFileType;
|
|
14
|
+
sourceFileType?: WorkflowFileType;
|
|
15
|
+
};
|
|
16
|
+
|
|
17
|
+
type S3Credentials = {
|
|
18
|
+
accessKeyId: string;
|
|
19
|
+
secretAccessKey: string;
|
|
20
|
+
sessionToken: string;
|
|
21
|
+
bucket: string;
|
|
22
|
+
prefix: string;
|
|
23
|
+
};
|
|
24
|
+
|
|
25
|
+
type PrepareWorkflowResponse = {
|
|
26
|
+
runId: string;
|
|
27
|
+
s3Credentials: S3Credentials;
|
|
28
|
+
s3CodeKey: string;
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
type RunWorkflowOptions = {
|
|
32
|
+
apiUrl?: string;
|
|
33
|
+
fileType?: WorkflowFileType;
|
|
34
|
+
syncCode?: boolean;
|
|
35
|
+
};
|
|
36
|
+
|
|
37
|
+
const CODE_SYNC_EXCLUDES = [
|
|
38
|
+
".git",
|
|
39
|
+
"node_modules",
|
|
40
|
+
".sst",
|
|
41
|
+
".next",
|
|
42
|
+
".open-next",
|
|
43
|
+
".env",
|
|
44
|
+
".env.*",
|
|
45
|
+
".env.local",
|
|
46
|
+
".env.production",
|
|
47
|
+
"*.pem",
|
|
48
|
+
"*.key",
|
|
49
|
+
"credentials.json",
|
|
50
|
+
".aws",
|
|
51
|
+
".ssh",
|
|
52
|
+
];
|
|
53
|
+
|
|
54
|
+
function validateYamlWorkflow(content: string): void {
|
|
55
|
+
const hasField = (field: string) =>
|
|
56
|
+
new RegExp(`^${field}\\s*:`, "m").test(content);
|
|
57
|
+
|
|
58
|
+
if (!hasField("version")) {
|
|
59
|
+
throw new Error('missing required field "version"');
|
|
60
|
+
}
|
|
61
|
+
if (!hasField("swarm")) {
|
|
62
|
+
throw new Error('missing required field "swarm"');
|
|
63
|
+
}
|
|
64
|
+
if (!hasField("agents")) {
|
|
65
|
+
throw new Error('missing required field "agents"');
|
|
66
|
+
}
|
|
67
|
+
if (!hasField("workflows")) {
|
|
68
|
+
throw new Error('missing required field "workflows"');
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
async function validateTypeScriptWorkflow(content: string): Promise<void> {
|
|
73
|
+
try {
|
|
74
|
+
const { execSync } = await import("node:child_process");
|
|
75
|
+
execSync("npx --yes esbuild --bundle=false --format=esm --loader=ts", {
|
|
76
|
+
input: content,
|
|
77
|
+
encoding: "utf-8",
|
|
78
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
79
|
+
timeout: 30000,
|
|
80
|
+
});
|
|
81
|
+
} catch (error) {
|
|
82
|
+
const err = error as { status?: number; killed?: boolean; stderr?: unknown };
|
|
83
|
+
if (err.killed || !err.status) {
|
|
84
|
+
console.error("TypeScript validation skipped: esbuild not available or timed out");
|
|
85
|
+
return;
|
|
86
|
+
}
|
|
87
|
+
const stderr = typeof err.stderr === "string" ? err.stderr.trim() : "";
|
|
88
|
+
const message = stderr || "TypeScript validation failed";
|
|
89
|
+
throw new Error(`Workflow file has syntax errors:\n${message}`);
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
export function inferWorkflowFileType(filePath: string): WorkflowFileType | null {
|
|
94
|
+
const ext = path.extname(filePath).toLowerCase();
|
|
95
|
+
switch (ext) {
|
|
96
|
+
case ".yaml":
|
|
97
|
+
case ".yml":
|
|
98
|
+
return "yaml";
|
|
99
|
+
case ".ts":
|
|
100
|
+
case ".mts":
|
|
101
|
+
case ".cts":
|
|
102
|
+
return "ts";
|
|
103
|
+
case ".py":
|
|
104
|
+
return "py";
|
|
105
|
+
default:
|
|
106
|
+
return null;
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
export function shouldSyncCodeByDefault(
|
|
111
|
+
_workflowArg: string,
|
|
112
|
+
_explicitFileType?: WorkflowFileType,
|
|
113
|
+
): boolean {
|
|
114
|
+
return true;
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
export async function resolveWorkflowInput(
|
|
118
|
+
workflowArg: string,
|
|
119
|
+
explicitFileType?: WorkflowFileType,
|
|
120
|
+
): Promise<ResolvedWorkflowInput> {
|
|
121
|
+
const looksLikeFile = path.isAbsolute(workflowArg) ||
|
|
122
|
+
workflowArg.includes(path.sep) ||
|
|
123
|
+
inferWorkflowFileType(workflowArg) !== null;
|
|
124
|
+
|
|
125
|
+
try {
|
|
126
|
+
const workflow = await fs.readFile(workflowArg, "utf-8");
|
|
127
|
+
const fileType = explicitFileType ?? inferWorkflowFileType(workflowArg);
|
|
128
|
+
if (!fileType) {
|
|
129
|
+
throw new Error(`Could not infer workflow type from ${workflowArg}. Use --file-type.`);
|
|
130
|
+
}
|
|
131
|
+
return { workflow, fileType };
|
|
132
|
+
} catch (error) {
|
|
133
|
+
const err = error as NodeJS.ErrnoException;
|
|
134
|
+
if (err.code === "EISDIR") {
|
|
135
|
+
throw new Error(`Workflow path is not a file: ${workflowArg}`);
|
|
136
|
+
}
|
|
137
|
+
if (!isMissingFileError(error)) {
|
|
138
|
+
throw error;
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
if (looksLikeFile) {
|
|
143
|
+
throw new Error(`Workflow file not found: ${workflowArg}`);
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
return {
|
|
147
|
+
workflow: workflowArg,
|
|
148
|
+
fileType: explicitFileType ?? "yaml",
|
|
149
|
+
};
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
/**
 * Launch a workflow run against the cloud API.
 *
 * Pipeline: authenticate -> resolve the workflow argument (file path or
 * inline content) -> validate syntax (ts via esbuild, yaml via a field
 * scan) -> optionally prepare the run, tar the cwd, and upload it to S3
 * with scoped credentials -> POST /api/v1/workflows/run.
 *
 * Progress messages go to stderr so stdout stays clean for consumers.
 *
 * @param workflowArg Path to a workflow file, or inline workflow content.
 * @param options apiUrl/fileType overrides; syncCode to force or skip the
 *        code upload (defaults to shouldSyncCodeByDefault, currently true).
 * @returns The API's run descriptor ({ runId, status, ... }).
 * @throws Error on validation failure, a non-OK prepare/run response, or a
 *         malformed response body.
 */
export async function runWorkflow(
  workflowArg: string,
  options: RunWorkflowOptions = {},
): Promise<RunWorkflowResponse> {
  const apiUrl = options.apiUrl ?? defaultApiUrl();
  // `auth` is re-assigned after each authorizedApiFetch so a refreshed
  // token from one call is carried into the next.
  let auth = await ensureAuthenticated(apiUrl);
  const input = await resolveWorkflowInput(workflowArg, options.fileType);

  // Fail fast on syntax errors before any network or upload work.
  // NOTE(review): "py" workflows are sent unvalidated — presumably checked
  // server-side; confirm.
  if (input.fileType === "ts") {
    await validateTypeScriptWorkflow(input.workflow);
  } else if (input.fileType === "yaml") {
    console.error("Validating workflow...");
    validateYamlWorkflow(input.workflow);
  }

  const syncCode = options.syncCode ?? shouldSyncCodeByDefault(workflowArg, options.fileType);
  const requestBody: Record<string, unknown> = {
    workflow: input.workflow,
    fileType: input.fileType,
  };
  if (input.sourceFileType) {
    requestBody.sourceFileType = input.sourceFileType;
  }

  if (syncCode) {
    // Phase 1: ask the API for a run id plus short-lived, prefix-scoped S3
    // credentials to upload the working directory under.
    const t0 = Date.now();
    console.error("Preparing run...");
    const { response: prepResponse, auth: prepAuth } = await authorizedApiFetch(auth, "/api/v1/workflows/prepare", {
      method: "POST",
      headers: { Accept: "application/json" },
    });
    auth = prepAuth;

    const prepPayload = await readJsonResponse(prepResponse);
    if (!prepResponse.ok) {
      throw new Error(`Workflow prepare failed: ${describeResponseError(prepResponse, prepPayload)}`);
    }

    if (!isPrepareWorkflowResponse(prepPayload)) {
      throw new Error("Workflow prepare response was not valid JSON.");
    }

    const prepared = prepPayload;
    console.error(` Prepared in ${((Date.now() - t0) / 1000).toFixed(1)}s`);

    // Phase 2: gzip-tar the current working directory (git-tracked files
    // when possible, otherwise a .gitignore-filtered walk).
    const t1 = Date.now();
    console.error("Creating tarball...");
    const s3Client = createScopedS3Client(prepared.s3Credentials);
    const tarball = await createTarball(process.cwd());
    console.error(` Tarball: ${(tarball.length / 1024).toFixed(0)}KB in ${((Date.now() - t1) / 1000).toFixed(1)}s`);

    // Phase 3: upload under the credential prefix the API scoped us to.
    const t2 = Date.now();
    console.error("Uploading to S3...");
    const key = scopedCodeKey(prepared.s3Credentials.prefix, prepared.s3CodeKey);
    await s3Client.send(
      new PutObjectCommand({
        Bucket: prepared.s3Credentials.bucket,
        Key: key,
        Body: tarball,
        ContentType: "application/gzip",
      }),
    );
    console.error(` Uploaded in ${((Date.now() - t2) / 1000).toFixed(1)}s`);

    // The run request must reference the prepared run and uploaded code.
    requestBody.runId = prepared.runId;
    requestBody.s3CodeKey = prepared.s3CodeKey;
  }

  const t3 = Date.now();
  console.error("Launching workflow...");
  const { response, auth: updatedAuth } = await authorizedApiFetch(
    auth,
    "/api/v1/workflows/run",
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Accept: "application/json",
      },
      body: JSON.stringify(requestBody),
    },
  );
  auth = updatedAuth;

  console.error(` Launched in ${((Date.now() - t3) / 1000).toFixed(1)}s`);

  const payload = await readJsonResponse(response);
  if (!response.ok) {
    throw new Error(`Workflow run failed: ${describeResponseError(response, payload)}`);
  }

  // Minimal shape check: a run descriptor must carry runId and status.
  if (
    !payload ||
    typeof payload !== "object" ||
    typeof (payload as { runId?: unknown }).runId !== "string" ||
    typeof (payload as { status?: unknown }).status !== "string"
  ) {
    throw new Error("Workflow run response was not valid JSON.");
  }

  return payload as RunWorkflowResponse;
}
|
|
254
|
+
|
|
255
|
+
export async function getRunStatus(
|
|
256
|
+
runId: string,
|
|
257
|
+
options: { apiUrl?: string } = {},
|
|
258
|
+
): Promise<Record<string, unknown>> {
|
|
259
|
+
const apiUrl = options.apiUrl ?? defaultApiUrl();
|
|
260
|
+
const auth = await ensureAuthenticated(apiUrl);
|
|
261
|
+
const { response } = await authorizedApiFetch(
|
|
262
|
+
auth,
|
|
263
|
+
`/api/v1/workflows/runs/${encodeURIComponent(runId)}`,
|
|
264
|
+
{
|
|
265
|
+
headers: { Accept: "application/json" },
|
|
266
|
+
},
|
|
267
|
+
);
|
|
268
|
+
|
|
269
|
+
const payload = await readJsonResponse(response);
|
|
270
|
+
if (!response.ok) {
|
|
271
|
+
throw new Error(`Status request failed: ${describeResponseError(response, payload)}`);
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
if (!payload || typeof payload !== "object" || Array.isArray(payload)) {
|
|
275
|
+
throw new Error("Status response was not valid JSON.");
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
return payload as Record<string, unknown>;
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
export async function cancelWorkflow(
|
|
282
|
+
runId: string,
|
|
283
|
+
options: { apiUrl?: string } = {},
|
|
284
|
+
): Promise<{ runId: string; status: string }> {
|
|
285
|
+
const apiUrl = options.apiUrl ?? defaultApiUrl();
|
|
286
|
+
const auth = await ensureAuthenticated(apiUrl);
|
|
287
|
+
const { response } = await authorizedApiFetch(
|
|
288
|
+
auth,
|
|
289
|
+
`/api/v1/workflows/runs/${encodeURIComponent(runId)}/cancel`,
|
|
290
|
+
{
|
|
291
|
+
method: "POST",
|
|
292
|
+
headers: { Accept: "application/json" },
|
|
293
|
+
},
|
|
294
|
+
);
|
|
295
|
+
|
|
296
|
+
const payload = await readJsonResponse(response);
|
|
297
|
+
if (!response.ok) {
|
|
298
|
+
throw new Error(`Cancel failed: ${describeResponseError(response, payload)}`);
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
if (!payload || typeof payload !== "object" || Array.isArray(payload)) {
|
|
302
|
+
throw new Error("Cancel response was not valid JSON.");
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
return payload as { runId: string; status: string };
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
export async function getRunLogs(
|
|
309
|
+
runId: string,
|
|
310
|
+
options: {
|
|
311
|
+
apiUrl?: string;
|
|
312
|
+
offset?: number;
|
|
313
|
+
sandboxId?: string;
|
|
314
|
+
} = {},
|
|
315
|
+
): Promise<WorkflowLogsResponse> {
|
|
316
|
+
const apiUrl = options.apiUrl ?? defaultApiUrl();
|
|
317
|
+
const auth = await ensureAuthenticated(apiUrl);
|
|
318
|
+
const searchParams = new URLSearchParams();
|
|
319
|
+
if (typeof options.offset === "number") {
|
|
320
|
+
searchParams.set("offset", String(options.offset));
|
|
321
|
+
}
|
|
322
|
+
if (options.sandboxId) {
|
|
323
|
+
searchParams.set("sandboxId", options.sandboxId);
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
const requestPath = `/api/v1/workflows/runs/${encodeURIComponent(runId)}/logs${searchParams.size ? `?${searchParams.toString()}` : ""}`;
|
|
327
|
+
|
|
328
|
+
const { response } = await authorizedApiFetch(auth, requestPath, {
|
|
329
|
+
headers: { Accept: "application/json" },
|
|
330
|
+
});
|
|
331
|
+
|
|
332
|
+
const payload = await readJsonResponse(response);
|
|
333
|
+
if (!response.ok) {
|
|
334
|
+
throw new Error(`Log request failed: ${describeResponseError(response, payload)}`);
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
if (
|
|
338
|
+
!payload ||
|
|
339
|
+
typeof payload !== "object" ||
|
|
340
|
+
typeof (payload as { content?: unknown }).content !== "string" ||
|
|
341
|
+
typeof (payload as { offset?: unknown }).offset !== "number" ||
|
|
342
|
+
typeof (payload as { totalSize?: unknown }).totalSize !== "number" ||
|
|
343
|
+
typeof (payload as { done?: unknown }).done !== "boolean"
|
|
344
|
+
) {
|
|
345
|
+
throw new Error("Log response was not valid JSON.");
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
return payload as WorkflowLogsResponse;
|
|
349
|
+
}
|
|
350
|
+
|
|
351
|
+
export async function syncWorkflowPatch(
|
|
352
|
+
runId: string,
|
|
353
|
+
options: { apiUrl?: string } = {},
|
|
354
|
+
): Promise<SyncPatchResponse> {
|
|
355
|
+
const apiUrl = options.apiUrl ?? defaultApiUrl();
|
|
356
|
+
let auth = await ensureAuthenticated(apiUrl);
|
|
357
|
+
|
|
358
|
+
// Verify the run is completed
|
|
359
|
+
const { response: statusResponse, auth: a1 } = await authorizedApiFetch(
|
|
360
|
+
auth,
|
|
361
|
+
`/api/v1/workflows/runs/${encodeURIComponent(runId)}`,
|
|
362
|
+
{ headers: { Accept: "application/json" } },
|
|
363
|
+
);
|
|
364
|
+
auth = a1;
|
|
365
|
+
|
|
366
|
+
if (!statusResponse.ok) {
|
|
367
|
+
const payload = await readJsonResponse(statusResponse);
|
|
368
|
+
throw new Error(`Failed to fetch run status: ${describeResponseError(statusResponse, payload)}`);
|
|
369
|
+
}
|
|
370
|
+
|
|
371
|
+
const runData = (await statusResponse.json()) as { status?: string };
|
|
372
|
+
if (runData.status !== "completed" && runData.status !== "failed" && runData.status !== "cancelled") {
|
|
373
|
+
throw new Error(`Run is still ${runData.status ?? "unknown"}. Wait for completion before syncing.`);
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
// Download the patch
|
|
377
|
+
const { response } = await authorizedApiFetch(
|
|
378
|
+
auth,
|
|
379
|
+
`/api/v1/workflows/runs/${encodeURIComponent(runId)}/patch`,
|
|
380
|
+
{ headers: { Accept: "application/json" } },
|
|
381
|
+
);
|
|
382
|
+
|
|
383
|
+
const payload = await readJsonResponse(response);
|
|
384
|
+
if (!response.ok) {
|
|
385
|
+
throw new Error(`Patch download failed: ${describeResponseError(response, payload)}`);
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
if (
|
|
389
|
+
!payload ||
|
|
390
|
+
typeof payload !== "object" ||
|
|
391
|
+
typeof (payload as { hasChanges?: unknown }).hasChanges !== "boolean"
|
|
392
|
+
) {
|
|
393
|
+
throw new Error("Patch response was not valid JSON.");
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
return payload as SyncPatchResponse;
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
// ── Internal helpers ──────────────────────────────────────────────────────────
|
|
400
|
+
|
|
401
|
+
async function readJsonResponse(response: Response): Promise<unknown> {
|
|
402
|
+
const rawBody = await response.text();
|
|
403
|
+
if (!rawBody) {
|
|
404
|
+
return null;
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
try {
|
|
408
|
+
return JSON.parse(rawBody);
|
|
409
|
+
} catch {
|
|
410
|
+
return rawBody;
|
|
411
|
+
}
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
function describeResponseError(response: Response, payload: unknown): string {
|
|
415
|
+
if (typeof payload === "string" && payload.trim()) {
|
|
416
|
+
return `${response.status} ${response.statusText}: ${payload.trim()}`;
|
|
417
|
+
}
|
|
418
|
+
|
|
419
|
+
if (payload && typeof payload === "object" && !Array.isArray(payload)) {
|
|
420
|
+
const record = payload as Record<string, unknown>;
|
|
421
|
+
const message = record.error ?? record.message;
|
|
422
|
+
if (typeof message === "string" && message.trim()) {
|
|
423
|
+
return `${response.status} ${response.statusText}: ${message.trim()}`;
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
return `${response.status} ${response.statusText}`;
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
function isMissingFileError(error: unknown): error is NodeJS.ErrnoException {
|
|
431
|
+
return Boolean(error && typeof error === "object" && "code" in error && error.code === "ENOENT");
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
function isPrepareWorkflowResponse(payload: unknown): payload is PrepareWorkflowResponse {
|
|
435
|
+
if (!payload || typeof payload !== "object" || Array.isArray(payload)) {
|
|
436
|
+
return false;
|
|
437
|
+
}
|
|
438
|
+
|
|
439
|
+
const record = payload as Record<string, unknown>;
|
|
440
|
+
const s3Creds = record.s3Credentials;
|
|
441
|
+
if (!s3Creds || typeof s3Creds !== "object" || Array.isArray(s3Creds)) {
|
|
442
|
+
return false;
|
|
443
|
+
}
|
|
444
|
+
|
|
445
|
+
const creds = s3Creds as Record<string, unknown>;
|
|
446
|
+
return (
|
|
447
|
+
typeof record.runId === "string" &&
|
|
448
|
+
typeof record.s3CodeKey === "string" &&
|
|
449
|
+
typeof creds.accessKeyId === "string" &&
|
|
450
|
+
typeof creds.secretAccessKey === "string" &&
|
|
451
|
+
typeof creds.sessionToken === "string" &&
|
|
452
|
+
typeof creds.bucket === "string" &&
|
|
453
|
+
typeof creds.prefix === "string"
|
|
454
|
+
);
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
function createScopedS3Client(s3Credentials: S3Credentials): S3Client {
|
|
458
|
+
return new S3Client({
|
|
459
|
+
region: process.env.AWS_REGION ?? process.env.AWS_DEFAULT_REGION ?? "us-east-1",
|
|
460
|
+
credentials: {
|
|
461
|
+
accessKeyId: s3Credentials.accessKeyId,
|
|
462
|
+
secretAccessKey: s3Credentials.secretAccessKey,
|
|
463
|
+
sessionToken: s3Credentials.sessionToken,
|
|
464
|
+
},
|
|
465
|
+
});
|
|
466
|
+
}
|
|
467
|
+
|
|
468
|
+
/**
 * Gzip-tar the contents of `rootDir` into an in-memory Buffer.
 *
 * Strategy: prefer `git ls-files -z` so the archive contains exactly the
 * git-tracked files (untracked secrets and build output never enter it).
 * When git is unavailable, the directory is not a repo, or the repo tracks
 * no files, fall back to archiving "." with an ignore-based filter built
 * from CODE_SYNC_EXCLUDES plus the project's .gitignore.
 *
 * NOTE(review): the whole tarball is buffered in memory — presumably fine
 * for typical project sizes; confirm for very large trees.
 */
async function createTarball(rootDir: string): Promise<Buffer> {
  const absoluteRoot = path.resolve(rootDir);

  try {
    const { execSync } = await import("node:child_process");
    // -z gives NUL-separated paths, safe for filenames with spaces/newlines.
    const gitFiles = execSync("git ls-files -z", {
      cwd: absoluteRoot,
      encoding: "utf-8",
      maxBuffer: 50 * 1024 * 1024,
    });
    const files = gitFiles.split("\0").filter(Boolean);
    if (files.length > 0) {
      const tarStream = tar.create(
        // portable: strip owner/mode/mtime metadata for reproducible archives
        { gzip: true, cwd: absoluteRoot, portable: true },
        files,
      );
      const chunks: Buffer[] = [];
      // tar.create returns an async-iterable stream of gzip chunks.
      for await (const chunk of tarStream) {
        chunks.push(Buffer.from(chunk as Uint8Array));
      }
      return Buffer.concat(chunks);
    }
    // Zero tracked files: fall through to the ignore-based walk below.
  } catch {
    // Not a git repo or git not available — fall back to ignore-based filter
  }

  const ig = await buildIgnoreMatcher(absoluteRoot);
  const tarStream = tar.create(
    {
      gzip: true,
      cwd: absoluteRoot,
      portable: true,
      // Exclude any entry matching CODE_SYNC_EXCLUDES or .gitignore rules;
      // the root entry itself ("." / empty) is always kept.
      filter(entryPath: string): boolean {
        const normalized = normalizeEntryPath(entryPath);
        if (!normalized || normalized === ".") return true;
        return !ig.ignores(normalized);
      },
    },
    ["."],
  );

  const chunks: Buffer[] = [];
  for await (const chunk of tarStream) {
    chunks.push(Buffer.from(chunk as Uint8Array));
  }

  return Buffer.concat(chunks);
}
|
|
516
|
+
|
|
517
|
+
async function buildIgnoreMatcher(rootDir: string): Promise<ignore.Ignore> {
|
|
518
|
+
const ig = ignore();
|
|
519
|
+
ig.add(CODE_SYNC_EXCLUDES);
|
|
520
|
+
|
|
521
|
+
try {
|
|
522
|
+
const gitignoreContent = await fs.readFile(path.join(rootDir, ".gitignore"), "utf-8");
|
|
523
|
+
ig.add(gitignoreContent);
|
|
524
|
+
} catch (error) {
|
|
525
|
+
if (!isMissingFileError(error)) {
|
|
526
|
+
throw error;
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
|
|
530
|
+
return ig;
|
|
531
|
+
}
|
|
532
|
+
|
|
533
|
+
function normalizeEntryPath(entryPath: string): string {
|
|
534
|
+
return entryPath.replace(/^\.\//, "").replace(/\\/g, "/");
|
|
535
|
+
}
|
|
536
|
+
|
|
537
|
+
function scopedCodeKey(prefix: string, key: string): string {
|
|
538
|
+
return [prefix, key].filter(Boolean).join("/");
|
|
539
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "NodeNext",
|
|
5
|
+
"moduleResolution": "NodeNext",
|
|
6
|
+
"lib": ["ES2022", "DOM"],
|
|
7
|
+
"outDir": "./dist",
|
|
8
|
+
"rootDir": "./src",
|
|
9
|
+
"strict": true,
|
|
10
|
+
"esModuleInterop": true,
|
|
11
|
+
"skipLibCheck": true,
|
|
12
|
+
"forceConsistentCasingInFileNames": true,
|
|
13
|
+
"declaration": true,
|
|
14
|
+
"declarationMap": true,
|
|
15
|
+
"sourceMap": true,
|
|
16
|
+
"resolveJsonModule": true,
|
|
17
|
+
"isolatedModules": true
|
|
18
|
+
},
|
|
19
|
+
"include": ["src/**/*"],
|
|
20
|
+
"exclude": ["node_modules", "dist", "**/*.test.ts"]
|
|
21
|
+
}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@agent-relay/hooks",
|
|
3
|
-
"version": "3.2.
|
|
3
|
+
"version": "3.2.21",
|
|
4
4
|
"description": "Hook emitter, registry, and trajectory hooks for Agent Relay",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -37,9 +37,9 @@
|
|
|
37
37
|
"test:watch": "vitest"
|
|
38
38
|
},
|
|
39
39
|
"dependencies": {
|
|
40
|
-
"@agent-relay/config": "3.2.
|
|
41
|
-
"@agent-relay/trajectory": "3.2.
|
|
42
|
-
"@agent-relay/sdk": "3.2.
|
|
40
|
+
"@agent-relay/config": "3.2.21",
|
|
41
|
+
"@agent-relay/trajectory": "3.2.21",
|
|
42
|
+
"@agent-relay/sdk": "3.2.21"
|
|
43
43
|
},
|
|
44
44
|
"devDependencies": {
|
|
45
45
|
"@types/node": "^22.19.3",
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@agent-relay/memory",
|
|
3
|
-
"version": "3.2.
|
|
3
|
+
"version": "3.2.21",
|
|
4
4
|
"description": "Semantic memory storage and retrieval system for agent-relay with multiple backend support",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -22,7 +22,7 @@
|
|
|
22
22
|
"test:watch": "vitest"
|
|
23
23
|
},
|
|
24
24
|
"dependencies": {
|
|
25
|
-
"@agent-relay/hooks": "3.2.
|
|
25
|
+
"@agent-relay/hooks": "3.2.21"
|
|
26
26
|
},
|
|
27
27
|
"devDependencies": {
|
|
28
28
|
"@types/node": "^22.19.3",
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@agent-relay/openclaw",
|
|
3
|
-
"version": "3.2.
|
|
3
|
+
"version": "3.2.21",
|
|
4
4
|
"description": "Relaycast bridge for OpenClaw — messaging, identity, runtime setup, and local spawning",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -29,7 +29,7 @@
|
|
|
29
29
|
"postinstall": "node -e \"try{require('child_process').execSync('ldd --version 2>&1',{stdio:'pipe'})}catch{try{require('child_process').execSync('apk info gcompat 2>/dev/null',{stdio:'pipe'})}catch{console.warn('\\n\\u26a0\\ufe0f @agent-relay/openclaw: Alpine detected without gcompat. Spawning requires glibc.\\n Install with: apk add gcompat libstdc++\\n')}}\""
|
|
30
30
|
},
|
|
31
31
|
"dependencies": {
|
|
32
|
-
"@agent-relay/sdk": "3.2.
|
|
32
|
+
"@agent-relay/sdk": "3.2.21",
|
|
33
33
|
"@relaycast/sdk": "^1.0.0",
|
|
34
34
|
"ws": "^8.0.0"
|
|
35
35
|
},
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@agent-relay/policy",
|
|
3
|
-
"version": "3.2.
|
|
3
|
+
"version": "3.2.21",
|
|
4
4
|
"description": "Agent policy management with multi-level fallback (repo, local PRPM, cloud workspace)",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -22,7 +22,7 @@
|
|
|
22
22
|
"test:watch": "vitest"
|
|
23
23
|
},
|
|
24
24
|
"dependencies": {
|
|
25
|
-
"@agent-relay/config": "3.2.
|
|
25
|
+
"@agent-relay/config": "3.2.21"
|
|
26
26
|
},
|
|
27
27
|
"devDependencies": {
|
|
28
28
|
"@types/node": "^22.19.3",
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"e2big-and-verify.test.d.ts","sourceRoot":"","sources":["../../../src/workflows/__tests__/e2big-and-verify.test.ts"],"names":[],"mappings":""}
|