@treeseed/agent 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +7 -0
- package/README.md +62 -0
- package/dist/agent-runtime.js +111 -0
- package/dist/agents/adapters/execution.js +90 -0
- package/dist/agents/adapters/mutations.js +30 -0
- package/dist/agents/adapters/notification.js +16 -0
- package/dist/agents/adapters/repository.js +61 -0
- package/dist/agents/adapters/research.js +25 -0
- package/dist/agents/adapters/verification.js +62 -0
- package/dist/agents/cli-tools.js +5 -0
- package/dist/agents/cli.js +77 -0
- package/dist/agents/content-store.js +1 -0
- package/dist/agents/contracts/messages.js +138 -0
- package/dist/agents/contracts/run.js +0 -0
- package/dist/agents/d1-store.js +1 -0
- package/dist/agents/frontmatter.js +1 -0
- package/dist/agents/git-runtime.js +1 -0
- package/dist/agents/index.js +5 -0
- package/dist/agents/kernel/agent-kernel.js +284 -0
- package/dist/agents/kernel/trigger-resolver.js +153 -0
- package/dist/agents/model-registry.js +1 -0
- package/dist/agents/registry-helper.js +14 -0
- package/dist/agents/registry.js +91 -0
- package/dist/agents/runtime-types.js +0 -0
- package/dist/agents/sdk-filters.js +1 -0
- package/dist/agents/sdk-types.js +1 -0
- package/dist/agents/sdk.js +1 -0
- package/dist/agents/spec-loader.js +53 -0
- package/dist/agents/spec-normalizer.js +257 -0
- package/dist/agents/spec-types.js +0 -0
- package/dist/agents/stores/cursor-store.js +1 -0
- package/dist/agents/stores/helpers.js +1 -0
- package/dist/agents/stores/lease-store.js +1 -0
- package/dist/agents/stores/message-store.js +1 -0
- package/dist/agents/stores/run-store.js +1 -0
- package/dist/agents/stores/subscription-store.js +1 -0
- package/dist/agents/testing/agents-smoke.js +32 -0
- package/dist/agents/testing/e2e-harness.js +435 -0
- package/dist/agents/wrangler-d1.js +1 -0
- package/dist/index.js +9 -0
- package/dist/scripts/assert-release-tag-version.d.ts +1 -0
- package/dist/scripts/assert-release-tag-version.js +20 -0
- package/dist/scripts/build-dist.d.ts +1 -0
- package/dist/scripts/build-dist.js +98 -0
- package/dist/scripts/package-tools.d.ts +1 -0
- package/dist/scripts/package-tools.js +7 -0
- package/dist/scripts/publish-package.d.ts +1 -0
- package/dist/scripts/publish-package.js +19 -0
- package/dist/scripts/release-verify.d.ts +1 -0
- package/dist/scripts/release-verify.js +143 -0
- package/dist/scripts/test-smoke.d.ts +1 -0
- package/dist/scripts/test-smoke.js +23 -0
- package/dist/scripts/treeseed-agents.d.ts +2 -0
- package/dist/scripts/treeseed-agents.js +13 -0
- package/dist/src/agent-runtime.d.ts +17 -0
- package/dist/src/agents/adapters/execution.d.ts +46 -0
- package/dist/src/agents/adapters/mutations.d.ts +22 -0
- package/dist/src/agents/adapters/notification.d.ts +11 -0
- package/dist/src/agents/adapters/repository.d.ts +28 -0
- package/dist/src/agents/adapters/research.d.ts +14 -0
- package/dist/src/agents/adapters/verification.d.ts +36 -0
- package/dist/src/agents/cli-tools.d.ts +1 -0
- package/dist/src/agents/cli.d.ts +6 -0
- package/dist/src/agents/content-store.d.ts +1 -0
- package/dist/src/agents/contracts/messages.d.ts +88 -0
- package/dist/src/agents/contracts/run.d.ts +20 -0
- package/dist/src/agents/d1-store.d.ts +1 -0
- package/dist/src/agents/frontmatter.d.ts +1 -0
- package/dist/src/agents/git-runtime.d.ts +1 -0
- package/dist/src/agents/index.d.ts +1 -0
- package/dist/src/agents/kernel/agent-kernel.d.ts +52 -0
- package/dist/src/agents/kernel/trigger-resolver.d.ts +18 -0
- package/dist/src/agents/model-registry.d.ts +1 -0
- package/dist/src/agents/registry-helper.d.ts +4 -0
- package/dist/src/agents/registry.d.ts +7 -0
- package/dist/src/agents/runtime-types.d.ts +117 -0
- package/dist/src/agents/sdk-filters.d.ts +1 -0
- package/dist/src/agents/sdk-types.d.ts +1 -0
- package/dist/src/agents/sdk.d.ts +1 -0
- package/dist/src/agents/spec-loader.d.ts +18 -0
- package/dist/src/agents/spec-normalizer.d.ts +2 -0
- package/dist/src/agents/spec-types.d.ts +64 -0
- package/dist/src/agents/stores/cursor-store.d.ts +1 -0
- package/dist/src/agents/stores/helpers.d.ts +1 -0
- package/dist/src/agents/stores/lease-store.d.ts +1 -0
- package/dist/src/agents/stores/message-store.d.ts +1 -0
- package/dist/src/agents/stores/run-store.d.ts +1 -0
- package/dist/src/agents/stores/subscription-store.d.ts +1 -0
- package/dist/src/agents/testing/agents-smoke.d.ts +1 -0
- package/dist/src/agents/testing/e2e-harness.d.ts +44 -0
- package/dist/src/agents/wrangler-d1.d.ts +1 -0
- package/dist/src/index.d.ts +3 -0
- package/package.json +54 -0
|
@@ -0,0 +1,435 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { existsSync } from "node:fs";
|
|
3
|
+
import { access, cp, mkdtemp, mkdir, readFile, readdir, rm, symlink, writeFile } from "node:fs/promises";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import { promisify } from "node:util";
|
|
7
|
+
import { createRequire } from "node:module";
|
|
8
|
+
import ts from "typescript";
|
|
9
|
+
import { MemoryAgentDatabase } from "../d1-store.js";
|
|
10
|
+
import { resolveModelDefinition } from "../model-registry.js";
|
|
11
|
+
import { runFromRecord } from "../stores/run-store.js";
|
|
12
|
+
import { serializeFrontmatterDocument } from "../frontmatter.js";
|
|
13
|
+
// Promise-returning wrapper around child_process.execFile for async/await use.
const execFileAsync = promisify(execFile);
// CommonJS-style require, used to resolve installed package locations from ESM.
const require2 = createRequire(import.meta.url);
|
|
15
|
+
// Current wall-clock time as an ISO-8601 UTC timestamp string.
function nowIso() {
  const current = new Date();
  return current.toISOString();
}
|
|
18
|
+
// Locate a fixture site root (identified by src/manifest.yaml).
// Resolution order: explicit TREESEED_AGENT_FIXTURE_ROOT override, then fixture
// directories walking up from cwd to the filesystem root, then fixture
// directories shipped next to the installed @treeseed/core package.
function resolveDocsRoot() {
  if (process.env.TREESEED_AGENT_FIXTURE_ROOT) {
    return path.resolve(process.env.TREESEED_AGENT_FIXTURE_ROOT);
  }
  const corePackageRoot = path.resolve(path.dirname(require2.resolve("@treeseed/core")), "..");
  const candidates = [];
  for (let dir = process.cwd(); ; ) {
    candidates.push(
      path.resolve(dir, ".fixtures", "treeseed-fixtures", "sites", "working-site"),
      path.resolve(dir, "fixture"),
      path.resolve(dir, "fixtures", "sites", "working-site")
    );
    const parent = path.resolve(dir, "..");
    if (parent === dir) {
      break; // reached the filesystem root
    }
    dir = parent;
  }
  candidates.push(
    path.resolve(corePackageRoot, ".fixtures", "treeseed-fixtures", "sites", "working-site"),
    path.resolve(corePackageRoot, "fixture")
  );
  const match = candidates.find((candidate) =>
    existsSync(path.join(candidate, "src", "manifest.yaml"))
  );
  if (match) {
    return match;
  }
  throw new Error(
    `Unable to resolve an agent smoke fixture root. Checked: ${candidates.join(", ")}`
  );
}
|
|
51
|
+
// Resolve the filesystem path of the wrangler CLI binary.
// Order: explicit TREESEED_AGENT_WRANGLER_BIN override, the installed wrangler
// package's declared "bin" entry, then the fixture site's node_modules/.bin.
async function resolveWranglerBin() {
  if (process.env.TREESEED_AGENT_WRANGLER_BIN) {
    return path.resolve(process.env.TREESEED_AGENT_WRANGLER_BIN);
  }
  try {
    const wranglerPackageRoot = path.resolve(path.dirname(require2.resolve("wrangler/package.json")));
    const packageJson = JSON.parse(await readFile(path.join(wranglerPackageRoot, "package.json"), "utf8"));
    // npm "bin" may be a plain string or a { wrangler: relativePath } map.
    const relativeBin = typeof packageJson.bin === "string" ? packageJson.bin : packageJson.bin?.wrangler;
    if (!relativeBin) {
      throw new Error("Unable to resolve wrangler binary path from package.json.");
    }
    return path.resolve(wranglerPackageRoot, relativeBin);
  } catch {
    // Fallback: the fixture checkout's local install. access() rejects (and
    // propagates the error) when that binary is missing as well.
    const packageLocal = path.resolve(resolveDocsRoot(), "node_modules", ".bin", "wrangler");
    await access(packageLocal);
    return packageLocal;
  }
}
|
|
69
|
+
// Run an external command to completion; rejects on non-zero exit status.
async function runCommand(command, args, cwd) {
  const options = {
    cwd,
    env: process.env,
    // Allow chatty tools (git, wrangler) up to 10 MiB of captured output.
    maxBuffer: 10 * 1024 * 1024
  };
  await execFileAsync(command, args, options);
}
|
|
76
|
+
// Recursively collect every file path beneath `root`, depth-first in
// directory-listing order. An unreadable/missing root yields an empty list.
async function walkFiles(root) {
  const entries = await readdir(root, { withFileTypes: true }).catch(() => []);
  const collected = [];
  for (const entry of entries) {
    const fullPath = path.join(root, entry.name);
    if (entry.isDirectory()) {
      collected.push(...await walkFiles(fullPath));
    } else {
      collected.push(fullPath);
    }
  }
  return collected;
}
|
|
89
|
+
// Broaden the message-model permissions in the fixture agent specs so the e2e
// agents can pick/update/create (and, for some, get) messages. Missing spec
// files are skipped; files are only rewritten when the replacement changed them.
async function patchFixtureAgentSpecs(repoRoot) {
  // filename -> replacement "operations:" line.
  // NOTE(review): the leading whitespace inside these strings and in the regex
  // below must match the fixture YAML frontmatter indentation exactly — verify
  // against the fixture files.
  const updates = /* @__PURE__ */ new Map([
    ["architecture-agent.mdx", " operations: [pick, update, create]"],
    ["engineer-agent.mdx", " operations: [pick, update, create]"],
    ["releaser-agent.mdx", " operations: [pick, update, get, create]"],
    ["researcher-agent.mdx", " operations: [pick, update, create]"],
    ["reviewer-agent.mdx", " operations: [pick, update, get, create]"]
  ]);
  for (const [filename, permissionLine] of updates) {
    const filePath = path.join(repoRoot, "src", "content", "agents", filename);
    const source = await readFile(filePath, "utf8").catch(() => null);
    if (!source) {
      continue;
    }
    // Replace the operations list that immediately follows the
    // "model: message" permission entry (first occurrence only).
    const next = source.replace(
      /(\n - model: message\n) operations: \[[^\]]+\]/,
      `$1${permissionLine}`
    );
    if (next !== source) {
      await writeFile(filePath, next, "utf8");
    }
  }
}
|
|
112
|
+
// Transpile the fixture's src/agents/*.ts handler files to ES modules in place
// (alongside the .ts sources) so the sandbox can import them without a build.
async function transpileFixtureAgentHandlers(repoRoot) {
  const agentsRoot = path.join(repoRoot, "src", "agents");
  // A missing or unreadable agents directory simply means nothing to transpile.
  const agentFiles = (await readdir(agentsRoot, { withFileTypes: true }).catch(() => [])).filter((entry) => entry.isFile() && entry.name.endsWith(".ts")).map((entry) => entry.name);
  for (const filename of agentFiles) {
    const sourcePath = path.join(agentsRoot, filename);
    const outputPath = path.join(agentsRoot, filename.replace(/\.ts$/u, ".js"));
    const source = await readFile(sourcePath, "utf8");
    // Type-strip only (transpileModule does no type checking), then rewrite
    // relative '.ts' import specifiers to '.js' so the emitted modules resolve.
    const transformed = ts.transpileModule(source, {
      compilerOptions: {
        module: ts.ModuleKind.ESNext,
        target: ts.ScriptTarget.ES2022
      }
    }).outputText.replace(/(['"`])(\.[^'"`\n]+)\.ts\1/g, "$1$2.js$1");
    await writeFile(outputPath, transformed, "utf8");
  }
}
|
|
128
|
+
// Apply the local D1 schema migrations in order via the wrangler CLI,
// persisting state under `persistTo`.
async function migrateDatabase(repoRoot, persistTo) {
  const wrangler = await resolveWranglerBin();
  const migrations = [
    "0001_subscribers.sql",
    "0002_agent_runtime.sql",
    "0003_agent_run_trace.sql"
  ];
  for (const migration of migrations) {
    const migrationFile = path.join(repoRoot, "migrations", migration);
    const args = [
      "d1",
      "execute",
      "karyon-docs-site-data",
      "--local",
      "--persist-to",
      persistTo,
      "--file",
      migrationFile
    ];
    await runCommand(wrangler, args, repoRoot);
  }
}
|
|
151
|
+
// Turn the sandbox directory into a git repository on branch `main` with a
// baseline commit containing the copied fixture content.
async function initializeSandboxRepo(repoRoot) {
  const git = (args) => runCommand("git", args, repoRoot);
  await git(["init", "-b", "main"]);
  await git(["config", "user.email", "agents-e2e@example.test"]);
  await git(["config", "user.name", "Agents E2E"]);
  await git(["add", "."]);
  await git(["commit", "-m", "test: baseline sandbox"]);
}
|
|
158
|
+
// Serialize an objective MDX document (frontmatter + one heading) for `slug`.
// Callers default `date` to the far future so seeded items sort predictably.
function createObjectiveDocument(slug, date) {
  return serializeFrontmatterDocument(
    {
      title: `Objective ${slug}`,
      description: `Objective ${slug} description`,
      date,
      status: "planned",
      tags: ["agent", "e2e"],
      summary: `Summary for ${slug}`,
      draft: false,
      timeHorizon: "near-term",
      motivation: `Motivation for ${slug}`,
      primaryContributor: "planner-agent",
      relatedQuestions: [],
      relatedBooks: []
    },
    // Body is a single heading followed by a trailing newline.
    `# Objective ${slug}
`
  );
}
|
|
178
|
+
// Serialize a question MDX document; `relatedObjectives` links the question to
// previously seeded objectives (defaults to none).
function createQuestionDocument(slug, date, relatedObjectives = []) {
  return serializeFrontmatterDocument(
    {
      title: `Question ${slug}`,
      description: `Question ${slug} description`,
      date,
      status: "planned",
      tags: ["agent", "e2e"],
      summary: `Summary for ${slug}`,
      draft: false,
      questionType: "implementation",
      motivation: `Motivation for ${slug}`,
      primaryContributor: "planner-agent",
      relatedObjectives,
      relatedBooks: []
    },
    // Body is a single heading followed by a trailing newline.
    `# Question ${slug}
`
  );
}
|
|
198
|
+
// Serialize a knowledge markdown document stamped with the current time.
function createKnowledgeDocument(slug, title) {
  return serializeFrontmatterDocument(
    {
      title,
      slug,
      updated: nowIso(),
      tags: ["agent", "e2e"]
    },
    // Body is a single heading followed by a trailing newline.
    `# ${title}
`
  );
}
|
|
210
|
+
// Provision an isolated end-to-end harness for agent runs: copy the fixture
// site into a temp directory, initialize a git sandbox, point the agent env
// vars at it, and return seed/inspect helpers plus a cleanup() that restores
// the previous process state.
// NOTE(review): this mutates process.env and process.cwd(); callers must await
// cleanup() (including on failure paths) to restore them.
async function createAgentTestRuntime(options) {
  const rootDir = await mkdtemp(path.join(os.tmpdir(), "karyon-agents-e2e-"));
  const repoRoot = path.join(rootDir, "docs");
  const persistTo = path.join(rootDir, ".wrangler-state");
  const docsRoot = resolveDocsRoot();
  // Snapshot env and cwd so cleanup() can restore them exactly.
  const previousContentRoot = process.env.TREESEED_AGENT_CONTENT_ROOT;
  const previousExecutionMode = process.env.TREESEED_AGENT_EXECUTION_PROVIDER;
  const previousTenantRoot = process.env.TREESEED_TENANT_ROOT;
  const previousCwd = process.cwd();
  const sharedNodeModules = path.join(previousCwd, "node_modules");
  // Copy the fixture site into the sandbox, excluding build output and caches.
  await cp(docsRoot, repoRoot, {
    recursive: true,
    filter(source) {
      const relativePath = path.relative(docsRoot, source);
      if (!relativePath) {
        return true;
      }
      return ![
        ".wrangler",
        ".agent-worktrees",
        "node_modules",
        "dist",
        ".astro",
        "coverage"
      ].some((prefix) => relativePath === prefix || relativePath.startsWith(`${prefix}${path.sep}`));
    }
  });
  // Reuse the caller's node_modules via a symlink instead of reinstalling.
  if (existsSync(sharedNodeModules)) {
    await symlink(sharedNodeModules, path.join(repoRoot, "node_modules"), "dir");
  }
  await transpileFixtureAgentHandlers(repoRoot);
  await patchFixtureAgentSpecs(repoRoot);
  // Point the agent runtime at the sandbox.
  process.env.TREESEED_AGENT_CONTENT_ROOT = path.join(repoRoot, "src", "content");
  process.env.TREESEED_AGENT_EXECUTION_PROVIDER = options?.executionMode ?? "stub";
  process.env.TREESEED_TENANT_ROOT = repoRoot;
  process.chdir(repoRoot);
  await mkdir(persistTo, { recursive: true });
  await initializeSandboxRepo(repoRoot);
  // Imported lazily so the env vars above are in place before module init.
  const [{ AgentKernel }, { AgentSdk }] = await Promise.all([
    import("../kernel/agent-kernel.js"),
    import("../sdk.js")
  ]);
  // "local-d1" mode migrates and uses a wrangler-managed local database;
  // otherwise runs are backed by the in-memory database.
  const sdk = options?.databaseMode === "local-d1" ? (await migrateDatabase(repoRoot, persistTo), AgentSdk.createLocal({
    repoRoot,
    databaseName: "karyon-docs-site-data",
    persistTo
  })) : new AgentSdk({
    repoRoot,
    database: new MemoryAgentDatabase()
  });
  const kernel = new AgentKernel(sdk, repoRoot, {
    execution: options?.execution,
    mutations: options?.mutations
  });
  // Write a file into the sandbox and commit it so agents start from a clean tree.
  async function writeSeedFile(relativePath, source, message) {
    const filePath = path.join(repoRoot, relativePath);
    await mkdir(path.dirname(filePath), { recursive: true });
    await writeFile(filePath, source, "utf8");
    await runCommand("git", ["add", relativePath], repoRoot);
    await runCommand("git", ["commit", "-m", message], repoRoot);
  }
  return {
    rootDir,
    repoRoot,
    persistTo,
    sdk,
    kernel,
    // Seed objective documents; each entry: { slug, date? }.
    async seedObjectives(entries) {
      for (const entry of entries) {
        await writeSeedFile(
          path.join("src", "content", "objectives", `${entry.slug}.mdx`),
          createObjectiveDocument(entry.slug, entry.date ?? "2099-01-01T00:00:00.000Z"),
          `test(seed): objective ${entry.slug}`
        );
      }
    },
    // Seed question documents; each entry: { slug, date?, relatedObjectives? }.
    async seedQuestions(entries) {
      for (const entry of entries) {
        await writeSeedFile(
          path.join("src", "content", "questions", `${entry.slug}.mdx`),
          createQuestionDocument(
            entry.slug,
            entry.date ?? "2099-01-01T00:00:00.000Z",
            entry.relatedObjectives ?? []
          ),
          `test(seed): question ${entry.slug}`
        );
      }
    },
    // Seed knowledge documents; each entry: { slug, title? }.
    async seedKnowledge(entries) {
      for (const entry of entries) {
        await writeSeedFile(
          path.join("src", "content", "knowledge", `${entry.slug}.md`),
          createKnowledgeDocument(entry.slug, entry.title ?? `Knowledge ${entry.slug}`),
          `test(seed): knowledge ${entry.slug}`
        );
      }
    },
    // Create messages through the SDK as actor "agents-e2e"; returns payloads.
    async seedMessages(entries) {
      const messages = [];
      for (const entry of entries) {
        const created = await sdk.createMessage({
          ...entry,
          actor: "agents-e2e"
        });
        messages.push(created.payload);
      }
      return messages;
    },
    // Delete and recreate a content-backed model's directory, committing the
    // removal so the sandbox repo stays clean.
    async clearModelContent(model) {
      const definition = resolveModelDefinition(model);
      if (!definition.contentDir) {
        throw new Error(`Model ${model} is not content-backed.`);
      }
      const relativeContentDir = path.relative(repoRoot, definition.contentDir);
      await rm(definition.contentDir, { recursive: true, force: true });
      await mkdir(definition.contentDir, { recursive: true });
      await runCommand("git", ["add", "-A", relativeContentDir], repoRoot);
      await runCommand("git", ["commit", "-m", `test(seed): clear ${model}`], repoRoot);
    },
    runAgent(slug) {
      return kernel.runAgent(slug);
    },
    runCycle() {
      return kernel.runCycle();
    },
    // All messages, oldest first, capped at 100.
    async readMessages() {
      const response = await sdk.search({
        model: "message",
        sort: [{ field: "created_at", direction: "asc" }],
        limit: 100
      });
      return response.payload;
    },
    // Agent run records, oldest first. Uses the in-memory inspect hook when the
    // database exposes one; otherwise reads the D1 runtime_records table.
    async readRunLogs() {
      const database = sdk.database;
      if (database.inspectRuns) {
        return database.inspectRuns().map((row) => runFromRecord(row));
      }
      const rows = database.db ? await database.db.prepare(`
        SELECT
          record_key AS run_id,
          lookup_key AS agent_slug,
          status,
          json_extract(payload_json, '$.triggerSource') AS trigger_source,
          json_extract(payload_json, '$.handlerKind') AS handler_kind,
          json_extract(payload_json, '$.triggerKind') AS trigger_kind,
          json_extract(payload_json, '$.selectedItemKey') AS selected_item_key,
          json_extract(payload_json, '$.selectedMessageId') AS selected_message_id,
          json_extract(payload_json, '$.claimedMessageId') AS claimed_message_id,
          json_extract(payload_json, '$.branchName') AS branch_name,
          secondary_key AS commit_sha,
          json_extract(payload_json, '$.prUrl') AS pr_url,
          json_extract(payload_json, '$.summary') AS summary,
          json_extract(payload_json, '$.error') AS error,
          json_extract(payload_json, '$.errorCategory') AS error_category,
          json_extract(payload_json, '$.changedPaths') AS changed_paths,
          created_at AS started_at,
          json_extract(payload_json, '$.finishedAt') AS finished_at
        FROM runtime_records
        WHERE record_type = 'agent_run'
        ORDER BY created_at ASC
      `).all() : { results: [] };
      return rows.results.map((row) => runFromRecord(row));
    },
    // Raw content-lease rows, via the inspect hook or the lease_state table.
    async readContentLeases() {
      const database = sdk.database;
      if (database.inspectLeases) {
        return database.inspectLeases();
      }
      if (!database.db) {
        return [];
      }
      const rows = await database.db.prepare("SELECT * FROM lease_state ORDER BY item_key ASC").all();
      return rows.results;
    },
    // Files written under .agent-worktrees/**/.agent-artifacts/, with contents.
    async readSandboxArtifacts() {
      const worktreeRoot = path.join(repoRoot, ".agent-worktrees");
      const files = (await walkFiles(worktreeRoot)).filter((entry) => entry.includes(`${path.sep}.agent-artifacts${path.sep}`));
      return Promise.all(
        files.map(async (filePath) => ({
          path: filePath,
          content: await readFile(filePath, "utf8")
        }))
      );
    },
    // Claim the next pending message of the given types (5-minute lease).
    async claimMessage(messageTypes, workerId = "agents-e2e-claimer") {
      const claimed = await sdk.claimMessage({
        workerId,
        messageTypes,
        leaseSeconds: 300
      });
      return claimed.payload;
    },
    // Take a content lease on an objective item (5-minute lease).
    async claimObjectiveLease(itemKey, workerId = "agents-e2e-lease-holder") {
      return sdk.database.tryClaimContentLease({
        model: "objective",
        itemKey,
        claimedBy: workerId,
        leaseSeconds: 300
      });
    },
    // Restore the env vars and cwd captured above, then remove the temp tree.
    async cleanup() {
      if (previousContentRoot === void 0) {
        delete process.env.TREESEED_AGENT_CONTENT_ROOT;
      } else {
        process.env.TREESEED_AGENT_CONTENT_ROOT = previousContentRoot;
      }
      if (previousExecutionMode === void 0) {
        delete process.env.TREESEED_AGENT_EXECUTION_PROVIDER;
      } else {
        process.env.TREESEED_AGENT_EXECUTION_PROVIDER = previousExecutionMode;
      }
      if (previousTenantRoot === void 0) {
        delete process.env.TREESEED_TENANT_ROOT;
      } else {
        process.env.TREESEED_TENANT_ROOT = previousTenantRoot;
      }
      process.chdir(previousCwd);
      await rm(rootDir, { recursive: true, force: true });
    }
  };
}
|
|
433
|
+
export {
|
|
434
|
+
createAgentTestRuntime
|
|
435
|
+
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from "@treeseed/sdk/wrangler-d1";
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { AgentKernel } from "./agents/kernel/agent-kernel.js";
|
|
2
|
+
import { runTreeseedAgentCli } from "./agents/cli.js";
|
|
3
|
+
import { resolveAgentHandler, listRegisteredAgentHandlers } from "./agents/registry.js";
|
|
4
|
+
export {
|
|
5
|
+
AgentKernel,
|
|
6
|
+
listRegisteredAgentHandlers,
|
|
7
|
+
resolveAgentHandler,
|
|
8
|
+
runTreeseedAgentCli
|
|
9
|
+
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
// Validate that the release tag being published matches the package version.
// Usage: node assert-release-tag-version.js <tag>  (or set GITHUB_REF_NAME).
// Exits non-zero with a diagnostic on any mismatch.
import { readFileSync } from 'node:fs';
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
const packageRoot = resolve(fileURLToPath(new URL('..', import.meta.url)));
const packageJson = JSON.parse(readFileSync(resolve(packageRoot, 'package.json'), 'utf8'));
const packageVersion = packageJson.version;
const tagName = process.argv[2] || process.env.GITHUB_REF_NAME;
if (!tagName) {
  console.error('Release tag validation requires a tag name argument or GITHUB_REF_NAME.');
  process.exit(1);
}
// SemVer shape: MAJOR.MINOR.PATCH with optional -prerelease and +build parts.
// The previous pattern `(?:[-+][0-9A-Za-z.-]+)?` could not accept a
// pre-release and build metadata together (e.g. "1.2.3-rc.1+build.5").
if (!/^\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?$/.test(tagName)) {
  console.error(`Release tag "${tagName}" must be a plain semver tag like "${packageVersion}".`);
  process.exit(1);
}
if (tagName !== packageVersion) {
  console.error(`Release tag "${tagName}" does not match @treeseed/agent version "${packageVersion}".`);
  process.exit(1);
}
console.log(`Release tag "${tagName}" matches @treeseed/agent version "${packageVersion}".`);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import { copyFileSync, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync, chmodSync } from 'node:fs';
|
|
2
|
+
import { dirname, extname, join, relative, resolve } from 'node:path';
|
|
3
|
+
import { build } from 'esbuild';
|
|
4
|
+
import ts from 'typescript';
|
|
5
|
+
import { packageRoot } from './package-tools.js';
|
|
6
|
+
// Source, scripts, and output roots for the dist build.
const srcRoot = resolve(packageRoot, 'src');
const scriptsRoot = resolve(packageRoot, 'scripts');
const distRoot = resolve(packageRoot, 'dist');
// Asset extensions copied into dist verbatim (non-compiled files).
// NOTE(review): path.extname('foo.d.js') returns '.js', so the '.d.js' entry
// can never match an extname() lookup — confirm how .d.js assets are meant to
// be dispatched in the copy loop below.
const COPY_EXTENSIONS = new Set(['.d.js', '.json', '.md']);
|
|
10
|
+
// Recursively list every file beneath `root`, depth-first in listing order.
function walkFiles(root) {
  return readdirSync(root, { withFileTypes: true }).flatMap((entry) => {
    const fullPath = join(root, entry.name);
    return entry.isDirectory() ? walkFiles(fullPath) : [fullPath];
  });
}
|
|
21
|
+
// Make sure the parent directory of `filePath` exists before writing to it.
function ensureDir(filePath) {
  const parent = dirname(filePath);
  mkdirSync(parent, { recursive: true });
}
|
|
24
|
+
// Rewrite module specifiers in emitted JS: relative '.mjs'/'.ts' imports
// become '.js', and '../src/' prefixes collapse to '../' to match the flat
// dist layout.
function rewriteRuntimeSpecifiers(contents) {
  const extensionPattern = /(['"`])(\.[^'"`\n]+)\.(mjs|ts)\1/g;
  const srcPrefixPattern = /(['"`])\.\.\/src\//g;
  const withJsExtensions = contents.replace(extensionPattern, '$1$2.js$1');
  return withJsExtensions.replace(srcPrefixPattern, '$1../');
}
|
|
29
|
+
// Compile one src module with esbuild (transpile only, no bundling) into the
// mirrored location under `outputRoot`, then rewrite its relative specifiers
// for the runtime dist layout.
async function compileModule(filePath, sourceRoot, outputRoot) {
  const relativePath = relative(sourceRoot, filePath);
  const outputFile = resolve(outputRoot, relativePath.replace(/\.(mjs|ts)$/u, '.js'));
  ensureDir(outputFile);
  await build({
    entryPoints: [filePath],
    outfile: outputFile,
    platform: 'node',
    format: 'esm',
    bundle: false, // keep imports external; one output file per source file
    logLevel: 'silent',
  });
  // Post-process so './x.ts'-style specifiers in the emitted file become './x.js'.
  const builtSource = readFileSync(outputFile, 'utf8');
  writeFileSync(outputFile, rewriteRuntimeSpecifiers(builtSource), 'utf8');
}
|
|
44
|
+
// Copy a non-compiled asset into the output tree, mirroring its relative path.
// '.d.js' declaration files additionally get their module specifiers rewritten
// for the dist layout.
function copyAsset(filePath, sourceRoot, outputRoot) {
  const outputFile = resolve(outputRoot, relative(sourceRoot, filePath));
  ensureDir(outputFile);
  copyFileSync(filePath, outputFile);
  if (!outputFile.endsWith('.d.js')) {
    return;
  }
  const copied = readFileSync(outputFile, 'utf8');
  writeFileSync(outputFile, rewriteRuntimeSpecifiers(copied), 'utf8');
}
|
|
52
|
+
// Transpile one scripts/ entry into dist/scripts and mark it executable.
function transpileScript(filePath) {
  const source = readFileSync(filePath, 'utf8');
  const relativePath = relative(scriptsRoot, filePath);
  const outputFile = resolve(distRoot, 'scripts', relativePath.replace(/\.(mjs|ts)$/u, '.js'));
  // .ts sources are type-stripped with the TypeScript transpiler; anything
  // else reaching here (e.g. .mjs) is passed through unchanged.
  const transformed = extname(filePath) === '.ts'
    ? ts.transpileModule(source, {
      compilerOptions: { module: ts.ModuleKind.ESNext, target: ts.ScriptTarget.ES2022 },
    }).outputText
    : source;
  ensureDir(outputFile);
  writeFileSync(outputFile, rewriteRuntimeSpecifiers(transformed), 'utf8');
  chmodSync(outputFile, 0o755); // scripts are invoked directly via package bin entries
}
|
|
65
|
+
// Emit .d.ts declaration files for the whole package into dist, driven by the
// package's own tsconfig.json. Throws if the config is missing or emit fails.
function emitDeclarations() {
  const configPath = ts.findConfigFile(packageRoot, ts.sys.fileExists, 'tsconfig.json');
  if (!configPath)
    throw new Error('Unable to locate tsconfig.json for declaration build.');
  const configFile = ts.readConfigFile(configPath, ts.sys.readFile);
  const parsed = ts.parseJsonConfigFileContent(configFile.config, ts.sys, packageRoot);
  // Force declaration-only emit regardless of the tsconfig's own emit settings.
  const program = ts.createProgram({
    rootNames: parsed.fileNames,
    options: { ...parsed.options, declaration: true, emitDeclarationOnly: true, declarationDir: distRoot, noEmit: false },
  });
  const result = program.emit();
  if (result.emitSkipped)
    throw new Error('Declaration build failed.');
}
|
|
79
|
+
// --- build driver -----------------------------------------------------------
// Clean dist, compile src modules, transpile scripts, emit declarations, and
// copy the top-level README/Dockerfile next to dist.
rmSync(distRoot, { recursive: true, force: true });
for (const filePath of walkFiles(srcRoot)) {
  const extension = extname(filePath);
  if (extension === '.ts')
    await compileModule(filePath, srcRoot, distRoot);
  // extname('foo.d.js') is '.js', so the '.d.js' entry in COPY_EXTENSIONS can
  // never match via extname alone — check the full suffix explicitly so those
  // assets are actually shipped.
  else if (COPY_EXTENSIONS.has(extension) || filePath.endsWith('.d.js'))
    copyAsset(filePath, srcRoot, distRoot);
}
for (const filePath of walkFiles(scriptsRoot)) {
  const extension = extname(filePath);
  // Was `extension === '.ts' || extension === '.ts'` (duplicated operand).
  // transpileScript explicitly handles '.mjs' sources (pass-through + rename),
  // so '.mjs' is the evidently intended second case.
  if (extension === '.ts' || extension === '.mjs')
    transpileScript(filePath);
}
emitDeclarations();
if (existsSync(resolve(packageRoot, 'README.md'))) {
  copyFileSync(resolve(packageRoot, 'README.md'), resolve(distRoot, '..', 'README.md'));
}
if (existsSync(resolve(packageRoot, 'Dockerfile'))) {
  copyFileSync(resolve(packageRoot, 'Dockerfile'), resolve(distRoot, '..', 'Dockerfile'));
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare const packageRoot: string;
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { basename, dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
// Directory containing this module: scripts/ in a source checkout, or
// dist/scripts when running from the published build output.
const scriptRoot = dirname(fileURLToPath(import.meta.url));
const packageCandidate = resolve(scriptRoot, '..');
// When executing from dist/scripts the parent is dist/, so step up one more
// level to reach the real package root. Using basename() keeps the check
// working on Windows, where the original endsWith('/dist') test never matched
// (path.resolve emits backslash separators there).
export const packageRoot = basename(packageCandidate) === 'dist'
  ? resolve(packageCandidate, '..')
  : packageCandidate;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// Publish the package root to npm, forwarding any extra CLI arguments and
// propagating npm's exit status.
import { spawnSync } from 'node:child_process';
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
// The package root is the parent directory of this script.
const packageRoot = resolve(fileURLToPath(new URL('..', import.meta.url)));
const extraArgs = process.argv.slice(2);
const npmArgs = ['publish', '.', '--access', 'public'];
// Attach provenance attestation only when running inside GitHub Actions.
if (process.env.GITHUB_ACTIONS === 'true')
  npmArgs.push('--provenance');
npmArgs.push(...extraArgs);
const result = spawnSync('npm', npmArgs, {
  cwd: packageRoot,
  stdio: 'inherit',
  env: process.env,
});
if (result.error) {
  // Spawn failure (e.g. npm not on PATH), distinct from a publish failure.
  console.error(result.error.message);
  process.exit(1);
}
// Propagate npm's status; treat a missing status (process killed) as failure.
process.exit(result.status ?? 1);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|