loreli 0.0.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/LICENSE +1 -1
  2. package/README.md +710 -97
  3. package/bin/loreli.js +89 -0
  4. package/package.json +77 -14
  5. package/packages/README.md +101 -0
  6. package/packages/action/README.md +98 -0
  7. package/packages/action/prompts/action.md +172 -0
  8. package/packages/action/src/index.js +684 -0
  9. package/packages/agent/README.md +606 -0
  10. package/packages/agent/src/backends/claude.js +387 -0
  11. package/packages/agent/src/backends/codex.js +351 -0
  12. package/packages/agent/src/backends/cursor.js +371 -0
  13. package/packages/agent/src/backends/index.js +486 -0
  14. package/packages/agent/src/base.js +138 -0
  15. package/packages/agent/src/cli.js +275 -0
  16. package/packages/agent/src/discover.js +396 -0
  17. package/packages/agent/src/factory.js +124 -0
  18. package/packages/agent/src/index.js +12 -0
  19. package/packages/agent/src/models.js +159 -0
  20. package/packages/agent/src/output.js +62 -0
  21. package/packages/agent/src/session.js +162 -0
  22. package/packages/agent/src/trace.js +186 -0
  23. package/packages/classify/README.md +136 -0
  24. package/packages/classify/prompts/blocker.md +12 -0
  25. package/packages/classify/prompts/feedback.md +14 -0
  26. package/packages/classify/prompts/pane-state.md +20 -0
  27. package/packages/classify/src/index.js +81 -0
  28. package/packages/config/README.md +898 -0
  29. package/packages/config/src/defaults.js +145 -0
  30. package/packages/config/src/index.js +223 -0
  31. package/packages/config/src/schema.js +291 -0
  32. package/packages/config/src/validate.js +160 -0
  33. package/packages/context/README.md +165 -0
  34. package/packages/context/src/index.js +198 -0
  35. package/packages/hub/README.md +338 -0
  36. package/packages/hub/src/base.js +154 -0
  37. package/packages/hub/src/github.js +1597 -0
  38. package/packages/hub/src/index.js +79 -0
  39. package/packages/hub/src/labels.js +48 -0
  40. package/packages/identity/README.md +288 -0
  41. package/packages/identity/src/index.js +620 -0
  42. package/packages/identity/src/themes/avatar.js +217 -0
  43. package/packages/identity/src/themes/digimon.js +217 -0
  44. package/packages/identity/src/themes/dragonball.js +217 -0
  45. package/packages/identity/src/themes/lotr.js +217 -0
  46. package/packages/identity/src/themes/marvel.js +217 -0
  47. package/packages/identity/src/themes/pokemon.js +217 -0
  48. package/packages/identity/src/themes/starwars.js +217 -0
  49. package/packages/identity/src/themes/transformers.js +217 -0
  50. package/packages/identity/src/themes/zelda.js +217 -0
  51. package/packages/knowledge/README.md +217 -0
  52. package/packages/knowledge/src/index.js +243 -0
  53. package/packages/log/README.md +93 -0
  54. package/packages/log/src/index.js +252 -0
  55. package/packages/marker/README.md +200 -0
  56. package/packages/marker/src/index.js +184 -0
  57. package/packages/mcp/README.md +323 -0
  58. package/packages/mcp/instructions.md +126 -0
  59. package/packages/mcp/scaffolding/.agents/skills/loreli-context/SKILL.md +89 -0
  60. package/packages/mcp/scaffolding/ISSUE_TEMPLATE/config.yml +2 -0
  61. package/packages/mcp/scaffolding/ISSUE_TEMPLATE/loreli.yml +83 -0
  62. package/packages/mcp/scaffolding/loreli.yml +491 -0
  63. package/packages/mcp/scaffolding/mcp-configs/.codex/config.toml +4 -0
  64. package/packages/mcp/scaffolding/mcp-configs/.cursor/mcp.json +14 -0
  65. package/packages/mcp/scaffolding/mcp-configs/.mcp.json +14 -0
  66. package/packages/mcp/scaffolding/pull-request.md +23 -0
  67. package/packages/mcp/src/index.js +600 -0
  68. package/packages/mcp/src/tools/agent-context.js +44 -0
  69. package/packages/mcp/src/tools/agents.js +450 -0
  70. package/packages/mcp/src/tools/context.js +200 -0
  71. package/packages/mcp/src/tools/github.js +1163 -0
  72. package/packages/mcp/src/tools/hitl.js +162 -0
  73. package/packages/mcp/src/tools/index.js +18 -0
  74. package/packages/mcp/src/tools/refactor.js +227 -0
  75. package/packages/mcp/src/tools/repo.js +44 -0
  76. package/packages/mcp/src/tools/start.js +904 -0
  77. package/packages/mcp/src/tools/status.js +149 -0
  78. package/packages/mcp/src/tools/work.js +134 -0
  79. package/packages/orchestrator/README.md +192 -0
  80. package/packages/orchestrator/src/index.js +1492 -0
  81. package/packages/planner/README.md +251 -0
  82. package/packages/planner/prompts/plan-reviewer.md +109 -0
  83. package/packages/planner/prompts/planner.md +191 -0
  84. package/packages/planner/prompts/tiebreaker-reviewer.md +71 -0
  85. package/packages/planner/src/index.js +1381 -0
  86. package/packages/review/README.md +129 -0
  87. package/packages/review/prompts/reviewer.md +158 -0
  88. package/packages/review/src/index.js +1403 -0
  89. package/packages/risk/README.md +178 -0
  90. package/packages/risk/prompts/risk.md +272 -0
  91. package/packages/risk/src/index.js +439 -0
  92. package/packages/session/README.md +165 -0
  93. package/packages/session/src/index.js +215 -0
  94. package/packages/test-utils/README.md +96 -0
  95. package/packages/test-utils/src/index.js +354 -0
  96. package/packages/tmux/README.md +261 -0
  97. package/packages/tmux/src/index.js +501 -0
  98. package/packages/workflow/README.md +317 -0
  99. package/packages/workflow/prompts/preamble.md +14 -0
  100. package/packages/workflow/src/index.js +660 -0
  101. package/packages/workflow/src/proof-of-life.js +74 -0
  102. package/packages/workspace/README.md +143 -0
  103. package/packages/workspace/src/index.js +1127 -0
  104. package/index.js +0 -8
@@ -0,0 +1,904 @@
1
+ import { readFile, readdir } from 'node:fs/promises';
2
+ import { join, dirname } from 'node:path';
3
+ import { fileURLToPath } from 'node:url';
4
+ import { Config, check } from 'loreli/config';
5
+ import { hub as createHub, definitions } from 'loreli/hub';
6
+ import { logger, bind } from 'loreli/log';
7
+ import { models } from 'loreli/agent';
8
+ import { ENTRY, CODEX_TOML, prune as pruneWorkspaces } from 'loreli/workspace';
9
+ import { Tmux } from 'loreli/tmux';
10
+ import { responder } from 'loreli/workflow';
11
+ import { capability } from 'loreli/identity';
12
+ import { select } from './repo.js';
13
+
14
const log = logger('start');

// Resolve the bundled scaffolding directory relative to this module
// (packages/mcp/src/tools → packages/mcp/scaffolding).
const __dirname = dirname(fileURLToPath(import.meta.url));
const SCAFFOLDING = join(__dirname, '..', '..', 'scaffolding');

// Backoff schedule used while waiting for a freshly created merge-base
// branch to become visible through the hub's branch endpoint.
const MERGE_BASE_RETRIES = 5;
const MERGE_BASE_MIN_DELAY = 200;
const MERGE_BASE_MAX_DELAY = 2000;

/**
 * Manifest of required files that start discovers and repairs.
 *
 * Single-file entries carry a `source` string naming a scaffolding file.
 * Directory entries carry a `sourceDir` string naming a scaffolding
 * directory whose contents are each scaffolded as individual files.
 *
 * @type {Array<{key: string, target: string, source?: string, sourceDir?: string}>}
 */
const REQUIRED = [
  { key: 'pr', source: 'pull-request.md', target: '.github/pull_request_template.md' },
  { key: 'issue', sourceDir: 'ISSUE_TEMPLATE', target: '.github/ISSUE_TEMPLATE' },
  { key: 'config', source: 'loreli.yml', target: 'loreli.yml' },
  { key: 'contextSkill', source: '.agents/skills/loreli-context/SKILL.md', target: '.agents/skills/loreli-context/SKILL.md' }
];
37
+
38
export default {
  start: {
    title: 'Start',
    description: 'Initialize orchestration for a target GitHub repository. Validates access, discovers or scaffolds templates and loreli.yml, loads config, and reports the detected environment. Safe to re-run — only creates files that are absent.',
    schema: {
      type: 'object',
      properties: {
        repo: { type: 'string', description: 'Target repository in "owner/name" format.' },
        theme: {
          oneOf: [
            { type: 'string' },
            { type: 'array', items: { type: 'string' } }
          ],
          description: 'Agent theme or list of themes to randomize per work item. Valid: transformers, pokemon, marvel, digimon, starwars, lotr, dragonball, avatar, zelda.'
        },
        reviewers: {
          type: 'array', items: { type: 'string' },
          description: 'GitHub usernames for Human In The Loop. Empty = auto-merge.'
        }
      }
    },
    /**
     * Bootstrap the MCP server against a repository: validate inputs,
     * discover backends and existing files, scaffold anything missing,
     * load config, wire the orchestrator/workflow reactors, and start
     * the reactor loop.
     *
     * @param {object} args - Tool arguments.
     * @param {object} ctx - Execution context (hub, registry, storage, etc.).
     * @returns {Promise<object>} Start result with environment info and templates.
     */
    async exec(args, ctx) {
      // Validate the theme up front so a typo fails fast, before any I/O.
      if (args.theme) check.theme(args.theme);
      const repo = select(args.repo, ctx);
      if (!repo) {
        return {
          content: [{ type: 'text', text: 'No repository configured. Pass --repo, set LORELI_REPO, or set repo in loreli.yml.' }],
          isError: true
        };
      }

      // Agent MCP servers (spawned from .mcp.json by Claude Code, Codex, etc.)
      // are hydrated from env vars during _hydrate(). If a session ID is
      // already set, this is an agent server — not a parent orchestrator.
      // Re-starting would overwrite the session context and break
      // the agent's ability to resolve its identity and task.
      if (ctx.sessionId && ctx.agentName) {
        log.info(`start skipped: agent server for ${ctx.agentName} (session: ${ctx.sessionId})`);
        return {
          content: [{
            type: 'text',
            text: [
              `This is an agent server for ${ctx.agentName} — start is not needed.`,
              'Your session, identity, and repository context are already configured.',
              'Use the plan, pr, and comment tools to interact with the repository.'
            ].join('\n')
          }]
        };
      }

      log.info(`starting: ${repo}`);

      // 0. Create hub if not yet attached (token comes from env or config)
      if (!ctx.hub) {
        ctx.hub = createHub({ config: ctx.config });
      }

      // 1. Discover environment + existing files in parallel
      const backendRegistry = ctx.backendRegistry;

      // Note: `contextSkill` is absent from the initial template map but is
      // assigned dynamically below when its probe succeeds — REQUIRED's
      // `!templates[entry.key]` check relies on undefined being falsy.
      const templates = { pr: null, issue: null, codeowners: null, contributing: null, config: null };
      const checks = [
        ['.github/pull_request_template.md', 'pr'],
        ['.github/PULL_REQUEST_TEMPLATE.md', 'pr'],
        ['.github/ISSUE_TEMPLATE', 'issue'],
        ['CODEOWNERS', 'codeowners'],
        ['CONTRIBUTING.md', 'contributing'],
        ['loreli.yml', 'config'],
        ['.agents/skills/loreli-context/SKILL.md', 'contextSkill']
      ];

      // Probe each path; a successful read maps to the path itself,
      // a failed read maps to null (missing file is not an error here).
      const [/* discover result */, ...fileResults] = await Promise.all([
        backendRegistry.discover(),
        ...checks.map(function probe([path]) {
          return ctx.hub.read(repo, path).then(function found() { return path; }, function miss() { return null; });
        })
      ]);

      for (let i = 0; i < checks.length; i += 1) {
        if (fileResults[i]) templates[checks[i][1]] = fileResults[i];
      }

      const backends = backendRegistry.available();
      const providers = backendRegistry.providers();
      const sideInfo = capability(providers);

      // Two distinct providers → cross-review ("yin-yang"); one provider
      // → a fresh instance reviews; none → error.
      let strategy = 'error';
      if (sideInfo.mode === 'dual') strategy = 'yin-yang';
      else if (sideInfo.mode === 'single') strategy = 'fresh-instance';

      log.info(`backends: ${backends.map(function name(b) { return b.name; }).join(', ') || 'none'}, strategy: ${strategy}`);

      // 2. Scaffold any missing required files
      const scaffolded = [];
      const toScaffold = [];

      for (const entry of REQUIRED) {
        if (!templates[entry.key]) {
          if (entry.sourceDir) {
            // Directory-based template: read all files in the source directory
            const dir = join(SCAFFOLDING, entry.sourceDir);
            const files = await readdir(dir);
            for (const file of files) {
              const content = await readFile(join(dir, file), 'utf8');
              toScaffold.push({ path: `${entry.target}/${file}`, content });
            }
          } else {
            const content = await readFile(join(SCAFFOLDING, entry.source), 'utf8');
            toScaffold.push({ path: entry.target, content });
          }
          scaffolded.push(entry.target);
        }
      }

      // Inline: scaffold() was a simple hub iteration over write()
      if (toScaffold.length) {
        log.info(`scaffolding ${toScaffold.length} files: ${scaffolded.join(', ')}`);
        for (const file of toScaffold) {
          await ctx.hub.write(repo, file.path, {
            content: file.content,
            message: `chore: scaffold ${file.path}`
          });
        }
      }

      // 3. Run ensure* operations and config load in parallel — each
      // targets different files so there are no ordering dependencies.
      // NOTE(review): ensureDependency/ensureGitignore are presumably
      // defined further down this file — verify they exist before refactor.
      const config = new Config();
      await Promise.all([
        ensureDependency(ctx.hub, repo, scaffolded),
        ensureMcpConfigs(ctx.hub, repo, scaffolded, backendRegistry.configPaths()),
        ensureGitignore(ctx.hub, repo, scaffolded),
        config.load(ctx.hub, repo)
      ]);

      // 4. Merge start param overrides (tool args win over loreli.yml)
      const overrides = {};
      if (args.theme) overrides.theme = args.theme;
      if (args.reviewers) overrides.reviewers = args.reviewers;
      config.merge(overrides);

      const theme = config.get('theme');
      const mergeBase = config.get('merge.base') ?? 'main';

      // 5. Ensure merge base and labels in parallel — both depend on
      // config but are independent of each other.
      let ensuredLabels = [];
      const labelPromise = (async function ensureLabels() {
        if (config.get('labels.track') === false) return [];
        const labelNames = [
          'loreli',
          'loreli:planner', 'loreli:action', 'loreli:reviewer',
          'loreli:approved', 'loreli:changes-requested'
        ];
        for (const backend of backends) {
          if (backend.provider && backend.provider !== 'unknown') {
            labelNames.push(`loreli:${backend.provider}`);
          }
        }
        const extra = config.get('labels.extra') ?? [];
        for (const name of extra) labelNames.push(name);

        // Dedupe before hitting the API — duplicate ensures are wasted calls.
        const unique = [...new Set(labelNames)];
        return ctx.hub.ensure(repo, definitions(unique));
      })();

      const [baseProvision, labels] = await Promise.all([
        ensureMergeBase(ctx.hub, repo, mergeBase),
        labelPromise
      ]);
      ensuredLabels = labels;

      // 6. Initialize session storage
      const sessionId = `${repo.replace('/', '-')}-${Date.now()}`;
      await ctx.storage.init(sessionId, config.toJSON());

      // 7. Prune stale sessions, orphaned workspaces, and stale tmux sessions
      if (config.get('cleanup.autoprune') !== false) {
        const retention = config.get('cleanup.retention');
        try {
          const pruned = await ctx.storage.prune(retention);
          if (pruned.length) log.info(`pruned ${pruned.length} stale sessions`);
          const wsRemoved = await pruneWorkspaces(sessionId);
          if (wsRemoved.length) log.info(`pruned ${wsRemoved.length} orphaned workspaces`);
          const swept = await ctx.storage.sweep();
          if (swept.length) log.info(`swept ${swept.length} stray files: ${swept.join(', ')}`);

          // Destroy tmux sessions left behind by a previous MCP server.
          // A stale tmux session contains agent panes running with outdated
          // env (API keys, budgets, tokens). Spawning new agents into it
          // inherits that stale env from the tmux server process.
          if (Tmux.available()) {
            const tmux = new Tmux();
            const sessionName = config.get('tmux.session');
            const result = await tmux.reap(sessionName);
            if (result.killed) {
              log.info(`reaped stale tmux session "${sessionName}" (${result.panes} panes)`);
            }
          }
        } catch (err) {
          // Cleanup is best-effort by design — startup must not fail on it.
          log.warn(`cleanup failed (non-fatal): ${err.message}`);
        }
      }

      // 8. Bind the logging subsystem to this session and config
      bind({ session: sessionId, config });
      log.info(`session initialized: ${sessionId}`);

      // Wire rate limit warnings to the logger.
      // NOTE(review): the prototype lookup throws if the hub has a null
      // prototype, and only inspects one prototype level — presumably
      // `'onRateLimitWarning' in ctx.hub` was intended; confirm hub shape.
      if (ctx.hub.onRateLimitWarning !== undefined || Object.getOwnPropertyDescriptor(Object.getPrototypeOf(ctx.hub), 'onRateLimitWarning')) {
        ctx.hub.onRateLimitWarning = function warnRateLimit(info) {
          log.warn(`rate limit low: ${info.remaining}/${info.limit} (${Math.round(info.ratio * 100)}%), resets ${info.reset}`);
        };
      }

      // Attach config and repo to context for downstream tools
      ctx.config = config;
      ctx.repo = repo;
      ctx.sessionId = sessionId;

      // Wire the orchestrator with the now-available hub, config, and repo
      if (ctx.orchestrator) {
        ctx.orchestrator.hub = ctx.hub;
        ctx.orchestrator.cfg = ctx.config;
        ctx.orchestrator.sessionId = sessionId;
        ctx.orchestrator.repo = repo;
        ctx.orchestrator.clientIdentity = ctx.clientIdentity ?? null;
      }

      // Wire workflow instances with the now-available hub
      if (ctx.planner) ctx.planner.hub = ctx.hub;
      if (ctx.action) ctx.action.hub = ctx.hub;
      if (ctx.risk) ctx.risk.hub = ctx.hub;
      if (ctx.review) ctx.review.hub = ctx.hub;

      // Reactor registration order is significant: risk runs BEFORE review
      // so labels are applied before scan() checks for them.
      if (ctx.risk && ctx.orchestrator) {
        const handlers = ctx.risk.reactor();
        for (const [name, handler] of Object.entries(handlers)) {
          ctx.orchestrator.register(name, handler);
        }
      }

      // Register review workflow reactor handlers with the orchestrator
      if (ctx.review && ctx.orchestrator) {
        const handlers = ctx.review.reactor();
        for (const [name, handler] of Object.entries(handlers)) {
          ctx.orchestrator.register(name, handler);
        }
      }

      // Register planner workflow reactor handlers
      if (ctx.planner && ctx.orchestrator) {
        const handlers = ctx.planner.reactor();
        for (const [name, handler] of Object.entries(handlers)) {
          ctx.orchestrator.register(name, handler);
        }
      }

      // Register action workflow reactor handlers (dispatch dormant agents)
      if (ctx.action && ctx.orchestrator) {
        const handlers = ctx.action.reactor();
        for (const [name, handler] of Object.entries(handlers)) {
          ctx.orchestrator.register(name, handler);
        }
      }

      // Register proof-of-life responder — scans issues/PRs for requests
      // targeting our agents and posts health check responses.
      if (ctx.orchestrator && ctx.hub) {
        ctx.orchestrator.register('proof-of-life', responder(ctx.orchestrator, ctx.hub));
      }

      // Knowledge reactor — detect recurring feedback patterns and
      // dispatch planning via planner when threshold is met.
      if (ctx.orchestrator && ctx.hub) {
        // The `repo` parameter deliberately shadows the outer binding: the
        // reactor supplies the repo each tick.
        ctx.orchestrator.register('knowledge', async function knowledge(repo) {
          const cfg = ctx.orchestrator.cfg;
          if (!(cfg?.get?.('feedback.enabled') ?? true)) return;

          const threshold = cfg?.get?.('feedback.threshold') ?? 5;
          const { patterns, objective } = await import('loreli/knowledge');

          const found = await patterns(ctx.hub, repo, { threshold });

          // Skip patterns that already have an open discussion, or that
          // this process has already dispatched (in-memory dedupe).
          const cat = await ctx.hub.category(repo, 'Loreli');
          const discussions = await ctx.hub.discussions(repo, cat.id);
          const openTitles = new Set(
            discussions.filter(function open(d) { return !d.closed; })
              .map(function title(d) { return d.title; })
          );

          for (const pattern of found) {
            const title = `${pattern.category} feedback pattern`;
            if (openTitles.has(title) || ctx._feedbackDispatched?.has(pattern.category)) continue;

            await ctx.planner.plan(repo, objective(pattern), {
              feedbackCategory: pattern.category
            });

            ctx._feedbackDispatched ??= new Set();
            ctx._feedbackDispatched.add(pattern.category);
          }
        });
      }

      // Register workflows with orchestrator for demand-driven scaling.
      // Each workflow reports its demand signal so scale() can spawn
      // agents proportional to current workload.
      if (ctx.orchestrator) {
        const roleMap = { risk: ctx.risk, review: ctx.review, planner: ctx.planner, action: ctx.action };
        for (const [role, workflow] of Object.entries(roleMap)) {
          if (workflow) ctx.orchestrator.workflows.set(role, workflow);
        }

        // Demand-driven scaling — runs after all workflow handlers so
        // demand signals reflect the latest hydrated state. Spawns agents
        // to fill deficits respecting global/per-role caps and cooldowns.
        ctx.orchestrator.register('scale', async function scale(repo) {
          await ctx.orchestrator.scale(repo);
        });
      }

      // Global reap safety net — catches dormant agents that the
      // event-driven handlers (promote, land) missed. Runs last
      // in the reactor chain.
      if (ctx.orchestrator) {
        ctx.orchestrator.register('reap', async function reap(repo) {
          await ctx.orchestrator.reap(repo);
        });
      }

      // Wire workflow event listeners to orchestrator events
      for (const workflow of [ctx.planner, ctx.action, ctx.risk, ctx.review].filter(Boolean)) {
        const listeners = workflow.events();
        for (const [event, handler] of Object.entries(listeners)) {
          ctx.orchestrator?.on(event, handler);
        }
      }

      // Start the reactor loop — every started MCP instance joins
      // the distributed system. The reactor polls for unclaimed issues,
      // PRs needing review, and plans needing verdict. Agents are spawned
      // on demand and reaped when idle.
      if (ctx.orchestrator) {
        ctx.orchestrator.watch(repo);
        ctx.orchestrator.monitor();
      }

      // Dynamically update the add_agent tool description with discovered
      // backends and model aliases so MCP clients see what's available.
      if (ctx.server?.updateTool) {
        const backendList = backends.map(function name(b) { return `${b.name} (${b.provider})`; }).join(', ');
        const aliases = [...new Set(backends.flatMap(function aliases(b) { return models.list(b.name, ctx.config); }))];
        const modelList = aliases.join(', ');
        const providerList = providers.join(', ');

        ctx.server.updateTool('add_agent', {
          description: [
            'Add an agent to the team. Acquires a themed identity, selects a backend, and prepares the agent for spawning.',
            `Available backends: ${backendList || 'none detected'}.`,
            `Model aliases: ${modelList}.`,
            `Providers: ${providerList || 'none detected'}.`
          ].join(' ')
        });
      }

      // 9. Build found list (template keys whose probe succeeded)
      const found = Object.entries(templates)
        .filter(function exists([, v]) { return v; })
        .map(function entry([k]) { return k; });

      return {
        content: [{
          type: 'text',
          text: [
            `Started: ${repo}`,
            `Session: ${sessionId}`,
            `Theme: ${theme}`,
            `Backends: ${backends.map(function name(b) { return `${b.name} (${b.provider})`; }).join(', ') || 'none detected'}`,
            `Review strategy: ${strategy}`,
            `Merge HITL: ${config.get('merge.hitl')}`,
            baseProvision.created ? `Merge base provisioned: ${mergeBase} (from ${baseProvision.from})` : '',
            found.length ? `Found: ${found.join(', ')}` : '',
            scaffolded.length ? `Scaffolded: ${scaffolded.join(', ')}` : '',
            ensuredLabels.length ? `Labels ensured: ${ensuredLabels.join(', ')}` : ''
          ].filter(Boolean).join('\n')
        }]
      };
    }
  },

  environment: {
    title: 'Environment',
    description: 'Report the detected environment: tmux version, available agent backends, providers, and review strategy.',
    schema: { type: 'object', properties: {} },
    /**
     * Report detected backends, providers, capability mode, and tmux
     * availability as pretty-printed JSON.
     *
     * @param {object} _args - Tool arguments (none).
     * @param {object} ctx - Execution context.
     * @returns {Promise<object>} Environment report.
     */
    async exec(_args, ctx) {
      try {
        const backendRegistry = ctx.backendRegistry;
        await backendRegistry.discover();

        return {
          content: [{
            type: 'text',
            text: JSON.stringify({
              backends: backendRegistry.available(),
              providers: backendRegistry.providers(),
              capability: capability(backendRegistry.providers()),
              tmux: (await import('loreli/tmux')).Tmux.available()
            }, null, 2)
          }]
        };
      } catch (err) {
        return {
          content: [{ type: 'text', text: `Environment detection failed: ${err.message}` }],
          isError: true
        };
      }
    }
  }
};
470
+
471
/**
 * Report whether an error from a hub call represents a missing resource.
 *
 * @param {any} err - Error object from hub calls.
 * @returns {boolean} True when the response status is 404.
 */
function isNotFound(err) {
  const status = err?.status;
  return status === 404;
}
480
+
481
/**
 * Report whether branch creation raced with another caller.
 *
 * GitHub signals the collision as a 422 whose body either contains the
 * phrase "reference already exists" or carries an `errors` array with an
 * `already_exists` entry.
 *
 * @param {any} err - Error object from hub.fork().
 * @returns {boolean} True when the remote branch already exists.
 */
function isBranchExists(err) {
  if (err?.status !== 422) return false;

  const data = err?.response?.data ?? '';
  const serialized = JSON.stringify(data).toLowerCase();
  if (serialized.includes('reference already exists')) return true;

  const details = data?.errors;
  if (!Array.isArray(details)) return false;

  const flagsCollision = (item) =>
    item?.code === 'already_exists' ||
    item?.message?.toLowerCase?.().includes('already exists');
  return details.some(flagsCollision);
}
499
+
500
/**
 * Ensure the configured merge base branch exists in the remote repository.
 * When missing, create it from the repository default branch and wait for
 * it to become visible.
 *
 * @param {object} hub - Hub instance.
 * @param {string} repo - "owner/name" repository.
 * @param {string} base - Configured merge base branch.
 * @param {object} [options] - Retry options forwarded to waitForBranch.
 * @returns {Promise<{created: boolean, from: string|null}>}
 */
async function ensureMergeBase(hub, repo, base, options = {}) {
  // Bail out quietly when there is no base or the hub lacks the
  // capabilities this provisioning path needs.
  const unsupported =
    !base ||
    typeof hub?.branch !== 'function' ||
    typeof hub?.repo !== 'function' ||
    typeof hub?.fork !== 'function';
  if (unsupported) return { created: false, from: null };

  // Fast path: the branch already exists.
  try {
    await hub.branch(repo, base);
    return { created: false, from: null };
  } catch (err) {
    if (!isNotFound(err)) throw err;
  }

  const info = await hub.repo(repo);
  const from = info?.default_branch ?? 'main';

  // Create the branch; tolerate a concurrent creator winning the race.
  try {
    await hub.fork(repo, { name: base, from });
    log.info(`start: provisioned merge base "${base}" from "${from}"`);
  } catch (err) {
    if (!isBranchExists(err)) throw err;
    log.info(`start: merge base "${base}" already exists (race), continuing`);
  }

  await waitForBranch(hub, repo, base, options);
  return { created: true, from };
}
536
+
537
/**
 * Wait until a branch becomes visible through the branch endpoint.
 * This absorbs short post-create consistency gaps from the GitHub API
 * using capped exponential backoff between reads.
 *
 * @param {object} hub - Hub instance.
 * @param {string} repo - "owner/name" repository.
 * @param {string} base - Branch name to verify.
 * @param {object} [options] - Retry options.
 * @param {number} [options.retries=5] - Maximum branch-read attempts.
 * @param {number} [options.minDelay=200] - Initial delay in milliseconds.
 * @param {number} [options.maxDelay=2000] - Maximum backoff delay in milliseconds.
 * @param {(ms: number) => Promise<void>} [options.wait] - Delay function override for tests.
 * @returns {Promise<void>}
 */
async function waitForBranch(hub, repo, base, options = {}) {
  const retries = options.retries ?? MERGE_BASE_RETRIES;
  const minDelay = options.minDelay ?? MERGE_BASE_MIN_DELAY;
  const maxDelay = options.maxDelay ?? MERGE_BASE_MAX_DELAY;

  let attempt = 0;
  while (attempt < retries) {
    try {
      await hub.branch(repo, base);
      return;
    } catch (err) {
      // Only 404s are retried; anything else (and 404 on the final
      // attempt) propagates to the caller.
      if (!isNotFound(err) || attempt === retries - 1) throw err;
      const backoff = Math.min(minDelay * (2 ** attempt), maxDelay);
      log.debug(`start: merge base "${base}" not yet visible (${attempt + 1}/${retries}) — waiting ${backoff}ms`);
      await pause(backoff, options.wait);
    }
    attempt += 1;
  }
}
568
+
569
/**
 * Wait for a number of milliseconds.
 *
 * @param {number} ms - Delay duration.
 * @param {(ms: number) => Promise<void>} [wait] - Optional delay override.
 * @returns {Promise<void>}
 */
async function pause(ms, wait) {
  // A caller-supplied delay function (used by tests) replaces the timer.
  if (typeof wait === 'function') {
    await wait(ms);
    return;
  }

  await new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
586
+
587
// Named helper exports alongside the default tool map above.
export { ensureMergeBase, normalizeCodexEnvVars };
588
+
589
/**
 * Ensure `loreli` exists in the target repo's devDependencies.
 *
 * - If `package.json` doesn't exist, creates one with `{ devDependencies: { loreli: "*" } }`.
 * - If it exists but `loreli` is missing from devDependencies, reads the
 *   current content, merges the dependency, and writes back — preserving
 *   all existing fields.
 * - If `loreli` is already present, does nothing.
 *
 * NOTE(review): this JSDoc appears to document ensureDependency(), which
 * is not directly below it — presumably defined later in the file; verify
 * and move this comment next to the function.
 *
 * @param {object} hub - GitHub hub instance.
 * @param {string} repo - "owner/name" repository.
 * @param {string[]} scaffolded - Mutable list of scaffolded file descriptions (for reporting).
 * @returns {Promise<void>}
 */
// MCP config constants (ENTRY, CODEX_TOML) imported from @loreli/workspace
// LORELI_TOML needs a leading newline for appending to existing TOML files
// CODEX_ENV_VARS is the assignment inserted into [mcp_servers.loreli] by
// normalizeCodexEnvVars() below.
const CODEX_ENV_VARS = 'env_vars = ["GITHUB_TOKEN"]';
const LORELI_TOML = `\n${CODEX_TOML}`;
607
+
608
/**
 * Find the next TOML table header index at or after `start`.
 *
 * @param {string[]} lines - TOML content split by line.
 * @param {number} start - Start index.
 * @returns {number} Index of next table header or lines.length.
 */
function nextTomlTable(lines, start) {
  const header = /^\[[^\]]+\]\s*$/;
  let index = start;
  while (index < lines.length) {
    if (header.test(lines[index].trim())) return index;
    index += 1;
  }
  return lines.length;
}
621
+
622
/**
 * Find a TOML table header line index.
 *
 * @param {string[]} lines - TOML content split by line.
 * @param {number} start - Start index.
 * @param {string} header - Exact table header text.
 * @returns {number} Header index or -1 when missing.
 */
function findTomlHeader(lines, start, header) {
  // Compare trimmed lines so indented headers still match exactly.
  const offset = lines.slice(start).findIndex((line) => line.trim() === header);
  return offset === -1 ? -1 : start + offset;
}
636
+
637
/**
 * Normalize `.codex/config.toml` loreli token forwarding.
 *
 * Ensures `env_vars = ["GITHUB_TOKEN"]` is defined in the
 * `[mcp_servers.loreli]` table and not inside the nested
 * `[mcp_servers.loreli.env]` table.
 *
 * @param {string} content - Existing TOML content.
 * @returns {string|null} Updated content when changed, else null.
 */
function normalizeCodexEnvVars(content) {
  // Dotted-key form already forwards the token — nothing to normalize.
  if (/^\s*mcp_servers\.loreli\.env_vars\s*=.*$/m.test(content)) return null;

  const lines = content.split('\n');
  const loreli = findTomlHeader(lines, 0, '[mcp_servers.loreli]');
  if (loreli === -1) return null;

  let changed = false;
  let env = findTomlHeader(lines, loreli + 1, '[mcp_servers.loreli.env]');

  // Strip any misplaced env_vars assignment out of the nested env table.
  if (env !== -1) {
    const envEnd = nextTomlTable(lines, env + 1);
    const body = lines.slice(env + 1, envEnd);
    const filtered = body.filter((line) => !/^\s*env_vars\s*=/.test(line));
    if (filtered.length !== body.length) {
      lines.splice(env + 1, envEnd - env - 1, ...filtered);
      changed = true;
      env = findTomlHeader(lines, loreli + 1, '[mcp_servers.loreli.env]');
    }
  }

  // The loreli table runs up to the env sub-table (when present) or the
  // next table header otherwise.
  const loreliEnd = env === -1 ? nextTomlTable(lines, loreli + 1) : env;
  const present = lines
    .slice(loreli + 1, loreliEnd)
    .some((line) => /^\s*env_vars\s*=/.test(line));

  if (!present) {
    lines.splice(loreliEnd, 0, CODEX_ENV_VARS);
    changed = true;
  }

  return changed ? lines.join('\n') : null;
}
683
+
684
/**
 * Ensure the loreli MCP server is configured in all supported agent CLI
 * config files (.mcp.json, .cursor/mcp.json, .codex/config.toml).
 *
 * For each config file:
 * - If it doesn't exist, creates it with the loreli server entry.
 * - If it exists but loreli is missing, merges/appends the entry.
 * - If loreli is already configured, does nothing (except the Codex TOML,
 *   which is additionally checked for misplaced env_vars forwarding).
 *
 * JSON configs are parsed, merged, and re-serialized to preserve
 * existing server entries. TOML configs are appended (TOML sections
 * are order-independent).
 *
 * @param {object} hub - GitHub hub instance.
 * @param {string} repo - "owner/name" repository.
 * @param {string[]} scaffolded - Mutable list of scaffolded file descriptions.
 * @param {Array<{path: string, format: string, marker: string}>} configs - Config entries from backend registry.
 * @returns {Promise<void>}
 */
async function ensureMcpConfigs(hub, repo, scaffolded, configs) {
  for (const entry of configs) {
    let existing;
    try {
      existing = await hub.read(repo, entry.path);
    } catch { /* file doesn't exist */ }

    if (!existing) {
      // File doesn't exist — create from template. Templates live under
      // SCAFFOLDING/mcp-configs mirroring each entry's repo path.
      const content = await readFile(join(SCAFFOLDING, 'mcp-configs', entry.path), 'utf8');
      await hub.write(repo, entry.path, {
        content,
        message: `chore: scaffold ${entry.path} with loreli MCP server`
      });
      scaffolded.push(entry.path);
      log.info(`created ${entry.path} with loreli MCP server`);
      continue;
    }

    // File exists — check if loreli is already configured. This is a plain
    // substring check against entry.marker, not a structural parse.
    if (existing.content.includes(entry.marker)) {
      if (entry.path === '.codex/config.toml') {
        // Even when the server entry exists, older Codex configs may have
        // env_vars missing or nested in the wrong table — normalize it.
        const updated = normalizeCodexEnvVars(existing.content);
        if (updated) {
          await hub.write(repo, entry.path, {
            content: updated,
            message: 'chore: add codex env_vars forwarding for loreli MCP server'
          });
          scaffolded.push(`${entry.path} (added env_vars forwarding)`);
          log.info(`normalized env_vars forwarding in existing ${entry.path}`);
          continue;
        }
      }

      log.info(`loreli already in ${entry.path} — skipping`);
      continue;
    }

    // Merge loreli into existing config
    if (entry.format === 'json') {
      // JSON: parse and merge so existing mcpServers entries survive.
      const config = JSON.parse(existing.content);
      config.mcpServers = { ...config.mcpServers, loreli: ENTRY };
      await hub.write(repo, entry.path, {
        content: JSON.stringify(config, null, 2) + '\n',
        message: `chore: add loreli MCP server to ${entry.path}`
      });
    } else {
      // TOML: append the section (order-independent in TOML)
      const updated = existing.content.trimEnd() + LORELI_TOML;
      await hub.write(repo, entry.path, {
        content: updated,
        message: `chore: add loreli MCP server to ${entry.path}`
      });
    }

    scaffolded.push(`${entry.path} (added loreli)`);
    log.info(`added loreli to existing ${entry.path}`);
  }
}
762
+
763
/**
 * Ensure package.json declares loreli as a devDependency and defines an
 * `npm test` script, scaffolding the file when it does not exist.
 *
 * Existing devDependencies and scripts are preserved; only the missing
 * entries are added. When nothing is missing, no write is performed.
 *
 * @param {object} hub - GitHub hub instance.
 * @param {string} repo - "owner/name" repository.
 * @param {string[]} scaffolded - Mutable list of scaffolded file descriptions.
 * @returns {Promise<void>}
 */
async function ensureDependency(hub, repo, scaffolded) {
  let existing;
  try {
    existing = await hub.read(repo, 'package.json');
  } catch { /* file doesn't exist */ }

  const pkg = existing ? JSON.parse(existing.content) : {};
  const changes = [];

  if (!pkg.devDependencies?.loreli) {
    pkg.devDependencies = { ...pkg.devDependencies, loreli: '*' };
    changes.push('devDependency:loreli');
  }

  if (!pkg.scripts?.test) {
    pkg.scripts = { ...pkg.scripts, test: 'node --test' };
    changes.push('script:test');
  }

  // Nothing to do — avoid an unnecessary commit.
  if (changes.length === 0) {
    log.info('package.json already has loreli devDependency and npm test script — skipping');
    return;
  }

  const summary = changes.join(', ');
  await hub.write(repo, 'package.json', {
    content: JSON.stringify(pkg, null, 2) + '\n',
    message: existing
      ? `chore: update package.json (${summary})`
      : 'chore: scaffold package.json with loreli and npm test script'
  });

  if (existing) {
    scaffolded.push(`package.json (updated ${summary})`);
    log.info(`updated package.json (${summary})`);
  } else {
    scaffolded.push('package.json (created with loreli and npm test script)');
    log.info('created package.json with loreli devDependency and npm test script');
  }
}
806
+
807
/**
 * Marker string used to detect whether the Loreli gitignore block is
 * already present in an existing `.gitignore`. Checked via
 * `String.includes()`, so any line containing `.loreli/` counts.
 * @type {string}
 */
const GITIGNORE_MARKER = '.loreli/';
813
+
814
/**
 * Ensure `.gitignore` contains Loreli artifact ignore rules.
 *
 * Prevents agent-generated files (`.loreli/`, `.claude/`, `.codex/`,
 * `.cursor/hooks.json`) from leaking into PRs. This is the primary
 * defense — committed to the repo, it survives clones and works
 * regardless of how agents run git.
 *
 * - If `.gitignore` doesn't exist, creates it from the scaffolding template.
 * - If it exists but doesn't contain the marker, appends the Loreli block.
 * - If the marker is already present, does nothing.
 *
 * Concurrent writers (shared test repos) can race between our read and
 * write; both the create and append paths handle a single 409 conflict
 * by re-reading and retrying once.
 *
 * @param {object} hub - GitHub hub instance.
 * @param {string} repo - "owner/name" repository.
 * @param {string[]} scaffolded - Mutable list of scaffolded file descriptions.
 * @returns {Promise<void>}
 */
async function ensureGitignore(hub, repo, scaffolded) {
  const block = await readFile(join(SCAFFOLDING, '.gitignore'), 'utf8');

  let existing;
  try {
    existing = await hub.read(repo, '.gitignore');
  } catch { /* file doesn't exist */ }

  if (!existing) {
    // Create path: attempt the scaffold write; a 409 means another writer
    // created the file first, so fall through to the merge path below.
    try {
      await hub.write(repo, '.gitignore', {
        content: block,
        message: 'chore: scaffold .gitignore with loreli artifact rules'
      });
      scaffolded.push('.gitignore');
      log.info('created .gitignore with loreli artifact rules');
      return;
    } catch (err) {
      if (err?.status !== 409) throw err;
      log.warn('ensureGitignore: create conflict detected, reloading and retrying merge path');
    }

    // Reload whatever the competing writer produced.
    try {
      existing = await hub.read(repo, '.gitignore');
    } catch { /* file may still not exist after conflict */ }

    // Still absent — retry the create once, this time letting errors throw.
    if (!existing) {
      await hub.write(repo, '.gitignore', {
        content: block,
        message: 'chore: scaffold .gitignore with loreli artifact rules'
      });
      scaffolded.push('.gitignore');
      log.info('created .gitignore with loreli artifact rules');
      return;
    }
  }

  if (existing.content.includes(GITIGNORE_MARKER)) {
    log.info('loreli already in .gitignore — skipping');
    return;
  }

  // Append the Loreli block, separated by a blank line so it's visually
  // distinct from existing entries.
  const withRules = (current) => current.trimEnd() + '\n\n' + block;

  try {
    await hub.write(repo, '.gitignore', {
      content: withRules(existing.content),
      message: 'chore: add loreli artifact rules to .gitignore'
    });
    scaffolded.push('.gitignore (added loreli rules)');
    log.info('added loreli artifact rules to existing .gitignore');
  } catch (err) {
    if (err?.status !== 409) throw err;

    // Another writer landed between our read and write. Re-read once:
    // if loreli rules are already present, we're done; otherwise retry.
    const latest = await hub.read(repo, '.gitignore');
    if (latest.content.includes(GITIGNORE_MARKER)) {
      log.info('loreli artifact rules already present after conflict — skipping');
      return;
    }

    await hub.write(repo, '.gitignore', {
      content: withRules(latest.content),
      message: 'chore: add loreli artifact rules to .gitignore'
    });
    scaffolded.push('.gitignore (added loreli rules)');
    log.info('added loreli artifact rules to existing .gitignore (after conflict retry)');
  }
}