xab 12.0.0 → 14.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +189 -13
- package/package.json +2 -2
package/dist/index.js
CHANGED
|
@@ -201,7 +201,7 @@ function buildRepoContext(repoPath, config) {
|
|
|
201
201
|
const docPaths = discoverDocPaths(repoPath);
|
|
202
202
|
return { structure, instructions, docPaths };
|
|
203
203
|
}
|
|
204
|
-
function buildCommitContext(repoPath, repoCtx, config, touchedPaths, commitMessage) {
|
|
204
|
+
function buildCommitContext(repoPath, repoCtx, config, touchedPaths, commitMessage, memoryBlock) {
|
|
205
205
|
const includedFiles = [];
|
|
206
206
|
const sections = [];
|
|
207
207
|
const rs = repoCtx.structure;
|
|
@@ -214,6 +214,9 @@ function buildCommitContext(repoPath, repoCtx, config, touchedPaths, commitMessa
|
|
|
214
214
|
structLines.push(`Packages: ${rs.packages.join(", ")}`);
|
|
215
215
|
sections.push(structLines.join(`
|
|
216
216
|
`));
|
|
217
|
+
if (memoryBlock) {
|
|
218
|
+
sections.push(memoryBlock);
|
|
219
|
+
}
|
|
217
220
|
for (const [name, content] of repoCtx.instructions) {
|
|
218
221
|
sections.push(`--- ${name} ---
|
|
219
222
|
${content}`);
|
|
@@ -585,7 +588,18 @@ You are looking at a worktree based on the TARGET branch "${opts.targetBranch}".
|
|
|
585
588
|
- New services, containers, or infrastructure to deploy? \u2192 note what
|
|
586
589
|
- Config files that need manual updates on servers? \u2192 note which
|
|
587
590
|
- Dependencies on external services being added or removed? \u2192 note what
|
|
588
|
-
- If the commit is just normal code changes that only need a deploy+restart, leave opsNotes as []
|
|
591
|
+
- If the commit is just normal code changes that only need a deploy+restart, leave opsNotes as []
|
|
592
|
+
8. Record any non-obvious discoveries that would help analyze FUTURE commits:
|
|
593
|
+
- Path mappings between source and target branches (e.g. "frontend/ in source = apps/frontend/ in target")
|
|
594
|
+
- Key functions, helpers, or patterns you found (e.g. "betExitValueLocal() is the shared P&L helper")
|
|
595
|
+
- Architectural facts (e.g. "store exports are at the bottom of fastMarkets.ts")
|
|
596
|
+
- Only include genuinely useful, non-obvious facts. Do NOT repeat things already in the merge memory or repo docs.
|
|
597
|
+
- Leave discoveries as [] if nothing new and non-obvious was found.
|
|
598
|
+
9. If merge memory entries were provided in the context above, evaluate each one:
|
|
599
|
+
- List the KEYS of entries you want to KEEP in keepMemoryKeys
|
|
600
|
+
- Drop entries that are stale, obvious from repo docs, or no longer relevant
|
|
601
|
+
- Keep entries that saved you time or would save time on similar future commits
|
|
602
|
+
- If no memory was provided, return keepMemoryKeys as []`;
|
|
589
603
|
let response;
|
|
590
604
|
if (diffChunks.length > 1) {
|
|
591
605
|
await thread.run(firstPrompt);
|
|
@@ -613,7 +627,9 @@ You now have the complete diff. Analyze and produce your structured response.`,
|
|
|
613
627
|
reasoning: "Could not parse structured output",
|
|
614
628
|
applicationStrategy: "Manual review recommended",
|
|
615
629
|
affectedComponents: [],
|
|
616
|
-
opsNotes: []
|
|
630
|
+
opsNotes: [],
|
|
631
|
+
discoveries: [],
|
|
632
|
+
keepMemoryKeys: []
|
|
617
633
|
});
|
|
618
634
|
}
|
|
619
635
|
async function applyCommit(opts) {
|
|
@@ -778,9 +794,28 @@ var init_codex = __esm(() => {
|
|
|
778
794
|
type: "array",
|
|
779
795
|
items: { type: "string" },
|
|
780
796
|
description: "Operator action items ONLY if this commit requires something beyond a standard code deploy+restart. Examples: new env vars to add, database migrations to run, new services to deploy, infrastructure changes, config file updates on servers. Leave as empty array [] if no operator action is needed \u2014 a normal code deploy does NOT count."
|
|
797
|
+
},
|
|
798
|
+
discoveries: {
|
|
799
|
+
type: "array",
|
|
800
|
+
items: { type: "string" },
|
|
801
|
+
description: "Reusable learnings for FUTURE commits, formatted as 'type:key:value'. Types: path_mapping, pattern, codebase, architecture, convention, warning. Examples: 'path_mapping:frontend_prefix:frontend/ in source = apps/frontend/ in target', 'codebase:pnl_helpers:betExitValueLocal() is the shared P&L function'. Only non-obvious facts. Leave as [] if nothing new."
|
|
802
|
+
},
|
|
803
|
+
keepMemoryKeys: {
|
|
804
|
+
type: "array",
|
|
805
|
+
items: { type: "string" },
|
|
806
|
+
description: "From the merge memory provided in context, list the KEYS of entries that are still useful for future commits. Entries whose keys are NOT listed here will be garbage-collected. If no merge memory was provided, return []. Be selective \u2014 only keep entries that are genuinely useful going forward, not stale or obvious facts."
|
|
781
807
|
}
|
|
782
808
|
},
|
|
783
|
-
required: [
|
|
809
|
+
required: [
|
|
810
|
+
"summary",
|
|
811
|
+
"alreadyInTarget",
|
|
812
|
+
"reasoning",
|
|
813
|
+
"applicationStrategy",
|
|
814
|
+
"affectedComponents",
|
|
815
|
+
"opsNotes",
|
|
816
|
+
"discoveries",
|
|
817
|
+
"keepMemoryKeys"
|
|
818
|
+
],
|
|
784
819
|
additionalProperties: false
|
|
785
820
|
};
|
|
786
821
|
applyResultSchema = {
|
|
@@ -1200,9 +1235,114 @@ function findResumableRun(baseDir, workBranch) {
|
|
|
1200
1235
|
}
|
|
1201
1236
|
var init_audit = () => {};
|
|
1202
1237
|
|
|
1238
|
+
// src/memory.ts
|
|
1239
|
+
import { existsSync as existsSync4, readFileSync as readFileSync4, writeFileSync as writeFileSync2, mkdirSync as mkdirSync2 } from "fs";
|
|
1240
|
+
import { join as join4 } from "path";
|
|
1241
|
+
|
|
1242
|
+
class MergeMemory {
|
|
1243
|
+
entries = [];
|
|
1244
|
+
filePath;
|
|
1245
|
+
maxEntries = 50;
|
|
1246
|
+
constructor(repoPath) {
|
|
1247
|
+
this.filePath = join4(repoPath, ".backmerge", "memory.jsonl");
|
|
1248
|
+
this.load();
|
|
1249
|
+
}
|
|
1250
|
+
load() {
|
|
1251
|
+
if (!existsSync4(this.filePath))
|
|
1252
|
+
return;
|
|
1253
|
+
try {
|
|
1254
|
+
const raw = readFileSync4(this.filePath, "utf-8");
|
|
1255
|
+
this.entries = raw.split(`
|
|
1256
|
+
`).filter(Boolean).map((line) => {
|
|
1257
|
+
try {
|
|
1258
|
+
return JSON.parse(line);
|
|
1259
|
+
} catch {
|
|
1260
|
+
return null;
|
|
1261
|
+
}
|
|
1262
|
+
}).filter((e) => e !== null);
|
|
1263
|
+
} catch {}
|
|
1264
|
+
}
|
|
1265
|
+
save() {
|
|
1266
|
+
const dir = join4(this.filePath, "..");
|
|
1267
|
+
mkdirSync2(dir, { recursive: true });
|
|
1268
|
+
writeFileSync2(this.filePath, this.entries.map((e) => JSON.stringify(e)).join(`
|
|
1269
|
+
`) + `
|
|
1270
|
+
`);
|
|
1271
|
+
}
|
|
1272
|
+
get all() {
|
|
1273
|
+
return this.entries;
|
|
1274
|
+
}
|
|
1275
|
+
get count() {
|
|
1276
|
+
return this.entries.length;
|
|
1277
|
+
}
|
|
1278
|
+
addDiscoveries(discoveries, commitHash) {
|
|
1279
|
+
let added = 0;
|
|
1280
|
+
for (const d of discoveries) {
|
|
1281
|
+
const existing = this.entries.find((e) => e.key === d.key);
|
|
1282
|
+
if (existing) {
|
|
1283
|
+
if (existing.value !== d.value) {
|
|
1284
|
+
existing.value = d.value;
|
|
1285
|
+
existing.source = commitHash;
|
|
1286
|
+
existing.ts = new Date().toISOString();
|
|
1287
|
+
}
|
|
1288
|
+
existing.useCount++;
|
|
1289
|
+
} else {
|
|
1290
|
+
this.entries.push({
|
|
1291
|
+
type: d.type,
|
|
1292
|
+
key: d.key,
|
|
1293
|
+
value: d.value,
|
|
1294
|
+
source: commitHash,
|
|
1295
|
+
ts: new Date().toISOString(),
|
|
1296
|
+
useCount: 1
|
|
1297
|
+
});
|
|
1298
|
+
added++;
|
|
1299
|
+
}
|
|
1300
|
+
}
|
|
1301
|
+
if (this.entries.length > this.maxEntries) {
|
|
1302
|
+
this.entries = this.entries.slice(-this.maxEntries);
|
|
1303
|
+
}
|
|
1304
|
+
this.save();
|
|
1305
|
+
return added;
|
|
1306
|
+
}
|
|
1307
|
+
applyGC(kept) {
|
|
1308
|
+
const keptKeys = new Set(kept.map((k) => k.key));
|
|
1309
|
+
const before = this.entries.length;
|
|
1310
|
+
const newEntries = [];
|
|
1311
|
+
for (const k of kept) {
|
|
1312
|
+
const existing = this.entries.find((e) => e.key === k.key);
|
|
1313
|
+
if (existing) {
|
|
1314
|
+
existing.value = k.value;
|
|
1315
|
+
existing.useCount++;
|
|
1316
|
+
newEntries.push(existing);
|
|
1317
|
+
} else {
|
|
1318
|
+
newEntries.push({
|
|
1319
|
+
type: "pattern",
|
|
1320
|
+
key: k.key,
|
|
1321
|
+
value: k.value,
|
|
1322
|
+
source: "gc",
|
|
1323
|
+
ts: new Date().toISOString(),
|
|
1324
|
+
useCount: 1
|
|
1325
|
+
});
|
|
1326
|
+
}
|
|
1327
|
+
}
|
|
1328
|
+
this.entries = newEntries;
|
|
1329
|
+
this.save();
|
|
1330
|
+
return before - this.entries.length;
|
|
1331
|
+
}
|
|
1332
|
+
toPromptBlock() {
|
|
1333
|
+
if (this.entries.length === 0)
|
|
1334
|
+
return "";
|
|
1335
|
+
const lines = this.entries.map((e) => `[${e.type}] ${e.key}: ${e.value}`);
|
|
1336
|
+
return `--- Merge memory (${this.entries.length} learnings from previous commits) ---
|
|
1337
|
+
${lines.join(`
|
|
1338
|
+
`)}`;
|
|
1339
|
+
}
|
|
1340
|
+
}
|
|
1341
|
+
var init_memory = () => {};
|
|
1342
|
+
|
|
1203
1343
|
// src/git.ts
|
|
1204
1344
|
import simpleGit from "simple-git";
|
|
1205
|
-
import { join as
|
|
1345
|
+
import { join as join5 } from "path";
|
|
1206
1346
|
import { tmpdir } from "os";
|
|
1207
1347
|
function createGit(cwd) {
|
|
1208
1348
|
return simpleGit(cwd);
|
|
@@ -1262,7 +1402,7 @@ async function getDescendantCommitsSince(git, since, ref) {
|
|
|
1262
1402
|
}
|
|
1263
1403
|
function generateWorktreePath(repoName) {
|
|
1264
1404
|
const id = Math.random().toString(36).slice(2, 8);
|
|
1265
|
-
return
|
|
1405
|
+
return join5(tmpdir(), `backmerge-${repoName}-${id}`);
|
|
1266
1406
|
}
|
|
1267
1407
|
async function createDetachedWorktree(git, path, ref) {
|
|
1268
1408
|
await git.raw(["worktree", "add", "--detach", path, ref]);
|
|
@@ -1456,6 +1596,10 @@ async function runEngine(opts, cb) {
|
|
|
1456
1596
|
for (const l of logs)
|
|
1457
1597
|
cb.onLog(` ${l}`, "gray");
|
|
1458
1598
|
}
|
|
1599
|
+
const memory = new MergeMemory(repoPath);
|
|
1600
|
+
if (memory.count > 0) {
|
|
1601
|
+
cb.onLog(`Merge memory: ${memory.count} entries from previous commits`, "cyan");
|
|
1602
|
+
}
|
|
1459
1603
|
if (config.promptHints && config.promptHints.length > 0) {
|
|
1460
1604
|
cb.onLog(`Active hints (${config.promptHints.length}):`, "cyan");
|
|
1461
1605
|
for (const h of config.promptHints) {
|
|
@@ -1616,6 +1760,7 @@ async function runEngine(opts, cb) {
|
|
|
1616
1760
|
maxAttempts: effectiveMaxAttempts,
|
|
1617
1761
|
commitPrefix,
|
|
1618
1762
|
workBranch: wbName,
|
|
1763
|
+
memory,
|
|
1619
1764
|
workBranchHead: currentBranchHead
|
|
1620
1765
|
});
|
|
1621
1766
|
if (decision.kind === "applied" && decision.newCommitHash) {
|
|
@@ -1657,7 +1802,7 @@ async function processOneCommit(o) {
|
|
|
1657
1802
|
try {
|
|
1658
1803
|
touchedPaths = await getCommitFiles(o.git, commit.hash);
|
|
1659
1804
|
} catch {}
|
|
1660
|
-
const commitCtx = buildCommitContext(o.repoPath, o.repoCtx, o.config, touchedPaths, commit.message);
|
|
1805
|
+
const commitCtx = buildCommitContext(o.repoPath, o.repoCtx, o.config, touchedPaths, commit.message, o.memory.toPromptBlock());
|
|
1661
1806
|
if (commitCtx.includedFiles.length > 0) {
|
|
1662
1807
|
audit.writeRelevantDocs(commit.hash, 0, commitCtx.includedFiles.join(`
|
|
1663
1808
|
`));
|
|
@@ -1685,6 +1830,31 @@ async function processOneCommit(o) {
|
|
|
1685
1830
|
});
|
|
1686
1831
|
audit.writeAnalysis(commit.hash, 1, analysis);
|
|
1687
1832
|
cb.onAnalysis(commit, analysis);
|
|
1833
|
+
if (analysis.discoveries && analysis.discoveries.length > 0) {
|
|
1834
|
+
const parsed = analysis.discoveries.map((d) => {
|
|
1835
|
+
const firstColon = d.indexOf(":");
|
|
1836
|
+
const secondColon = firstColon >= 0 ? d.indexOf(":", firstColon + 1) : -1;
|
|
1837
|
+
if (secondColon >= 0) {
|
|
1838
|
+
return {
|
|
1839
|
+
type: d.slice(0, firstColon),
|
|
1840
|
+
key: d.slice(firstColon + 1, secondColon),
|
|
1841
|
+
value: d.slice(secondColon + 1)
|
|
1842
|
+
};
|
|
1843
|
+
}
|
|
1844
|
+
return { type: "pattern", key: d.slice(0, 30), value: d };
|
|
1845
|
+
});
|
|
1846
|
+
const added = o.memory.addDiscoveries(parsed, commit.hash);
|
|
1847
|
+
if (added > 0) {
|
|
1848
|
+
cb.onLog(`Memory: +${added} new discoveries (${o.memory.count} total)`, "cyan");
|
|
1849
|
+
}
|
|
1850
|
+
}
|
|
1851
|
+
if (o.memory.count > 0 && analysis.keepMemoryKeys) {
|
|
1852
|
+
const kept = o.memory.all.filter((e) => analysis.keepMemoryKeys.includes(e.key));
|
|
1853
|
+
const gcCount = o.memory.applyGC(kept.map((e) => ({ key: e.key, value: e.value })));
|
|
1854
|
+
if (gcCount > 0) {
|
|
1855
|
+
cb.onLog(`Memory: GC'd ${gcCount} stale entries (${o.memory.count} remaining)`, "gray");
|
|
1856
|
+
}
|
|
1857
|
+
}
|
|
1688
1858
|
} catch (e) {
|
|
1689
1859
|
audit.error("analysis", commit.hash, commit.message, e.message);
|
|
1690
1860
|
return mkFailed(commit, "analysis", e.message, start);
|
|
@@ -1974,6 +2144,7 @@ var init_engine = __esm(() => {
|
|
|
1974
2144
|
init_codex();
|
|
1975
2145
|
init_review();
|
|
1976
2146
|
init_audit();
|
|
2147
|
+
init_memory();
|
|
1977
2148
|
init_git();
|
|
1978
2149
|
});
|
|
1979
2150
|
|
|
@@ -1983,11 +2154,11 @@ __export(exports_batch, {
|
|
|
1983
2154
|
runBatch: () => runBatch
|
|
1984
2155
|
});
|
|
1985
2156
|
import chalk from "chalk";
|
|
1986
|
-
import { readFileSync as
|
|
1987
|
-
import { join as
|
|
2157
|
+
import { readFileSync as readFileSync6 } from "fs";
|
|
2158
|
+
import { join as join7 } from "path";
|
|
1988
2159
|
function getVersion() {
|
|
1989
2160
|
try {
|
|
1990
|
-
const pkg = JSON.parse(
|
|
2161
|
+
const pkg = JSON.parse(readFileSync6(join7(import.meta.dir, "..", "package.json"), "utf-8"));
|
|
1991
2162
|
return pkg.version ?? "?";
|
|
1992
2163
|
} catch {
|
|
1993
2164
|
return "?";
|
|
@@ -2121,6 +2292,11 @@ async function runBatch(opts) {
|
|
|
2121
2292
|
if (analysis.opsNotes.length > 0) {
|
|
2122
2293
|
log(` ${chalk.yellow(" ops:")} ${analysis.opsNotes.join("; ")}`);
|
|
2123
2294
|
}
|
|
2295
|
+
if (analysis.discoveries && analysis.discoveries.length > 0) {
|
|
2296
|
+
for (const d of analysis.discoveries) {
|
|
2297
|
+
log(` ${chalk.cyan(` \uD83D\uDCA1 ${d}`)}`);
|
|
2298
|
+
}
|
|
2299
|
+
}
|
|
2124
2300
|
},
|
|
2125
2301
|
onDecision(commit, decision) {
|
|
2126
2302
|
if (jsonl)
|
|
@@ -2265,12 +2441,12 @@ import { useState, useEffect, useCallback, useRef } from "react";
|
|
|
2265
2441
|
import { Box, Text, useInput, useApp, Static, Newline } from "ink";
|
|
2266
2442
|
import SelectInput from "ink-select-input";
|
|
2267
2443
|
import Spinner from "ink-spinner";
|
|
2268
|
-
import { readFileSync as
|
|
2269
|
-
import { join as
|
|
2444
|
+
import { readFileSync as readFileSync5 } from "fs";
|
|
2445
|
+
import { join as join6 } from "path";
|
|
2270
2446
|
import { jsxDEV, Fragment } from "react/jsx-dev-runtime";
|
|
2271
2447
|
var XAB_VERSION = (() => {
|
|
2272
2448
|
try {
|
|
2273
|
-
return JSON.parse(
|
|
2449
|
+
return JSON.parse(readFileSync5(join6(import.meta.dir, "..", "package.json"), "utf-8")).version ?? "?";
|
|
2274
2450
|
} catch {
|
|
2275
2451
|
return "?";
|
|
2276
2452
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "xab",
|
|
3
|
-
"version": "12.0.0",
|
|
3
|
+
"version": "14.0.0",
|
|
4
4
|
"description": "AI-powered curated branch reconciliation engine",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
|
@@ -33,6 +33,6 @@
|
|
|
33
33
|
"ink-text-input": "^6.0.0",
|
|
34
34
|
"react": "18.3.1",
|
|
35
35
|
"simple-git": "^3.33.0",
|
|
36
|
-
"xab": "
|
|
36
|
+
"xab": "12"
|
|
37
37
|
}
|
|
38
38
|
}
|