agentsbestfriend 0.1.4 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -18059,9 +18059,52 @@ Summary:`;
18059
18059
  // ../core/dist/llm/pipelines.js
18060
18060
  import { readFile } from "fs/promises";
18061
18061
  import { join as join4 } from "path";
18062
/**
 * Generate LLM summaries for indexed files.
 *
 * Requires a reachable LLM provider (Ollama); throws LlmUnavailableError
 * otherwise so callers can distinguish "LLM down" from per-file failures.
 *
 * @param {string} projectRoot - absolute path to the project being indexed
 * @param {{ force?: boolean, batchSize?: number }} [opts]
 *   force: re-summarize every file, not just those missing a summary;
 *   batchSize: number of rows pulled per batch (default 5)
 * @returns {Promise<{generated: number, skipped: number, errors: number, durationMs: number}>}
 * @throws {LlmUnavailableError} when no provider is configured or it is unreachable
 */
async function generateSummaries(projectRoot, opts) {
  const start = Date.now();
  const provider = getLlmProvider();
  if (!provider) {
    throw new LlmUnavailableError("none", "LLM provider is set to 'none'");
  }
  if (!await provider.isAvailable()) {
    throw new LlmUnavailableError(provider.name, "Cannot reach Ollama. Is it running?");
  }
  const db = createProjectDb(projectRoot);
  const stats = {
    generated: 0,
    skipped: 0,
    errors: 0,
    durationMs: 0
  };
  try {
    // Without `force`, only files whose summary column is still NULL are processed.
    const rows = opts?.force
      ? db.select({ id: files.id, path: files.path }).from(files).all()
      : db.select({ id: files.id, path: files.path }).from(files).where(isNull(files.summary)).all();
    // FIX: previously `skipped` was only populated when rows.length === 0, so a
    // partial run reported 0 skipped even though already-summarized files were
    // skipped. Report skipped = total files minus files selected for processing
    // (0 under `force`, since every file is selected).
    const totalFiles = db.select({ id: files.id }).from(files).all().length;
    stats.skipped = totalFiles - rows.length;
    const batchSize = opts?.batchSize ?? 5;
    for (let i = 0; i < rows.length; i += batchSize) {
      const batch = rows.slice(i, i + batchSize);
      for (const row of batch) {
        try {
          const absPath = join4(projectRoot, row.path);
          const content = await readFile(absPath, "utf-8");
          const summary = await provider.generateSummary(content, row.path);
          db.update(files).set({ summary }).where(eq(files.id, row.id)).run();
          stats.generated++;
        } catch (err) {
          // A dead provider aborts the whole run; per-file failures
          // (unreadable file, generation error) are counted and skipped.
          if (err instanceof LlmUnavailableError)
            throw err;
          stats.errors++;
        }
      }
    }
    stats.durationMs = Date.now() - start;
    return stats;
  } finally {
    // Always release the DB handle, even when the provider dies mid-run.
    closeDb(db);
  }
}
18062
18104
  var init_pipelines = __esm({
18063
18105
  "../core/dist/llm/pipelines.js"() {
18064
18106
  "use strict";
18107
+ init_drizzle_orm();
18065
18108
  init_connection();
18066
18109
  init_schema2();
18067
18110
  init_llm();
@@ -44512,12 +44555,12 @@ var GitActionSchema = external_exports3.enum(["log", "file_history", "blame", "d
44512
44555
  function registerGitTool(server) {
44513
44556
  server.tool("abf_git", "Query git history, blame, and diff for the project.", {
44514
44557
  action: GitActionSchema.describe("Git action: log (recent commits), file_history, blame, diff"),
44515
- file: external_exports3.string().optional().describe("File path (required for file_history, blame; optional for diff)"),
44558
+ file_path: external_exports3.string().optional().describe("File path (required for file_history, blame; optional for diff)"),
44516
44559
  count: external_exports3.number().int().min(1).max(100).default(10).describe("Number of commits (log, file_history)"),
44517
44560
  line_start: external_exports3.number().int().optional().describe("Start line for blame range"),
44518
44561
  line_end: external_exports3.number().int().optional().describe("End line for blame range")
44519
- }, async ({ action, file: file2, count, line_start, line_end }) => {
44520
- const cwd = process.cwd();
44562
+ }, async ({ action, file_path, count, line_start, line_end }) => {
44563
+ const cwd = process.env.ABF_PROJECT_ROOT || process.cwd();
44521
44564
  if (!await isGitRepo(cwd)) {
44522
44565
  return {
44523
44566
  content: [
@@ -44537,25 +44580,25 @@ function registerGitTool(server) {
44537
44580
  break;
44538
44581
  }
44539
44582
  case "file_history": {
44540
- if (!file2) {
44541
- return errorResult("file_history requires a `file` parameter.");
44583
+ if (!file_path) {
44584
+ return errorResult("file_history requires a `file_path` parameter.");
44542
44585
  }
44543
- const history = await getFileHistory(cwd, file2, count);
44586
+ const history = await getFileHistory(cwd, file_path, count);
44544
44587
  text4 = `History for ${history.filePath}:
44545
44588
  ` + formatCommits(history.commits);
44546
44589
  break;
44547
44590
  }
44548
44591
  case "blame": {
44549
- if (!file2) {
44550
- return errorResult("blame requires a `file` parameter.");
44592
+ if (!file_path) {
44593
+ return errorResult("blame requires a `file_path` parameter.");
44551
44594
  }
44552
44595
  const range = line_start && line_end ? [line_start, line_end] : void 0;
44553
- const blameLines = await getBlame(cwd, file2, range);
44596
+ const blameLines = await getBlame(cwd, file_path, range);
44554
44597
  text4 = formatBlame(blameLines);
44555
44598
  break;
44556
44599
  }
44557
44600
  case "diff": {
44558
- const diff = await getDiff(cwd, file2);
44601
+ const diff = await getDiff(cwd, file_path);
44559
44602
  const { filesChanged, insertions, deletions } = diff.stats;
44560
44603
  const header = `${filesChanged} file(s) changed, +${insertions} -${deletions}`;
44561
44604
  text4 = diff.combined ? `${header}
@@ -44595,16 +44638,17 @@ function errorResult(msg) {
44595
44638
 
44596
44639
  // ../server/dist/tools/index-tool.js
44597
44640
  init_indexer();
44598
- var IndexActionSchema = external_exports3.enum(["status", "rebuild", "update"]);
44641
+ init_llm();
44642
+ var IndexActionSchema = external_exports3.enum(["status", "rebuild", "update", "summarize"]);
44599
44643
  function registerIndexTool(server) {
44600
- server.tool("abf_index", "Manage the file index: check status, trigger rebuild, or incremental update.", {
44601
- action: IndexActionSchema.describe("status: show index info, rebuild: full re-index, update: incremental update")
44644
+ server.tool("abf_index", "Manage the file index: check status, trigger rebuild, incremental update, or generate LLM summaries. Rebuild/update will auto-generate summaries when Ollama is available.", {
44645
+ action: IndexActionSchema.describe("status: show index info, rebuild: full re-index, update: incremental update, summarize: (re)generate LLM file summaries (requires Ollama)")
44602
44646
  }, async ({ action }) => {
44603
- const cwd = process.cwd();
44647
+ const projectRoot = process.env.ABF_PROJECT_ROOT || process.cwd();
44604
44648
  try {
44605
44649
  switch (action) {
44606
44650
  case "status": {
44607
- const status = await getIndexStatus(cwd);
44651
+ const status = await getIndexStatus(projectRoot);
44608
44652
  const lastUp = status.lastUpdated ? status.lastUpdated.toISOString() : "never";
44609
44653
  const sizeMb = (status.indexSizeBytes / (1024 * 1024)).toFixed(2);
44610
44654
  const text4 = [
@@ -44617,8 +44661,8 @@ function registerIndexTool(server) {
44617
44661
  }
44618
44662
  case "rebuild":
44619
44663
  case "update": {
44620
- const stats = await runIndexPipeline(cwd);
44621
- const text4 = [
44664
+ const stats = await runIndexPipeline(projectRoot);
44665
+ const textParts = [
44622
44666
  `Index ${action} complete (${stats.durationMs}ms)`,
44623
44667
  `Discovered: ${stats.totalDiscovered}`,
44624
44668
  `New: ${stats.indexed}`,
@@ -44626,6 +44670,27 @@ function registerIndexTool(server) {
44626
44670
  `Removed: ${stats.removed}`,
44627
44671
  `Unchanged: ${stats.skipped}`,
44628
44672
  stats.errors > 0 ? `Errors: ${stats.errors}` : null
44673
+ ].filter(Boolean);
44674
+ const provider = getLlmProvider();
44675
+ if (provider && await provider.isAvailable()) {
44676
+ try {
44677
+ const sumStats = await generateSummaries(projectRoot);
44678
+ textParts.push("", `LLM summaries: ${sumStats.generated} generated, ${sumStats.skipped} skipped${sumStats.errors > 0 ? `, ${sumStats.errors} errors` : ""} (${sumStats.durationMs}ms)`);
44679
+ } catch {
44680
+ textParts.push("", "LLM summaries: skipped (Ollama error)");
44681
+ }
44682
+ }
44683
+ return {
44684
+ content: [{ type: "text", text: textParts.join("\n") }]
44685
+ };
44686
+ }
44687
+ case "summarize": {
44688
+ const stats = await generateSummaries(projectRoot);
44689
+ const text4 = [
44690
+ `Summary generation complete (${stats.durationMs}ms)`,
44691
+ `Generated: ${stats.generated}`,
44692
+ `Skipped (already have summary): ${stats.skipped}`,
44693
+ stats.errors > 0 ? `Errors: ${stats.errors}` : null
44629
44694
  ].filter(Boolean).join("\n");
44630
44695
  return { content: [{ type: "text", text: text4 }] };
44631
44696
  }
@@ -44647,7 +44712,7 @@ function registerSymbolsTool(server) {
44647
44712
  file_path: external_exports3.string().describe("Path to the file (relative or absolute)"),
44648
44713
  depth: external_exports3.number().int().min(1).max(5).default(2).describe("How deep to show nested symbols")
44649
44714
  }, async ({ file_path, depth }) => {
44650
- const cwd = process.cwd();
44715
+ const cwd = process.env.ABF_PROJECT_ROOT || process.cwd();
44651
44716
  const absPath = file_path.startsWith("/") ? file_path : join9(cwd, file_path);
44652
44717
  try {
44653
44718
  const content = readFileSync6(absPath, "utf-8");
@@ -44689,17 +44754,43 @@ function formatSymbolTree(symbols2, maxDepth, currentDepth) {
44689
44754
  import { readFileSync as readFileSync7 } from "fs";
44690
44755
  import { join as join10 } from "path";
44691
44756
  function registerChunkTool(server) {
44692
- server.tool("abf_chunk", "Smart file chunking by symbol boundaries. Without chunk_index: returns chunk overview. With chunk_index: returns that chunk's content.", {
44693
- file_path: external_exports3.string().describe("Path to the file"),
44694
- chunk_index: external_exports3.number().int().min(0).optional().describe("Request a specific chunk by index (0-based). Omit for overview.")
44695
- }, async ({ file_path, chunk_index }) => {
44696
- const cwd = process.cwd();
44697
- const absPath = file_path.startsWith("/") ? file_path : join10(cwd, file_path);
44757
+ server.tool("abf_chunk", `Smart file chunking by symbol boundaries. Returns actual source code.
44758
+ Use EXACTLY ONE of these modes:
44759
+ - symbol: pass a symbol name to get its full source code directly
44760
+ - chunk_index: pass a 0-based chunk index to get that chunk's code
44761
+ - (neither): returns a chunk overview listing \u2014 use this first to discover available chunks, then call again with chunk_index to retrieve code`, {
44762
+ file_path: external_exports3.string().describe("Path to the file (relative or absolute)"),
44763
+ chunk_index: external_exports3.number().int().min(0).optional().describe("0-based chunk index to retrieve that chunk's source code. Get the index from the overview first."),
44764
+ symbol: external_exports3.string().optional().describe("Name of a symbol (function, class, etc.) to retrieve its full source code directly.")
44765
+ }, async ({ file_path, chunk_index, symbol: symbol2 }) => {
44766
+ const projectRoot = process.env.ABF_PROJECT_ROOT || process.cwd();
44767
+ const absPath = file_path.startsWith("/") ? file_path : join10(projectRoot, file_path);
44698
44768
  try {
44699
44769
  const content = readFileSync7(absPath, "utf-8");
44700
44770
  const lines = content.split("\n");
44701
- const { symbols: symbols2 } = parseFile(absPath, content);
44702
- const chunks = buildChunks(symbols2, lines.length);
44771
+ const { symbols: parsedSymbols } = parseFile(absPath, content);
44772
+ if (symbol2) {
44773
+ const match = findSymbol(parsedSymbols, symbol2);
44774
+ if (!match) {
44775
+ const available = parsedSymbols.map((s) => `${s.kind} ${s.name}`).join(", ");
44776
+ return {
44777
+ content: [
44778
+ {
44779
+ type: "text",
44780
+ text: `Symbol "${symbol2}" not found in ${file_path}. Available: ${available || "(none)"}`
44781
+ }
44782
+ ]
44783
+ };
44784
+ }
44785
+ const chunkLines = lines.slice(match.startLine - 1, match.endLine);
44786
+ const text5 = [
44787
+ `${match.kind} ${match.name} (L${match.startLine}-${match.endLine})`,
44788
+ "---",
44789
+ ...chunkLines
44790
+ ].join("\n");
44791
+ return { content: [{ type: "text", text: text5 }] };
44792
+ }
44793
+ const chunks = buildChunks(parsedSymbols, lines.length);
44703
44794
  if (chunk_index !== void 0) {
44704
44795
  if (chunk_index < 0 || chunk_index >= chunks.length) {
44705
44796
  return {
@@ -44722,7 +44813,9 @@ function registerChunkTool(server) {
44722
44813
  }
44723
44814
  const overview = chunks.map((c, i) => `[${i}] ${c.label} L${c.startLine}-${c.endLine} (${c.endLine - c.startLine + 1} lines)`).join("\n");
44724
44815
  const text4 = `${chunks.length} chunks in ${file_path}:
44725
- ${overview}`;
44816
+ ${overview}
44817
+
44818
+ To get source code, call again with chunk_index=<number> or symbol=<name>.`;
44726
44819
  return { content: [{ type: "text", text: text4 }] };
44727
44820
  } catch (err) {
44728
44821
  const msg = err instanceof Error ? err.message : String(err);
@@ -44730,6 +44823,18 @@ ${overview}`;
44730
44823
  }
44731
44824
  });
44732
44825
  }
44826
/**
 * Case-insensitively find a symbol by name in a parsed symbol tree.
 *
 * Generalized from a fixed two-level lookup (top-level symbols and their
 * direct children) to a full depth-first search, so symbols nested deeper
 * (e.g. a method of a nested class) are also found. Match order for trees
 * of depth <= 2 is unchanged, so existing callers see identical results.
 *
 * @param {Array<{name: string, children?: Array}>} symbols2 - parsed symbols
 * @param {string} name - symbol name to match (case-insensitive)
 * @returns {object|undefined} first matching symbol node, or undefined
 */
function findSymbol(symbols2, name) {
  const lower = name.toLowerCase();
  for (const sym of symbols2) {
    if (sym.name.toLowerCase() === lower)
      return sym;
    // Depth-first descent; `?? []` guards nodes without a children array.
    const nested = findSymbol(sym.children ?? [], name);
    if (nested)
      return nested;
  }
  return void 0;
}
44733
44838
  var MAX_CHUNK_LINES = 200;
44734
44839
  function buildChunks(symbols2, totalLines) {
44735
44840
  if (symbols2.length === 0) {
@@ -44812,7 +44917,7 @@ function registerDependenciesTool(server) {
44812
44917
  direction: external_exports3.enum(["imports", "imported_by", "both"]).default("both").describe("Which direction to analyze"),
44813
44918
  depth: external_exports3.number().int().min(1).max(3).default(1).describe("Depth of transitive dependencies")
44814
44919
  }, async ({ file_path, direction, depth }) => {
44815
- const cwd = process.cwd();
44920
+ const cwd = process.env.ABF_PROJECT_ROOT || process.cwd();
44816
44921
  const absPath = file_path.startsWith("/") ? file_path : join11(cwd, file_path);
44817
44922
  const relPath = file_path.startsWith("/") ? file_path.slice(cwd.length + 1) : file_path;
44818
44923
  try {
@@ -44935,13 +45040,13 @@ function importMatchesTarget(importPath, sourceFile, targetRelPath, targetName)
44935
45040
  init_search();
44936
45041
  function registerImpactTool(server) {
44937
45042
  server.tool("abf_impact", "Find all files and lines that reference a given symbol name. Useful for change impact analysis.", {
44938
- symbol_name: external_exports3.string().describe("The symbol (function, class, variable) name to find references for"),
45043
+ symbol: external_exports3.string().describe("The symbol (function, class, variable) name to find references for"),
44939
45044
  file_path: external_exports3.string().optional().describe("Optional: scope search to usages of this symbol from this file")
44940
- }, async ({ symbol_name, file_path }) => {
44941
- const cwd = process.cwd();
45045
+ }, async ({ symbol: symbol2, file_path }) => {
45046
+ const cwd = process.env.ABF_PROJECT_ROOT || process.cwd();
44942
45047
  try {
44943
45048
  const results = await ripgrepSearch({
44944
- query: `\\b${escapeRegex2(symbol_name)}\\b`,
45049
+ query: `\\b${escapeRegex2(symbol2)}\\b`,
44945
45050
  cwd,
44946
45051
  maxResults: 50,
44947
45052
  regex: true,
@@ -44952,7 +45057,7 @@ function registerImpactTool(server) {
44952
45057
  content: [
44953
45058
  {
44954
45059
  type: "text",
44955
- text: `No references found for "${symbol_name}".`
45060
+ text: `No references found for "${symbol2}".`
44956
45061
  }
44957
45062
  ]
44958
45063
  };
@@ -44963,12 +45068,12 @@ function registerImpactTool(server) {
44963
45068
  group.push({
44964
45069
  line: match.lineNumber,
44965
45070
  text: match.lineText.trim(),
44966
- usage: classifyUsage(match.lineText, symbol_name)
45071
+ usage: classifyUsage(match.lineText, symbol2)
44967
45072
  });
44968
45073
  byFile.set(match.filePath, group);
44969
45074
  }
44970
45075
  const lines = [
44971
- `${results.totalMatches} references to "${symbol_name}" in ${byFile.size} files:`,
45076
+ `${results.totalMatches} references to "${symbol2}" in ${byFile.size} files:`,
44972
45077
  ""
44973
45078
  ];
44974
45079
  for (const [filePath, refs] of byFile) {