@aria-cli/tools 1.0.2 → 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. package/dist/.aria-build-stamp.json +1 -1
  2. package/dist/.tsbuildinfo +1 -1
  3. package/dist/definitions/code-intelligence.d.ts +9 -0
  4. package/dist/definitions/code-intelligence.d.ts.map +1 -0
  5. package/dist/definitions/code-intelligence.js +471 -0
  6. package/dist/definitions/code-intelligence.js.map +1 -0
  7. package/dist/definitions/core.d.ts +3 -0
  8. package/dist/definitions/core.d.ts.map +1 -1
  9. package/dist/definitions/core.js +13 -1
  10. package/dist/definitions/core.js.map +1 -1
  11. package/dist/definitions/filesystem.d.ts +3 -2
  12. package/dist/definitions/filesystem.d.ts.map +1 -1
  13. package/dist/definitions/filesystem.js +4 -38
  14. package/dist/definitions/filesystem.js.map +1 -1
  15. package/dist/definitions/frg.d.ts +4 -0
  16. package/dist/definitions/frg.d.ts.map +1 -0
  17. package/dist/definitions/frg.js +64 -0
  18. package/dist/definitions/frg.js.map +1 -0
  19. package/dist/definitions/index.d.ts +3 -0
  20. package/dist/definitions/index.d.ts.map +1 -1
  21. package/dist/definitions/index.js +3 -0
  22. package/dist/definitions/index.js.map +1 -1
  23. package/dist/definitions/search.d.ts +10 -0
  24. package/dist/definitions/search.d.ts.map +1 -0
  25. package/dist/definitions/search.js +61 -0
  26. package/dist/definitions/search.js.map +1 -0
  27. package/dist/executors/apply-patch.d.ts.map +1 -1
  28. package/dist/executors/apply-patch.js +18 -0
  29. package/dist/executors/apply-patch.js.map +1 -1
  30. package/dist/executors/code-intelligence.d.ts +139 -0
  31. package/dist/executors/code-intelligence.d.ts.map +1 -0
  32. package/dist/executors/code-intelligence.js +883 -0
  33. package/dist/executors/code-intelligence.js.map +1 -0
  34. package/dist/executors/filesystem.d.ts.map +1 -1
  35. package/dist/executors/filesystem.js +14 -8
  36. package/dist/executors/filesystem.js.map +1 -1
  37. package/dist/executors/frg-freshness.d.ts +94 -0
  38. package/dist/executors/frg-freshness.d.ts.map +1 -0
  39. package/dist/executors/frg-freshness.js +577 -0
  40. package/dist/executors/frg-freshness.js.map +1 -0
  41. package/dist/executors/frg.d.ts +28 -0
  42. package/dist/executors/frg.d.ts.map +1 -0
  43. package/dist/executors/frg.js +299 -0
  44. package/dist/executors/frg.js.map +1 -0
  45. package/dist/executors/index.d.ts +6 -0
  46. package/dist/executors/index.d.ts.map +1 -1
  47. package/dist/executors/index.js +5 -0
  48. package/dist/executors/index.js.map +1 -1
  49. package/dist/executors/lsp-client.d.ts +39 -0
  50. package/dist/executors/lsp-client.d.ts.map +1 -0
  51. package/dist/executors/lsp-client.js +297 -0
  52. package/dist/executors/lsp-client.js.map +1 -0
  53. package/dist/executors/restart.d.ts +4 -9
  54. package/dist/executors/restart.d.ts.map +1 -1
  55. package/dist/executors/restart.js +20 -51
  56. package/dist/executors/restart.js.map +1 -1
  57. package/dist/executors/search-freshness.d.ts +51 -0
  58. package/dist/executors/search-freshness.d.ts.map +1 -0
  59. package/dist/executors/search-freshness.js +196 -0
  60. package/dist/executors/search-freshness.js.map +1 -0
  61. package/dist/executors/search.d.ts +12 -0
  62. package/dist/executors/search.d.ts.map +1 -0
  63. package/dist/executors/search.js +67 -0
  64. package/dist/executors/search.js.map +1 -0
  65. package/dist/headless-control-contract.d.ts +4 -0
  66. package/dist/headless-control-contract.d.ts.map +1 -1
  67. package/dist/index.d.ts +2 -2
  68. package/dist/index.d.ts.map +1 -1
  69. package/dist/index.js +1 -1
  70. package/dist/index.js.map +1 -1
  71. package/dist/network-runtime/local-control-contract.d.ts +2 -0
  72. package/dist/network-runtime/local-control-contract.d.ts.map +1 -1
  73. package/dist/network-runtime/local-control-contract.js +2 -0
  74. package/dist/network-runtime/local-control-contract.js.map +1 -1
  75. package/dist-cjs/.tsbuildinfo +1 -1
  76. package/dist-cjs/definitions/code-intelligence.d.ts +8 -0
  77. package/dist-cjs/definitions/code-intelligence.js +474 -0
  78. package/dist-cjs/definitions/code-intelligence.js.map +1 -0
  79. package/dist-cjs/definitions/core.d.ts +3 -0
  80. package/dist-cjs/definitions/core.js +17 -2
  81. package/dist-cjs/definitions/core.js.map +1 -1
  82. package/dist-cjs/definitions/filesystem.d.ts +3 -2
  83. package/dist-cjs/definitions/filesystem.js +3 -37
  84. package/dist-cjs/definitions/filesystem.js.map +1 -1
  85. package/dist-cjs/definitions/frg.d.ts +3 -0
  86. package/dist-cjs/definitions/frg.js +67 -0
  87. package/dist-cjs/definitions/frg.js.map +1 -0
  88. package/dist-cjs/definitions/index.d.ts +3 -0
  89. package/dist-cjs/definitions/index.js +7 -1
  90. package/dist-cjs/definitions/index.js.map +1 -1
  91. package/dist-cjs/definitions/search.d.ts +9 -0
  92. package/dist-cjs/definitions/search.js +64 -0
  93. package/dist-cjs/definitions/search.js.map +1 -0
  94. package/dist-cjs/executors/apply-patch.js +18 -0
  95. package/dist-cjs/executors/apply-patch.js.map +1 -1
  96. package/dist-cjs/executors/code-intelligence.d.ts +138 -0
  97. package/dist-cjs/executors/code-intelligence.js +926 -0
  98. package/dist-cjs/executors/code-intelligence.js.map +1 -0
  99. package/dist-cjs/executors/filesystem.js +17 -8
  100. package/dist-cjs/executors/filesystem.js.map +1 -1
  101. package/dist-cjs/executors/frg-freshness.d.ts +93 -0
  102. package/dist-cjs/executors/frg-freshness.js +628 -0
  103. package/dist-cjs/executors/frg-freshness.js.map +1 -0
  104. package/dist-cjs/executors/frg.d.ts +27 -0
  105. package/dist-cjs/executors/frg.js +335 -0
  106. package/dist-cjs/executors/frg.js.map +1 -0
  107. package/dist-cjs/executors/index.d.ts +6 -0
  108. package/dist-cjs/executors/index.js +34 -2
  109. package/dist-cjs/executors/index.js.map +1 -1
  110. package/dist-cjs/executors/lsp-client.d.ts +38 -0
  111. package/dist-cjs/executors/lsp-client.js +311 -0
  112. package/dist-cjs/executors/lsp-client.js.map +1 -0
  113. package/dist-cjs/executors/restart.d.ts +4 -9
  114. package/dist-cjs/executors/restart.js +19 -50
  115. package/dist-cjs/executors/restart.js.map +1 -1
  116. package/dist-cjs/executors/search-freshness.d.ts +50 -0
  117. package/dist-cjs/executors/search-freshness.js +235 -0
  118. package/dist-cjs/executors/search-freshness.js.map +1 -0
  119. package/dist-cjs/executors/search.d.ts +11 -0
  120. package/dist-cjs/executors/search.js +103 -0
  121. package/dist-cjs/executors/search.js.map +1 -0
  122. package/dist-cjs/headless-control-contract.d.ts +15 -11
  123. package/dist-cjs/index.d.ts +2 -2
  124. package/dist-cjs/index.js +22 -2
  125. package/dist-cjs/index.js.map +1 -1
  126. package/dist-cjs/network-runtime/local-control-contract.d.ts +2 -0
  127. package/dist-cjs/network-runtime/local-control-contract.js +2 -0
  128. package/dist-cjs/network-runtime/local-control-contract.js.map +1 -1
  129. package/package.json +9 -5
  130. package/src/definitions/code-intelligence.ts +526 -0
  131. package/src/definitions/core.ts +13 -1
  132. package/src/definitions/filesystem.ts +3 -39
  133. package/src/definitions/frg.ts +67 -0
  134. package/src/definitions/index.ts +3 -0
  135. package/src/definitions/search.ts +67 -0
  136. package/src/executors/apply-patch.ts +20 -0
  137. package/src/executors/code-intelligence.ts +1179 -0
  138. package/src/executors/filesystem.ts +15 -8
  139. package/src/executors/frg-freshness.ts +743 -0
  140. package/src/executors/frg.ts +394 -0
  141. package/src/executors/index.ts +58 -0
  142. package/src/executors/lsp-client.ts +355 -0
  143. package/src/executors/restart.ts +21 -56
  144. package/src/executors/search-freshness.ts +249 -0
  145. package/src/executors/search.ts +89 -0
  146. package/src/index.ts +25 -0
  147. package/src/network-runtime/local-control-contract.ts +2 -0
  148. package/tests/definitions/tool-inventory.test.ts +17 -6
  149. package/tests/executors/frg-freshness.test.ts +136 -0
  150. package/tests/executors/frg-merge.test.ts +70 -0
  151. package/tests/executors/frg-session-content.test.ts +40 -0
  152. package/tests/executors/frg.test.ts +56 -0
  153. package/tests/integration/headless-control-contract.integration.test.ts +2 -0
  154. package/tests/loading-tier.test.ts +6 -6
  155. package/tests/test-lane-manifest.ts +4 -0
  156. package/tsconfig.cjs.json +9 -1
  157. package/tsconfig.json +1 -1
@@ -0,0 +1,249 @@
1
+ /**
2
+ * Freshness tracker for aria-search (code_search tool).
3
+ *
4
+ * Mirrors frg-freshness.ts patterns to close the freshness gaps:
5
+ * 1. Tracks file mutations from ARIA tool writes (session overlay)
6
+ * 2. Reconciles git-dirty files on each search
7
+ * 3. Generates session overlay for the napi search() call
8
+ *
9
+ * The session overlay gives 0ms visibility for pending mutations —
10
+ * files written by agents/users appear in search results immediately
11
+ * without waiting for an index rebuild or hot.log sync.
12
+ */
13
+
14
+ import * as fsSync from "node:fs";
15
+ import * as nodePath from "node:path";
16
+ import { execFileSync } from "node:child_process";
17
+
18
/** Kind of pending mutation tracked for a file. */
export type SearchMutationOperation = "write" | "delete";

/** A single pending file mutation not yet reflected in the base index. */
export interface SearchMutationRecord {
  // Absolute, normalized path of the mutated file.
  path: string;
  // Whether the file was written/updated or deleted.
  operation: SearchMutationOperation;
  // Cached content for ARIA-sourced writes. Undefined when the content was
  // too large to cache or the mutation came from git reconciliation — in
  // that case it is read from disk when the overlay is built.
  content?: string;
  // Monotonic per-repo ordering of mutations.
  sequence: number;
}

/** Overlay passed to the napi search() call on top of the base index. */
export interface SearchSessionOverlay {
  // Files whose in-memory content should shadow the indexed version.
  sessionWrites: Array<{ path: string; content: string }>;
  // Absolute paths to hide from search results.
  sessionDeletes: string[];
}

// ---------------------------------------------------------------------------
// In-memory state (per repo root)
// ---------------------------------------------------------------------------

/** Mutation-tracking state for one repository. */
interface RepoState {
  // Last issued mutation sequence number.
  sequence: number;
  // Pending mutations keyed by absolute file path (latest record wins per path).
  pending: Map<string, SearchMutationRecord>;
  // Epoch ms of the last git-dirty reconciliation (used for throttling).
  lastGitReconcileMs: number;
}

// Keyed by the normalized repo root path (see norm()).
const repoStates = new Map<string, RepoState>();

/** Minimum interval between git-dirty reconciliations (avoid hammering git). */
const GIT_RECONCILE_INTERVAL_MS = 2_000;

/** Max content size to cache in memory per file. */
const MAX_CACHED_CONTENT_BYTES = 256 * 1024;
49
+
50
+ function norm(p: string): string {
51
+ return nodePath.resolve(p);
52
+ }
53
+
54
+ function getState(root: string): RepoState {
55
+ const key = norm(root);
56
+ let state = repoStates.get(key);
57
+ if (!state) {
58
+ state = { sequence: 0, pending: new Map(), lastGitReconcileMs: 0 };
59
+ repoStates.set(key, state);
60
+ }
61
+ return state;
62
+ }
63
+
64
+ // ---------------------------------------------------------------------------
65
+ // Public API — called from filesystem/patch executors
66
+ // ---------------------------------------------------------------------------
67
+
68
+ /**
69
+ * Record a file mutation from an ARIA tool write/edit/delete.
70
+ * Called alongside recordFrgMutation in filesystem.ts and apply-patch.ts.
71
+ */
72
+ export function recordSearchMutation(
73
+ filePath: string,
74
+ operation: SearchMutationOperation,
75
+ content?: string,
76
+ ): void {
77
+ // Find the git repo root to scope mutations correctly
78
+ const repoRoot = findGitRepoRoot(filePath);
79
+ if (!repoRoot) return;
80
+
81
+ const state = getState(repoRoot);
82
+ state.sequence += 1;
83
+ const normalizedPath = norm(filePath);
84
+
85
+ state.pending.set(normalizedPath, {
86
+ path: normalizedPath,
87
+ operation,
88
+ content:
89
+ typeof content === "string" && Buffer.byteLength(content, "utf8") <= MAX_CACHED_CONTENT_BYTES
90
+ ? content
91
+ : undefined,
92
+ sequence: state.sequence,
93
+ });
94
+ }
95
+
96
+ // ---------------------------------------------------------------------------
97
+ // Public API — called from search executor
98
+ // ---------------------------------------------------------------------------
99
+
100
+ /**
101
+ * Reconcile git-dirty files into the pending mutations map.
102
+ * Runs `git diff --name-status -z HEAD` and `git ls-files -o --exclude-standard -z`
103
+ * to detect files modified/created/deleted by external agents or the user.
104
+ *
105
+ * Throttled to at most once per GIT_RECONCILE_INTERVAL_MS to avoid
106
+ * hammering git on rapid sequential searches.
107
+ */
108
+ export function reconcileSearchGitDirty(root: string): void {
109
+ const state = getState(root);
110
+ const now = Date.now();
111
+ if (now - state.lastGitReconcileMs < GIT_RECONCILE_INTERVAL_MS) {
112
+ return; // Throttled — recent reconciliation is still fresh enough
113
+ }
114
+ state.lastGitReconcileMs = now;
115
+
116
+ const normalizedRoot = norm(root);
117
+
118
+ // Tracked modified/deleted files
119
+ const trackedOutput = runGit(normalizedRoot, ["diff", "--name-status", "-z", "HEAD", "--"]);
120
+ if (trackedOutput) {
121
+ const tokens = trackedOutput.split("\0").filter(Boolean);
122
+ for (let i = 0; i < tokens.length; i++) {
123
+ const status = tokens[i] ?? "";
124
+ if (status.startsWith("R")) {
125
+ // Rename: old path deleted, new path written
126
+ const oldPath = tokens[++i];
127
+ const newPath = tokens[++i];
128
+ if (oldPath) addGitDirtyMutation(state, normalizedRoot, oldPath, "delete");
129
+ if (newPath) addGitDirtyMutation(state, normalizedRoot, newPath, "write");
130
+ continue;
131
+ }
132
+ const filePath = tokens[++i];
133
+ if (!filePath) continue;
134
+ addGitDirtyMutation(
135
+ state,
136
+ normalizedRoot,
137
+ filePath,
138
+ status.startsWith("D") ? "delete" : "write",
139
+ );
140
+ }
141
+ }
142
+
143
+ // Untracked new files
144
+ const untrackedOutput = runGit(normalizedRoot, ["ls-files", "-o", "--exclude-standard", "-z"]);
145
+ if (untrackedOutput) {
146
+ for (const token of untrackedOutput.split("\0").filter(Boolean)) {
147
+ addGitDirtyMutation(state, normalizedRoot, token, "write");
148
+ }
149
+ }
150
+ }
151
+
152
+ function addGitDirtyMutation(
153
+ state: RepoState,
154
+ root: string,
155
+ relativePath: string,
156
+ operation: SearchMutationOperation,
157
+ ): void {
158
+ const absPath = norm(nodePath.join(root, relativePath));
159
+ const existing = state.pending.get(absPath);
160
+ // Don't overwrite ARIA-sourced mutations (which have cached content)
161
+ if (existing?.content) return;
162
+ if (existing?.operation === operation) return;
163
+
164
+ state.sequence += 1;
165
+ state.pending.set(absPath, {
166
+ path: absPath,
167
+ operation,
168
+ content: undefined, // Will be read from disk in getSearchSessionOverlay
169
+ sequence: state.sequence,
170
+ });
171
+ }
172
+
173
+ /**
174
+ * Generate the session overlay for the napi search() call.
175
+ * Reads content from disk for mutations without cached content.
176
+ */
177
+ export function getSearchSessionOverlay(root: string): SearchSessionOverlay {
178
+ const state = getState(root);
179
+ const sessionWrites: Array<{ path: string; content: string }> = [];
180
+ const sessionDeletes: string[] = [];
181
+
182
+ for (const mutation of state.pending.values()) {
183
+ if (mutation.operation === "delete") {
184
+ sessionDeletes.push(mutation.path);
185
+ continue;
186
+ }
187
+
188
+ let content = mutation.content;
189
+ if (typeof content !== "string") {
190
+ // Read from disk — closes the gap where git-dirty files have no cached content
191
+ try {
192
+ const buf = fsSync.readFileSync(mutation.path);
193
+ if (!buf.subarray(0, Math.min(buf.length, 8192)).includes(0)) {
194
+ content = buf.toString("utf8");
195
+ }
196
+ } catch {
197
+ continue; // Unreadable — skip
198
+ }
199
+ }
200
+
201
+ if (typeof content === "string") {
202
+ sessionWrites.push({ path: mutation.path, content });
203
+ }
204
+ }
205
+
206
+ return { sessionWrites, sessionDeletes };
207
+ }
208
+
209
+ /**
210
+ * Clear all pending mutations for a repo root.
211
+ * Called after a full index rebuild (all mutations are now in the base index).
212
+ */
213
+ export function clearSearchMutations(root: string): void {
214
+ const key = norm(root);
215
+ repoStates.delete(key);
216
+ }
217
+
218
+ // ---------------------------------------------------------------------------
219
+ // Helpers
220
+ // ---------------------------------------------------------------------------
221
+
222
+ function findGitRepoRoot(startPath: string): string | null {
223
+ let current = norm(startPath);
224
+ try {
225
+ if (!fsSync.statSync(current).isDirectory()) {
226
+ current = nodePath.dirname(current);
227
+ }
228
+ } catch {
229
+ current = nodePath.dirname(current);
230
+ }
231
+ while (true) {
232
+ if (fsSync.existsSync(nodePath.join(current, ".git"))) return current;
233
+ const parent = nodePath.dirname(current);
234
+ if (parent === current) return null;
235
+ current = parent;
236
+ }
237
+ }
238
+
239
+ function runGit(cwd: string, args: string[]): string | null {
240
+ try {
241
+ return execFileSync("git", args, {
242
+ cwd,
243
+ encoding: "utf8",
244
+ stdio: ["ignore", "pipe", "ignore"],
245
+ }).trim();
246
+ } catch {
247
+ return null;
248
+ }
249
+ }
@@ -0,0 +1,89 @@
1
+ /**
2
+ * Executor for the native indexed regex search tool.
3
+ *
4
+ * Freshness architecture (frg parity):
5
+ * 1. ARIA tool writes → recordSearchMutation → session overlay (0ms visibility)
6
+ * 2. External edits → reconcileSearchGitDirty → session overlay (0ms visibility)
7
+ * 3. Commit/version change → syncIndex or buildIndex (incremental/full)
8
+ * 4. Session overlay passed to napi search() — no index rebuild needed
9
+ */
10
+
11
+ import * as nodePath from "node:path";
12
+ import { buildIndex, syncIndex, indexStatus, search } from "@aria-cli/search";
13
+ import { success, fail } from "./utils.js";
14
+ import type { ToolContext, ToolResult } from "../types.js";
15
+ import {
16
+ reconcileSearchGitDirty,
17
+ getSearchSessionOverlay,
18
+ clearSearchMutations,
19
+ } from "./search-freshness.js";
20
+
21
/** Shape of the `code_search` tool input; fields are forwarded to search(). */
interface SearchInput {
  // Pattern to search for (treated as a literal string when `literal` is true).
  pattern: string;
  // Directory to search, resolved against the tool working dir; defaults to ".".
  directory?: string;
  // Glob filter forwarded to the search engine.
  fileGlob?: string;
  // File-type filter forwarded to the search engine.
  fileType?: string;
  // Case-sensitive matching; the executor defaults this to true.
  caseSensitive?: boolean;
  // Treat `pattern` as a literal string instead of a regex; defaults to false.
  literal?: boolean;
  // Cap on returned matches; the executor defaults this to 1000.
  maxResults?: number;
  // Lines of surrounding context per match; the executor defaults this to 0.
  context?: number;
}
31
+
32
+ export async function executeSearch(input: unknown, ctx: ToolContext): Promise<ToolResult> {
33
+ const opts = input as SearchInput;
34
+ const dir = nodePath.resolve(ctx.workingDir, opts.directory || ".");
35
+
36
+ try {
37
+ // Step 1: Index management — build or sync base index if needed.
38
+ const status = indexStatus(dir);
39
+ if (status.state === "none") {
40
+ buildIndex(dir);
41
+ clearSearchMutations(dir); // fresh index includes everything
42
+ } else if (status.state === "stale") {
43
+ // Commit or binary version changed — incremental sync is sufficient
44
+ // (syncIndex rebuilds hot.log from git diff, much cheaper than full build).
45
+ // Only fall back to full build if sync fails.
46
+ try {
47
+ syncIndex(dir);
48
+ } catch {
49
+ buildIndex(dir);
50
+ }
51
+ clearSearchMutations(dir);
52
+ }
53
+ // "ready" — base index is fresh, session overlay handles uncommitted changes
54
+
55
+ // Step 2: Reconcile git-dirty files into pending mutations.
56
+ // Detects files modified/created/deleted by external agents or the user.
57
+ // Throttled to avoid hammering git on rapid sequential searches.
58
+ reconcileSearchGitDirty(dir);
59
+
60
+ // Step 3: Generate session overlay from pending mutations.
61
+ // This gives 0ms visibility for ARIA tool writes and external edits —
62
+ // no index rebuild needed. The overlay is applied in-memory by the
63
+ // Rust search engine on top of the base index + hot.log.
64
+ const overlay = getSearchSessionOverlay(dir);
65
+
66
+ // Step 4: Search with overlay.
67
+ const results = search({
68
+ pattern: opts.pattern,
69
+ directory: dir,
70
+ maxResults: opts.maxResults ?? 1000,
71
+ fileGlob: opts.fileGlob,
72
+ fileType: opts.fileType,
73
+ caseSensitive: opts.caseSensitive ?? true,
74
+ literal: opts.literal ?? false,
75
+ context: opts.context ?? 0,
76
+ sessionWrites: overlay.sessionWrites,
77
+ sessionDeletes: overlay.sessionDeletes,
78
+ });
79
+
80
+ const truncated = results.length >= (opts.maxResults ?? 1000);
81
+ return success(
82
+ `Found ${results.length} matches for "${opts.pattern}"${truncated ? " (truncated)" : ""}`,
83
+ { matches: results, truncated },
84
+ );
85
+ } catch (err) {
86
+ const reason = err instanceof Error ? err.message : String(err);
87
+ return fail(`Search failed: ${reason}`);
88
+ }
89
+ }
package/src/index.ts CHANGED
@@ -515,6 +515,25 @@ export {
515
515
  executeGlob,
516
516
  executeGrep,
517
517
  executeApplyPatch,
518
+ executeFrg,
519
+ recordFrgMutation,
520
+ getPendingFrgMutations,
521
+ getPendingFrgMutationBytes,
522
+ clearPendingFrgMutations,
523
+ flushPendingFrgMutations,
524
+ maybeBuildFrgIndexForRepo,
525
+ ensureFrgRepoStateLoaded,
526
+ reconcileGitDirtyMutations,
527
+ getGitDirtyStatus,
528
+ getFrgFlushStatus,
529
+ getFrgFreshnessSnapshot,
530
+ scheduleBackgroundFrgFlush,
531
+ shouldForceSynchronousFrgFlush,
532
+ DEFAULT_BACKGROUND_FRG_FLUSH_DELAY_MS,
533
+ BACKGROUND_FRG_MAX_FLUSH_DELAY_MS,
534
+ MAX_PENDING_MUTATIONS_BEFORE_SYNC_FLUSH,
535
+ MAX_PENDING_MUTATION_BYTES_BEFORE_SYNC_FLUSH,
536
+ MAX_CACHED_MUTATION_CONTENT_BYTES,
518
537
  } from "./executors/index.js";
519
538
 
520
539
  // Filesystem types
@@ -528,6 +547,12 @@ export type {
528
547
  GrepInput,
529
548
  GrepMatch,
530
549
  ApplyPatchInput,
550
+ FrgInput,
551
+ FrgMutationOperation,
552
+ FrgMutationRecord,
553
+ FrgGitDirtyStatus,
554
+ FrgFlushStatus,
555
+ FrgFreshnessSnapshot,
531
556
  } from "./executors/index.js";
532
557
 
533
558
  // Shell executors
@@ -45,6 +45,7 @@ export const RunRequestSchema = z
45
45
  arion: NonEmptyStringSchema.optional(),
46
46
  cwd: NonEmptyStringSchema.optional(),
47
47
  history: MessageHistorySchema,
48
+ requestedModel: NonEmptyStringSchema.optional(),
48
49
  preferredTier: z.enum(["fast", "balanced", "powerful", "ensemble"]).optional(),
49
50
  budget: z.number().positive().optional(),
50
51
  maxTurns: z.number().int().positive().optional(),
@@ -144,6 +145,7 @@ export const ResumeRunRequestSchema = z
144
145
  state: z.unknown(),
145
146
  arion: NonEmptyStringSchema.optional(),
146
147
  cwd: NonEmptyStringSchema.optional(),
148
+ requestedModel: NonEmptyStringSchema.optional(),
147
149
  preferredTier: z.enum(["fast", "balanced", "powerful", "ensemble"]).optional(),
148
150
  budget: z.number().positive().optional(),
149
151
  maxTurns: z.number().int().positive().optional(),
@@ -15,12 +15,13 @@ import {
15
15
  ARION_TOOL_DEFINITIONS,
16
16
  DELEGATION_TOOL_DEFINITIONS,
17
17
  META_TOOL_DEFINITIONS,
18
+ FRG_TOOL_DEFINITIONS,
18
19
  } from "../../src/definitions/core.js";
19
20
 
20
21
  describe("Tool Inventory", () => {
21
22
  describe("total tool count", () => {
22
- it("should have exactly 64 core tools", () => {
23
- expect(CORE_TOOL_DEFINITIONS).toHaveLength(64);
23
+ it("should have exactly 74 core tools", () => {
24
+ expect(CORE_TOOL_DEFINITIONS).toHaveLength(74);
24
25
  });
25
26
  });
26
27
 
@@ -35,6 +36,7 @@ describe("Tool Inventory", () => {
35
36
  expect(names).toContain("apply_patch");
36
37
  expect(names).toContain("write_stdin");
37
38
  expect(names).toContain("ls");
39
+ expect(names).toContain("frg");
38
40
  });
39
41
 
40
42
  it("should include OpenClaw-forked tools", () => {
@@ -107,7 +109,7 @@ describe("Tool Inventory", () => {
107
109
  });
108
110
 
109
111
  it("all core tools have loadingTier 'always' or 'deferred'", () => {
110
- const ALLOWED_DEFERRED = new Set(["session_history", "self_diagnose"]);
112
+ const ALLOWED_DEFERRED = new Set(["session_history", "frg"]);
111
113
  for (const tool of CORE_TOOL_DEFINITIONS) {
112
114
  if (ALLOWED_DEFERRED.has(tool.name)) {
113
115
  expect(tool.loadingTier, `Tool "${tool.name}" should have loadingTier "deferred"`).toBe(
@@ -126,8 +128,8 @@ describe("Tool Inventory", () => {
126
128
  const countByCategory = (category: string) =>
127
129
  CORE_TOOL_DEFINITIONS.filter((t) => t.category === category).length;
128
130
 
129
- it("filesystem: 7 tools", () => {
130
- expect(countByCategory("filesystem")).toBe(7);
131
+ it("filesystem: 8 tools", () => {
132
+ expect(countByCategory("filesystem")).toBe(8);
131
133
  });
132
134
 
133
135
  it("shell: 8 tools", () => {
@@ -154,6 +156,10 @@ describe("Tool Inventory", () => {
154
156
  expect(countByCategory("messaging")).toBe(14);
155
157
  });
156
158
 
159
+ it("code: 9 tools (rg, ug, probe, sg, cbm, lsp, serena, fff, frg)", () => {
160
+ expect(countByCategory("code")).toBe(9);
161
+ });
162
+
157
163
  it("category counts sum to total", () => {
158
164
  const total =
159
165
  countByCategory("filesystem") +
@@ -162,7 +168,8 @@ describe("Tool Inventory", () => {
162
168
  countByCategory("memory") +
163
169
  countByCategory("arion") +
164
170
  countByCategory("meta") +
165
- countByCategory("messaging");
171
+ countByCategory("messaging") +
172
+ countByCategory("code");
166
173
  expect(total).toBe(CORE_TOOL_DEFINITIONS.length);
167
174
  });
168
175
  });
@@ -195,6 +202,10 @@ describe("Tool Inventory", () => {
195
202
  it("META_TOOL_DEFINITIONS has 12 tools", () => {
196
203
  expect(META_TOOL_DEFINITIONS).toHaveLength(12);
197
204
  });
205
+
206
+ it("FRG_TOOL_DEFINITIONS has 1 tool", () => {
207
+ expect(FRG_TOOL_DEFINITIONS).toHaveLength(1);
208
+ });
198
209
  });
199
210
 
200
211
  describe("every tool has an execute function", () => {
@@ -0,0 +1,136 @@
1
+ import { beforeEach, describe, expect, it } from "vitest";
2
+ import * as fs from "node:fs/promises";
3
+ import * as path from "node:path";
4
+ import * as os from "node:os";
5
+ import {
6
+ DEFAULT_BACKGROUND_FRG_FLUSH_DELAY_MS,
7
+ MAX_PENDING_MUTATIONS_BEFORE_SYNC_FLUSH,
8
+ clearPendingFrgMutations,
9
+ ensureFrgRepoStateLoaded,
10
+ getFrgFreshnessSnapshot,
11
+ getGitDirtyStatus,
12
+ getPendingFrgMutations,
13
+ recordFrgMutation,
14
+ scheduleBackgroundFrgFlush,
15
+ searchPendingFrgMutations,
16
+ shouldForceSynchronousFrgFlush,
17
+ } from "../../src/executors/frg-freshness.js";
18
+
19
// Unit tests for the frg freshness manager. Each test works against a fresh
// temp directory containing a bare ".git" folder so the module treats it as
// a repo root; mutation state is reset in beforeEach via
// clearPendingFrgMutations.
describe("frg freshness manager", () => {
  let repoDir: string;
  let filePath: string;

  beforeEach(async () => {
    // Fake repo: an empty ".git" directory is enough for root detection.
    repoDir = await fs.mkdtemp(path.join(os.tmpdir(), "aria-frg-freshness-"));
    await fs.mkdir(path.join(repoDir, ".git"));
    await fs.writeFile(path.join(repoDir, "a.ts"), "export const a = 1;\n", "utf8");
    filePath = path.join(repoDir, "a.ts");
    clearPendingFrgMutations(repoDir);
  });

  it("records write mutations in sequence order", () => {
    // Two writes to the same path must collapse into one pending record.
    recordFrgMutation(filePath, "write");
    recordFrgMutation(filePath, "write");

    const pending = getPendingFrgMutations(repoDir);
    expect(pending).toHaveLength(1);
    expect(pending[0]?.operation).toBe("write");
    expect(pending[0]?.path).toBe(path.resolve(filePath));
  });

  it("tracks delete operations", () => {
    recordFrgMutation(filePath, "delete");
    const pending = getPendingFrgMutations(repoDir);
    expect(pending).toHaveLength(1);
    expect(pending[0]?.operation).toBe("delete");
  });

  it("searches pending session mutations directly", async () => {
    const updatedContent = 'export const token = "abc123";\nexport const Beta = 2;\n';
    await fs.writeFile(filePath, updatedContent, "utf8");
    recordFrgMutation(filePath, "write", updatedContent);

    // Literal pattern search over the session delta only.
    const result = await searchPendingFrgMutations(repoDir, {
      pattern: "abc123",
      literal: true,
    });
    expect(result?.mode).toBe("matches");
    expect(result && result.mode === "matches" ? result.matches?.length : 0).toBeGreaterThan(0);

    // filesOnly mode returns repo-relative file names.
    const filesOnly = await searchPendingFrgMutations(repoDir, {
      pattern: "token",
      filesOnly: true,
    });
    expect(filesOnly?.mode).toBe("files");
    expect(filesOnly && filesOnly.mode === "files" ? filesOnly.files : []).toContain("a.ts");

    // count mode returns per-file match counts.
    const count = await searchPendingFrgMutations(repoDir, {
      pattern: "export",
      count: true,
    });
    expect(count?.mode).toBe("counts");
    expect(count && count.mode === "counts" ? count.counts?.[0]?.count : 0).toBeGreaterThan(0);

    // quiet mode only reports whether anything matched (case-insensitive here).
    const quiet = await searchPendingFrgMutations(repoDir, {
      pattern: "BETA",
      caseInsensitive: true,
      quiet: true,
    });
    expect(quiet?.mode).toBe("quiet");
    expect(quiet?.matched).toBe(true);
  });

  it("schedules background flushes without forcing sync below threshold", async () => {
    const updatedContent = 'export const token = "abc123";\n';
    await fs.writeFile(filePath, updatedContent, "utf8");
    recordFrgMutation(filePath, "write", updatedContent);

    expect(DEFAULT_BACKGROUND_FRG_FLUSH_DELAY_MS).toBeGreaterThan(0);
    expect(shouldForceSynchronousFrgFlush(repoDir)).toBe(false);

    // Fire a short-delay background flush and give it time to run; the test
    // only asserts that scheduling does not throw.
    scheduleBackgroundFrgFlush(repoDir, 5);
    await new Promise((resolve) => setTimeout(resolve, 25));
  });

  it("forces sync only when pending mutations cross threshold", () => {
    // One below the threshold: still background-flushable.
    for (let i = 0; i < MAX_PENDING_MUTATIONS_BEFORE_SYNC_FLUSH - 1; i++) {
      recordFrgMutation(path.join(repoDir, `f-${i}.ts`), "write");
    }
    expect(shouldForceSynchronousFrgFlush(repoDir)).toBe(false);

    // Crossing the threshold flips the decision to a synchronous flush.
    recordFrgMutation(
      path.join(repoDir, `f-${MAX_PENDING_MUTATIONS_BEFORE_SYNC_FLUSH}.ts`),
      "write",
    );
    expect(shouldForceSynchronousFrgFlush(repoDir)).toBe(true);
  });

  it("reports freshness snapshot and can clear/reload state boundaries", async () => {
    const updatedContent = 'export const wal = "persisted";\n';
    await fs.writeFile(filePath, updatedContent, "utf8");
    recordFrgMutation(filePath, "write", updatedContent);

    const pendingBefore = getPendingFrgMutations(repoDir);
    expect(pendingBefore).toHaveLength(1);

    // Clearing then reloading must leave no pending mutations behind.
    clearPendingFrgMutations(repoDir);
    ensureFrgRepoStateLoaded(repoDir);
    const pendingAfter = getPendingFrgMutations(repoDir);
    expect(pendingAfter).toHaveLength(0);
  });

  it("reads git dirty status and freshness metadata", async () => {
    // NOTE(review): the ".git" here is an empty directory, not a real repo,
    // so git commands presumably fail gracefully — only the shape of the
    // result is asserted, not its contents.
    const dirtyPath = path.join(repoDir, "dirty.ts");
    await fs.writeFile(dirtyPath, "export const dirty = true;\n", "utf8");

    const dirty = getGitDirtyStatus(repoDir);
    expect(Array.isArray(dirty.untracked)).toBe(true);

    const snapshot = getFrgFreshnessSnapshot(repoDir, {
      commit_hash: "abc",
      tree_hash: "def",
      age_seconds: 10,
    });
    expect(snapshot.freshnessState).toBeDefined();
  });
});
@@ -0,0 +1,70 @@
1
+ import { afterEach, beforeEach, describe, expect, it } from "vitest";
2
+ import * as fs from "node:fs/promises";
3
+ import * as path from "node:path";
4
+ import * as os from "node:os";
5
+ import type { ToolContext } from "../../src/types.js";
6
+ import { clearPendingFrgMutations, recordFrgMutation } from "../../src/executors/frg-freshness.js";
7
+ import { executeFrg } from "../../src/executors/frg.js";
8
+
9
// Tests that session-delta (pending mutation) matches shadow the indexed
// versions of the same files across all frg search output modes.
describe("frg search merge precedence", () => {
  let repoDir: string;
  let filePath: string;
  let ctx: ToolContext;

  beforeEach(async () => {
    // Fake repo root with one indexed file; the frg index is built before
    // any mutation is recorded so the "old" content lands in the index.
    repoDir = await fs.mkdtemp(path.join(os.tmpdir(), "aria-frg-merge-"));
    await fs.mkdir(path.join(repoDir, ".git"));
    filePath = path.join(repoDir, "a.ts");
    await fs.writeFile(filePath, 'export const version = "old";\n', "utf8");
    clearPendingFrgMutations(repoDir);
    ctx = {
      workingDir: repoDir,
      env: {},
      // Auto-approve any confirmation prompt the executor raises.
      confirm: async () => true,
    };
    await executeFrg({ command: "init", path: repoDir }, ctx);
    await executeFrg({ command: "index", path: repoDir }, ctx);
  });

  afterEach(() => {
    // Drop module-level mutation state so tests cannot leak into each other.
    clearPendingFrgMutations(repoDir);
  });

  it("shadows indexed matches for mutated files with session delta matches", async () => {
    const updatedContent = 'export const version = "new";\n';
    await fs.writeFile(filePath, updatedContent, "utf8");
    recordFrgMutation(filePath, "write", updatedContent);

    const result = await executeFrg({ command: "search", path: repoDir, pattern: "version" }, ctx);
    expect(result.success).toBe(true);
    const data = result.data as { matches?: Array<{ file: string; content: string }> };
    const matches = data.matches ?? [];
    expect(matches.length).toBeGreaterThan(0);
    expect(matches.every((match) => match.file === "a.ts")).toBe(true);
    // Session content must fully replace the stale indexed content.
    expect(matches.some((match) => match.content.includes("new"))).toBe(true);
    expect(matches.some((match) => match.content.includes("old"))).toBe(false);
  });

  it("shadows indexed files/counts for mutated files", async () => {
    const updatedContent = 'export const changed = "yes";\nexport const changedAgain = "yes";\n';
    await fs.writeFile(filePath, updatedContent, "utf8");
    recordFrgMutation(filePath, "write", updatedContent);

    // filesOnly mode: the mutated file must appear exactly once.
    const filesOnly = await executeFrg(
      { command: "search", path: repoDir, pattern: "changed", filesOnly: true },
      ctx,
    );
    expect(filesOnly.success).toBe(true);
    const files = (filesOnly.data as { files?: string[] }).files ?? [];
    expect(files).toEqual(["a.ts"]);

    // count mode: counts reflect the session content (2 occurrences), not
    // the single occurrence in the indexed "old" content.
    const counts = await executeFrg(
      { command: "search", path: repoDir, pattern: "changed", count: true },
      ctx,
    );
    expect(counts.success).toBe(true);
    const countRows =
      (counts.data as { counts?: Array<{ file: string; count: number }> }).counts ?? [];
    expect(countRows).toEqual([{ file: "a.ts", count: 2 }]);
  });
});