@polka-codes/cli-shared 0.10.22 → 0.10.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/dist/index.js +1913 -7
  2. package/package.json +3 -3
  3. package/dist/config.js +0 -202
  4. package/dist/config.js.map +0 -1
  5. package/dist/config.parameters.test.js +0 -240
  6. package/dist/config.parameters.test.js.map +0 -1
  7. package/dist/config.rules.test.js +0 -92
  8. package/dist/config.rules.test.js.map +0 -1
  9. package/dist/config.test.js +0 -311
  10. package/dist/config.test.js.map +0 -1
  11. package/dist/index.js.map +0 -1
  12. package/dist/memory-manager.js +0 -76
  13. package/dist/memory-manager.js.map +0 -1
  14. package/dist/project-scope.js +0 -67
  15. package/dist/project-scope.js.map +0 -1
  16. package/dist/provider.js +0 -366
  17. package/dist/provider.js.map +0 -1
  18. package/dist/provider.test.js +0 -21
  19. package/dist/provider.test.js.map +0 -1
  20. package/dist/sqlite-memory-store.js +0 -911
  21. package/dist/sqlite-memory-store.js.map +0 -1
  22. package/dist/sqlite-memory-store.test.js +0 -661
  23. package/dist/sqlite-memory-store.test.js.map +0 -1
  24. package/dist/utils/__tests__/parameterSimplifier.test.js +0 -137
  25. package/dist/utils/__tests__/parameterSimplifier.test.js.map +0 -1
  26. package/dist/utils/checkRipgrep.js +0 -22
  27. package/dist/utils/checkRipgrep.js.map +0 -1
  28. package/dist/utils/eventHandler.js +0 -199
  29. package/dist/utils/eventHandler.js.map +0 -1
  30. package/dist/utils/eventHandler.test.js +0 -50
  31. package/dist/utils/eventHandler.test.js.map +0 -1
  32. package/dist/utils/index.js +0 -7
  33. package/dist/utils/index.js.map +0 -1
  34. package/dist/utils/listFiles.js +0 -136
  35. package/dist/utils/listFiles.js.map +0 -1
  36. package/dist/utils/listFiles.test.js +0 -64
  37. package/dist/utils/listFiles.test.js.map +0 -1
  38. package/dist/utils/parameterSimplifier.js +0 -65
  39. package/dist/utils/parameterSimplifier.js.map +0 -1
  40. package/dist/utils/readMultiline.js +0 -19
  41. package/dist/utils/readMultiline.js.map +0 -1
  42. package/dist/utils/search.constants.js +0 -8
  43. package/dist/utils/search.constants.js.map +0 -1
  44. package/dist/utils/searchFiles.js +0 -72
  45. package/dist/utils/searchFiles.js.map +0 -1
  46. package/dist/utils/searchFiles.test.js +0 -140
  47. package/dist/utils/searchFiles.test.js.map +0 -1
package/dist/index.js CHANGED
@@ -1,7 +1,1913 @@
1
- export * from './config.js';
2
- export * from './memory-manager.js';
3
- export * from './project-scope.js';
4
- export * from './provider.js';
5
- export * from './sqlite-memory-store.js';
6
- export * from './utils/index.js';
7
- //# sourceMappingURL=index.js.map
1
+ // src/config.ts
2
+ import { existsSync, readFileSync } from "fs";
3
+ import { readFile } from "fs/promises";
4
+ import { homedir } from "os";
5
+ import { join } from "path";
6
+ import { configSchema } from "@polka-codes/core";
7
+ import { merge } from "lodash-es";
8
+ import { parse } from "yaml";
9
+ import { ZodError } from "zod";
10
+ function getGlobalConfigPath(home = homedir()) {
11
+ return join(home, ".config", "polkacodes", "config.yml");
12
+ }
13
+ function loadConfigAtPath(path) {
14
+ try {
15
+ return readConfig(path);
16
+ } catch (error) {
17
+ if (error instanceof ZodError) {
18
+ console.warn(`Config validation failed for ${path}: ${error.message}`);
19
+ }
20
+ return void 0;
21
+ }
22
+ }
23
+ var localConfigFileName = ".polkacodes.yml";
24
+ var mergeArray = (a, b) => {
25
+ if (!a && !b) {
26
+ return void 0;
27
+ }
28
+ if (!a) {
29
+ return b;
30
+ }
31
+ if (!b) {
32
+ return a;
33
+ }
34
+ return [...a, ...b];
35
+ };
36
+ function mergeConfigs(configs) {
37
+ if (configs.length === 0) {
38
+ return {};
39
+ }
40
+ const mergedConfig = configs.reduce((acc, config) => {
41
+ const merged = merge({}, acc, config);
42
+ const accRules = acc.rules ? Array.isArray(acc.rules) ? acc.rules : [acc.rules] : void 0;
43
+ const configRules = config.rules ? Array.isArray(config.rules) ? config.rules : [config.rules] : void 0;
44
+ merged.rules = mergeArray(accRules, configRules);
45
+ merged.excludeFiles = mergeArray(acc.excludeFiles, config.excludeFiles);
46
+ return merged;
47
+ }, {});
48
+ return mergedConfig;
49
+ }
50
+ async function resolveRules(rules) {
51
+ if (!rules) {
52
+ return void 0;
53
+ }
54
+ if (typeof rules === "string") {
55
+ return rules;
56
+ }
57
+ const resolvedRules = await Promise.all(
58
+ rules.map(async (rule) => {
59
+ if (typeof rule === "string") {
60
+ return rule;
61
+ }
62
+ if ("url" in rule) {
63
+ try {
64
+ const controller = new AbortController();
65
+ const timeoutId = setTimeout(() => controller.abort(), 3e4);
66
+ const response = await fetch(rule.url, { signal: controller.signal });
67
+ clearTimeout(timeoutId);
68
+ if (response.ok) {
69
+ return await response.text();
70
+ }
71
+ console.warn(`Failed to fetch rule from ${rule.url}: ${response.statusText}`);
72
+ } catch (error) {
73
+ if (error instanceof Error && error.name === "AbortError") {
74
+ console.warn(`Timeout fetching rule from ${rule.url} (30s)`);
75
+ } else {
76
+ console.warn(`Error fetching rule from ${rule.url}: ${error}`);
77
+ }
78
+ }
79
+ } else if ("repo" in rule) {
80
+ const ref = rule.commit ?? rule.tag ?? rule.branch ?? "main";
81
+ const url = `https://raw.githubusercontent.com/${rule.repo}/${ref}/${rule.path}`;
82
+ try {
83
+ const controller = new AbortController();
84
+ const timeoutId = setTimeout(() => controller.abort(), 3e4);
85
+ const response = await fetch(url, { signal: controller.signal });
86
+ clearTimeout(timeoutId);
87
+ if (response.ok) {
88
+ return await response.text();
89
+ }
90
+ console.warn(`Failed to fetch rule from ${url}: ${response.statusText}`);
91
+ } catch (error) {
92
+ if (error instanceof Error && error.name === "AbortError") {
93
+ console.warn(`Timeout fetching rule from ${url} (30s)`);
94
+ } else {
95
+ console.warn(`Error fetching rule from ${url}: ${error}`);
96
+ }
97
+ }
98
+ } else if ("path" in rule) {
99
+ if (existsSync(rule.path)) {
100
+ return await readFile(rule.path, "utf-8");
101
+ }
102
+ console.warn(`Rule file not found: ${rule.path}`);
103
+ }
104
+ return void 0;
105
+ })
106
+ );
107
+ return resolvedRules.filter((rule) => rule !== void 0).join("\n\n");
108
+ }
109
+ async function loadConfig(paths, cwd = process.cwd(), home = homedir()) {
110
+ const configs = [];
111
+ const globalConfigPath = getGlobalConfigPath(home);
112
+ if (existsSync(globalConfigPath)) {
113
+ try {
114
+ const globalConfig = readConfig(globalConfigPath);
115
+ configs.push(globalConfig);
116
+ } catch (error) {
117
+ console.warn(`Error loading global config file: ${globalConfigPath}
118
+ ${error}`);
119
+ }
120
+ }
121
+ if (paths && paths.length > 0) {
122
+ const configPaths = Array.isArray(paths) ? paths : [paths];
123
+ for (const path of configPaths) {
124
+ try {
125
+ const config = readConfig(path);
126
+ configs.push(config);
127
+ } catch (error) {
128
+ const errorCode = error?.code;
129
+ if (errorCode === "ENOENT") {
130
+ continue;
131
+ }
132
+ console.error(`Error loading config file: ${path}
133
+ ${error}`);
134
+ throw error;
135
+ }
136
+ }
137
+ } else {
138
+ const configPath = join(cwd, localConfigFileName);
139
+ try {
140
+ const projectConfig = readConfig(configPath);
141
+ configs.push(projectConfig);
142
+ } catch (error) {
143
+ if (error instanceof ZodError) {
144
+ console.error(`Error in config file: ${configPath}
145
+ ${error}`);
146
+ throw error;
147
+ }
148
+ }
149
+ }
150
+ const mergedConfig = configs.length > 0 ? mergeConfigs(configs) : void 0;
151
+ if (!mergedConfig) {
152
+ return void 0;
153
+ }
154
+ const resolvedRules = await resolveRules(mergedConfig.rules);
155
+ return {
156
+ ...mergedConfig,
157
+ rules: resolvedRules
158
+ };
159
+ }
160
+ var readConfig = (path) => {
161
+ const file = readFileSync(path, "utf8");
162
+ const config = parse(file);
163
+ const result = configSchema.parse(config);
164
+ if (result == null) {
165
+ return {};
166
+ }
167
+ return result;
168
+ };
169
+ var readLocalConfig = (path) => {
170
+ try {
171
+ return readConfig(path ?? localConfigFileName);
172
+ } catch (error) {
173
+ if (error instanceof ZodError) {
174
+ console.warn(`Local config validation failed for ${path ?? localConfigFileName}: ${error.message}`);
175
+ } else {
176
+ console.debug(
177
+ `Could not load local config from ${path ?? localConfigFileName}: ${error instanceof Error ? error.message : String(error)}`
178
+ );
179
+ }
180
+ return void 0;
181
+ }
182
+ };
183
+
184
+ // src/memory-manager.ts
185
+ var MemoryManager = class {
186
+ store;
187
+ constructor(store) {
188
+ this.store = store;
189
+ }
190
+ /**
191
+ * Read memory by topic name
192
+ */
193
+ async readMemory(topic) {
194
+ return this.store.readMemory(topic);
195
+ }
196
+ /**
197
+ * Update memory with operation
198
+ */
199
+ async updateMemory(operation, topic, content, metadata) {
200
+ return this.store.updateMemory(operation, topic, content, metadata);
201
+ }
202
+ /**
203
+ * Query memory with filters
204
+ */
205
+ async queryMemory(query = {}, options) {
206
+ const finalQuery = {
207
+ ...query
208
+ };
209
+ const operation = options?.operation;
210
+ if ((operation === "select" || !operation) && !finalQuery.limit) {
211
+ finalQuery.limit = 1e3;
212
+ }
213
+ return this.store.queryMemory(finalQuery, options);
214
+ }
215
+ /**
216
+ * Batch update memory
217
+ */
218
+ async batchUpdateMemory(operations) {
219
+ return this.store.batchUpdateMemory(operations);
220
+ }
221
+ /**
222
+ * Get database statistics
223
+ */
224
+ async getStats() {
225
+ return this.store.getStats();
226
+ }
227
+ /**
228
+ * Close the memory store
229
+ */
230
+ async close() {
231
+ await this.store.close();
232
+ }
233
+ /**
234
+ * Execute a transaction
235
+ * Exposes the underlying store's transaction method if available
236
+ */
237
+ async transaction(callback) {
238
+ const storeWithTransaction = this.store;
239
+ if (typeof storeWithTransaction.transaction === "function") {
240
+ return storeWithTransaction.transaction(callback);
241
+ }
242
+ return callback();
243
+ }
244
+ };
245
+
246
+ // src/project-scope.ts
247
+ import { existsSync as existsSync2 } from "fs";
248
+ import { dirname, normalize, resolve, sep } from "path";
249
+ function detectProjectScope(cwd) {
250
+ const projectPath = findProjectRoot(cwd);
251
+ if (!projectPath) {
252
+ return "global";
253
+ }
254
+ const normalizedPath = normalizePath(projectPath);
255
+ return `project:${normalizedPath}`;
256
+ }
257
+ function findProjectRoot(dir) {
258
+ if (!existsSync2(dir)) {
259
+ return null;
260
+ }
261
+ const primaryMarkers = [".git", "package.json"];
262
+ const secondaryMarkers = [
263
+ "Cargo.toml",
264
+ // Rust
265
+ "go.mod",
266
+ // Go
267
+ "pyproject.toml",
268
+ // Python
269
+ "requirements.txt",
270
+ // Python
271
+ "setup.py",
272
+ // Python
273
+ "Gemfile",
274
+ // Ruby
275
+ "pom.xml",
276
+ // Java Maven
277
+ "build.gradle"
278
+ // Java Gradle
279
+ ];
280
+ for (const marker of primaryMarkers) {
281
+ if (existsSync2(resolve(dir, marker))) {
282
+ return dir;
283
+ }
284
+ }
285
+ for (const marker of secondaryMarkers) {
286
+ if (existsSync2(resolve(dir, marker))) {
287
+ return dir;
288
+ }
289
+ }
290
+ const parent = dirname(dir);
291
+ if (parent === dir) {
292
+ return null;
293
+ }
294
+ return findProjectRoot(parent);
295
+ }
296
+ function normalizePath(path) {
297
+ return normalize(path).split(sep).join("/");
298
+ }
299
+
300
+ // src/provider.ts
301
+ import { spawn as spawn2 } from "child_process";
302
+ import { mkdir, readFile as readFile2, rename, unlink, writeFile } from "fs/promises";
303
+ import { dirname as dirname2, normalize as normalize2, resolve as resolve3 } from "path";
304
+ import { vertex } from "@ai-sdk/google-vertex";
305
+ import { input, select } from "@inquirer/prompts";
306
+ import { generateText, stepCountIs } from "ai";
307
+ import ignore2 from "ignore";
308
+ import { lookup } from "mime-types";
309
+
310
+ // src/utils/checkRipgrep.ts
311
+ import { spawnSync } from "child_process";
312
+ var rgAvailability = {
313
+ isAvailable: null
314
+ };
315
+ function checkRipgrep() {
316
+ if (rgAvailability.isAvailable !== null) {
317
+ return rgAvailability.isAvailable;
318
+ }
319
+ const rg = spawnSync("rg", ["--version"]);
320
+ if (rg.error || rg.status !== 0) {
321
+ rgAvailability.isAvailable = false;
322
+ return false;
323
+ }
324
+ rgAvailability.isAvailable = true;
325
+ return true;
326
+ }
327
+
328
+ // src/utils/listFiles.ts
329
+ import { promises as fs } from "fs";
330
+ import { join as join2, relative, resolve as resolve2 } from "path";
331
+ import ignore from "ignore";
332
+ var DEFAULT_IGNORES = [
333
+ "__pycache__",
334
+ ".DS_Store",
335
+ ".env",
336
+ ".git",
337
+ ".idea",
338
+ ".svn",
339
+ ".temp",
340
+ ".vscode",
341
+ "coverage",
342
+ "dist",
343
+ "node_modules",
344
+ "out",
345
+ "Thumbs.db"
346
+ ];
347
+ async function extendPatterns(basePatterns, dirPath) {
348
+ try {
349
+ const gitignorePath = join2(dirPath, ".gitignore");
350
+ const content = await fs.readFile(gitignorePath, "utf8");
351
+ const lines = content.split(/\r?\n/).filter(Boolean);
352
+ return [...basePatterns, ...lines];
353
+ } catch {
354
+ return basePatterns;
355
+ }
356
+ }
357
+ function createIgnore(patterns) {
358
+ return ignore().add(patterns);
359
+ }
360
+ async function listFiles(dirPath, recursive, maxCount, cwd, excludeFiles, includeIgnored) {
361
+ let rootPatterns = [...excludeFiles || []];
362
+ if (!includeIgnored) {
363
+ rootPatterns.push(...DEFAULT_IGNORES);
364
+ try {
365
+ const rootGitignore = await fs.readFile(join2(cwd, ".gitignore"), "utf8");
366
+ const lines = rootGitignore.split(/\r?\n/).filter(Boolean);
367
+ rootPatterns = [...rootPatterns, ...lines];
368
+ } catch {
369
+ }
370
+ }
371
+ const results = [];
372
+ const processedDirs = /* @__PURE__ */ new Set();
373
+ const queue = [
374
+ {
375
+ path: resolve2(dirPath),
376
+ patterns: rootPatterns,
377
+ relPath: relative(cwd, resolve2(dirPath)).replace(/\\/g, "/") || "."
378
+ }
379
+ ];
380
+ while (queue.length > 0) {
381
+ const { path: currentPath, patterns: parentPatterns, relPath: currentRelPath } = queue.shift();
382
+ processedDirs.add(currentRelPath);
383
+ const mergedPatterns = includeIgnored ? parentPatterns : await extendPatterns(parentPatterns, currentPath);
384
+ const folderIg = createIgnore(mergedPatterns);
385
+ const entries = await fs.readdir(currentPath, { withFileTypes: true });
386
+ entries.sort((a, b) => a.name.localeCompare(b.name));
387
+ for (const entry of entries) {
388
+ const fullPath = join2(currentPath, entry.name);
389
+ const relPath = relative(cwd, fullPath).replace(/\\/g, "/");
390
+ if (folderIg.ignores(relPath)) {
391
+ continue;
392
+ }
393
+ if (entry.isDirectory()) {
394
+ if (recursive) {
395
+ queue.push({
396
+ path: fullPath,
397
+ patterns: mergedPatterns,
398
+ relPath
399
+ });
400
+ }
401
+ } else {
402
+ results.push(relPath);
403
+ if (results.length >= maxCount) {
404
+ const remainingEntries = entries.slice(entries.indexOf(entry) + 1);
405
+ const hasRemainingFiles = remainingEntries.some(
406
+ (e) => !e.isDirectory() && !folderIg.ignores(relative(cwd, join2(currentPath, e.name)).replace(/\\/g, "/"))
407
+ );
408
+ if (hasRemainingFiles) {
409
+ const marker = `${currentRelPath}/(files omitted)`;
410
+ results.push(marker);
411
+ }
412
+ for (const queueItem of queue) {
413
+ if (!processedDirs.has(queueItem.relPath)) {
414
+ const marker = `${queueItem.relPath}/(files omitted)`;
415
+ results.push(marker);
416
+ processedDirs.add(queueItem.relPath);
417
+ }
418
+ }
419
+ results.sort();
420
+ return [results, true];
421
+ }
422
+ }
423
+ }
424
+ }
425
+ results.sort();
426
+ return [results, false];
427
+ }
428
+
429
+ // src/utils/searchFiles.ts
430
+ import { spawn } from "child_process";
431
+
432
+ // src/utils/search.constants.ts
433
+ var SEARCH_CONSTANTS = {
434
+ /** Default number of context lines to show around search matches */
435
+ DEFAULT_CONTEXT_LINES: 5
436
+ };
437
+
438
+ // src/utils/searchFiles.ts
439
+ async function searchFiles(path, regex, filePattern, cwd, excludeFiles) {
440
+ const args = [
441
+ "--line-number",
442
+ // Show line numbers
443
+ `--context=${SEARCH_CONSTANTS.DEFAULT_CONTEXT_LINES}`,
444
+ // Show lines before and after matches
445
+ "--color=never",
446
+ // No color codes in output
447
+ "--with-filename",
448
+ // Show filenames
449
+ "--smart-case"
450
+ // Smart case sensitivity
451
+ ];
452
+ if (filePattern && filePattern !== "*") {
453
+ const patterns = filePattern.split(",").map((p) => p.trim()).filter(Boolean);
454
+ for (const pattern of patterns) {
455
+ if (pattern) {
456
+ args.push("--glob", pattern);
457
+ }
458
+ }
459
+ }
460
+ if (excludeFiles) {
461
+ for (const pattern of excludeFiles) {
462
+ args.push("--glob", `!${pattern}`);
463
+ }
464
+ }
465
+ args.push(regex, path);
466
+ return new Promise((resolve5, reject) => {
467
+ const results = [];
468
+ const rg = spawn("rg", args, {
469
+ cwd,
470
+ stdio: ["ignore", "pipe", "pipe"]
471
+ });
472
+ rg.stdout.on("data", (data) => {
473
+ const lines = data.toString().split("\n").filter(Boolean);
474
+ results.push(...lines);
475
+ });
476
+ rg.on("error", (error) => {
477
+ reject(new Error(`Failed to execute ripgrep: ${error.message}`));
478
+ });
479
+ rg.on("close", (code) => {
480
+ if (code !== 0 && code !== 1) {
481
+ reject(new Error(`Ripgrep process exited with code ${code}`));
482
+ return;
483
+ }
484
+ resolve5(results);
485
+ });
486
+ });
487
+ }
488
+
489
+ // src/provider.ts
490
+ var InMemoryStore = class {
491
+ #data;
492
+ async read() {
493
+ return this.#data;
494
+ }
495
+ async write(data) {
496
+ this.#data = data;
497
+ }
498
+ };
499
+ function isIMemoryStore(store) {
500
+ return "readMemory" in store && "updateMemory" in store;
501
+ }
502
+ var getProvider = (options = {}) => {
503
+ const ig = ignore2().add(options.excludeFiles ?? []);
504
+ const memoryStore = options.memoryStore ?? new InMemoryStore();
505
+ const todoItemStore = options.todoItemStore ?? new InMemoryStore();
506
+ const defaultMemoryTopic = ":default:";
507
+ const searchModel = options.getModel?.("search");
508
+ const readMemoryKV = async (topic) => {
509
+ if (!isIMemoryStore(memoryStore)) {
510
+ const data = await memoryStore.read() ?? {};
511
+ return data[topic];
512
+ }
513
+ return memoryStore.readMemory(topic);
514
+ };
515
+ const updateMemoryKV = async (operation, topic, content) => {
516
+ if (!isIMemoryStore(memoryStore)) {
517
+ const data = await memoryStore.read() ?? {};
518
+ switch (operation) {
519
+ case "append":
520
+ if (content === void 0) {
521
+ throw new Error("Content is required for append operation.");
522
+ }
523
+ data[topic] = `${data[topic] || ""}
524
+ ${content}`;
525
+ break;
526
+ case "replace":
527
+ if (content === void 0) {
528
+ throw new Error("Content is required for replace operation.");
529
+ }
530
+ data[topic] = content;
531
+ break;
532
+ case "remove":
533
+ delete data[topic];
534
+ break;
535
+ }
536
+ await memoryStore.write(data);
537
+ return;
538
+ }
539
+ await memoryStore.updateMemory(operation, topic, content);
540
+ };
541
+ const listMemoryTopicsKV = async () => {
542
+ if (!isIMemoryStore(memoryStore)) {
543
+ const data = await memoryStore.read() ?? {};
544
+ return Object.keys(data);
545
+ }
546
+ const entries = await memoryStore.queryMemory({});
547
+ if (Array.isArray(entries)) {
548
+ return entries.map((e) => e.name);
549
+ }
550
+ return [];
551
+ };
552
+ const provider = {
553
+ listTodoItems: async (id, status) => {
554
+ const todoItems = await todoItemStore.read() ?? [];
555
+ let items;
556
+ if (!id) {
557
+ items = todoItems.filter((i) => !i.id.includes("."));
558
+ } else {
559
+ const parent = todoItems.find((i) => i.id === id);
560
+ if (!parent) {
561
+ throw new Error(`To-do item with id ${id} not found`);
562
+ }
563
+ items = todoItems.filter((i) => i.id.startsWith(`${id}.`) && i.id.split(".").length === id.split(".").length + 1);
564
+ }
565
+ if (status) {
566
+ items = items.filter((item) => item.status === status);
567
+ }
568
+ items.sort((a, b) => {
569
+ const aParts = a.id.split(".");
570
+ const bParts = b.id.split(".");
571
+ const len = Math.min(aParts.length, bParts.length);
572
+ for (let i = 0; i < len; i++) {
573
+ const comparison = aParts[i].localeCompare(bParts[i], void 0, { numeric: true });
574
+ if (comparison !== 0) {
575
+ return comparison;
576
+ }
577
+ }
578
+ return aParts.length - bParts.length;
579
+ });
580
+ return items;
581
+ },
582
+ getTodoItem: async (id) => {
583
+ const todoItems = await todoItemStore.read() ?? [];
584
+ const item = todoItems.find((i) => i.id === id);
585
+ if (!item) {
586
+ throw new Error(`To-do item with id ${id} not found`);
587
+ }
588
+ const subItems = todoItems.filter((i) => i.id.startsWith(`${id}.`) && i.id.split(".").length === id.split(".").length + 1).map(({ id: id2, title }) => ({ id: id2, title }));
589
+ return { ...item, subItems };
590
+ },
591
+ updateTodoItem: async (input2) => {
592
+ const todoItems = await todoItemStore.read() ?? [];
593
+ if (input2.operation === "add") {
594
+ const { parentId, title, description, status } = input2;
595
+ if (!title) {
596
+ throw new Error("Title is required for add operation");
597
+ }
598
+ let newId;
599
+ if (parentId) {
600
+ const parent = todoItems.find((i) => i.id === parentId);
601
+ if (!parent) {
602
+ throw new Error(`Parent to-do item with id ${parentId} not found`);
603
+ }
604
+ const childItems = todoItems.filter(
605
+ (i) => i.id.startsWith(`${parentId}.`) && i.id.split(".").length === parentId.split(".").length + 1
606
+ );
607
+ const maxId = childItems.reduce((max, item) => {
608
+ const parts = item.id.split(".");
609
+ const lastPart = parseInt(parts[parts.length - 1], 10);
610
+ return Math.max(max, lastPart);
611
+ }, 0);
612
+ newId = `${parentId}.${maxId + 1}`;
613
+ } else {
614
+ const rootItems = todoItems.filter((i) => !i.id.includes("."));
615
+ const maxId = rootItems.reduce((max, item) => {
616
+ const idNum = parseInt(item.id, 10);
617
+ return Math.max(max, idNum);
618
+ }, 0);
619
+ newId = `${maxId + 1}`;
620
+ }
621
+ const newItem = {
622
+ id: newId,
623
+ title,
624
+ description: description ?? "",
625
+ status: status ?? "open"
626
+ };
627
+ await todoItemStore.write([...todoItems, newItem]);
628
+ return { id: newId };
629
+ } else {
630
+ const { id } = input2;
631
+ if (!id) {
632
+ throw new Error("ID is required for update operation");
633
+ }
634
+ const item = todoItems.find((i) => i.id === id);
635
+ if (!item) {
636
+ throw new Error(`To-do item with id ${id} not found`);
637
+ }
638
+ if (input2.title != null) {
639
+ item.title = input2.title;
640
+ }
641
+ if (input2.description != null) {
642
+ item.description = input2.description ?? "";
643
+ }
644
+ if (input2.status != null) {
645
+ item.status = input2.status;
646
+ }
647
+ await todoItemStore.write(todoItems);
648
+ return { id };
649
+ }
650
+ },
651
+ listMemoryTopics: async () => {
652
+ return listMemoryTopicsKV();
653
+ },
654
+ readMemory: async (topic = defaultMemoryTopic) => {
655
+ return readMemoryKV(topic);
656
+ },
657
+ updateMemory: async (operation, topic, content) => {
658
+ const memoryTopic = topic ?? defaultMemoryTopic;
659
+ await updateMemoryKV(operation, memoryTopic, content);
660
+ },
661
+ readFile: async (path, includeIgnored) => {
662
+ if (!includeIgnored && ig.ignores(path)) {
663
+ throw new Error(`Not allow to access file ${path}`);
664
+ }
665
+ try {
666
+ return await readFile2(path, "utf8");
667
+ } catch (_e) {
668
+ return void 0;
669
+ }
670
+ },
671
+ writeFile: async (path, content) => {
672
+ if (ig.ignores(path)) {
673
+ throw new Error(`Not allow to access file ${path}`);
674
+ }
675
+ await mkdir(dirname2(path), { recursive: true });
676
+ return await writeFile(path, content, "utf8");
677
+ },
678
+ removeFile: async (path) => {
679
+ if (ig.ignores(path)) {
680
+ throw new Error(`Not allow to access file ${path}`);
681
+ }
682
+ return await unlink(path);
683
+ },
684
+ renameFile: async (sourcePath, targetPath) => {
685
+ if (ig.ignores(sourcePath) || ig.ignores(targetPath)) {
686
+ throw new Error(`Not allow to access file ${sourcePath} or ${targetPath}`);
687
+ }
688
+ return await rename(sourcePath, targetPath);
689
+ },
690
+ listFiles: async (path, recursive, maxCount, includeIgnored) => {
691
+ return await listFiles(path, recursive, maxCount, process.cwd(), options.excludeFiles, includeIgnored);
692
+ },
693
+ readBinaryFile: async (url) => {
694
+ if (url.startsWith("file://")) {
695
+ const filePath = decodeURIComponent(url.substring("file://".length));
696
+ const resolvedPath = normalize2(resolve3(process.cwd(), filePath));
697
+ if (!resolvedPath.startsWith(process.cwd())) {
698
+ throw new Error(`Access to file path "${filePath}" is restricted.`);
699
+ }
700
+ const data2 = await readFile2(resolvedPath);
701
+ const mediaType2 = lookup(resolvedPath) || "application/octet-stream";
702
+ return {
703
+ base64Data: data2.toString("base64"),
704
+ mediaType: mediaType2
705
+ };
706
+ }
707
+ const response = await fetch(url);
708
+ if (!response.ok) {
709
+ throw new Error(`HTTP error! status: ${response.status}`);
710
+ }
711
+ const data = await response.arrayBuffer();
712
+ const mediaType = lookup(url) || "application/octet-stream";
713
+ return {
714
+ base64Data: Buffer.from(data).toString("base64"),
715
+ mediaType
716
+ };
717
+ },
718
+ executeCommand: (command, _needApprove) => {
719
+ return new Promise((resolve5, reject) => {
720
+ options.command?.onStarted(command);
721
+ const child = spawn2(command, [], {
722
+ shell: true,
723
+ stdio: "pipe"
724
+ });
725
+ let stdoutText = "";
726
+ let stderrText = "";
727
+ child.stdout.on("data", (data) => {
728
+ const dataStr = data.toString();
729
+ options.command?.onStdout(dataStr);
730
+ stdoutText += dataStr;
731
+ });
732
+ child.stderr.on("data", (data) => {
733
+ const dataStr = data.toString();
734
+ options.command?.onStderr(dataStr);
735
+ stderrText += dataStr;
736
+ });
737
+ child.on("close", async (code) => {
738
+ options.command?.onExit(code ?? 0);
739
+ const totalLength = stdoutText.length + stderrText.length;
740
+ if (totalLength > (options.summaryThreshold ?? 5e3) && options.summarizeOutput) {
741
+ try {
742
+ const summary = await options.summarizeOutput(stdoutText, stderrText);
743
+ if (summary) {
744
+ resolve5({
745
+ summary,
746
+ stdout: stdoutText,
747
+ stderr: stderrText,
748
+ exitCode: code ?? 0
749
+ });
750
+ return;
751
+ }
752
+ } catch (_e) {
753
+ console.error("Summarization failed:", _e);
754
+ }
755
+ }
756
+ resolve5({
757
+ stdout: stdoutText,
758
+ stderr: stderrText,
759
+ exitCode: code ?? 0
760
+ });
761
+ });
762
+ child.on("error", (err) => {
763
+ options.command?.onError(err);
764
+ reject(err);
765
+ });
766
+ });
767
+ },
768
+ askFollowupQuestion: async (question, answerOptions) => {
769
+ if (options.yes) {
770
+ if (answerOptions.length > 0) {
771
+ return answerOptions[0];
772
+ }
773
+ return "";
774
+ }
775
+ if (answerOptions.length === 0) {
776
+ return await input({ message: question });
777
+ }
778
+ const otherMessage = "Other (enter text)";
779
+ answerOptions.push(otherMessage);
780
+ const answer = await select({
781
+ message: question,
782
+ choices: answerOptions.map((option) => ({ name: option, value: option }))
783
+ });
784
+ if (answer === otherMessage) {
785
+ return await input({ message: "Enter your answer:" });
786
+ }
787
+ return answer;
788
+ },
789
+ fetchUrl: async (url) => {
790
+ const isRaw = url.startsWith("https://raw.githubusercontent.com/");
791
+ const urlToFetch = isRaw ? url : `https://r.jina.ai/${url}`;
792
+ try {
793
+ const response = await fetch(urlToFetch);
794
+ if (!response.ok) {
795
+ throw new Error(`HTTP error! status: ${response.status}`);
796
+ }
797
+ return await response.text();
798
+ } catch (error) {
799
+ console.error("Error fetching URL:", error);
800
+ throw error;
801
+ }
802
+ },
803
+ search: searchModel && (async (query) => {
804
+ const googleSearchTool = vertex.tools.googleSearch({});
805
+ const resp = await generateText({
806
+ model: searchModel,
807
+ system: "You are a web search assistant. When searching for information, provide comprehensive and detailed results. Include relevant facts, statistics, dates, and key details from the search results. Synthesize information from multiple sources when available. Structure your response clearly with the most relevant information first. Reference or cite sources when presenting specific claims or data.",
808
+ tools: {
809
+ google_search: googleSearchTool
810
+ },
811
+ prompt: query,
812
+ stopWhen: stepCountIs(5)
813
+ });
814
+ return resp.text;
815
+ })
816
+ };
817
+ if (checkRipgrep()) {
818
+ provider.searchFiles = async (path, regex, filePattern) => {
819
+ return await searchFiles(path, regex, filePattern, process.cwd(), options.excludeFiles);
820
+ };
821
+ } else {
822
+ console.error(
823
+ "Error: ripgrep (rg) is not installed. Search file tool is disabled. Please install it from https://github.com/BurntSushi/ripgrep#installation"
824
+ );
825
+ }
826
+ return provider;
827
+ };
828
+
829
+ // src/sqlite-memory-store.ts
830
+ import { AsyncLocalStorage } from "async_hooks";
831
+ import { randomUUID } from "crypto";
832
+ import { existsSync as existsSync3 } from "fs";
833
+ import { mkdir as mkdir2, readdir, readFile as readFile3, rename as rename2, unlink as unlink2, writeFile as writeFile2 } from "fs/promises";
834
+ import { basename, dirname as dirname3, resolve as resolve4 } from "path";
835
+ import { fileURLToPath } from "url";
836
+ import { DEFAULT_MEMORY_CONFIG, resolveHomePath } from "@polka-codes/core";
837
+ import initSqlJs from "sql.js";
838
var FileLock = class _FileLock {
  lockfilePath;
  static LOCK_TIMEOUT = 3e4;
  // 30 seconds max lock time
  static CLEANUP_AGE = 6e5;
  // 10 minutes - cleanup old lock files
  static lastCleanupTime = 0;
  static CLEANUP_THROTTLE = 6e4;
  // Throttle cleanup to once per minute
  constructor(dbPath) {
    this.lockfilePath = `${dbPath}.lock`;
  }
  /**
   * Reset the cleanup throttle. For testing purposes only.
   */
  static resetCleanupThrottle() {
    _FileLock.lastCleanupTime = 0;
  }
  /**
   * Clean up old lock files (.released.*, .stale.*, .invalid.*, .corrupt.*)
   * Only removes files older than CLEANUP_AGE (10 minutes by default)
   * This method is safe to call multiple times concurrently
   * Cleanup is throttled to run at most once per minute to avoid performance impact
   * @param force - Skip throttling and force cleanup (for testing)
   */
  static async cleanupOldLockFiles(dbPath, maxAge = _FileLock.CLEANUP_AGE, force = false) {
    const now = Date.now();
    if (!force && now - _FileLock.lastCleanupTime < _FileLock.CLEANUP_THROTTLE) {
      return;
    }
    _FileLock.lastCleanupTime = now;
    try {
      const lockDir = dirname3(dbPath);
      const dbBaseName = basename(dbPath);
      const files = await readdir(lockDir);
      const scanTime = Date.now();
      for (const file of files) {
        if (!file.startsWith(`${dbBaseName}.lock.`)) {
          continue;
        }
        // Timestamp is embedded in the renamed lock file's suffix.
        const match = file.match(/\.lock\.(released|stale|invalid|corrupt)\.(\d+)$/);
        if (!match) {
          continue;
        }
        const age = scanTime - Number.parseInt(match[2], 10);
        if (age > maxAge) {
          try {
            await unlink2(resolve4(lockDir, file));
          } catch (error) {
            // ENOENT means a concurrent cleanup already removed it.
            if (error?.code !== "ENOENT") {
              console.warn(`[FileLock] Failed to delete old lock file ${file}: ${error instanceof Error ? error.message : String(error)}`);
            }
          }
        }
      }
    } catch (error) {
      // Cleanup is best-effort; never let it disturb the caller.
      console.debug(`[FileLock] Cleanup encountered an error: ${error instanceof Error ? error.message : String(error)}`);
    }
  }
  /**
   * Move the current lockfile aside (breaking a stale/invalid/corrupt lock)
   * so a fresh lock can be created.
   * Never throws: ENOENT means another process broke the lock first, which is
   * fine; any other failure is logged and the caller simply retries.
   */
  async #breakLock(reason) {
    try {
      await rename2(this.lockfilePath, `${this.lockfilePath}.${reason}.${Date.now()}`);
    } catch (error) {
      if (error?.code !== "ENOENT") {
        console.warn(`[FileLock] Failed to break ${reason} lock: ${error instanceof Error ? error.message : String(error)}`);
      }
    }
  }
  /**
   * Try to acquire lock with retries
   * @throws Error if lock cannot be acquired after retries
   */
  async acquire(retries = 10, delay = 100) {
    for (let i = 0; i < retries; i++) {
      try {
        const lockData = JSON.stringify({
          pid: process.pid,
          acquiredAt: Date.now()
        });
        await writeFile2(this.lockfilePath, lockData, {
          flag: "wx",
          // Exclusive create - fails if file exists
          mode: 384
        });
        return;
      } catch (error) {
        const errorCode = error?.code;
        if (errorCode !== "EEXIST") {
          // Unexpected filesystem error - not a lock contention problem.
          throw error;
        }
        // Lock file exists; inspect it to decide whether it can be broken.
        try {
          const lockContent = await readFile3(this.lockfilePath, "utf-8");
          const lockData = JSON.parse(lockContent);
          if (!lockData || typeof lockData.acquiredAt !== "number" || lockData.acquiredAt <= 0) {
            console.warn(`[FileLock] Lock file has invalid acquiredAt, treating as stale`);
            await this.#breakLock("invalid");
            continue;
          }
          const lockAge = Date.now() - lockData.acquiredAt;
          if (lockAge > _FileLock.LOCK_TIMEOUT) {
            console.warn(`[FileLock] Breaking stale lock (age: ${lockAge}ms)`);
            await this.#breakLock("stale");
            continue;
          }
        } catch (readError) {
          if (readError instanceof SyntaxError) {
            console.warn(`[FileLock] Lock file contains invalid JSON, treating as stale`);
            await this.#breakLock("corrupt");
            continue;
          }
          // Other read failures (e.g. lock removed between the write attempt
          // and the read) fall through to the retry path below.
        }
        if (i < retries - 1) {
          await new Promise((resolve5) => setTimeout(resolve5, delay));
        } else {
          throw new Error(`Cannot acquire lock after ${retries} retries (file: ${this.lockfilePath})`);
        }
      }
    }
    // All iterations were consumed via `continue` (lock-breaking) paths
    // without ever creating our own lock file - report failure instead of
    // silently returning as if the lock were held.
    throw new Error(`Cannot acquire lock after ${retries} retries (file: ${this.lockfilePath})`);
  }
  /**
   * Release the lock by removing the lockfile
   */
  async release() {
    try {
      await rename2(this.lockfilePath, `${this.lockfilePath}.released.${Date.now()}`);
      // lockfilePath always ends with ".lock"; strip it to recover the db path.
      const dbPath = this.lockfilePath.slice(0, -5);
      // Fire-and-forget cleanup of old renamed lock files.
      _FileLock.cleanupOldLockFiles(dbPath).catch(() => {
      });
    } catch (error) {
      const errorCode = error?.code;
      if (errorCode !== "ENOENT") {
        console.warn(`[FileLock] Error releasing lock: ${error instanceof Error ? error.message : String(error)}`);
      }
    }
  }
};
969
var ReentrantMutex = class {
  // FIFO queue of resolve callbacks for waiters blocked in acquire().
  queue = [];
  locked = false;
  // How many times the current owner has (re-)acquired the lock.
  lockCount = 0;
  owner = null;
  /**
   * Acquire the mutex on behalf of `owner`. If that owner already holds the
   * lock the call nests (increments the hold count) instead of deadlocking;
   * otherwise the caller waits until the lock is free.
   * @returns a release function bound to `owner`
   */
  async acquire(owner) {
    const reentrant = this.locked && this.owner === owner;
    if (reentrant) {
      this.lockCount += 1;
    } else {
      while (this.locked) {
        await new Promise((wake) => this.queue.push(wake));
      }
      this.locked = true;
      this.owner = owner;
      this.lockCount = 1;
    }
    return () => this.release(owner);
  }
  /**
   * Release one hold for `owner`. Calls by non-owners are ignored. When the
   * last nested hold is dropped, the lock opens and one waiter is woken.
   */
  release(owner) {
    if (this.owner !== owner) {
      return;
    }
    this.lockCount -= 1;
    if (this.lockCount !== 0) {
      return;
    }
    this.locked = false;
    this.owner = null;
    this.queue.shift()?.();
  }
};
1002
// Cached sql.js module (resolved) and the in-flight initialization promise.
var SqlJs = null;
var SqlJsInitPromise = null;
// Locate the WASM binary bundled next to this module, if present.
var moduleDir = dirname3(fileURLToPath(import.meta.url));
var bundledWasmPath = resolve4(moduleDir, "sql-wasm.wasm");
var hasBundledWasm = existsSync3(bundledWasmPath);
/**
 * Lazily initialize sql.js exactly once, deduplicating concurrent callers.
 *
 * Fix: on failure the cached init promise is cleared so a later call can
 * retry, and every caller (including concurrent ones that grabbed the
 * in-flight promise) receives the descriptive wrapped error instead of the
 * raw rejection. Previously a failed init was cached forever.
 *
 * @returns the initialized sql.js module
 * @throws Error with WASM-location guidance when initialization fails
 */
async function getSqlJs() {
  if (SqlJs) {
    return SqlJs;
  }
  if (SqlJsInitPromise) {
    return SqlJsInitPromise;
  }
  SqlJsInitPromise = (async () => {
    try {
      const sqlModule = await initSqlJs({
        locateFile: (filename, prefix) => {
          // Prefer the bundled WASM; fall back to sql.js's default lookup.
          if (hasBundledWasm) {
            return bundledWasmPath;
          }
          return prefix + filename;
        }
      });
      SqlJs = sqlModule;
      return sqlModule;
    } catch (error) {
      // Allow a future call to retry initialization.
      SqlJsInitPromise = null;
      throw new Error(
        `Failed to initialize sql.js: ${error instanceof Error ? error.message : String(error)}

Attempted to locate WASM at:
  - ${bundledWasmPath}

If you're developing, run: bun run build in packages/cli-shared`
      );
    }
  })();
  return SqlJsInitPromise;
}
1036
// Tracks which logical async call chain owns the current transaction so
// nested transaction() calls from the same chain are treated as reentrant.
var transactionOwnerStorage = new AsyncLocalStorage();
/**
 * Memory store backed by sql.js (SQLite compiled to WASM).
 *
 * sql.js keeps the whole database in memory, so committed transactions are
 * explicitly persisted to disk via saveDatabase() (atomic tmp-file + rename,
 * guarded by the cross-process FileLock).
 */
var SQLiteMemoryStore = class _SQLiteMemoryStore {
  // sql.js Database handle, or null before init / after close()
  db = null;
  // In-flight (or completed) initialization promise; dedupes concurrent init
  dbPromise = null;
  config;
  currentScope;
  inTransaction = false;
  // Track if we're in a transaction
  transactionMutex = new ReentrantMutex();
  // Serialize transactions
  fileLock;
  // Cross-process file lock (initialized in getFileLock())
  /**
   * Reset the lock file cleanup throttle. For testing purposes only.
   */
  static resetCleanupThrottle() {
    FileLock.resetCleanupThrottle();
  }
  /**
   * Get the configured database path, or default if not set
   */
  getDbPath() {
    return this.config.path || DEFAULT_MEMORY_CONFIG.path;
  }
  /**
   * Get lockfile instance for database path (created lazily, cached)
   */
  getFileLock() {
    if (!this.fileLock) {
      const dbPath = this.resolvePath(this.getDbPath());
      this.fileLock = new FileLock(dbPath);
    }
    return this.fileLock;
  }
  // Whitelists for validation (guard dynamic ORDER BY / filter clauses
  // in buildQuery against SQL injection)
  static SORT_COLUMNS = {
    created: "created_at",
    updated: "updated_at",
    accessed: "last_accessed",
    name: "name"
  };
  static ALLOWED_SORT_ORDERS = ["asc", "desc"];
  static ALLOWED_PRIORITIES = ["low", "medium", "high", "critical"];
  constructor(config, scope) {
    this.config = config;
    this.currentScope = scope;
  }
  /**
   * Initialize database connection and schema.
   * Reads an existing database file (under the file lock), validates its
   * SQLite header, and on any failure backs up the corrupted file and
   * recursively re-initializes with a fresh database.
   */
  async initializeDatabase() {
    if (this.dbPromise) {
      return this.dbPromise;
    }
    this.dbPromise = (async () => {
      if (this.db) {
        return this.db;
      }
      const dbPath = this.resolvePath(this.getDbPath());
      try {
        const dir = dirname3(dbPath);
        if (!existsSync3(dir)) {
          // 448 === 0o700: directory readable only by the owner
          await mkdir2(dir, { recursive: true, mode: 448 });
        }
        // Best-effort, fire-and-forget removal of old renamed lock files
        FileLock.cleanupOldLockFiles(dbPath).catch(() => {
        });
        let dbData;
        if (existsSync3(dbPath)) {
          const lock = this.getFileLock();
          await lock.acquire();
          try {
            try {
              dbData = await readFile3(dbPath);
              if (dbData.length >= 16) {
                // Valid SQLite files start with the 15-byte magic string
                const header = String.fromCharCode(...dbData.subarray(0, 15));
                if (header !== "SQLite format 3") {
                  console.warn("[SQLiteMemoryStore] Invalid SQLite database header, will recreate");
                  dbData = void 0;
                }
              }
            } catch (error) {
              const errorCode = error?.code;
              if (errorCode === "ENOENT") {
                // File vanished between existsSync and read; start fresh
                dbData = void 0;
              } else {
                throw new Error(`Failed to read database file at ${dbPath}: ${error instanceof Error ? error.message : String(error)}`);
              }
            }
          } finally {
            await lock.release();
          }
        }
        const SqlJs2 = await getSqlJs();
        // Passing undefined creates an empty in-memory database
        const db = new SqlJs2.Database(dbData);
        this.configurePragmas(db);
        this.checkIntegrity(db);
        this.initializeSchema(db);
        this.db = db;
        return db;
      } catch (error) {
        console.error("[SQLiteMemoryStore] Initialization failed:", error);
        if (existsSync3(dbPath)) {
          // Preserve the bad file for inspection, then retry from scratch
          const backupPath = `${dbPath}.corrupted.${Date.now()}`;
          console.warn(`[SQLiteMemoryStore] Backing up corrupted database to: ${backupPath}`);
          try {
            await rename2(dbPath, backupPath);
          } catch (backupError) {
            console.error("[SQLiteMemoryStore] Failed to backup corrupted database:", backupError);
            this.dbPromise = null;
            throw backupError;
          }
          this.dbPromise = null;
          return this.initializeDatabase();
        }
        this.dbPromise = null;
        throw error;
      }
    })();
    return this.dbPromise;
  }
  /**
   * Persist database to disk using atomic write with file locking
   * (export to a .tmp file, then rename over the real path).
   */
  async saveDatabase() {
    if (!this.db) {
      return;
    }
    const lock = this.getFileLock();
    await lock.acquire();
    try {
      const dbPath = this.resolvePath(this.getDbPath());
      const tempPath = `${dbPath}.tmp`;
      const data = this.db.export();
      // 384 === 0o600: file readable/writable only by the owner
      await writeFile2(tempPath, data, { mode: 384 });
      await rename2(tempPath, dbPath);
    } finally {
      await lock.release();
    }
  }
  /**
   * Configure database pragmas
   */
  configurePragmas(db) {
    db.run("PRAGMA synchronous = NORMAL");
    db.run("PRAGMA busy_timeout = 5000");
    db.run("PRAGMA foreign_keys = ON");
    db.run("PRAGMA temp_store = MEMORY");
  }
  /**
   * Check database integrity with a trivial probe query.
   * @throws Error("Database is corrupted") when the probe fails
   */
  checkIntegrity(db) {
    try {
      const results = db.exec("SELECT 1");
      if (results.length === 0) {
        throw new Error("Database query returned no results");
      }
    } catch (error) {
      console.error("[SQLiteMemoryStore] Integrity check failed:", error);
      throw new Error("Database is corrupted");
    }
  }
  /**
   * Initialize database schema (idempotent: CREATE ... IF NOT EXISTS).
   * Entries are unique per (name, scope); tags are stored as a comma-separated
   * string and metadata as JSON text.
   */
  initializeSchema(db) {
    db.run(`
      CREATE TABLE IF NOT EXISTS memory_entries (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL CHECK(length(name) > 0),
        scope TEXT NOT NULL CHECK(scope IN ('global') OR scope LIKE 'project:%'),
        content TEXT NOT NULL CHECK(length(content) > 0),
        entry_type TEXT NOT NULL CHECK(length(entry_type) > 0),
        status TEXT CHECK(status IS NULL OR length(status) > 0),
        priority TEXT CHECK(priority IS NULL OR priority IN ('low', 'medium', 'high', 'critical')),
        tags TEXT CHECK(tags IS NULL OR length(tags) > 0),
        metadata TEXT CHECK(metadata IS NULL OR json_valid(metadata)),
        created_at INTEGER NOT NULL CHECK(created_at > 0),
        updated_at INTEGER NOT NULL CHECK(updated_at > 0),
        last_accessed INTEGER NOT NULL CHECK(last_accessed > 0),
        UNIQUE(name, scope)
      )
    `);
    db.run("CREATE INDEX IF NOT EXISTS idx_memory_entries_scope_type ON memory_entries(scope, entry_type)");
    db.run("CREATE INDEX IF NOT EXISTS idx_memory_entries_updated ON memory_entries(updated_at)");
  }
  /**
   * Get database instance, initializing it on first use
   */
  async getDatabase() {
    if (!this.db) {
      this.db = await this.initializeDatabase();
    }
    return this.db;
  }
  /**
   * Resolve home directory in path using shared utility, then make absolute
   */
  resolvePath(path) {
    const resolved = resolveHomePath(path);
    return resolve4(resolved);
  }
  /**
   * Generate UUID v4
   */
  generateUUID() {
    return randomUUID();
  }
  /**
   * Get current timestamp in milliseconds
   */
  now() {
    return Date.now();
  }
  /**
   * Execute transaction
   * Uses Mutex to serialize concurrent transaction calls for safety
   * Supports reentrancy (nested transactions from the same call chain)
   */
  async transaction(callback) {
    // Reuse the ambient owner token if we're already inside a transaction
    // on this call chain; otherwise mint a fresh one.
    let owner = transactionOwnerStorage.getStore();
    if (!owner) {
      owner = Symbol("transaction");
    }
    const release = await this.transactionMutex.acquire(owner);
    return transactionOwnerStorage.run(owner, async () => {
      try {
        const db = await this.getDatabase();
        // Only the outermost call issues BEGIN/COMMIT; nested calls piggyback.
        const shouldBegin = !this.inTransaction;
        try {
          if (shouldBegin) {
            db.run("BEGIN TRANSACTION");
            this.inTransaction = true;
          }
          const result = await callback();
          if (shouldBegin) {
            db.run("COMMIT");
            this.inTransaction = false;
            try {
              await this.saveDatabase();
            } catch (saveError) {
              // The in-memory commit succeeded but persistence failed; close
              // without saving so stale state isn't written later.
              console.error("[SQLiteMemoryStore] Failed to save database after commit, closing:", saveError);
              await this.close(true);
              throw saveError;
            }
          }
          return result;
        } catch (error) {
          if (this.inTransaction) {
            try {
              db.run("ROLLBACK");
            } catch (rollbackError) {
              console.error("[SQLiteMemoryStore] ROLLBACK failed:", rollbackError);
            }
            this.inTransaction = false;
          }
          throw error;
        }
      } finally {
        release();
      }
    });
  }
  /**
   * Read memory by topic
   * Note: Does NOT update last_accessed timestamp to avoid expensive disk writes on every read.
   * The timestamp is updated when memory is modified through updateMemory operations.
   */
  async readMemory(topic) {
    const db = await this.getDatabase();
    const scope = this.currentScope;
    const stmt = db.prepare("SELECT content FROM memory_entries WHERE name = ? AND scope = ?");
    stmt.bind([topic, scope]);
    if (stmt.step()) {
      const row = stmt.getAsObject();
      const content = row.content;
      stmt.free();
      return content;
    }
    stmt.free();
    return void 0;
  }
  /**
   * Internal update memory without transaction (used by batchUpdateMemory)
   * Handles remove / append / replace; new entries default entry_type to
   * "note". Timestamps can be overridden via metadata (used for migration).
   */
  async updateMemoryInternal(db, operation, topic, content, metadata) {
    const scope = this.currentScope;
    const now = this.now();
    const createdAt = metadata?.created_at ?? now;
    const updatedAt = metadata?.updated_at ?? now;
    const lastAccessed = metadata?.last_accessed ?? now;
    if (operation === "remove") {
      const stmt2 = db.prepare("DELETE FROM memory_entries WHERE name = ? AND scope = ?");
      stmt2.run([topic, scope]);
      stmt2.free();
      return;
    }
    // Load the existing entry (if any) so append can extend it and metadata
    // fields can fall back to their previous values.
    const stmt = db.prepare("SELECT content, entry_type, status, priority, tags FROM memory_entries WHERE name = ? AND scope = ?");
    stmt.bind([topic, scope]);
    let existing;
    if (stmt.step()) {
      const row = stmt.getAsObject();
      existing = {
        content: row.content,
        entry_type: row.entry_type,
        status: row.status,
        priority: row.priority,
        tags: row.tags
      };
      stmt.free();
    } else {
      existing = void 0;
      stmt.free();
    }
    let finalContent;
    let entry_type;
    let status;
    let priority;
    let tags;
    if (existing) {
      if (operation === "append") {
        if (!content) {
          throw new Error("Content is required for append operation.");
        }
        finalContent = `${existing.content}
${content}`;
      } else {
        if (!content) {
          throw new Error("Content is required for replace operation.");
        }
        finalContent = content;
      }
      entry_type = metadata?.entry_type || existing.entry_type;
      status = (metadata?.status || existing.status) ?? void 0;
      priority = (metadata?.priority || existing.priority) ?? void 0;
      tags = (metadata?.tags || existing.tags) ?? void 0;
    } else {
      if (!content) {
        throw new Error("Content is required for new memory entries.");
      }
      finalContent = content;
      entry_type = metadata?.entry_type || "note";
      status = metadata?.status;
      priority = metadata?.priority;
      tags = metadata?.tags;
    }
    // Upsert keyed on (name, scope); created_at is preserved on conflict.
    const upsertStmt = db.prepare(`
      INSERT INTO memory_entries (id, name, scope, content, entry_type, status, priority, tags, created_at, updated_at, last_accessed)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(name, scope) DO UPDATE SET
        content = excluded.content,
        entry_type = excluded.entry_type,
        status = excluded.status,
        priority = excluded.priority,
        tags = excluded.tags,
        updated_at = excluded.updated_at,
        last_accessed = excluded.last_accessed
    `);
    upsertStmt.run([
      this.generateUUID(),
      topic,
      scope,
      finalContent,
      entry_type,
      status ?? null,
      priority ?? null,
      tags ?? null,
      createdAt,
      updatedAt,
      lastAccessed
    ]);
    upsertStmt.free();
  }
  /**
   * Update memory (single operation, wrapped in a transaction)
   */
  async updateMemory(operation, topic, content, metadata) {
    return this.transaction(async () => {
      const db = await this.getDatabase();
      await this.updateMemoryInternal(db, operation, topic, content, metadata);
    });
  }
  /**
   * Query memory with filters.
   * options.operation selects behavior: "delete" removes matches and returns
   * the modified-row count; "count" returns the match count; default returns
   * the matching rows.
   */
  async queryMemory(query = {}, options = {}) {
    if (options.operation === "delete") {
      return this.transaction(async () => {
        const db2 = await this.getDatabase();
        const { sql: sql2, params: params2 } = this.buildQuery(query);
        // Delete via id subselect so ORDER BY/LIMIT in the built query apply.
        const deleteSql = `DELETE FROM memory_entries WHERE id IN (SELECT id FROM (${sql2}))`;
        const stmt2 = db2.prepare(deleteSql);
        stmt2.bind(params2);
        stmt2.step();
        stmt2.free();
        return db2.getRowsModified();
      });
    }
    const db = await this.getDatabase();
    const { sql, params } = this.buildQuery(query);
    if (options.operation === "count") {
      const countSql = `SELECT COUNT(*) as count FROM (${sql})`;
      const stmt2 = db.prepare(countSql);
      stmt2.bind(params);
      let count = 0;
      if (stmt2.step()) {
        const row = stmt2.getAsObject();
        count = row.count;
      }
      stmt2.free();
      return count;
    }
    const stmt = db.prepare(sql);
    stmt.bind(params);
    const entries = [];
    while (stmt.step()) {
      entries.push(stmt.getAsObject());
    }
    stmt.free();
    return entries;
  }
  /**
   * Build SQL query safely with parameterized statements.
   * All user-supplied values go through bind parameters; the only
   * interpolated identifiers (sort column/order) are validated against the
   * class whitelists.
   */
  buildQuery(query) {
    const conditions = [];
    const params = [];
    let sql = "SELECT * FROM memory_entries WHERE 1=1";
    // "auto" resolves to the store's current scope
    const scope = query.scope === "auto" ? this.currentScope : query.scope;
    if (scope === "global") {
      conditions.push(`scope = ?`);
      params.push("global");
    } else if (scope === "project" || !scope && this.currentScope !== "global") {
      conditions.push(`scope = ?`);
      params.push(this.currentScope);
    }
    if (query.name) {
      if (!query.name.trim()) {
        throw new Error("Name cannot be empty");
      }
      conditions.push(`name = ?`);
      params.push(query.name.trim());
    }
    if (query.type) {
      if (!query.type.trim()) {
        throw new Error("Type cannot be empty");
      }
      conditions.push(`entry_type = ?`);
      params.push(query.type.trim());
    }
    if (query.status) {
      conditions.push(`status = ?`);
      params.push(query.status);
    }
    if (query.priority) {
      if (!_SQLiteMemoryStore.ALLOWED_PRIORITIES.includes(query.priority)) {
        throw new Error(`Invalid priority: ${query.priority}`);
      }
      conditions.push(`priority = ?`);
      params.push(query.priority);
    }
    if (query.tags) {
      const tags = Array.isArray(query.tags) ? query.tags : [query.tags];
      for (const tag of tags) {
        const trimmed = tag.trim();
        if (!trimmed) {
          throw new Error("Tags cannot be empty");
        }
        // Tags are stored comma-separated; match the tag as the only value,
        // first value, last value, or an interior value. LIKE wildcards in
        // the tag itself are escaped with backslash.
        const escaped = trimmed.replace(/[\\_%]/g, "\\$&");
        conditions.push(`(tags = ? OR tags LIKE ? ESCAPE '\\' OR tags LIKE ? ESCAPE '\\' OR tags LIKE ? ESCAPE '\\')`);
        params.push(trimmed, `${escaped},%`, `%,${escaped}`, `%,${escaped},%`);
      }
    }
    if (query.search) {
      const searchTerm = query.search.trim();
      conditions.push(`(content LIKE ? ESCAPE '\\' OR name LIKE ? ESCAPE '\\')`);
      const searchPattern = `%${searchTerm.replace(/[\\_%]/g, "\\$&")}%`;
      params.push(searchPattern, searchPattern);
    }
    if (query.createdAfter) {
      conditions.push(`created_at >= ?`);
      params.push(query.createdAfter);
    }
    if (query.createdBefore) {
      conditions.push(`created_at <= ?`);
      params.push(query.createdBefore);
    }
    if (query.updatedAfter) {
      conditions.push(`updated_at >= ?`);
      params.push(query.updatedAfter);
    }
    if (query.updatedBefore) {
      conditions.push(`updated_at <= ?`);
      params.push(query.updatedBefore);
    }
    if (conditions.length > 0) {
      sql += ` AND ${conditions.join(" AND ")}`;
    }
    if (query.sortBy) {
      const column = _SQLiteMemoryStore.SORT_COLUMNS[query.sortBy];
      if (!column) {
        throw new Error(`Invalid sortBy: ${query.sortBy}`);
      }
      const order = query.sortOrder || "desc";
      if (!_SQLiteMemoryStore.ALLOWED_SORT_ORDERS.includes(order)) {
        throw new Error(`Invalid sortOrder: ${order}`);
      }
      sql += ` ORDER BY ${column} ${order.toUpperCase()}`;
    }
    if (query.limit) {
      const limit = Number(query.limit);
      if (Number.isNaN(limit) || limit < 1 || limit > 1e4) {
        throw new Error("Limit must be between 1 and 10000");
      }
      sql += ` LIMIT ?`;
      params.push(limit);
    }
    if (query.offset) {
      const offset = Number(query.offset);
      if (Number.isNaN(offset) || offset < 0) {
        throw new Error("Offset must be >= 0");
      }
      sql += ` OFFSET ?`;
      params.push(offset);
    }
    return { sql, params };
  }
  /**
   * Batch update memory: all operations run inside a single transaction,
   * so they commit or roll back together.
   */
  async batchUpdateMemory(operations) {
    return this.transaction(async () => {
      const db = await this.getDatabase();
      for (const op of operations) {
        await this.updateMemoryInternal(db, op.operation, op.name, op.content, op.metadata);
      }
    });
  }
  /**
   * Close database connection
   * @param skipSave - If true, skip saving before close (useful when save already failed)
   */
  async close(skipSave = false) {
    const db = this.db;
    if (db) {
      try {
        if (!skipSave) {
          await this.saveDatabase();
        }
      } finally {
        // Guard against a concurrent re-init swapping this.db during save
        if (this.db === db) {
          db.close();
          this.db = null;
        }
      }
    }
    this.dbPromise = null;
  }
  /**
   * Get database statistics: total entry count, per-type counts, and the
   * on-disk file size in bytes (0 if the file cannot be stat'ed).
   */
  async getStats() {
    const db = await this.getDatabase();
    const results = db.exec("SELECT COUNT(*) as count FROM memory_entries");
    const totalEntries = results[0]?.values[0]?.[0] || 0;
    const typeResults = db.exec("SELECT entry_type, COUNT(*) as count FROM memory_entries GROUP BY entry_type");
    const entriesByType = {};
    if (typeResults.length > 0) {
      for (const row of typeResults[0].values) {
        entriesByType[row[0]] = row[1];
      }
    }
    const dbPath = this.resolvePath(this.getDbPath());
    let databaseSize = 0;
    try {
      const stats = await import("fs/promises").then((fs2) => fs2.stat(dbPath));
      databaseSize = stats.size;
    } catch {
    }
    return {
      totalEntries,
      entriesByType,
      databaseSize
    };
  }
};
1622
+
1623
+ // src/utils/eventHandler.ts
1624
+ import { Console } from "console";
1625
+ import { TaskEventKind } from "@polka-codes/core";
1626
+ import chalk from "chalk";
1627
+
1628
+ // src/utils/parameterSimplifier.ts
1629
/**
 * Build a simplifier that keeps only the whitelisted fields of a params
 * object, dropping fields whose value is undefined.
 */
function createSimplifier(keepFields) {
  return (params) => {
    const picked = {};
    for (const key of keepFields) {
      const value = params[key];
      if (value !== void 0) {
        picked[key] = value;
      }
    }
    return picked;
  };
}
/**
 * Wrap an arbitrary transform as a simplifier (identity wrapper kept for
 * symmetry with createSimplifier).
 */
function createCustomSimplifier(transform) {
  return transform;
}
// Per-tool parameter simplifiers used when printing tool-use events.
var SIMPLIFIERS = {
  // Simple field-based simplifiers
  replaceInFile: createSimplifier(["path"]),
  writeToFile: createSimplifier(["path"]),
  readFile: createSimplifier(["path", "includeIgnored"]),
  executeCommand: createSimplifier(["command", "requiresApproval"]),
  updateMemory: createSimplifier(["operation", "topic"]),
  // searchFiles passes through all params
  searchFiles: createCustomSimplifier((params) => Object.assign({}, params)),
  // listFiles has custom logic for maxCount default value
  listFiles: createCustomSimplifier((params) => {
    const DEFAULT_MAX_COUNT = 2e3;
    const { maxCount } = params;
    const simplified = {
      path: params.path,
      recursive: params.recursive
    };
    // Only surface maxCount when it differs from the default (note: an
    // undefined maxCount differs from the default and is kept, matching
    // the conditional-spread behavior).
    if (maxCount !== DEFAULT_MAX_COUNT) {
      simplified.maxCount = maxCount;
    }
    return simplified;
  })
};
/**
 * Reduce a tool call's parameters to the interesting subset for display.
 * Unknown tools get a shallow copy of their params; null/undefined params
 * become an empty object.
 */
function simplifyToolParameters(toolName, params) {
  if (params === void 0 || params === null) {
    return {};
  }
  const simplifier = SIMPLIFIERS[toolName];
  return simplifier ? simplifier(params) : { ...params };
}
1673
+
1674
+ // src/utils/eventHandler.ts
1675
// Per-task tool invocation counters (tool name -> {calls, success, errors});
// cleared on StartTask and folded into globalToolCallStats on EndTask.
var taskToolCallStats = /* @__PURE__ */ new Map();
// Cumulative counters across all tasks in this process.
var globalToolCallStats = /* @__PURE__ */ new Map();
1677
/**
 * Print a titled table of per-tool call statistics to the given stream.
 * @param stream - writable stream the report is printed to
 * @param statsMap - Map of tool name -> {calls, success, errors}
 * @param title - heading rendered above the table
 */
function logToolCallStats(stream, statsMap, title) {
  const out = new Console(stream, stream);
  out.log(`\n\n======== ${title} ========`);
  if (statsMap.size === 0) {
    out.log("No tools were called.");
    return;
  }
  const rows = [];
  for (const [tool, stats] of statsMap) {
    // Avoid division by zero when a tool somehow has no recorded calls.
    const rate = stats.calls > 0 ? stats.success / stats.calls * 100 : 0;
    rows.push({
      "Tool Name": tool,
      Calls: stats.calls,
      Success: stats.success,
      Errors: stats.errors,
      "Success Rate": `${rate.toFixed(2)}%`
    });
  }
  out.table(rows);
}
1698
/**
 * Merge two tool-call stats maps into a fresh Map, summing counters per
 * tool. Neither input is mutated; stat objects are copied, never shared.
 */
var mergeToolCallStats = (a, b) => {
  const combined = /* @__PURE__ */ new Map();
  const fold = (source) => {
    for (const [tool, stat] of source) {
      const current = combined.get(tool);
      if (current === void 0) {
        combined.set(tool, { ...stat });
      } else {
        current.calls += stat.calls;
        current.success += stat.success;
        current.errors += stat.errors;
      }
    }
  };
  fold(a);
  fold(b);
  return combined;
};
1715
/**
 * Print cumulative tool-call stats (global plus any not-yet-folded task
 * stats) to the stream. A negative verbosity suppresses all output.
 */
function logGlobalToolCallStats(stream, verbose = 0) {
  if (verbose < 0) {
    return;
  }
  const combined = mergeToolCallStats(globalToolCallStats, taskToolCallStats);
  logToolCallStats(stream, combined, "Global Tool Call Stats");
}
1722
/**
 * Build a task-event handler that renders events to `stream` and records
 * per-tool call statistics in taskToolCallStats/globalToolCallStats.
 *
 * Verbosity levels (as exercised below): <0 no-op handler; 0 streams model
 * text only; >0 adds request banners, reasoning, tool use, and per-task
 * stats; >1 additionally prints request messages, tool params, and usage;
 * >2 also dumps the system prompt.
 */
var printEvent = (verbose, usageMeter, stream = process.stdout) => {
  if (verbose < 0) {
    // Fully silent: return an inert handler.
    return (_event) => {
    };
  }
  const customConsole = new Console(stream, stream);
  // Closure state: whether the last streamed chunk was reasoning text, and
  // whether the current request has produced any visible text.
  let hadReasoning = false;
  let hasText = false;
  const write = stream.write.bind(stream);
  return (event) => {
    switch (event.kind) {
      case TaskEventKind.StartTask:
        taskToolCallStats.clear();
        if (verbose > 2) {
          customConsole.log(`
====== System Prompt ======
${event.systemPrompt}`);
          customConsole.log("\n\n================\n");
        }
        break;
      case TaskEventKind.StartRequest:
        hasText = false;
        if (verbose > 0) {
          customConsole.log("\n\n======== New Request ========\n");
        }
        if (verbose > 1) {
          // Dump the outgoing user messages; non-string content is a list of
          // typed parts (text/image/file/tool-call/tool-result/reasoning).
          for (const message of event.userMessage) {
            const userMessage = message.content;
            if (typeof userMessage === "string") {
              customConsole.log(userMessage);
            } else {
              for (const content of userMessage) {
                switch (content.type) {
                  case "text":
                    customConsole.log(content.text);
                    break;
                  case "image":
                    if (content.image instanceof URL) {
                      customConsole.log(chalk.yellow(`[Image content from URL: ${content.image}]`));
                    } else {
                      customConsole.log(chalk.yellow(`[Image content: ${content.mediaType}]`));
                    }
                    break;
                  case "file":
                    customConsole.log(chalk.yellow(`[File name: ${content.filename}, type: ${content.mediaType}]`));
                    break;
                  case "tool-call":
                    customConsole.log(chalk.yellow(`[Tool call: ${content.toolName}]`));
                    break;
                  case "tool-result":
                    customConsole.log(chalk.yellow(`[Tool result: ${content.toolName}]`));
                    if (verbose > 1) {
                      customConsole.log(content.output);
                    }
                    break;
                  case "reasoning":
                    // Reasoning parts are omitted from the request dump.
                    break;
                }
              }
            }
          }
          customConsole.log("\n\n======== Request Message Ended ========\n");
        }
        break;
      case TaskEventKind.EndRequest:
        if (verbose > 0) {
          customConsole.log("\n\n======== Request Ended ========\n");
        }
        if (verbose === 0 && hasText) {
          // At the quiet level, just separate this request's text output.
          write("\n\n");
        }
        if (verbose > 1) {
          customConsole.log(usageMeter.getUsageText());
        }
        break;
      case TaskEventKind.Text: {
        if (hadReasoning) {
          // Visually separate model text from the dimmed reasoning stream.
          write("\n\n");
          hadReasoning = false;
        }
        if (event.newText.trim().length > 0) {
          hasText = true;
        }
        write(event.newText);
        break;
      }
      case TaskEventKind.Reasoning: {
        if (verbose > 0) {
          write(chalk.dim(event.newText));
          hadReasoning = true;
        }
        break;
      }
      case TaskEventKind.ToolUse: {
        if (verbose > 0) {
          // Full params at high verbosity; simplified subset otherwise.
          const params = verbose > 1 ? event.params : simplifyToolParameters(event.tool, event.params);
          customConsole.log(chalk.yellow("\n\nTool use:", event.tool), params);
        }
        const stats = taskToolCallStats.get(event.tool) ?? { calls: 0, success: 0, errors: 0 };
        stats.calls++;
        taskToolCallStats.set(event.tool, stats);
        break;
      }
      case TaskEventKind.ToolReply: {
        const stats = taskToolCallStats.get(event.tool) ?? { calls: 0, success: 0, errors: 0 };
        stats.success++;
        taskToolCallStats.set(event.tool, stats);
        break;
      }
      case TaskEventKind.ToolError: {
        // Tool errors are always printed, regardless of verbosity.
        customConsole.error(chalk.red("\n\nTool error:", event.tool));
        customConsole.error(event.error);
        const stats = taskToolCallStats.get(event.tool) ?? { calls: 0, success: 0, errors: 0 };
        stats.errors++;
        taskToolCallStats.set(event.tool, stats);
        break;
      }
      case TaskEventKind.UsageExceeded:
        customConsole.log("\n\n======= Usage Exceeded ========\n");
        break;
      case TaskEventKind.EndTask:
        customConsole.log("\n\n======== Task Ended ========\n");
        customConsole.log("Reason:", event.exitReason.type);
        switch (event.exitReason.type) {
          case "Error": {
            const { error } = event.exitReason;
            customConsole.error(chalk.red(`Workflow failed: ${error.message}`));
            if (verbose > 0 && error.stack) {
              customConsole.error(chalk.red(error.stack));
            }
            break;
          }
        }
        // Fold this task's stats into the process-wide totals, then reset.
        for (const [tool, taskStats] of taskToolCallStats.entries()) {
          const globalStats = globalToolCallStats.get(tool) ?? { calls: 0, success: 0, errors: 0 };
          globalStats.calls += taskStats.calls;
          globalStats.success += taskStats.success;
          globalStats.errors += taskStats.errors;
          globalToolCallStats.set(tool, globalStats);
        }
        if (verbose > 0) {
          logToolCallStats(stream, taskToolCallStats, "Task Tool Call Stats");
        }
        taskToolCallStats.clear();
        break;
    }
  };
};
1870
+
1871
+ // src/utils/readMultiline.ts
1872
+ import readline from "readline";
1873
/**
 * Read multiline text from stdin until EOF (Ctrl+D).
 * @param prompt - message printed before reading begins
 * @returns the collected lines joined with "\n"
 */
function readMultiline(prompt = "Enter text (Ctrl+D to finish):") {
  console.log(prompt);
  return new Promise((done) => {
    const collected = [];
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
      prompt: ""
    });
    rl.on("line", (text) => collected.push(text));
    // "close" fires on EOF; resolve with everything gathered so far.
    rl.on("close", () => done(collected.join("\n")));
  });
}
1890
+ export {
1891
+ InMemoryStore,
1892
+ MemoryManager,
1893
+ SQLiteMemoryStore,
1894
+ checkRipgrep,
1895
+ configSchema,
1896
+ detectProjectScope,
1897
+ getGlobalConfigPath,
1898
+ getProvider,
1899
+ listFiles,
1900
+ loadConfig,
1901
+ loadConfigAtPath,
1902
+ localConfigFileName,
1903
+ logGlobalToolCallStats,
1904
+ logToolCallStats,
1905
+ mergeConfigs,
1906
+ printEvent,
1907
+ readConfig,
1908
+ readLocalConfig,
1909
+ readMultiline,
1910
+ resolveRules,
1911
+ searchFiles,
1912
+ simplifyToolParameters
1913
+ };