jinzd-ai-cli 0.4.88 → 0.4.90

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dist/{batch-7XCYSPJU.js → batch-3MJ56YAA.js} +2 -2
  2. package/dist/chat-index-QKFH7ZP6.js +17 -0
  3. package/dist/chat-index-W2UZ34ZI.js +18 -0
  4. package/dist/{chunk-PFYAAX2S.js → chunk-2DXY7UGF.js} +16 -63
  5. package/dist/chunk-5S3PIG5O.js +453 -0
  6. package/dist/{chunk-QT2KNL3V.js → chunk-AB2LA33A.js} +1 -1
  7. package/dist/chunk-ANYYM4CF.js +460 -0
  8. package/dist/{chunk-P6EQZKKG.js → chunk-BJXGZFE6.js} +1 -1
  9. package/dist/{chunk-L3MBIO36.js → chunk-DJGP7AR6.js} +5 -106
  10. package/dist/{chunk-V3NMERIB.js → chunk-EEEAFWNK.js} +1 -1
  11. package/dist/{chunk-YDHIU24C.js → chunk-G65IDWVP.js} +76 -3
  12. package/dist/chunk-JV5N65KN.js +50 -0
  13. package/dist/chunk-KHYD3WXE.js +52 -0
  14. package/dist/{chunk-CQQQFNND.js → chunk-KJLJPUY2.js} +6 -4
  15. package/dist/{chunk-GTKJUEBS.js → chunk-MO7MWNWC.js} +6 -4
  16. package/dist/{chunk-XMA222FQ.js → chunk-PASCDYMH.js} +17 -63
  17. package/dist/{chunk-VGXNE37B.js → chunk-WPQ4D6T3.js} +1 -1
  18. package/dist/electron-server.js +187 -104
  19. package/dist/{hub-IR4INXSU.js → hub-B7NJSCWF.js} +1 -1
  20. package/dist/index.js +158 -19
  21. package/dist/{run-tests-FQHDUYOG.js → run-tests-2DYVHTIH.js} +2 -2
  22. package/dist/{run-tests-JVWIGY7P.js → run-tests-37FEBJTR.js} +1 -1
  23. package/dist/{semantic-MYAXLDCZ.js → semantic-3KJPAUW6.js} +3 -2
  24. package/dist/{semantic-ICJ536BG.js → semantic-YDRPPVWK.js} +3 -2
  25. package/dist/{server-UWKRV5DK.js → server-FCTPLKGO.js} +121 -13
  26. package/dist/{server-HTVVWKFN.js → server-S6JYNMMF.js} +7 -5
  27. package/dist/{task-orchestrator-6MI6LD7T.js → task-orchestrator-K6HDX4YE.js} +7 -5
  28. package/dist/{vector-store-UR7IARXB.js → vector-store-NDUFLNGN.js} +2 -1
  29. package/dist/{vector-store-YTVHACBV.js → vector-store-QARQ2P6D.js} +2 -1
  30. package/dist/web/client/app.js +201 -0
  31. package/dist/web/client/index.html +24 -0
  32. package/package.json +1 -1
@@ -7,19 +7,23 @@ import {
7
7
  import {
8
8
  fileCheckpoints
9
9
  } from "./chunk-4BKXL7SM.js";
10
+ import {
11
+ loadChatIndex,
12
+ searchChatMemory
13
+ } from "./chunk-ANYYM4CF.js";
10
14
  import {
11
15
  indexProject
12
16
  } from "./chunk-NHNWUBXB.js";
13
17
  import {
14
18
  hasSemanticIndex,
15
19
  semanticSearch
16
- } from "./chunk-CQQQFNND.js";
20
+ } from "./chunk-KJLJPUY2.js";
17
21
  import {
18
22
  loadIndex
19
23
  } from "./chunk-6VRJGH25.js";
20
24
  import {
21
25
  runTestsTool
22
- } from "./chunk-V3NMERIB.js";
26
+ } from "./chunk-EEEAFWNK.js";
23
27
  import {
24
28
  CONFIG_DIR_NAME,
25
29
  DEFAULT_MAX_TOOL_OUTPUT_CHARS_CAP,
@@ -27,7 +31,7 @@ import {
27
31
  SUBAGENT_ALLOWED_TOOLS,
28
32
  SUBAGENT_DEFAULT_MAX_ROUNDS,
29
33
  SUBAGENT_MAX_ROUNDS_LIMIT
30
- } from "./chunk-VGXNE37B.js";
34
+ } from "./chunk-WPQ4D6T3.js";
31
35
 
32
36
  // src/tools/types.ts
33
37
  function isFileWriteTool(name) {
@@ -4578,6 +4582,74 @@ ${lines.join("\n")}`;
4578
4582
  }
4579
4583
  };
4580
4584
 
4585
+ // src/tools/builtin/recall-memory.ts
4586
+ function formatHit(h, i) {
4587
+ const ts = h.chunk.timestamp.slice(0, 16).replace("T", " ");
4588
+ const title = h.chunk.sessionTitle ? ` \xB7 ${h.chunk.sessionTitle}` : "";
4589
+ const sid = h.chunk.sessionId.slice(0, 8);
4590
+ const score = h.score.toFixed(3);
4591
+ const body = h.chunk.text.length > 600 ? h.chunk.text.slice(0, 600) + "\u2026" : h.chunk.text;
4592
+ return `\u2500\u2500\u2500 Hit ${i + 1} (score ${score}, session ${sid}${title}, ${ts}) \u2500\u2500\u2500
4593
+ ` + body;
4594
+ }
4595
+ var recallMemoryTool = {
4596
+ definition: {
4597
+ name: "recall_memory",
4598
+ description: 'Semantic search over past chat sessions. Call this whenever the user references something that may have been discussed before ("last time", "remember", "\u4E4B\u524D", "\u4E0A\u6B21"), when context is ambiguous and continuity matters, or when you want to check what decisions or preferences have been established across prior conversations. Returns up to `topK` relevant snippets with session id, timestamp, and cosine similarity score. Prefer this over asking the user "can you remind me".',
4599
+ parameters: {
4600
+ query: {
4601
+ type: "string",
4602
+ description: "Natural-language description of what to recall. Chinese or English both work.",
4603
+ required: true
4604
+ },
4605
+ topK: {
4606
+ type: "number",
4607
+ description: "Max number of snippets to return (default 5, max 20).",
4608
+ required: false
4609
+ },
4610
+ excludeCurrentSession: {
4611
+ type: "boolean",
4612
+ description: "If true, exclude the current session from results (avoid echoing what you just said). Default false.",
4613
+ required: false
4614
+ },
4615
+ currentSessionId: {
4616
+ type: "string",
4617
+ description: "Session ID to exclude when excludeCurrentSession=true. Usually the active session.",
4618
+ required: false
4619
+ },
4620
+ minScore: {
4621
+ type: "number",
4622
+ description: "Drop hits below this cosine score. Default 0.25. Raise to 0.35+ for stricter matches.",
4623
+ required: false
4624
+ }
4625
+ },
4626
+ dangerous: false
4627
+ },
4628
+ async execute(args) {
4629
+ const query = String(args["query"] ?? "").trim();
4630
+ if (!query) throw new ToolError("recall_memory", "query is required");
4631
+ const topK = Math.max(1, Math.min(20, Number(args["topK"] ?? 5)));
4632
+ const excludeCurrent = Boolean(args["excludeCurrentSession"]);
4633
+ const currentId = args["currentSessionId"] ? String(args["currentSessionId"]) : void 0;
4634
+ const minScore = args["minScore"] !== void 0 ? Number(args["minScore"]) : 0.25;
4635
+ const status = loadChatIndex();
4636
+ if (!status) {
4637
+ return "No chat memory index found. The index is built on REPL startup, or run `/memory rebuild` manually. If you have no past sessions yet, this is expected.";
4638
+ }
4639
+ const hits = await searchChatMemory(query, {
4640
+ topK,
4641
+ minScore,
4642
+ excludeSessionId: excludeCurrent ? currentId : void 0
4643
+ });
4644
+ if (hits.length === 0) {
4645
+ return `No memories matched "${query}" above score ${minScore}. Index has ${status.idx.chunks.length} chunks across ${Object.keys(status.idx.sessionMtimes).length} sessions. Consider lowering minScore to 0.15 or rephrasing the query.`;
4646
+ }
4647
+ const header = `Found ${hits.length} memory hit(s) for "${query}" (min-score ${minScore}):
4648
+ `;
4649
+ return header + "\n" + hits.map(formatHit).join("\n\n");
4650
+ }
4651
+ };
4652
+
4581
4653
  // src/core/token-estimator.ts
4582
4654
  var CJK_REGEX = /[\u2E80-\u9FFF\uA000-\uA4FF\uAC00-\uD7FF\uF900-\uFAFF\uFE30-\uFE4F\uFF00-\uFFEF]/g;
4583
4655
  function estimateTokens(text) {
@@ -4637,6 +4709,7 @@ var ToolRegistry = class {
4637
4709
  this.register(getOutlineTool);
4638
4710
  this.register(findReferencesTool);
4639
4711
  this.register(searchCodeTool);
4712
+ this.register(recallMemoryTool);
4640
4713
  }
4641
4714
  register(tool) {
4642
4715
  this.tools.set(tool.definition.name, tool);
@@ -0,0 +1,50 @@
1
+ // src/symbols/embedder.ts
2
+ import path from "path";
3
+ import os from "os";
4
+ import fs from "fs";
5
+ var EMBEDDING_MODEL_ID = "Xenova/paraphrase-multilingual-MiniLM-L12-v2";
6
+ var EMBEDDING_DIM = 384;
7
+ var pipelinePromise = null;
8
+ function cacheDir() {
9
+ return path.join(os.homedir(), ".aicli", "models");
10
+ }
11
+ async function getEmbedder() {
12
+ if (pipelinePromise) return pipelinePromise;
13
+ pipelinePromise = (async () => {
14
+ const mod = await import("@huggingface/transformers");
15
+ const dir = cacheDir();
16
+ fs.mkdirSync(dir, { recursive: true });
17
+ mod.env.cacheDir = dir;
18
+ mod.env.allowRemoteModels = true;
19
+ mod.env.allowLocalModels = true;
20
+ const pipe = await mod.pipeline("feature-extraction", EMBEDDING_MODEL_ID, {
21
+ // Keep the ONNX session in float32; int8 quantization exists but the
22
+ // quality drop on short code identifiers is noticeable.
23
+ dtype: "fp32"
24
+ });
25
+ return pipe;
26
+ })();
27
+ return pipelinePromise;
28
+ }
29
+ async function embed(texts) {
30
+ if (texts.length === 0) return [];
31
+ const pipe = await getEmbedder();
32
+ const out = await pipe(texts, { pooling: "mean", normalize: true });
33
+ const batch = texts.length;
34
+ const dim = EMBEDDING_DIM;
35
+ const rows = new Array(batch);
36
+ for (let i = 0; i < batch; i++) {
37
+ rows[i] = new Float32Array(out.data.buffer, out.data.byteOffset + i * dim * 4, dim).slice();
38
+ }
39
+ return rows;
40
+ }
41
+ async function embedOne(text) {
42
+ const [vec] = await embed([text]);
43
+ return vec;
44
+ }
45
+
46
+ export {
47
+ EMBEDDING_DIM,
48
+ embed,
49
+ embedOne
50
+ };
@@ -0,0 +1,52 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/symbols/embedder.ts
4
+ import path from "path";
5
+ import os from "os";
6
+ import fs from "fs";
7
+ var EMBEDDING_MODEL_ID = "Xenova/paraphrase-multilingual-MiniLM-L12-v2";
8
+ var EMBEDDING_DIM = 384;
9
+ var pipelinePromise = null;
10
+ function cacheDir() {
11
+ return path.join(os.homedir(), ".aicli", "models");
12
+ }
13
+ async function getEmbedder() {
14
+ if (pipelinePromise) return pipelinePromise;
15
+ pipelinePromise = (async () => {
16
+ const mod = await import("@huggingface/transformers");
17
+ const dir = cacheDir();
18
+ fs.mkdirSync(dir, { recursive: true });
19
+ mod.env.cacheDir = dir;
20
+ mod.env.allowRemoteModels = true;
21
+ mod.env.allowLocalModels = true;
22
+ const pipe = await mod.pipeline("feature-extraction", EMBEDDING_MODEL_ID, {
23
+ // Keep the ONNX session in float32; int8 quantization exists but the
24
+ // quality drop on short code identifiers is noticeable.
25
+ dtype: "fp32"
26
+ });
27
+ return pipe;
28
+ })();
29
+ return pipelinePromise;
30
+ }
31
+ async function embed(texts) {
32
+ if (texts.length === 0) return [];
33
+ const pipe = await getEmbedder();
34
+ const out = await pipe(texts, { pooling: "mean", normalize: true });
35
+ const batch = texts.length;
36
+ const dim = EMBEDDING_DIM;
37
+ const rows = new Array(batch);
38
+ for (let i = 0; i < batch; i++) {
39
+ rows[i] = new Float32Array(out.data.buffer, out.data.byteOffset + i * dim * 4, dim).slice();
40
+ }
41
+ return rows;
42
+ }
43
+ async function embedOne(text) {
44
+ const [vec] = await embed([text]);
45
+ return vec;
46
+ }
47
+
48
+ export {
49
+ EMBEDDING_DIM,
50
+ embed,
51
+ embedOne
52
+ };
@@ -3,13 +3,15 @@ import {
3
3
  loadIndex
4
4
  } from "./chunk-6VRJGH25.js";
5
5
  import {
6
- EMBEDDING_DIM,
7
- embed,
8
- embedOne,
9
6
  loadVectorStore,
10
7
  saveVectorStore,
11
8
  searchVectorStore
12
- } from "./chunk-PFYAAX2S.js";
9
+ } from "./chunk-2DXY7UGF.js";
10
+ import {
11
+ EMBEDDING_DIM,
12
+ embed,
13
+ embedOne
14
+ } from "./chunk-KHYD3WXE.js";
13
15
 
14
16
  // src/symbols/semantic.ts
15
17
  function pathTokens(absFile, root) {
@@ -2,13 +2,15 @@ import {
2
2
  loadIndex
3
3
  } from "./chunk-BJAT4GNC.js";
4
4
  import {
5
- EMBEDDING_DIM,
6
- embed,
7
- embedOne,
8
5
  loadVectorStore,
9
6
  saveVectorStore,
10
7
  searchVectorStore
11
- } from "./chunk-XMA222FQ.js";
8
+ } from "./chunk-PASCDYMH.js";
9
+ import {
10
+ EMBEDDING_DIM,
11
+ embed,
12
+ embedOne
13
+ } from "./chunk-JV5N65KN.js";
12
14
 
13
15
  // src/symbols/semantic.ts
14
16
  function pathTokens(absFile, root) {
@@ -1,70 +1,27 @@
1
- // src/symbols/vector-store.ts
2
- import fs2 from "fs";
3
- import path2 from "path";
4
- import os2 from "os";
5
- import crypto from "crypto";
1
+ import {
2
+ EMBEDDING_DIM
3
+ } from "./chunk-JV5N65KN.js";
6
4
 
7
- // src/symbols/embedder.ts
5
+ // src/symbols/vector-store.ts
6
+ import fs from "fs";
8
7
  import path from "path";
9
8
  import os from "os";
10
- import fs from "fs";
11
- var EMBEDDING_MODEL_ID = "Xenova/paraphrase-multilingual-MiniLM-L12-v2";
12
- var EMBEDDING_DIM = 384;
13
- var pipelinePromise = null;
14
- function cacheDir() {
15
- return path.join(os.homedir(), ".aicli", "models");
16
- }
17
- async function getEmbedder() {
18
- if (pipelinePromise) return pipelinePromise;
19
- pipelinePromise = (async () => {
20
- const mod = await import("@huggingface/transformers");
21
- const dir = cacheDir();
22
- fs.mkdirSync(dir, { recursive: true });
23
- mod.env.cacheDir = dir;
24
- mod.env.allowRemoteModels = true;
25
- mod.env.allowLocalModels = true;
26
- const pipe = await mod.pipeline("feature-extraction", EMBEDDING_MODEL_ID, {
27
- // Keep the ONNX session in float32; int8 quantization exists but the
28
- // quality drop on short code identifiers is noticeable.
29
- dtype: "fp32"
30
- });
31
- return pipe;
32
- })();
33
- return pipelinePromise;
34
- }
35
- async function embed(texts) {
36
- if (texts.length === 0) return [];
37
- const pipe = await getEmbedder();
38
- const out = await pipe(texts, { pooling: "mean", normalize: true });
39
- const batch = texts.length;
40
- const dim = EMBEDDING_DIM;
41
- const rows = new Array(batch);
42
- for (let i = 0; i < batch; i++) {
43
- rows[i] = new Float32Array(out.data.buffer, out.data.byteOffset + i * dim * 4, dim).slice();
44
- }
45
- return rows;
46
- }
47
- async function embedOne(text) {
48
- const [vec] = await embed([text]);
49
- return vec;
50
- }
51
-
52
- // src/symbols/vector-store.ts
9
+ import crypto from "crypto";
53
10
  var MAGIC = 1094927190;
54
11
  var VERSION = 1;
55
12
  var HEADER_BYTES = 16;
56
13
  function indexDir() {
57
- return path2.join(os2.homedir(), ".aicli", "index");
14
+ return path.join(os.homedir(), ".aicli", "index");
58
15
  }
59
16
  function projectHash(root) {
60
- return crypto.createHash("sha1").update(path2.resolve(root).toLowerCase()).digest("hex").slice(0, 16);
17
+ return crypto.createHash("sha1").update(path.resolve(root).toLowerCase()).digest("hex").slice(0, 16);
61
18
  }
62
19
  function vecPath(root) {
63
- return path2.join(indexDir(), `${projectHash(root)}.vec`);
20
+ return path.join(indexDir(), `${projectHash(root)}.vec`);
64
21
  }
65
22
  function emptyVectorStore(root) {
66
23
  return {
67
- root: path2.resolve(root),
24
+ root: path.resolve(root),
68
25
  count: 0,
69
26
  dim: EMBEDDING_DIM,
70
27
  vectors: new Float32Array(0),
@@ -79,7 +36,7 @@ function saveVectorStore(root, indices, vectors) {
79
36
  }
80
37
  const count = indices.length;
81
38
  const dir = indexDir();
82
- fs2.mkdirSync(dir, { recursive: true });
39
+ fs.mkdirSync(dir, { recursive: true });
83
40
  const totalBytes = HEADER_BYTES + count * 4 + count * EMBEDDING_DIM * 4;
84
41
  const buf = Buffer.alloc(totalBytes);
85
42
  buf.writeUInt32LE(MAGIC, 0);
@@ -90,15 +47,15 @@ function saveVectorStore(root, indices, vectors) {
90
47
  Buffer.from(vectors.buffer, vectors.byteOffset, vectors.byteLength).copy(buf, HEADER_BYTES + count * 4);
91
48
  const target = vecPath(root);
92
49
  const tmp = `${target}.tmp`;
93
- fs2.writeFileSync(tmp, buf);
94
- fs2.renameSync(tmp, target);
50
+ fs.writeFileSync(tmp, buf);
51
+ fs.renameSync(tmp, target);
95
52
  }
96
53
  function loadVectorStore(root) {
97
54
  const p = vecPath(root);
98
- if (!fs2.existsSync(p)) return null;
55
+ if (!fs.existsSync(p)) return null;
99
56
  let buf;
100
57
  try {
101
- buf = fs2.readFileSync(p);
58
+ buf = fs.readFileSync(p);
102
59
  } catch {
103
60
  return null;
104
61
  }
@@ -119,12 +76,12 @@ function loadVectorStore(root) {
119
76
  buf.byteOffset + HEADER_BYTES + count * 4 + count * dim * 4
120
77
  )
121
78
  );
122
- return { root: path2.resolve(root), count, dim, vectors, symbolIdx };
79
+ return { root: path.resolve(root), count, dim, vectors, symbolIdx };
123
80
  }
124
81
  function clearVectorStore(root) {
125
82
  const p = vecPath(root);
126
83
  try {
127
- if (fs2.existsSync(p)) fs2.unlinkSync(p);
84
+ if (fs.existsSync(p)) fs.unlinkSync(p);
128
85
  } catch {
129
86
  }
130
87
  }
@@ -156,9 +113,6 @@ function searchVectorStore(store, queryVec, k) {
156
113
  }
157
114
 
158
115
  export {
159
- EMBEDDING_DIM,
160
- embed,
161
- embedOne,
162
116
  emptyVectorStore,
163
117
  saveVectorStore,
164
118
  loadVectorStore,
@@ -1,7 +1,7 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  // src/core/constants.ts
4
- var VERSION = "0.4.88";
4
+ var VERSION = "0.4.90";
5
5
  var APP_NAME = "ai-cli";
6
6
  var CONFIG_DIR_NAME = ".aicli";
7
7
  var CONFIG_FILE_NAME = "config.json";