reasonix 0.31.0 → 0.33.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -7
- package/README.zh-CN.md +2 -6
- package/dashboard/dist/app.js +348 -80
- package/dashboard/dist/app.js.map +1 -1
- package/dist/cli/chat-EIFLHBZ6.js +39 -0
- package/dist/cli/chunk-2AWTGJ2C.js +110 -0
- package/dist/cli/chunk-2AWTGJ2C.js.map +1 -0
- package/dist/cli/chunk-3Q3C4W66.js +30 -0
- package/dist/cli/chunk-3Q3C4W66.js.map +1 -0
- package/dist/cli/chunk-4DCHFFEY.js +149 -0
- package/dist/cli/chunk-4DCHFFEY.js.map +1 -0
- package/dist/cli/chunk-5X7LZJDE.js +36 -0
- package/dist/cli/chunk-5X7LZJDE.js.map +1 -0
- package/dist/cli/chunk-6TMHAK5D.js +576 -0
- package/dist/cli/chunk-6TMHAK5D.js.map +1 -0
- package/dist/cli/chunk-APPB3ZPQ.js +43 -0
- package/dist/cli/chunk-APPB3ZPQ.js.map +1 -0
- package/dist/cli/chunk-BQNUJJN7.js +42 -0
- package/dist/cli/chunk-BQNUJJN7.js.map +1 -0
- package/dist/cli/chunk-CPOV2O73.js +39 -0
- package/dist/cli/chunk-CPOV2O73.js.map +1 -0
- package/dist/cli/chunk-D5DKXIP5.js +368 -0
- package/dist/cli/chunk-D5DKXIP5.js.map +1 -0
- package/dist/cli/chunk-DFP4YSVM.js +247 -0
- package/dist/cli/chunk-DFP4YSVM.js.map +1 -0
- package/dist/cli/chunk-DULSP7JH.js +410 -0
- package/dist/cli/chunk-DULSP7JH.js.map +1 -0
- package/dist/cli/chunk-FM57FNPJ.js +46 -0
- package/dist/cli/chunk-FM57FNPJ.js.map +1 -0
- package/dist/cli/chunk-FWGEHRB7.js +54 -0
- package/dist/cli/chunk-FWGEHRB7.js.map +1 -0
- package/dist/cli/chunk-FXGQ5NHE.js +513 -0
- package/dist/cli/chunk-FXGQ5NHE.js.map +1 -0
- package/dist/cli/chunk-G3XNWSFN.js +53 -0
- package/dist/cli/chunk-G3XNWSFN.js.map +1 -0
- package/dist/cli/chunk-I6YIAK6C.js +757 -0
- package/dist/cli/chunk-I6YIAK6C.js.map +1 -0
- package/dist/cli/chunk-J5VLP23S.js +94 -0
- package/dist/cli/chunk-J5VLP23S.js.map +1 -0
- package/dist/cli/chunk-KMWKGPFZ.js +303 -0
- package/dist/cli/chunk-KMWKGPFZ.js.map +1 -0
- package/dist/cli/chunk-LVQX5KGF.js +14934 -0
- package/dist/cli/chunk-LVQX5KGF.js.map +1 -0
- package/dist/cli/chunk-MHDNZXJJ.js +48 -0
- package/dist/cli/chunk-MHDNZXJJ.js.map +1 -0
- package/dist/cli/chunk-ORM6PK57.js +140 -0
- package/dist/cli/chunk-ORM6PK57.js.map +1 -0
- package/dist/cli/chunk-Q5GRLZJF.js +99 -0
- package/dist/cli/chunk-Q5GRLZJF.js.map +1 -0
- package/dist/cli/chunk-Q6YFXW7H.js +4986 -0
- package/dist/cli/chunk-Q6YFXW7H.js.map +1 -0
- package/dist/cli/chunk-QGE6AF76.js +1467 -0
- package/dist/cli/chunk-QGE6AF76.js.map +1 -0
- package/dist/cli/chunk-RFX7TYVV.js +28 -0
- package/dist/cli/chunk-RFX7TYVV.js.map +1 -0
- package/dist/cli/chunk-RZILUXUC.js +940 -0
- package/dist/cli/chunk-RZILUXUC.js.map +1 -0
- package/dist/cli/chunk-SDE5U32Z.js +535 -0
- package/dist/cli/chunk-SDE5U32Z.js.map +1 -0
- package/dist/cli/chunk-SOZE7V7V.js +340 -0
- package/dist/cli/chunk-SOZE7V7V.js.map +1 -0
- package/dist/cli/chunk-U3V2ZQ5J.js +479 -0
- package/dist/cli/chunk-U3V2ZQ5J.js.map +1 -0
- package/dist/cli/chunk-W4LDFAZ6.js +1544 -0
- package/dist/cli/chunk-W4LDFAZ6.js.map +1 -0
- package/dist/cli/chunk-WBDE4IRI.js +208 -0
- package/dist/cli/chunk-WBDE4IRI.js.map +1 -0
- package/dist/cli/chunk-XHQIK7B6.js +189 -0
- package/dist/cli/chunk-XHQIK7B6.js.map +1 -0
- package/dist/cli/chunk-XJLZ4HKU.js +307 -0
- package/dist/cli/chunk-XJLZ4HKU.js.map +1 -0
- package/dist/cli/chunk-ZPTSJGX5.js +88 -0
- package/dist/cli/chunk-ZPTSJGX5.js.map +1 -0
- package/dist/cli/chunk-ZTLZO42A.js +231 -0
- package/dist/cli/chunk-ZTLZO42A.js.map +1 -0
- package/dist/cli/code-F4KJOE3K.js +151 -0
- package/dist/cli/code-F4KJOE3K.js.map +1 -0
- package/dist/cli/commands-JWT2MWVH.js +352 -0
- package/dist/cli/commands-JWT2MWVH.js.map +1 -0
- package/dist/cli/commit-RPZBOZS2.js +288 -0
- package/dist/cli/commit-RPZBOZS2.js.map +1 -0
- package/dist/cli/diff-NTEHCSDW.js +145 -0
- package/dist/cli/diff-NTEHCSDW.js.map +1 -0
- package/dist/cli/doctor-3TGB2NZN.js +19 -0
- package/dist/cli/doctor-3TGB2NZN.js.map +1 -0
- package/dist/cli/events-P27CX7LN.js +338 -0
- package/dist/cli/events-P27CX7LN.js.map +1 -0
- package/dist/cli/index.js +83 -34028
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/mcp-ARTNQ24O.js +266 -0
- package/dist/cli/mcp-ARTNQ24O.js.map +1 -0
- package/dist/cli/mcp-browse-HLO2ENDL.js +163 -0
- package/dist/cli/mcp-browse-HLO2ENDL.js.map +1 -0
- package/dist/cli/mcp-inspect-T2HBR22P.js +103 -0
- package/dist/cli/mcp-inspect-T2HBR22P.js.map +1 -0
- package/dist/cli/{prompt-XHICFAYN.js → prompt-V47QKSAR.js} +3 -2
- package/dist/cli/prompt-V47QKSAR.js.map +1 -0
- package/dist/cli/prune-sessions-ERL6B4G5.js +42 -0
- package/dist/cli/prune-sessions-ERL6B4G5.js.map +1 -0
- package/dist/cli/replay-TMJASRC4.js +273 -0
- package/dist/cli/replay-TMJASRC4.js.map +1 -0
- package/dist/cli/run-JMEOTQCG.js +215 -0
- package/dist/cli/run-JMEOTQCG.js.map +1 -0
- package/dist/cli/server-SYC3OVOP.js +2967 -0
- package/dist/cli/server-SYC3OVOP.js.map +1 -0
- package/dist/cli/sessions-MOJAALJI.js +102 -0
- package/dist/cli/sessions-MOJAALJI.js.map +1 -0
- package/dist/cli/setup-CCJZAWTY.js +404 -0
- package/dist/cli/setup-CCJZAWTY.js.map +1 -0
- package/dist/cli/stats-5RJCATCE.js +12 -0
- package/dist/cli/stats-5RJCATCE.js.map +1 -0
- package/dist/cli/update-4TJWRUIN.js +90 -0
- package/dist/cli/update-4TJWRUIN.js.map +1 -0
- package/dist/cli/version-3MYFE4G6.js +29 -0
- package/dist/cli/version-3MYFE4G6.js.map +1 -0
- package/dist/index.d.ts +49 -96
- package/dist/index.js +567 -759
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/dist/cli/chunk-VWFJNLIK.js +0 -1031
- package/dist/cli/chunk-VWFJNLIK.js.map +0 -1
- /package/dist/cli/{prompt-XHICFAYN.js.map → chat-EIFLHBZ6.js.map} +0 -0
|
@@ -0,0 +1,352 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
buildIndex,
|
|
4
|
+
checkOllamaStatus,
|
|
5
|
+
pullOllamaModel,
|
|
6
|
+
startOllamaDaemon
|
|
7
|
+
} from "./chunk-RZILUXUC.js";
|
|
8
|
+
import "./chunk-5X7LZJDE.js";
|
|
9
|
+
import {
|
|
10
|
+
loadIndexConfig,
|
|
11
|
+
resolveSemanticEmbeddingConfig
|
|
12
|
+
} from "./chunk-DULSP7JH.js";
|
|
13
|
+
|
|
14
|
+
// src/cli/commands/index.ts
|
|
15
|
+
import { resolve } from "path";
|
|
16
|
+
|
|
17
|
+
// src/index/semantic/i18n.ts
|
|
18
|
+
// Cached result of locale detection; computed once per process.
var cachedLocale = null;
// Picks the UI locale ("zh" or "en"). Precedence: REASONIX_LANG override,
// then POSIX locale env vars, then the runtime's Intl locale, else "en".
// The result is memoized in `cachedLocale`.
function detectLocale() {
  if (cachedLocale) return cachedLocale;
  const forced = (process.env.REASONIX_LANG ?? "").toLowerCase();
  if (forced === "zh" || forced === "en") {
    cachedLocale = forced;
    return cachedLocale;
  }
  const ZH_TAG = /^zh[-_]/i;
  const envLocale = process.env.LANG ?? process.env.LC_ALL ?? process.env.LC_MESSAGES ?? "";
  let detected = ZH_TAG.test(envLocale) ? "zh" : null;
  if (!detected) {
    try {
      const systemLocale = new Intl.DateTimeFormat().resolvedOptions().locale ?? "";
      if (ZH_TAG.test(systemLocale)) detected = "zh";
    } catch {
      // Intl may be unavailable or throw in stripped-down runtimes; fall through to "en".
    }
  }
  cachedLocale = detected ?? "en";
  return cachedLocale;
}
|
|
42
|
+
// Looks up a message template by key in the active locale's catalog
// (falling back to EN for keys missing from ZH) and substitutes
// `{name}` placeholders from `vars`. Placeholders with no matching
// var are left verbatim so missing substitutions are visible.
function t(key, vars = {}) {
  const table = detectLocale() === "zh" ? ZH : EN;
  const template = table[key] ?? EN[key];
  return template.replace(/\{(\w+)\}/g, (placeholder, slot) => {
    const value = vars[slot];
    if (value === undefined) {
      return `{${slot}}`;
    }
    return String(value);
  });
}
|
|
51
|
+
// English message catalog for the semantic-index CLI. Keys are looked
// up by t(); `{name}` tokens are substituted at render time.
var EN = {
  // ── preflight ─────────────────────────────────────────────────────
  ollamaNotFound: "\u2717 `ollama` not found on PATH.\n  Install from https://ollama.com (one-time, ~150 MB), then retry.\n",
  daemonNotReachableHint: "\u2717 Ollama daemon not reachable. Run `ollama serve` and retry, or pass --yes to start it automatically.\n",
  daemonStartConfirm: "Ollama daemon isn't running. Start `ollama serve` now?",
  daemonAbortStart: "\u2717 aborted \u2014 start `ollama serve` yourself and retry.\n",
  daemonStarting: "\u25B8 starting `ollama serve`\u2026\n",
  daemonStartTimeout: "\u2717 daemon didn't come up within 15s. Try `ollama serve` in a separate terminal and retry.\n",
  daemonReady: "\u2713 daemon up{pid}\n",
  modelNotPulledHint: '\u2717 embedding model "{model}" not pulled. Run `ollama pull {model}` and retry, or pass --yes to pull it automatically.\n',
  modelPullConfirm: `Embedding model "{model}" isn't pulled yet. Pull it now? (~274 MB for nomic-embed-text)`,
  modelAbortPull: "\u2717 aborted \u2014 pull the model yourself and retry.\n",
  modelPulling: "\u25B8 pulling {model}\u2026\n",
  modelPullFailed: "\u2717 `ollama pull {model}` failed (exit {code}).\n",
  modelPulled: "\u2713 {model} pulled\n",
  // ── progress ─────────────────────────────────────────────────────
  // The TTY-mode progress writer paints `<spinner> <status> <elapsed>s`
  // every 120ms. The status itself comes from one of these keys based
  // on the current phase. {files}, {done}, {total}, {pct} are
  // substituted by the writer.
  progressStarting: "starting\u2026",
  progressScan: "scanning project \xB7 {files} files",
  progressEmbed: "embedding {done}/{total} chunks \xB7 {pct}%",
  progressEmbedHeartbeat: "  {done}/{total}\n",
  progressScanLine: "scanning files\u2026\n",
  progressEmbedLine: "embedding {total} chunks across {files} files\u2026\n",
  // Final result line after a successful build.
  indexSuccess: "\u2713 indexed {scanned} files ({changed} changed, {added} new chunks, {removed} stale removed) in {seconds}s\n",
  indexSuccessWithSkips: "\u2713 indexed {scanned} files ({changed} changed, {added} new chunks, {removed} stale removed, {skipped} skipped due to embed errors) in {seconds}s\n",
  indexNothingToDo: "  (nothing to do \u2014 re-run with --rebuild to force a full rebuild)\n",
  indexFailed: "\u2717 index failed: {msg}\n",
  // ── /semantic slash ──────────────────────────────────────────────
  slashHeader: "semantic_search status",
  slashEnabled: "\u2713 enabled \u2014 index built, tool registered.",
  slashEnabledDetail: "  index size: {chunks} chunks across {files} files",
  slashEnabledHowto: "  the model will call semantic_search automatically when it fits.",
  slashIndexMissing: "\u2717 no index built yet for this project.",
  slashHowToBuild: "  to enable, exit Reasonix and run in your shell:\n    reasonix index",
  slashOllamaMissing: "  prerequisite: install Ollama from https://ollama.com",
  slashDaemonDown: "  Ollama is installed but the daemon isn't running. start it with: ollama serve",
  slashIndexInfo: "  what semantic_search does: cross-language code understanding via local embeddings.\n  better than grep when you describe WHAT something does, not WHICH token to find."
};
|
|
93
|
+
// Simplified-Chinese message catalog. Keys mirror EN exactly; t()
// falls back to EN for any key missing here.
var ZH = {
  // ── preflight ─────────────────────────────────────────────────────
  ollamaNotFound: "\u2717 \u672A\u627E\u5230 `ollama`\u3002\n  \u8BF7\u8BBF\u95EE https://ollama.com \u5B89\u88C5\uFF08\u4E00\u6B21\u6027\uFF0C\u7EA6 150 MB\uFF09\uFF0C\u7136\u540E\u91CD\u8BD5\u3002\n",
  daemonNotReachableHint: "\u2717 Ollama \u5B88\u62A4\u8FDB\u7A0B\u672A\u542F\u52A8\u3002\u8BF7\u8FD0\u884C `ollama serve` \u540E\u91CD\u8BD5\uFF0C\u6216\u52A0 --yes \u8BA9\u6211\u81EA\u52A8\u542F\u52A8\u3002\n",
  daemonStartConfirm: "Ollama \u5B88\u62A4\u8FDB\u7A0B\u672A\u8FD0\u884C\u3002\u73B0\u5728\u542F\u52A8 `ollama serve` \u5417\uFF1F",
  daemonAbortStart: "\u2717 \u5DF2\u53D6\u6D88\u2014\u2014\u8BF7\u81EA\u884C\u8FD0\u884C `ollama serve` \u540E\u91CD\u8BD5\u3002\n",
  daemonStarting: "\u25B8 \u6B63\u5728\u542F\u52A8 `ollama serve`\u2026\n",
  daemonStartTimeout: "\u2717 15 \u79D2\u5185\u5B88\u62A4\u8FDB\u7A0B\u672A\u5C31\u7EEA\u3002\u8BF7\u5728\u53E6\u4E00\u4E2A\u7EC8\u7AEF\u8FD0\u884C `ollama serve` \u540E\u91CD\u8BD5\u3002\n",
  daemonReady: "\u2713 \u5B88\u62A4\u8FDB\u7A0B\u5DF2\u542F\u52A8{pid}\n",
  modelNotPulledHint: '\u2717 \u5D4C\u5165\u6A21\u578B "{model}" \u672A\u4E0B\u8F7D\u3002\u8BF7\u8FD0\u884C `ollama pull {model}` \u540E\u91CD\u8BD5\uFF0C\u6216\u52A0 --yes \u8BA9\u6211\u81EA\u52A8\u4E0B\u8F7D\u3002\n',
  modelPullConfirm: '\u5D4C\u5165\u6A21\u578B "{model}" \u8FD8\u672A\u4E0B\u8F7D\u3002\u73B0\u5728\u4E0B\u8F7D\u5417\uFF1F\uFF08nomic-embed-text \u7EA6 274 MB\uFF09',
  modelAbortPull: "\u2717 \u5DF2\u53D6\u6D88\u2014\u2014\u8BF7\u81EA\u884C\u4E0B\u8F7D\u6A21\u578B\u540E\u91CD\u8BD5\u3002\n",
  modelPulling: "\u25B8 \u6B63\u5728\u4E0B\u8F7D {model}\u2026\n",
  modelPullFailed: "\u2717 `ollama pull {model}` \u5931\u8D25\uFF08\u9000\u51FA\u7801 {code}\uFF09\u3002\n",
  modelPulled: "\u2713 {model} \u4E0B\u8F7D\u5B8C\u6210\n",
  // ── progress ─────────────────────────────────────────────────────
  progressStarting: "\u6B63\u5728\u542F\u52A8\u2026",
  progressScan: "\u626B\u63CF\u9879\u76EE \xB7 \u5DF2\u626B\u63CF {files} \u4E2A\u6587\u4EF6",
  progressEmbed: "\u6B63\u5728\u5411\u91CF\u5316 {done}/{total} \u4E2A\u7247\u6BB5 \xB7 {pct}%",
  progressEmbedHeartbeat: "  {done}/{total}\n",
  progressScanLine: "\u6B63\u5728\u626B\u63CF\u6587\u4EF6\u2026\n",
  progressEmbedLine: "\u6B63\u5728\u5411\u91CF\u5316 {total} \u4E2A\u7247\u6BB5\uFF08\u6D89\u53CA {files} \u4E2A\u6587\u4EF6\uFF09\u2026\n",
  indexSuccess: "\u2713 \u5DF2\u5EFA\u7ACB\u7D22\u5F15\uFF1A\u626B\u63CF {scanned} \u4E2A\u6587\u4EF6\uFF08{changed} \u4E2A\u6709\u53D8\u5316\uFF0C\u65B0\u589E {added} \u4E2A\u7247\u6BB5\uFF0C\u79FB\u9664 {removed} \u4E2A\u8FC7\u671F\uFF09\uFF1B\u8017\u65F6 {seconds}s\n",
  indexSuccessWithSkips: "\u2713 \u5DF2\u5EFA\u7ACB\u7D22\u5F15\uFF1A\u626B\u63CF {scanned} \u4E2A\u6587\u4EF6\uFF08{changed} \u4E2A\u6709\u53D8\u5316\uFF0C\u65B0\u589E {added} \u4E2A\u7247\u6BB5\uFF0C\u79FB\u9664 {removed} \u4E2A\u8FC7\u671F\uFF0C\u8DF3\u8FC7 {skipped} \u4E2A\u5D4C\u5165\u5931\u8D25\u7684\u7247\u6BB5\uFF09\uFF1B\u8017\u65F6 {seconds}s\n",
  indexNothingToDo: "  \uFF08\u6CA1\u6709\u53D8\u5316\u2014\u2014\u52A0 --rebuild \u5F3A\u5236\u91CD\u5EFA\uFF09\n",
  indexFailed: "\u2717 \u5EFA\u7ACB\u7D22\u5F15\u5931\u8D25\uFF1A{msg}\n",
  // ── /semantic slash ──────────────────────────────────────────────
  slashHeader: "semantic_search \u72B6\u6001",
  slashEnabled: "\u2713 \u5DF2\u542F\u7528\u2014\u2014\u7D22\u5F15\u5DF2\u5EFA\u597D\uFF0C\u5DE5\u5177\u5DF2\u6CE8\u518C\u3002",
  slashEnabledDetail: "  \u7D22\u5F15\u89C4\u6A21\uFF1A{chunks} \u4E2A\u7247\u6BB5\uFF0C{files} \u4E2A\u6587\u4EF6",
  slashEnabledHowto: "  \u6A21\u578B\u5728\u5408\u9002\u7684\u65F6\u5019\u4F1A\u81EA\u52A8\u8C03\u7528 semantic_search\u3002",
  slashIndexMissing: "\u2717 \u5F53\u524D\u9879\u76EE\u8FD8\u6CA1\u6709\u7D22\u5F15\u3002",
  slashHowToBuild: "  \u542F\u7528\u65B9\u5F0F\uFF1A\u9000\u51FA Reasonix\uFF0C\u5728\u7EC8\u7AEF\u8FD0\u884C\uFF1A\n    reasonix index",
  slashOllamaMissing: "  \u524D\u7F6E\u4F9D\u8D56\uFF1A\u4ECE https://ollama.com \u5B89\u88C5 Ollama",
  slashDaemonDown: "  \u5DF2\u88C5 Ollama \u4F46\u5B88\u62A4\u8FDB\u7A0B\u672A\u542F\u52A8\uFF0C\u8BF7\u8FD0\u884C\uFF1Aollama serve",
  slashIndexInfo: '  semantic_search \u7528\u672C\u5730 embedding \u505A\u8DE8\u8BED\u8A00\u4EE3\u7801\u7406\u89E3\u3002\n  \u5F53\u4F60\u63CF\u8FF0"\u505A\u4EC0\u4E48"\u800C\u4E0D\u662F\u5177\u4F53 token \u65F6\uFF0C\u6BD4 grep \u66F4\u597D\u3002'
};
|
|
127
|
+
|
|
128
|
+
// src/index/semantic/preflight.ts
|
|
129
|
+
import { stdin, stdout } from "process";
|
|
130
|
+
import { createInterface } from "readline/promises";
|
|
131
|
+
// Verifies the local Ollama setup can serve embeddings: binary on PATH,
// daemon reachable, embedding model pulled. When interactive (or with
// opts.yesToAll) it offers to start the daemon and pull the model.
// Returns true when everything is ready; false when the user must act
// first (a hint has already been written via `log`).
async function ollamaPreflight(opts) {
  // Default sink is stderr so stdout stays pipeable.
  const log = opts.log ?? ((line) => process.stderr.write(line));
  const status = await checkOllamaStatus(opts.model, opts.baseUrl);
  if (!status.binaryFound) {
    log(t("ollamaNotFound"));
    return false;
  }
  if (!status.daemonRunning) {
    // Non-interactive runs must not prompt; bail with a hint unless --yes.
    if (!opts.interactive && !opts.yesToAll) {
      log(t("daemonNotReachableHint"));
      return false;
    }
    const ok = opts.yesToAll || await confirm(t("daemonStartConfirm"), true);
    if (!ok) {
      log(t("daemonAbortStart"));
      return false;
    }
    log(t("daemonStarting"));
    const started = await startOllamaDaemon({ baseUrl: opts.baseUrl, timeoutMs: 15e3 });
    if (!started.ready) {
      log(t("daemonStartTimeout"));
      return false;
    }
    log(t("daemonReady", { pid: started.pid ? ` (pid ${started.pid})` : "" }));
  }
  // If we just started the daemon, the initial status could not report the
  // model list — re-query; otherwise reuse the first status.
  const after = status.daemonRunning ? status : await checkOllamaStatus(opts.model, opts.baseUrl);
  if (!after.modelPulled) {
    if (!opts.interactive && !opts.yesToAll) {
      log(t("modelNotPulledHint", { model: opts.model }));
      return false;
    }
    const ok = opts.yesToAll || await confirm(t("modelPullConfirm", { model: opts.model }), true);
    if (!ok) {
      log(t("modelAbortPull"));
      return false;
    }
    log(t("modelPulling", { model: opts.model }));
    // `ollama pull` emits ANSI-animated progress; strip CSI escape
    // sequences (ESC [ ... letter) so relayed lines are plain text.
    const ESC = String.fromCharCode(27);
    const ANSI_CSI = new RegExp(`${ESC}\\[[0-9;]*[A-Za-z]`, "g");
    const code = await pullOllamaModel(opts.model, {
      onLine: (line) => {
        const cleaned = line.replace(ANSI_CSI, "").trim();
        if (cleaned.length === 0) return;
        log(`  ${cleaned}
`);
      }
    });
    if (code !== 0) {
      log(t("modelPullFailed", { model: opts.model, code }));
      return false;
    }
    log(t("modelPulled", { model: opts.model }));
  }
  return true;
}
|
|
186
|
+
// Provider-aware preflight gate. Remote OpenAI-compatible endpoints need
// no local setup checks; anything else goes through the full local
// Ollama preflight with the configured model and base URL.
async function semanticPreflight(config, opts) {
  if (config.provider === "openai-compat") {
    return true;
  }
  const preflightOpts = {
    ...opts,
    model: config.model,
    baseUrl: config.baseUrl
  };
  return ollamaPreflight(preflightOpts);
}
|
|
194
|
+
// Asks a yes/no question on the terminal. An empty answer takes the
// default; otherwise only "y"/"yes" (case-insensitive) counts as yes.
// The readline interface is always closed, even if the prompt throws.
async function confirm(question, defaultYes) {
  const hint = defaultYes ? "[Y/n]" : "[y/N]";
  const prompt = createInterface({ input: stdin, output: stdout });
  try {
    const answer = await prompt.question(`${question} ${hint} `);
    const normalized = answer.trim().toLowerCase();
    if (normalized === "") {
      return defaultYes;
    }
    return normalized === "y" || normalized === "yes";
  } finally {
    prompt.close();
  }
}
|
|
205
|
+
|
|
206
|
+
// src/cli/commands/index.ts
|
|
207
|
+
// `reasonix index` entry point: resolves embedding config, runs the
// provider preflight, builds the semantic index with live progress, and
// prints a summary. All progress/result output goes to stderr so stdout
// stays pipeable. Exits the process with code 1 on preflight failure or
// build error.
async function indexCommand(opts = {}) {
  const root = resolve(opts.dir ?? process.cwd());
  // Spinner UI only when both stderr and stdin are real terminals.
  const tty = process.stderr.isTTY === true && process.stdin.isTTY === true;
  const resolved = resolveSemanticEmbeddingConfig();
  // CLI flags override the resolved config; --ollama-url applies only
  // to the ollama provider.
  const embedding = resolved.provider === "ollama" ? {
    ...resolved,
    model: opts.model ?? resolved.model,
    baseUrl: opts.ollamaUrl ?? resolved.baseUrl
  } : {
    ...resolved,
    model: opts.model ?? resolved.model
  };
  const preflightOk = await semanticPreflight(embedding, {
    interactive: tty && !opts.yes,
    yesToAll: opts.yes ?? false
  });
  if (!preflightOk) process.exit(1);
  const writer = makeProgressWriter(tty);
  const t0 = Date.now();
  let result;
  try {
    result = await buildIndex(root, {
      ...embedding,
      rebuild: opts.rebuild,
      indexConfig: loadIndexConfig(),
      onProgress: (p) => writer.update(p)
    });
  } catch (err) {
    // Clear the spinner line before printing the error, or the message
    // would be appended to a half-painted progress line.
    writer.clear();
    const msg = err instanceof Error ? err.message : String(err);
    process.stderr.write(t("indexFailed", { msg }));
    process.exit(1);
  }
  writer.clear();
  const seconds = ((Date.now() - t0) / 1e3).toFixed(1);
  // Use the variant message that mentions skips only when some chunks
  // failed to embed.
  const successKey = result.chunksSkipped > 0 ? "indexSuccessWithSkips" : "indexSuccess";
  process.stderr.write(
    t(successKey, {
      scanned: result.filesScanned,
      changed: result.filesChanged,
      added: result.chunksAdded,
      removed: result.chunksRemoved,
      skipped: result.chunksSkipped,
      seconds
    })
  );
  const breakdown = renderSkipBreakdown(result.skipBuckets);
  if (breakdown) process.stderr.write(`${breakdown}
`);
  if (result.filesChanged === 0 && !opts.rebuild) {
    process.stderr.write(t("indexNothingToDo"));
  }
}
|
|
260
|
+
// Formats a one-line per-reason breakdown of skipped files, e.g.
// "  · skipped 3 files (gitignore: 2, tooLarge: 1)". Returns "" when
// nothing was skipped; buckets with a zero count are omitted.
function renderSkipBreakdown(buckets) {
  const skippedTotal = Object.values(buckets).reduce((sum, count) => sum + count, 0);
  if (skippedTotal === 0) return "";
  // Fixed order keeps the output stable across runs.
  const reasons = [
    "gitignore",
    "pattern",
    "defaultDir",
    "defaultFile",
    "binaryExt",
    "binaryContent",
    "tooLarge",
    "readError"
  ];
  const parts = reasons
    .filter((reason) => buckets[reason])
    .map((reason) => `${reason}: ${buckets[reason]}`);
  return `  \xB7 skipped ${skippedTotal} files (${parts.join(", ")})`;
}
|
|
274
|
+
// Braille-dot spinner frames cycled by the TTY progress writer.
var SPINNER_FRAMES = ["\u280B", "\u2819", "\u2839", "\u2838", "\u283C", "\u2834", "\u2826", "\u2827", "\u2807", "\u280F"];
// Repaint cadence of the TTY progress line, in milliseconds.
var SPINNER_INTERVAL_MS = 120;
|
|
276
|
+
// Chooses a progress writer: spinner UI for interactive terminals,
// append-only log lines otherwise.
function makeProgressWriter(tty) {
  return tty ? makeTtyWriter() : makeNonTtyWriter();
}
|
|
280
|
+
// Progress writer for non-interactive output (pipes, CI logs): one line
// when a phase begins, plus an embed heartbeat every 50 chunks. clear()
// is a no-op because nothing is repainted in place.
function makeNonTtyWriter() {
  let seenPhase = null;
  let lastReported = 0;
  // Announce a newly entered phase on its own line.
  const announcePhase = (progress) => {
    if (progress.phase === "scan") {
      process.stderr.write(t("progressScanLine"));
      return;
    }
    if (progress.phase === "embed") {
      process.stderr.write(
        t("progressEmbedLine", {
          total: progress.chunksTotal ?? 0,
          files: progress.filesChanged ?? 0
        })
      );
    }
  };
  return {
    update(progress) {
      if (progress.phase !== seenPhase) {
        seenPhase = progress.phase;
        announcePhase(progress);
      }
      const done = progress.chunksDone;
      // Heartbeat at most once per 50 completed chunks to keep logs short.
      if (progress.phase === "embed" && done !== undefined && done - lastReported >= 50) {
        lastReported = done;
        process.stderr.write(
          t("progressEmbedHeartbeat", {
            done,
            total: progress.chunksTotal ?? "?"
          })
        );
      }
    },
    clear() {
      // Nothing to erase: non-TTY output is append-only.
    }
  };
}
|
|
312
|
+
// Progress writer for interactive terminals: repaints a single
// `<spinner> <status> <elapsed>s` line on stderr every
// SPINNER_INTERVAL_MS, padding with spaces so a shorter line fully
// overwrites the previous one. clear() stops the timer and wipes the line.
function makeTtyWriter() {
  let status = t("progressStarting");
  let previousLen = 0;
  let tick = 0;
  const startedAt = Date.now();
  const paint = () => {
    const glyph = SPINNER_FRAMES[tick % SPINNER_FRAMES.length];
    tick++;
    const secs = ((Date.now() - startedAt) / 1e3).toFixed(1);
    const text = `${glyph} ${status} ${secs}s`;
    // Pad to the previous line's length so stale characters are erased.
    const fill = Math.max(0, previousLen - text.length);
    process.stderr.write(`\r${text}${" ".repeat(fill)}`);
    previousLen = text.length;
  };
  paint();
  const timer = setInterval(paint, SPINNER_INTERVAL_MS);
  return {
    update(p) {
      if (p.phase === "scan") {
        status = t("progressScan", { files: p.filesScanned ?? 0 });
      } else if (p.phase === "embed") {
        const done = p.chunksDone ?? 0;
        const total = p.chunksTotal ?? 0;
        const pct = total > 0 ? (done / total * 100).toFixed(0) : "0";
        status = t("progressEmbed", { done, total, pct });
      }
      paint();
    },
    clear() {
      clearInterval(timer);
      if (previousLen > 0) {
        process.stderr.write(`\r${" ".repeat(previousLen)}\r`);
        previousLen = 0;
      }
    }
  };
}
|
|
349
|
+
export {
|
|
350
|
+
indexCommand
|
|
351
|
+
};
|
|
352
|
+
//# sourceMappingURL=commands-JWT2MWVH.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/cli/commands/index.ts","../../src/index/semantic/i18n.ts","../../src/index/semantic/preflight.ts"],"sourcesContent":["/** `reasonix index` — progress writes go to stderr so stdout stays pipeable. */\n\nimport { resolve } from \"node:path\";\nimport { loadIndexConfig, resolveSemanticEmbeddingConfig } from \"../../config.js\";\nimport { buildIndex } from \"../../index/semantic/builder.js\";\nimport type { BuildProgress, BuildResult, SkipBuckets } from \"../../index/semantic/builder.js\";\nimport { t } from \"../../index/semantic/i18n.js\";\nimport { semanticPreflight } from \"../../index/semantic/preflight.js\";\n\nexport interface IndexCommandOptions {\n rebuild?: boolean;\n model?: string;\n dir?: string;\n ollamaUrl?: string;\n yes?: boolean;\n}\n\nexport async function indexCommand(opts: IndexCommandOptions = {}): Promise<void> {\n const root = resolve(opts.dir ?? process.cwd());\n const tty = process.stderr.isTTY === true && process.stdin.isTTY === true;\n const resolved = resolveSemanticEmbeddingConfig();\n const embedding =\n resolved.provider === \"ollama\"\n ? {\n ...resolved,\n model: opts.model ?? resolved.model,\n baseUrl: opts.ollamaUrl ?? resolved.baseUrl,\n }\n : {\n ...resolved,\n model: opts.model ?? resolved.model,\n };\n\n const preflightOk = await semanticPreflight(embedding, {\n interactive: tty && !opts.yes,\n yesToAll: opts.yes ?? false,\n });\n if (!preflightOk) process.exit(1);\n\n const writer = makeProgressWriter(tty);\n\n const t0 = Date.now();\n let result: BuildResult;\n try {\n result = await buildIndex(root, {\n ...embedding,\n rebuild: opts.rebuild,\n indexConfig: loadIndexConfig(),\n onProgress: (p) => writer.update(p),\n });\n } catch (err) {\n writer.clear();\n const msg = err instanceof Error ? 
err.message : String(err);\n process.stderr.write(t(\"indexFailed\", { msg }));\n process.exit(1);\n }\n writer.clear();\n\n const seconds = ((Date.now() - t0) / 1000).toFixed(1);\n const successKey = result.chunksSkipped > 0 ? \"indexSuccessWithSkips\" : \"indexSuccess\";\n process.stderr.write(\n t(successKey, {\n scanned: result.filesScanned,\n changed: result.filesChanged,\n added: result.chunksAdded,\n removed: result.chunksRemoved,\n skipped: result.chunksSkipped,\n seconds,\n }),\n );\n const breakdown = renderSkipBreakdown(result.skipBuckets);\n if (breakdown) process.stderr.write(`${breakdown}\\n`);\n if (result.filesChanged === 0 && !opts.rebuild) {\n process.stderr.write(t(\"indexNothingToDo\"));\n }\n}\n\nfunction renderSkipBreakdown(buckets: SkipBuckets): string {\n const total = Object.values(buckets).reduce((a, b) => a + b, 0);\n if (total === 0) return \"\";\n const parts: string[] = [];\n if (buckets.gitignore) parts.push(`gitignore: ${buckets.gitignore}`);\n if (buckets.pattern) parts.push(`pattern: ${buckets.pattern}`);\n if (buckets.defaultDir) parts.push(`defaultDir: ${buckets.defaultDir}`);\n if (buckets.defaultFile) parts.push(`defaultFile: ${buckets.defaultFile}`);\n if (buckets.binaryExt) parts.push(`binaryExt: ${buckets.binaryExt}`);\n if (buckets.binaryContent) parts.push(`binaryContent: ${buckets.binaryContent}`);\n if (buckets.tooLarge) parts.push(`tooLarge: ${buckets.tooLarge}`);\n if (buckets.readError) parts.push(`readError: ${buckets.readError}`);\n return ` · skipped ${total} files (${parts.join(\", \")})`;\n}\n\ninterface ProgressWriter {\n update(p: BuildProgress): void;\n clear(): void;\n}\n\nconst SPINNER_FRAMES = [\"⠋\", \"⠙\", \"⠹\", \"⠸\", \"⠼\", \"⠴\", \"⠦\", \"⠧\", \"⠇\", \"⠏\"];\nconst SPINNER_INTERVAL_MS = 120;\n\nfunction makeProgressWriter(tty: boolean): ProgressWriter {\n if (!tty) return makeNonTtyWriter();\n return makeTtyWriter();\n}\n\nfunction makeNonTtyWriter(): ProgressWriter {\n let lastPhase: 
BuildProgress[\"phase\"] | null = null;\n let lastChunks = 0;\n return {\n update(p) {\n if (p.phase !== lastPhase) {\n lastPhase = p.phase;\n if (p.phase === \"scan\") {\n process.stderr.write(t(\"progressScanLine\"));\n } else if (p.phase === \"embed\") {\n process.stderr.write(\n t(\"progressEmbedLine\", {\n total: p.chunksTotal ?? 0,\n files: p.filesChanged ?? 0,\n }),\n );\n }\n }\n if (p.phase === \"embed\" && p.chunksDone !== undefined && p.chunksDone - lastChunks >= 50) {\n lastChunks = p.chunksDone;\n process.stderr.write(\n t(\"progressEmbedHeartbeat\", {\n done: p.chunksDone,\n total: p.chunksTotal ?? \"?\",\n }),\n );\n }\n },\n clear() {\n /* non-TTY keeps its accumulated lines */\n },\n };\n}\n\nfunction makeTtyWriter(): ProgressWriter {\n let status = t(\"progressStarting\");\n let lastLineLen = 0;\n let frameIdx = 0;\n const startTs = Date.now();\n\n const repaint = () => {\n const frame = SPINNER_FRAMES[frameIdx % SPINNER_FRAMES.length];\n frameIdx++;\n const elapsed = ((Date.now() - startTs) / 1000).toFixed(1);\n const line = `${frame} ${status} ${elapsed}s`;\n const padded = line + \" \".repeat(Math.max(0, lastLineLen - line.length));\n process.stderr.write(`\\r${padded}`);\n lastLineLen = line.length;\n };\n\n repaint();\n const interval = setInterval(repaint, SPINNER_INTERVAL_MS);\n\n return {\n update(p) {\n if (p.phase === \"scan\") {\n status = t(\"progressScan\", { files: p.filesScanned ?? 0 });\n } else if (p.phase === \"embed\") {\n const done = p.chunksDone ?? 0;\n const total = p.chunksTotal ?? 0;\n const pct = total > 0 ? ((done / total) * 100).toFixed(0) : \"0\";\n status = t(\"progressEmbed\", { done, total, pct });\n }\n repaint();\n },\n clear() {\n clearInterval(interval);\n if (lastLineLen > 0) {\n process.stderr.write(`\\r${\" \".repeat(lastLineLen)}\\r`);\n lastLineLen = 0;\n }\n },\n };\n}\n","/** EN+ZH for semantic-search prompts only; tool descriptions stay English to preserve prompt-cache. 
*/\n\nexport type Locale = \"en\" | \"zh\";\n\nlet cachedLocale: Locale | null = null;\n\nexport function detectLocale(): Locale {\n if (cachedLocale) return cachedLocale;\n const override = (process.env.REASONIX_LANG ?? \"\").toLowerCase();\n if (override === \"zh\" || override === \"en\") {\n cachedLocale = override;\n return cachedLocale;\n }\n const env = process.env.LANG ?? process.env.LC_ALL ?? process.env.LC_MESSAGES ?? \"\";\n if (/^zh[-_]/i.test(env)) {\n cachedLocale = \"zh\";\n return \"zh\";\n }\n try {\n const sys = new Intl.DateTimeFormat().resolvedOptions().locale ?? \"\";\n if (/^zh[-_]/i.test(sys)) {\n cachedLocale = \"zh\";\n return \"zh\";\n }\n } catch {\n /* ignore — fall through to default */\n }\n cachedLocale = \"en\";\n return \"en\";\n}\n\n/** Reset the cached locale. Tests use this; production never needs it. */\nexport function resetLocaleCache(): void {\n cachedLocale = null;\n}\n\n/** Falls back to English so partial dictionary updates never show \"[missing]\". */\nexport function t(key: keyof typeof EN, vars: Record<string, string | number> = {}): string {\n const loc = detectLocale();\n const dict = loc === \"zh\" ? ZH : EN;\n const tpl = dict[key] ?? EN[key];\n return tpl.replace(/\\{(\\w+)\\}/g, (_m, name) => {\n const v = vars[name];\n return v === undefined ? `{${name}}` : String(v);\n });\n}\n\nconst EN = {\n // ── preflight ─────────────────────────────────────────────────────\n ollamaNotFound:\n \"✗ `ollama` not found on PATH.\\n Install from https://ollama.com (one-time, ~150 MB), then retry.\\n\",\n daemonNotReachableHint:\n \"✗ Ollama daemon not reachable. Run `ollama serve` and retry, or pass --yes to start it automatically.\\n\",\n daemonStartConfirm: \"Ollama daemon isn't running. Start `ollama serve` now?\",\n daemonAbortStart: \"✗ aborted — start `ollama serve` yourself and retry.\\n\",\n daemonStarting: \"▸ starting `ollama serve`…\\n\",\n daemonStartTimeout:\n \"✗ daemon didn't come up within 15s. 
Try `ollama serve` in a separate terminal and retry.\\n\",\n daemonReady: \"✓ daemon up{pid}\\n\",\n modelNotPulledHint:\n '✗ embedding model \"{model}\" not pulled. Run `ollama pull {model}` and retry, or pass --yes to pull it automatically.\\n',\n modelPullConfirm:\n 'Embedding model \"{model}\" isn\\'t pulled yet. Pull it now? (~274 MB for nomic-embed-text)',\n modelAbortPull: \"✗ aborted — pull the model yourself and retry.\\n\",\n modelPulling: \"▸ pulling {model}…\\n\",\n modelPullFailed: \"✗ `ollama pull {model}` failed (exit {code}).\\n\",\n modelPulled: \"✓ {model} pulled\\n\",\n\n // ── progress ─────────────────────────────────────────────────────\n // The TTY-mode progress writer paints `<spinner> <status> <elapsed>s`\n // every 120ms. The status itself comes from one of these keys based\n // on the current phase. {files}, {done}, {total}, {pct} are\n // substituted by the writer.\n progressStarting: \"starting…\",\n progressScan: \"scanning project · {files} files\",\n progressEmbed: \"embedding {done}/{total} chunks · {pct}%\",\n progressEmbedHeartbeat: \" {done}/{total}\\n\",\n progressScanLine: \"scanning files…\\n\",\n progressEmbedLine: \"embedding {total} chunks across {files} files…\\n\",\n // Final result line after a successful build.\n indexSuccess:\n \"✓ indexed {scanned} files ({changed} changed, {added} new chunks, {removed} stale removed) in {seconds}s\\n\",\n indexSuccessWithSkips:\n \"✓ indexed {scanned} files ({changed} changed, {added} new chunks, {removed} stale removed, {skipped} skipped due to embed errors) in {seconds}s\\n\",\n indexNothingToDo: \" (nothing to do — re-run with --rebuild to force a full rebuild)\\n\",\n indexFailed: \"✗ index failed: {msg}\\n\",\n\n // ── /semantic slash ──────────────────────────────────────────────\n slashHeader: \"semantic_search status\",\n slashEnabled: \"✓ enabled — index built, tool registered.\",\n slashEnabledDetail: \" index size: {chunks} chunks across {files} files\",\n 
slashEnabledHowto: \" the model will call semantic_search automatically when it fits.\",\n slashIndexMissing: \"✗ no index built yet for this project.\",\n slashHowToBuild: \" to enable, exit Reasonix and run in your shell:\\n reasonix index\",\n slashOllamaMissing: \" prerequisite: install Ollama from https://ollama.com\",\n slashDaemonDown:\n \" Ollama is installed but the daemon isn't running. start it with: ollama serve\",\n slashIndexInfo:\n \" what semantic_search does: cross-language code understanding via local embeddings.\\n better than grep when you describe WHAT something does, not WHICH token to find.\",\n} as const;\n\nconst ZH: Partial<Record<keyof typeof EN, string>> = {\n ollamaNotFound:\n \"✗ 未找到 `ollama`。\\n 请访问 https://ollama.com 安装(一次性,约 150 MB),然后重试。\\n\",\n daemonNotReachableHint:\n \"✗ Ollama 守护进程未启动。请运行 `ollama serve` 后重试,或加 --yes 让我自动启动。\\n\",\n daemonStartConfirm: \"Ollama 守护进程未运行。现在启动 `ollama serve` 吗?\",\n daemonAbortStart: \"✗ 已取消——请自行运行 `ollama serve` 后重试。\\n\",\n daemonStarting: \"▸ 正在启动 `ollama serve`…\\n\",\n daemonStartTimeout: \"✗ 15 秒内守护进程未就绪。请在另一个终端运行 `ollama serve` 后重试。\\n\",\n daemonReady: \"✓ 守护进程已启动{pid}\\n\",\n modelNotPulledHint:\n '✗ 嵌入模型 \"{model}\" 未下载。请运行 `ollama pull {model}` 后重试,或加 --yes 让我自动下载。\\n',\n modelPullConfirm: '嵌入模型 \"{model}\" 还未下载。现在下载吗?(nomic-embed-text 约 274 MB)',\n modelAbortPull: \"✗ 已取消——请自行下载模型后重试。\\n\",\n modelPulling: \"▸ 正在下载 {model}…\\n\",\n modelPullFailed: \"✗ `ollama pull {model}` 失败(退出码 {code})。\\n\",\n modelPulled: \"✓ {model} 下载完成\\n\",\n\n progressStarting: \"正在启动…\",\n progressScan: \"扫描项目 · 已扫描 {files} 个文件\",\n progressEmbed: \"正在向量化 {done}/{total} 个片段 · {pct}%\",\n progressEmbedHeartbeat: \" {done}/{total}\\n\",\n progressScanLine: \"正在扫描文件…\\n\",\n progressEmbedLine: \"正在向量化 {total} 个片段(涉及 {files} 个文件)…\\n\",\n indexSuccess:\n \"✓ 已建立索引:扫描 {scanned} 个文件({changed} 个有变化,新增 {added} 个片段,移除 {removed} 个过期);耗时 {seconds}s\\n\",\n indexSuccessWithSkips:\n \"✓ 已建立索引:扫描 {scanned} 个文件({changed} 
个有变化,新增 {added} 个片段,移除 {removed} 个过期,跳过 {skipped} 个嵌入失败的片段);耗时 {seconds}s\\n\",\n indexNothingToDo: \" (没有变化——加 --rebuild 强制重建)\\n\",\n indexFailed: \"✗ 建立索引失败:{msg}\\n\",\n\n slashHeader: \"semantic_search 状态\",\n slashEnabled: \"✓ 已启用——索引已建好,工具已注册。\",\n slashEnabledDetail: \" 索引规模:{chunks} 个片段,{files} 个文件\",\n slashEnabledHowto: \" 模型在合适的时候会自动调用 semantic_search。\",\n slashIndexMissing: \"✗ 当前项目还没有索引。\",\n slashHowToBuild: \" 启用方式:退出 Reasonix,在终端运行:\\n reasonix index\",\n slashOllamaMissing: \" 前置依赖:从 https://ollama.com 安装 Ollama\",\n slashDaemonDown: \" 已装 Ollama 但守护进程未启动,请运行:ollama serve\",\n slashIndexInfo:\n ' semantic_search 用本地 embedding 做跨语言代码理解。\\n 当你描述\"做什么\"而不是具体 token 时,比 grep 更好。',\n};\n","import { stdin, stdout } from \"node:process\";\nimport { createInterface } from \"node:readline/promises\";\nimport type { ResolvedEmbeddingConfig } from \"../../config.js\";\nimport { t } from \"./i18n.js\";\nimport { checkOllamaStatus, pullOllamaModel, startOllamaDaemon } from \"./ollama-launcher.js\";\n\nexport interface PreflightOptions {\n model: string;\n baseUrl?: string | undefined;\n interactive: boolean;\n yesToAll: boolean;\n log?: (line: string) => void;\n}\n\nexport async function ollamaPreflight(opts: PreflightOptions): Promise<boolean> {\n const log = opts.log ?? 
((line: string) => process.stderr.write(line));\n const status = await checkOllamaStatus(opts.model, opts.baseUrl);\n\n if (!status.binaryFound) {\n log(t(\"ollamaNotFound\"));\n return false;\n }\n\n if (!status.daemonRunning) {\n if (!opts.interactive && !opts.yesToAll) {\n log(t(\"daemonNotReachableHint\"));\n return false;\n }\n const ok = opts.yesToAll || (await confirm(t(\"daemonStartConfirm\"), true));\n if (!ok) {\n log(t(\"daemonAbortStart\"));\n return false;\n }\n log(t(\"daemonStarting\"));\n const started = await startOllamaDaemon({ baseUrl: opts.baseUrl, timeoutMs: 15_000 });\n if (!started.ready) {\n log(t(\"daemonStartTimeout\"));\n return false;\n }\n log(t(\"daemonReady\", { pid: started.pid ? ` (pid ${started.pid})` : \"\" }));\n }\n\n const after = status.daemonRunning ? status : await checkOllamaStatus(opts.model, opts.baseUrl);\n\n if (!after.modelPulled) {\n if (!opts.interactive && !opts.yesToAll) {\n log(t(\"modelNotPulledHint\", { model: opts.model }));\n return false;\n }\n const ok = opts.yesToAll || (await confirm(t(\"modelPullConfirm\", { model: opts.model }), true));\n if (!ok) {\n log(t(\"modelAbortPull\"));\n return false;\n }\n log(t(\"modelPulling\", { model: opts.model }));\n const ESC = String.fromCharCode(0x1b);\n const ANSI_CSI = new RegExp(`${ESC}\\\\[[0-9;]*[A-Za-z]`, \"g\");\n const code = await pullOllamaModel(opts.model, {\n onLine: (line) => {\n const cleaned = line.replace(ANSI_CSI, \"\").trim();\n if (cleaned.length === 0) return;\n log(` ${cleaned}\\n`);\n },\n });\n if (code !== 0) {\n log(t(\"modelPullFailed\", { model: opts.model, code }));\n return false;\n }\n log(t(\"modelPulled\", { model: opts.model }));\n }\n\n return true;\n}\n\nexport async function semanticPreflight(\n config: ResolvedEmbeddingConfig,\n opts: Omit<PreflightOptions, \"model\" | \"baseUrl\">,\n): Promise<boolean> {\n if (config.provider === \"openai-compat\") return true;\n return await ollamaPreflight({\n ...opts,\n model: config.model,\n 
baseUrl: config.baseUrl,\n });\n}\n\nexport async function confirm(question: string, defaultYes: boolean): Promise<boolean> {\n const suffix = defaultYes ? \"[Y/n]\" : \"[y/N]\";\n const rl = createInterface({ input: stdin, output: stdout });\n try {\n const raw = (await rl.question(`${question} ${suffix} `)).trim().toLowerCase();\n if (raw === \"\") return defaultYes;\n return raw === \"y\" || raw === \"yes\";\n } finally {\n rl.close();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;AAEA,SAAS,eAAe;;;ACExB,IAAI,eAA8B;AAE3B,SAAS,eAAuB;AACrC,MAAI,aAAc,QAAO;AACzB,QAAM,YAAY,QAAQ,IAAI,iBAAiB,IAAI,YAAY;AAC/D,MAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C,mBAAe;AACf,WAAO;AAAA,EACT;AACA,QAAM,MAAM,QAAQ,IAAI,QAAQ,QAAQ,IAAI,UAAU,QAAQ,IAAI,eAAe;AACjF,MAAI,WAAW,KAAK,GAAG,GAAG;AACxB,mBAAe;AACf,WAAO;AAAA,EACT;AACA,MAAI;AACF,UAAM,MAAM,IAAI,KAAK,eAAe,EAAE,gBAAgB,EAAE,UAAU;AAClE,QAAI,WAAW,KAAK,GAAG,GAAG;AACxB,qBAAe;AACf,aAAO;AAAA,IACT;AAAA,EACF,QAAQ;AAAA,EAER;AACA,iBAAe;AACf,SAAO;AACT;AAQO,SAAS,EAAE,KAAsB,OAAwC,CAAC,GAAW;AAC1F,QAAM,MAAM,aAAa;AACzB,QAAM,OAAO,QAAQ,OAAO,KAAK;AACjC,QAAM,MAAM,KAAK,GAAG,KAAK,GAAG,GAAG;AAC/B,SAAO,IAAI,QAAQ,cAAc,CAAC,IAAI,SAAS;AAC7C,UAAM,IAAI,KAAK,IAAI;AACnB,WAAO,MAAM,SAAY,IAAI,IAAI,MAAM,OAAO,CAAC;AAAA,EACjD,CAAC;AACH;AAEA,IAAM,KAAK;AAAA;AAAA,EAET,gBACE;AAAA,EACF,wBACE;AAAA,EACF,oBAAoB;AAAA,EACpB,kBAAkB;AAAA,EAClB,gBAAgB;AAAA,EAChB,oBACE;AAAA,EACF,aAAa;AAAA,EACb,oBACE;AAAA,EACF,kBACE;AAAA,EACF,gBAAgB;AAAA,EAChB,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOb,kBAAkB;AAAA,EAClB,cAAc;AAAA,EACd,eAAe;AAAA,EACf,wBAAwB;AAAA,EACxB,kBAAkB;AAAA,EAClB,mBAAmB;AAAA;AAAA,EAEnB,cACE;AAAA,EACF,uBACE;AAAA,EACF,kBAAkB;AAAA,EAClB,aAAa;AAAA;AAAA,EAGb,aAAa;AAAA,EACb,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,mBAAmB;AAAA,EACnB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,oBAAoB;AAAA,EACpB,iBACE;AAAA,EACF,gBACE;AACJ;AAEA,IAAM,KAA+C;AAAA,EACnD,gBACE;AAAA,EACF,wBACE;AAAA,EACF,oBAAoB;AAAA,EACpB,kBAAkB;AAAA,EAClB,gBAAgB;AAAA,EAChB,oBAAoB;AAAA,EACpB,aAAa;AAAA,EACb,oBACE;AAAA,EACF,kBAAkB;AAAA,EAClB,gBAAgB
;AAAA,EAChB,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,aAAa;AAAA,EAEb,kBAAkB;AAAA,EAClB,cAAc;AAAA,EACd,eAAe;AAAA,EACf,wBAAwB;AAAA,EACxB,kBAAkB;AAAA,EAClB,mBAAmB;AAAA,EACnB,cACE;AAAA,EACF,uBACE;AAAA,EACF,kBAAkB;AAAA,EAClB,aAAa;AAAA,EAEb,aAAa;AAAA,EACb,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,mBAAmB;AAAA,EACnB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,oBAAoB;AAAA,EACpB,iBAAiB;AAAA,EACjB,gBACE;AACJ;;;AC9IA,SAAS,OAAO,cAAc;AAC9B,SAAS,uBAAuB;AAahC,eAAsB,gBAAgB,MAA0C;AAC9E,QAAM,MAAM,KAAK,QAAQ,CAAC,SAAiB,QAAQ,OAAO,MAAM,IAAI;AACpE,QAAM,SAAS,MAAM,kBAAkB,KAAK,OAAO,KAAK,OAAO;AAE/D,MAAI,CAAC,OAAO,aAAa;AACvB,QAAI,EAAE,gBAAgB,CAAC;AACvB,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,OAAO,eAAe;AACzB,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,UAAU;AACvC,UAAI,EAAE,wBAAwB,CAAC;AAC/B,aAAO;AAAA,IACT;AACA,UAAM,KAAK,KAAK,YAAa,MAAM,QAAQ,EAAE,oBAAoB,GAAG,IAAI;AACxE,QAAI,CAAC,IAAI;AACP,UAAI,EAAE,kBAAkB,CAAC;AACzB,aAAO;AAAA,IACT;AACA,QAAI,EAAE,gBAAgB,CAAC;AACvB,UAAM,UAAU,MAAM,kBAAkB,EAAE,SAAS,KAAK,SAAS,WAAW,KAAO,CAAC;AACpF,QAAI,CAAC,QAAQ,OAAO;AAClB,UAAI,EAAE,oBAAoB,CAAC;AAC3B,aAAO;AAAA,IACT;AACA,QAAI,EAAE,eAAe,EAAE,KAAK,QAAQ,MAAM,SAAS,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AAAA,EAC3E;AAEA,QAAM,QAAQ,OAAO,gBAAgB,SAAS,MAAM,kBAAkB,KAAK,OAAO,KAAK,OAAO;AAE9F,MAAI,CAAC,MAAM,aAAa;AACtB,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,UAAU;AACvC,UAAI,EAAE,sBAAsB,EAAE,OAAO,KAAK,MAAM,CAAC,CAAC;AAClD,aAAO;AAAA,IACT;AACA,UAAM,KAAK,KAAK,YAAa,MAAM,QAAQ,EAAE,oBAAoB,EAAE,OAAO,KAAK,MAAM,CAAC,GAAG,IAAI;AAC7F,QAAI,CAAC,IAAI;AACP,UAAI,EAAE,gBAAgB,CAAC;AACvB,aAAO;AAAA,IACT;AACA,QAAI,EAAE,gBAAgB,EAAE,OAAO,KAAK,MAAM,CAAC,CAAC;AAC5C,UAAM,MAAM,OAAO,aAAa,EAAI;AACpC,UAAM,WAAW,IAAI,OAAO,GAAG,GAAG,sBAAsB,GAAG;AAC3D,UAAM,OAAO,MAAM,gBAAgB,KAAK,OAAO;AAAA,MAC7C,QAAQ,CAAC,SAAS;AAChB,cAAM,UAAU,KAAK,QAAQ,UAAU,EAAE,EAAE,KAAK;AAChD,YAAI,QAAQ,WAAW,EAAG;AAC1B,YAAI,KAAK,OAAO;AAAA,CAAI;AAAA,MACtB;AAAA,IACF,CAAC;AACD,QAAI,SAAS,GAAG;AACd,UAAI,EAAE,mBAAmB,EAAE,OAAO,KAAK,OAAO,KAAK,CAAC,CAAC;AACrD,aAAO;AAAA,IACT;AACA,QAAI,EAAE,eAAe,EAAE,OAAO,KAAK,MAAM,CAAC,CAAC;AAAA,EAC7C;AAEA,SAAO;AACT;AAEA,eAAsB,kBACpB,QACA,MACkB;AACl
B,MAAI,OAAO,aAAa,gBAAiB,QAAO;AAChD,SAAO,MAAM,gBAAgB;AAAA,IAC3B,GAAG;AAAA,IACH,OAAO,OAAO;AAAA,IACd,SAAS,OAAO;AAAA,EAClB,CAAC;AACH;AAEA,eAAsB,QAAQ,UAAkB,YAAuC;AACrF,QAAM,SAAS,aAAa,UAAU;AACtC,QAAM,KAAK,gBAAgB,EAAE,OAAO,OAAO,QAAQ,OAAO,CAAC;AAC3D,MAAI;AACF,UAAM,OAAO,MAAM,GAAG,SAAS,GAAG,QAAQ,IAAI,MAAM,GAAG,GAAG,KAAK,EAAE,YAAY;AAC7E,QAAI,QAAQ,GAAI,QAAO;AACvB,WAAO,QAAQ,OAAO,QAAQ;AAAA,EAChC,UAAE;AACA,OAAG,MAAM;AAAA,EACX;AACF;;;AF/EA,eAAsB,aAAa,OAA4B,CAAC,GAAkB;AAChF,QAAM,OAAO,QAAQ,KAAK,OAAO,QAAQ,IAAI,CAAC;AAC9C,QAAM,MAAM,QAAQ,OAAO,UAAU,QAAQ,QAAQ,MAAM,UAAU;AACrE,QAAM,WAAW,+BAA+B;AAChD,QAAM,YACJ,SAAS,aAAa,WAClB;AAAA,IACE,GAAG;AAAA,IACH,OAAO,KAAK,SAAS,SAAS;AAAA,IAC9B,SAAS,KAAK,aAAa,SAAS;AAAA,EACtC,IACA;AAAA,IACE,GAAG;AAAA,IACH,OAAO,KAAK,SAAS,SAAS;AAAA,EAChC;AAEN,QAAM,cAAc,MAAM,kBAAkB,WAAW;AAAA,IACrD,aAAa,OAAO,CAAC,KAAK;AAAA,IAC1B,UAAU,KAAK,OAAO;AAAA,EACxB,CAAC;AACD,MAAI,CAAC,YAAa,SAAQ,KAAK,CAAC;AAEhC,QAAM,SAAS,mBAAmB,GAAG;AAErC,QAAM,KAAK,KAAK,IAAI;AACpB,MAAI;AACJ,MAAI;AACF,aAAS,MAAM,WAAW,MAAM;AAAA,MAC9B,GAAG;AAAA,MACH,SAAS,KAAK;AAAA,MACd,aAAa,gBAAgB;AAAA,MAC7B,YAAY,CAAC,MAAM,OAAO,OAAO,CAAC;AAAA,IACpC,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,WAAO,MAAM;AACb,UAAM,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC3D,YAAQ,OAAO,MAAM,EAAE,eAAe,EAAE,IAAI,CAAC,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AACA,SAAO,MAAM;AAEb,QAAM,YAAY,KAAK,IAAI,IAAI,MAAM,KAAM,QAAQ,CAAC;AACpD,QAAM,aAAa,OAAO,gBAAgB,IAAI,0BAA0B;AACxE,UAAQ,OAAO;AAAA,IACb,EAAE,YAAY;AAAA,MACZ,SAAS,OAAO;AAAA,MAChB,SAAS,OAAO;AAAA,MAChB,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,SAAS,OAAO;AAAA,MAChB;AAAA,IACF,CAAC;AAAA,EACH;AACA,QAAM,YAAY,oBAAoB,OAAO,WAAW;AACxD,MAAI,UAAW,SAAQ,OAAO,MAAM,GAAG,SAAS;AAAA,CAAI;AACpD,MAAI,OAAO,iBAAiB,KAAK,CAAC,KAAK,SAAS;AAC9C,YAAQ,OAAO,MAAM,EAAE,kBAAkB,CAAC;AAAA,EAC5C;AACF;AAEA,SAAS,oBAAoB,SAA8B;AACzD,QAAM,QAAQ,OAAO,OAAO,OAAO,EAAE,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAC9D,MAAI,UAAU,EAAG,QAAO;AACxB,QAAM,QAAkB,CAAC;AACzB,MAAI,QAAQ,UAAW,OAAM,KAAK,cAAc,QAAQ,SAAS,EAAE;AACnE,MAAI,QAAQ,QAAS,OAAM,KAAK,YAAY,QAAQ,OAAO,EAAE;AAC7D,MAAI,QAAQ,WAAY,O
AAM,KAAK,eAAe,QAAQ,UAAU,EAAE;AACtE,MAAI,QAAQ,YAAa,OAAM,KAAK,gBAAgB,QAAQ,WAAW,EAAE;AACzE,MAAI,QAAQ,UAAW,OAAM,KAAK,cAAc,QAAQ,SAAS,EAAE;AACnE,MAAI,QAAQ,cAAe,OAAM,KAAK,kBAAkB,QAAQ,aAAa,EAAE;AAC/E,MAAI,QAAQ,SAAU,OAAM,KAAK,aAAa,QAAQ,QAAQ,EAAE;AAChE,MAAI,QAAQ,UAAW,OAAM,KAAK,cAAc,QAAQ,SAAS,EAAE;AACnE,SAAO,kBAAe,KAAK,WAAW,MAAM,KAAK,IAAI,CAAC;AACxD;AAOA,IAAM,iBAAiB,CAAC,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,QAAG;AACxE,IAAM,sBAAsB;AAE5B,SAAS,mBAAmB,KAA8B;AACxD,MAAI,CAAC,IAAK,QAAO,iBAAiB;AAClC,SAAO,cAAc;AACvB;AAEA,SAAS,mBAAmC;AAC1C,MAAI,YAA2C;AAC/C,MAAI,aAAa;AACjB,SAAO;AAAA,IACL,OAAO,GAAG;AACR,UAAI,EAAE,UAAU,WAAW;AACzB,oBAAY,EAAE;AACd,YAAI,EAAE,UAAU,QAAQ;AACtB,kBAAQ,OAAO,MAAM,EAAE,kBAAkB,CAAC;AAAA,QAC5C,WAAW,EAAE,UAAU,SAAS;AAC9B,kBAAQ,OAAO;AAAA,YACb,EAAE,qBAAqB;AAAA,cACrB,OAAO,EAAE,eAAe;AAAA,cACxB,OAAO,EAAE,gBAAgB;AAAA,YAC3B,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AACA,UAAI,EAAE,UAAU,WAAW,EAAE,eAAe,UAAa,EAAE,aAAa,cAAc,IAAI;AACxF,qBAAa,EAAE;AACf,gBAAQ,OAAO;AAAA,UACb,EAAE,0BAA0B;AAAA,YAC1B,MAAM,EAAE;AAAA,YACR,OAAO,EAAE,eAAe;AAAA,UAC1B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAEA,SAAS,gBAAgC;AACvC,MAAI,SAAS,EAAE,kBAAkB;AACjC,MAAI,cAAc;AAClB,MAAI,WAAW;AACf,QAAM,UAAU,KAAK,IAAI;AAEzB,QAAM,UAAU,MAAM;AACpB,UAAM,QAAQ,eAAe,WAAW,eAAe,MAAM;AAC7D;AACA,UAAM,YAAY,KAAK,IAAI,IAAI,WAAW,KAAM,QAAQ,CAAC;AACzD,UAAM,OAAO,GAAG,KAAK,IAAI,MAAM,KAAK,OAAO;AAC3C,UAAM,SAAS,OAAO,IAAI,OAAO,KAAK,IAAI,GAAG,cAAc,KAAK,MAAM,CAAC;AACvE,YAAQ,OAAO,MAAM,KAAK,MAAM,EAAE;AAClC,kBAAc,KAAK;AAAA,EACrB;AAEA,UAAQ;AACR,QAAM,WAAW,YAAY,SAAS,mBAAmB;AAEzD,SAAO;AAAA,IACL,OAAO,GAAG;AACR,UAAI,EAAE,UAAU,QAAQ;AACtB,iBAAS,EAAE,gBAAgB,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAAA,MAC3D,WAAW,EAAE,UAAU,SAAS;AAC9B,cAAM,OAAO,EAAE,cAAc;AAC7B,cAAM,QAAQ,EAAE,eAAe;AAC/B,cAAM,MAAM,QAAQ,KAAM,OAAO,QAAS,KAAK,QAAQ,CAAC,IAAI;AAC5D,iBAAS,EAAE,iBAAiB,EAAE,MAAM,OAAO,IAAI,CAAC;AAAA,MAClD;AACA,cAAQ;AAAA,IACV;AAAA,IACA,QAAQ;AACN,oBAAc,QAAQ;AACtB,UAAI,cAAc,GAAG;AACnB,gBAAQ,OAAO,MAAM,KAAK,IAAI,OAAO,WAAW,CAAC,IAAI;AACrD,sBAAc;AAAA,M
AChB;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
|