nemoris 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -184,7 +184,7 @@ Requires Node.js >= 22.
184
184
  git clone https://github.com/amzer24/nemoris.git
185
185
  cd nemoris
186
186
  npm install
187
- npm test # 1179 tests
187
+ npm test # 1403 tests
188
188
  npm run test:e2e # End-to-end tests
189
189
  ```
190
190
 
package/SECURITY.md CHANGED
@@ -38,6 +38,22 @@ We follow responsible disclosure practices. If you report a vulnerability:
38
38
  - We will credit you in the release notes (unless you prefer anonymity)
39
39
  - We will coordinate disclosure timing with you
40
40
 
41
+ ## Deployment Boundaries
42
+
43
+ Nemoris is a single-operator, single-user runtime designed to run on your own machine.
44
+
45
+ It is not designed to be:
46
+
47
+ - a hardened multi-tenant sandbox
48
+ - a public-facing web service
49
+ - a shared server runtime
50
+
51
+ For recovery procedures, see [docs/RECOVERY-FLOWS.md](docs/RECOVERY-FLOWS.md).
52
+
53
+ ## Vulnerability Tracking
54
+
55
+ Known vulnerabilities are tracked via GitHub Security Advisories on this repository.
56
+
41
57
  ## Dependencies
42
58
 
43
59
  Nemoris keeps dependencies minimal by design. We monitor for known vulnerabilities via `npm audit` and update promptly.
@@ -13,7 +13,7 @@ adapter = "openclaw_cli"
13
13
  enabled = true
14
14
  channel = "telegram"
15
15
  account_id = "default"
16
- chat_id = "7781763328"
16
+ chat_id = "YOUR_CHAT_ID"
17
17
  silent = true
18
18
  dry_run = true
19
19
 
@@ -22,7 +22,7 @@ adapter = "openclaw_cli"
22
22
  enabled = false
23
23
  channel = "telegram"
24
24
  account_id = "default"
25
- chat_id = "7781763328"
25
+ chat_id = "YOUR_CHAT_ID"
26
26
  silent = true
27
27
  dry_run = false
28
28
 
@@ -43,7 +43,7 @@ adapter = "telegram"
43
43
  enabled = false
44
44
  channel = "telegram"
45
45
  account_id = "default"
46
- chat_id = "7781763328"
46
+ chat_id = "YOUR_CHAT_ID"
47
47
  bot_token_env = "NEMORIS_TELEGRAM_BOT_TOKEN"
48
48
 
49
49
  [profiles.standalone_operator]
@@ -102,8 +102,8 @@ allowed_failure_classes = ["timeout", "provider_loading"]
102
102
  bot_token_env = "NEMORIS_TELEGRAM_BOT_TOKEN"
103
103
  polling_mode = true
104
104
  webhook_url = ""
105
- operator_chat_id = "7781763328"
106
- authorized_chat_ids = ["7781763328"]
105
+ operator_chat_id = "YOUR_CHAT_ID"
106
+ authorized_chat_ids = ["YOUR_CHAT_ID"]
107
107
  default_agent = "main"
108
108
 
109
109
  [slack]
@@ -63,7 +63,7 @@ Check if daemon is running:
63
63
  ```
64
64
  Daemon is running. Send any message to @kodi_nemoris_bot on Telegram...
65
65
  Waiting for your message...
66
- ✓ Found you chat_id: 7781763328, @leeUsername
66
+ ✓ Found you chat_id: YOUR_CHAT_ID, @leeUsername
67
67
  ```
68
68
 
69
69
  - Poll the SQLite state store (`state/active.db`) for the first interactive job row where `source = 'telegram'`
@@ -77,7 +77,7 @@ This avoids any conflict with the daemon's getUpdates polling connection. No `de
77
77
 
78
78
  ```
79
79
  Send any message to @kodi_nemoris_bot on Telegram, then press Enter...
80
- ✓ Found you chat_id: 7781763328, @leeUsername
80
+ ✓ Found you chat_id: YOUR_CHAT_ID, @leeUsername
81
81
  ```
82
82
 
83
83
  - Reuses existing `whoami(botToken)` function (deleteWebhook → getUpdates → extract chat)
@@ -110,8 +110,8 @@ Writes the `[telegram]` section to `config/runtime.toml`:
110
110
  bot_token_env = "NEMORIS_TELEGRAM_BOT_TOKEN"
111
111
  polling_mode = true
112
112
  webhook_url = ""
113
- operator_chat_id = "7781763328"
114
- authorized_chat_ids = ["7781763328"]
113
+ operator_chat_id = "YOUR_CHAT_ID"
114
+ authorized_chat_ids = ["YOUR_CHAT_ID"]
115
115
  default_agent = "kodi"
116
116
  ```
117
117
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "nemoris",
3
- "version": "0.1.1",
3
+ "version": "0.1.3",
4
4
  "type": "module",
5
5
  "description": "Personal AI agent runtime — persistent memory, delivery guarantees, task contracts, self-healing. Local-first, no cloud.",
6
6
  "license": "MIT",
@@ -6,6 +6,7 @@
6
6
  */
7
7
 
8
8
  import fs from "node:fs";
9
+ import path from "node:path";
9
10
  import os from "node:os";
10
11
  import { execFile } from "node:child_process";
11
12
  import { promisify } from "node:util";
@@ -528,3 +529,80 @@ export async function runDoctor(
528
529
 
529
530
  return results;
530
531
  }
532
+
533
+ export function quickValidateConfig(installDir) {
534
+ const errors = [];
535
+ const suggestions = [];
536
+ const configDir = path.join(installDir, "config");
537
+
538
+ if (!fs.existsSync(path.join(configDir, "runtime.toml"))) {
539
+ errors.push("Missing config/runtime.toml");
540
+ suggestions.push("Run `nemoris setup` to regenerate config files.");
541
+ }
542
+
543
+ if (!fs.existsSync(path.join(configDir, "router.toml"))) {
544
+ errors.push("Missing config/router.toml");
545
+ suggestions.push("Run `nemoris setup` to regenerate the router.");
546
+ }
547
+
548
+ const providersDir = path.join(configDir, "providers");
549
+ if (!fs.existsSync(providersDir)) {
550
+ errors.push("No provider configs found");
551
+ suggestions.push("Run `nemoris setup` to configure a provider.");
552
+ } else {
553
+ const files = fs.readdirSync(providersDir).filter((f) => f.endsWith(".toml"));
554
+ if (files.length === 0) {
555
+ errors.push("No provider configs found in config/providers/");
556
+ suggestions.push("Run `nemoris setup` to configure a provider.");
557
+ }
558
+ for (const file of files) {
559
+ try {
560
+ const content = fs.readFileSync(path.join(providersDir, file), "utf8");
561
+ const authRefMatch = content.match(/auth_ref\s*=\s*"([^"]+)"/);
562
+ if (authRefMatch) {
563
+ const authRef = authRefMatch[1];
564
+ if (!authRef.startsWith("env:") && !authRef.startsWith("profile:")) {
565
+ errors.push(`Invalid auth_ref in ${file}: "${authRef}" (must start with env: or profile:)`);
566
+ suggestions.push(`Fix auth_ref in config/providers/${file}`);
567
+ } else if (authRef.startsWith("env:")) {
568
+ const envName = authRef.slice(4);
569
+ if (!process.env[envName]) {
570
+ errors.push(`Missing env var ${envName} referenced by ${file}`);
571
+ suggestions.push(`Set ${envName} in your .env file or environment.`);
572
+ }
573
+ }
574
+ }
575
+ } catch {
576
+ errors.push(`Cannot read ${file}`);
577
+ }
578
+ }
579
+ }
580
+
581
+ if (!fs.existsSync(path.join(configDir, "identity"))) {
582
+ errors.push("Missing config/identity/ directory");
583
+ suggestions.push("Run `nemoris setup` to create identity files.");
584
+ }
585
+
586
+ return { valid: errors.length === 0, errors, suggestions };
587
+ }
588
+
589
+ export function suggestRepairs(results) {
590
+ const repairs = [];
591
+ for (const error of results.errors) {
592
+ if (error.includes("No provider configs")) {
593
+ repairs.push({ error, fix: "nemoris setup", description: "Run setup to configure a provider" });
594
+ } else if (error.includes("Missing config/runtime.toml")) {
595
+ repairs.push({ error, fix: "nemoris setup", description: "Run setup to regenerate config files" });
596
+ } else if (error.includes("Missing config/router.toml")) {
597
+ repairs.push({ error, fix: "nemoris setup", description: "Run setup to regenerate the router" });
598
+ } else if (error.includes("Invalid auth_ref")) {
599
+ repairs.push({ error, fix: "Edit the provider TOML file", description: "Fix auth_ref to use env: or profile: prefix" });
600
+ } else if (error.includes("Missing env var")) {
601
+ const envVar = error.match(/Missing env var (\S+)/)?.[1] || "the referenced variable";
602
+ repairs.push({ error, fix: `Set ${envVar} in .env`, description: `Add ${envVar}=your_key to your .env file` });
603
+ } else if (error.includes("Missing config/identity")) {
604
+ repairs.push({ error, fix: "nemoris setup", description: "Run setup to create identity files" });
605
+ }
606
+ }
607
+ return repairs;
608
+ }
@@ -32,6 +32,19 @@ export function readLock(installDir) {
32
32
  }
33
33
  }
34
34
 
35
+ export function writeWizardMetadata(installDir, metadata) {
36
+ const lock = readLock(installDir) || {};
37
+ const updated = {
38
+ ...lock,
39
+ lastRunAt: metadata.lastRunAt || new Date().toISOString(),
40
+ lastRunVersion: metadata.lastRunVersion || "",
41
+ lastRunCommand: metadata.lastRunCommand || "setup",
42
+ lastRunMode: metadata.lastRunMode || "quickstart",
43
+ lastRunFlow: metadata.lastRunFlow || "interactive",
44
+ };
45
+ writeLock(installDir, updated);
46
+ }
47
+
35
48
  export function deleteLock(installDir) {
36
49
  const p = lockPath(installDir);
37
50
  try {
@@ -0,0 +1,83 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+
4
+ /**
5
+ * Writes the model catalog to state/models.json.
6
+ * @param {string} installDir
7
+ * @param {string} agentId
8
+ * @param {Array<object>} models
9
+ */
10
+ export function writeModelsCatalog(installDir, agentId, models) {
11
+ const stateDir = path.join(installDir, "state");
12
+ fs.mkdirSync(stateDir, { recursive: true, mode: 0o700 });
13
+ const catalogPath = path.join(stateDir, "models.json");
14
+ const catalog = {
15
+ version: 1,
16
+ agentId,
17
+ updatedAt: new Date().toISOString(),
18
+ models: models.map((m) => ({
19
+ id: String(m.id || ""),
20
+ provider: String(m.provider || ""),
21
+ displayName: String(m.displayName || m.name || m.id || ""),
22
+ contextWindow: typeof m.contextWindow === "number" && m.contextWindow > 0 ? m.contextWindow : undefined,
23
+ reasoning: typeof m.reasoning === "boolean" ? m.reasoning : undefined,
24
+ inputTypes: Array.isArray(m.inputTypes) ? m.inputTypes : undefined,
25
+ costTier: m.costTier || undefined,
26
+ role: m.role || undefined,
27
+ })),
28
+ };
29
+ const content = JSON.stringify(catalog, null, 2) + "\n";
30
+ const tempPath = `${catalogPath}.${process.pid}.${Date.now()}.tmp`;
31
+ fs.writeFileSync(tempPath, content, { encoding: "utf8", mode: 0o600 });
32
+ fs.renameSync(tempPath, catalogPath);
33
+ }
34
+
35
+ /**
36
+ * Reads the model catalog from state/models.json.
37
+ * @param {string} installDir
38
+ * @returns {{ version: number, agentId: string, updatedAt: string, models: Array<object> } | null}
39
+ */
40
+ export function readModelsCatalog(installDir) {
41
+ const catalogPath = path.join(installDir, "state", "models.json");
42
+ try {
43
+ const raw = fs.readFileSync(catalogPath, "utf8");
44
+ const parsed = JSON.parse(raw);
45
+ if (!parsed || !Array.isArray(parsed.models)) return null;
46
+ return parsed;
47
+ } catch {
48
+ return null;
49
+ }
50
+ }
51
+
52
+ /**
53
+ * Merges curated preset metadata with selected model IDs.
54
+ * @param {Array<object>} presets - curated presets with { id, label, description }
55
+ * @param {Array<object>} fetchedModels - fetched models with { id, contextWindow?, name? }
56
+ * @param {Array<string>} selectedIds - user-selected model IDs
57
+ * @param {string} provider - provider name
58
+ * @returns {Array<object>} enriched model entries
59
+ */
60
+ export function mergeModelMetadata(presets, fetchedModels, selectedIds, provider) {
61
+ const presetMap = new Map(presets.map((p) => [p.id, p]));
62
+ const fetchedMap = new Map(fetchedModels.map((m) => [m.id, m]));
63
+ const MODEL_ROLE_ORDER = ["cheap_interactive", "fallback", "manual_bump"];
64
+
65
+ return selectedIds.map((id, index) => {
66
+ const preset = presetMap.get(id);
67
+ const fetched = fetchedMap.get(id);
68
+ const displayId = id
69
+ .replace(/^openrouter\//, "")
70
+ .replace(/^openai-codex\//, "")
71
+ .replace(/^anthropic\//, "");
72
+
73
+ return {
74
+ id,
75
+ provider,
76
+ displayName: preset?.label || fetched?.name || displayId,
77
+ contextWindow: fetched?.contextWindow || undefined,
78
+ reasoning: fetched?.reasoning || undefined,
79
+ inputTypes: fetched?.inputTypes || undefined,
80
+ role: MODEL_ROLE_ORDER[index] || MODEL_ROLE_ORDER.at(-1),
81
+ };
82
+ });
83
+ }
@@ -169,20 +169,31 @@ export function detectProviderOptions({ env = process.env, ollamaResult = { ok:
169
169
  {
170
170
  value: "anthropic",
171
171
  label: "Anthropic",
172
- hint: anthropic.value ? "API key found in env" : "API key required",
172
+ hint: anthropic.value ? "API key found in env" : "Setup token or API key",
173
173
  detected: Boolean(anthropic.value),
174
+ authMethods: [
175
+ { value: "token", label: "Setup Token", hint: "From `claude setup-token`" },
176
+ { value: "api_key", label: "API Key", hint: "From console.anthropic.com" },
177
+ ],
174
178
  },
175
179
  {
176
180
  value: "openai",
177
181
  label: "OpenAI",
178
182
  hint: openai.value ? "API key found in env" : "API key or ChatGPT OAuth",
179
183
  detected: Boolean(openai.value),
184
+ authMethods: [
185
+ { value: "api_key", label: "API Key", hint: "From platform.openai.com" },
186
+ { value: "oauth", label: "ChatGPT OAuth", hint: "Browser login, refreshable token" },
187
+ ],
180
188
  },
181
189
  {
182
190
  value: "openrouter",
183
191
  label: "OpenRouter",
184
192
  hint: openrouter.value ? "API key found" : "One key, 100+ models — openrouter.ai",
185
193
  detected: Boolean(openrouter.value),
194
+ authMethods: [
195
+ { value: "api_key", label: "API Key", hint: "From openrouter.ai/keys" },
196
+ ],
186
197
  },
187
198
  {
188
199
  value: "ollama",
@@ -191,12 +202,16 @@ export function detectProviderOptions({ env = process.env, ollamaResult = { ok:
191
202
  ? `Local, free — ${ollamaResult.models?.length || 0} models installed`
192
203
  : "Local, free — not detected",
193
204
  detected: Boolean(ollamaResult.ok),
205
+ authMethods: [
206
+ { value: "local", label: "Local", hint: "No auth needed" },
207
+ ],
194
208
  },
195
209
  {
196
210
  value: "skip",
197
211
  label: "Skip for now",
198
212
  hint: "Finish setup without provider auth.",
199
213
  detected: false,
214
+ authMethods: [],
200
215
  },
201
216
  ];
202
217
  }
@@ -24,7 +24,7 @@ import {
24
24
  resolveOpenAICodexAccess,
25
25
  } from "../../auth/openai-codex-oauth.js";
26
26
  import { getAuthProfile, resolveAuthProfilesPath } from "../../auth/auth-profiles.js";
27
- import { buildModelSelectionOptions, fetchOpenAIModels } from "../model-catalog.js";
27
+ import { writeModelsCatalog, mergeModelMetadata } from "../model-catalog-writer.js";
28
28
 
29
29
  const PROVIDER_CONFIGS = {
30
30
  anthropic: {
@@ -178,26 +178,43 @@ function providerDisplayName(provider) {
178
178
  return provider;
179
179
  }
180
180
 
181
- async function fetchProviderModelIds(provider, key, { fetchImpl = globalThis.fetch } = {}) {
182
- if (provider === "anthropic") {
183
- return PROVIDER_MODEL_PRESETS.anthropic.map((item) => item.id);
184
- }
181
+ const OPENAI_CONTEXT_WINDOWS = {
182
+ "gpt-4.1": 1000000,
183
+ "gpt-4o": 128000,
184
+ "gpt-4o-mini": 128000,
185
+ "o4-mini": 200000,
186
+ "o3": 200000,
187
+ "o3-mini": 200000,
188
+ "o1": 200000,
189
+ "o1-mini": 128000,
190
+ "gpt-4-turbo": 128000,
191
+ "gpt-3.5-turbo": 16385,
192
+ };
185
193
 
186
- const target = provider === "openrouter"
187
- ? {
188
- url: "https://openrouter.ai/api/v1/models",
189
- headers: { authorization: `Bearer ${key}` },
190
- }
191
- : provider === "openai"
192
- ? {
193
- url: "https://api.openai.com/v1/models",
194
- headers: { authorization: `Bearer ${key}` },
195
- }
196
- : null;
194
+ function formatContextWindow(tokens) {
195
+ if (!tokens || tokens <= 0) return "";
196
+ if (tokens >= 1000000) return `ctx ${(tokens / 1000000).toFixed(0)}M`;
197
+ return `ctx ${Math.round(tokens / 1000).toFixed(0)}k`;
198
+ }
197
199
 
198
- if (!target) {
199
- return [];
200
- }
200
+ async function fetchProviderModels(provider, key, { fetchImpl = globalThis.fetch } = {}) {
201
+ const targets = {
202
+ anthropic: {
203
+ url: "https://api.anthropic.com/v1/models",
204
+ headers: { "x-api-key": key, "anthropic-version": "2023-06-01" },
205
+ },
206
+ openrouter: {
207
+ url: "https://openrouter.ai/api/v1/models",
208
+ headers: { authorization: `Bearer ${key}` },
209
+ },
210
+ openai: {
211
+ url: "https://api.openai.com/v1/models",
212
+ headers: { authorization: `Bearer ${key}` },
213
+ },
214
+ };
215
+
216
+ const target = targets[provider];
217
+ if (!target) return [];
201
218
 
202
219
  try {
203
220
  const response = await fetchImpl(target.url, {
@@ -207,56 +224,91 @@ async function fetchProviderModelIds(provider, key, { fetchImpl = globalThis.fet
207
224
  });
208
225
  const data = await response.json();
209
226
  if (!response.ok) {
210
- return [];
227
+ return (PROVIDER_MODEL_PRESETS[provider] || []).map((item) => ({
228
+ id: item.id,
229
+ name: item.label,
230
+ }));
211
231
  }
212
- const ids = Array.isArray(data?.data)
213
- ? data.data.map((item) => item?.id).filter(Boolean)
214
- : [];
215
- if (provider === "openrouter") {
216
- return ids.map((id) => ensureProviderModelPrefix(provider, id));
217
- }
218
- if (provider === "openai") {
219
- return ids.map((id) => ensureProviderModelPrefix(provider, id));
220
- }
221
- return ids;
232
+ const items = Array.isArray(data?.data) ? data.data.filter(Boolean) : [];
233
+ return items.map((item) => {
234
+ const rawId = item.id;
235
+ const prefixedId = ensureProviderModelPrefix(provider, rawId);
236
+ let contextWindow;
237
+ let name;
238
+ let reasoning;
239
+
240
+ if (provider === "anthropic") {
241
+ name = item.display_name || undefined;
242
+ contextWindow = item.context_window || undefined;
243
+ } else if (provider === "openrouter") {
244
+ name = item.name || undefined;
245
+ contextWindow = item.context_length || undefined;
246
+ } else if (provider === "openai") {
247
+ name = undefined;
248
+ contextWindow = OPENAI_CONTEXT_WINDOWS[rawId] || undefined;
249
+ }
250
+
251
+ return {
252
+ id: prefixedId,
253
+ contextWindow,
254
+ name,
255
+ reasoning,
256
+ };
257
+ });
222
258
  } catch {
223
- return [];
259
+ return (PROVIDER_MODEL_PRESETS[provider] || []).map((item) => ({
260
+ id: item.id,
261
+ name: item.label,
262
+ }));
224
263
  }
225
264
  }
226
265
 
227
266
  async function buildProviderSelectionOptions(provider, key, { fetchImpl = globalThis.fetch } = {}) {
228
- if (provider === "openai") {
229
- const discoveredModels = await fetchOpenAIModels(key, { fetchImpl });
230
- return buildModelSelectionOptions({
231
- provider,
232
- discoveredModels,
233
- includeKeep: false,
234
- includeManual: true,
235
- }).map((entry) => ({
236
- value: entry.value,
237
- label: entry.label,
238
- description: entry.hint,
239
- }));
240
- }
241
-
242
267
  const curated = PROVIDER_MODEL_PRESETS[provider] || [];
243
- const available = new Set(await fetchProviderModelIds(provider, key, { fetchImpl }));
244
- const selectable = curated.filter((item) => available.size === 0 || available.has(item.id));
245
- const options = selectable.length > 0 ? selectable : curated;
246
- return options.map((item) => ({
247
- value: item.id,
248
- label: item.label,
249
- description: item.description,
250
- }));
268
+ const fetchedModels = await fetchProviderModels(provider, key, { fetchImpl });
269
+ const fetchedIds = new Set(fetchedModels.map((m) => m.id));
270
+ const fetchedMap = new Map(fetchedModels.map((m) => [m.id, m]));
271
+
272
+ // Curated models that exist in the fetched list (fall back to all curated if fetch failed)
273
+ const curatedAvailable = fetchedModels.length > 0
274
+ ? curated.filter((item) => fetchedIds.has(item.id))
275
+ : curated;
276
+ const curatedIds = new Set(curatedAvailable.map((item) => item.id));
277
+
278
+ // Remaining fetched models not already shown as curated
279
+ const extra = fetchedModels
280
+ .filter((m) => !curatedIds.has(m.id))
281
+ .map((m) => {
282
+ const displayId = m.id
283
+ .replace(/^openrouter\//, "")
284
+ .replace(/^openai-codex\//, "")
285
+ .replace(/^anthropic\//, "");
286
+ const ctxStr = formatContextWindow(m.contextWindow);
287
+ const desc = ctxStr ? `available from provider \u00b7 ${ctxStr}` : "available from provider";
288
+ return { value: m.id, label: displayId, description: desc };
289
+ });
290
+
291
+ const options = [
292
+ ...curatedAvailable.map((item) => {
293
+ const fetched = fetchedMap.get(item.id);
294
+ const ctxStr = fetched ? formatContextWindow(fetched.contextWindow) : "";
295
+ const desc = ctxStr ? `${item.description} \u00b7 ${ctxStr}` : item.description;
296
+ return { value: item.id, label: item.label, description: desc };
297
+ }),
298
+ ...extra,
299
+ { value: "__custom__", label: "Enter a different model name...", description: "Use a specific model id not shown in the list." },
300
+ ];
301
+
302
+ return { options, fetchedModels };
251
303
  }
252
304
 
253
305
  async function promptForProviderModels(provider, key, tui, { fetchImpl = globalThis.fetch } = {}) {
254
306
  const { select, prompt, dim, cyan } = tui;
255
- if (!select) return [];
307
+ if (!select) return { chosen: [], fetchedModels: [] };
256
308
 
257
- const options = await buildProviderSelectionOptions(provider, key, { fetchImpl });
309
+ const { options, fetchedModels } = await buildProviderSelectionOptions(provider, key, { fetchImpl });
258
310
  const chosen = [];
259
- const manualOptionValue = provider === "openai" ? "__manual__" : "__custom__";
311
+ const manualOptionValue = "__custom__";
260
312
  const defaultModelValue = options.find((item) => !String(item.value).startsWith("__"))?.value || "";
261
313
 
262
314
  console.log(`\n ${cyan(`Choose ${provider === "openrouter" ? "OpenRouter" : provider === "openai" ? "OpenAI" : "Anthropic"} models`)}`);
@@ -271,19 +323,14 @@ async function promptForProviderModels(provider, key, tui, { fetchImpl = globalT
271
323
  }));
272
324
 
273
325
  if (chosen.length > 0) {
274
- pickerOptions.push({
275
- label: "Done",
276
- value: "__done__",
277
- description: "Continue setup with the models already selected.",
278
- });
279
- }
280
-
281
- if (provider !== "openai") {
282
- pickerOptions.push({
283
- label: "Enter a different model name...",
284
- value: manualOptionValue,
285
- description: "Use a specific model id not shown in the curated list.",
286
- });
326
+ // Insert "Done" before the custom-entry option at the end
327
+ const customIndex = pickerOptions.findIndex((item) => item.value === "__custom__");
328
+ const doneOption = { label: "Done", value: "__done__", description: "Continue setup with the models already selected." };
329
+ if (customIndex >= 0) {
330
+ pickerOptions.splice(customIndex, 0, doneOption);
331
+ } else {
332
+ pickerOptions.push(doneOption);
333
+ }
287
334
  }
288
335
 
289
336
  const picked = await select(
@@ -309,7 +356,7 @@ async function promptForProviderModels(provider, key, tui, { fetchImpl = globalT
309
356
  }
310
357
  }
311
358
 
312
- return chosen;
359
+ return { chosen, fetchedModels };
313
360
  }
314
361
 
315
362
  export function writeProviderConfigs(installDir, providerInput) {
@@ -551,11 +598,31 @@ export async function runAuthPhase(installDir, options = {}) {
551
598
  providerFlags.openai = true;
552
599
  }
553
600
 
601
+ const fetchedModelsCache = {};
554
602
  if (tui) {
555
603
  for (const provider of ["openrouter", "anthropic", "openai"]) {
556
604
  const providerToken = providerSecrets[provider];
557
605
  if (!providerToken) continue;
558
- selectedModels[provider] = await promptForProviderModels(provider, providerToken, tui, { fetchImpl });
606
+ const { chosen, fetchedModels } = await promptForProviderModels(provider, providerToken, tui, { fetchImpl });
607
+ selectedModels[provider] = chosen;
608
+ fetchedModelsCache[provider] = fetchedModels;
609
+ }
610
+ }
611
+
612
+ // Persist model catalog with metadata
613
+ const allCatalogModels = [];
614
+ for (const [provider, modelIds] of Object.entries(selectedModels)) {
615
+ if (!modelIds?.length) continue;
616
+ const presets = PROVIDER_MODEL_PRESETS[provider] || [];
617
+ const fetched = fetchedModelsCache[provider] || [];
618
+ const enriched = mergeModelMetadata(presets, fetched, modelIds, provider);
619
+ allCatalogModels.push(...enriched);
620
+ }
621
+ if (allCatalogModels.length > 0) {
622
+ try {
623
+ writeModelsCatalog(installDir, "default", allCatalogModels);
624
+ } catch {
625
+ // Non-fatal — catalog is optional
559
626
  }
560
627
  }
561
628
 
@@ -17,6 +17,7 @@ import { verify } from "./phases/verify.js";
17
17
  import { createClackPrompter, SetupCancelledError } from "./clack-prompter.js";
18
18
  import { detectPreferredProvider, detectProviderOptions, summarizeSelectedModels } from "./model-catalog.js";
19
19
  import { isDaemonRunning, loadDaemon, writeDaemonUnit } from "./platform.js";
20
+ import { writeWizardMetadata } from "./lock.js";
20
21
 
21
22
  const MIN_NODE_MAJOR = 22;
22
23
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
@@ -63,6 +64,43 @@ function readRuntimeConfig(installDir) {
63
64
  }
64
65
  }
65
66
 
67
+ function readExistingIdentity(installDir) {
68
+ const result = { userName: "", agentName: "", agentId: "", provider: "" };
69
+ try {
70
+ const agentsDir = path.join(installDir, "config", "agents");
71
+ if (fs.existsSync(agentsDir)) {
72
+ const files = fs.readdirSync(agentsDir).filter((f) => f.endsWith(".toml"));
73
+ if (files.length > 0) {
74
+ const agentToml = parseToml(fs.readFileSync(path.join(agentsDir, files[0]), "utf8"));
75
+ result.agentId = agentToml?.id || files[0].replace(/\.toml$/, "");
76
+ }
77
+ }
78
+ const identityDir = path.join(installDir, "config", "identity");
79
+ if (fs.existsSync(identityDir)) {
80
+ const soulFiles = fs.readdirSync(identityDir).filter((f) => f.endsWith("-soul.md"));
81
+ for (const file of soulFiles) {
82
+ const content = fs.readFileSync(path.join(identityDir, file), "utf8");
83
+ const nameMatch = content.match(/^# Soul — (.+)$/m);
84
+ if (nameMatch) result.agentName = nameMatch[1].trim();
85
+ const operatorMatch = content.match(/^Operator:\s*(.+)$/m);
86
+ if (operatorMatch) result.userName = operatorMatch[1].trim();
87
+ if (result.agentName) break;
88
+ }
89
+ }
90
+ const providersDir = path.join(installDir, "config", "providers");
91
+ if (fs.existsSync(providersDir)) {
92
+ const providerFiles = fs.readdirSync(providersDir).filter((f) => f.endsWith(".toml") && f !== "ollama.toml");
93
+ if (providerFiles.length > 0) {
94
+ const name = providerFiles[0].replace(/\.toml$/, "").replace(/-codex$/, "");
95
+ result.provider = name === "openai" ? "openai" : name;
96
+ }
97
+ }
98
+ } catch {
99
+ // Non-fatal — fall back to defaults
100
+ }
101
+ return result;
102
+ }
103
+
66
104
  function summarizeExistingConfig(installDir) {
67
105
  const runtime = readRuntimeConfig(installDir);
68
106
  const providersDir = path.join(installDir, "config", "providers");
@@ -164,19 +202,42 @@ async function runInteractiveWizard({
164
202
  flowOverride = null,
165
203
  }) {
166
204
  const prompter = createClackPrompter();
205
+
206
+ // ASCII banner — ANSI Shadow font, brand accent colour
207
+ const BRAND = "\x1b[38;2;45;212;191m";
208
+ const RESET = "\x1b[0m";
209
+ const DIM = "\x1b[2m";
210
+ const ascii = [
211
+ "",
212
+ `${BRAND} ███╗ ██╗███████╗███╗ ███╗ ██████╗ ██████╗ ██╗███████╗${RESET}`,
213
+ `${BRAND} ████╗ ██║██╔════╝████╗ ████║██╔═══██╗██╔══██╗██║██╔════╝${RESET}`,
214
+ `${BRAND} ██╔██╗ ██║█████╗ ██╔████╔██║██║ ██║██████╔╝██║███████╗${RESET}`,
215
+ `${BRAND} ██║╚██╗██║██╔══╝ ██║╚██╔╝██║██║ ██║██╔══██╗██║╚════██║${RESET}`,
216
+ `${BRAND} ██║ ╚████║███████╗██║ ╚═╝ ██║╚██████╔╝██║ ██║██║███████║${RESET}`,
217
+ `${BRAND} ╚═╝ ╚═══╝╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝╚══════╝${RESET}`,
218
+ "",
219
+ ].join("\n");
220
+ console.log(ascii);
221
+
167
222
  await prompter.intro("Nemoris setup");
168
223
 
169
224
  await prompter.note([
170
- "Security warning please read.",
225
+ "Nemoris is a personal AI agent runtime.",
226
+ "",
227
+ "Before continuing, please understand:",
171
228
  "",
172
- "Nemoris runs as a background daemon on your machine.",
173
- "It can execute shell commands, read files, and make network requests always acting on your instructions, never autonomously.",
229
+ " Your agent can execute system commands if tools are enabled.",
230
+ " It can read and write files within its workspace directory.",
231
+ " It can make network requests to configured providers.",
232
+ " A malicious prompt could trick it into unsafe actions.",
174
233
  "",
175
- "This is open-source software. Review the code at:",
176
- "https://github.com/amzer24/nemoris",
234
+ "Recommended baseline:",
235
+ " - Keep API keys out of the agent's reachable workspace.",
236
+ " - Use the strongest available model for tool-enabled agents.",
237
+ " - Review agent actions in the run log regularly.",
238
+ " - Do not expose the daemon to untrusted networks.",
177
239
  "",
178
- "Run as a personal agent — one trusted operator boundary.",
179
- "Do not expose to the internet without hardening first.",
240
+ "More: https://github.com/amzer24/nemoris#security",
180
241
  ].join("\n"), "Security");
181
242
 
182
243
  const proceed = await prompter.confirm({
@@ -193,9 +254,9 @@ async function runInteractiveWizard({
193
254
  const action = await prompter.select({
194
255
  message: "Config handling",
195
256
  options: [
196
- { value: "keep", label: "Use existing values" },
197
- { value: "update", label: "Update values" },
198
- { value: "reset", label: "Reset everything" },
257
+ { value: "keep", label: "Use existing values", hint: "Skip setup — keep current config and start running" },
258
+ { value: "update", label: "Update values", hint: "Walk through setup again, pre-filled with current config" },
259
+ { value: "reset", label: "Reset everything", hint: "Wipe and start fresh" },
199
260
  ],
200
261
  });
201
262
 
@@ -208,9 +269,9 @@ async function runInteractiveWizard({
208
269
  const scope = await prompter.select({
209
270
  message: "Reset scope",
210
271
  options: [
211
- { value: "config", label: "Config only" },
212
- { value: "config+state", label: "Config + state (memory, runs, scheduler)" },
213
- { value: "full", label: "Full reset (everything)" },
272
+ { value: "config", label: "Config only", hint: "Rewrites agents, router, and provider config. Keeps memory and history." },
273
+ { value: "config+state", label: "Config + state", hint: "Config + wipes memory, run history, and scheduler data. Agent identities kept." },
274
+ { value: "full", label: "Full reset", hint: "Deletes everything and starts completely fresh. Like a first install." },
214
275
  ],
215
276
  });
216
277
  resetInstallArtifacts(installDir, scope);
@@ -247,12 +308,13 @@ async function runInteractiveWizard({
247
308
  const detection = await detect(installDir);
248
309
  await scaffold({ installDir });
249
310
 
311
+ const existing = readExistingIdentity(installDir);
250
312
  const userName = await prompter.text({
251
313
  message: "Your name",
252
- initialValue: process.env.NEMORIS_USER_NAME || process.env.USER || "",
314
+ initialValue: existing.userName || process.env.NEMORIS_USER_NAME || process.env.USER || "",
253
315
  placeholder: os.userInfo().username || "",
254
316
  });
255
- const defaultAgentName = process.env.NEMORIS_AGENT_NAME || resolveDefaultAgentName();
317
+ const defaultAgentName = existing.agentName || process.env.NEMORIS_AGENT_NAME || resolveDefaultAgentName();
256
318
  const agentName = await prompter.text({
257
319
  message: "What should your agent be called?",
258
320
  initialValue: defaultAgentName,
@@ -277,12 +339,30 @@ async function runInteractiveWizard({
277
339
  label: option.label,
278
340
  hint: option.hint,
279
341
  })),
280
- initialValue: detectPreferredProvider({
342
+ initialValue: existing.provider || detectPreferredProvider({
281
343
  env: process.env,
282
344
  ollamaResult: detection.ollama ? { ok: true } : { ok: false },
283
345
  }),
284
346
  });
285
347
 
348
+ // Select auth method if provider has multiple options
349
+ let selectedAuthMethod = "api_key";
350
+ const providerOptions = detectProviderOptions({
351
+ env: process.env,
352
+ ollamaResult: detection.ollama ? { ok: true, models: detection.ollama.models } : { ok: false, models: [] },
353
+ });
354
+ const selectedProvider = providerOptions.find((o) => o.value === provider);
355
+ if (selectedProvider?.authMethods?.length > 1) {
356
+ selectedAuthMethod = await prompter.select({
357
+ message: `${selectedProvider.label} auth method:`,
358
+ options: selectedProvider.authMethods.map((m) => ({
359
+ value: m.value,
360
+ label: m.label,
361
+ hint: m.hint,
362
+ })),
363
+ });
364
+ }
365
+
286
366
  let authResult = { providers: [], providerFlags: {}, selectedModels: {} };
287
367
  let telegramResult = { configured: false, verified: false };
288
368
  let ollamaResult = { configured: false, verified: false, models: [] };
@@ -297,7 +377,7 @@ async function runInteractiveWizard({
297
377
  ollamaResult: detection.ollama ? { ok: true, models: detection.ollama.models } : { ok: false, models: [] },
298
378
  },
299
379
  providerOrder: [provider],
300
- enableOpenAIOAuthChoice: provider === "openai",
380
+ authMethod: selectedAuthMethod,
301
381
  });
302
382
  authSpin.stop("Provider configured");
303
383
  }
@@ -359,27 +439,55 @@ async function runInteractiveWizard({
359
439
  installShellCompletion();
360
440
  }
361
441
 
362
- const modelSummary = summarizeSelectedModels([
363
- ...(authResult.selectedModels?.[provider] || []),
364
- ...(ollamaResult.models || []).map((model) => `ollama/${model}`),
365
- ]);
366
- const authMethod = provider === "ollama"
367
- ? "local"
368
- : authResult.providers.length > 0
369
- ? "api_key"
370
- : "skipped";
442
+ const summaryLines = [];
443
+
444
+ // Providers
445
+ const providerNames = [];
446
+ if (authResult.providerFlags?.anthropic) providerNames.push("Anthropic");
447
+ if (authResult.providerFlags?.openai) providerNames.push("OpenAI");
448
+ if (authResult.providerFlags?.openrouter) providerNames.push("OpenRouter");
449
+ if (ollamaResult.configured || authResult.providerFlags?.ollama) providerNames.push("Ollama");
450
+ summaryLines.push(`Providers: ${providerNames.length > 0 ? providerNames.join(", ") : "none configured"}`);
451
+
452
+ // Models
453
+ for (const [prov, models] of Object.entries(authResult.selectedModels || {})) {
454
+ if (!models?.length) continue;
455
+ const stripped = models.map((m) => m.replace(/^(openrouter|openai-codex|anthropic)\//, ""));
456
+ summaryLines.push(` ${prov}: ${stripped.join(", ")}`);
457
+ }
458
+
459
+ // Telegram
460
+ if (telegramResult.verified) {
461
+ summaryLines.push("Telegram: connected");
462
+ } else if (telegramResult.configured) {
463
+ summaryLines.push("Telegram: configured (not verified)");
464
+ }
371
465
 
372
- await prompter.note([
373
- "Auth overview:",
374
- ` ${provider} ${provider === "skip" ? "skip" : "✓"} ${authMethod}`,
375
- "",
376
- "Models:",
377
- ` Default : ${modelSummary.defaultModel || "not configured"}`,
378
- ` Fallback : ${modelSummary.fallbackModel || "not configured"}`,
379
- "",
380
- "Docs: https://github.com/amzer24/nemoris",
381
- "Issues: https://github.com/amzer24/nemoris/issues",
382
- ].join("\n"), "Ready");
466
+ // Ollama
467
+ if (ollamaResult.configured) {
468
+ summaryLines.push(`Ollama: ${ollamaResult.models?.length || 0} models`);
469
+ }
470
+
471
+ // Next steps
472
+ summaryLines.push("");
473
+ summaryLines.push("Next steps:");
474
+ summaryLines.push(" nemoris start Start the daemon");
475
+ summaryLines.push(" nemoris status Check runtime health");
476
+ summaryLines.push(" nemoris chat Open TUI chat");
477
+
478
+ await prompter.note(summaryLines.join("\n"), "Setup complete");
479
+
480
+ try {
481
+ const pkg = JSON.parse(fs.readFileSync(new URL("../../package.json", import.meta.url), "utf8"));
482
+ writeWizardMetadata(installDir, {
483
+ lastRunVersion: pkg.version || "",
484
+ lastRunCommand: "setup",
485
+ lastRunMode: flow === "manual" ? "manual" : "quickstart",
486
+ lastRunFlow: "interactive",
487
+ });
488
+ } catch {
489
+ // Non-fatal
490
+ }
383
491
 
384
492
  await prompter.outro(
385
493
  telegramResult.verified
@@ -454,6 +562,19 @@ async function runNonInteractiveWizard({
454
562
  if (result.status === "warning") {
455
563
  return 1;
456
564
  }
565
+
566
+ try {
567
+ const pkg = JSON.parse(fs.readFileSync(new URL("../../package.json", import.meta.url), "utf8"));
568
+ writeWizardMetadata(installDir, {
569
+ lastRunVersion: pkg.version || "",
570
+ lastRunCommand: "setup",
571
+ lastRunMode: flow === "quickstart" ? "quickstart" : "manual",
572
+ lastRunFlow: "non-interactive",
573
+ });
574
+ } catch {
575
+ // Non-fatal
576
+ }
577
+
457
578
  if (flow !== "quickstart" && buildResult.ollamaConfigured) {
458
579
  // Keep manual mode deterministic in CI while preserving quickstart defaults.
459
580
  return 0;