bloby-bot 0.47.0 → 0.47.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/cli.js CHANGED
@@ -17,13 +17,34 @@ const CONFIG_PATH = path.join(DATA_DIR, 'config.json');
17
17
  const BIN_DIR = path.join(DATA_DIR, 'bin');
18
18
  const CF_PATH = path.join(BIN_DIR, 'cloudflared');
19
19
 
20
- // ── Ensure dependencies exist (self-heal if postinstall npm install failed) ──
21
- if (!IS_DEV && !fs.existsSync(path.join(ROOT, 'node_modules', 'viem'))) {
20
// ── Ensure dependencies exist (self-heal if postinstall/update npm install failed) ──
// Every declared dep is checked, not just one sentinel: a release that adds a
// new dep would otherwise silently boot into an `ERR_MODULE_NOT_FOUND` crash loop.
function missingDeps(root) {
  let declared;
  try {
    const manifest = JSON.parse(fs.readFileSync(path.join(root, 'package.json'), 'utf-8'));
    declared = manifest.dependencies || {};
  } catch {
    // Unreadable or absent package.json: nothing to verify against.
    return [];
  }
  const absent = [];
  for (const dep of Object.keys(declared)) {
    // A dep is "installed" only if its own package.json landed on disk.
    if (!fs.existsSync(path.join(root, 'node_modules', dep, 'package.json'))) {
      absent.push(dep);
    }
  }
  return absent;
}
32
+
33
// Self-heal pass: outside dev checkouts, reinstall once when any declared dep
// is absent, then re-verify so a bad install can't silently crash-loop later.
if (!IS_DEV) {
  const absent = missingDeps(ROOT);
  if (absent.length > 0) {
    const preview = absent.slice(0, 5).join(', ');
    const overflow = absent.length > 5 ? `, +${absent.length - 5} more` : '';
    console.error(`\n Installing missing dependencies: ${preview}${overflow}\n`);
    try {
      execSync('npm install --omit=dev', { cwd: ROOT, stdio: 'inherit' });
    } catch {
      console.error('\n ✗ Failed to install dependencies. Run manually:\n cd ~/.bloby && npm install\n');
      process.exit(1);
    }
    // npm can exit 0 and still leave deps unextracted — trust the disk, not
    // the exit code.
    const remaining = missingDeps(ROOT);
    if (remaining.length > 0) {
      console.error(`\n ✗ Dependencies still missing after npm install: ${remaining.join(', ')}\n Try: cd ~/.bloby && rm -rf node_modules package-lock.json && npm install --omit=dev\n`);
      process.exit(1);
    }
  }
}
29
50
 
@@ -1498,11 +1519,24 @@ async function update() {
1498
1519
 
1499
1520
  const distDst = path.join(DATA_DIR, 'dist-bloby');
1500
1521
 
1501
- // Install dependencies (5 min timeout to prevent hanging forever)
1522
+ // Install dependencies (5 min timeout to prevent hanging forever).
1523
+ // A failed install while new source files are already in place leaves the
1524
+ // app permanently broken (e.g. crash loop on a new import). Treat as fatal,
1525
+ // surface the npm output so the cause is debuggable, and don't claim success.
1502
1526
  try {
1503
- execSync('npm install --omit=dev', { cwd: DATA_DIR, stdio: 'ignore', timeout: 300_000 });
1527
+ execSync('npm install --omit=dev', { cwd: DATA_DIR, stdio: 'inherit', timeout: 300_000 });
1504
1528
  } catch (e) {
1505
- console.log(` ${c.yellow}⚠${c.reset} npm install issue: ${e.message}`);
1529
+ console.log(`\n ${c.red}✗${c.reset} npm install failed during update: ${e.message}`);
1530
+ console.log(` Your install is now partially upgraded. To recover:\n cd ~/.bloby && npm install --omit=dev\n`);
1531
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1532
+ process.exit(1);
1533
+ }
1534
+ const stillMissing = missingDeps(DATA_DIR);
1535
+ if (stillMissing.length > 0) {
1536
+ console.log(`\n ${c.red}✗${c.reset} npm install reported success but these deps are missing: ${stillMissing.join(', ')}`);
1537
+ console.log(` Try: cd ~/.bloby && rm -rf node_modules package-lock.json && npm install --omit=dev\n`);
1538
+ fs.rmSync(tmpDir, { recursive: true, force: true });
1539
+ process.exit(1);
1506
1540
  }
1507
1541
  stepper.advance();
1508
1542
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bloby-bot",
3
- "version": "0.47.0",
3
+ "version": "0.47.1",
4
4
  "releaseNotes": [
5
5
  "1. # voice note (PTT bubble)",
6
6
  "2. # audio file + caption",
@@ -49,7 +49,8 @@
49
49
  "start": "node --import tsx/esm supervisor/index.ts",
50
50
  "postinstall": "node scripts/postinstall.js",
51
51
  "dev:workspace": "vite",
52
- "dev:docs": "cd ./docs && npx fumapress"
52
+ "dev:docs": "cd ./docs && npx fumapress",
53
+ "sync:pi-models": "tsx scripts/sync-pi-models.ts"
53
54
  },
54
55
  "dependencies": {
55
56
  "@anthropic-ai/claude-agent-sdk": "^0.2.138",
@@ -73,8 +73,25 @@ try {
73
73
  cwd: BLOBY_HOME,
74
74
  stdio: 'inherit',
75
75
  });
76
- } catch {
77
- console.error('Warning: failed to install dependencies in ~/.bloby/ run "cd ~/.bloby && npm install" manually');
76
+ } catch (e) {
77
+ // Don't swallow this — partial deps leave bloby in a crash loop on first
78
+ // start (e.g. missing @anthropic-ai/claude-agent-sdk after a release that
79
+ // adds a new dep). The CLI has a self-heal pass but only triggers when the
80
+ // user runs `bloby` again, so make the failure visible here too.
81
+ console.error(`\nError: npm install failed in ${BLOBY_HOME}: ${e.message}`);
82
+ console.error(`Run manually: cd ${BLOBY_HOME} && npm install --omit=dev\n`);
83
+ process.exit(1);
84
+ }
85
+
86
+ // Verify every declared dependency actually landed on disk.
87
+ const installedDeps = JSON.parse(fs.readFileSync(path.join(BLOBY_HOME, 'package.json'), 'utf-8')).dependencies || {};
88
+ const missing = Object.keys(installedDeps).filter(
89
+ d => !fs.existsSync(path.join(BLOBY_HOME, 'node_modules', d, 'package.json'))
90
+ );
91
+ if (missing.length > 0) {
92
+ console.error(`\nError: npm install reported success but these deps are missing: ${missing.join(', ')}`);
93
+ console.error(`Try: cd ${BLOBY_HOME} && rm -rf node_modules package-lock.json && npm install --omit=dev\n`);
94
+ process.exit(1);
78
95
  }
79
96
 
80
97
  // ── Prune wrong-libc claude-agent-sdk native package ──
@@ -0,0 +1,146 @@
1
+ /**
2
+ * Sync the pi model catalog into Bloby.
3
+ *
4
+ * Reads upstream pi's `packages/ai/src/models.generated.ts` (vendored as a
5
+ * sibling checkout at ../pi-main) and emits a filtered, alphabetised TS file
6
+ * the wizard imports. Run on demand when you want to pull in newer model IDs.
7
+ *
8
+ * npm run sync:pi-models
9
+ *
10
+ * The OUTPUT (`supervisor/harnesses/pi/models-catalog.generated.ts`) is the
11
+ * file that ships with bloby. The pi-main checkout is dev-only and is not
12
+ * required at runtime.
13
+ */
14
+ import fs from 'fs';
15
+ import path from 'path';
16
+ import { fileURLToPath, pathToFileURL } from 'url';
17
+
18
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
19
+ const REPO_ROOT = path.resolve(__dirname, '..');
20
+ const PI_MODELS_PATH = path.join(REPO_ROOT, 'pi-main', 'packages', 'ai', 'src', 'models.generated.ts');
21
+ const OUTPUT_PATH = path.join(REPO_ROOT, 'supervisor', 'harnesses', 'pi', 'models-catalog.generated.ts');
22
+
23
+ if (!fs.existsSync(PI_MODELS_PATH)) {
24
+ console.error(`✗ pi catalog not found at ${PI_MODELS_PATH}`);
25
+ console.error(' Clone or download earendil-works/pi into ../pi-main first.');
26
+ process.exit(1);
27
+ }
28
+
29
+ // Bloby sub-provider id → pi MODELS top-level key.
30
+ // Sub-providers without a pi mapping (ollama, lm-studio, custom) stay dynamic.
31
+ const PROVIDER_MAP: Record<string, string> = {
32
+ google: 'google',
33
+ deepseek: 'deepseek',
34
+ groq: 'groq',
35
+ xai: 'xai',
36
+ cerebras: 'cerebras',
37
+ mistral: 'mistral',
38
+ 'openai-api': 'openai',
39
+ 'anthropic-api': 'anthropic',
40
+ // openrouter intentionally skipped — 270+ entries is wizard-hostile. Stays dynamic.
41
+ };
42
+
43
+ // Drop noisy variants: date-suffixed previews, custom-tool forks, live-audio,
44
+ // model-snapshot aliases. We keep the canonical id (e.g. "gemini-3.1-pro-preview")
45
+ // and the rolling "*-latest" handles, which is what users actually want to pick.
46
+ const HIDDEN_PATTERNS: RegExp[] = [
47
+ /-\d{2}-\d{2}$/, // ...-04-17
48
+ /-\d{2}-\d{4}$/, // ...-09-2025
49
+ /-\d{4}-\d{2}-\d{2}$/, // ...-2025-08-07
50
+ /-\d{8}$/, // ...-20250805
51
+ /-customtools$/,
52
+ /-live-/,
53
+ /-search-preview/,
54
+ /-realtime/,
55
+ /-audio/,
56
+ /-tts$/,
57
+ /-transcribe$/,
58
+ /^text-embedding-/,
59
+ /^omni-moderation/,
60
+ /^dall-e/,
61
+ /^whisper/,
62
+ /^gpt-3\.5/, // legacy
63
+ /^gpt-4-/, // legacy variants of plain gpt-4
64
+ /^o1-/, // dated o1 variants
65
+ /^gemma-/, // separate open-weight family — better served via Ollama
66
+ ];
67
+
68
+ function isHidden(id: string): boolean {
69
+ return HIDDEN_PATTERNS.some((re) => re.test(id));
70
+ }
71
+
72
+ async function loadPiModels(): Promise<Record<string, Record<string, { id: string; name?: string }>>> {
73
+ // pi's file has `import type { Model } from "./types.js"` — strip that line
74
+ // so the module loads without needing pi's full types graph at sync time.
75
+ const raw = fs.readFileSync(PI_MODELS_PATH, 'utf-8');
76
+ const sanitised = raw
77
+ .replace(/^import type[^;]+;\s*$/m, '')
78
+ .replace(/ satisfies Model<[^>]+>/g, '');
79
+
80
+ // Drop into a temp file next to the original so any relative paths in errors
81
+ // still make sense, then dynamic-import via file:// URL.
82
+ const tmpPath = `${PI_MODELS_PATH}.bloby-sync.tmp.ts`;
83
+ fs.writeFileSync(tmpPath, sanitised);
84
+ try {
85
+ const mod = await import(pathToFileURL(tmpPath).href);
86
+ return (mod as any).MODELS;
87
+ } finally {
88
+ fs.rmSync(tmpPath, { force: true });
89
+ }
90
+ }
91
+
92
+ function versionScore(id: string): number {
93
+ // Cheap "newer first" ordering: parse the first major.minor pair we find.
94
+ const m = id.match(/(\d+)(?:\.(\d+))?/);
95
+ if (!m) return 0;
96
+ const major = parseInt(m[1], 10);
97
+ const minor = m[2] ? parseInt(m[2], 10) : 0;
98
+ return major * 1000 + minor;
99
+ }
100
+
101
+ async function main() {
102
+ const MODELS = await loadPiModels();
103
+
104
+ const out: Record<string, { id: string; label: string }[]> = {};
105
+ let total = 0;
106
+ for (const [blobyId, piKey] of Object.entries(PROVIDER_MAP)) {
107
+ const provider = MODELS[piKey];
108
+ if (!provider) {
109
+ console.warn(`! no pi provider "${piKey}" (mapped from bloby "${blobyId}")`);
110
+ continue;
111
+ }
112
+ const entries: { id: string; label: string }[] = [];
113
+ for (const [id, m] of Object.entries(provider)) {
114
+ if (isHidden(id)) continue;
115
+ entries.push({ id, label: m?.name || id });
116
+ }
117
+ // Newest version first; alphabetical inside the same version.
118
+ entries.sort((a, b) => {
119
+ const dv = versionScore(b.id) - versionScore(a.id);
120
+ return dv !== 0 ? dv : a.id.localeCompare(b.id);
121
+ });
122
+ out[blobyId] = entries;
123
+ total += entries.length;
124
+ }
125
+
126
+ const banner =
127
+ `// Auto-generated by scripts/sync-pi-models.ts — DO NOT EDIT MANUALLY.\n` +
128
+ `// Source: earendil-works/pi @ packages/ai/src/models.generated.ts\n` +
129
+ `// Last sync: ${new Date().toISOString()}\n` +
130
+ `\n`;
131
+ const body =
132
+ `export interface PiCatalogModel { id: string; label: string }\n` +
133
+ `export const PI_MODELS_CATALOG: Record<string, PiCatalogModel[]> = ${JSON.stringify(out, null, 2)};\n`;
134
+ fs.writeFileSync(OUTPUT_PATH, banner + body);
135
+
136
+ console.log(`✓ wrote ${OUTPUT_PATH}`);
137
+ console.log(` ${Object.keys(out).length} providers, ${total} models`);
138
+ for (const [k, v] of Object.entries(out)) {
139
+ console.log(` · ${k}: ${v.length}`);
140
+ }
141
+ }
142
+
143
+ main().catch((err) => {
144
+ console.error('✗ sync failed:', err);
145
+ process.exit(1);
146
+ });
package/shared/config.ts CHANGED
@@ -17,7 +17,7 @@ export interface BotConfig {
17
17
  port: number;
18
18
  username: string;
19
19
  ai: {
20
- provider: 'openai' | 'anthropic' | 'ollama' | '';
20
+ provider: 'openai' | 'anthropic' | 'ollama' | 'pi' | '';
21
21
  model: string;
22
22
  apiKey: string;
23
23
  baseUrl?: string;
@@ -17,6 +17,7 @@
17
17
 
18
18
  import * as claude from './harnesses/claude.js';
19
19
  import * as codex from './harnesses/codex.js';
20
+ import * as pi from './harnesses/pi/index.js';
20
21
  import type { Harness, OnAgentMessage, RecentMessage, AgentAttachment, AgentQueryRequest, AgentQueryResult } from './harnesses/types.js';
21
22
  import type { SavedFile } from './file-saver.js';
22
23
  import { loadConfig } from '../shared/config.js';
@@ -26,6 +27,7 @@ export type { RecentMessage, AgentAttachment, AgentQueryRequest, AgentQueryResul
26
27
  const HARNESSES: Record<string, Harness> = {
27
28
  anthropic: claude,
28
29
  openai: codex,
30
+ pi: pi as unknown as Harness,
29
31
  };
30
32
 
31
33
  /** Resolve the harness for the currently-configured provider. */
@@ -0,0 +1,45 @@
1
+ /**
2
+ * Async input queue — copy of the helper from `harnesses/claude.ts:37-68`.
3
+ *
4
+ * The Claude harness uses this exact shape as the input prompt to the Claude
5
+ * Agent SDK's long-lived `query()`. The pi session loop uses the same pattern
6
+ * so the non-blocking live-conversation behavior matches Claude byte-for-byte
7
+ * at the queue level: pushMessage() never awaits the model.
8
+ */
9
+ export interface AsyncQueue<T> extends AsyncIterable<T> {
10
+ push(item: T): void;
11
+ end(): void;
12
+ }
13
+
14
+ export function createAsyncQueue<T>(): AsyncQueue<T> {
15
+ const pending: T[] = [];
16
+ let resolve: ((value: IteratorResult<T>) => void) | null = null;
17
+ let done = false;
18
+
19
+ return {
20
+ push(item: T) {
21
+ if (done) return;
22
+ if (resolve) {
23
+ resolve({ value: item, done: false });
24
+ resolve = null;
25
+ } else {
26
+ pending.push(item);
27
+ }
28
+ },
29
+ end() {
30
+ done = true;
31
+ if (resolve) resolve({ value: undefined as any, done: true });
32
+ },
33
+ [Symbol.asyncIterator]() {
34
+ return {
35
+ next(): Promise<IteratorResult<T>> {
36
+ if (pending.length > 0) {
37
+ return Promise.resolve({ value: pending.shift()!, done: false });
38
+ }
39
+ if (done) return Promise.resolve({ value: undefined as any, done: true });
40
+ return new Promise((r) => { resolve = r; });
41
+ },
42
+ };
43
+ },
44
+ };
45
+ }