reeboot 1.0.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,466 @@
1
+ /**
2
+ * Web Search Extension
3
+ *
4
+ * Registers two pi tools:
5
+ * - fetch_url (always registered, regardless of provider)
6
+ * - web_search (registered when config.search.provider ≠ "none")
7
+ *
8
+ * Supports 6 search backends: DuckDuckGo (zero-config), Brave, Tavily,
9
+ * Serper, Exa (all API-key), SearXNG (self-hosted Docker).
10
+ *
11
+ * API key resolution: config.search.apiKey → env var fallback.
12
+ * SearXNG: health-checks on load; falls back to DDG if unreachable.
13
+ */
14
+
15
+ import { Type } from '@sinclair/typebox';
16
+ import type { ExtensionAPI } from '@mariozechner/pi-coding-agent';
17
+ import { createRequire } from 'module';
18
+ import { parseHTML } from 'linkedom';
19
+
20
+ // CJS require for @mozilla/readability (it's a CommonJS module)
21
+ const _require = createRequire(import.meta.url);
22
+ const { Readability } = _require('@mozilla/readability');
23
+
24
+ // ─── Types ────────────────────────────────────────────────────────────────────
25
+
26
/** A single normalized search hit; every backend maps its response into this shape. */
export interface SearchResult {
  title: string;   // result title; '' when the backend omitted one
  url: string;     // destination URL (decoded from redirect wrappers where needed, e.g. DDG's uddg param)
  snippet: string; // short text excerpt; '' when the backend provides none
}
31
+
32
/** The `search` section of the extension config; all fields are optional. */
export interface SearchConfig {
  provider?: string;       // 'duckduckgo' | 'brave' | 'tavily' | 'serper' | 'exa' | 'searxng' | 'none' (default)
  apiKey?: string;         // explicit API key; when absent, falls back to the provider's env var (see resolveApiKey)
  searxngBaseUrl?: string; // SearXNG base URL; defaults to http://localhost:8080 where used
}
37
+
38
+ // ─── stripTags ────────────────────────────────────────────────────────────────
39
+
40
+ function stripTags(html: string): string {
41
+ return html
42
+ .replace(/<style[^>]*>[\s\S]*?<\/style>/gi, ' ')
43
+ .replace(/<script[^>]*>[\s\S]*?<\/script>/gi, ' ')
44
+ .replace(/<[^>]+>/g, ' ')
45
+ .replace(/\s{2,}/g, ' ')
46
+ .trim();
47
+ }
48
+
49
+ // ─── fetchAndExtract ─────────────────────────────────────────────────────────
50
+
51
+ export async function fetchAndExtract(url: string): Promise<string> {
52
+ let res: Response;
53
+ try {
54
+ res = await fetch(url, {
55
+ headers: {
56
+ 'User-Agent':
57
+ 'Mozilla/5.0 (compatible; Reeboot/1.0; +https://github.com/mariozechner/reeboot)',
58
+ },
59
+ });
60
+ } catch (err: unknown) {
61
+ const msg = err instanceof Error ? err.message : String(err);
62
+ return `Error fetching URL: ${msg}`;
63
+ }
64
+
65
+ if (!res.ok) {
66
+ return `Error fetching URL: HTTP ${res.status}`;
67
+ }
68
+
69
+ const html = await res.text();
70
+
71
+ try {
72
+ const { document } = parseHTML(html);
73
+ const reader = new Readability(document);
74
+ const article = reader.parse();
75
+ if (article?.textContent?.trim()) {
76
+ return article.textContent.trim();
77
+ }
78
+ } catch {
79
+ // Readability failed — fall through to tag-stripping
80
+ }
81
+
82
+ return stripTags(html);
83
+ }
84
+
85
+ // ─── DDG Backend ─────────────────────────────────────────────────────────────
86
+
87
+ export async function searchDuckDuckGo(
88
+ query: string,
89
+ limit: number
90
+ ): Promise<SearchResult[]> {
91
+ try {
92
+ const url = `https://html.duckduckgo.com/html/?q=${encodeURIComponent(query)}`;
93
+ const res = await fetch(url, {
94
+ headers: {
95
+ 'User-Agent':
96
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
97
+ Accept: 'text/html',
98
+ },
99
+ });
100
+ const html = await res.text();
101
+ const { document } = parseHTML(html);
102
+
103
+ const anchors = document.querySelectorAll('a.result__a');
104
+ const snippetEls = document.querySelectorAll('.result__snippet');
105
+
106
+ if (!anchors || anchors.length === 0) return [];
107
+
108
+ const results: SearchResult[] = [];
109
+ const anchorArr = Array.from(anchors);
110
+ const snippetArr = Array.from(snippetEls);
111
+
112
+ for (let i = 0; i < Math.min(anchorArr.length, limit); i++) {
113
+ const a = anchorArr[i] as any;
114
+ const rawHref: string = a.getAttribute('href') ?? '';
115
+
116
+ // Extract decoded URL from DDG redirect param
117
+ let decodedUrl = rawHref;
118
+ try {
119
+ const u = new URL(rawHref, 'https://duckduckgo.com');
120
+ const uddg = u.searchParams.get('uddg');
121
+ if (uddg) decodedUrl = decodeURIComponent(uddg);
122
+ } catch {
123
+ // Leave as-is
124
+ }
125
+
126
+ const title = (a.textContent ?? '').trim();
127
+ const snippet = snippetArr[i]
128
+ ? ((snippetArr[i] as any).textContent ?? '').trim()
129
+ : '';
130
+
131
+ if (title && decodedUrl) {
132
+ results.push({ title, url: decodedUrl, snippet });
133
+ }
134
+ }
135
+
136
+ return results;
137
+ } catch (err) {
138
+ console.warn('[web-search] DDG backend error:', err);
139
+ return [];
140
+ }
141
+ }
142
+
143
+ // ─── Brave Backend ───────────────────────────────────────────────────────────
144
+
145
+ export async function searchBrave(
146
+ query: string,
147
+ apiKey: string,
148
+ limit: number
149
+ ): Promise<SearchResult[]> {
150
+ try {
151
+ const url = `https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(query)}&count=${limit}`;
152
+ const res = await fetch(url, {
153
+ headers: {
154
+ 'X-Subscription-Token': apiKey,
155
+ Accept: 'application/json',
156
+ },
157
+ });
158
+
159
+ if (!res.ok) {
160
+ console.warn(`[web-search] Brave API error: HTTP ${res.status}`);
161
+ return [];
162
+ }
163
+
164
+ const data = await res.json() as any;
165
+ const webResults: any[] = data?.web?.results ?? [];
166
+
167
+ return webResults.slice(0, limit).map((r: any) => ({
168
+ title: r.title ?? '',
169
+ url: r.url ?? '',
170
+ snippet: r.description ?? '',
171
+ }));
172
+ } catch (err) {
173
+ console.warn('[web-search] Brave backend error:', err);
174
+ return [];
175
+ }
176
+ }
177
+
178
+ // ─── Tavily Backend ──────────────────────────────────────────────────────────
179
+
180
+ export async function searchTavily(
181
+ query: string,
182
+ apiKey: string,
183
+ limit: number
184
+ ): Promise<SearchResult[]> {
185
+ try {
186
+ const res = await fetch('https://api.tavily.com/search', {
187
+ method: 'POST',
188
+ headers: { 'Content-Type': 'application/json' },
189
+ body: JSON.stringify({ api_key: apiKey, query, max_results: limit }),
190
+ });
191
+
192
+ if (!res.ok) {
193
+ console.warn(`[web-search] Tavily API error: HTTP ${res.status}`);
194
+ return [];
195
+ }
196
+
197
+ const data = await res.json() as any;
198
+ const results: any[] = data?.results ?? [];
199
+
200
+ return results.slice(0, limit).map((r: any) => ({
201
+ title: r.title ?? '',
202
+ url: r.url ?? '',
203
+ snippet: r.content ?? '',
204
+ }));
205
+ } catch (err) {
206
+ console.warn('[web-search] Tavily backend error:', err);
207
+ return [];
208
+ }
209
+ }
210
+
211
+ // ─── Serper Backend ──────────────────────────────────────────────────────────
212
+
213
+ export async function searchSerper(
214
+ query: string,
215
+ apiKey: string,
216
+ limit: number
217
+ ): Promise<SearchResult[]> {
218
+ try {
219
+ const res = await fetch('https://google.serper.dev/search', {
220
+ method: 'POST',
221
+ headers: {
222
+ 'X-API-KEY': apiKey,
223
+ 'Content-Type': 'application/json',
224
+ },
225
+ body: JSON.stringify({ q: query, num: limit }),
226
+ });
227
+
228
+ if (!res.ok) {
229
+ console.warn(`[web-search] Serper API error: HTTP ${res.status}`);
230
+ return [];
231
+ }
232
+
233
+ const data = await res.json() as any;
234
+ const organic: any[] = data?.organic ?? [];
235
+
236
+ return organic.slice(0, limit).map((r: any) => ({
237
+ title: r.title ?? '',
238
+ url: r.link ?? '',
239
+ snippet: r.snippet ?? '',
240
+ }));
241
+ } catch (err) {
242
+ console.warn('[web-search] Serper backend error:', err);
243
+ return [];
244
+ }
245
+ }
246
+
247
+ // ─── Exa Backend ─────────────────────────────────────────────────────────────
248
+
249
+ export async function searchExa(
250
+ query: string,
251
+ apiKey: string,
252
+ limit: number
253
+ ): Promise<SearchResult[]> {
254
+ try {
255
+ const res = await fetch('https://api.exa.ai/search', {
256
+ method: 'POST',
257
+ headers: {
258
+ 'x-api-key': apiKey,
259
+ 'Content-Type': 'application/json',
260
+ },
261
+ body: JSON.stringify({ query, numResults: limit, useAutoprompt: true }),
262
+ });
263
+
264
+ if (!res.ok) {
265
+ console.warn(`[web-search] Exa API error: HTTP ${res.status}`);
266
+ return [];
267
+ }
268
+
269
+ const data = await res.json() as any;
270
+ const results: any[] = data?.results ?? [];
271
+
272
+ return results.slice(0, limit).map((r: any) => {
273
+ const fullText: string = r.text ?? '';
274
+ return {
275
+ title: r.title ?? '',
276
+ url: r.url ?? '',
277
+ snippet: fullText.length > 200 ? fullText.slice(0, 200) : fullText,
278
+ };
279
+ });
280
+ } catch (err) {
281
+ console.warn('[web-search] Exa backend error:', err);
282
+ return [];
283
+ }
284
+ }
285
+
286
+ // ─── SearXNG Backend ─────────────────────────────────────────────────────────
287
+
288
+ export async function searchSearXNG(
289
+ query: string,
290
+ baseUrl: string,
291
+ limit: number
292
+ ): Promise<SearchResult[]> {
293
+ try {
294
+ const url = `${baseUrl}/search?q=${encodeURIComponent(query)}&format=json`;
295
+ const res = await fetch(url);
296
+
297
+ if (!res.ok) {
298
+ console.warn(`[web-search] SearXNG API error: HTTP ${res.status}`);
299
+ return [];
300
+ }
301
+
302
+ const data = await res.json() as any;
303
+ const results: any[] = data?.results ?? [];
304
+
305
+ return results.slice(0, limit).map((r: any) => ({
306
+ title: r.title ?? '',
307
+ url: r.url ?? '',
308
+ snippet: r.content ?? '',
309
+ }));
310
+ } catch (err) {
311
+ console.warn('[web-search] SearXNG backend error:', err);
312
+ return [];
313
+ }
314
+ }
315
+
316
+ // ─── SearXNG Health Check ─────────────────────────────────────────────────────
317
+
318
+ export async function checkSearXNGHealth(baseUrl: string): Promise<string> {
319
+ try {
320
+ await fetch(`${baseUrl}/search?q=test&format=json`, {
321
+ signal: AbortSignal.timeout(3000),
322
+ });
323
+ return 'searxng';
324
+ } catch {
325
+ console.warn(
326
+ `[web-search] SearXNG unreachable at ${baseUrl}, falling back to DuckDuckGo`
327
+ );
328
+ return 'duckduckgo';
329
+ }
330
+ }
331
+
332
+ // ─── resolveApiKey ────────────────────────────────────────────────────────────
333
+
334
+ export function resolveApiKey(config: SearchConfig): string | undefined {
335
+ if (config.apiKey) return config.apiKey;
336
+
337
+ const envVars: Record<string, string> = {
338
+ brave: 'BRAVE_API_KEY',
339
+ tavily: 'TAVILY_API_KEY',
340
+ serper: 'SERPER_API_KEY',
341
+ exa: 'EXA_API_KEY',
342
+ };
343
+
344
+ const varName = envVars[config.provider ?? ''];
345
+ if (varName && process.env[varName]) {
346
+ return process.env[varName];
347
+ }
348
+
349
+ return undefined;
350
+ }
351
+
352
+ // ─── searchBackend ────────────────────────────────────────────────────────────
353
+
354
+ export async function searchBackend(
355
+ provider: string,
356
+ config: SearchConfig,
357
+ params: { query: string; limit: number }
358
+ ): Promise<SearchResult[]> {
359
+ const { query, limit } = params;
360
+
361
+ if (!query.trim()) return [];
362
+
363
+ if (provider === 'duckduckgo') {
364
+ return searchDuckDuckGo(query, limit);
365
+ }
366
+
367
+ // API-key providers
368
+ const needsKey = ['brave', 'tavily', 'serper', 'exa'];
369
+ if (needsKey.includes(provider)) {
370
+ const apiKey = resolveApiKey(config);
371
+ if (!apiKey) {
372
+ console.warn(
373
+ `[web-search] No API key configured for ${provider} search`
374
+ );
375
+ return [];
376
+ }
377
+
378
+ if (provider === 'brave') return searchBrave(query, apiKey, limit);
379
+ if (provider === 'tavily') return searchTavily(query, apiKey, limit);
380
+ if (provider === 'serper') return searchSerper(query, apiKey, limit);
381
+ if (provider === 'exa') return searchExa(query, apiKey, limit);
382
+ }
383
+
384
+ if (provider === 'searxng') {
385
+ const baseUrl = config.searxngBaseUrl ?? 'http://localhost:8080';
386
+ return searchSearXNG(query, baseUrl, limit);
387
+ }
388
+
389
+ console.warn(`[web-search] Unknown provider: ${provider}`);
390
+ return [];
391
+ }
392
+
393
+ // ─── Extension Default Export ─────────────────────────────────────────────────
394
+
395
+ export default async function webSearchExtension(pi: ExtensionAPI): Promise<void> {
396
+ const config = (pi.getConfig() as any);
397
+ const searchConfig: SearchConfig = config?.search ?? {};
398
+ const rawProvider: string = searchConfig.provider ?? 'none';
399
+
400
+ // SearXNG startup health-check — fall back to DDG if unreachable
401
+ let resolvedProvider = rawProvider;
402
+ if (rawProvider === 'searxng') {
403
+ const baseUrl = searchConfig.searxngBaseUrl ?? 'http://localhost:8080';
404
+ resolvedProvider = await checkSearXNGHealth(baseUrl);
405
+ }
406
+
407
+ // ── Register fetch_url (always, regardless of provider) ───────────────────
408
+
409
+ pi.registerTool({
410
+ name: 'fetch_url',
411
+ label: 'Fetch URL',
412
+ description:
413
+ 'Fetch any URL and return the clean readable text content. Uses Readability to extract article text; falls back to HTML-stripped text for non-article pages.',
414
+ parameters: Type.Object({
415
+ url: Type.String({ description: 'The URL to fetch' }),
416
+ }),
417
+ execute: async (_id, params) => {
418
+ const result = await fetchAndExtract(params.url);
419
+ return {
420
+ content: [{ type: 'text', text: result }],
421
+ };
422
+ },
423
+ });
424
+
425
+ // ── Register web_search (only when provider ≠ "none") ─────────────────────
426
+
427
+ if (resolvedProvider === 'none' || resolvedProvider === '' || !resolvedProvider) {
428
+ return;
429
+ }
430
+
431
+ // Capture resolved provider in closure for tool execution
432
+ const activeProvider = resolvedProvider;
433
+ const activeConfig = searchConfig;
434
+
435
+ pi.registerTool({
436
+ name: 'web_search',
437
+ label: 'Web Search',
438
+ description:
439
+ 'Search the web and return a list of results with title, URL, and snippet. Uses the configured search backend.',
440
+ parameters: Type.Object({
441
+ query: Type.String({ description: 'The search query' }),
442
+ limit: Type.Optional(
443
+ Type.Number({ description: 'Maximum number of results (default: 10, max: 20)' })
444
+ ),
445
+ }),
446
+ execute: async (_id, params) => {
447
+ const query = params.query ?? '';
448
+ const limit = Math.min(params.limit ?? 10, 20);
449
+
450
+ try {
451
+ const results = await searchBackend(activeProvider, activeConfig, {
452
+ query,
453
+ limit,
454
+ });
455
+ return {
456
+ content: [{ type: 'text', text: JSON.stringify(results) }],
457
+ };
458
+ } catch (err) {
459
+ console.warn('[web-search] web_search error:', err);
460
+ return {
461
+ content: [{ type: 'text', text: JSON.stringify([]) }],
462
+ };
463
+ }
464
+ },
465
+ });
466
+ }
package/package.json CHANGED
@@ -1,7 +1,8 @@
1
1
  {
2
2
  "name": "reeboot",
3
- "version": "1.0.0",
3
+ "version": "1.3.0",
4
4
  "description": "Personal AI agent running locally, reachable via WhatsApp, Signal, or WebChat",
5
+ "author": "Bogdan Nenu",
5
6
  "license": "MIT",
6
7
  "type": "module",
7
8
  "bin": {
@@ -41,16 +42,18 @@
41
42
  "@fastify/static": "^9.0.0",
42
43
  "@fastify/websocket": "^11.2.0",
43
44
  "@mariozechner/pi-coding-agent": "latest",
45
+ "@mozilla/readability": "^0.5.0",
44
46
  "@types/ws": "^8.18.1",
45
47
  "@whiskeysockets/baileys": "6.7.21",
46
48
  "better-sqlite3": "^11.6.0",
47
49
  "commander": "^12.1.0",
50
+ "cron-parser": "^4.9.0",
48
51
  "drizzle-kit": "^0.20.0",
49
52
  "drizzle-orm": "^0.31.0",
50
53
  "fastify": "^5.0.0",
51
54
  "inquirer": "^12.0.0",
55
+ "linkedom": "^0.16.11",
52
56
  "nanoid": "^5.0.0",
53
- "node-cron": "^4.2.1",
54
57
  "pino": "^9.0.0",
55
58
  "pino-pretty": "^11.0.0",
56
59
  "qrcode-terminal": "^0.12.0",
@@ -60,7 +63,6 @@
60
63
  "devDependencies": {
61
64
  "@types/better-sqlite3": "^7.6.0",
62
65
  "@types/node": "^20.11.0",
63
- "@types/node-cron": "^3.0.11",
64
66
  "tsx": "^4.7.0",
65
67
  "typescript": "^5.4.0",
66
68
  "vitest": "^1.3.0"
@@ -0,0 +1,131 @@
1
+ ---
2
+ name: docker
3
+ description: Docker and Docker Compose operations via docker CLI — manage containers, images, volumes, networks, and compose services. Use when working with Docker containers or Docker Compose deployments.
4
+ ---
5
+
6
+ # Docker
7
+
8
+ Wraps the `docker` and `docker compose` CLIs for container management, image operations, and Docker Compose service orchestration.
9
+
10
+ ## Setup
11
+
12
+ 1. Install Docker:
13
+ - **macOS/Windows**: Install [Docker Desktop](https://www.docker.com/products/docker-desktop/)
14
+ - **Linux**: Install [Docker Engine](https://docs.docker.com/engine/install/)
15
+
16
+ 2. Verify installation:
17
+ ```bash
18
+ docker info
19
+ docker --version
20
+ docker compose version
21
+ ```
22
+
23
+ 3. Ensure your user has Docker permissions (Linux):
24
+ ```bash
25
+ sudo usermod -aG docker $USER
26
+ # Log out and back in for this to take effect
27
+ ```
28
+
29
+ ## Usage
30
+
31
+ ### Containers
32
+
33
+ ```bash
34
+ # List running containers
35
+ docker ps
36
+
37
+ # List all containers (including stopped)
38
+ docker ps -a
39
+
40
+ # Start a container
41
+ docker start <container_name_or_id>
42
+
43
+ # Stop a container
44
+ docker stop <container_name_or_id>
45
+
46
+ # Remove a container
47
+ docker rm <container_name_or_id>
48
+
49
+ # View container logs
50
+ docker logs <container_name_or_id>
51
+ docker logs -f <container_name_or_id> # follow (stream) logs
52
+ docker logs --tail 100 <container> # last 100 lines
53
+
54
+ # Execute a command in a running container
55
+ docker exec -it <container> bash
56
+ docker exec <container> cat /etc/hosts
57
+
58
+ # Inspect container details
59
+ docker inspect <container>
60
+ ```
61
+
62
+ ### Images
63
+
64
+ ```bash
65
+ # List images
66
+ docker images
67
+
68
+ # Pull an image
69
+ docker pull nginx:latest
70
+ docker pull postgres:16
71
+
72
+ # Build an image from Dockerfile
73
+ docker build -t myapp:latest .
74
+ docker build -t myapp:1.0 -f Dockerfile.prod .
75
+
76
+ # Remove an image
77
+ docker rmi nginx:latest
78
+
79
+ # Tag an image
80
+ docker tag myapp:latest registry.example.com/myapp:1.0
81
+
82
+ # Push an image
83
+ docker push registry.example.com/myapp:1.0
84
+ ```
85
+
86
+ ### Docker Compose
87
+
88
+ ```bash
89
+ # Start services (detached)
90
+ docker compose up -d
91
+
92
+ # Start specific service
93
+ docker compose up -d postgres
94
+
95
+ # Stop services
96
+ docker compose down
97
+
98
+ # Stop and remove volumes
99
+ docker compose down -v
100
+
101
+ # View service logs
102
+ docker compose logs -f api
103
+
104
+ # Restart a service
105
+ docker compose restart api
106
+
107
+ # Scale a service
108
+ docker compose up -d --scale worker=3
109
+
110
+ # Rebuild and restart
111
+ docker compose up -d --build
112
+
113
+ # List compose services
114
+ docker compose ps
115
+ ```
116
+
117
+ ### Volumes and Networks
118
+
119
+ ```bash
120
+ # List volumes
121
+ docker volume ls
122
+
123
+ # Inspect a volume
124
+ docker volume inspect myapp_data
125
+
126
+ # List networks
127
+ docker network ls
128
+
129
+ # Inspect a network
130
+ docker network inspect bridge
131
+ ```