pplx-zero 2.4.0 → 2.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -11,7 +11,8 @@
  <p align="center">
  <a href="https://www.npmjs.com/package/pplx-zero"><img src="https://img.shields.io/npm/v/pplx-zero.svg?color=00d4ff" alt="npm"></a>
  <a href="https://aur.archlinux.org/packages/pplx-zero"><img src="https://img.shields.io/aur/version/pplx-zero?color=00d4ff" alt="AUR"></a>
- <img src="https://img.shields.io/badge/bun-runtime-f9f1e1" alt="Bun">
+ <img src="https://img.shields.io/npm/dw/pplx-zero?color=00d4ff&label=downloads" alt="npm downloads">
+ <img src="https://img.shields.io/badge/bun-runtime-fbf0df?logo=bun" alt="Bun">
  <img src="https://img.shields.io/badge/license-MIT-blue" alt="License">
  </p>

@@ -45,7 +46,7 @@ yay -S pplx-zero # arch linux
  ## Setup

  ```bash
- export PERPLEXITY_API_KEY="pplx-..."
+ export PERPLEXITY_API_KEY="pplx-..." # or PERPLEXITY_AI_API_KEY
  ```

  Get your key at [perplexity.ai/settings/api](https://www.perplexity.ai/settings/api)
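The updated Setup line notes that the key can also be supplied as PERPLEXITY_AI_API_KEY. A minimal sketch of how a CLI could resolve either variable, assuming only what the README comment states (the helper name resolveApiKey is illustrative, not part of the package):

```ts
// Illustrative sketch: accept either environment variable for the Perplexity key.
// Assumption: pplx-zero treats PERPLEXITY_AI_API_KEY as an alias; only the README comment confirms both names.
function resolveApiKey(): string {
  const key = process.env.PERPLEXITY_API_KEY ?? process.env.PERPLEXITY_AI_API_KEY;
  if (!key) {
    throw new Error('Set PERPLEXITY_API_KEY (or PERPLEXITY_AI_API_KEY) before running pplx-zero.');
  }
  return key;
}
```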
package/bin/pplx.js CHANGED
@@ -1,8 +1,8 @@
  #!/usr/bin/env node

  import { spawn, execSync } from 'child_process';
- import { dirname, join } from 'path';
- import { fileURLToPath } from 'url';
+ import { dirname, join } from 'node:path';
+ import { fileURLToPath } from 'node:url';

  const __dirname = dirname(fileURLToPath(import.meta.url));

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pplx-zero",
- "version": "2.4.0",
+ "version": "2.4.2",
  "description": "Fast, minimal Perplexity AI CLI with local RAG. Stream answers, search your notes, analyze files. Zero bloat.",
  "author": "kenzo",
  "license": "MIT",
@@ -15,10 +15,10 @@
  "test": "bun test"
  },
  "dependencies": {
- "zod": "^4.0.0"
+ "zod": "^4.3.6"
  },
  "devDependencies": {
- "@types/bun": "latest"
+ "@types/bun": "^1.3.8"
  },
  "peerDependencies": {
  "typescript": "^5"
package/src/files.ts CHANGED
@@ -26,11 +26,11 @@ export async function encodeFile(path: string): Promise<FileAttachment> {
  if (path.includes('..')) {
  throw new Error('Path traversal not allowed');
  }
-
+
  const resolved = resolve(path);
  const ext = extname(resolved).toLowerCase();
  const mimeType = MIME_TYPES[ext];
-
+
  if (!mimeType) {
  throw new Error(`Unsupported file type: ${ext}`);
  }
package/src/history.ts CHANGED
@@ -62,10 +62,10 @@ export async function appendHistory(entry: Omit<HistoryEntry, 'ts'>): Promise<vo
  export async function readHistory(limit = 20): Promise<HistoryEntry[]> {
  const file = Bun.file(HISTORY_PATH);
  if (!(await file.exists())) return [];
-
+
  const text = await file.text();
  const lines = text.trim().split('\n').filter(l => l.length > 0);
-
+
  return lines
  .map(line => {
  try {
package/src/index.ts CHANGED
@@ -76,7 +76,7 @@ if (values.history) {

  if (values.ingest) {
  const target = positionals[0];
-
+
  try {
  if (target) {
  const stats = await ingestPath(target);
@@ -90,7 +90,7 @@ if (values.ingest) {
  const stats = await ingestDirectory();
  console.log(`Done! Added: ${stats.added}, Updated: ${stats.updated}, Skipped: ${stats.skipped}`);
  }
-
+
  console.log(`Total documents: ${getDocCount()}`);
  } catch (err) {
  console.error(fmt.error(err instanceof Error ? err.message : 'Ingest failed'));
@@ -107,16 +107,16 @@ if (values.local) {
  console.error(fmt.error('No query provided for local search.'));
  process.exit(2);
  }
-
+
  const results = searchForRag(query);
-
+
  if (results.length === 0) {
  console.log('No local documents match. Proceeding with Perplexity only...\n');
  } else {
  if (!values.json) {
  console.log(`${fmt.model('local')} Found ${results.length} relevant doc(s), using as context...\n`);
  }
-
+
  ragContext = 'Context from user\'s knowledge base:\n---\n';
  for (const r of results) {
  ragContext += `[${r.title}]:\n${r.content}\n\n`;
@@ -146,7 +146,9 @@ if (ragContext) {
  if (values.continue) {
  const last = await getLastEntry();
  if (last) {
- const context = `Previous question: "${last.q}"\nPrevious answer: "${last.a.slice(0, 500)}..."\n\nFollow-up question: ${query || 'Continue and elaborate on the previous answer.'}`;
+ const userQuery = positionals.join(' ') || 'Continue and elaborate on the previous answer.';
+ const historyContext = `Previous question: "${last.q}"\nPrevious answer: "${last.a.slice(0, 500)}..."\n\nFollow-up question: ${userQuery}`;
+ const context = ragContext ? ragContext + historyContext : historyContext;
  query = context;
  if (!values.json) {
  await write(fmt.continuing(last.q));
@@ -221,7 +223,7 @@ await search(query, model, {
  if (values.output) {
  const ext = values.output.split('.').pop()?.toLowerCase();
  let content = '';
-
+
  if (ext === 'md') {
  content = `# ${positionals.join(' ') || 'Query'}\n\n`;
  content += `**Model:** ${model}\n`;
@@ -242,7 +244,7 @@ await search(query, model, {
  });
  }
  }
-
+
  await Bun.write(values.output, content);
  if (!values.json) {
  await writeLn(`\n${fmt.model('saved')} ${values.output}`);
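The @@ -146 hunk above changes how --continue builds its prompt: the follow-up text now comes from the raw positionals, and any local RAG context is prepended rather than dropped. A runnable sketch of the resulting prompt assembly, using hypothetical values for the history entry, CLI arguments, and RAG context (in the package these come from getLastEntry(), the argument parser, and searchForRag()):

```ts
// Hypothetical inputs standing in for getLastEntry(), parsed positionals, and searchForRag() output.
const last = { q: 'What is SQLite FTS5?', a: "FTS5 is SQLite's full-text search extension..." };
const positionals = ['how', 'does', 'bm25', 'ranking', 'work'];
const ragContext = "Context from user's knowledge base:\n---\n[notes]: bm25 weights rarer terms higher...\n\n";

// Prompt assembly as added in 2.4.2 (mirrors the + lines in the diff).
const userQuery = positionals.join(' ') || 'Continue and elaborate on the previous answer.';
const historyContext = `Previous question: "${last.q}"\nPrevious answer: "${last.a.slice(0, 500)}..."\n\nFollow-up question: ${userQuery}`;
const query = ragContext ? ragContext + historyContext : historyContext;
console.log(query);
```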
package/src/output.ts CHANGED
@@ -14,12 +14,12 @@ export const fmt = {
  searching: () => `${c.dim}Searching...${c.reset}\n`,
  error: (msg: string) => `${c.red}Error: ${msg}${c.reset}\n`,
  citation: (i: number, url: string) => `${c.dim} ${i}. ${url}${c.reset}`,
- stats: (tokens: number, ms: number) =>
+ stats: (tokens: number, ms: number) =>
  `\n${c.gray}[${tokens} tokens, ${(ms / 1000).toFixed(1)}s]${c.reset}\n`,
  sources: () => `\n${c.yellow}Sources:${c.reset}`,
  historyEntry: (ts: number, model: string, query: string) => {
- const date = new Date(ts).toLocaleString('en-US', {
- month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit'
+ const date = new Date(ts).toLocaleString('en-US', {
+ month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit'
  });
  return `${c.dim}${date}${c.reset} ${c.cyan}[${model}]${c.reset} ${query}`;
  },
package/src/rag.ts CHANGED
@@ -12,9 +12,9 @@ let db: Database | null = null;

  function getDb(): Database {
  if (db) return db;
-
+
  db = new Database(DB_PATH);
-
+
  db.exec(`
  CREATE VIRTUAL TABLE IF NOT EXISTS docs_fts USING fts5(
  path,
@@ -23,7 +23,7 @@ function getDb(): Database {
  tokenize = 'porter unicode61'
  );
  `);
-
+
  db.exec(`
  CREATE TABLE IF NOT EXISTS docs (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -32,7 +32,7 @@ function getDb(): Database {
  ingested_at INTEGER DEFAULT (unixepoch())
  );
  `);
-
+
  return db;
  }

@@ -56,61 +56,61 @@ export async function ensureKnowledgeDir(): Promise<void> {
  export async function ingestFile(filePath: string): Promise<'added' | 'updated' | 'skipped'> {
  const db = getDb();
  const content = await Bun.file(filePath).text();
-
+
  if (!content.trim()) return 'skipped';
-
+
  const title = filePath.split('/').pop()?.replace(/\.(md|txt)$/, '') || filePath;
-
+
  const existing = db.query('SELECT id FROM docs WHERE path = ?').get(filePath) as { id: number } | null;
-
+
  if (existing) {
  db.run('DELETE FROM docs_fts WHERE rowid = ?', [existing.id]);
  db.run('UPDATE docs SET title = ?, ingested_at = unixepoch() WHERE id = ?', [title, existing.id]);
- db.run('INSERT INTO docs_fts (rowid, path, title, content) VALUES (?, ?, ?, ?)',
+ db.run('INSERT INTO docs_fts (rowid, path, title, content) VALUES (?, ?, ?, ?)',
  [existing.id, filePath, title, content]);
  return 'updated';
  }
-
+
  const result = db.run('INSERT INTO docs (path, title) VALUES (?, ?)', [filePath, title]);
  const docId = result.lastInsertRowid;
- db.run('INSERT INTO docs_fts (rowid, path, title, content) VALUES (?, ?, ?, ?)',
+ db.run('INSERT INTO docs_fts (rowid, path, title, content) VALUES (?, ?, ?, ?)',
  [docId, filePath, title, content]);
-
+
  return 'added';
  }

  export async function ingestDirectory(dir?: string): Promise<IngestStats> {
  await ensureKnowledgeDir();
-
+
  const targetDir = dir || KNOWLEDGE_DIR;
  const glob = new Glob('**/*.{md,txt}');
  const files: string[] = [];
-
+
  for await (const file of glob.scan({ cwd: targetDir, absolute: true })) {
  files.push(file);
  }
-
+
  const stats: IngestStats = { added: 0, updated: 0, skipped: 0 };
-
+
  for (const file of files) {
  const result = await ingestFile(file);
  stats[result]++;
  }
-
+
  return stats;
  }

  export function search(query: string, limit = 5): SearchResult[] {
  const db = getDb();
-
+
  const ftsQuery = query
  .trim()
  .split(/\s+/)
  .map(word => `${word}*`)
  .join(' OR ');
-
+
  const stmt = db.prepare(`
- SELECT
+ SELECT
  path,
  title,
  snippet(docs_fts, 2, '>', '<', '...', 40) as snippet,
@@ -120,7 +120,7 @@ export function search(query: string, limit = 5): SearchResult[] {
  ORDER BY score
  LIMIT ?
  `);
-
+
  try {
  return stmt.all(ftsQuery, limit) as SearchResult[];
  } catch {
@@ -145,13 +145,13 @@ function truncateUtf8Safe(str: string, maxLen: number): string {

  export function searchForRag(query: string, limit = 3, maxChars = 4000): RagContext[] {
  const db = getDb();
-
+
  const ftsQuery = query
  .trim()
  .split(/\s+/)
  .map(word => `${word}*`)
  .join(' OR ');
-
+
  const stmt = db.prepare(`
  SELECT title, content
  FROM docs_fts
@@ -159,16 +159,16 @@ export function searchForRag(query: string, limit = 3, maxChars = 4000): RagCont
  ORDER BY bm25(docs_fts)
  LIMIT ?
  `);
-
+
  try {
  const results = stmt.all(ftsQuery, limit) as { title: string; content: string }[];
  let totalChars = 0;
  const truncated: RagContext[] = [];
-
+
  for (const r of results) {
  const remaining = maxChars - totalChars;
  if (remaining <= 0) break;
-
+
  const content = truncateUtf8Safe(r.content, remaining);
  truncated.push({
  title: r.title,
@@ -176,7 +176,7 @@ export function searchForRag(query: string, limit = 3, maxChars = 4000): RagCont
  });
  totalChars += content.length;
  }
-
+
  return truncated;
  } catch {
  return [];
@@ -213,9 +213,9 @@ export async function ingestPath(target: string): Promise<IngestStats> {
  await ensureKnowledgeDir();
  const stats: IngestStats = { added: 0, updated: 0, skipped: 0 };
  const resolved = resolve(target);
-
+
  const isGlob = target.includes('*');
-
+
  if (isGlob) {
  const glob = new Glob(target);
  for await (const file of glob.scan({ cwd: process.cwd(), absolute: true })) {
@@ -232,14 +232,14 @@ export async function ingestPath(target: string): Promise<IngestStats> {
  }
  return stats;
  }
-
+
  let info;
  try {
  info = await stat(resolved);
  } catch {
  throw new Error(`Path not found: ${target}`);
  }
-
+
  if (info.isDirectory()) {
  console.log(`Indexing directory: ${resolved}`);
  const glob = new Glob('**/*.{md,txt}');
@@ -251,7 +251,7 @@ export async function ingestPath(target: string): Promise<IngestStats> {
  }
  return stats;
  }
-
+
  const ext = extname(resolved).toLowerCase();
  if (ext === '.pdf') {
  throw new Error('PDF indexing not supported (requires text extraction library). Use -f to send PDFs to Perplexity API instead.');
@@ -259,11 +259,11 @@ export async function ingestPath(target: string): Promise<IngestStats> {
  if (!SUPPORTED_EXTS.has(ext)) {
  throw new Error(`Unsupported file type: ${ext}. Supported: .md, .txt`);
  }
-
+
  const dest = await copyToKnowledge(resolved);
  const result = await ingestFile(dest);
  stats[result]++;
  console.log(`${result}: ${basename(resolved)} → ~/.pplx/knowledge/`);
-
+
  return stats;
  }
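Both search() and searchForRag() in the hunks above build their FTS5 MATCH string the same way: each whitespace-separated term becomes a prefix query and the terms are OR-ed together. A standalone sketch of that transformation (logic copied from the diff; the wrapper name toFtsQuery is illustrative, not part of the package):

```ts
// Builds an SQLite FTS5 MATCH expression from a free-text query,
// exactly as the ftsQuery variable is computed in search()/searchForRag().
function toFtsQuery(query: string): string {
  return query
    .trim()
    .split(/\s+/)
    .map(word => `${word}*`) // prefix-match each term
    .join(' OR ');           // match documents containing any term
}

console.log(toFtsQuery('bun sqlite fts')); // "bun* OR sqlite* OR fts*"
```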