pplx-zero 1.1.8 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +84 -497
- package/bin/pplx.js +28 -0
- package/package.json +28 -62
- package/src/api.test.ts +21 -0
- package/src/api.ts +116 -0
- package/src/env.ts +37 -0
- package/src/files.test.ts +71 -0
- package/src/files.ts +44 -0
- package/src/history.test.ts +84 -0
- package/src/history.ts +86 -0
- package/src/index.ts +179 -0
- package/src/output.test.ts +41 -0
- package/src/output.ts +35 -0
- package/dist/index.js +0 -10532
package/package.json
CHANGED
|
@@ -1,80 +1,46 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "pplx-zero",
|
|
3
|
-
"version": "
|
|
4
|
-
"description": "
|
|
3
|
+
"version": "2.1.0",
|
|
4
|
+
"description": "Minimal Perplexity AI CLI - search from terminal",
|
|
5
|
+
"author": "kenzo",
|
|
6
|
+
"license": "MIT",
|
|
5
7
|
"type": "module",
|
|
6
|
-
"
|
|
8
|
+
"module": "src/index.ts",
|
|
7
9
|
"bin": {
|
|
8
|
-
"pplx": "
|
|
10
|
+
"pplx": "bin/pplx.js"
|
|
9
11
|
},
|
|
10
12
|
"scripts": {
|
|
11
|
-
"
|
|
12
|
-
"dev
|
|
13
|
-
"
|
|
14
|
-
"build:legacy": "rm -rf dist && bun build src/cli.ts --target node --outdir dist",
|
|
15
|
-
"build:binary": "bun build --compile src/cli/index.ts --outfile=dist/pplx",
|
|
16
|
-
"build:binary:legacy": "bun build --compile src/cli.ts --outfile=dist/pplx-legacy",
|
|
17
|
-
"test": "bun test",
|
|
18
|
-
"test:watch": "bun test --watch",
|
|
19
|
-
"typecheck": "bun tsc --noEmit",
|
|
20
|
-
"lint": "bun run --bun eslint src/**/*.ts",
|
|
21
|
-
"lint:fix": "bun run --bun eslint src/**/*.ts --fix",
|
|
22
|
-
"clean": "rm -rf dist",
|
|
23
|
-
"dev:debug": "bun --inspect src/cli/index.ts",
|
|
24
|
-
"dev:debug:legacy": "bun --inspect src/cli.ts"
|
|
13
|
+
"build": "bun build src/index.ts --compile --outfile=pplx",
|
|
14
|
+
"dev": "bun run src/index.ts",
|
|
15
|
+
"test": "bun test"
|
|
25
16
|
},
|
|
26
17
|
"dependencies": {
|
|
27
|
-
"
|
|
28
|
-
"abort-controller": "^3.0.0",
|
|
29
|
-
"commander": "^12.0.0",
|
|
30
|
-
"dotenv": "^16.3.1",
|
|
31
|
-
"zod": "^3.22.4"
|
|
18
|
+
"zod": "^4.0.0"
|
|
32
19
|
},
|
|
33
20
|
"devDependencies": {
|
|
34
|
-
"@types/
|
|
35
|
-
"typescript": "^5.0.0",
|
|
36
|
-
"bun-types": "latest",
|
|
37
|
-
"@typescript-eslint/eslint-plugin": "^6.0.0",
|
|
38
|
-
"@typescript-eslint/parser": "^6.0.0",
|
|
39
|
-
"eslint": "^8.0.0"
|
|
21
|
+
"@types/bun": "latest"
|
|
40
22
|
},
|
|
41
|
-
"
|
|
42
|
-
"
|
|
23
|
+
"peerDependencies": {
|
|
24
|
+
"typescript": "^5"
|
|
43
25
|
},
|
|
44
|
-
"
|
|
45
|
-
"
|
|
46
|
-
"
|
|
47
|
-
"search",
|
|
48
|
-
"ai",
|
|
49
|
-
"cli",
|
|
50
|
-
"command-line",
|
|
51
|
-
"tool",
|
|
52
|
-
"typescript",
|
|
53
|
-
"bun",
|
|
54
|
-
"api",
|
|
55
|
-
"minimal",
|
|
56
|
-
"fast",
|
|
57
|
-
"productivity",
|
|
58
|
-
"zero-config",
|
|
59
|
-
"multimodal",
|
|
60
|
-
"attachments",
|
|
61
|
-
"images",
|
|
62
|
-
"documents",
|
|
63
|
-
"sonar",
|
|
64
|
-
"reasoning",
|
|
65
|
-
"research",
|
|
66
|
-
"async"
|
|
26
|
+
"files": [
|
|
27
|
+
"src",
|
|
28
|
+
"bin"
|
|
67
29
|
],
|
|
68
|
-
"author": "Kenzo",
|
|
69
|
-
"license": "MIT",
|
|
70
30
|
"repository": {
|
|
71
31
|
"type": "git",
|
|
72
32
|
"url": "git+https://github.com/codewithkenzo/pplx-zero.git"
|
|
73
33
|
},
|
|
74
|
-
"
|
|
75
|
-
"
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
34
|
+
"bugs": {
|
|
35
|
+
"url": "https://github.com/codewithkenzo/pplx-zero/issues"
|
|
36
|
+
},
|
|
37
|
+
"homepage": "https://github.com/codewithkenzo/pplx-zero#readme",
|
|
38
|
+
"keywords": [
|
|
39
|
+
"perplexity",
|
|
40
|
+
"ai",
|
|
41
|
+
"search",
|
|
42
|
+
"cli",
|
|
43
|
+
"terminal",
|
|
44
|
+
"bun"
|
|
79
45
|
]
|
|
80
|
-
}
|
|
46
|
+
}
|
package/src/api.test.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { test, expect, describe } from 'bun:test';
|
|
2
|
+
import { MODELS, type Model } from './api';
|
|
3
|
+
|
|
4
|
+
describe('MODELS', () => {
|
|
5
|
+
test('includes all expected models', () => {
|
|
6
|
+
expect(MODELS).toContain('sonar');
|
|
7
|
+
expect(MODELS).toContain('sonar-pro');
|
|
8
|
+
expect(MODELS).toContain('sonar-reasoning');
|
|
9
|
+
expect(MODELS).toContain('sonar-reasoning-pro');
|
|
10
|
+
expect(MODELS).toContain('sonar-deep-research');
|
|
11
|
+
});
|
|
12
|
+
|
|
13
|
+
test('has exactly 5 models', () => {
|
|
14
|
+
expect(MODELS).toHaveLength(5);
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
test('Model type matches MODELS array', () => {
|
|
18
|
+
const model: Model = MODELS[0]!;
|
|
19
|
+
expect(MODELS.includes(model)).toBe(true);
|
|
20
|
+
});
|
|
21
|
+
});
|
package/src/api.ts
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { getEnv } from './env';
import type { FileAttachment } from './files';

// Perplexity chat-completions endpoint (used in streaming mode by search()).
const API_URL = 'https://api.perplexity.ai/chat/completions';

/** Models accepted by the CLI; `as const` keeps `Model` a literal union. */
export const MODELS = ['sonar', 'sonar-pro', 'sonar-reasoning', 'sonar-reasoning-pro', 'sonar-deep-research'] as const;
export type Model = (typeof MODELS)[number];

/** One citation returned alongside an answer. */
export interface SearchResult {
  title: string;
  url: string;
  date?: string;
}

/** Callbacks driven while the response stream is consumed. */
export interface StreamCallbacks {
  // Invoked once per streamed content delta, in arrival order.
  onContent: (text: string) => void;
  // Invoked after the stream ends, with accumulated citations and token usage.
  onDone: (citations: SearchResult[], usage: { prompt_tokens: number; completion_tokens: number }) => void;
  // Invoked on HTTP-level failure (non-OK status or missing body).
  onError: (error: Error) => void;
}

// One element of a multimodal user message: either plain text or an
// attached file reference (shape per the Perplexity messages API).
interface MessageContent {
  type: 'text' | 'file_url';
  text?: string;
  file_url?: { url: string };
  file_name?: string;
}
|
|
27
|
+
|
|
28
|
+
function buildMessages(query: string, file?: FileAttachment): { role: string; content: string | MessageContent[] }[] {
|
|
29
|
+
if (!file) {
|
|
30
|
+
return [{ role: 'user', content: query }];
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
const content: MessageContent[] = [
|
|
34
|
+
{ type: 'text', text: query },
|
|
35
|
+
{
|
|
36
|
+
type: 'file_url',
|
|
37
|
+
file_url: { url: file.data },
|
|
38
|
+
file_name: file.filename,
|
|
39
|
+
},
|
|
40
|
+
];
|
|
41
|
+
|
|
42
|
+
return [{ role: 'user', content }];
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export async function search(
|
|
46
|
+
query: string,
|
|
47
|
+
model: Model,
|
|
48
|
+
callbacks: StreamCallbacks,
|
|
49
|
+
file?: FileAttachment
|
|
50
|
+
): Promise<void> {
|
|
51
|
+
const body = JSON.stringify({
|
|
52
|
+
model,
|
|
53
|
+
messages: buildMessages(query, file),
|
|
54
|
+
stream: true,
|
|
55
|
+
});
|
|
56
|
+
|
|
57
|
+
const response = await fetch(API_URL, {
|
|
58
|
+
method: 'POST',
|
|
59
|
+
headers: {
|
|
60
|
+
'Authorization': `Bearer ${getEnv().PERPLEXITY_API_KEY}`,
|
|
61
|
+
'Content-Type': 'application/json',
|
|
62
|
+
},
|
|
63
|
+
body,
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
if (!response.ok) {
|
|
67
|
+
const text = await response.text();
|
|
68
|
+
callbacks.onError(new Error(`API error ${response.status}: ${text}`));
|
|
69
|
+
return;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
if (!response.body) {
|
|
73
|
+
callbacks.onError(new Error('No response body'));
|
|
74
|
+
return;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
const reader = response.body.getReader();
|
|
78
|
+
const decoder = new TextDecoder();
|
|
79
|
+
let buffer = '';
|
|
80
|
+
let citations: SearchResult[] = [];
|
|
81
|
+
let usage = { prompt_tokens: 0, completion_tokens: 0 };
|
|
82
|
+
|
|
83
|
+
while (true) {
|
|
84
|
+
const { done, value } = await reader.read();
|
|
85
|
+
if (done) break;
|
|
86
|
+
|
|
87
|
+
buffer += decoder.decode(value, { stream: true });
|
|
88
|
+
const lines = buffer.split('\n');
|
|
89
|
+
buffer = lines.pop() || '';
|
|
90
|
+
|
|
91
|
+
for (const line of lines) {
|
|
92
|
+
if (!line.startsWith('data: ')) continue;
|
|
93
|
+
const data = line.slice(6).trim();
|
|
94
|
+
if (data === '[DONE]') continue;
|
|
95
|
+
|
|
96
|
+
try {
|
|
97
|
+
const parsed = JSON.parse(data);
|
|
98
|
+
const delta = parsed.choices?.[0]?.delta?.content;
|
|
99
|
+
if (delta) {
|
|
100
|
+
callbacks.onContent(delta);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
if (parsed.search_results) {
|
|
104
|
+
citations = parsed.search_results;
|
|
105
|
+
}
|
|
106
|
+
if (parsed.usage) {
|
|
107
|
+
usage = parsed.usage;
|
|
108
|
+
}
|
|
109
|
+
} catch {
|
|
110
|
+
continue;
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
callbacks.onDone(citations, usage);
|
|
116
|
+
}
|
package/src/env.ts
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
|
|
3
|
+
const c = {
|
|
4
|
+
reset: '\x1b[0m',
|
|
5
|
+
red: '\x1b[31m',
|
|
6
|
+
yellow: '\x1b[33m',
|
|
7
|
+
cyan: '\x1b[36m',
|
|
8
|
+
dim: '\x1b[2m',
|
|
9
|
+
} as const;
|
|
10
|
+
|
|
11
|
+
const envSchema = z.object({
|
|
12
|
+
PERPLEXITY_API_KEY: z.string().min(1),
|
|
13
|
+
});
|
|
14
|
+
|
|
15
|
+
let _env: z.infer<typeof envSchema> | null = null;
|
|
16
|
+
|
|
17
|
+
export function getEnv() {
|
|
18
|
+
if (_env) return _env;
|
|
19
|
+
|
|
20
|
+
const key = process.env.PERPLEXITY_API_KEY || process.env.PERPLEXITY_AI_API_KEY;
|
|
21
|
+
|
|
22
|
+
if (!key) {
|
|
23
|
+
console.error(`
|
|
24
|
+
${c.red}✗ Missing API Key${c.reset}
|
|
25
|
+
|
|
26
|
+
Set your Perplexity API key:
|
|
27
|
+
|
|
28
|
+
${c.cyan}export PERPLEXITY_API_KEY="pplx-..."${c.reset}
|
|
29
|
+
|
|
30
|
+
${c.dim}Get one at: https://perplexity.ai/settings/api${c.reset}
|
|
31
|
+
`);
|
|
32
|
+
process.exit(2);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
_env = { PERPLEXITY_API_KEY: key };
|
|
36
|
+
return _env;
|
|
37
|
+
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { test, expect, describe } from 'bun:test';
|
|
2
|
+
import { encodeFile, toDataUrl, type FileAttachment } from './files';
|
|
3
|
+
import { writeFile, unlink } from 'node:fs/promises';
|
|
4
|
+
import { join } from 'node:path';
|
|
5
|
+
|
|
6
|
+
const TMP_DIR = '/tmp';
|
|
7
|
+
|
|
8
|
+
describe('encodeFile', () => {
|
|
9
|
+
test('encodes text file correctly', async () => {
|
|
10
|
+
const testPath = join(TMP_DIR, 'test.txt');
|
|
11
|
+
await writeFile(testPath, 'hello world');
|
|
12
|
+
|
|
13
|
+
const result = await encodeFile(testPath);
|
|
14
|
+
|
|
15
|
+
expect(result.type).toBe('file');
|
|
16
|
+
expect(result.mimeType).toBe('text/plain');
|
|
17
|
+
expect(result.filename).toBe('test.txt');
|
|
18
|
+
expect(result.data).toBe(Buffer.from('hello world').toString('base64'));
|
|
19
|
+
|
|
20
|
+
await unlink(testPath);
|
|
21
|
+
});
|
|
22
|
+
|
|
23
|
+
test('encodes PDF as file type', async () => {
|
|
24
|
+
const testPath = join(TMP_DIR, 'test.pdf');
|
|
25
|
+
await writeFile(testPath, '%PDF-1.4 test');
|
|
26
|
+
|
|
27
|
+
const result = await encodeFile(testPath);
|
|
28
|
+
|
|
29
|
+
expect(result.type).toBe('file');
|
|
30
|
+
expect(result.mimeType).toBe('application/pdf');
|
|
31
|
+
|
|
32
|
+
await unlink(testPath);
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
test('encodes PNG as image type', async () => {
|
|
36
|
+
const testPath = join(TMP_DIR, 'test.png');
|
|
37
|
+
const pngHeader = Buffer.from([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]);
|
|
38
|
+
await writeFile(testPath, pngHeader);
|
|
39
|
+
|
|
40
|
+
const result = await encodeFile(testPath);
|
|
41
|
+
|
|
42
|
+
expect(result.type).toBe('image');
|
|
43
|
+
expect(result.mimeType).toBe('image/png');
|
|
44
|
+
|
|
45
|
+
await unlink(testPath);
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
test('throws on unsupported file type', async () => {
|
|
49
|
+
const testPath = join(TMP_DIR, 'test.xyz');
|
|
50
|
+
await writeFile(testPath, 'test');
|
|
51
|
+
|
|
52
|
+
await expect(encodeFile(testPath)).rejects.toThrow('Unsupported file type: .xyz');
|
|
53
|
+
|
|
54
|
+
await unlink(testPath);
|
|
55
|
+
});
|
|
56
|
+
});
|
|
57
|
+
|
|
58
|
+
describe('toDataUrl', () => {
|
|
59
|
+
test('creates valid data URL', () => {
|
|
60
|
+
const attachment: FileAttachment = {
|
|
61
|
+
type: 'image',
|
|
62
|
+
data: 'aGVsbG8gd29ybGQ=',
|
|
63
|
+
mimeType: 'image/png',
|
|
64
|
+
filename: 'test.png',
|
|
65
|
+
};
|
|
66
|
+
|
|
67
|
+
const result = toDataUrl(attachment);
|
|
68
|
+
|
|
69
|
+
expect(result).toBe('data:image/png;base64,aGVsbG8gd29ybGQ=');
|
|
70
|
+
});
|
|
71
|
+
});
|
package/src/files.ts
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { readFile } from 'node:fs/promises';
|
|
2
|
+
import { extname } from 'node:path';
|
|
3
|
+
|
|
4
|
+
const MIME_TYPES: Record<string, string> = {
|
|
5
|
+
'.pdf': 'application/pdf',
|
|
6
|
+
'.txt': 'text/plain',
|
|
7
|
+
'.md': 'text/markdown',
|
|
8
|
+
'.png': 'image/png',
|
|
9
|
+
'.jpg': 'image/jpeg',
|
|
10
|
+
'.jpeg': 'image/jpeg',
|
|
11
|
+
'.gif': 'image/gif',
|
|
12
|
+
'.webp': 'image/webp',
|
|
13
|
+
};
|
|
14
|
+
|
|
15
|
+
export interface FileAttachment {
|
|
16
|
+
type: 'file' | 'image';
|
|
17
|
+
data: string;
|
|
18
|
+
mimeType: string;
|
|
19
|
+
filename: string;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export async function encodeFile(path: string): Promise<FileAttachment> {
|
|
23
|
+
const ext = extname(path).toLowerCase();
|
|
24
|
+
const mimeType = MIME_TYPES[ext];
|
|
25
|
+
|
|
26
|
+
if (!mimeType) {
|
|
27
|
+
throw new Error(`Unsupported file type: ${ext}`);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
const buffer = await readFile(path);
|
|
31
|
+
const data = buffer.toString('base64');
|
|
32
|
+
const isImage = mimeType.startsWith('image/');
|
|
33
|
+
|
|
34
|
+
return {
|
|
35
|
+
type: isImage ? 'image' : 'file',
|
|
36
|
+
data,
|
|
37
|
+
mimeType,
|
|
38
|
+
filename: path.split('/').pop() || 'file',
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export function toDataUrl(attachment: FileAttachment): string {
|
|
43
|
+
return `data:${attachment.mimeType};base64,${attachment.data}`;
|
|
44
|
+
}
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
import { test, expect, beforeEach, afterAll } from 'bun:test';
|
|
2
|
+
import { appendHistory, readHistory, getLastEntry, clearHistory } from './history';
|
|
3
|
+
|
|
4
|
+
beforeEach(async () => {
|
|
5
|
+
await clearHistory();
|
|
6
|
+
});
|
|
7
|
+
|
|
8
|
+
afterAll(async () => {
|
|
9
|
+
await clearHistory();
|
|
10
|
+
});
|
|
11
|
+
|
|
12
|
+
test('appendHistory creates entry', async () => {
|
|
13
|
+
await appendHistory({ q: 'test query', m: 'sonar', a: 'test answer' });
|
|
14
|
+
const entries = await readHistory();
|
|
15
|
+
expect(entries.length).toBe(1);
|
|
16
|
+
expect(entries[0]!.q).toBe('test query');
|
|
17
|
+
expect(entries[0]!.m).toBe('sonar');
|
|
18
|
+
expect(entries[0]!.a).toBe('test answer');
|
|
19
|
+
expect(entries[0]!.ts).toBeGreaterThan(0);
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
test('readHistory returns entries in reverse order', async () => {
|
|
23
|
+
await appendHistory({ q: 'first', m: 'sonar', a: 'a1' });
|
|
24
|
+
await appendHistory({ q: 'second', m: 'sonar-pro', a: 'a2' });
|
|
25
|
+
await appendHistory({ q: 'third', m: 'sonar', a: 'a3' });
|
|
26
|
+
|
|
27
|
+
const entries = await readHistory();
|
|
28
|
+
expect(entries.length).toBe(3);
|
|
29
|
+
expect(entries[0]!.q).toBe('third');
|
|
30
|
+
expect(entries[1]!.q).toBe('second');
|
|
31
|
+
expect(entries[2]!.q).toBe('first');
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
test('readHistory respects limit', async () => {
|
|
35
|
+
await appendHistory({ q: 'one', m: 'sonar', a: 'a' });
|
|
36
|
+
await appendHistory({ q: 'two', m: 'sonar', a: 'a' });
|
|
37
|
+
await appendHistory({ q: 'three', m: 'sonar', a: 'a' });
|
|
38
|
+
|
|
39
|
+
const entries = await readHistory(2);
|
|
40
|
+
expect(entries.length).toBe(2);
|
|
41
|
+
expect(entries[0]!.q).toBe('three');
|
|
42
|
+
expect(entries[1]!.q).toBe('two');
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
test('getLastEntry returns most recent', async () => {
|
|
46
|
+
await appendHistory({ q: 'old', m: 'sonar', a: 'old answer' });
|
|
47
|
+
await appendHistory({ q: 'new', m: 'sonar-pro', a: 'new answer' });
|
|
48
|
+
|
|
49
|
+
const last = await getLastEntry();
|
|
50
|
+
expect(last?.q).toBe('new');
|
|
51
|
+
expect(last?.m).toBe('sonar-pro');
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
test('getLastEntry returns null when empty', async () => {
|
|
55
|
+
const last = await getLastEntry();
|
|
56
|
+
expect(last).toBeNull();
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
test('clearHistory removes all entries', async () => {
|
|
60
|
+
await appendHistory({ q: 'test', m: 'sonar', a: 'answer' });
|
|
61
|
+
await clearHistory();
|
|
62
|
+
const entries = await readHistory();
|
|
63
|
+
expect(entries.length).toBe(0);
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
test('appendHistory stores citations', async () => {
|
|
67
|
+
await appendHistory({
|
|
68
|
+
q: 'query',
|
|
69
|
+
m: 'sonar',
|
|
70
|
+
a: 'answer',
|
|
71
|
+
citations: ['https://example.com', 'https://test.com']
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
const entries = await readHistory();
|
|
75
|
+
expect(entries[0]!.citations).toEqual(['https://example.com', 'https://test.com']);
|
|
76
|
+
});
|
|
77
|
+
|
|
78
|
+
test('appendHistory truncates long answers', async () => {
|
|
79
|
+
const longAnswer = 'x'.repeat(3000);
|
|
80
|
+
await appendHistory({ q: 'query', m: 'sonar', a: longAnswer });
|
|
81
|
+
|
|
82
|
+
const entries = await readHistory();
|
|
83
|
+
expect(entries[0]!.a.length).toBe(2000);
|
|
84
|
+
});
|
package/src/history.ts
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import type { Model } from './api';
|
|
2
|
+
|
|
3
|
+
export interface HistoryEntry {
|
|
4
|
+
ts: number;
|
|
5
|
+
q: string;
|
|
6
|
+
m: Model;
|
|
7
|
+
a: string;
|
|
8
|
+
citations?: string[];
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
const HISTORY_DIR = `${process.env.HOME}/.pplx`;
|
|
12
|
+
const HISTORY_PATH = `${HISTORY_DIR}/history.jsonl`;
|
|
13
|
+
const MAX_ENTRIES = 1000;
|
|
14
|
+
const MAX_ANSWER_LENGTH = 2000;
|
|
15
|
+
|
|
16
|
+
async function ensureDir(): Promise<void> {
|
|
17
|
+
const dir = Bun.file(HISTORY_DIR);
|
|
18
|
+
if (!(await dir.exists())) {
|
|
19
|
+
await Bun.$`mkdir -p ${HISTORY_DIR}`;
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export async function appendHistory(entry: Omit<HistoryEntry, 'ts'>): Promise<void> {
|
|
24
|
+
await ensureDir();
|
|
25
|
+
|
|
26
|
+
const file = Bun.file(HISTORY_PATH);
|
|
27
|
+
const exists = await file.exists();
|
|
28
|
+
|
|
29
|
+
if (exists) {
|
|
30
|
+
const text = await file.text();
|
|
31
|
+
const lines = text.trim().split('\n').filter(l => l.length > 0);
|
|
32
|
+
if (lines.length >= MAX_ENTRIES) {
|
|
33
|
+
const keep = lines.slice(-MAX_ENTRIES + 1).join('\n') + '\n';
|
|
34
|
+
await Bun.write(HISTORY_PATH, keep);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
const record: HistoryEntry = {
|
|
39
|
+
ts: Date.now(),
|
|
40
|
+
q: entry.q,
|
|
41
|
+
m: entry.m,
|
|
42
|
+
a: entry.a.slice(0, MAX_ANSWER_LENGTH),
|
|
43
|
+
...(entry.citations?.length ? { citations: entry.citations } : {}),
|
|
44
|
+
};
|
|
45
|
+
|
|
46
|
+
const line = JSON.stringify(record) + '\n';
|
|
47
|
+
|
|
48
|
+
if (exists) {
|
|
49
|
+
const current = await Bun.file(HISTORY_PATH).text();
|
|
50
|
+
await Bun.write(HISTORY_PATH, current + line);
|
|
51
|
+
} else {
|
|
52
|
+
await Bun.write(HISTORY_PATH, line);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export async function readHistory(limit = 20): Promise<HistoryEntry[]> {
|
|
57
|
+
const file = Bun.file(HISTORY_PATH);
|
|
58
|
+
if (!(await file.exists())) return [];
|
|
59
|
+
|
|
60
|
+
const text = await file.text();
|
|
61
|
+
const lines = text.trim().split('\n').filter(l => l.length > 0);
|
|
62
|
+
|
|
63
|
+
return lines
|
|
64
|
+
.map(line => {
|
|
65
|
+
try {
|
|
66
|
+
return JSON.parse(line) as HistoryEntry;
|
|
67
|
+
} catch {
|
|
68
|
+
return null;
|
|
69
|
+
}
|
|
70
|
+
})
|
|
71
|
+
.filter((e): e is HistoryEntry => e !== null)
|
|
72
|
+
.reverse()
|
|
73
|
+
.slice(0, limit);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
export async function getLastEntry(): Promise<HistoryEntry | null> {
|
|
77
|
+
const entries = await readHistory(1);
|
|
78
|
+
return entries[0] ?? null;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
export async function clearHistory(): Promise<void> {
|
|
82
|
+
const file = Bun.file(HISTORY_PATH);
|
|
83
|
+
if (await file.exists()) {
|
|
84
|
+
await Bun.write(HISTORY_PATH, '');
|
|
85
|
+
}
|
|
86
|
+
}
|