pplx-zero 2.0.0 → 2.1.0
- package/README.md +101 -14
- package/package.json +1 -1
- package/src/history.test.ts +84 -0
- package/src/history.ts +86 -0
- package/src/index.ts +87 -4
- package/src/output.ts +7 -0
package/README.md
CHANGED
@@ -1,46 +1,133 @@
 # pplx-zero

-
+[](https://www.npmjs.com/package/pplx-zero)
+[](https://aur.archlinux.org/packages/pplx-zero)
+
+

-
+Search the web with AI from your terminal. Zero bloat, maximum speed.

 ```bash
-
+pplx "what is bun"
 ```

-
+## Why pplx-zero?
+
+- **Fast** — Bun-native, streams responses as they arrive
+- **Minimal** — ~400 lines of code, one dependency (zod)
+- **Powerful** — 5 models including deep research, file & image support
+- **Conversational** — Continue previous queries with `-c`
+- **Unix-friendly** — Pipes, JSON output, history, exit codes done right
+
+## Installation

 ```bash
+# Bun (recommended)
+bun install -g pplx-zero
+
+# npm (requires bun installed)
 npm install -g pplx-zero
+
+# Arch Linux
+yay -S pplx-zero
 ```

 ## Setup

+Get your API key from [Perplexity Settings](https://www.perplexity.ai/settings/api).
+
 ```bash
-export PERPLEXITY_API_KEY="
+export PERPLEXITY_API_KEY="pplx-..."
 ```

 ## Usage

 ```bash
-
-pplx
-
-
-pplx -
-
+# Quick search
+pplx "best practices for error handling in typescript"
+
+# Use a more powerful model
+pplx -m sonar-pro "explain quantum entanglement simply"
+
+# Deep research mode (takes longer, more comprehensive)
+pplx -m sonar-deep-research "comprehensive analysis of AI regulation in 2024"
+
+# Analyze a document
+pplx -f report.pdf "summarize the key findings"
+
+# Describe an image
+pplx -i screenshot.png "what's happening in this image"
+
+# Continue a conversation
+pplx "what is rust"
+pplx -c "how does it compare to go?"
+pplx -c "which should I learn first?"
+
+# Save research to markdown
+pplx -m sonar-deep-research "AI trends 2025" -o research.md
+
+# Get JSON output for scripting
+pplx --json "capital of france" | jq .answer
+
+# View query history
+pplx --history
+
+# Search without saving to history
+pplx --no-history "sensitive query"
 ```

+## Models
+
+| Model | Best For |
+|-------|----------|
+| `sonar` | Quick answers (default) |
+| `sonar-pro` | Complex questions |
+| `sonar-reasoning` | Step-by-step thinking |
+| `sonar-reasoning-pro` | Advanced reasoning |
+| `sonar-deep-research` | Comprehensive research |
+
 ## Options

 | Flag | Description |
 |------|-------------|
-| `-m, --model
-| `-f, --file
-| `-i, --image
+| `-m, --model <name>` | Select model |
+| `-f, --file <path>` | Attach document (PDF, TXT, MD, etc.) |
+| `-i, --image <path>` | Attach image (PNG, JPG, WebP, etc.) |
+| `-o, --output <path>` | Save output to file (.md, .txt) |
+| `-c, --continue` | Continue from last query |
+| `--history` | Show query history |
+| `--no-history` | Don't save query to history |
 | `--json` | Output as JSON |
 | `-h, --help` | Show help |

+## History & Sessions
+
+pplx-zero keeps a local history of your queries at `~/.pplx/history.jsonl`.
+
+```bash
+# View recent queries
+pplx --history
+
+# Filter with grep
+pplx --history | grep "typescript"
+
+# Continue last conversation
+pplx -c "tell me more"
+
+# Skip history for sensitive queries
+pplx --no-history "private question"
+```
+
+History auto-rotates at 1000 entries to keep the file small.
+
+## Exit Codes
+
+| Code | Meaning |
+|------|---------|
+| `0` | Success |
+| `1` | API error |
+| `2` | Configuration error |
+
 ## License

 MIT
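The new "History & Sessions" section above stores each query as one JSON object per line in `~/.pplx/history.jsonl`. As a minimal sketch outside the package itself (field names mirror the `HistoryEntry` shape introduced in `src/history.ts` below), the file can be read directly with Bun:

```ts
// Sketch only, not part of pplx-zero: inspect ~/.pplx/history.jsonl directly.
// The field names mirror the HistoryEntry interface added in src/history.ts.
interface HistoryLine {
  ts: number;           // epoch milliseconds
  q: string;            // query text
  m: string;            // model name
  a: string;            // answer (the CLI caps this at 2000 characters)
  citations?: string[];
}

const file = Bun.file(`${process.env.HOME}/.pplx/history.jsonl`);

if (await file.exists()) {
  const entries: HistoryLine[] = (await file.text())
    .trim()
    .split('\n')
    .filter((l) => l.length > 0)
    .map((l) => JSON.parse(l) as HistoryLine);

  // Print the five most recent queries, newest last.
  for (const e of entries.slice(-5)) {
    console.log(new Date(e.ts).toISOString(), `[${e.m}]`, e.q);
  }
}
```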
package/package.json
CHANGED

package/src/history.test.ts
ADDED
@@ -0,0 +1,84 @@
+import { test, expect, beforeEach, afterAll } from 'bun:test';
+import { appendHistory, readHistory, getLastEntry, clearHistory } from './history';
+
+beforeEach(async () => {
+  await clearHistory();
+});
+
+afterAll(async () => {
+  await clearHistory();
+});
+
+test('appendHistory creates entry', async () => {
+  await appendHistory({ q: 'test query', m: 'sonar', a: 'test answer' });
+  const entries = await readHistory();
+  expect(entries.length).toBe(1);
+  expect(entries[0]!.q).toBe('test query');
+  expect(entries[0]!.m).toBe('sonar');
+  expect(entries[0]!.a).toBe('test answer');
+  expect(entries[0]!.ts).toBeGreaterThan(0);
+});
+
+test('readHistory returns entries in reverse order', async () => {
+  await appendHistory({ q: 'first', m: 'sonar', a: 'a1' });
+  await appendHistory({ q: 'second', m: 'sonar-pro', a: 'a2' });
+  await appendHistory({ q: 'third', m: 'sonar', a: 'a3' });
+
+  const entries = await readHistory();
+  expect(entries.length).toBe(3);
+  expect(entries[0]!.q).toBe('third');
+  expect(entries[1]!.q).toBe('second');
+  expect(entries[2]!.q).toBe('first');
+});
+
+test('readHistory respects limit', async () => {
+  await appendHistory({ q: 'one', m: 'sonar', a: 'a' });
+  await appendHistory({ q: 'two', m: 'sonar', a: 'a' });
+  await appendHistory({ q: 'three', m: 'sonar', a: 'a' });
+
+  const entries = await readHistory(2);
+  expect(entries.length).toBe(2);
+  expect(entries[0]!.q).toBe('three');
+  expect(entries[1]!.q).toBe('two');
+});
+
+test('getLastEntry returns most recent', async () => {
+  await appendHistory({ q: 'old', m: 'sonar', a: 'old answer' });
+  await appendHistory({ q: 'new', m: 'sonar-pro', a: 'new answer' });
+
+  const last = await getLastEntry();
+  expect(last?.q).toBe('new');
+  expect(last?.m).toBe('sonar-pro');
+});
+
+test('getLastEntry returns null when empty', async () => {
+  const last = await getLastEntry();
+  expect(last).toBeNull();
+});
+
+test('clearHistory removes all entries', async () => {
+  await appendHistory({ q: 'test', m: 'sonar', a: 'answer' });
+  await clearHistory();
+  const entries = await readHistory();
+  expect(entries.length).toBe(0);
+});
+
+test('appendHistory stores citations', async () => {
+  await appendHistory({
+    q: 'query',
+    m: 'sonar',
+    a: 'answer',
+    citations: ['https://example.com', 'https://test.com']
+  });
+
+  const entries = await readHistory();
+  expect(entries[0]!.citations).toEqual(['https://example.com', 'https://test.com']);
+});
+
+test('appendHistory truncates long answers', async () => {
+  const longAnswer = 'x'.repeat(3000);
+  await appendHistory({ q: 'query', m: 'sonar', a: longAnswer });
+
+  const entries = await readHistory();
+  expect(entries[0]!.a.length).toBe(2000);
+});
package/src/history.ts
ADDED
@@ -0,0 +1,86 @@
+import type { Model } from './api';
+
+export interface HistoryEntry {
+  ts: number;
+  q: string;
+  m: Model;
+  a: string;
+  citations?: string[];
+}
+
+const HISTORY_DIR = `${process.env.HOME}/.pplx`;
+const HISTORY_PATH = `${HISTORY_DIR}/history.jsonl`;
+const MAX_ENTRIES = 1000;
+const MAX_ANSWER_LENGTH = 2000;
+
+async function ensureDir(): Promise<void> {
+  const dir = Bun.file(HISTORY_DIR);
+  if (!(await dir.exists())) {
+    await Bun.$`mkdir -p ${HISTORY_DIR}`;
+  }
+}
+
+export async function appendHistory(entry: Omit<HistoryEntry, 'ts'>): Promise<void> {
+  await ensureDir();
+
+  const file = Bun.file(HISTORY_PATH);
+  const exists = await file.exists();
+
+  if (exists) {
+    const text = await file.text();
+    const lines = text.trim().split('\n').filter(l => l.length > 0);
+    if (lines.length >= MAX_ENTRIES) {
+      const keep = lines.slice(-MAX_ENTRIES + 1).join('\n') + '\n';
+      await Bun.write(HISTORY_PATH, keep);
+    }
+  }
+
+  const record: HistoryEntry = {
+    ts: Date.now(),
+    q: entry.q,
+    m: entry.m,
+    a: entry.a.slice(0, MAX_ANSWER_LENGTH),
+    ...(entry.citations?.length ? { citations: entry.citations } : {}),
+  };
+
+  const line = JSON.stringify(record) + '\n';
+
+  if (exists) {
+    const current = await Bun.file(HISTORY_PATH).text();
+    await Bun.write(HISTORY_PATH, current + line);
+  } else {
+    await Bun.write(HISTORY_PATH, line);
+  }
+}
+
+export async function readHistory(limit = 20): Promise<HistoryEntry[]> {
+  const file = Bun.file(HISTORY_PATH);
+  if (!(await file.exists())) return [];
+
+  const text = await file.text();
+  const lines = text.trim().split('\n').filter(l => l.length > 0);
+
+  return lines
+    .map(line => {
+      try {
+        return JSON.parse(line) as HistoryEntry;
+      } catch {
+        return null;
+      }
+    })
+    .filter((e): e is HistoryEntry => e !== null)
+    .reverse()
+    .slice(0, limit);
+}
+
+export async function getLastEntry(): Promise<HistoryEntry | null> {
+  const entries = await readHistory(1);
+  return entries[0] ?? null;
+}
+
+export async function clearHistory(): Promise<void> {
+  const file = Bun.file(HISTORY_PATH);
+  if (await file.exists()) {
+    await Bun.write(HISTORY_PATH, '');
+  }
+}
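For reference, `appendHistory` above writes one `JSON.stringify`-serialized entry per line. A hedged illustration of what a single `history.jsonl` line represents (all values here are made up; `'sonar'` is assumed to be a valid `Model` literal):

```ts
// Illustrative only: one line of history.jsonl, as serialized by appendHistory.
import type { HistoryEntry } from './history';

const example: HistoryEntry = {
  ts: 1735689600000,                      // Date.now() at write time
  q: 'what is bun',                       // the original query
  m: 'sonar',                             // model used (assumed valid Model literal)
  a: 'Bun is a fast JavaScript runtime…', // answer, capped at MAX_ANSWER_LENGTH (2000)
  citations: ['https://bun.sh'],          // included only when citations exist
};

// appendHistory appends exactly this serialization plus a trailing newline.
console.log(JSON.stringify(example));
```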
package/src/index.ts
CHANGED
@@ -4,10 +4,10 @@ import { search, MODELS, type Model } from './api';
 import { encodeFile } from './files';
 import { getEnv } from './env';
 import { fmt, write, writeLn } from './output';
+import { appendHistory, readHistory, getLastEntry } from './history';

 getEnv();

-
 const { values, positionals } = parseArgs({
   args: Bun.argv.slice(2),
   options: {
@@ -16,12 +16,16 @@ const { values, positionals } = parseArgs({
     image: { type: 'string', short: 'i' },
     json: { type: 'boolean', default: false },
     help: { type: 'boolean', short: 'h' },
+    history: { type: 'boolean', default: false },
+    'no-history': { type: 'boolean', default: false },
+    continue: { type: 'boolean', short: 'c', default: false },
+    output: { type: 'string', short: 'o' },
   },
   allowPositionals: true,
   strict: true,
 });

-if (values.help
+if (values.help) {
   console.log(`
 pplx - Perplexity AI search from terminal

@@ -31,6 +35,10 @@ Options:
   -m, --model <name>   Model: ${MODELS.join(', ')} (default: sonar)
   -f, --file <path>    Attach a file (PDF, TXT, etc.)
   -i, --image <path>   Attach an image (PNG, JPG, etc.)
+  -o, --output <path>  Save output to file (.md, .txt)
+  -c, --continue       Continue from last query (add context)
+  --history            Show query history
+  --no-history         Don't save this query to history
   --json               Output as JSON
   -h, --help           Show this help

@@ -38,18 +46,52 @@ Examples:
   pplx "what is bun"
   pplx -m sonar-pro "explain quantum computing"
   pplx -f report.pdf "summarize this document"
+  pplx -c "tell me more about that"
+  pplx --history | grep "bun"
 `);
   process.exit(0);
 }

-
+if (values.history) {
+  const entries = await readHistory(20);
+  if (entries.length === 0) {
+    console.log('No history yet.');
+  } else {
+    for (const entry of entries) {
+      console.log(fmt.historyEntry(entry.ts, entry.m, entry.q));
+    }
+  }
+  process.exit(0);
+}
+
+if (positionals.length === 0 && !values.continue) {
+  console.error(fmt.error('No query provided. Use -h for help.'));
+  process.exit(2);
+}
+
+let query = positionals.join(' ');
 const model = (MODELS.includes(values.model as Model) ? values.model : 'sonar') as Model;

+if (values.continue) {
+  const last = await getLastEntry();
+  if (last) {
+    const context = `Previous question: "${last.q}"\nPrevious answer: "${last.a.slice(0, 500)}..."\n\nFollow-up question: ${query || 'Continue and elaborate on the previous answer.'}`;
+    query = context;
+    if (!values.json) {
+      await write(fmt.continuing(last.q));
+    }
+  } else if (!query) {
+    console.error(fmt.error('No previous query to continue from.'));
+    process.exit(2);
+  }
+}
+
 const filePath = values.file || values.image;
 const file = filePath ? await encodeFile(filePath) : undefined;

 const startTime = Date.now();
 let fullContent = '';
+let outputBuffer = '';

 if (!values.json) {
   await write(fmt.model(model) + ' ');
@@ -65,11 +107,12 @@ await search(query, model, {
   },
   onDone: async (citations, usage) => {
     const elapsed = Date.now() - startTime;
+    const citationUrls = citations.map((c) => c.url);

     if (values.json) {
       const output = {
         answer: fullContent,
-        citations:
+        citations: citationUrls,
         model,
         tokens: usage.prompt_tokens + usage.completion_tokens,
         latency_ms: elapsed,
@@ -84,6 +127,46 @@ await search(query, model, {
       }
       await write(fmt.stats(usage.prompt_tokens + usage.completion_tokens, elapsed));
     }
+
+    if (values.output) {
+      const ext = values.output.split('.').pop()?.toLowerCase();
+      let content = '';
+
+      if (ext === 'md') {
+        content = `# ${positionals.join(' ') || 'Query'}\n\n`;
+        content += `**Model:** ${model}\n`;
+        content += `**Date:** ${new Date().toISOString()}\n\n`;
+        content += `## Answer\n\n${fullContent}\n\n`;
+        if (citationUrls.length > 0) {
+          content += `## Sources\n\n`;
+          citationUrls.forEach((url, i) => {
+            content += `${i + 1}. ${url}\n`;
+          });
+        }
+      } else {
+        content = fullContent;
+        if (citationUrls.length > 0) {
+          content += '\n\nSources:\n';
+          citationUrls.forEach((url, i) => {
+            content += `${i + 1}. ${url}\n`;
+          });
+        }
+      }
+
+      await Bun.write(values.output, content);
+      if (!values.json) {
+        await writeLn(`\n${fmt.model('saved')} ${values.output}`);
+      }
+    }
+
+    if (!values['no-history'] && !values.json) {
+      await appendHistory({
+        q: positionals.join(' ') || '(continued)',
+        m: model,
+        a: fullContent,
+        citations: citationUrls,
+      });
+    }
   },
   onError: async (error) => {
     if (values.json) {
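The `--json` branch in `onDone` above emits an object whose visible fields are `answer`, `citations`, `model`, `tokens`, and `latency_ms`. A sketch of consuming that output from another Bun script (assumes `pplx` is on `PATH`, `PERPLEXITY_API_KEY` is set, and that the interface below lists only the fields shown in this hunk):

```ts
// Sketch only: drive the pplx CLI with --json and parse the result.
interface PplxJsonOutput {
  answer: string;
  citations: string[];
  model: string;
  tokens: number;
  latency_ms: number;
}

const proc = Bun.spawn(['pplx', '--json', 'capital of france'], { stdout: 'pipe' });

// Read stdout and wait for exit concurrently to avoid blocking on a full pipe.
const [raw, code] = await Promise.all([new Response(proc.stdout).text(), proc.exited]);

if (code !== 0) {
  // Per the README exit-code table: 1 = API error, 2 = configuration error.
  console.error(`pplx exited with code ${code}`);
  process.exit(code);
}

const result = JSON.parse(raw) as PplxJsonOutput;
console.log(result.answer);
console.log(`${result.model}: ${result.tokens} tokens in ${result.latency_ms} ms`);
```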
package/src/output.ts
CHANGED
@@ -17,6 +17,13 @@ export const fmt = {
   stats: (tokens: number, ms: number) =>
     `\n${c.gray}[${tokens} tokens, ${(ms / 1000).toFixed(1)}s]${c.reset}\n`,
   sources: () => `\n${c.yellow}Sources:${c.reset}`,
+  historyEntry: (ts: number, model: string, query: string) => {
+    const date = new Date(ts).toLocaleString('en-US', {
+      month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit'
+    });
+    return `${c.dim}${date}${c.reset} ${c.cyan}[${model}]${c.reset} ${query}`;
+  },
+  continuing: (query: string) => `${c.dim}Continuing from:${c.reset} ${query.slice(0, 50)}${query.length > 50 ? '...' : ''}\n`,
 };

 export async function write(text: string): Promise<void> {
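The new `fmt.historyEntry` and `fmt.continuing` helpers reference a color map `c` (`c.dim`, `c.cyan`, `c.gray`, `c.yellow`, `c.reset`) defined earlier in `output.ts` and not shown in this hunk. As an assumption only, a map of standard ANSI escape codes would satisfy those references:

```ts
// Assumption: this is NOT the package's actual definition of `c`, just a shape
// that matches how fmt uses it (standard ANSI escape sequences).
const c = {
  reset: '\x1b[0m',
  dim: '\x1b[2m',
  gray: '\x1b[90m',
  cyan: '\x1b[36m',
  yellow: '\x1b[33m',
};
```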