@1a35e1/sonar-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +422 -0
- package/dist/cli.js +4 -0
- package/dist/commands/account.js +75 -0
- package/dist/commands/config/data/download.js +53 -0
- package/dist/commands/config/data/path.js +11 -0
- package/dist/commands/config/data/sql.js +12 -0
- package/dist/commands/config/data/sync.js +85 -0
- package/dist/commands/config/env.js +15 -0
- package/dist/commands/config/index.js +12 -0
- package/dist/commands/config/nuke.js +19 -0
- package/dist/commands/config/set.js +38 -0
- package/dist/commands/config/setup.js +29 -0
- package/dist/commands/config/skill.js +15 -0
- package/dist/commands/feed.js +172 -0
- package/dist/commands/inbox/archive.js +41 -0
- package/dist/commands/inbox/index.js +92 -0
- package/dist/commands/inbox/later.js +41 -0
- package/dist/commands/inbox/read.js +41 -0
- package/dist/commands/inbox/skip.js +41 -0
- package/dist/commands/index.js +5 -0
- package/dist/commands/ingest/bookmarks.js +31 -0
- package/dist/commands/ingest/index.js +5 -0
- package/dist/commands/ingest/tweets.js +31 -0
- package/dist/commands/interests/create.js +94 -0
- package/dist/commands/interests/index.js +56 -0
- package/dist/commands/interests/match.js +33 -0
- package/dist/commands/interests/update.js +142 -0
- package/dist/commands/monitor.js +81 -0
- package/dist/components/AccountCard.js +6 -0
- package/dist/components/InteractiveSession.js +241 -0
- package/dist/components/InterestCard.js +10 -0
- package/dist/components/RefreshTip.js +5 -0
- package/dist/components/Spinner.js +14 -0
- package/dist/components/Table.js +23 -0
- package/dist/lib/ai.js +160 -0
- package/dist/lib/client.js +33 -0
- package/dist/lib/config.js +74 -0
- package/dist/lib/data-queries.js +61 -0
- package/dist/lib/db.js +73 -0
- package/dist/lib/skill.js +290 -0
- package/dist/types/sonar.js +42 -0
- package/package.json +47 -0
package/README.md
ADDED
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
# 🔊 Sonar (Preview)
|
|
2
|
+
|
|
3
|
+
Experimental X CLI for OpenClaw 🦞 power users.
|
|
4
|
+
|
|
5
|
+
Sonar matches interests from your X graph using various AI pipelines. We built this to automate our social intelligence.
|
|
6
|
+
|
|
7
|
+
This cli has been designed to handover indexing and consumption to agents.
|
|
8
|
+
|
|
9
|
+
* Pipe it into scripts,
|
|
10
|
+
* automate your morning briefing,
|
|
11
|
+
* Or just discover tweets you probably missed out on the web interface.
|
|
12
|
+
|
|
13
|
+
---
|
|
14
|
+
|
|
15
|
+
## Get started
|
|
16
|
+
|
|
17
|
+
* Register with `X` to get an API key from `https://sonar.8640p.info/`
|
|
18
|
+
* Learn more about which [scopes](#scopes) we request and why.
|
|
19
|
+
|
|
20
|
+
Install the CLI
|
|
21
|
+
|
|
22
|
+
```sh
|
|
23
|
+
pnpm add -g @1a35e1/sonar-cli
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
Register your API key.
|
|
27
|
+
|
|
28
|
+
```sh
|
|
29
|
+
# Make "SONAR_API_KEY" available in your env
|
|
30
|
+
export SONAR_API_KEY=snr_xxxxx
|
|
31
|
+
|
|
32
|
+
# or, manually register
|
|
33
|
+
sonar config setup key=<YOUR_API_KEY>
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
View your account to ensure everything works.
|
|
37
|
+
|
|
38
|
+
```sh
|
|
39
|
+
sonar account
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
Ingest your first `tweets` and check to `monitor` progress.
|
|
43
|
+
|
|
44
|
+
> The first time you run this command it will take some time.
|
|
45
|
+
|
|
46
|
+
```sh
|
|
47
|
+
sonar ingest tweets
|
|
48
|
+
|
|
49
|
+
sonar ingest monitor
|
|
50
|
+
sonar ingest monitor --watch
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
---
|
|
54
|
+
|
|
55
|
+
## Scopes
|
|
56
|
+
|
|
57
|
+
* We currently request `read:*` and `offline:processing` scopes based on <https://docs.x.com/fundamentals/authentication/oauth-2-0/>. If there is an appetite for narrower scopes, let us know.
|
|
58
|
+
|
|
59
|
+
* So we can stay connected to your account until you revoke access.
|
|
60
|
+
* Posts you’ve liked and likes you can view.
|
|
61
|
+
* All the posts you can view, including posts from protected accounts.
|
|
62
|
+
* Accounts you’ve muted.
|
|
63
|
+
* Accounts you’ve blocked.
|
|
64
|
+
* People who follow you and people who you follow.
|
|
65
|
+
* All your Bookmarks.
|
|
66
|
+
* Lists, list members, and list followers of lists you’ve created or are a member of, including private lists.
|
|
67
|
+
* Any account you can view, including protected accounts.
|
|
68
|
+
|
|
69
|
+
## Why Sonar exists
|
|
70
|
+
|
|
71
|
+
Setting up your own social data pipeline is genuinely awful. You're looking at OAuth flows, rate limit math, pagination handling, webhook plumbing, deduplication logic, and a SQLite schema you'll regret in three weeks — before you've seen a single useful result. Most developers who try it abandon it halfway through.
|
|
72
|
+
|
|
73
|
+
**Sonar skips all of that. Get actionalable data for OpenClaw in 15 minutes.**
|
|
74
|
+
|
|
75
|
+
We believe your data is yours. So if you want to go deeper than our platform allows — build your own models, run custom queries, pipe it into your own tooling — you can download everything we have indexed on your behalf into a local SQLite database and do whatever you want with it:
|
|
76
|
+
|
|
77
|
+
```bash
|
|
78
|
+
pnpm run cli -- data download # full snapshot → ~/.sonar/data.db
|
|
79
|
+
pnpm run cli -- data sync # incremental updates
|
|
80
|
+
pnpm run cli -- data sql # drop into a sqlite3 shell
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
No lock-in. If you outgrow us, you leave with your data intact.
|
|
84
|
+
|
|
85
|
+
## Design philosophy
|
|
86
|
+
|
|
87
|
+
There's a quiet shift happening in how developer tools are built.
|
|
88
|
+
|
|
89
|
+
In the early web2 era, API-first was a revelation. Stripe, Twilio, Sendgrid — companies that exposed clean REST contracts unlocked entire ecosystems of products built on top of them. The insight was simple: if your service has strong, reliable APIs, anyone can build anything. The interface didn't matter as much as the contract underneath.
|
|
90
|
+
We're at a similar inflection point now, but the interface layer has changed dramatically.
|
|
91
|
+
|
|
92
|
+
The goal for most workflows today is fire and forget — you define what you want, set it in motion, and let agents handle the execution. That only works if the underlying APIs are strong enough to support complex, long-running ETL pipelines without hand-holding. Sonar is built with that assumption: the API is the product, the CLI is just one interface into it.
|
|
93
|
+
Which raises an interesting question about CLIs themselves. Traditionally a CLI was developer-first by definition — you were writing for someone comfortable with flags, pipes, and man pages. But if the primary consumer of your CLI is increasingly an agent (OpenClaw, a cron job, an LLM with tool access), the design principles shift:
|
|
94
|
+
|
|
95
|
+
Output should be machine-readable by default. Every command has a --json flag. Agents don't parse card renders.
|
|
96
|
+
Commands should be composable. Small, single-purpose commands that pipe cleanly into each other are more useful to an agent than monolithic workflows.
|
|
97
|
+
|
|
98
|
+
Side effects should be explicit. An agent calling index --force should know exactly what it's triggering. No surprises.
|
|
99
|
+
Errors should be structured. A human reads an error message. An agent needs to know whether to retry, skip, or escalate.
|
|
100
|
+
|
|
101
|
+
The CLI still needs to work well for humans — interactive mode, card renders, readable output — but those are progressive enhancements on top of a foundation built for automation. Design for the agent, polish for the human.
|
|
102
|
+
This is what API-first looks like in the agentic era: strong contracts at the service layer, composable interfaces at the CLI layer, and a clear separation between the two.
|
|
103
|
+
|
|
104
|
+
---
|
|
105
|
+
|
|
106
|
+
## What you can do with it
|
|
107
|
+
|
|
108
|
+
### Morning briefing in one command
|
|
109
|
+
|
|
110
|
+
Pull everything relevant that happened while you slept:
|
|
111
|
+
|
|
112
|
+
```bash
|
|
113
|
+
pnpm run cli -- feed --hours 8 --render card
|
|
114
|
+
pnpm run cli -- inbox --status inbox
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
### Track a topic you care about — right now
|
|
118
|
+
|
|
119
|
+
Create a new interest from a plain English prompt and get content immediately:
|
|
120
|
+
|
|
121
|
+
```bash
|
|
122
|
+
pnpm run cli -- interests create \
|
|
123
|
+
--from-prompt "I want to follow AI evals and agent infrastructure"
|
|
124
|
+
|
|
125
|
+
pnpm run cli -- index suggestions --days 1
|
|
126
|
+
pnpm run cli -- feed --hours 24
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
Sonar generates keywords and topics from your prompt, kicks off indexing, and your feed updates with relevant posts.
|
|
130
|
+
|
|
131
|
+
### Build a scriptable news digest
|
|
132
|
+
|
|
133
|
+
Combine `--json` output with `jq` to pipe Sonar content wherever you want:
|
|
134
|
+
|
|
135
|
+
```bash
|
|
136
|
+
# Get today's top feed items as JSON
|
|
137
|
+
pnpm run cli -- feed --hours 24 --json | jq '.[] | {author, text, url}'
|
|
138
|
+
|
|
139
|
+
# Summarize your inbox with an LLM
|
|
140
|
+
pnpm run cli -- inbox --json | jq '.[].text' | your-summarizer-script
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
### Keep your local data fresh and queryable
|
|
144
|
+
|
|
145
|
+
Download a full SQLite snapshot of your Sonar data and query it directly:
|
|
146
|
+
|
|
147
|
+
```bash
|
|
148
|
+
pnpm run cli -- data download
|
|
149
|
+
pnpm run cli -- data sql
|
|
150
|
+
# Now you have a full sqlite3 shell — write any query you want
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
Run incremental syncs on a cron to keep it current:
|
|
154
|
+
|
|
155
|
+
```bash
|
|
156
|
+
# crontab: sync every 30 minutes
|
|
157
|
+
*/30 * * * * cd /your/project && pnpm run cli -- data sync
|
|
158
|
+
```
|
|
159
|
+
|
|
160
|
+
### Interactive triage
|
|
161
|
+
|
|
162
|
+
Work through your inbox without leaving the terminal:
|
|
163
|
+
|
|
164
|
+
```bash
|
|
165
|
+
pnpm run cli -- inbox --interactive
|
|
166
|
+
pnpm run cli -- feed --interactive
|
|
167
|
+
```
|
|
168
|
+
|
|
169
|
+
Mark suggestions as read, skip, archive, or save for later — keyboard-driven.
|
|
170
|
+
|
|
171
|
+
### Monitor indexing jobs
|
|
172
|
+
|
|
173
|
+
Watch the queue in real time while you trigger a full re-index:
|
|
174
|
+
|
|
175
|
+
```bash
|
|
176
|
+
pnpm run cli -- index # trigger all jobs
|
|
177
|
+
pnpm run cli -- index status --watch # watch until complete
|
|
178
|
+
```
|
|
179
|
+
|
|
180
|
+
---
|
|
181
|
+
|
|
182
|
+
## What Sonar doesn't do
|
|
183
|
+
|
|
184
|
+
Sonar is **not a global search engine**. It won't crawl the entire internet or index trending posts from people you've never heard of.
|
|
185
|
+
|
|
186
|
+
Instead, it searches within your social graph — your followers and the people you follow — up to **2 degrees of separation**. That's it. This is an intentional constraint, not a limitation we're working around.
|
|
187
|
+
|
|
188
|
+
The reason is practical: API rate limits make broad crawling impossible at any useful refresh frequency. But the reason it works is more interesting — **the people in your network are already a curated signal layer**. The accounts you follow, and the accounts they follow, are a surprisingly high-quality filter for what's relevant to your domain. Sonar's job is to surface what's moving through that graph before it reaches mainstream feeds.
|
|
189
|
+
|
|
190
|
+
What this means in practice:
|
|
191
|
+
|
|
192
|
+
* Results reflect your network's attention, not global virality
|
|
193
|
+
* You won't see noise from accounts you have no connection to
|
|
194
|
+
* The feed gets more useful the more intentional you are about who you follow
|
|
195
|
+
* Adding interests with specific keywords and topics sharpens what Sonar surfaces *within* that graph
|
|
196
|
+
|
|
197
|
+
If you want global trend monitoring, tools like Brandwatch or Twitter's native search are better fits. Sonar is for developers who want a focused, low-noise signal from a network they've already curated.
|
|
198
|
+
|
|
199
|
+
---
|
|
200
|
+
|
|
201
|
+
## Pair with OpenClaw
|
|
202
|
+
|
|
203
|
+
[OpenClaw](https://github.com/openclaw/openclaw) is a local-first autonomous AI agent that runs on your machine and talks to you through WhatsApp, Telegram, Discord, Slack, or iMessage. It can execute shell commands, run on a schedule, and be extended with custom skills.
|
|
204
|
+
|
|
205
|
+
Sonar + OpenClaw is a natural stack: **Sonar handles the signal filtering and curation, OpenClaw handles delivery and action.** Together they turn your social feed into an ambient intelligence layer you don't have to babysit.
|
|
206
|
+
|
|
207
|
+
### Morning briefing delivered to your phone
|
|
208
|
+
|
|
209
|
+
Set up a cron job in OpenClaw to run your Sonar digest and pipe it back to you on Telegram every morning:
|
|
210
|
+
|
|
211
|
+
```
|
|
212
|
+
# In OpenClaw: schedule a daily 8am briefing
|
|
213
|
+
"Every morning at 8am, run `sonar feed --hours 8 --json` and summarize the top 5 posts for me"
|
|
214
|
+
```
|
|
215
|
+
|
|
216
|
+
OpenClaw will execute the CLI, pass the JSON output to your LLM, and send a clean summary straight to your phone — no dashboard to open.
|
|
217
|
+
|
|
218
|
+
### Ask your feed questions in natural language
|
|
219
|
+
|
|
220
|
+
Because `--json` makes Sonar output composable, OpenClaw can reason over it conversationally:
|
|
221
|
+
|
|
222
|
+
```
|
|
223
|
+
# Example prompts you can send OpenClaw via WhatsApp:
|
|
224
|
+
"What's the most discussed topic in my Sonar feed today?"
|
|
225
|
+
"Did anyone in my feed mention Uniswap V4 in the last 48 hours?"
|
|
226
|
+
"Summarize my unread Sonar inbox"
|
|
227
|
+
```
|
|
228
|
+
|
|
229
|
+
Wire it up once as an OpenClaw skill and your feed becomes queryable from any messaging app.
|
|
230
|
+
|
|
231
|
+
### Triage your inbox hands-free
|
|
232
|
+
|
|
233
|
+
Combine OpenClaw's scheduling with Sonar's inbox API to automatically mark low-signal suggestions:
|
|
234
|
+
|
|
235
|
+
```bash
|
|
236
|
+
# Shell script you can hand to OpenClaw as a scheduled skill
|
|
237
|
+
sonar inbox --json | \
|
|
238
|
+
jq '[.[] | select(.score < 0.4) | .id]' | \
|
|
239
|
+
xargs -I{} sonar inbox skip {}
|
|
240
|
+
```
|
|
241
|
+
|
|
242
|
+
Run this nightly and your inbox stays clean without manual triage.
|
|
243
|
+
|
|
244
|
+
### Get alerted when a topic spikes
|
|
245
|
+
|
|
246
|
+
Use OpenClaw's Heartbeat (scheduled wake-up) to watch for signal surges and notify you:
|
|
247
|
+
|
|
248
|
+
```
|
|
249
|
+
# OpenClaw cron: check every 2 hours
|
|
250
|
+
"Run `sonar feed --hours 2 --json` — if there are more than 10 posts about
|
|
251
|
+
'token launchpad' or 'LVR', send me a Telegram alert with the highlights"
|
|
252
|
+
```
|
|
253
|
+
|
|
254
|
+
Effectively a custom Google Alert, but filtered through your actual interest graph.
|
|
255
|
+
|
|
256
|
+
### Build a Sonar skill for OpenClaw
|
|
257
|
+
|
|
258
|
+
The cleanest integration is wrapping Sonar as a reusable OpenClaw skill. Drop a skill file in your OpenClaw workspace:
|
|
259
|
+
|
|
260
|
+
```typescript
|
|
261
|
+
// skills/sonar.ts
|
|
262
|
+
export async function getFeed(hours = 12) {
|
|
263
|
+
const { stdout } = await exec(`sonar feed --hours ${hours} --json`);
|
|
264
|
+
return JSON.parse(stdout);
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
export async function getInbox() {
|
|
268
|
+
const { stdout } = await exec(`sonar inbox --json`);
|
|
269
|
+
return JSON.parse(stdout);
|
|
270
|
+
}
|
|
271
|
+
```
|
|
272
|
+
|
|
273
|
+
Once registered, OpenClaw can call these tools autonomously whenever it decides they're relevant — no manual prompting required.
|
|
274
|
+
|
|
275
|
+
---
|
|
276
|
+
|
|
277
|
+
## Setup
|
|
278
|
+
|
|
279
|
+
### Prerequisites
|
|
280
|
+
|
|
281
|
+
* Node.js 20+
|
|
282
|
+
* `pnpm`
|
|
283
|
+
* A Sonar API key from [sonar.sh/account](https://sonar.sh/account?tab=api-keys)
|
|
284
|
+
* Optional: `sqlite3` CLI (only needed for `data sql`)
|
|
285
|
+
|
|
286
|
+
### Install and authenticate
|
|
287
|
+
|
|
288
|
+
```bash
|
|
289
|
+
pnpm install
|
|
290
|
+
|
|
291
|
+
export SONAR_API_KEY="your_api_key_here"
|
|
292
|
+
pnpm run cli -- init
|
|
293
|
+
```
|
|
294
|
+
|
|
295
|
+
`init` writes your config to `~/.sonar/config.json`. If `SONAR_API_KEY` is set in your environment, it always takes precedence.
|
|
296
|
+
|
|
297
|
+
Verify it works:
|
|
298
|
+
|
|
299
|
+
```bash
|
|
300
|
+
pnpm run cli -- account
|
|
301
|
+
pnpm run cli -- interests
|
|
302
|
+
```
|
|
303
|
+
|
|
304
|
+
---
|
|
305
|
+
|
|
306
|
+
## Command Reference
|
|
307
|
+
|
|
308
|
+
### Account & Config
|
|
309
|
+
|
|
310
|
+
```bash
|
|
311
|
+
pnpm run cli -- account # plan, usage, suggestion counters
|
|
312
|
+
pnpm run cli -- config # show current config
|
|
313
|
+
pnpm run cli -- config set vendor anthropic # or openai
|
|
314
|
+
pnpm run cli -- config set feed-render card # or table
|
|
315
|
+
pnpm run cli -- config set feed-width 100
|
|
316
|
+
```
|
|
317
|
+
|
|
318
|
+
### Interests
|
|
319
|
+
|
|
320
|
+
```bash
|
|
321
|
+
pnpm run cli -- interests # list all
|
|
322
|
+
pnpm run cli -- interests --json # JSON output
|
|
323
|
+
|
|
324
|
+
# Create manually
|
|
325
|
+
pnpm run cli -- interests create \
|
|
326
|
+
--name "Rust Systems" \
|
|
327
|
+
--description "Rust, compilers, and systems tooling" \
|
|
328
|
+
--keywords "rust,cargo,wasm" \
|
|
329
|
+
--topics "systems programming,performance"
|
|
330
|
+
|
|
331
|
+
# Create from a natural language prompt (requires OPENAI_API_KEY or ANTHROPIC_API_KEY)
|
|
332
|
+
pnpm run cli -- interests create \
|
|
333
|
+
--from-prompt "I want to follow AI evals and agent infra"
|
|
334
|
+
|
|
335
|
+
# Update
|
|
336
|
+
pnpm run cli -- interests update --id <id> --name "New Name"
|
|
337
|
+
pnpm run cli -- interests update --id <id> --add-keywords "mcp,langgraph"
|
|
338
|
+
pnpm run cli -- interests update --id <id> --remove-topics "old-topic"
|
|
339
|
+
```
|
|
340
|
+
|
|
341
|
+
### Feed
|
|
342
|
+
|
|
343
|
+
```bash
|
|
344
|
+
pnpm run cli -- feed # last 12h, limit 20, card render
|
|
345
|
+
pnpm run cli -- feed --hours 24
|
|
346
|
+
pnpm run cli -- feed --days 3
|
|
347
|
+
pnpm run cli -- feed --kind bookmarks # default | bookmarks | followers | following
|
|
348
|
+
pnpm run cli -- feed --render table --limit 50
|
|
349
|
+
pnpm run cli -- feed --interactive
|
|
350
|
+
pnpm run cli -- feed --json
|
|
351
|
+
```
|
|
352
|
+
|
|
353
|
+
### Inbox
|
|
354
|
+
|
|
355
|
+
```bash
|
|
356
|
+
pnpm run cli -- inbox # list inbox suggestions
|
|
357
|
+
pnpm run cli -- inbox --all
|
|
358
|
+
pnpm run cli -- inbox --status inbox --limit 50
|
|
359
|
+
pnpm run cli -- inbox --interactive
|
|
360
|
+
pnpm run cli -- inbox --json
|
|
361
|
+
|
|
362
|
+
pnpm run cli -- inbox read --id <suggestion_id>
|
|
363
|
+
pnpm run cli -- inbox skip --id <suggestion_id>
|
|
364
|
+
pnpm run cli -- inbox later --id <suggestion_id>
|
|
365
|
+
pnpm run cli -- inbox archive --id <suggestion_id>
|
|
366
|
+
```
|
|
367
|
+
|
|
368
|
+
### Indexing
|
|
369
|
+
|
|
370
|
+
```bash
|
|
371
|
+
pnpm run cli -- reindex # run all jobs
|
|
372
|
+
pnpm run cli -- reindex tweets
|
|
373
|
+
pnpm run cli -- reindex graph
|
|
374
|
+
pnpm run cli -- reindex graph --force
|
|
375
|
+
pnpm run cli -- reindex suggestions --days 1
|
|
376
|
+
pnpm run cli -- reindex bookmarks
|
|
377
|
+
pnpm run cli -- reindex status
|
|
378
|
+
pnpm run cli -- reindex status --watch
|
|
379
|
+
```
|
|
380
|
+
|
|
381
|
+
### Local Data
|
|
382
|
+
|
|
383
|
+
```bash
|
|
384
|
+
pnpm run cli -- data download # full download → ~/.sonar/data.db
|
|
385
|
+
pnpm run cli -- data sync # incremental sync
|
|
386
|
+
pnpm run cli -- data path # print DB path
|
|
387
|
+
pnpm run cli -- data sql # open sqlite3 shell
|
|
388
|
+
```
|
|
389
|
+
|
|
390
|
+
---
|
|
391
|
+
|
|
392
|
+
## Environment Variables
|
|
393
|
+
|
|
394
|
+
| Variable | Required | Purpose |
|
|
395
|
+
|---|---|---|
|
|
396
|
+
| `SONAR_API_KEY` | Yes (unless saved by `init`) | Auth token |
|
|
397
|
+
| `SONAR_API_URL` | No | GraphQL endpoint (default: `http://localhost:8000/graphql`) |
|
|
398
|
+
| `SONAR_AI_VENDOR` | No | AI vendor for prompt generation (`openai` or `anthropic`) |
|
|
399
|
+
| `SONAR_FEED_RENDER` | No | Default render style (`card` or `table`) |
|
|
400
|
+
| `SONAR_FEED_WIDTH` | No | Default card width |
|
|
401
|
+
| `OPENAI_API_KEY` | Sometimes | Required for OpenAI-powered `--from-prompt` |
|
|
402
|
+
| `ANTHROPIC_API_KEY` | Sometimes | Required for Anthropic-powered `--from-prompt` |
|
|
403
|
+
|
|
404
|
+
## Local Files
|
|
405
|
+
|
|
406
|
+
| Path | Contents |
|
|
407
|
+
|---|---|
|
|
408
|
+
| `~/.sonar/config.json` | Token, API URL, CLI defaults |
|
|
409
|
+
| `~/.sonar/data.db` | Local synced SQLite database |
|
|
410
|
+
|
|
411
|
+
---
|
|
412
|
+
|
|
413
|
+
## Troubleshooting
|
|
414
|
+
|
|
415
|
+
**`No token found. Set SONAR_API_KEY or run: sonar init`**
|
|
416
|
+
Set `SONAR_API_KEY` in your environment, then run `pnpm run cli -- init`.
|
|
417
|
+
|
|
418
|
+
**`Unable to reach server, please try again shortly.`**
|
|
419
|
+
Check `SONAR_API_URL`, your network, and API availability.
|
|
420
|
+
|
|
421
|
+
**`OPENAI_API_KEY is not set` / `ANTHROPIC_API_KEY is not set`**
|
|
422
|
+
Set the key for your chosen vendor before using `--from-prompt` or interactive reply generation.
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { jsxs as _jsxs, jsx as _jsx, Fragment as _Fragment } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect, useState } from 'react';
|
|
3
|
+
import zod from 'zod';
|
|
4
|
+
import { Box, Text } from 'ink';
|
|
5
|
+
import { formatDistanceToNow } from 'date-fns';
|
|
6
|
+
import { gql } from '../lib/client.js';
|
|
7
|
+
import { Spinner } from '../components/Spinner.js';
|
|
8
|
+
import { AccountCard } from '../components/AccountCard.js';
|
|
9
|
+
export const options = zod.object({
|
|
10
|
+
json: zod.boolean().default(false).describe('Raw JSON output'),
|
|
11
|
+
debug: zod.boolean().default(false).describe('Debug mode'),
|
|
12
|
+
});
|
|
13
|
+
const QUERY = `
|
|
14
|
+
query Status {
|
|
15
|
+
me {
|
|
16
|
+
accountId
|
|
17
|
+
email
|
|
18
|
+
xHandle
|
|
19
|
+
xid
|
|
20
|
+
isPayingCustomer
|
|
21
|
+
indexingAccounts
|
|
22
|
+
indexedTweets
|
|
23
|
+
pendingEmbeddings
|
|
24
|
+
twitterIndexedAt
|
|
25
|
+
refreshedSuggestionsAt
|
|
26
|
+
}
|
|
27
|
+
suggestionCounts {
|
|
28
|
+
inbox
|
|
29
|
+
later
|
|
30
|
+
replied
|
|
31
|
+
read
|
|
32
|
+
skipped
|
|
33
|
+
archived
|
|
34
|
+
total
|
|
35
|
+
}
|
|
36
|
+
usage {
|
|
37
|
+
plan
|
|
38
|
+
interests { used limit atLimit }
|
|
39
|
+
apiKeys { used limit atLimit }
|
|
40
|
+
bookmarksEnabled
|
|
41
|
+
socialGraphDegrees
|
|
42
|
+
socialGraphMaxUsers
|
|
43
|
+
suggestionRefreshes { used limit atLimit resetsAt }
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
`;
|
|
47
|
+
export default function Account({ options: flags }) {
|
|
48
|
+
const [data, setData] = useState(null);
|
|
49
|
+
const [error, setError] = useState(null);
|
|
50
|
+
useEffect(() => {
|
|
51
|
+
async function run() {
|
|
52
|
+
try {
|
|
53
|
+
const result = await gql(QUERY, {}, { debug: flags.debug });
|
|
54
|
+
if (flags.json) {
|
|
55
|
+
process.stdout.write(JSON.stringify(result, null, 2) + '\n');
|
|
56
|
+
process.exit(0);
|
|
57
|
+
}
|
|
58
|
+
setData(result);
|
|
59
|
+
}
|
|
60
|
+
catch (err) {
|
|
61
|
+
if (flags.debug) {
|
|
62
|
+
console.error(JSON.stringify(err, null, 2));
|
|
63
|
+
}
|
|
64
|
+
setError(err instanceof Error ? err.message : String(err));
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
run();
|
|
68
|
+
}, []);
|
|
69
|
+
if (error)
|
|
70
|
+
return _jsxs(Text, { color: "red", children: ["Error: ", error] });
|
|
71
|
+
if (!data)
|
|
72
|
+
return _jsx(Spinner, { label: "Fetching account..." });
|
|
73
|
+
const { me, suggestionCounts, usage } = data;
|
|
74
|
+
return (_jsxs(Box, { flexDirection: "column", gap: 1, children: [me ? _jsx(AccountCard, { me: me }) : (_jsxs(Box, { flexDirection: "column", children: [_jsx(Text, { bold: true, color: "cyan", children: "Account" }), _jsx(Text, { dimColor: true, children: "Not authenticated" })] })), usage && (_jsxs(Box, { flexDirection: "column", children: [_jsx(Text, { bold: true, color: "cyan", children: "Plan" }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "plan: " }), _jsx(Text, { color: usage.plan === 'free' ? 'yellow' : 'green', children: usage.plan })] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "interests: " }), _jsxs(Text, { color: usage.interests.atLimit ? 'red' : undefined, children: [usage.interests.used, usage.interests.limit !== null ? `/${usage.interests.limit}` : ''] })] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "api keys: " }), _jsxs(Text, { color: usage.apiKeys.atLimit ? 'red' : undefined, children: [usage.apiKeys.used, usage.apiKeys.limit !== null ? `/${usage.apiKeys.limit}` : ''] })] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "bookmarks: " }), usage.bookmarksEnabled ? _jsx(Text, { color: "green", children: "enabled" }) : _jsx(Text, { dimColor: true, children: "upgrade to unlock" })] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "social graph: " }), usage.socialGraphDegrees, " degree", usage.socialGraphDegrees !== 1 ? 's' : '', usage.socialGraphMaxUsers !== null ? `, up to ${usage.socialGraphMaxUsers.toLocaleString()} users` : ', unlimited'] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "suggestion refreshes: " }), usage.suggestionRefreshes.limit !== null ? (_jsxs(_Fragment, { children: [_jsxs(Text, { color: usage.suggestionRefreshes.atLimit ? 
'red' : undefined, children: [usage.suggestionRefreshes.used, "/", usage.suggestionRefreshes.limit] }), usage.suggestionRefreshes.resetsAt && (_jsxs(Text, { dimColor: true, children: [' ', "(resets ", formatDistanceToNow(new Date(usage.suggestionRefreshes.resetsAt), { addSuffix: true }), ")"] }))] })) : (_jsx(Text, { color: "green", children: "unlimited" }))] })] })), _jsxs(Box, { flexDirection: "column", children: [_jsx(Text, { bold: true, color: "cyan", children: "Suggestions" }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "inbox: " }), _jsx(Text, { color: suggestionCounts.inbox > 0 ? 'green' : undefined, children: suggestionCounts.inbox })] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "later: " }), suggestionCounts.later] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "replied: " }), suggestionCounts.replied] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "archived: " }), suggestionCounts.archived] }), _jsxs(Text, { children: [_jsx(Text, { dimColor: true, children: "total: " }), suggestionCounts.total] })] })] }));
|
|
75
|
+
}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import { jsxs as _jsxs, jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect, useState } from 'react';
|
|
3
|
+
import { Box, Text } from 'ink';
|
|
4
|
+
import { unlinkSync, existsSync } from 'node:fs';
|
|
5
|
+
import { gql } from '../../../lib/client.js';
|
|
6
|
+
import { Spinner } from '../../../components/Spinner.js';
|
|
7
|
+
import { DB_PATH, openDb, upsertTweet, upsertFeedItem, upsertSuggestion, upsertInterest, setSyncState, } from '../../../lib/db.js';
|
|
8
|
+
import { FEED_QUERY, SUGGESTIONS_QUERY, INTERESTS_QUERY } from '../../../lib/data-queries.js';
|
|
9
|
+
export default function DataDownload() {
|
|
10
|
+
const [result, setResult] = useState(null);
|
|
11
|
+
const [error, setError] = useState(null);
|
|
12
|
+
useEffect(() => {
|
|
13
|
+
async function run() {
|
|
14
|
+
try {
|
|
15
|
+
if (existsSync(DB_PATH))
|
|
16
|
+
unlinkSync(DB_PATH);
|
|
17
|
+
const db = openDb();
|
|
18
|
+
const [feedResult, suggestionsResult, interestsResult] = await Promise.all([
|
|
19
|
+
gql(FEED_QUERY, { hours: null, days: 7, limit: 500 }),
|
|
20
|
+
gql(SUGGESTIONS_QUERY, { status: null, limit: 500 }),
|
|
21
|
+
gql(INTERESTS_QUERY),
|
|
22
|
+
]);
|
|
23
|
+
for (const item of feedResult.feed) {
|
|
24
|
+
upsertTweet(db, item.tweet);
|
|
25
|
+
upsertFeedItem(db, { tweetId: item.tweet.id, score: item.score, matchedKeywords: item.matchedKeywords });
|
|
26
|
+
}
|
|
27
|
+
for (const s of suggestionsResult.suggestions) {
|
|
28
|
+
upsertTweet(db, s.tweet);
|
|
29
|
+
upsertSuggestion(db, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched });
|
|
30
|
+
}
|
|
31
|
+
for (const i of interestsResult.projects) {
|
|
32
|
+
upsertInterest(db, i);
|
|
33
|
+
}
|
|
34
|
+
setSyncState(db, 'last_synced_at', new Date().toISOString());
|
|
35
|
+
db.close();
|
|
36
|
+
setResult({
|
|
37
|
+
feedCount: feedResult.feed.length,
|
|
38
|
+
suggestionsCount: suggestionsResult.suggestions.length,
|
|
39
|
+
interestsCount: interestsResult.projects.length,
|
|
40
|
+
});
|
|
41
|
+
}
|
|
42
|
+
catch (err) {
|
|
43
|
+
setError(err instanceof Error ? err.message : String(err));
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
run();
|
|
47
|
+
}, []);
|
|
48
|
+
if (error)
|
|
49
|
+
return _jsxs(Text, { color: "red", children: ["Error: ", error] });
|
|
50
|
+
if (!result)
|
|
51
|
+
return _jsx(Spinner, { label: "Downloading data..." });
|
|
52
|
+
return (_jsxs(Box, { flexDirection: "column", children: [_jsxs(Box, { children: [_jsx(Text, { bold: true, children: "Download complete" }), _jsxs(Text, { dimColor: true, children: [" ", DB_PATH] })] }), _jsxs(Text, { children: [_jsx(Text, { color: "cyan", children: result.feedCount }), _jsx(Text, { dimColor: true, children: " feed items " }), _jsx(Text, { color: "cyan", children: result.suggestionsCount }), _jsx(Text, { dimColor: true, children: " suggestions " }), _jsx(Text, { color: "cyan", children: result.interestsCount }), _jsx(Text, { dimColor: true, children: " interests" })] })] }));
|
|
53
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect } from 'react';
|
|
3
|
+
import { Text } from 'ink';
|
|
4
|
+
import { DB_PATH } from '../../../lib/db.js';
|
|
5
|
+
export default function DataPath() {
|
|
6
|
+
useEffect(() => {
|
|
7
|
+
process.stdout.write(`${DB_PATH}\n`);
|
|
8
|
+
process.exit(0);
|
|
9
|
+
}, []);
|
|
10
|
+
return _jsx(Text, { children: DB_PATH });
|
|
11
|
+
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect } from 'react';
|
|
3
|
+
import { Text } from 'ink';
|
|
4
|
+
import { spawnSync } from 'node:child_process';
|
|
5
|
+
import { DB_PATH } from '../../../lib/db.js';
|
|
6
|
+
export default function DataSql() {
|
|
7
|
+
useEffect(() => {
|
|
8
|
+
const { status } = spawnSync('sqlite3', [DB_PATH], { stdio: 'inherit' });
|
|
9
|
+
process.exit(status ?? 0);
|
|
10
|
+
}, []);
|
|
11
|
+
return _jsx(Text, { dimColor: true, children: "Opening sqlite3..." });
|
|
12
|
+
}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { jsxs as _jsxs, jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect, useState } from 'react';
|
|
3
|
+
import { Box, Text } from 'ink';
|
|
4
|
+
import { unlinkSync, existsSync } from 'node:fs';
|
|
5
|
+
import { gql } from '../../../lib/client.js';
|
|
6
|
+
import { Spinner } from '../../../components/Spinner.js';
|
|
7
|
+
import { DB_PATH, openDb, upsertTweet, upsertFeedItem, upsertSuggestion, upsertInterest, getSyncState, setSyncState, } from '../../../lib/db.js';
|
|
8
|
+
import { FEED_QUERY, SUGGESTIONS_QUERY, INTERESTS_QUERY } from '../../../lib/data-queries.js';
|
|
9
|
+
/**
 * Ink command component: synchronizes the local SQLite cache with the server.
 *
 * First run (no `last_synced_at` sync state): performs a full download —
 * fetches feed (last 7 days), suggestions, and interests, then rebuilds the
 * DB file from scratch. Subsequent runs: incremental sync of feed items newer
 * than the last sync (capped at 168 hours / 7 days) plus all suggestions,
 * reporting the delta in row counts.
 *
 * Fixes over the previous revision:
 *  - The full download fetches BEFORE deleting the DB file, so a network
 *    failure no longer destroys the existing local cache.
 *  - DB handles are closed in `finally` blocks, so a thrown `gql`/upsert
 *    error no longer leaks an open handle.
 */
export default function DataSync() {
    const [result, setResult] = useState(null);
    const [error, setError] = useState(null);
    useEffect(() => {
        // Full download: fetch everything, then rebuild the DB from scratch.
        async function fullDownload() {
            // Fetch first: a failure here leaves the old cache file untouched.
            const [feedResult, suggestionsResult, interestsResult] = await Promise.all([
                gql(FEED_QUERY, { hours: null, days: 7, limit: 500 }),
                gql(SUGGESTIONS_QUERY, { status: null, limit: 500 }),
                gql(INTERESTS_QUERY),
            ]);
            if (existsSync(DB_PATH))
                unlinkSync(DB_PATH);
            const freshDb = openDb();
            try {
                for (const item of feedResult.feed) {
                    upsertTweet(freshDb, item.tweet);
                    upsertFeedItem(freshDb, { tweetId: item.tweet.id, score: item.score, matchedKeywords: item.matchedKeywords });
                }
                for (const s of suggestionsResult.suggestions) {
                    upsertTweet(freshDb, s.tweet);
                    upsertSuggestion(freshDb, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched });
                }
                for (const i of interestsResult.projects) {
                    upsertInterest(freshDb, i);
                }
                setSyncState(freshDb, 'last_synced_at', new Date().toISOString());
            }
            finally {
                freshDb.close();
            }
            return { feedCount: feedResult.feed.length, suggestionsCount: suggestionsResult.suggestions.length, interestsCount: interestsResult.projects.length };
        }
        // Incremental sync: fetch only feed items since the last sync. Does
        // NOT close `db`; the caller owns the handle.
        async function incrementalSync(db, lastSyncedAt) {
            // Round up to whole hours; cap at one week so the server query stays bounded.
            const hoursSinceSync = Math.min(Math.ceil((Date.now() - new Date(lastSyncedAt).getTime()) / 3600000), 168);
            const [feedResult, suggestionsResult] = await Promise.all([
                gql(FEED_QUERY, { hours: hoursSinceSync, days: null, limit: 500 }),
                gql(SUGGESTIONS_QUERY, { status: null, limit: 500 }),
            ]);
            // Row counts before/after give the user a "+N new" delta.
            const prevFeedCount = db.prepare('SELECT COUNT(*) as n FROM feed_items').get().n;
            const prevSuggestionsCount = db.prepare('SELECT COUNT(*) as n FROM suggestions').get().n;
            for (const item of feedResult.feed) {
                upsertTweet(db, item.tweet);
                upsertFeedItem(db, { tweetId: item.tweet.id, score: item.score, matchedKeywords: item.matchedKeywords });
            }
            for (const s of suggestionsResult.suggestions) {
                upsertTweet(db, s.tweet);
                upsertSuggestion(db, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched });
            }
            setSyncState(db, 'last_synced_at', new Date().toISOString());
            const newFeedCount = db.prepare('SELECT COUNT(*) as n FROM feed_items').get().n;
            const newSuggestionsCount = db.prepare('SELECT COUNT(*) as n FROM suggestions').get().n;
            return {
                feedCount: newFeedCount,
                suggestionsCount: newSuggestionsCount,
                interestsCount: 0,
                isSync: true,
                deltaFeed: newFeedCount - prevFeedCount,
                deltaSuggestions: newSuggestionsCount - prevSuggestionsCount,
            };
        }
        async function run() {
            try {
                const db = openDb();
                let synced = null;
                try {
                    const lastSyncedAt = getSyncState(db, 'last_synced_at');
                    if (lastSyncedAt) {
                        synced = await incrementalSync(db, lastSyncedAt);
                    }
                }
                finally {
                    // Always release the handle, even when a fetch/upsert throws.
                    // Must be closed before fullDownload deletes the file.
                    db.close();
                }
                setResult(synced ?? await fullDownload());
            }
            catch (err) {
                setError(err instanceof Error ? err.message : String(err));
            }
        }
        run();
    }, []);
    if (error)
        return _jsxs(Text, { color: "red", children: ["Error: ", error] });
    if (!result)
        return _jsx(Spinner, { label: "Syncing data..." });
    if (result.isSync) {
        return (_jsxs(Box, { flexDirection: "column", children: [_jsxs(Box, { children: [_jsx(Text, { bold: true, children: "Sync complete" }), _jsxs(Text, { dimColor: true, children: [" ", DB_PATH] })] }), _jsxs(Text, { children: [_jsx(Text, { color: "green", children: "feed" }), _jsxs(Text, { dimColor: true, children: [" +", result.deltaFeed ?? 0, " (", result.feedCount, " total) "] }), _jsx(Text, { color: "green", children: "suggestions" }), _jsxs(Text, { dimColor: true, children: [" +", result.deltaSuggestions ?? 0, " (", result.suggestionsCount, " total)"] })] })] }));
    }
    return (_jsxs(Box, { flexDirection: "column", children: [_jsxs(Box, { children: [_jsx(Text, { bold: true, children: "Download complete" }), _jsxs(Text, { dimColor: true, children: [" ", DB_PATH] })] }), _jsxs(Text, { children: [_jsx(Text, { color: "cyan", children: result.feedCount }), _jsx(Text, { dimColor: true, children: " feed items " }), _jsx(Text, { color: "cyan", children: result.suggestionsCount }), _jsx(Text, { dimColor: true, children: " suggestions " }), _jsx(Text, { color: "cyan", children: result.interestsCount }), _jsx(Text, { dimColor: true, children: " interests" })] })] }));
}
|