@1a35e1/sonar-cli 0.2.1 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. package/README.md +151 -265
  2. package/dist/commands/{inbox/archive.js → archive.js} +2 -2
  3. package/dist/commands/config/data/download.js +2 -2
  4. package/dist/commands/config/data/sync.js +2 -2
  5. package/dist/commands/config/nuke.js +20 -2
  6. package/dist/commands/feed.js +105 -155
  7. package/dist/commands/index.js +172 -4
  8. package/dist/commands/{inbox/later.js → later.js} +2 -2
  9. package/dist/commands/refresh.js +41 -0
  10. package/dist/commands/{inbox/skip.js → skip.js} +2 -2
  11. package/dist/commands/status.js +128 -0
  12. package/dist/commands/sync/bookmarks.js +35 -0
  13. package/dist/commands/topics/add.js +71 -0
  14. package/dist/commands/topics/delete.js +42 -0
  15. package/dist/commands/topics/edit.js +97 -0
  16. package/dist/commands/topics/index.js +54 -0
  17. package/dist/commands/topics/suggest.js +125 -0
  18. package/dist/commands/topics/view.js +48 -0
  19. package/dist/components/AccountCard.js +1 -1
  20. package/dist/components/Banner.js +11 -0
  21. package/dist/components/InteractiveSession.js +95 -210
  22. package/dist/components/Spinner.js +5 -4
  23. package/dist/components/TopicCard.js +15 -0
  24. package/dist/components/TweetCard.js +76 -0
  25. package/dist/lib/ai.js +85 -0
  26. package/dist/lib/client.js +66 -40
  27. package/dist/lib/config.js +3 -2
  28. package/dist/lib/data-queries.js +1 -3
  29. package/dist/lib/skill.js +66 -226
  30. package/package.json +13 -3
  31. package/dist/commands/account.js +0 -75
  32. package/dist/commands/inbox/index.js +0 -103
  33. package/dist/commands/inbox/read.js +0 -41
  34. package/dist/commands/ingest/bookmarks.js +0 -55
  35. package/dist/commands/ingest/index.js +0 -5
  36. package/dist/commands/ingest/tweets.js +0 -55
  37. package/dist/commands/interests/create.js +0 -107
  38. package/dist/commands/interests/index.js +0 -56
  39. package/dist/commands/interests/match.js +0 -33
  40. package/dist/commands/interests/update.js +0 -153
  41. package/dist/commands/monitor.js +0 -93
  42. package/dist/commands/quickstart.js +0 -231
  43. package/dist/components/InterestCard.js +0 -10
package/README.md CHANGED
@@ -1,21 +1,14 @@
1
- # 🔊 Sonar (Preview)
1
+ # 🔊 Sonar (Alpha)
2
2
 
3
- Experimental X CLI for OpenClaw 🦞 power users.
3
+ Agent-optimised [X](https://x.com) CLI for founders who want to stay ahead of the curve.
4
4
 
5
- Sonar matches interests from your X graph using various AI pipelines. We built this to automate our social intelligence.
5
+ We got tired of missing important content in our feed and built Sonar to fix it.
6
6
 
7
- This cli has been designed to handover indexing and consumption to agents.
8
-
9
- * Pipe it into scripts,
10
- * automate your morning briefing,
11
- * Or just discover tweets you probably missed out on the web interface.
12
-
13
- ---
7
+ Sonar matches your interests from your X network, filtering only relevant content from your graph using a variety of AI pipelines. We built this to automate our social intelligence at [@LighthouseGov](https://x.com/LighthouseGov).
14
8
 
15
9
  ## Get started
16
10
 
17
- * Register with `X` to get an API key from `https://sonar.8640p.info/`
18
- * Learn more about which [scopes](#scopes) we request and why.
11
+ * Login with your `X` account to obtain a [free API key](https://sonar.8640p.info/).
19
12
 
20
13
  Install the CLI
21
14
 
@@ -26,397 +19,290 @@ pnpm add -g @1a35e1/sonar-cli@latest
26
19
  Register your API key.
27
20
 
28
21
  ```sh
29
- # Make "SONAR_API_KEY" avaliable in your env
22
+ # Ensure "SONAR_API_KEY" is available in your env
30
23
  export SONAR_API_KEY=snr_xxxxx
31
24
 
32
25
  # or, manually register
33
26
  sonar config setup key=<YOUR_API_KEY>
34
27
  ```
35
28
 
36
- View your account to ensure evrything works.
29
+ View your account status:
37
30
 
38
31
  ```sh
39
- sonar account
32
+ sonar status
40
33
  ```
41
34
 
42
- Ingest your first `tweets` and check to `monitor` progress.
35
+ Run your first refresh to index tweets and generate suggestions:
43
36
 
44
- > The first time this you run this command it will take some time.
37
+ > The first time you run this it will take some time.
45
38
 
46
39
  ```sh
47
- sonar ingest tweets
48
-
49
- sonar monitor
50
- sonar monitor --watch
40
+ sonar refresh
41
+ sonar status --watch
51
42
  ```
52
43
 
53
44
  ---
54
45
 
55
46
  ## Scopes
56
47
 
57
- * We currently request `read:*` and `offline:processing` scopes based on <<https://docs.x.com/fundamentals/authentication/oauth-2-0/>. If there is an appite
58
-
59
- * So we can stay connected to your account until you revoke access.
60
- * Posts you’ve liked and likes you can view.
61
- * All the posts you can view, including posts from protected accounts.
62
- * Accounts you’ve muted.
63
- * Accounts you’ve blocked.
64
- * People who follow you and people who you follow.
65
- * All your Bookmarks.
66
- * Lists, list members, and list followers of lists you’ve created or are a member of, including private lists.
67
- * Any account you can view, including protected accounts.
48
+ * We currently request `read:*` and `offline:processing` scopes
49
+ * This allows us to read your feed, bookmarks, followers/following, and other account data to power our signal filtering and topic suggestions.
68
50
 
69
- ## Why Sonar exists
70
51
 
71
- Setting up your own social data pipeline is genuinely awful. You're looking at OAuth flows, rate limit math, pagination handling, webhook plumbing, deduplication logic, and a SQLite schema you'll regret in three weeks — before you've seen a single useful result. Most developers who try it abandon it halfway through.
52
+ ## Use cases
72
53
 
73
- **Sonar skips all of that. Get actionalable data for OpenClaw in 15 minutes.**
54
+ ### Morning briefing in one command
74
55
 
75
- We believe your data is yours. So you want to go deeper than our platform allows — build your own models, run custom queries, pipe it into your own tooling — you can download everything we have indexed on your behalf into a local SQLite database and do whatever you want with it:
56
+ Pull everything relevant that happened while you slept:
76
57
 
77
58
  ```bash
78
- pnpm run cli -- data download # full snapshot → ~/.sonar/data.db
79
- pnpm run cli -- data sync # incremental updates
80
- pnpm run cli -- data sql # drop into a sqlite3 shell
59
+ sonar feed --hours 8
81
60
  ```
82
61
 
83
- No lock-in. If you outgrow us, you leave with your data intact.
84
-
85
- ## Design philosophy
86
-
87
- There's a quiet shift happening in how developer tools are built.
88
-
89
- In the early web2 era, API-first was a revelation. Stripe, Twilio, Sendgrid — companies that exposed clean REST contracts unlocked entire ecosystems of products built on top of them. The insight was simple: if your service has strong, reliable APIs, anyone can build anything. The interface didn't matter as much as the contract underneath.
90
- We're at a similar inflection point now, but the interface layer has changed dramatically.
62
+ ### Stream your feed in real time
91
63
 
92
- The goal for most workflows today is fire and forget — you define what you want, set it in motion, and let agents handle the execution. That only works if the underlying APIs are strong enough to support complex, long-running ETL pipelines without hand-holding. Sonar is built with that assumption: the API is the product, the CLI is just one interface into it.
93
- Which raises an interesting question about CLIs themselves. Traditionally a CLI was developer-first by definition — you were writing for someone comfortable with flags, pipes, and man pages. But if the primary consumer of your CLI is increasingly an agent (OpenClaw, a cron job, an LLM with tool access), the design principles shift:
64
+ Watch for new items as they appear:
94
65
 
95
- Output should be machine-readable by default. Every command has a --json flag. Agents don't parse card renders.
96
- Commands should be composable. Small, single-purpose commands that pipe cleanly into each other are more useful to an agent than monolithic workflows.
97
-
98
- Side effects should be explicit. An agent calling index --force should know exactly what it's triggering. No surprises.
99
- Errors should be structured. A human reads an error message. An agent needs to know whether to retry, skip, or escalate.
100
-
101
- The CLI still needs to work well for humans — interactive mode, card renders, readable output — but those are progressive enhancements on top of a foundation built for automation. Design for the agent, polish for the human.
102
- This is what API-first looks like in the agentic era: strong contracts at the service layer, composable interfaces at the CLI layer, and a clear separation between the two.
103
-
104
- ---
105
-
106
- ## What you can do with it
66
+ ```bash
67
+ sonar feed --follow # visual cards, polls every 30s
68
+ sonar feed --follow --json | jq .score # NDJSON stream for agents
69
+ ```
107
70
 
108
- ### Morning briefing in one command
71
+ ### Discover new topics with AI
109
72
 
110
- Pull everything relevant that happened while you slept:
73
+ Let Sonar suggest topics based on your interests and feed:
111
74
 
112
75
  ```bash
113
- pnpm run cli -- feed --hours 8 --render card
114
- pnpm run cli -- inbox --status inbox
76
+ sonar topics suggest # interactive accept/reject
77
+ sonar topics suggest --count 3 # just 3 suggestions
115
78
  ```
116
79
 
117
- ### Track a topic you care about — right now
80
+ ### Track a topic you care about
118
81
 
119
- Create a new interest from a plain English prompt and get content immediately:
82
+ Add a topic, then refresh:
120
83
 
121
84
  ```bash
122
- pnpm run cli -- interests create \
123
- --from-prompt "I want to follow AI evals and agent infrastructure"
124
-
125
- pnpm run cli -- index suggestions --days 1
126
- pnpm run cli -- feed --hours 24
85
+ sonar topics add "AI agents"
86
+ sonar refresh
87
+ sonar feed --hours 24
127
88
  ```
128
89
 
129
- Sonar generates keywords and topics from your prompt, kicks off indexing, and your feed updates with relevant posts.
90
+ Sonar rebuilds your social graph, indexes recent tweets, and generates suggestions matched against your topics and interest profile.
130
91
 
131
92
  ### Build a scriptable news digest
132
93
 
133
94
  Combine `--json` output with `jq` to pipe Sonar content wherever you want:
134
95
 
135
96
  ```bash
136
- # Get today's top feed items as JSON
137
- pnpm run cli -- feed --hours 24 --json | jq '.[] | {author, text, url}'
138
-
139
- # Summarize your inbox with an LLM
140
- pnpm run cli -- inbox --json | jq '.[].text' | your-summarizer-script
141
- ```
142
-
143
- ### Keep your local data fresh and queryable
97
+ # Get today's feed as JSON
98
+ sonar feed --hours 24 --json | jq '.[] | {author: .tweet.user.username, text: .tweet.text}'
144
99
 
145
- Download a full SQLite snapshot of your Sonar data and query it directly:
100
+ # Summarize with an LLM
101
+ sonar feed --json | jq '.[].tweet.text' | your-summarizer-script
146
102
 
147
- ```bash
148
- pnpm run cli -- data download
149
- pnpm run cli -- data sql
150
- # Now you have a full sqlite3 shell — write any query you want
103
+ # Stream high-score items to a file
104
+ sonar feed --follow --json | jq --unbuffered 'select(.score > 0.7)' >> highlights.jsonl
151
105
  ```
152
106
 
153
- Run incremental syncs on a cron to keep it current:
107
+ ### Monitor the pipeline
108
+
109
+ Watch the queue in real time while refresh runs:
154
110
 
155
111
  ```bash
156
- # crontab: sync every 30 minutes
157
- */30 * * * * cd /your/project && pnpm run cli -- data sync
112
+ sonar refresh
113
+ sonar status --watch
158
114
  ```
159
115
 
160
116
  ### Interactive triage
161
117
 
162
- Work through your inbox without leaving the terminal:
118
+ Work through suggestions without leaving the terminal:
163
119
 
164
120
  ```bash
165
- pnpm run cli -- inbox --interactive
166
- pnpm run cli -- feed --interactive
121
+ sonar # interactive triage is on by default
122
+ sonar --no-interactive # disable for scripting
167
123
  ```
168
124
 
169
- Mark suggestions as read, skip, archive, or save for later — keyboard-driven.
125
+ Mark suggestions as skip, later, or archive — keyboard-driven.
170
126
 
171
- ### Monitor indexing jobs
127
+ ### Build your own filters and dashboards (WIP)
172
128
 
173
- Watch the queue in real time while you trigger a full re-index:
129
+ Download your data and build your own tools on top of it.
174
130
 
175
131
  ```bash
176
- pnpm run cli -- index # trigger all jobs
177
- pnpm run cli -- index status --watch # watch until complete
132
+ sonar sync # sync data to ~/.sonar/data.db
178
133
  ```
179
134
 
180
- ---
181
-
182
- ## What Sonar doesn't do
135
+ No lock-in. If you outgrow us, you leave with your data intact.
183
136
 
184
- Sonar is **not a global search engine**. It won't crawl the entire internet or index trending posts from people you've never heard of.
137
+ ---
185
138
 
186
- Instead, it searches within your social graph — your followers and the people you follow — up to **2 degrees of separation**. That's it. This is an intentional constraint, not a limitation we're working around.
139
+ ## How Sonar finds relevant content
187
140
 
188
- The reason is practical: API rate limits make broad crawling impossible at any useful refresh frequency. But the reason it works is more interesting **the people in your network are already a curated signal layer**. The accounts you follow, and the accounts they follow, are a surprisingly high-quality filter for what's relevant to your domain. Sonar's job is to surface what's moving through that graph before it reaches mainstream feeds.
141
+ Sonar surfaces relevant content from your immediate network — the people you follow and the people who follow you. Your network is already a curated signal layer. Sonar's job is to surface what's moving through that graph before it reaches mainstream feeds.
189
142
 
190
143
  What this means in practice:
191
144
 
192
145
  * Results reflect your network's attention, not global virality
193
- * You won't see noise from accounts you have no connection to
194
146
  * The feed gets more useful the more intentional you are about who you follow
195
- * Adding interests with specific keywords and topics sharpens what Sonar surfaces *within* that graph
196
-
197
- If you want global trend monitoring, tools like Brandwatch or Twitter's native search are better fits. Sonar is for developers who want a focused, low-noise signal from a network they've already curated.
198
-
199
- ---
147
+ * Bookmarking and liking content improves your recommendations over time
148
+ * Topics sharpen what Sonar surfaces within your graph
200
149
 
201
- ## Pair with OpenClaw
202
-
203
- [OpenClaw](https://github.com/openclaw/openclaw) is a local-first autonomous AI agent that runs on your machine and talks to you through WhatsApp, Telegram, Discord, Slack, or iMessage. It can execute shell commands, run on a schedule, and be extended with custom skills.
204
-
205
- Sonar + OpenClaw is a natural stack: **Sonar handles the signal filtering and curation, OpenClaw handles delivery and action.** Together they turn your social feed into an ambient intelligence layer you don't have to babysit.
206
-
207
- ### Morning briefing delivered to your phone
208
-
209
- Set up a cron job in OpenClaw to run your Sonar digest and pipe it back to you on Telegram every morning:
150
+ ## Setup
210
151
 
211
- ```
212
- # In OpenClaw: schedule a daily 8am briefing
213
- "Every morning at 8am, run `sonar feed --hours 8 --json` and summarize the top 5 posts for me"
214
- ```
152
+ ### Prerequisites
215
153
 
216
- OpenClaw will execute the CLI, pass the JSON output to your LLM, and send a clean summary straight to your phone — no dashboard to open.
154
+ * Node.js 20+
155
+ * `pnpm`
156
+ * A Sonar API key from [sonar.8640p.info](https://sonar.8640p.info/)
217
157
 
218
- ### Ask your feed questions in natural language
158
+ ### Install and authenticate
219
159
 
220
- Because `--json` makes Sonar output composable, OpenClaw can reason over it conversationally:
160
+ ```bash
161
+ pnpm add -g @1a35e1/sonar-cli@latest
221
162
 
163
+ export SONAR_API_KEY="your_api_key_here"
164
+ sonar config setup key=<YOUR_API_KEY>
222
165
  ```
223
- # Example prompts you can send OpenClaw via WhatsApp:
224
- "What's the most discussed topic in my Sonar feed today?"
225
- "Did anyone in my feed mention Uniswap V4 in the last 48 hours?"
226
- "Summarize my unread Sonar inbox"
227
- ```
228
-
229
- Wire it up once as an OpenClaw skill and your feed becomes queryable from any messaging app.
230
166
 
231
- ### Triage your inbox hands-free
232
-
233
- Combine OpenClaw's scheduling with Sonar's inbox API to automatically mark low-signal suggestions:
167
+ Verify it works:
234
168
 
235
169
  ```bash
236
- # Shell script you can hand to OpenClaw as a scheduled skill
237
- sonar inbox --json | \
238
- jq '[.[] | select(.score < 0.4) | .id]' | \
239
- xargs -I{} sonar inbox skip {}
170
+ sonar status
171
+ sonar topics
240
172
  ```
241
173
 
242
- Run this nightly and your inbox stays clean without manual triage.
174
+ ---
243
175
 
244
- ### Get alerted when a topic spikes
176
+ ## Command Reference
245
177
 
246
- Use OpenClaw's Heartbeat (scheduled wake-up) to watch for signal surges and notify you:
178
+ ### Default triage suggestions
247
179
 
180
+ ```bash
181
+ sonar # interactive triage (default)
182
+ sonar --hours 24 # widen time window
183
+ sonar --days 3 # last 3 days
184
+ sonar --kind bookmarks # default | bookmarks | followers | following
185
+ sonar --render table --limit 50 # table layout
186
+ sonar --json # raw JSON output
187
+ sonar --no-interactive # disable interactive mode
248
188
  ```
249
- # OpenClaw cron: check every 2 hours
250
- "Run `sonar feed --hours 2 --json` — if there are more than 10 posts about
251
- 'token launchpad' or 'LVR', send me a Telegram alert with the highlights"
252
- ```
253
-
254
- Effectively a custom Google Alert, but filtered through your actual interest graph.
255
-
256
- ### Build a Sonar skill for OpenClaw
257
189
 
258
- The cleanest integration is wrapping Sonar as a reusable OpenClaw skill. Drop a skill file in your OpenClaw workspace:
190
+ ### Feed read-only view
259
191
 
260
- ```typescript
261
- // skills/sonar.ts
262
- export async function getFeed(hours = 12) {
263
- const { stdout } = await exec(`sonar feed --hours ${hours} --json`);
264
- return JSON.parse(stdout);
265
- }
266
-
267
- export async function getInbox() {
268
- const { stdout } = await exec(`sonar inbox --json`);
269
- return JSON.parse(stdout);
270
- }
192
+ ```bash
193
+ sonar feed # read-only feed (last 12h, limit 20)
194
+ sonar feed --hours 48 --limit 50 # widen window
195
+ sonar feed --kind bookmarks # bookmarks | followers | following
196
+ sonar feed --render table # table layout
197
+ sonar feed --json | jq . # pipe to jq
271
198
  ```
272
199
 
273
- Once registered, OpenClaw can call these tools autonomously whenever it decides they're relevant — no manual prompting required.
274
-
275
- ---
276
-
277
- ## Setup
278
-
279
- ### Prerequisites
280
-
281
- * Node.js 20+
282
- * `pnpm`
283
- * A Sonar API key from [sonar.sh/account](https://sonar.sh/account?tab=api-keys)
284
- * Optional: `sqlite3` CLI (only needed for `data sql`)
200
+ #### Streaming with --follow
285
201
 
286
- ### Install and authenticate
202
+ Poll for new items continuously and stream them to your terminal or another process:
287
203
 
288
204
  ```bash
289
- pnpm install
290
-
291
- export SONAR_API_KEY="your_api_key_here"
292
- pnpm run cli -- init
205
+ sonar feed --follow # poll every 30s, visual cards
206
+ sonar feed --follow --interval 10 # poll every 10s
207
+ sonar feed --follow --json # NDJSON stream (one JSON per line)
208
+ sonar feed --follow --json | jq --unbuffered '.score'
293
209
  ```
294
210
 
295
- `init` writes your config to `~/.sonar/config.json`. If `SONAR_API_KEY` is set in your environment, it always takes precedence.
211
+ Press `q` to quit follow mode.
296
212
 
297
- Verify it works:
213
+ ### Topics
298
214
 
299
215
  ```bash
300
- pnpm run cli -- account
301
- pnpm run cli -- interests
216
+ sonar topics # list all topics
217
+ sonar topics --json # JSON output
218
+ sonar topics add "AI agents" # add a topic
219
+ sonar topics view <id> # view a topic
220
+ sonar topics edit <id> --name "New Name"
221
+ sonar topics delete <id> # delete a topic
302
222
  ```
303
223
 
304
- ---
224
+ #### AI-powered topic suggestions
305
225
 
306
- ## Command Reference
307
-
308
- ### Account & Config
226
+ Let Sonar suggest new topics based on your existing interests and recent feed:
309
227
 
310
228
  ```bash
311
- pnpm run cli -- account # plan, usage, suggestion counters
312
- pnpm run cli -- config # show current config
313
- pnpm run cli -- config set vendor anthropic # or openai
314
- pnpm run cli -- config set feed-render card # or table
315
- pnpm run cli -- config set feed-width 100
229
+ sonar topics suggest # interactive y/n/q per suggestion
230
+ sonar topics suggest --count 3 # limit to 3 suggestions
231
+ sonar topics suggest --vendor anthropic # use Anthropic instead of OpenAI
232
+ sonar topics suggest --json # raw suggestions as JSON
316
233
  ```
317
234
 
318
- ### Interests
319
-
320
- ```bash
321
- pnpm run cli -- interests # list all
322
- pnpm run cli -- interests --json # JSON output
323
-
324
- # Create manually
325
- pnpm run cli -- interests create \
326
- --name "Rust Systems" \
327
- --description "Rust, compilers, and systems tooling" \
328
- --keywords "rust,cargo,wasm" \
329
- --topics "systems programming,performance"
330
-
331
- # Create from a natural language prompt (requires OPENAI_API_KEY or ANTHROPIC_API_KEY)
332
- pnpm run cli -- interests create \
333
- --from-prompt "I want to follow AI evals and agent infra"
334
-
335
- # Update
336
- pnpm run cli -- interests update --id <id> --name "New Name"
337
- pnpm run cli -- interests update --id <id> --add-keywords "mcp,langgraph"
338
- pnpm run cli -- interests update --id <id> --remove-topics "old-topic"
339
- ```
235
+ Requires `OPENAI_API_KEY` or `ANTHROPIC_API_KEY` depending on vendor.
340
236
 
341
- ### Feed
237
+ ### Pipeline
342
238
 
343
239
  ```bash
344
- pnpm run cli -- feed # last 12h, limit 20, card render
345
- pnpm run cli -- feed --hours 24
346
- pnpm run cli -- feed --days 3
347
- pnpm run cli -- feed --kind bookmarks # default | bookmarks | followers | following
348
- pnpm run cli -- feed --render table --limit 50
349
- pnpm run cli -- feed --interactive
350
- pnpm run cli -- feed --json
240
+ sonar refresh # full pipeline: graph → tweets → suggestions
241
+ sonar status # account status, queue activity
242
+ sonar status --watch # poll every 2s
351
243
  ```
352
244
 
353
- ### Inbox
245
+ ### Triage
354
246
 
355
247
  ```bash
356
- pnpm run cli -- inbox # list inbox suggestions
357
- pnpm run cli -- inbox --all
358
- pnpm run cli -- inbox --status inbox --limit 50
359
- pnpm run cli -- inbox --interactive
360
- pnpm run cli -- inbox --json
361
-
362
- pnpm run cli -- inbox read --id <suggestion_id>
363
- pnpm run cli -- inbox skip --id <suggestion_id>
364
- pnpm run cli -- inbox later --id <suggestion_id>
365
- pnpm run cli -- inbox archive --id <suggestion_id>
248
+ sonar skip --id <suggestion_id> # skip a suggestion
249
+ sonar later --id <suggestion_id> # save for later
250
+ sonar archive --id <suggestion_id> # archive a suggestion
366
251
  ```
367
252
 
368
- ### Indexing
253
+ ### Config
369
254
 
370
255
  ```bash
371
- pnpm run cli -- reindex # run all jobs
372
- pnpm run cli -- reindex tweets
373
- pnpm run cli -- reindex graph
374
- pnpm run cli -- reindex graph --force
375
- pnpm run cli -- reindex suggestions --days 1
376
- pnpm run cli -- reindex bookmarks
377
- pnpm run cli -- reindex status
378
- pnpm run cli -- reindex status --watch
256
+ sonar config # show current config
257
+ sonar config setup key=<API_KEY> # set API key
379
258
  ```
380
259
 
381
- ### Local Data
260
+ ### Sync
382
261
 
383
262
  ```bash
384
- pnpm run cli -- data download # full download → ~/.sonar/data.db
385
- pnpm run cli -- data sync # incremental sync
386
- pnpm run cli -- data path # print DB path
387
- pnpm run cli -- data sql # open sqlite3 shell
263
+ sonar sync bookmarks # sync bookmarks to local SQLite
388
264
  ```
389
265
 
390
266
  ---
391
267
 
392
268
  ## Environment Variables
393
269
 
394
- | Variable | Required | Purpose |
395
- |---|---|---|
396
- | `SONAR_API_KEY` | Yes (unless saved by `init`) | Auth token |
397
- | `SONAR_API_URL` | No | GraphQL endpoint (default: `http://localhost:8000/graphql`) |
398
- | `SONAR_AI_VENDOR` | No | AI vendor for prompt generation (`openai` or `anthropic`) |
399
- | `SONAR_FEED_RENDER` | No | Default render style (`card` or `table`) |
400
- | `SONAR_FEED_WIDTH` | No | Default card width |
401
- | `OPENAI_API_KEY` | Sometimes | Required for OpenAI-powered `--from-prompt` |
402
- | `ANTHROPIC_API_KEY` | Sometimes | Required for Anthropic-powered `--from-prompt` |
270
+ | Variable | Required | Purpose |
271
+ | ------------------- | -------------------- | ------------------------------------------------------------------- |
272
+ | `SONAR_API_KEY` | Yes | Auth token from [sonar.8640p.info](https://sonar.8640p.info/) |
273
+ | `SONAR_API_URL` | No | GraphQL endpoint (default: production API) |
274
+ | `SONAR_MAX_RETRIES` | No | Max retry attempts on transient failures (default: 3, 0 to disable) |
275
+ | `OPENAI_API_KEY` | For `topics suggest` | Required when using OpenAI vendor for AI suggestions |
276
+ | `ANTHROPIC_API_KEY` | For `topics suggest` | Required when using Anthropic vendor for AI suggestions |
403
277
 
404
278
  ## Local Files
405
279
 
406
- | Path | Contents |
407
- |---|---|
280
+ | Path | Contents |
281
+ | ---------------------- | ---------------------------- |
408
282
  | `~/.sonar/config.json` | Token, API URL, CLI defaults |
409
- | `~/.sonar/data.db` | Local synced SQLite database |
283
+ | `~/.sonar/data.db` | Local synced SQLite database |
284
+
285
+ ---
286
+
287
+ ## Drift Prevention Checks
288
+
289
+ ```bash
290
+ # Run all drift checks (surface/docs/data/schema)
291
+ pnpm drift:check
292
+
293
+ # Refresh committed command snapshot after intentional command changes
294
+ pnpm drift:surface:update
295
+ ```
296
+
297
+ `drift:schema:check` validates GraphQL documents against the live schema.
298
+ Locally, it skips when offline; in CI (`CI=true`) it is enforced.
410
299
 
411
300
  ---
412
301
 
413
302
  ## Troubleshooting
414
303
 
415
- **`No token found. Set SONAR_API_KEY or run: sonar init`**
416
- Set `SONAR_API_KEY` in your environment, then run `pnpm run cli -- init`.
304
+ **`No token found. Set SONAR_API_KEY or run: sonar config setup`**
305
+ Set `SONAR_API_KEY` in your environment or run `sonar config setup key=<YOUR_KEY>`.
417
306
 
418
307
  **`Unable to reach server, please try again shortly.`**
419
- Check `SONAR_API_URL`, your network, and API availability.
420
-
421
- **`OPENAI_API_KEY is not set` / `ANTHROPIC_API_KEY is not set`**
422
- Set the key for your chosen vendor before using `--from-prompt` or interactive reply generation.
308
+ Check your network connection and API availability. The CLI automatically retries transient failures (network errors, 5xx) up to 3 times with exponential backoff. Use `--debug` to see retry attempts. Set `SONAR_MAX_RETRIES=0` to disable retries.
@@ -2,8 +2,8 @@ import { jsxs as _jsxs, jsx as _jsx } from "react/jsx-runtime";
2
2
  import { useEffect, useState } from 'react';
3
3
  import zod from 'zod';
4
4
  import { Text } from 'ink';
5
- import { gql } from '../../lib/client.js';
6
- import { Spinner } from '../../components/Spinner.js';
5
+ import { gql } from '../lib/client.js';
6
+ import { Spinner } from '../components/Spinner.js';
7
7
  export const options = zod.object({
8
8
  id: zod.string().describe('Suggestion ID to archive'),
9
9
  });
@@ -28,7 +28,7 @@ export default function DataDownload() {
28
28
  upsertTweet(db, s.tweet);
29
29
  upsertSuggestion(db, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched });
30
30
  }
31
- for (const i of interestsResult.projects) {
31
+ for (const i of interestsResult.topics) {
32
32
  upsertInterest(db, i);
33
33
  }
34
34
  setSyncState(db, 'last_synced_at', new Date().toISOString());
@@ -36,7 +36,7 @@ export default function DataDownload() {
36
36
  setResult({
37
37
  feedCount: feedResult.feed.length,
38
38
  suggestionsCount: suggestionsResult.suggestions.length,
39
- interestsCount: interestsResult.projects.length,
39
+ interestsCount: interestsResult.topics.length,
40
40
  });
41
41
  }
42
42
  catch (err) {
@@ -32,12 +32,12 @@ export default function DataSync() {
32
32
  upsertTweet(freshDb, s.tweet);
33
33
  upsertSuggestion(freshDb, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched });
34
34
  }
35
- for (const i of interestsResult.projects) {
35
+ for (const i of interestsResult.topics) {
36
36
  upsertInterest(freshDb, i);
37
37
  }
38
38
  setSyncState(freshDb, 'last_synced_at', new Date().toISOString());
39
39
  freshDb.close();
40
- setResult({ feedCount: feedResult.feed.length, suggestionsCount: suggestionsResult.suggestions.length, interestsCount: interestsResult.projects.length });
40
+ setResult({ feedCount: feedResult.feed.length, suggestionsCount: suggestionsResult.suggestions.length, interestsCount: interestsResult.topics.length });
41
41
  return;
42
42
  }
43
43
  const hoursSinceSync = Math.min(Math.ceil((Date.now() - new Date(lastSyncedAt).getTime()) / 3600000), 168);