@ramtinj95/opencode-tokenscope 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +526 -0
  3. package/dist/tokenscope-lib/analyzer.d.ts +29 -0
  4. package/dist/tokenscope-lib/analyzer.d.ts.map +1 -0
  5. package/dist/tokenscope-lib/analyzer.js +321 -0
  6. package/dist/tokenscope-lib/analyzer.js.map +1 -0
  7. package/dist/tokenscope-lib/config.d.ts +10 -0
  8. package/dist/tokenscope-lib/config.d.ts.map +1 -0
  9. package/dist/tokenscope-lib/config.js +102 -0
  10. package/dist/tokenscope-lib/config.js.map +1 -0
  11. package/dist/tokenscope-lib/context.d.ts +81 -0
  12. package/dist/tokenscope-lib/context.d.ts.map +1 -0
  13. package/dist/tokenscope-lib/context.js +453 -0
  14. package/dist/tokenscope-lib/context.js.map +1 -0
  15. package/dist/tokenscope-lib/cost.d.ts +9 -0
  16. package/dist/tokenscope-lib/cost.d.ts.map +1 -0
  17. package/dist/tokenscope-lib/cost.js +51 -0
  18. package/dist/tokenscope-lib/cost.js.map +1 -0
  19. package/dist/tokenscope-lib/formatter.d.ts +26 -0
  20. package/dist/tokenscope-lib/formatter.d.ts.map +1 -0
  21. package/dist/tokenscope-lib/formatter.js +415 -0
  22. package/dist/tokenscope-lib/formatter.js.map +1 -0
  23. package/dist/tokenscope-lib/subagent.d.ts +13 -0
  24. package/dist/tokenscope-lib/subagent.d.ts.map +1 -0
  25. package/dist/tokenscope-lib/subagent.js +134 -0
  26. package/dist/tokenscope-lib/subagent.js.map +1 -0
  27. package/dist/tokenscope-lib/tokenizer.d.ts +17 -0
  28. package/dist/tokenscope-lib/tokenizer.d.ts.map +1 -0
  29. package/dist/tokenscope-lib/tokenizer.js +117 -0
  30. package/dist/tokenscope-lib/tokenizer.js.map +1 -0
  31. package/dist/tokenscope-lib/types.d.ts +252 -0
  32. package/dist/tokenscope-lib/types.d.ts.map +1 -0
  33. package/dist/tokenscope-lib/types.js +12 -0
  34. package/dist/tokenscope-lib/types.js.map +1 -0
  35. package/dist/tokenscope.d.ts +4 -0
  36. package/dist/tokenscope.d.ts.map +1 -0
  37. package/dist/tokenscope.js +98 -0
  38. package/dist/tokenscope.js.map +1 -0
  39. package/models.json +290 -0
  40. package/package.json +54 -0
  41. package/tokenscope-config.json +6 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Ramtin Javanmardi
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,526 @@
1
+ # OpenCode-Tokenscope: Token Analyzer Plugin
2
+
3
+ > Comprehensive token usage analysis and cost tracking for OpenCode AI sessions
4
+
5
+ Track and optimize your token usage across system prompts, user messages, tool outputs, and more. Get detailed breakdowns, accurate cost estimates, and visual insights for your AI development workflow.
6
+
7
+ ## Installation
8
+
9
+ ```bash
10
+ curl -sSL https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/install.sh | bash
11
+ ```
12
+
13
+ Then restart OpenCode and run `/tokenscope`
14
+
15
+ ## Updating
16
+
17
+ **Option 1: Local script** (if you have the plugin installed)
18
+ ```bash
19
+ bash ~/.config/opencode/plugin/install.sh --update
20
+ ```
21
+
22
+ **Option 2: Remote script** (always works)
23
+ ```bash
24
+ curl -sSL https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/install.sh | bash -s -- --update
25
+ ```
26
+
27
+ The `--update` flag skips dependency installation for faster updates.
28
+
29
+ ## Usage
30
+
31
+ Simply type in OpenCode:
32
+ ```
33
+ /tokenscope
34
+ ```
35
+
36
+ The plugin will:
37
+ 1. Analyze the current session
38
+ 2. Count tokens across all categories
39
+ 3. Analyze all subagent (Task tool) child sessions recursively
40
+ 4. Calculate costs based on API telemetry
41
+ 5. Save detailed report to `token-usage-output.txt`
42
+
43
+ ### Options
44
+
45
+ - **sessionID**: Analyze a specific session instead of the current one
46
+ - **limitMessages**: Limit entries shown per category (1-10, default: 3)
47
+ - **includeSubagents**: Include subagent child session costs (default: true)
48
+
49
+ ### Reading the Full Report
50
+
51
+ ```bash
52
+ cat token-usage-output.txt
53
+ ```
54
+
55
+ ## Features
56
+
57
+ ### Comprehensive Token Analysis
58
+ - **5 Category Breakdown**: System prompts, user messages, assistant responses, tool outputs, and reasoning traces
59
+ - **Visual Charts**: Easy-to-read ASCII bar charts with percentages and token counts
60
+ - **Smart Inference**: Automatically infers system prompts from API telemetry (since they're not exposed in session messages)
61
+
62
+ ### Context Breakdown Analysis (New in v1.4.0)
63
+ - **System Prompt Components**: See token distribution across base prompt, tool definitions, environment context, project tree, and custom instructions
64
+ - **Automatic Estimation**: Estimates breakdown from `cache_write` tokens when system prompt content isn't directly available
65
+ - **Tool Count**: Shows how many tools are loaded and their combined token cost
66
+
67
+ ### Tool Definition Cost Estimates (New in v1.4.0)
68
+ - **Per-Tool Estimates**: Lists all enabled tools with estimated schema token costs
69
+ - **Argument Analysis**: Infers argument count and complexity from actual tool calls in the session
70
+ - **Complexity Detection**: Distinguishes between simple arguments and complex ones (arrays/objects)
71
+
72
+ ### Cache Efficiency Metrics (New in v1.4.0)
73
+ - **Cache Hit Rate**: Visual display of cache read vs fresh input token distribution
74
+ - **Cost Savings**: Calculates actual savings from prompt caching
75
+ - **Effective Rate**: Shows what you're actually paying per token vs standard rates
76
+
77
+ ### Accurate Cost Tracking
78
+ - **41+ Models Supported**: Comprehensive pricing database for Claude, GPT, DeepSeek, Llama, Mistral, and more
79
+ - **Cache-Aware Pricing**: Properly handles cache read/write tokens with discounted rates
80
+ - **Session-Wide Billing**: Aggregates costs across all API calls in your session
81
+
82
+ ### Subagent Cost Tracking
83
+ - **Child Session Analysis**: Recursively analyzes all subagent sessions spawned by the Task tool
84
+ - **Aggregated Totals**: Shows combined tokens, costs, and API calls across main session and all subagents
85
+ - **Per-Agent Breakdown**: Lists each subagent with its type, token usage, cost, and API call count
86
+ - **Optional Toggle**: Enable/disable subagent analysis with the `includeSubagents` parameter
87
+
88
+ ### Advanced Features
89
+ - **Tool Usage Stats**: Track which tools consume the most tokens and how many times each is called
90
+ - **API Call Tracking**: See total API calls for main session and subagents
91
+ - **Top Contributors**: Identify the biggest token consumers
92
+ - **Model Normalization**: Handles `provider/model` format automatically
93
+ - **Multi-Tokenizer Support**: Uses official tokenizers (tiktoken for OpenAI, transformers for others)
94
+ - **Configurable Sections**: Enable/disable analysis features via `tokenscope-config.json`
95
+
96
+ ## Example Output
97
+
98
+ ```
99
+ ═══════════════════════════════════════════════════════════════════════════
100
+ Token Analysis: Session ses_50c712089ffeshuuuJPmOoXCPX
101
+ Model: claude-opus-4-5
102
+ ═══════════════════════════════════════════════════════════════════════════
103
+
104
+ TOKEN BREAKDOWN BY CATEGORY
105
+ ─────────────────────────────────────────────────────────────────────────
106
+ Estimated using tokenizer analysis of message content:
107
+
108
+ Input Categories:
109
+ SYSTEM ██████████████░░░░░░░░░░░░░░░░ 45.8% (22,367)
110
+ USER ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.8% (375)
111
+ TOOLS ████████████████░░░░░░░░░░░░░░ 53.5% (26,146)
112
+
113
+ Subtotal: 48,888 estimated input tokens
114
+
115
+ Output Categories:
116
+ ASSISTANT ██████████████████████████████ 100.0% (1,806)
117
+
118
+ Subtotal: 1,806 estimated output tokens
119
+
120
+ Local Total: 50,694 tokens (estimated)
121
+
122
+ TOOL USAGE BREAKDOWN
123
+ ─────────────────────────────────────────────────────────────────────────
124
+ bash ██████████░░░░░░░░░░░░░░░░░░░░ 34.0% (8,886) 4x
125
+ read ██████████░░░░░░░░░░░░░░░░░░░░ 33.1% (8,643) 3x
126
+ task ████████░░░░░░░░░░░░░░░░░░░░░░ 27.7% (7,245) 4x
127
+ webfetch █░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 4.9% (1,286) 1x
128
+ tokenscope ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.3% (75) 2x
129
+ batch ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0% (11) 1x
130
+
131
+ TOP CONTRIBUTORS
132
+ ─────────────────────────────────────────────────────────────────────────
133
+ • System (inferred from API) 22,367 tokens (44.1%)
134
+ • bash 8,886 tokens (17.5%)
135
+ • read 8,643 tokens (17.0%)
136
+ • task 7,245 tokens (14.3%)
137
+ • webfetch 1,286 tokens (2.5%)
138
+
139
+ ═══════════════════════════════════════════════════════════════════════════
140
+ MOST RECENT API CALL
141
+ ─────────────────────────────────────────────────────────────────────────
142
+
143
+ Raw telemetry from last API response:
144
+ Input (fresh): 2 tokens
145
+ Cache read: 48,886 tokens
146
+ Cache write: 54 tokens
147
+ Output: 391 tokens
148
+ ───────────────────────────────────
149
+ Total: 49,333 tokens
150
+
151
+ ═══════════════════════════════════════════════════════════════════════════
152
+ SESSION TOTALS (All 15 API calls)
153
+ ─────────────────────────────────────────────────────────────────────────
154
+
155
+ Total tokens processed across the entire session (for cost calculation):
156
+
157
+ Input tokens: 10 (fresh tokens across all calls)
158
+ Cache read: 320,479 (cached tokens across all calls)
159
+ Cache write: 51,866 (tokens written to cache)
160
+ Output tokens: 3,331 (all model responses)
161
+ ───────────────────────────────────
162
+ Session Total: 375,686 tokens (for billing)
163
+
164
+ ═══════════════════════════════════════════════════════════════════════════
165
+ ESTIMATED SESSION COST (API Key Pricing)
166
+ ─────────────────────────────────────────────────────────────────────────
167
+
168
+ You appear to be on a subscription plan (API cost is $0).
169
+ Here's what this session would cost with direct API access:
170
+
171
+ Input tokens: 10 × $5.00/M = $0.0001
172
+ Output tokens: 3,331 × $25.00/M = $0.0833
173
+ Cache read: 320,479 × $0.50/M = $0.1602
174
+ Cache write: 51,866 × $6.25/M = $0.3242
175
+ ─────────────────────────────────────────────────────────────────────────
176
+ ESTIMATED TOTAL: $0.5677
177
+
178
+ Note: This estimate uses standard API pricing from models.json.
179
+ Actual API costs may vary based on provider and context size.
180
+
181
+ ═══════════════════════════════════════════════════════════════════════════
182
+ CONTEXT BREAKDOWN (Estimated from cache_write tokens)
183
+ ─────────────────────────────────────────────────────────────────────────
184
+
185
+ Base System Prompt ████████████░░░░░░░░░░░░░░░░░░ ~42,816 tokens
186
+ Tool Definitions (14)██████░░░░░░░░░░░░░░░░░░░░░░░░ ~4,900 tokens
187
+ Environment Context █░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ ~150 tokens
188
+ Project Tree ████░░░░░░░░░░░░░░░░░░░░░░░░░░ ~4,000 tokens
189
+ ───────────────────────────────────────────────────────────────────────
190
+ Total Cached Context: ~51,866 tokens
191
+
192
+ Note: Breakdown estimated from first cache_write. Actual distribution may vary.
193
+
194
+ ═══════════════════════════════════════════════════════════════════════════
195
+ TOOL DEFINITION COSTS (Estimated from argument analysis)
196
+ ─────────────────────────────────────────────────────────────────────────
197
+
198
+ Tool Est. Tokens Args Complexity
199
+ ───────────────────────────────────────────────────────────────────────
200
+ task ~480 3 complex (arrays/objects)
201
+ batch ~410 1 complex (arrays/objects)
202
+ edit ~370 4 simple
203
+ read ~340 3 simple
204
+ bash ~340 3 simple
205
+ ───────────────────────────────────────────────────────────────────────
206
+ Total: ~4,520 tokens (14 enabled tools)
207
+
208
+ Note: Estimates inferred from tool call arguments in this session.
209
+ Actual schema tokens may vary +/-20%.
210
+
211
+ ═══════════════════════════════════════════════════════════════════════════
212
+ CACHE EFFICIENCY
213
+ ─────────────────────────────────────────────────────────────────────────
214
+
215
+ Token Distribution:
216
+ Cache Read: 320,479 tokens ████████████████████████████░░ 86.2%
217
+ Fresh Input: 51,320 tokens ████░░░░░░░░░░░░░░░░░░░░░░░░░░ 13.8%
218
+ ───────────────────────────────────────────────────────────────────────
219
+ Cache Hit Rate: 86.2%
220
+
221
+ Cost Analysis (claude-opus-4-5 @ $5.00/M input, $0.50/M cache read):
222
+ Without caching: $1.8590 (371,799 tokens x $5.00/M)
223
+ With caching: $0.4169 (fresh x $5.00/M + cached x $0.50/M)
224
+ ───────────────────────────────────────────────────────────────────────
225
+ Cost Savings: $1.4421 (77.6% reduction)
226
+ Effective Rate: $1.12/M tokens (vs. $5.00/M standard)
227
+
228
+ ═══════════════════════════════════════════════════════════════════════════
229
+ SUBAGENT COSTS (4 child sessions, 23 API calls)
230
+ ─────────────────────────────────────────────────────────────────────────
231
+
232
+ docs $0.3190 (194,701 tokens, 8 calls)
233
+ general $0.2957 (104,794 tokens, 4 calls)
234
+ docs $0.2736 (69,411 tokens, 4 calls)
235
+ general $0.5006 (197,568 tokens, 7 calls)
236
+ ─────────────────────────────────────────────────────────────────────────
237
+ Subagent Total: $1.3888 (566,474 tokens, 23 calls)
238
+
239
+ ═══════════════════════════════════════════════════════════════════════════
240
+ SUMMARY
241
+ ─────────────────────────────────────────────────────────────────────────
242
+
243
+ Cost Tokens API Calls
244
+ Main session: $ 0.5677 375,686 15
245
+ Subagents: $ 1.3888 566,474 23
246
+ ─────────────────────────────────────────────────────────────────────────
247
+ TOTAL: $ 1.9565 942,160 38
248
+
249
+ ═══════════════════════════════════════════════════════════════════════════
250
+
251
+ ```
252
+
253
+ ## Supported Models
254
+
255
+ **41+ models with accurate pricing:**
256
+
257
+ ### Claude Models
258
+ - Claude Opus 4.5, 4.1, 4
259
+ - Claude Sonnet 4.5, 4, 3.7, 3.5, 3
260
+ - Claude Haiku 4.5, 3.5, 3
261
+
262
+ ### OpenAI Models
263
+ - GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o Mini
264
+ - GPT-3.5 Turbo
265
+ - GPT-5 and all its variations
266
+
267
+ ### Other Models
268
+ - DeepSeek (R1, V2, V3)
269
+ - Llama (3.1, 3.2, 3.3)
270
+ - Mistral (Large, Small)
271
+ - Qwen, Kimi, GLM, Grok
272
+ - And more...
273
+
274
+ **Free/Open models** are marked with zero pricing.
275
+
276
+ ## Customization
277
+
278
+ ### Add New Model Pricing
279
+
280
+ Edit `~/.config/opencode/plugin/models.json`:
281
+
282
+ ```json
283
+ {
284
+ "your-model-name": {
285
+ "input": 1.50,
286
+ "output": 5.00,
287
+ "cacheWrite": 0.50,
288
+ "cacheRead": 0.10
289
+ }
290
+ }
291
+ ```
292
+
293
+ Save the file and restart OpenCode. The plugin will automatically use the new pricing.
294
+
295
+ ### Update Existing Model Pricing
296
+
297
+ Simply edit the values in `models.json` and restart OpenCode. No code changes needed!
298
+
299
+ ### Configure Analysis Features
300
+
301
+ Edit `~/.config/opencode/plugin/tokenscope-config.json` to enable/disable sections:
302
+
303
+ ```json
304
+ {
305
+ "enableContextBreakdown": true,
306
+ "enableToolSchemaEstimation": true,
307
+ "enableCacheEfficiency": true,
308
+ "enableSubagentAnalysis": true
309
+ }
310
+ ```
311
+
312
+ Set any option to `false` to hide that section from the output. All features are enabled by default.
313
+
314
+ ## How It Works
315
+
316
+ ### System Prompt Inference
317
+ OpenCode doesn't expose system prompts in the session messages API. The plugin intelligently infers them using:
318
+
319
+ ```
320
+ System Tokens = (API Input + Cache Read) - (User Tokens + Tool Tokens)
321
+ ```
322
+
323
+ This works because the API input includes everything sent to the model.
324
+
325
+ ### Dual Tracking
326
+ - **Current Context**: Uses the most recent API call with non-zero tokens (matches TUI)
327
+ - **Session Total**: Aggregates all API calls for accurate billing
328
+
329
+ ### Subagent Analysis
330
+ The plugin uses OpenCode's session API to:
331
+ 1. Fetch all child sessions spawned by the Task tool
332
+ 2. Recursively analyze nested subagents (subagents can spawn their own subagents)
333
+ 3. Aggregate tokens, costs, and API call counts
334
+ 4. Calculate estimated costs using the same pricing as the main session
335
+
336
+ ### Model Name Normalization
337
+ Automatically handles `provider/model` format (e.g., `qwen/qwen3-coder` → `qwen3-coder`)
338
+
339
+ ## Understanding the Numbers
340
+
341
+ ### Current Context vs Session Total
342
+
343
+ - **Current Context**: What's in your context window right now
344
+ - Based on most recent API call
345
+ - Used to understand current memory usage
346
+
347
+ - **Session Total**: All tokens processed in this session
348
+ - Sum of all API calls in the main session
349
+ - What you're billed for (main session only)
350
+ - Used for cost calculation
351
+
352
+ ### Subagent Totals
353
+
354
+ When using the Task tool, OpenCode spawns subagent sessions. These are tracked separately:
355
+
356
+ - **Subagent Tokens**: Combined tokens from all child sessions
357
+ - **Subagent API Calls**: Total API calls made by all subagents
358
+ - **Grand Total**: Main session + all subagents combined
359
+
360
+ ### Cache Tokens
361
+
362
+ - **Cache Read**: Tokens retrieved from cache (discounted rate ~90% off)
363
+ - **Cache Write**: Tokens written to cache (slight premium ~25% more)
364
+ - **Note**: Cache write is a billing charge, not additional context tokens
365
+
366
+ ## Troubleshooting
367
+
368
+ ### "Dependencies missing" Error
369
+
370
+ Run the installer:
371
+ ```bash
372
+ curl -sSL https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/install.sh | bash
373
+ ```
374
+
375
+ ### Command Not Appearing
376
+
377
+ 1. Verify `tokenscope.md` exists:
378
+ ```bash
379
+ ls ~/.config/opencode/command/tokenscope.md
380
+ ```
381
+ 2. Restart OpenCode completely
382
+ 3. Check OpenCode logs for plugin errors
383
+
384
+ ### Wrong Token Counts
385
+
386
+ The plugin uses API telemetry (ground truth). If counts seem off:
387
+ - **Expected ~2K difference from TUI**: Plugin analyzes before its own response is added
388
+ - **Model detection**: Check that the model name is recognized in the output
389
+ - **Tokenizer not installed**: Re-run the installer
390
+
391
+ ### New Model Not Showing Correct Pricing
392
+
393
+ 1. Check if model exists in `models.json`
394
+ 2. Try exact match or prefix match (e.g., `claude-sonnet-4` matches `claude-sonnet-4-20250514`)
395
+ 3. Add entry to `models.json` if missing
396
+ 4. Restart OpenCode after editing `models.json`
397
+
398
+ ### Plugin Fails to Load
399
+
400
+ 1. Validate JSON syntax:
401
+ ```bash
402
+ cd ~/.config/opencode/plugin
403
+ node -e "JSON.parse(require('fs').readFileSync('models.json', 'utf8'))"
404
+ ```
405
+ 2. Check for trailing commas or syntax errors
406
+ 3. Plugin falls back to default pricing if file is invalid
407
+
408
+ ## Architecture
409
+
410
+ ### File Structure
411
+
412
+ ```
413
+ plugin/
414
+ ├── tokenscope.ts # Main entry point - Plugin export
415
+ ├── tokenscope-lib/
416
+ │ ├── types.ts # All interfaces and type definitions
417
+ │ ├── config.ts # Constants, model maps, pricing loader
418
+ │ ├── tokenizer.ts # TokenizerManager class
419
+ │ ├── analyzer.ts # ModelResolver, ContentCollector, TokenAnalysisEngine
420
+ │ ├── cost.ts # CostCalculator class
421
+ │ ├── subagent.ts # SubagentAnalyzer class
422
+ │ ├── formatter.ts # OutputFormatter class
423
+ │ └── context.ts # ContextAnalyzer class (context breakdown, tool estimates, cache efficiency)
424
+ ├── models.json # Pricing data for 41+ models
425
+ ├── tokenscope-config.json # Feature toggles configuration
426
+ ├── package.json # Plugin metadata
427
+ └── install.sh # Installation script
428
+ ```
429
+
430
+ ### Core Components
431
+
432
+ 1. **TokenizerManager** (`tokenscope-lib/tokenizer.ts`): Loads and caches tokenizers (tiktoken, transformers)
433
+ 2. **ModelResolver** (`tokenscope-lib/analyzer.ts`): Detects model and selects appropriate tokenizer
434
+ 3. **ContentCollector** (`tokenscope-lib/analyzer.ts`): Extracts content from session messages, including tool call counts
435
+ 4. **TokenAnalysisEngine** (`tokenscope-lib/analyzer.ts`): Counts tokens and applies API telemetry adjustments
436
+ 5. **CostCalculator** (`tokenscope-lib/cost.ts`): Calculates costs from pricing database with cache-aware pricing
437
+ 6. **SubagentAnalyzer** (`tokenscope-lib/subagent.ts`): Recursively fetches and analyzes child sessions from Task tool calls
438
+ 7. **ContextAnalyzer** (`tokenscope-lib/context.ts`): Analyzes context breakdown, tool schema estimates, and cache efficiency
439
+ 8. **OutputFormatter** (`tokenscope-lib/formatter.ts`): Generates visual reports with charts and summaries
440
+
441
+ ## Privacy & Security
442
+
443
+ - **All processing is local**: No session data sent to external services
444
+ - **Tokenizers from official sources**:
445
+ - OpenAI tokenizers: npm registry
446
+ - Transformers: Hugging Face Hub
447
+ - **Open source**: Audit the code yourself
448
+
449
+ ## Performance
450
+
451
+ - **Fast**: Tokenizers cached after first load
452
+ - **Parallel**: Categories processed concurrently
453
+ - **Efficient**: Only analyzes on demand
454
+ - **First-run download**: Transformers models download on demand (5-50MB per model)
455
+ - **Subsequent runs**: Instant (uses cache)
456
+
457
+ ## Manual Installation
458
+
459
+ <details>
460
+ <summary>Click to expand manual installation steps</summary>
461
+
462
+ ### Requirements
463
+ - OpenCode installed (`~/.config/opencode` directory exists)
464
+ - npm (for tokenizer dependencies)
465
+ - ~50MB disk space (for tokenizer models)
466
+
467
+ ### Installation Steps
468
+
469
+ 1. **Navigate to OpenCode config**:
470
+ ```bash
471
+ cd ~/.config/opencode
472
+ ```
473
+
474
+ 2. **Download plugin files**:
475
+ ```bash
476
+ mkdir -p plugin/tokenscope-lib
477
+ cd plugin
478
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope.ts
479
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/models.json
480
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/package.json
481
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-config.json
482
+ cd tokenscope-lib
483
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/types.ts
484
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/config.ts
485
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/tokenizer.ts
486
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/analyzer.ts
487
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/cost.ts
488
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/subagent.ts
489
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/formatter.ts
490
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/plugin/tokenscope-lib/context.ts
491
+ ```
492
+
493
+ 3. **Download command file**:
494
+ ```bash
495
+ cd ../../command
496
+ curl -O https://raw.githubusercontent.com/ramtinJ95/opencode-tokenscope/main/command/tokenscope.md
497
+ ```
498
+
499
+ 4. **Install dependencies**:
500
+ ```bash
501
+ cd ../plugin
502
+ npm install js-tiktoken@1.0.15 @huggingface/transformers@3.1.2
503
+ ```
504
+
505
+ 5. **Restart OpenCode**
506
+
507
+ 6. **Test**: Run `/tokenscope` in any session
508
+
509
+ </details>
510
+
511
+ ## Contributing
512
+
513
+ Contributions welcome! Ideas for enhancement:
514
+
515
+ - Historical trend analysis
516
+ - Export to CSV/JSON/PDF
517
+ - Optimization suggestions
518
+ - Custom categorization rules
519
+ - Real-time monitoring with alerts
520
+ - Compare sessions
521
+ - Token burn rate calculation
522
+
523
+ ## Support
524
+
525
+ - **Issues**: [GitHub Issues](https://github.com/ramtinJ95/opencode-tokenscope/issues)
526
+ - **Discussions**: [GitHub Discussions](https://github.com/ramtinJ95/opencode-tokenscope/discussions)
@@ -0,0 +1,29 @@
1
+ import type { SessionMessage, TokenModel, TokenAnalysis, CategoryEntrySource } from "./types";
2
+ import { TokenizerManager } from "./tokenizer";
3
+ export declare class ModelResolver {
4
+ resolveTokenModel(messages: SessionMessage[]): TokenModel;
5
+ private resolveOpenAIModel;
6
+ private resolveTransformersModel;
7
+ private mapOpenAI;
8
+ private canonicalize;
9
+ }
10
+ export declare class ContentCollector {
11
+ collectSystemPrompts(messages: SessionMessage[]): CategoryEntrySource[];
12
+ collectMessageTexts(messages: SessionMessage[], role: "user" | "assistant"): CategoryEntrySource[];
13
+ collectToolOutputs(messages: SessionMessage[]): CategoryEntrySource[];
14
+ collectToolCallCounts(messages: SessionMessage[]): Map<string, number>;
15
+ collectAllToolsCalled(messages: SessionMessage[]): string[];
16
+ collectReasoningTexts(messages: SessionMessage[]): CategoryEntrySource[];
17
+ private extractText;
18
+ private identifySystemPrompt;
19
+ private capitalize;
20
+ }
21
+ export declare class TokenAnalysisEngine {
22
+ private tokenizerManager;
23
+ private contentCollector;
24
+ constructor(tokenizerManager: TokenizerManager, contentCollector: ContentCollector);
25
+ analyze(sessionID: string, messages: SessionMessage[], tokenModel: TokenModel, entryLimit: number): Promise<TokenAnalysis>;
26
+ private buildCategory;
27
+ private applyTelemetryAdjustments;
28
+ }
29
+ //# sourceMappingURL=analyzer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"analyzer.d.ts","sourceRoot":"","sources":["../../tokenscope-lib/analyzer.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EACV,cAAc,EAEd,UAAU,EACV,aAAa,EACb,mBAAmB,EAMpB,MAAM,SAAS,CAAA;AAGhB,OAAO,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAA;AAI9C,qBAAa,aAAa;IACxB,iBAAiB,CAAC,QAAQ,EAAE,cAAc,EAAE,GAAG,UAAU;IAezD,OAAO,CAAC,kBAAkB;IAa1B,OAAO,CAAC,wBAAwB;IAgChC,OAAO,CAAC,SAAS;IAKjB,OAAO,CAAC,YAAY;CAGrB;AAID,qBAAa,gBAAgB;IAC3B,oBAAoB,CAAC,QAAQ,EAAE,cAAc,EAAE,GAAG,mBAAmB,EAAE;IAuBvE,mBAAmB,CAAC,QAAQ,EAAE,cAAc,EAAE,EAAE,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,mBAAmB,EAAE;IAgBlG,kBAAkB,CAAC,QAAQ,EAAE,cAAc,EAAE,GAAG,mBAAmB,EAAE;IAwBrE,qBAAqB,CAAC,QAAQ,EAAE,cAAc,EAAE,GAAG,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC;IAiBtE,qBAAqB,CAAC,QAAQ,EAAE,cAAc,EAAE,GAAG,MAAM,EAAE;IAI3D,qBAAqB,CAAC,QAAQ,EAAE,cAAc,EAAE,GAAG,mBAAmB,EAAE;IAmBxE,OAAO,CAAC,WAAW;IASnB,OAAO,CAAC,oBAAoB;IAoB5B,OAAO,CAAC,UAAU;CAInB;AAID,qBAAa,mBAAmB;IAE5B,OAAO,CAAC,gBAAgB;IACxB,OAAO,CAAC,gBAAgB;gBADhB,gBAAgB,EAAE,gBAAgB,EAClC,gBAAgB,EAAE,gBAAgB;IAGtC,OAAO,CACX,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,cAAc,EAAE,EAC1B,UAAU,EAAE,UAAU,EACtB,UAAU,EAAE,MAAM,GACjB,OAAO,CAAC,aAAa,CAAC;YA6CX,aAAa;IAsB3B,OAAO,CAAC,yBAAyB;CAsFlC"}