free-coding-models 0.1.82 → 0.1.84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,245 @@
1
+ /**
2
+ * @file lib/usage-reader.js
3
+ * @description Pure functions to read provider-scoped Usage snapshots from token-stats.json.
4
+ *
5
+ * Designed for TUI consumption: reads the pre-computed provider-scoped quota
6
+ * snapshots written by TokenStats. Never reads the JSONL log.
7
+ *
8
+ * The UI must distinguish the same model served by different Origins
9
+ * (for example NVIDIA vs Groq). Because of that, the canonical snapshot source
10
+ * is `quotaSnapshots.byProviderModel`, not the legacy `byModel` aggregate.
11
+ *
12
+ * All functions are pure (no shared mutable state) and handle missing/malformed
13
+ * files gracefully by returning safe fallback values.
14
+ *
15
+ * Default path: ~/.free-coding-models/token-stats.json
16
+ *
17
+ * ## Freshness contract
18
+ * Usage snapshots carry an `updatedAt` ISO timestamp. Any entry whose
19
+ * `updatedAt` is older than SNAPSHOT_TTL_MS (30 minutes) is excluded and
20
+ * treated as `N/A` by the UI. Entries that predate this feature (no
21
+ * `updatedAt` field) are included for backward compatibility.
22
+ *
23
+ * ## Parse cache
24
+ * `loadUsageSnapshot` maintains a module-level in-memory cache keyed by the
25
+ * resolved stats-file path. Each cache entry is valid for CACHE_TTL_MS
26
+ * (750 ms). This avoids redundant synchronous disk reads when the
27
+ * TUI rerenders multiple times within the same tick or across a few frames.
28
+ * The 30-minute data-freshness filter (SNAPSHOT_TTL_MS) is applied every time
29
+ * the snapshot is parsed — caching never bypasses it.
30
+ *
31
+ * Use `clearUsageCache()` to evict all entries (useful in tests).
32
+ *
33
+ * @exports SNAPSHOT_TTL_MS
34
+ * @exports CACHE_TTL_MS
35
+ * @exports clearUsageCache
36
+ * @exports loadUsageSnapshot
37
+ * @exports buildUsageSnapshotKey
38
+ * @exports loadUsageMap
39
+ * @exports usageForModelId
40
+ * @exports usageForRow
41
+ */
42
+
43
+ import { readFileSync, existsSync } from 'node:fs'
44
+ import { join } from 'node:path'
45
+ import { homedir } from 'node:os'
46
+ import { supportsUsagePercent, usageResetsDaily } from './quota-capabilities.js'
47
+
48
const DEFAULT_STATS_FILE = join(homedir(), '.free-coding-models', 'token-stats.json')

/**
 * Freshness TTL for quota snapshots in milliseconds (30 minutes).
 * Snapshots older than this are treated as stale and excluded from results.
 * The UI renders stale/missing entries as `N/A`.
 */
export const SNAPSHOT_TTL_MS = 30 * 60 * 1000

/**
 * TTL for the module-level parse cache in milliseconds (750 ms).
 * Within this window repeated calls to loadUsageSnapshot with the same path
 * return the already-parsed result without touching the filesystem.
 */
export const CACHE_TTL_MS = 750

/**
 * Module-level cache: path → { snapshot, expiresAt }
 * @type {Map<string, { snapshot: { byProviderModel: Record<string, number>, byProvider: Record<string, number>, legacyByModel: Record<string, number> }, expiresAt: number }>}
 */
const _cache = new Map()

/**
 * Evict all cached parse results. Subsequent calls to loadUsageSnapshot will
 * re-read from disk. Primarily intended for use in tests.
 */
export function clearUsageCache() {
  _cache.clear()
}

/**
 * Returns true when the snapshot entry is considered fresh enough to display.
 *
 * Rules:
 * - If `updatedAt` is absent (older format): include for backward compatibility.
 * - If `updatedAt` does not parse as a date: include (be generous).
 * - If `providerKey` is given and that provider's quota resets daily
 *   (usageResetsDaily): exclude any entry whose `updatedAt` calendar day
 *   differs from the current UTC day, regardless of the TTL below.
 * - Otherwise include only entries updated strictly less than SNAPSHOT_TTL_MS ago.
 *
 * @param {{ updatedAt?: string }} entry
 * @param {number} [nowMs] - optional current time (ms) for testability
 * @param {string|null} [providerKey] - when set, enables the daily-reset day check
 * @returns {boolean}
 */
function isSnapshotFresh(entry, nowMs = Date.now(), providerKey = null) {
  if (!entry || typeof entry.updatedAt !== 'string') return true // backward compat
  const updatedMs = Date.parse(entry.updatedAt)
  if (!Number.isFinite(updatedMs)) return true // unparseable: be generous
  if (providerKey && usageResetsDaily(providerKey)) {
    // NOTE(review): assumes `updatedAt` is an ISO-8601 UTC string so its first
    // 10 characters are the UTC calendar day — confirm the writer always emits UTC.
    const nowDay = new Date(nowMs).toISOString().slice(0, 10)
    const updatedDay = entry.updatedAt.slice(0, 10)
    if (updatedDay !== nowDay) return false
  }
  return nowMs - updatedMs < SNAPSHOT_TTL_MS
}
101
+
102
/**
 * Build the canonical map key for one Origin + model pair.
 * The key joins the provider key and model id with a `::` separator,
 * e.g. ('groq', 'openai/gpt-oss-120b') → 'groq::openai/gpt-oss-120b'.
 *
 * @param {string} providerKey
 * @param {string} modelId
 * @returns {string}
 */
export function buildUsageSnapshotKey(providerKey, modelId) {
  return providerKey + '::' + modelId
}
112
+
113
/**
 * Load token-stats.json and return model/provider usage maps.
 * Entries with stale `updatedAt` (older than SNAPSHOT_TTL_MS) are excluded.
 *
 * Results are cached in memory for CACHE_TTL_MS to avoid repeated disk reads;
 * the 30-minute data-freshness filter is re-applied on every cache miss (parse).
 *
 * @param {string} [statsFile]
 * @returns {{ byProviderModel: Record<string, number>, byProvider: Record<string, number>, legacyByModel: Record<string, number> }}
 */
export function loadUsageSnapshot(statsFile = DEFAULT_STATS_FILE) {
  const now = Date.now()

  // Serve from the in-memory parse cache while the entry is still valid.
  const hit = _cache.get(statsFile)
  if (hit !== undefined && hit.expiresAt > now) {
    return hit.snapshot
  }

  // Cache miss: parse from disk and remember the result for CACHE_TTL_MS.
  const parsed = _parseSnapshot(statsFile, now)
  _cache.set(statsFile, { snapshot: parsed, expiresAt: now + CACHE_TTL_MS })
  return parsed
}
137
+
138
/**
 * Internal: read and parse token-stats.json without caching.
 *
 * Reads the pre-computed `quotaSnapshots` maps, keeps only entries whose
 * `quotaPercent` is a finite number and whose snapshot passes the freshness
 * check, and returns empty maps on any error (missing file, bad JSON).
 *
 * @param {string} statsFile
 * @param {number} now - current time in ms (for freshness checks)
 * @returns {{ byProviderModel: Record<string, number>, byProvider: Record<string, number>, legacyByModel: Record<string, number> }}
 */
function _parseSnapshot(statsFile, now) {
  // Fresh fallback object per call — callers receive independent maps.
  const empty = () => ({ byProviderModel: {}, byProvider: {}, legacyByModel: {} })

  try {
    if (!existsSync(statsFile)) return empty()
    const data = JSON.parse(readFileSync(statsFile, 'utf8'))
    const snapshots = data?.quotaSnapshots

    // True when the entry carries a usable finite quotaPercent number.
    const hasPercent = (entry) =>
      entry && typeof entry.quotaPercent === 'number' && Number.isFinite(entry.quotaPercent)

    const byProviderModel = {}
    const providerModelSrc = snapshots?.byProviderModel
    if (providerModelSrc && typeof providerModelSrc === 'object') {
      for (const [snapshotKey, entry] of Object.entries(providerModelSrc)) {
        // Prefer the explicit providerKey field; fall back to the key prefix
        // before the first '::' separator.
        const providerKey = typeof entry?.providerKey === 'string'
          ? entry.providerKey
          : snapshotKey.split('::', 1)[0]
        if (!supportsUsagePercent(providerKey)) continue
        if (hasPercent(entry) && isSnapshotFresh(entry, now, providerKey)) {
          byProviderModel[snapshotKey] = entry.quotaPercent
        }
      }
    }

    // 📖 Legacy map kept only for backward compatibility helpers/tests.
    const legacyByModel = {}
    const modelSrc = snapshots?.byModel
    if (modelSrc && typeof modelSrc === 'object') {
      for (const [modelId, entry] of Object.entries(modelSrc)) {
        if (hasPercent(entry) && isSnapshotFresh(entry, now)) {
          legacyByModel[modelId] = entry.quotaPercent
        }
      }
    }

    const byProvider = {}
    const providerSrc = snapshots?.byProvider
    if (providerSrc && typeof providerSrc === 'object') {
      for (const [providerKey, entry] of Object.entries(providerSrc)) {
        if (!supportsUsagePercent(providerKey)) continue
        if (hasPercent(entry) && isSnapshotFresh(entry, now, providerKey)) {
          byProvider[providerKey] = entry.quotaPercent
        }
      }
    }

    return { byProviderModel, byProvider, legacyByModel }
  } catch {
    // Malformed JSON or unreadable file: degrade to "no usage data".
    return empty()
  }
}
199
+
200
/**
 * Load token-stats.json and return a plain object mapping provider+model → quotaPercent.
 *
 * Only includes models whose `quotaPercent` is a finite number and whose
 * snapshot is fresh (within SNAPSHOT_TTL_MS).
 * Returns an empty object on any error (missing file, bad JSON, missing keys).
 *
 * @param {string} [statsFile] - Path to token-stats.json (defaults to ~/.free-coding-models/token-stats.json)
 * @returns {Record<string, number>} e.g. { 'groq::openai/gpt-oss-120b': 37 }
 */
export function loadUsageMap(statsFile = DEFAULT_STATS_FILE) {
  const { byProviderModel } = loadUsageSnapshot(statsFile)
  return byProviderModel
}
213
+
214
/**
 * Return the legacy quota percent remaining for a specific modelId.
 * This helper is retained for backward compatibility tests only.
 *
 * @param {string} modelId
 * @param {string} [statsFile] - Path to token-stats.json (defaults to ~/.free-coding-models/token-stats.json)
 * @returns {number | null} quota percent (0–100), or null if unknown/stale
 */
export function usageForModelId(modelId, statsFile = DEFAULT_STATS_FILE) {
  const { legacyByModel } = loadUsageSnapshot(statsFile)
  // Map values are always numbers, so nullish coalescing only converts
  // "absent" (undefined) into the null sentinel.
  return legacyByModel[modelId] ?? null
}
227
+
228
/**
 * Return quota percent for a table row with model-first, provider fallback.
 * Both model and provider snapshots are checked for freshness independently.
 * Returns null when both are absent or stale.
 *
 * @param {string} providerKey
 * @param {string} modelId
 * @param {string} [statsFile]
 * @returns {number | null}
 */
export function usageForRow(providerKey, modelId, statsFile = DEFAULT_STATS_FILE) {
  if (!supportsUsagePercent(providerKey)) return null
  const snapshot = loadUsageSnapshot(statsFile)

  // 1) Exact Origin+model entry wins.
  const modelValue = snapshot.byProviderModel[buildUsageSnapshotKey(providerKey, modelId)]
  if (modelValue !== undefined) return modelValue

  // 2) Fall back to the provider-wide aggregate.
  const providerValue = snapshot.byProvider[providerKey]
  return providerValue !== undefined ? providerValue : null
}
@@ -310,6 +310,12 @@ export const sortResults = (results, sortColumn, sortDirection) => {
310
310
  // 📖 Models with no data (-1) sort to the bottom
311
311
  cmp = getStabilityScore(a) - getStabilityScore(b)
312
312
  break
313
+ case 'usage':
314
+ // 📖 Sort by quota usage percent (usagePercent numeric field, 0–100)
315
+ // 📖 Models with no usage data (undefined/null) are treated as 0 — stable tie-break
316
+ // 📖 via JS stable sort preserving original order when values are equal
317
+ cmp = (a.usagePercent ?? 0) - (b.usagePercent ?? 0)
318
+ break
313
319
  }
314
320
 
315
321
  // 📖 Flip comparison for descending order
@@ -598,3 +604,52 @@ export function getTopRecommendations(results, taskType, priority, contextBudget
598
604
 
599
605
  return scored.slice(0, topN)
600
606
  }
607
+
608
/**
 * 📖 getProxyStatusInfo: Pure function that maps startup proxy status + active proxy state
 * 📖 to a normalised descriptor object consumed by the TUI footer indicator.
 *
 * 📖 Priority of evaluation:
 *  1. proxyStartupStatus.phase === 'starting' → state:'starting'
 *  2. proxyStartupStatus.phase === 'running'  → state:'running' with port/accountCount
 *  3. proxyStartupStatus.phase === 'failed'   → state:'failed' with truncated reason
 *  4. isProxyActive (legacy activeProxy flag) → state:'running' (no port detail)
 *  5. otherwise                               → state:'stopped'
 *
 * 📖 Any other `phase` value falls through to steps 4–5.
 * 📖 Reason is clamped to 80 characters to keep footer readable (no stack traces).
 *
 * @param {object|null} proxyStartupStatus — state.proxyStartupStatus value
 * @param {boolean} isProxyActive — truthy when the module-level activeProxy is non-null
 * @returns {{ state: string, port?: number, accountCount?: number, reason?: string }}
 */
export function getProxyStatusInfo(proxyStartupStatus, isProxyActive) {
  const MAX_REASON = 80

  if (proxyStartupStatus) {
    const { phase } = proxyStartupStatus
    if (phase === 'starting') {
      return { state: 'starting' }
    }
    if (phase === 'running') {
      return {
        state: 'running',
        port: proxyStartupStatus.port,
        accountCount: proxyStartupStatus.accountCount,
      }
    }
    if (phase === 'failed') {
      // 📖 Coerce to string first: `reason` may arrive as an Error or other
      // 📖 non-string, and both the length clamp and the footer need text.
      const raw = String(proxyStartupStatus.reason ?? 'unknown error')
      return {
        state: 'failed',
        reason: raw.length > MAX_REASON ? raw.slice(0, MAX_REASON - 1) + '…' : raw,
      }
    }
  }

  // 📖 Legacy fallback: activeProxy set directly (e.g. from manual proxy start without startup status)
  if (isProxyActive) {
    return { state: 'running' }
  }

  return { state: 'stopped' }
}