@leo000001/opencode-quota-sidebar 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +70 -0
- package/CONTRIBUTING.md +102 -0
- package/LICENSE +21 -0
- package/README.md +216 -0
- package/SECURITY.md +26 -0
- package/dist/cache.d.ts +6 -0
- package/dist/cache.js +22 -0
- package/dist/cost.d.ts +13 -0
- package/dist/cost.js +76 -0
- package/dist/format.d.ts +21 -0
- package/dist/format.js +426 -0
- package/dist/helpers.d.ts +14 -0
- package/dist/helpers.js +50 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +699 -0
- package/dist/period.d.ts +1 -0
- package/dist/period.js +14 -0
- package/dist/providers/common.d.ts +24 -0
- package/dist/providers/common.js +114 -0
- package/dist/providers/core/anthropic.d.ts +2 -0
- package/dist/providers/core/anthropic.js +46 -0
- package/dist/providers/core/copilot.d.ts +2 -0
- package/dist/providers/core/copilot.js +117 -0
- package/dist/providers/core/openai.d.ts +2 -0
- package/dist/providers/core/openai.js +159 -0
- package/dist/providers/index.d.ts +8 -0
- package/dist/providers/index.js +14 -0
- package/dist/providers/registry.d.ts +9 -0
- package/dist/providers/registry.js +38 -0
- package/dist/providers/third_party/rightcode.d.ts +2 -0
- package/dist/providers/third_party/rightcode.js +230 -0
- package/dist/providers/types.d.ts +58 -0
- package/dist/providers/types.js +1 -0
- package/dist/quota.d.ts +49 -0
- package/dist/quota.js +116 -0
- package/dist/quota_render.d.ts +5 -0
- package/dist/quota_render.js +85 -0
- package/dist/storage.d.ts +32 -0
- package/dist/storage.js +328 -0
- package/dist/storage_chunks.d.ts +9 -0
- package/dist/storage_chunks.js +147 -0
- package/dist/storage_dates.d.ts +9 -0
- package/dist/storage_dates.js +88 -0
- package/dist/storage_parse.d.ts +4 -0
- package/dist/storage_parse.js +149 -0
- package/dist/storage_paths.d.ts +14 -0
- package/dist/storage_paths.js +31 -0
- package/dist/title.d.ts +8 -0
- package/dist/title.js +38 -0
- package/dist/types.d.ts +116 -0
- package/dist/types.js +1 -0
- package/dist/usage.d.ts +51 -0
- package/dist/usage.js +243 -0
- package/package.json +68 -0
- package/quota-sidebar.config.example.json +25 -0
package/dist/storage.js
ADDED
|
@@ -0,0 +1,328 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { asBoolean, asNumber, debug, isRecord, swallow } from './helpers.js';
|
|
4
|
+
import { discoverChunks, readDayChunk, safeWriteFile, writeDayChunk, } from './storage_chunks.js';
|
|
5
|
+
import { dateKeyFromTimestamp, dateKeysInRange, dateStartFromKey, isDateKey, normalizeTimestampMs, } from './storage_dates.js';
|
|
6
|
+
import { parseQuotaCache, parseSessionTitleForMigration, } from './storage_parse.js';
|
|
7
|
+
import { authFilePath, chunkRootPathFromStateFile, resolveOpencodeDataDir, stateFilePath, } from './storage_paths.js';
|
|
8
|
+
export { authFilePath, dateKeyFromTimestamp, normalizeTimestampMs, resolveOpencodeDataDir, stateFilePath, };
|
|
9
|
+
// ─── Default config ──────────────────────────────────────────────────────────
|
|
10
|
+
// Baseline plugin configuration. loadConfig() falls back to these values
// field-by-field when the user's config file is missing or malformed.
export const defaultConfig = {
    sidebar: {
        enabled: true,
        width: 36, // columns; loadConfig clamps user-supplied widths to 20..60
        showCost: true,
        showQuota: true,
    },
    quota: {
        refreshMs: 5 * 60 * 1000, // 5 minutes; loadConfig enforces a 30s floor
        includeOpenAI: true,
        includeCopilot: true,
        includeAnthropic: true,
        providers: {}, // per-provider { enabled: boolean } overrides
        refreshAccessToken: false,
        requestTimeoutMs: 8_000, // loadConfig enforces a 1s floor
    },
    toast: {
        durationMs: 12_000, // loadConfig enforces a 1s floor
    },
    retentionDays: 730, // session retention window in days (~2 years)
};
|
|
31
|
+
/** Build a fresh, empty v2 state object (new containers on every call). */
export function defaultState() {
    const state = {
        version: 2,
        titleEnabled: true,
        sessionDateMap: {},
        sessions: {},
        quotaCache: {},
    };
    return state;
}
|
|
40
|
+
// ─── Config loading ──────────────────────────────────────────────────────────
|
|
41
|
+
/**
 * Load the plugin config from the first existing file in `paths`,
 * falling back to `defaultConfig` for the whole file or any bad field.
 * All numeric fields are clamped to sane ranges.
 */
export async function loadConfig(paths) {
    // Probe every candidate in parallel; the order of `paths` decides precedence.
    const candidates = await Promise.all(paths.map(async (filePath) => {
        const stat = await fs.stat(filePath).catch(swallow('loadConfig:stat'));
        return stat && stat.isFile() ? filePath : undefined;
    }));
    const chosen = candidates.find(Boolean);
    if (!chosen)
        return defaultConfig;
    const parsed = await fs
        .readFile(chosen, 'utf8')
        .then((text) => JSON.parse(text))
        .catch(swallow('loadConfig:read'));
    if (!isRecord(parsed))
        return defaultConfig;
    const sidebarRaw = isRecord(parsed.sidebar) ? parsed.sidebar : {};
    const quotaRaw = isRecord(parsed.quota) ? parsed.quota : {};
    const toastRaw = isRecord(parsed.toast) ? parsed.toast : {};
    const providersRaw = isRecord(quotaRaw.providers) ? quotaRaw.providers : {};
    // Per-provider overrides: only records with a boolean `enabled` survive.
    const providers = {};
    for (const [id, entry] of Object.entries(providersRaw)) {
        if (isRecord(entry) && typeof entry.enabled === 'boolean') {
            providers[id] = { enabled: entry.enabled };
        }
    }
    const clamp = (min, max, value) => Math.max(min, Math.min(max, value));
    return {
        sidebar: {
            enabled: asBoolean(sidebarRaw.enabled, defaultConfig.sidebar.enabled),
            width: clamp(20, 60, asNumber(sidebarRaw.width, defaultConfig.sidebar.width)),
            showCost: asBoolean(sidebarRaw.showCost, defaultConfig.sidebar.showCost),
            showQuota: asBoolean(sidebarRaw.showQuota, defaultConfig.sidebar.showQuota),
        },
        quota: {
            refreshMs: Math.max(30_000, asNumber(quotaRaw.refreshMs, defaultConfig.quota.refreshMs)),
            includeOpenAI: asBoolean(quotaRaw.includeOpenAI, defaultConfig.quota.includeOpenAI),
            includeCopilot: asBoolean(quotaRaw.includeCopilot, defaultConfig.quota.includeCopilot),
            includeAnthropic: asBoolean(quotaRaw.includeAnthropic, defaultConfig.quota.includeAnthropic),
            providers,
            refreshAccessToken: asBoolean(quotaRaw.refreshAccessToken, defaultConfig.quota.refreshAccessToken),
            requestTimeoutMs: Math.max(1000, asNumber(quotaRaw.requestTimeoutMs, defaultConfig.quota.requestTimeoutMs)),
        },
        toast: {
            durationMs: Math.max(1000, asNumber(toastRaw.durationMs, defaultConfig.toast.durationMs)),
        },
        retentionDays: Math.max(1, asNumber(parsed.retentionDays, defaultConfig.retentionDays)),
    };
}
|
|
90
|
+
// ─── State loading ───────────────────────────────────────────────────────────
|
|
91
|
+
/** P2: Lazy chunk loading — only load chunks for sessions in sessionDateMap. */
|
|
92
|
+
async function loadVersion2State(raw, statePath) {
    const titleEnabled = asBoolean(raw.titleEnabled, true);
    const quotaCache = parseQuotaCache(raw.quotaCache);
    const rootPath = chunkRootPathFromStateFile(statePath);
    // Keep only well-formed sessionID -> 'YYYY-MM-DD' entries from the file.
    const sessionDateMapRaw = isRecord(raw.sessionDateMap)
        ? raw.sessionDateMap
        : {};
    const sessionDateMap = Object.entries(sessionDateMapRaw).reduce((acc, [sessionID, value]) => {
        if (typeof value !== 'string')
            return acc;
        if (!isDateKey(value))
            return acc;
        acc[sessionID] = value;
        return acc;
    }, {});
    // P2: prefer the date keys recorded in the state file; only when the map is
    // empty do we fall back to walking the chunk directory tree on disk.
    const explicitDateKeys = Array.from(new Set(Object.values(sessionDateMap)));
    const discoveredDateKeys = explicitDateKeys.length
        ? []
        : await discoverChunks(rootPath);
    const dateKeys = explicitDateKeys.length
        ? explicitDateKeys
        : discoveredDateKeys;
    // Load every referenced day chunk in parallel.
    const chunks = await Promise.all(dateKeys.map(async (dateKey) => {
        const sessions = await readDayChunk(rootPath, dateKey);
        return [dateKey, sessions];
    }));
    // Merge chunk contents into one flat session table; backfill the date map
    // for any session the state file did not know about.
    const sessions = {};
    for (const [dateKey, chunkSessions] of chunks) {
        for (const [sessionID, session] of Object.entries(chunkSessions)) {
            sessions[sessionID] = session;
            if (!sessionDateMap[sessionID])
                sessionDateMap[sessionID] = dateKey;
        }
    }
    return {
        version: 2,
        titleEnabled,
        sessionDateMap,
        sessions,
        quotaCache,
    };
}
|
|
134
|
+
/**
|
|
135
|
+
* M3 fix: use session.createdAt from v1 state if available,
|
|
136
|
+
* otherwise fall back to Date.now() (unavoidable for truly missing data).
|
|
137
|
+
*/
|
|
138
|
+
function migrateVersion1State(raw) {
    const titleEnabled = asBoolean(raw.titleEnabled, true);
    const quotaCache = parseQuotaCache(raw.quotaCache);
    const sessionsRaw = isRecord(raw.sessions) ? raw.sessions : {};
    const sessions = {};
    const sessionDateMap = {};
    Object.entries(sessionsRaw).forEach(([sessionID, value]) => {
        // Sessions whose title cannot be recovered are dropped entirely.
        const title = parseSessionTitleForMigration(value);
        if (!title)
            return;
        // M3: recover createdAt from the v1 record; fall back to "now" only
        // when the data is truly missing.
        const storedCreatedAt = isRecord(value) ? asNumber(value.createdAt) : undefined;
        const createdAt = storedCreatedAt
            ? normalizeTimestampMs(storedCreatedAt)
            : Date.now();
        sessions[sessionID] = { ...title, createdAt };
        sessionDateMap[sessionID] = dateKeyFromTimestamp(createdAt);
    });
    return {
        version: 2,
        titleEnabled,
        sessionDateMap,
        sessions,
        quotaCache,
    };
}
|
|
168
|
+
/**
 * Load persisted plugin state, migrating v1 files to v2 on sight.
 * Any read/parse failure yields a fresh default state.
 */
export async function loadState(statePath) {
    const raw = await fs
        .readFile(statePath, 'utf8')
        .then((text) => JSON.parse(text))
        .catch(swallow('loadState'));
    if (!isRecord(raw)) {
        return defaultState();
    }
    switch (raw.version) {
        case 2:
            return loadVersion2State(raw, statePath);
        case 1: {
            const migrated = migrateVersion1State(raw);
            // Persist immediately so chunk files exist for range scans.
            await saveState(statePath, migrated, { writeAll: true }).catch(swallow('loadState:migrate'));
            return migrated;
        }
        default:
            return defaultState();
    }
}
|
|
185
|
+
// ─── State saving ────────────────────────────────────────────────────────────
|
|
186
|
+
// Quota snapshots older than this are considered stale and dropped on save.
const MAX_QUOTA_CACHE_AGE_MS = 24 * 60 * 60 * 1000;
/** Remove quota cache entries whose snapshot is older than 24 hours (mutates `state`). */
function pruneState(state) {
    const now = Date.now();
    const staleKeys = Object.entries(state.quotaCache)
        .filter(([, snapshot]) => now - snapshot.checkedAt > MAX_QUOTA_CACHE_AGE_MS)
        .map(([key]) => key);
    for (const key of staleKeys) {
        delete state.quotaCache[key];
    }
}
|
|
195
|
+
/**
|
|
196
|
+
* H1 fix: when dirtyDateKeys is empty and writeAll is not set, skip chunk writes entirely.
|
|
197
|
+
* M11 fix: only iterate sessions belonging to dirty date keys (not all sessions).
|
|
198
|
+
* M4 fix: atomic writes via safeWriteFile.
|
|
199
|
+
* P4 fix: sessionDateMap dirty flag tracked externally.
|
|
200
|
+
*/
|
|
201
|
+
export async function saveState(statePath, state, options) {
    // Drop stale (>24h) quota cache entries before persisting.
    pruneState(state);
    const rootPath = chunkRootPathFromStateFile(statePath);
    const writeAll = options?.writeAll === true;
    // H1 fix: if no dirty keys and not writeAll, only write the state file (no chunks)
    const dirtySet = writeAll
        ? undefined
        : new Set((options?.dirtyDateKeys ?? []).filter((key) => isDateKey(key)));
    const skipChunks = !writeAll && (!dirtySet || dirtySet.size === 0);
    // M11 fix: only build sessionsByDate for dirty keys (or all if writeAll)
    const sessionsByDate = {};
    if (!skipChunks) {
        for (const [sessionID, session] of Object.entries(state.sessions)) {
            // Repair missing/invalid createdAt in place (mutates `state`).
            const normalizedCreatedAt = Number.isFinite(session.createdAt) && session.createdAt > 0
                ? session.createdAt
                : Date.now();
            session.createdAt = normalizedCreatedAt;
            // Preserve any existing date-key assignment; derive one otherwise.
            const dateKey = state.sessionDateMap[sessionID] ||
                dateKeyFromTimestamp(normalizedCreatedAt);
            state.sessionDateMap[sessionID] = dateKey;
            // M11: skip sessions not in dirty set
            if (!writeAll && dirtySet && !dirtySet.has(dateKey))
                continue;
            const dateBucket = sessionsByDate[dateKey] || {};
            dateBucket[sessionID] = session;
            sessionsByDate[dateKey] = dateBucket;
        }
    }
    await fs.mkdir(path.dirname(statePath), { recursive: true });
    await fs.mkdir(rootPath, { recursive: true });
    // M4: atomic state file write
    // Note: state.sessions itself is NOT serialized here; sessions live in
    // per-day chunk files, the state file holds only the map pointing at them.
    await safeWriteFile(statePath, `${JSON.stringify({
        version: 2,
        titleEnabled: state.titleEnabled,
        sessionDateMap: state.sessionDateMap,
        quotaCache: state.quotaCache,
    }, null, 2)}\n`);
    if (skipChunks)
        return;
    const keysToWrite = writeAll
        ? Object.keys(sessionsByDate)
        : Array.from(dirtySet ?? []);
    // NOTE(review): a dirty key with zero surviving sessions is skipped below,
    // so its on-disk chunk is never rewritten/cleared — confirm this is intended.
    await Promise.all(keysToWrite
        .map((dateKey) => {
        const sessions = sessionsByDate[dateKey];
        if (!sessions)
            return undefined;
        return writeDayChunk(rootPath, dateKey, sessions);
    })
        .filter((promise) => Boolean(promise)));
}
|
|
252
|
+
// ─── Eviction (M2) ──────────────────────────────────────────────────────────
|
|
253
|
+
/**
|
|
254
|
+
* M2 fix: evict sessions older than retentionDays from memory.
|
|
255
|
+
* Chunk files remain on disk for historical range scans.
|
|
256
|
+
*/
|
|
257
|
+
export function evictOldSessions(state, retentionDays) {
    const cutoff = Date.now() - retentionDays * 24 * 60 * 60 * 1000;
    // Collect expired IDs first, then delete, so iteration never races mutation.
    const expired = Object.entries(state.sessions)
        .filter(([, session]) => session.createdAt < cutoff)
        .map(([sessionID]) => sessionID);
    for (const sessionID of expired) {
        delete state.sessions[sessionID];
        delete state.sessionDateMap[sessionID];
    }
    if (expired.length > 0) {
        debug(`evicted ${expired.length} sessions older than ${retentionDays} days`);
    }
    return expired.length;
}
|
|
272
|
+
// ─── Range scan (M9 fix: prefer memory, fall back to disk) ──────────────────
|
|
273
|
+
/**
|
|
274
|
+
* M9 fix: scan from in-memory state first, only read disk for date keys
|
|
275
|
+
* not represented in memory.
|
|
276
|
+
*/
|
|
277
|
+
/**
 * Scan sessions whose createdAt falls in [startAt, endAt].
 *
 * Bug fix: the previous implementation skipped the disk read for any date key
 * that still had at least one in-memory session (`memoryDateKeys`). Because
 * eviction is millisecond-precise, a single day can be *partially* evicted
 * from memory; its remaining in-memory sessions then masked the day's chunk
 * file, and the evicted sessions were silently dropped from range scans —
 * contradicting the documented design that chunk files remain on disk for
 * historical scans. We now read every in-range day chunk (the chunk LRU cache
 * keeps repeated reads cheap) and dedupe against what memory already supplied.
 *
 * @param statePath   path of the state file; chunk root is derived from it
 * @param startAt     inclusive range start (ms)
 * @param endAt       inclusive range end (ms); defaults to now
 * @param memoryState optional in-memory state consulted before disk
 * @returns array of { sessionID, dateKey, state } entries
 */
export async function scanSessionsByCreatedRange(statePath, startAt, endAt = Date.now(), memoryState) {
    const rootPath = chunkRootPathFromStateFile(statePath);
    const dateKeys = dateKeysInRange(startAt, endAt);
    if (!dateKeys.length) {
        return [];
    }
    const results = [];
    const seenSessionIDs = new Set();
    const inRange = (createdAt) => createdAt >= startAt && createdAt <= endAt;
    // First pass: in-memory sessions (fresh data wins over disk).
    if (memoryState) {
        const dateKeySet = new Set(dateKeys);
        for (const [sessionID, session] of Object.entries(memoryState.sessions)) {
            const dk = memoryState.sessionDateMap[sessionID];
            if (!dk || !dateKeySet.has(dk))
                continue;
            // Fall back to the day's local midnight when createdAt is unusable.
            const createdAt = Number.isFinite(session.createdAt) && session.createdAt > 0
                ? session.createdAt
                : dateStartFromKey(dk);
            if (inRange(createdAt)) {
                results.push({ sessionID, dateKey: dk, state: session });
                seenSessionIDs.add(sessionID);
            }
        }
    }
    // Second pass: read EVERY in-range day chunk from disk and pick up any
    // session memory did not supply (e.g. evicted sessions on a partial day).
    const chunkEntries = await Promise.all(dateKeys.map(async (dateKey) => {
        const sessions = await readDayChunk(rootPath, dateKey);
        return Object.entries(sessions).map(([sessionID, state]) => ({
            sessionID,
            dateKey,
            state,
        }));
    }));
    for (const entry of chunkEntries.flat()) {
        if (seenSessionIDs.has(entry.sessionID))
            continue;
        const createdAt = Number.isFinite(entry.state.createdAt) && entry.state.createdAt > 0
            ? entry.state.createdAt
            : dateStartFromKey(entry.dateKey);
        if (inRange(createdAt)) {
            results.push(entry);
            seenSessionIDs.add(entry.sessionID);
        }
    }
    return results;
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { SessionState } from './types.js';
|
|
2
|
+
/** Load the sessions stored in the day chunk for `dateKey` under `rootPath` (LRU-cached); empty object when the chunk is missing or unreadable. */
export declare function readDayChunk(rootPath: string, dateKey: string): Promise<Record<string, SessionState>>;
/**
 * S3 fix: check for symlink before writing.
 * M4 fix: write to temp file then rename for atomicity.
 */
export declare function safeWriteFile(filePath: string, content: string): Promise<void>;
/** Serialize `sessions` into the chunk file for `dateKey`, creating parent directories as needed; invalidates the chunk cache entry. */
export declare function writeDayChunk(rootPath: string, dateKey: string, sessions: Record<string, SessionState>): Promise<void>;
/** Walk the on-disk chunk tree (`YYYY/MM/DD.json`) and return the sorted, de-duplicated date keys found. */
export declare function discoverChunks(rootPath: string): Promise<string[]>;
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import { randomBytes } from 'node:crypto';
|
|
2
|
+
import fs from 'node:fs/promises';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { debug, isRecord, swallow } from './helpers.js';
|
|
5
|
+
import { isDateKey } from './storage_dates.js';
|
|
6
|
+
import { parseSessionState } from './storage_parse.js';
|
|
7
|
+
import { chunkFilePath } from './storage_paths.js';
|
|
8
|
+
/** P2: Simple LRU cache for loaded chunks. */
|
|
9
|
+
/** P2: Simple LRU cache for loaded day chunks, keyed by date key. */
class ChunkCache {
    cache = new Map();
    maxSize;
    constructor(maxSize = 64) {
        this.maxSize = maxSize;
    }
    /** Return the cached sessions for `dateKey`, refreshing its recency; undefined on miss. */
    get(dateKey) {
        const hit = this.cache.get(dateKey);
        if (hit === undefined)
            return undefined;
        hit.accessedAt = Date.now();
        return hit.sessions;
    }
    /** Store sessions under `dateKey`, evicting the least recently accessed entry when full. */
    set(dateKey, sessions) {
        if (this.cache.size >= this.maxSize) {
            let victim;
            let victimTime = Infinity;
            this.cache.forEach((entry, key) => {
                if (entry.accessedAt < victimTime) {
                    victimTime = entry.accessedAt;
                    victim = key;
                }
            });
            if (victim)
                this.cache.delete(victim);
        }
        this.cache.set(dateKey, { sessions, accessedAt: Date.now() });
    }
    /** Drop the cached entry for `dateKey` (used after a chunk is rewritten). */
    invalidate(dateKey) {
        this.cache.delete(dateKey);
    }
}
// Module-wide singleton shared by readDayChunk / writeDayChunk.
const chunkCache = new ChunkCache();
|
|
43
|
+
/**
 * Read and validate the day chunk for `dateKey`, serving repeats from the LRU
 * cache. Missing/unreadable/wrong-version chunks yield an empty object.
 */
export async function readDayChunk(rootPath, dateKey) {
    const cached = chunkCache.get(dateKey);
    if (cached !== undefined)
        return cached;
    const filePath = chunkFilePath(rootPath, dateKey);
    const parsed = await fs
        .readFile(filePath, 'utf8')
        .then((text) => JSON.parse(text))
        .catch(swallow('readDayChunk'));
    if (!isRecord(parsed) || parsed.version !== 1)
        return {};
    const sessionsRaw = isRecord(parsed.sessions) ? parsed.sessions : {};
    // Keep only entries that survive session-state validation.
    const sessions = {};
    for (const [sessionID, value] of Object.entries(sessionsRaw)) {
        const session = parseSessionState(value);
        if (session)
            sessions[sessionID] = session;
    }
    chunkCache.set(dateKey, sessions);
    return sessions;
}
|
|
67
|
+
/**
|
|
68
|
+
* S3 fix: check for symlink before writing.
|
|
69
|
+
* M4 fix: write to temp file then rename for atomicity.
|
|
70
|
+
*/
|
|
71
|
+
export async function safeWriteFile(filePath, content) {
    // S3: refuse to write through symlinks
    const stat = await fs.lstat(filePath).catch(() => undefined);
    if (stat?.isSymbolicLink()) {
        const message = `refusing to write through symlink: ${filePath}`;
        debug(message);
        throw new Error(message);
    }
    // M4: atomic write via temp + rename
    const dir = path.dirname(filePath);
    const name = path.basename(filePath);
    const maxAttempts = 5;
    let lastError;
    for (let attempt = 0; attempt < maxAttempts; attempt++) {
        // Unique temp name: pid + random suffix; flag 'wx' fails with EEXIST
        // if another writer already created the same temp file.
        const suffix = randomBytes(4).toString('hex');
        const tmpPath = path.join(dir, `${name}.tmp.${process.pid}.${suffix}`);
        try {
            await fs.writeFile(tmpPath, content, { encoding: 'utf8', flag: 'wx' });
        }
        catch (error) {
            const code = error.code;
            if (code === 'EEXIST') {
                // Temp-name collision: retry with a fresh random suffix.
                lastError = error;
                continue;
            }
            throw error;
        }
        try {
            await fs.rename(tmpPath, filePath);
            return;
        }
        catch (error) {
            // Rename failed: remove the orphaned temp file, then surface the error.
            await fs.rm(tmpPath, { force: true }).catch(() => undefined);
            throw error;
        }
    }
    // All attempts collided on the temp name; rethrow the last EEXIST if usable.
    throw lastError instanceof Error
        ? lastError
        : new Error(`safeWriteFile failed for ${filePath}`);
}
|
|
111
|
+
/**
 * Atomically persist `sessions` as the day chunk for `dateKey`, then drop
 * any cached copy so the next read reflects the new contents.
 */
export async function writeDayChunk(rootPath, dateKey, sessions) {
    const filePath = chunkFilePath(rootPath, dateKey);
    await fs.mkdir(path.dirname(filePath), { recursive: true });
    const payload = JSON.stringify({ version: 1, dateKey, sessions }, null, 2);
    await safeWriteFile(filePath, `${payload}\n`);
    chunkCache.invalidate(dateKey);
}
|
|
122
|
+
/**
 * Walk the chunk tree (`rootPath/YYYY/MM/DD.json`) and return every valid
 * date key found, sorted and de-duplicated. Unreadable directories are
 * treated as empty.
 */
export async function discoverChunks(rootPath) {
    const YEAR_RE = /^\d{4}$/;
    const MONTH_RE = /^\d{2}$/;
    const DAY_FILE_RE = /^(\d{2})\.json$/;
    const listDir = (dir) => fs.readdir(dir).catch(() => []);
    const found = new Set();
    for (const year of await listDir(rootPath)) {
        if (!YEAR_RE.test(year))
            continue;
        const yearPath = path.join(rootPath, year);
        for (const month of await listDir(yearPath)) {
            if (!MONTH_RE.test(month))
                continue;
            const monthPath = path.join(yearPath, month);
            for (const dayFile of await listDir(monthPath)) {
                const match = DAY_FILE_RE.exec(dayFile);
                if (!match)
                    continue;
                const key = `${year}-${month}-${match[1]}`;
                // Calendar validation rejects impossible dates like 2024-02-31.
                if (isDateKey(key))
                    found.add(key);
            }
        }
    }
    return Array.from(found).sort();
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/** Coerce `value` to a millisecond timestamp; positive values below 1e12 are treated as seconds and scaled, invalid input yields `fallback` (default: now). */
export declare function normalizeTimestampMs(value: unknown, fallback?: number): number;
/** True when `value` is a calendar-valid `YYYY-MM-DD` key (validated in local time). */
export declare function isDateKey(value: string): boolean;
/**
 * Convert a timestamp (already in ms) to a date key string.
 * M12 fix: no double normalization — caller must pass ms.
 */
export declare function dateKeyFromTimestamp(timestampMs: number): string;
/** Local-midnight timestamp (ms) for `dateKey`, or 0 when the key is invalid. */
export declare function dateStartFromKey(dateKey: string): number;
/** Inclusive list of local-date keys spanning `startAt`..`endAt`; capped at 400 days (M7), empty on invalid input. */
export declare function dateKeysInRange(startAt: number, endAt: number): string[];
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { asNumber } from './helpers.js';
|
|
2
|
+
/**
 * Coerce an arbitrary value to a millisecond timestamp.
 * Non-numeric, non-positive, or NaN input yields `fallback` (default: now).
 */
export function normalizeTimestampMs(value, fallback = Date.now()) {
    const parsed = asNumber(value);
    if (parsed === undefined)
        return fallback;
    // `!(x > 0)` also catches NaN, unlike `x <= 0`.
    if (!(parsed > 0))
        return fallback;
    // Seconds -> ms heuristic: anything below ~1e12 is assumed to be seconds.
    return parsed < 1_000_000_000_000 ? parsed * 1000 : parsed;
}
|
|
13
|
+
/** Left-pad a number/string to two characters with '0'. */
function pad2(value) {
    return String(value).padStart(2, '0');
}
/**
 * Split a millisecond timestamp into local-time year/month/day strings.
 * M12 fix: accepts already-normalized ms timestamp — no double normalization.
 * An invalid timestamp falls back to the current date.
 */
function datePartsFromMs(timestampMs) {
    const parsed = new Date(timestampMs);
    const source = Number.isNaN(parsed.getTime()) ? new Date() : parsed;
    return {
        year: String(source.getFullYear()),
        month: pad2(source.getMonth() + 1),
        day: pad2(source.getDate()),
    };
}
|
|
36
|
+
/**
 * True when `value` is a syntactically and calendar-valid `YYYY-MM-DD` key.
 * A local-time Date round-trip rejects impossible dates like 2023-02-29.
 */
export function isDateKey(value) {
    const match = /^(\d{4})-(\d{2})-(\d{2})$/.exec(value);
    if (!match)
        return false;
    const year = Number(match[1]);
    const month = Number(match[2]);
    const day = Number(match[3]);
    if (!Number.isInteger(year))
        return false;
    if (!Number.isInteger(month) || month < 1 || month > 12)
        return false;
    if (!Number.isInteger(day) || day < 1 || day > 31)
        return false;
    // Date() normalizes overflow (e.g. Feb 30 -> Mar 2); a changed field
    // after the round-trip means the original date did not exist.
    const probe = new Date(year, month - 1, day);
    return probe.getFullYear() === year
        && probe.getMonth() === month - 1
        && probe.getDate() === day;
}
|
|
54
|
+
/**
 * Format an already-millisecond timestamp as a local-time `YYYY-MM-DD` key.
 * M12 fix: no double normalization — caller must pass ms.
 */
export function dateKeyFromTimestamp(timestampMs) {
    const parts = datePartsFromMs(timestampMs);
    return [parts.year, parts.month, parts.day].join('-');
}
|
|
62
|
+
/** Local-midnight timestamp (ms) for a `YYYY-MM-DD` key; 0 for invalid keys. */
export function dateStartFromKey(dateKey) {
    if (!isDateKey(dateKey))
        return 0;
    const [year, month, day] = dateKey.split('-').map(Number);
    return new Date(year, month - 1, day).getTime();
}
|
|
68
|
+
/** M7 fix: cap iteration at 400 days (~13 months). */
const MAX_DATE_RANGE_DAYS = 400;
/**
 * List the local-date keys covering [startAt, endAt] inclusive.
 * Invalid timestamps yield an empty list; the walk is capped at 400 days.
 */
export function dateKeysInRange(startAt, endAt) {
    const start = new Date(startAt);
    const end = new Date(endAt);
    if (Number.isNaN(start.getTime()) || Number.isNaN(end.getTime()))
        return [];
    // Truncate both endpoints to local midnight before walking day by day.
    const cursor = new Date(start.getFullYear(), start.getMonth(), start.getDate());
    const lastDay = new Date(end.getFullYear(), end.getMonth(), end.getDate());
    const keys = [];
    for (let step = 0; step < MAX_DATE_RANGE_DAYS && cursor.getTime() <= lastDay.getTime(); step++) {
        keys.push(dateKeyFromTimestamp(cursor.getTime()));
        cursor.setDate(cursor.getDate() + 1);
    }
    return keys;
}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { SessionState, SessionTitleState } from './types.js';
// Validation helpers for persisted state; implementations live in
// storage_parse.js (not shown here).
/** Parse an unknown value as a SessionState; undefined when it does not validate — TODO confirm exact validation rules in storage_parse.js. */
export declare function parseSessionState(value: unknown): SessionState | undefined;
/** Recover just the title-related fields from a legacy (v1) session record; undefined when unusable — used by the v1→v2 migration. */
export declare function parseSessionTitleForMigration(value: unknown): SessionTitleState | undefined;
/** Parse an unknown value into a quota cache map — presumably dropping malformed snapshots; verify against storage_parse.js. */
export declare function parseQuotaCache(value: unknown): Record<string, import("./types.js").QuotaSnapshot>;
|