@chrysb/alphaclaw 0.5.1-beta.0 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/alphaclaw.js +3 -1
- package/lib/public/css/theme.css +57 -0
- package/lib/public/js/app.js +63 -26
- package/lib/public/js/components/models-tab/index.js +203 -48
- package/lib/public/js/components/models-tab/provider-auth-card.js +20 -2
- package/lib/public/js/components/models.js +8 -9
- package/lib/public/js/components/onboarding/use-welcome-storage.js +2 -2
- package/lib/public/js/components/onboarding/welcome-config.js +1 -1
- package/lib/public/js/components/onboarding/welcome-form-step.js +2 -23
- package/lib/public/js/components/onboarding/welcome-header.js +19 -13
- package/lib/public/js/components/onboarding/welcome-import-step.js +6 -5
- package/lib/public/js/components/onboarding/welcome-pre-step.js +81 -0
- package/lib/public/js/components/onboarding/welcome-secret-review-step.js +12 -6
- package/lib/public/js/components/onboarding/welcome-secret-review-utils.js +19 -0
- package/lib/public/js/components/providers.js +9 -13
- package/lib/public/js/components/usage-tab/index.js +0 -43
- package/lib/public/js/components/usage-tab/use-usage-tab.js +0 -48
- package/lib/public/js/components/welcome/index.js +13 -2
- package/lib/public/js/components/welcome/use-welcome.js +19 -4
- package/lib/public/js/lib/api.js +0 -14
- package/lib/public/js/lib/model-config.js +149 -2
- package/lib/server/auth-profiles.js +14 -0
- package/lib/server/constants.js +23 -4
- package/lib/server/db/usage/index.js +0 -4
- package/lib/server/gateway.js +18 -2
- package/lib/server/onboarding/import/import-applier.js +127 -0
- package/lib/server/onboarding/import/import-scanner.js +8 -1
- package/lib/server/onboarding/import/secret-detector.js +52 -6
- package/lib/server/onboarding/index.js +126 -0
- package/lib/server/onboarding/openclaw.js +88 -5
- package/lib/server/onboarding/workspace.js +10 -0
- package/lib/server/routes/onboarding.js +12 -3
- package/lib/server/routes/proxy.js +7 -4
- package/lib/server/routes/system.js +14 -0
- package/lib/server/routes/usage.js +0 -80
- package/lib/server/webhook-middleware.js +5 -2
- package/lib/server.js +6 -11
- package/package.json +1 -1
- package/lib/server/db/usage/backfill.js +0 -416
|
@@ -1,416 +0,0 @@
|
|
|
1
|
-
const fs = require("fs");
|
|
2
|
-
const path = require("path");
|
|
3
|
-
const readline = require("readline");
|
|
4
|
-
|
|
5
|
-
// File name of the session metadata store inside the sessions directory.
const kSessionsStoreFileName = "sessions.json";
// Extension used by per-session transcript files (one JSON object per line).
const kSessionFileSuffix = ".jsonl";
|
|
7
|
-
|
|
8
|
-
/**
 * Coerce a value to a finite number.
 *
 * Finite numbers pass through unchanged; non-blank strings are parsed
 * with Number(). Anything else (NaN, Infinity, blank strings, other
 * types) yields undefined.
 *
 * @param {*} value - Candidate value.
 * @returns {number|undefined} The finite number, or undefined.
 */
const toFiniteNumber = (value) => {
  const isUsableNumber = typeof value === "number" && Number.isFinite(value);
  if (isUsableNumber) return value;

  const isCandidateString = typeof value === "string" && value.trim() !== "";
  if (!isCandidateString) return undefined;

  const numeric = Number(value);
  return Number.isFinite(numeric) ? numeric : undefined;
};
|
|
16
|
-
|
|
17
|
-
/**
 * Coerce a value to a non-negative integer.
 *
 * Non-numeric input yields undefined; values at or below zero clamp to
 * 0; fractional values are floored.
 *
 * @param {*} value - Candidate value.
 * @returns {number|undefined} Non-negative integer, or undefined.
 */
const toNonNegativeInt = (value) => {
  const numeric = toFiniteNumber(value);
  if (numeric === undefined) return undefined;
  return numeric > 0 ? Math.floor(numeric) : 0;
};
|
|
23
|
-
|
|
24
|
-
/**
 * Normalize a raw usage payload into canonical token counts.
 *
 * Accepts the many field spellings used by different providers
 * (camelCase, snake_case, and the OpenAI-style nested
 * prompt_tokens_details) and returns
 * { inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens,
 *   totalTokens }, or null when the payload carries no usable counts or
 * its total is zero.
 *
 * @param {*} rawUsage - Provider-specific usage object.
 * @returns {object|null} Normalized counts, or null.
 */
const normalizeUsage = (rawUsage) => {
  if (!rawUsage || typeof rawUsage !== "object") return null;

  // First non-nullish candidate wins, mirroring a ?? chain.
  const firstDefined = (...candidates) =>
    candidates.find((candidate) => candidate !== undefined && candidate !== null);

  const inputTokens = toNonNegativeInt(
    firstDefined(
      rawUsage.input,
      rawUsage.inputTokens,
      rawUsage.input_tokens,
      rawUsage.promptTokens,
      rawUsage.prompt_tokens,
    ),
  );
  const outputTokens = toNonNegativeInt(
    firstDefined(
      rawUsage.output,
      rawUsage.outputTokens,
      rawUsage.output_tokens,
      rawUsage.completionTokens,
      rawUsage.completion_tokens,
    ),
  );
  const cacheReadTokens = toNonNegativeInt(
    firstDefined(
      rawUsage.cacheRead,
      rawUsage.cache_read,
      rawUsage.cache_read_input_tokens,
      rawUsage.cached_tokens,
      rawUsage.prompt_tokens_details?.cached_tokens,
    ),
  );
  const cacheWriteTokens = toNonNegativeInt(
    firstDefined(
      rawUsage.cacheWrite,
      rawUsage.cache_write,
      rawUsage.cache_creation_input_tokens,
    ),
  );
  const totalTokens = toNonNegativeInt(
    firstDefined(rawUsage.total, rawUsage.totalTokens, rawUsage.total_tokens),
  );

  const allCounts = [
    inputTokens,
    outputTokens,
    cacheReadTokens,
    cacheWriteTokens,
    totalTokens,
  ];
  if (allCounts.every((count) => count === undefined)) return null;

  // When no explicit total is present, derive it from the parts.
  const derivedTotal =
    (inputTokens ?? 0) +
    (outputTokens ?? 0) +
    (cacheReadTokens ?? 0) +
    (cacheWriteTokens ?? 0);
  const normalized = {
    inputTokens: inputTokens ?? 0,
    outputTokens: outputTokens ?? 0,
    cacheReadTokens: cacheReadTokens ?? 0,
    cacheWriteTokens: cacheWriteTokens ?? 0,
    totalTokens: totalTokens ?? derivedTotal,
  };
  // A record whose total is zero carries no information.
  return normalized.totalTokens > 0 ? normalized : null;
};
|
|
80
|
-
|
|
81
|
-
/**
 * Extract an epoch-milliseconds timestamp from a transcript entry.
 *
 * Tries, in order: a positive numeric entry.timestamp, a date-string
 * entry.timestamp, then a positive numeric entry.message.timestamp.
 *
 * @param {*} entry - Parsed transcript line.
 * @returns {number|null} Epoch milliseconds, or null when none parse.
 */
const parseTimestampMs = (entry) => {
  const numericTimestamp = toFiniteNumber(entry?.timestamp);
  if (numericTimestamp !== undefined && numericTimestamp > 0) {
    return Math.floor(numericTimestamp);
  }

  const stringTimestamp = String(entry?.timestamp || "").trim();
  if (stringTimestamp) {
    // NOTE(review): Date parsing of non-ISO strings is
    // implementation-defined; callers presumably supply ISO 8601 here.
    const parsedMs = new Date(stringTimestamp).valueOf();
    if (Number.isFinite(parsedMs) && parsedMs > 0) return parsedMs;
  }

  const nestedTimestamp = toFiniteNumber(entry?.message?.timestamp);
  if (nestedTimestamp !== undefined && nestedTimestamp > 0) {
    return Math.floor(nestedTimestamp);
  }
  return null;
};
|
|
100
|
-
|
|
101
|
-
/**
 * Convert epoch milliseconds to a UTC calendar-day key ("YYYY-MM-DD").
 * Nullish or zero input maps to the epoch day, 1970-01-01.
 *
 * @param {number|string|null|undefined} timestampMs - Epoch milliseconds.
 * @returns {string} UTC day key.
 */
const toDayKey = (timestampMs) => {
  const instant = new Date(Number(timestampMs || 0));
  return instant.toISOString().slice(0, 10);
};
|
|
103
|
-
|
|
104
|
-
/**
 * Resolve the sessions directory under an OpenClaw home directory.
 * Layout: <openclawDir>/agents/main/sessions.
 *
 * @param {string|null|undefined} openclawDir - OpenClaw home directory.
 * @returns {string} Joined sessions-directory path.
 */
const resolveSessionsDir = (openclawDir) => {
  const baseDir = String(openclawDir || "");
  return path.join(baseDir, "agents", "main", "sessions");
};
|
|
106
|
-
|
|
107
|
-
/**
 * List session transcript files (*.jsonl) that are backfill candidates.
 *
 * When earliestExistingTimestampMs is a positive number, only files whose
 * mtime is strictly older than that cutoff qualify — newer files are
 * presumed covered by live usage tracking. Returns bare file names (not
 * paths), sorted lexicographically; unreadable entries are skipped.
 *
 * @param {object} args
 * @param {string} args.openclawDir - OpenClaw home directory.
 * @param {number|null} [args.earliestExistingTimestampMs] - Mtime cutoff (ms).
 * @param {object} [args.fsModule=fs] - fs override for testing.
 * @returns {string[]} Sorted candidate file names.
 */
const listBackfillCandidateFiles = ({
  openclawDir,
  earliestExistingTimestampMs = null,
  fsModule = fs,
}) => {
  const sessionsDir = resolveSessionsDir(openclawDir);
  if (!sessionsDir || !fsModule.existsSync(sessionsDir)) return [];

  const hasCutoff =
    Boolean(earliestExistingTimestampMs) && earliestExistingTimestampMs > 0;
  const candidateFiles = [];
  const dirEntries = fsModule.readdirSync(sessionsDir, { withFileTypes: true });
  for (const dirEntry of dirEntries) {
    if (!dirEntry?.isFile?.()) continue;
    const fileName = String(dirEntry.name || "");
    if (!fileName.endsWith(kSessionFileSuffix)) continue;
    if (!hasCutoff) {
      candidateFiles.push(fileName);
      continue;
    }
    try {
      const stats = fsModule.statSync(path.join(sessionsDir, fileName));
      if (Number(stats?.mtimeMs || 0) < earliestExistingTimestampMs) {
        candidateFiles.push(fileName);
      }
    } catch {
      // Best-effort: a file that cannot be stat'ed is simply skipped.
    }
  }
  return candidateFiles.sort((first, second) => first.localeCompare(second));
};
|
|
136
|
-
|
|
137
|
-
/**
 * Return the earliest usage_events timestamp recorded in the DB.
 *
 * MIN over an empty table yields NULL, which (like a zero timestamp)
 * maps to null here.
 *
 * @param {{prepare: Function}} database - Prepared-statement DB handle.
 * @returns {number|null} Earliest timestamp (epoch ms), or null.
 */
const getEarliestUsageTimestampMs = (database) => {
  const statement = database.prepare(
    "SELECT MIN(timestamp) AS earliest_timestamp FROM usage_events",
  );
  const earliest = toNonNegativeInt(statement.get()?.earliest_timestamp);
  return earliest && earliest > 0 ? earliest : null;
};
|
|
144
|
-
|
|
145
|
-
/**
 * Read and parse the sessions store (sessions.json) from a directory.
 *
 * Returns {} when the file is missing, unreadable, malformed, or does
 * not parse to an object.
 *
 * @param {object} args
 * @param {string} args.sessionsDir - Sessions directory path.
 * @param {object} [args.fsModule=fs] - fs override for testing.
 * @returns {object} Parsed store, or {}.
 */
const readSessionsStore = ({ sessionsDir, fsModule = fs }) => {
  const storePath = path.join(sessionsDir, kSessionsStoreFileName);
  if (!fsModule.existsSync(storePath)) return {};
  try {
    const parsed = JSON.parse(fsModule.readFileSync(storePath, "utf8"));
    if (parsed && typeof parsed === "object") return parsed;
  } catch {
    // Malformed JSON falls through to the empty-store default.
  }
  return {};
};
|
|
156
|
-
|
|
157
|
-
/**
 * Build a lookup from transcript file name to session context.
 *
 * Each store entry is indexed under both the basename of its recorded
 * sessionFile and the "<sessionId>.jsonl" naming convention, so either
 * form of file name resolves to { sessionKey, sessionId }.
 *
 * @param {object} args
 * @param {string} args.sessionsDir - Sessions directory path.
 * @param {object} [args.fsModule=fs] - fs override for testing.
 * @returns {Map<string, {sessionKey: string, sessionId: string}>}
 */
const buildSessionContextByFileName = ({ sessionsDir, fsModule = fs }) => {
  const store = readSessionsStore({ sessionsDir, fsModule });
  const contextByFileName = new Map();
  for (const [rawKey, storeEntry] of Object.entries(store)) {
    const sessionKey = String(rawKey || "").trim();
    if (!sessionKey) continue;
    const sessionId = String(storeEntry?.sessionId || "").trim();
    const sessionFile = String(storeEntry?.sessionFile || "").trim();
    const context = { sessionKey, sessionId };
    if (sessionFile) {
      contextByFileName.set(path.basename(sessionFile), context);
    }
    if (sessionId) {
      contextByFileName.set(`${sessionId}${kSessionFileSuffix}`, context);
    }
  }
  return contextByFileName;
};
|
|
178
|
-
|
|
179
|
-
/**
 * Resolve the session context for a transcript file name.
 *
 * Prefers the mapping built from the sessions store; otherwise derives
 * the session id from the file name by stripping a trailing .jsonl
 * (case-insensitive), with an empty sessionKey.
 *
 * @param {object} args
 * @param {string} args.fileName - Transcript file name.
 * @param {Map<string, object>} args.sessionContextByFileName - Store lookup.
 * @returns {{sessionKey: string, sessionId: string}}
 */
const resolveSessionContext = ({ fileName, sessionContextByFileName }) => {
  const knownContext = sessionContextByFileName.get(fileName);
  if (knownContext) return knownContext;
  const derivedSessionId = String(fileName || "").replace(/\.jsonl$/i, "");
  return { sessionKey: "", sessionId: derivedSessionId };
};
|
|
187
|
-
|
|
188
|
-
/**
 * Stream a session transcript (.jsonl) and emit normalized usage records.
 *
 * Each line is parsed as JSON; only assistant messages that carry usable
 * usage counts and a positive timestamp produce a record. When
 * earliestExistingTimestampMs is set, records at or after that cutoff are
 * skipped (presumed covered by live tracking). Every line that does not
 * produce a record triggers onSkip() exactly once.
 *
 * @param {object} args
 * @param {string} args.filePath - Path of the transcript file.
 * @param {Function} [args.onRecord] - Receives each usage record:
 *   { timestampMs, provider, model, runId, ...normalized usage counts }.
 * @param {Function} [args.onSkip] - Called once per skipped line.
 * @param {number|null} [args.earliestExistingTimestampMs] - Exclusive
 *   upper bound (epoch ms) for record timestamps.
 * @returns {Promise<void>}
 */
const scanUsageRecordsFromFile = async ({
  filePath,
  onRecord = () => {},
  onSkip = () => {},
  earliestExistingTimestampMs = null,
}) => {
  const stream = fs.createReadStream(filePath, { encoding: "utf8" });
  // crlfDelay: Infinity treats \r\n as one line break.
  const lines = readline.createInterface({ input: stream, crlfDelay: Infinity });
  try {
    for await (const rawLine of lines) {
      const trimmedLine = String(rawLine || "").trim();
      // Blank line — not a record.
      if (!trimmedLine) {
        onSkip();
        continue;
      }
      let parsed = null;
      try {
        parsed = JSON.parse(trimmedLine);
      } catch {
        // Unparseable line — count as skipped, keep scanning.
        onSkip();
        continue;
      }
      const message = parsed?.message;
      if (!message || typeof message !== "object") {
        onSkip();
        continue;
      }
      // Only assistant turns carry billable usage.
      if (String(message.role || "").trim() !== "assistant") {
        onSkip();
        continue;
      }
      // Usage may live on the message or at the top level of the entry.
      const usage = normalizeUsage(message.usage ?? parsed?.usage);
      if (!usage) {
        onSkip();
        continue;
      }
      const timestampMs = parseTimestampMs(parsed);
      if (!timestampMs) {
        onSkip();
        continue;
      }
      // At-or-after the cutoff means the live tracker already has it.
      if (
        earliestExistingTimestampMs &&
        timestampMs >= earliestExistingTimestampMs
      ) {
        onSkip();
        continue;
      }
      const provider =
        String(message.provider || parsed?.provider || "").trim() || "unknown";
      const model = String(message.model || parsed?.model || "").trim() || "unknown";
      // Run id appears under several spellings at either level.
      const runId =
        String(
          parsed?.runId ||
            parsed?.run_id ||
            message?.runId ||
            message?.run_id ||
            "",
        ).trim() || "";
      onRecord({
        timestampMs,
        provider,
        model,
        runId,
        ...usage,
      });
    }
  } finally {
    // Always release the readline interface and the underlying stream.
    lines.close();
    stream.destroy();
  }
};
|
|
260
|
-
|
|
261
|
-
/**
 * Report whether any transcript files remain to backfill into the usage DB.
 *
 * @param {object} args
 * @param {object} args.database - Prepared-statement DB handle.
 * @param {string} args.openclawDir - OpenClaw home directory.
 * @param {object} [args.fsModule=fs] - fs override for testing.
 * @returns {{available: boolean, estimatedFiles: number,
 *            earliestExistingTimestampMs: number|null}}
 * @throws {Error} When the database handle is missing.
 */
const getBackfillStatus = ({ database, openclawDir, fsModule = fs }) => {
  if (!database) throw new Error("Usage DB is not initialized");
  const earliestExistingTimestampMs = getEarliestUsageTimestampMs(database);
  const candidateFiles = listBackfillCandidateFiles({
    openclawDir,
    earliestExistingTimestampMs,
    fsModule,
  });
  const estimatedFiles = candidateFiles.length;
  return {
    available: estimatedFiles > 0,
    estimatedFiles,
    earliestExistingTimestampMs,
  };
};
|
|
275
|
-
|
|
276
|
-
/**
 * Backfill the usage DB from historical session transcript files.
 *
 * Scans candidate *.jsonl transcripts (those older than the earliest
 * usage row already in the DB), extracts assistant-turn usage records,
 * and writes them into usage_events plus the usage_daily aggregate
 * inside a single transaction, rolled back wholesale on any failure.
 *
 * @param {object} args
 * @param {object} args.database - Prepared-statement DB handle
 *   (prepare/exec with $-named parameters).
 * @param {string} args.openclawDir - OpenClaw home directory.
 * @param {object} [args.fsModule=fs] - fs override for testing.
 * @returns {Promise<{backfilledEvents: number, skippedEvents: number,
 *           filesScanned: number, cutoffMs: number|null}>}
 * @throws {Error} When the database handle is missing.
 */
const backfillFromTranscripts = async ({
  database,
  openclawDir,
  fsModule = fs,
}) => {
  if (!database) throw new Error("Usage DB is not initialized");
  // Only events strictly older than this cutoff are backfilled, so the
  // backfill and live tracking never double-count.
  const earliestExistingTimestampMs = getEarliestUsageTimestampMs(database);
  const sessionsDir = resolveSessionsDir(openclawDir);
  const files = listBackfillCandidateFiles({
    openclawDir,
    earliestExistingTimestampMs,
    fsModule,
  });
  const sessionContextByFileName = buildSessionContextByFileName({
    sessionsDir,
    fsModule,
  });

  // One raw event row per assistant turn.
  const insertUsageEventStmt = database.prepare(`
    INSERT INTO usage_events (
      timestamp,
      session_id,
      session_key,
      run_id,
      provider,
      model,
      input_tokens,
      output_tokens,
      cache_read_tokens,
      cache_write_tokens,
      total_tokens
    ) VALUES (
      $timestamp,
      $session_id,
      $session_key,
      $run_id,
      $provider,
      $model,
      $input_tokens,
      $output_tokens,
      $cache_read_tokens,
      $cache_write_tokens,
      $total_tokens
    )
  `);
  // Daily aggregate upsert: one (date, model) row accumulates token sums
  // and a turn count; provider keeps its prior value unless the incoming
  // row supplies one (COALESCE on excluded.provider).
  const upsertUsageDailyStmt = database.prepare(`
    INSERT INTO usage_daily (
      date,
      model,
      provider,
      input_tokens,
      output_tokens,
      cache_read_tokens,
      cache_write_tokens,
      total_tokens,
      turn_count
    ) VALUES (
      $date,
      $model,
      $provider,
      $input_tokens,
      $output_tokens,
      $cache_read_tokens,
      $cache_write_tokens,
      $total_tokens,
      1
    )
    ON CONFLICT(date, model) DO UPDATE SET
      provider = COALESCE(excluded.provider, usage_daily.provider),
      input_tokens = usage_daily.input_tokens + excluded.input_tokens,
      output_tokens = usage_daily.output_tokens + excluded.output_tokens,
      cache_read_tokens = usage_daily.cache_read_tokens + excluded.cache_read_tokens,
      cache_write_tokens = usage_daily.cache_write_tokens + excluded.cache_write_tokens,
      total_tokens = usage_daily.total_tokens + excluded.total_tokens,
      turn_count = usage_daily.turn_count + 1
  `);

  let backfilledEvents = 0;
  let skippedEvents = 0;
  let filesScanned = 0;
  // NOTE(review): the transaction stays open across awaited file reads;
  // presumably the driver is synchronous and no other writer shares this
  // handle — confirm against the DB setup.
  database.exec("BEGIN");
  try {
    for (const fileName of files) {
      filesScanned += 1;
      const filePath = path.join(sessionsDir, fileName);
      const sessionContext = resolveSessionContext({
        fileName,
        sessionContextByFileName,
      });
      await scanUsageRecordsFromFile({
        filePath,
        earliestExistingTimestampMs,
        onSkip: () => {
          skippedEvents += 1;
        },
        onRecord: (record) => {
          insertUsageEventStmt.run({
            $timestamp: record.timestampMs,
            $session_id: String(sessionContext.sessionId || ""),
            $session_key: String(sessionContext.sessionKey || ""),
            $run_id: String(record.runId || ""),
            $provider: record.provider,
            $model: record.model,
            $input_tokens: record.inputTokens,
            $output_tokens: record.outputTokens,
            $cache_read_tokens: record.cacheReadTokens,
            $cache_write_tokens: record.cacheWriteTokens,
            $total_tokens: record.totalTokens,
          });
          upsertUsageDailyStmt.run({
            $date: toDayKey(record.timestampMs),
            $model: record.model,
            $provider: record.provider,
            $input_tokens: record.inputTokens,
            $output_tokens: record.outputTokens,
            $cache_read_tokens: record.cacheReadTokens,
            $cache_write_tokens: record.cacheWriteTokens,
            $total_tokens: record.totalTokens,
          });
          backfilledEvents += 1;
        },
      });
    }
    database.exec("COMMIT");
  } catch (error) {
    // Any failure voids the whole backfill run.
    database.exec("ROLLBACK");
    throw error;
  }

  return {
    backfilledEvents,
    skippedEvents,
    filesScanned,
    cutoffMs: earliestExistingTimestampMs,
  };
};
|
|
412
|
-
|
|
413
|
-
// Public API: status probe plus the transcript backfill entry point.
module.exports = {
  getBackfillStatus,
  backfillFromTranscripts,
};
|