@chrysb/alphaclaw 0.4.3 → 0.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/public/js/components/file-tree.js +37 -8
- package/lib/public/js/components/gateway.js +74 -42
- package/lib/public/js/components/icons.js +13 -0
- package/lib/public/js/components/usage-tab/overview-section.js +100 -26
- package/lib/public/js/lib/api.js +31 -0
- package/lib/server/db/usage/index.js +35 -0
- package/lib/server/db/usage/pricing.js +82 -0
- package/lib/server/db/usage/schema.js +87 -0
- package/lib/server/db/usage/sessions.js +217 -0
- package/lib/server/db/usage/shared.js +139 -0
- package/lib/server/db/usage/summary.js +280 -0
- package/lib/server/db/usage/timeseries.js +64 -0
- package/lib/server/{watchdog-db.js → db/watchdog/index.js} +1 -18
- package/lib/server/db/watchdog/schema.js +21 -0
- package/lib/server/{webhooks-db.js → db/webhooks/index.js} +1 -22
- package/lib/server/db/webhooks/schema.js +25 -0
- package/lib/server/routes/browse/index.js +29 -0
- package/lib/server.js +3 -3
- package/package.json +1 -1
- package/lib/server/usage-db.js +0 -838
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
const {
|
|
2
|
+
kDefaultDays,
|
|
3
|
+
kDayMs,
|
|
4
|
+
kUtcTimeZone,
|
|
5
|
+
coerceInt,
|
|
6
|
+
toDayKey,
|
|
7
|
+
toTimeZoneDayKey,
|
|
8
|
+
getPeriodRange,
|
|
9
|
+
getUsageMetricsFromEventRow,
|
|
10
|
+
parseAgentAndSourceFromSessionRef,
|
|
11
|
+
} = require("./shared");
|
|
12
|
+
|
|
13
|
+
// Known turn sources, in the order they are reported in sourceBreakdown.
const kKnownSources = ["chat", "hooks", "cron"];

// Builds a zeroed token/cost accumulator (shared shape for agent totals and
// per-source totals).
const makeEmptyTotals = () => ({
  inputTokens: 0,
  outputTokens: 0,
  cacheReadTokens: 0,
  cacheWriteTokens: 0,
  totalTokens: 0,
  totalCost: 0,
  turnCount: 0,
});

/**
 * Aggregates per-agent token usage and cost from raw usage_events rows.
 *
 * @param {object} options
 * @param {Array<object>} [options.eventsRows] - Rows carrying timestamp,
 *   session identifiers and token columns (see getUsageMetricsFromEventRow).
 * @param {string} [options.startDay] - Inclusive YYYY-MM-DD lower bound; rows
 *   whose day key sorts before it are skipped.
 * @param {string} [options.timeZone] - IANA time zone used to bucket rows into days.
 * @returns {{agents: Array<object>, totals: {totalCost: number, totalTokens: number, turnCount: number}}}
 *   Agents sorted by descending totalCost, each with a sourceBreakdown array
 *   listing "chat", "hooks" and "cron" (plus any unexpected sources, appended).
 */
const getAgentCostDistribution = ({
  eventsRows = [],
  startDay = "",
  timeZone = kUtcTimeZone,
}) => {
  const byAgent = new Map();

  // Lazily creates the accumulator for an agent, pre-seeding the three known
  // sources so they always appear in the breakdown (even with zero usage).
  const ensureAgentBucket = (agent) => {
    if (byAgent.has(agent)) return byAgent.get(agent);
    const bucket = {
      agent,
      ...makeEmptyTotals(),
      sourceBreakdown: Object.fromEntries(
        kKnownSources.map((source) => [source, { source, ...makeEmptyTotals() }]),
      ),
    };
    byAgent.set(agent, bucket);
    return bucket;
  };

  // Adds one event's metrics into an accumulator (agent- or source-level).
  const addMetrics = (target, metrics) => {
    target.inputTokens += metrics.inputTokens;
    target.outputTokens += metrics.outputTokens;
    target.cacheReadTokens += metrics.cacheReadTokens;
    target.cacheWriteTokens += metrics.cacheWriteTokens;
    target.totalTokens += metrics.totalTokens;
    target.totalCost += metrics.totalCost;
    target.turnCount += 1;
  };

  for (const eventRow of eventsRows) {
    const timestamp = coerceInt(eventRow.timestamp);
    const dayKey = timeZone === kUtcTimeZone
      ? toDayKey(timestamp)
      : toTimeZoneDayKey(timestamp, timeZone);
    if (dayKey < startDay) continue;

    const metrics = getUsageMetricsFromEventRow(eventRow);
    const sessionRef = String(eventRow.session_key || eventRow.session_id || "");
    const { agent, source } = parseAgentAndSourceFromSessionRef(sessionRef);
    const agentBucket = ensureAgentBucket(agent);

    addMetrics(agentBucket, metrics);

    // FIX: the original indexed sourceBreakdown[source] unconditionally and
    // threw a TypeError when the parsed source was not one of the known
    // values; unknown sources are now added to the breakdown on first use.
    const sourceBucket =
      agentBucket.sourceBreakdown[source] ||
      (agentBucket.sourceBreakdown[source] = { source, ...makeEmptyTotals() });
    addMetrics(sourceBucket, metrics);
  }

  const agents = Array.from(byAgent.values())
    .map((bucket) => ({
      agent: bucket.agent,
      inputTokens: bucket.inputTokens,
      outputTokens: bucket.outputTokens,
      cacheReadTokens: bucket.cacheReadTokens,
      cacheWriteTokens: bucket.cacheWriteTokens,
      totalTokens: bucket.totalTokens,
      totalCost: bucket.totalCost,
      turnCount: bucket.turnCount,
      // Known sources keep their seeded order; any extra sources follow.
      sourceBreakdown: Object.keys(bucket.sourceBreakdown).map(
        (source) => bucket.sourceBreakdown[source],
      ),
    }))
    .sort((a, b) => b.totalCost - a.totalCost);

  return {
    agents,
    totals: agents.reduce(
      (acc, agentBucket) => {
        acc.totalCost += Number(agentBucket.totalCost || 0);
        acc.totalTokens += Number(agentBucket.totalTokens || 0);
        acc.turnCount += Number(agentBucket.turnCount || 0);
        return acc;
      },
      { totalCost: 0, totalTokens: 0, turnCount: 0 },
    ),
  };
};
|
|
126
|
+
|
|
127
|
+
/**
 * Builds a per-day usage summary (tokens, cost, per-model rows) over the last
 * `days` days, plus a per-agent cost distribution for the same window.
 *
 * @param {object} [options]
 * @param {object} options.database - Handle exposing prepare() (node:sqlite DatabaseSync).
 * @param {number} [options.days] - Requested window length; normalized by getPeriodRange.
 * @param {string} [options.timeZone] - IANA time zone used to bucket events into days.
 * @returns {{updatedAt: number, days: number, timeZone: string,
 *   daily: Array<object>, totals: object, costByAgent: object}}
 */
const getDailySummary = ({ database, days = kDefaultDays, timeZone = kUtcTimeZone } = {}) => {
  const { now, safeDays, startDay, timeZone: normalizedTimeZone } = getPeriodRange(
    days,
    timeZone,
  );
  // Fetch two extra days of events so time-zone offsets at the window edge
  // cannot drop rows; exact filtering happens per-row via dayKey below.
  const lookbackMs = now - (safeDays + 2) * kDayMs;
  const eventsRows = database
    .prepare(`
      SELECT
        timestamp,
        session_id,
        session_key,
        provider,
        model,
        input_tokens,
        output_tokens,
        cache_read_tokens,
        cache_write_tokens,
        total_tokens
      FROM usage_events
      WHERE timestamp >= $lookbackMs
      ORDER BY timestamp ASC
    `)
    .all({ $lookbackMs: lookbackMs });
  // Aggregate per (day, model) pair, keyed with a NUL separator so model
  // names cannot collide with the date portion of the key.
  const byDateModel = new Map();
  for (const eventRow of eventsRows) {
    const timestamp = coerceInt(eventRow.timestamp);
    const dayKey = normalizedTimeZone === kUtcTimeZone
      ? toDayKey(timestamp)
      : toTimeZoneDayKey(timestamp, normalizedTimeZone);
    // Drop rows from the over-fetched lookback padding.
    if (dayKey < startDay) continue;
    const model = String(eventRow.model || "unknown");
    const mapKey = `${dayKey}\u0000${model}`;
    if (!byDateModel.has(mapKey)) {
      byDateModel.set(mapKey, {
        date: dayKey,
        model,
        provider: String(eventRow.provider || "unknown"),
        inputTokens: 0,
        outputTokens: 0,
        cacheReadTokens: 0,
        cacheWriteTokens: 0,
        totalTokens: 0,
        turnCount: 0,
        totalCost: 0,
        inputCost: 0,
        outputCost: 0,
        cacheReadCost: 0,
        cacheWriteCost: 0,
        pricingFound: false,
      });
    }
    const aggregate = byDateModel.get(mapKey);
    const metrics = getUsageMetricsFromEventRow(eventRow);
    aggregate.inputTokens += metrics.inputTokens;
    aggregate.outputTokens += metrics.outputTokens;
    aggregate.cacheReadTokens += metrics.cacheReadTokens;
    aggregate.cacheWriteTokens += metrics.cacheWriteTokens;
    aggregate.totalTokens += metrics.totalTokens;
    aggregate.turnCount += 1;
    aggregate.totalCost += metrics.totalCost;
    aggregate.inputCost += metrics.inputCost;
    aggregate.outputCost += metrics.outputCost;
    aggregate.cacheReadCost += metrics.cacheReadCost;
    aggregate.cacheWriteCost += metrics.cacheWriteCost;
    // Sticky flag: true once any row in the bucket had known pricing.
    aggregate.pricingFound = aggregate.pricingFound || metrics.pricingFound;
    // NOTE(review): provider is seeded from the first row and is always a
    // non-empty string there, so this backfill branch appears unreachable.
    if (!aggregate.provider && eventRow.provider) {
      aggregate.provider = String(eventRow.provider || "unknown");
    }
  }
  // Order rows by date ascending; within a day, heaviest model first.
  const enriched = Array.from(byDateModel.values()).sort((a, b) => {
    if (a.date === b.date) return b.totalTokens - a.totalTokens;
    return a.date.localeCompare(b.date);
  });
  const costByAgent = getAgentCostDistribution({
    eventsRows,
    startDay,
    timeZone: normalizedTimeZone,
  });
  // Group the sorted per-model rows under their date (insertion order keeps
  // dates ascending for the `daily` array built below).
  const byDate = new Map();
  for (const row of enriched) {
    if (!byDate.has(row.date)) byDate.set(row.date, []);
    byDate.get(row.date).push({
      model: row.model,
      provider: row.provider,
      inputTokens: row.inputTokens,
      outputTokens: row.outputTokens,
      cacheReadTokens: row.cacheReadTokens,
      cacheWriteTokens: row.cacheWriteTokens,
      totalTokens: row.totalTokens,
      turnCount: row.turnCount,
      totalCost: row.totalCost,
      inputCost: row.inputCost,
      outputCost: row.outputCost,
      cacheReadCost: row.cacheReadCost,
      cacheWriteCost: row.cacheWriteCost,
      pricingFound: row.pricingFound,
    });
  }
  const daily = [];
  const totals = {
    inputTokens: 0,
    outputTokens: 0,
    cacheReadTokens: 0,
    cacheWriteTokens: 0,
    totalTokens: 0,
    totalCost: 0,
    turnCount: 0,
    modelCount: 0,
  };
  for (const [date, modelRows] of byDate.entries()) {
    // Collapse the day's per-model rows into one day-level aggregate.
    const aggregate = modelRows.reduce(
      (acc, row) => ({
        inputTokens: acc.inputTokens + row.inputTokens,
        outputTokens: acc.outputTokens + row.outputTokens,
        cacheReadTokens: acc.cacheReadTokens + row.cacheReadTokens,
        cacheWriteTokens: acc.cacheWriteTokens + row.cacheWriteTokens,
        totalTokens: acc.totalTokens + row.totalTokens,
        totalCost: acc.totalCost + row.totalCost,
        turnCount: acc.turnCount + row.turnCount,
      }),
      {
        inputTokens: 0,
        outputTokens: 0,
        cacheReadTokens: 0,
        cacheWriteTokens: 0,
        totalTokens: 0,
        totalCost: 0,
        turnCount: 0,
      },
    );
    daily.push({ date, ...aggregate, models: modelRows });
    totals.inputTokens += aggregate.inputTokens;
    totals.outputTokens += aggregate.outputTokens;
    totals.cacheReadTokens += aggregate.cacheReadTokens;
    totals.cacheWriteTokens += aggregate.cacheWriteTokens;
    totals.totalTokens += aggregate.totalTokens;
    totals.totalCost += aggregate.totalCost;
    totals.turnCount += aggregate.turnCount;
    // modelCount counts (day, model) rows across the window, not unique models.
    totals.modelCount += modelRows.length;
  }
  return {
    updatedAt: Date.now(),
    days: safeDays,
    timeZone: normalizedTimeZone,
    daily,
    totals,
    costByAgent,
  };
};
|
|
277
|
+
|
|
278
|
+
module.exports = {
|
|
279
|
+
getDailySummary,
|
|
280
|
+
};
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
const {
|
|
2
|
+
kDefaultMaxPoints,
|
|
3
|
+
kMaxMaxPoints,
|
|
4
|
+
coerceInt,
|
|
5
|
+
clampInt,
|
|
6
|
+
getUsageMetricsFromEventRow,
|
|
7
|
+
downsamplePoints,
|
|
8
|
+
} = require("./shared");
|
|
9
|
+
|
|
10
|
+
/**
 * Returns the chronological token/cost series for one session, downsampled to
 * at most maxPoints entries.
 *
 * @param {object} options
 * @param {object} options.database - Handle exposing prepare() (node:sqlite DatabaseSync).
 * @param {string} options.sessionId - Session key or raw session id to match.
 * @param {number} [options.maxPoints] - Upper bound on returned points.
 * @returns {{sessionId: string, points: Array<object>}}
 */
const getSessionTimeSeries = ({
  database,
  sessionId,
  maxPoints = kDefaultMaxPoints,
}) => {
  const safeSessionRef = String(sessionId || "").trim();
  if (!safeSessionRef) return { sessionId: safeSessionRef, points: [] };

  // Clamp up front; the bound only matters for downsampling below.
  const safeMaxPoints = clampInt(maxPoints, 10, kMaxMaxPoints, kDefaultMaxPoints);

  const eventRows = database
    .prepare(`
      SELECT
        timestamp,
        session_key,
        session_id,
        model,
        input_tokens,
        output_tokens,
        cache_read_tokens,
        cache_write_tokens,
        total_tokens
      FROM usage_events
      WHERE COALESCE(NULLIF(session_key, ''), NULLIF(session_id, '')) = $sessionRef
      ORDER BY timestamp ASC
    `)
    .all({ $sessionRef: safeSessionRef });

  // Running totals across the session, carried into every point.
  let runningTokens = 0;
  let runningCost = 0;
  const points = [];
  for (const eventRow of eventRows) {
    const metrics = getUsageMetricsFromEventRow(eventRow);
    runningTokens += metrics.totalTokens;
    runningCost += metrics.totalCost;
    points.push({
      timestamp: coerceInt(eventRow.timestamp),
      sessionKey: String(eventRow.session_key || ""),
      rawSessionId: String(eventRow.session_id || ""),
      model: String(eventRow.model || ""),
      inputTokens: metrics.inputTokens,
      outputTokens: metrics.outputTokens,
      cacheReadTokens: metrics.cacheReadTokens,
      cacheWriteTokens: metrics.cacheWriteTokens,
      totalTokens: metrics.totalTokens,
      cost: metrics.totalCost,
      cumulativeTokens: runningTokens,
      cumulativeCost: runningCost,
    });
  }

  return {
    sessionId: safeSessionRef,
    points: downsamplePoints(points, safeMaxPoints),
  };
};
|
|
61
|
+
|
|
62
|
+
module.exports = {
|
|
63
|
+
getSessionTimeSeries,
|
|
64
|
+
};
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
const fs = require("fs");
|
|
2
2
|
const path = require("path");
|
|
3
3
|
const { DatabaseSync } = require("node:sqlite");
|
|
4
|
+
const { createSchema } = require("./schema");
|
|
4
5
|
|
|
5
6
|
let db = null;
|
|
6
7
|
let pruneTimer = null;
|
|
@@ -14,24 +15,6 @@ const ensureDb = () => {
|
|
|
14
15
|
return db;
|
|
15
16
|
};
|
|
16
17
|
|
|
17
|
-
const createSchema = (database) => {
|
|
18
|
-
database.exec(`
|
|
19
|
-
CREATE TABLE IF NOT EXISTS watchdog_events (
|
|
20
|
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
21
|
-
event_type TEXT NOT NULL,
|
|
22
|
-
source TEXT NOT NULL,
|
|
23
|
-
status TEXT NOT NULL,
|
|
24
|
-
details TEXT,
|
|
25
|
-
correlation_id TEXT,
|
|
26
|
-
created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
|
|
27
|
-
);
|
|
28
|
-
`);
|
|
29
|
-
database.exec(`
|
|
30
|
-
CREATE INDEX IF NOT EXISTS idx_watchdog_events_ts
|
|
31
|
-
ON watchdog_events(created_at DESC);
|
|
32
|
-
`);
|
|
33
|
-
};
|
|
34
|
-
|
|
35
18
|
const initWatchdogDb = ({ rootDir, pruneDays = 30 }) => {
|
|
36
19
|
const dbDir = path.join(rootDir, "db");
|
|
37
20
|
fs.mkdirSync(dbDir, { recursive: true });
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
 * Creates the watchdog_events table and its timestamp index, if they do not
 * already exist.
 *
 * @param {object} database - Handle exposing exec(sql) (node:sqlite DatabaseSync).
 */
const createSchema = (database) => {
  const kTableSql = `
    CREATE TABLE IF NOT EXISTS watchdog_events (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      event_type TEXT NOT NULL,
      source TEXT NOT NULL,
      status TEXT NOT NULL,
      details TEXT,
      correlation_id TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
    );
  `;
  // Newest-first index matching the dashboard's "recent events" query order.
  const kIndexSql = `
    CREATE INDEX IF NOT EXISTS idx_watchdog_events_ts
      ON watchdog_events(created_at DESC);
  `;
  database.exec(kTableSql);
  database.exec(kIndexSql);
};
|
|
18
|
+
|
|
19
|
+
module.exports = {
|
|
20
|
+
createSchema,
|
|
21
|
+
};
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
const fs = require("fs");
|
|
2
2
|
const path = require("path");
|
|
3
3
|
const { DatabaseSync } = require("node:sqlite");
|
|
4
|
+
const { createSchema } = require("./schema");
|
|
4
5
|
|
|
5
6
|
let db = null;
|
|
6
7
|
let pruneTimer = null;
|
|
@@ -14,28 +15,6 @@ const ensureDb = () => {
|
|
|
14
15
|
return db;
|
|
15
16
|
};
|
|
16
17
|
|
|
17
|
-
const createSchema = (database) => {
|
|
18
|
-
database.exec(`
|
|
19
|
-
CREATE TABLE IF NOT EXISTS webhook_requests (
|
|
20
|
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
21
|
-
hook_name TEXT NOT NULL,
|
|
22
|
-
method TEXT,
|
|
23
|
-
headers TEXT,
|
|
24
|
-
payload TEXT,
|
|
25
|
-
payload_truncated INTEGER DEFAULT 0,
|
|
26
|
-
payload_size INTEGER,
|
|
27
|
-
source_ip TEXT,
|
|
28
|
-
gateway_status INTEGER,
|
|
29
|
-
gateway_body TEXT,
|
|
30
|
-
created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
|
|
31
|
-
);
|
|
32
|
-
`);
|
|
33
|
-
database.exec(`
|
|
34
|
-
CREATE INDEX IF NOT EXISTS idx_webhook_requests_hook_ts
|
|
35
|
-
ON webhook_requests(hook_name, created_at DESC);
|
|
36
|
-
`);
|
|
37
|
-
};
|
|
38
|
-
|
|
39
18
|
const initWebhooksDb = ({ rootDir, pruneDays = 30 }) => {
|
|
40
19
|
const dbDir = path.join(rootDir, "db");
|
|
41
20
|
fs.mkdirSync(dbDir, { recursive: true });
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Creates the webhook_requests table and its (hook, timestamp) index, if they
 * do not already exist.
 *
 * @param {object} database - Handle exposing exec(sql) (node:sqlite DatabaseSync).
 */
const createSchema = (database) => {
  const kTableSql = `
    CREATE TABLE IF NOT EXISTS webhook_requests (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      hook_name TEXT NOT NULL,
      method TEXT,
      headers TEXT,
      payload TEXT,
      payload_truncated INTEGER DEFAULT 0,
      payload_size INTEGER,
      source_ip TEXT,
      gateway_status INTEGER,
      gateway_body TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
    );
  `;
  // Composite index: per-hook listings are read newest-first.
  const kIndexSql = `
    CREATE INDEX IF NOT EXISTS idx_webhook_requests_hook_ts
      ON webhook_requests(hook_name, created_at DESC);
  `;
  database.exec(kTableSql);
  database.exec(kIndexSql);
};
|
|
22
|
+
|
|
23
|
+
module.exports = {
|
|
24
|
+
createSchema,
|
|
25
|
+
};
|
|
@@ -156,6 +156,35 @@ const registerBrowseRoutes = ({ app, fs, kRootDir }) => {
|
|
|
156
156
|
}
|
|
157
157
|
});
|
|
158
158
|
|
|
159
|
+
  // GET /api/browse/download — streams one file under the browse root as a
  // download attachment.
  app.get("/api/browse/download", (req, res) => {
    // Sandbox the requested path to the browse root before touching the
    // filesystem (resolveSafePath rejects escapes; its exact policy on
    // symlinks is defined elsewhere — NOTE(review): confirm there).
    const resolvedPath = resolveSafePath(
      req.query.path,
      kRootResolved,
      kRootWithSep,
      kRootDisplayName,
    );
    if (!resolvedPath.ok) {
      return res.status(400).json({ ok: false, error: resolvedPath.error });
    }
    try {
      // statSync throws for missing or unreadable paths; handled below.
      const stats = fs.statSync(resolvedPath.absolutePath);
      if (!stats.isFile()) {
        return res.status(400).json({ ok: false, error: "Path is not a file" });
      }
      // Prefer the root-relative name for the Content-Disposition filename.
      const fileName = path.basename(resolvedPath.relativePath || resolvedPath.absolutePath);
      return res.download(resolvedPath.absolutePath, fileName, (error) => {
        // res.download reports stream errors via callback; only send a JSON
        // error if headers were not already flushed mid-transfer.
        if (!error || res.headersSent) return;
        return res
          .status(500)
          .json({ ok: false, error: error.message || "Could not download file" });
      });
    } catch (error) {
      return res
        .status(500)
        .json({ ok: false, error: error.message || "Could not download file" });
    }
  });
|
|
187
|
+
|
|
159
188
|
app.get("/api/browse/git-summary", async (req, res) => {
|
|
160
189
|
try {
|
|
161
190
|
const envRepoSlug = parseGithubRepoSlug(
|
package/lib/server.js
CHANGED
|
@@ -30,19 +30,19 @@ const {
|
|
|
30
30
|
getRequestById,
|
|
31
31
|
getHookSummaries,
|
|
32
32
|
deleteRequestsByHook,
|
|
33
|
-
} = require("./server/webhooks
|
|
33
|
+
} = require("./server/db/webhooks");
|
|
34
34
|
const {
|
|
35
35
|
initWatchdogDb,
|
|
36
36
|
insertWatchdogEvent,
|
|
37
37
|
getRecentEvents,
|
|
38
|
-
} = require("./server/watchdog
|
|
38
|
+
} = require("./server/db/watchdog");
|
|
39
39
|
const {
|
|
40
40
|
initUsageDb,
|
|
41
41
|
getDailySummary,
|
|
42
42
|
getSessionsList,
|
|
43
43
|
getSessionDetail,
|
|
44
44
|
getSessionTimeSeries,
|
|
45
|
-
} = require("./server/usage
|
|
45
|
+
} = require("./server/db/usage");
|
|
46
46
|
const { createWebhookMiddleware } = require("./server/webhook-middleware");
|
|
47
47
|
const {
|
|
48
48
|
readEnvFile,
|