@chrysb/alphaclaw 0.4.3 → 0.4.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. package/lib/public/js/components/file-tree.js +37 -8
  2. package/lib/public/js/components/gateway.js +74 -42
  3. package/lib/public/js/components/icons.js +13 -0
  4. package/lib/public/js/components/usage-tab/overview-section.js +100 -26
  5. package/lib/public/js/lib/api.js +31 -0
  6. package/lib/server/constants.js +22 -26
  7. package/lib/server/db/usage/index.js +35 -0
  8. package/lib/server/db/usage/pricing.js +82 -0
  9. package/lib/server/db/usage/schema.js +87 -0
  10. package/lib/server/db/usage/sessions.js +217 -0
  11. package/lib/server/db/usage/shared.js +139 -0
  12. package/lib/server/db/usage/summary.js +280 -0
  13. package/lib/server/db/usage/timeseries.js +64 -0
  14. package/lib/server/{watchdog-db.js → db/watchdog/index.js} +1 -18
  15. package/lib/server/db/watchdog/schema.js +21 -0
  16. package/lib/server/{webhooks-db.js → db/webhooks/index.js} +1 -22
  17. package/lib/server/db/webhooks/schema.js +25 -0
  18. package/lib/server/gmail-push.js +102 -6
  19. package/lib/server/gmail-watch.js +5 -20
  20. package/lib/server/helpers.js +5 -21
  21. package/lib/server/routes/browse/index.js +29 -0
  22. package/lib/server/routes/google.js +2 -10
  23. package/lib/server/routes/telegram.js +3 -14
  24. package/lib/server/routes/usage.js +1 -5
  25. package/lib/server/routes/webhooks.js +2 -6
  26. package/lib/server/utils/boolean.js +22 -0
  27. package/lib/server/utils/json.js +31 -0
  28. package/lib/server/utils/network.js +5 -0
  29. package/lib/server/utils/number.js +8 -0
  30. package/lib/server/utils/shell.js +16 -0
  31. package/lib/server/webhook-middleware.js +1 -2
  32. package/lib/server.js +3 -3
  33. package/package.json +1 -1
  34. package/lib/server/usage-db.js +0 -838
@@ -0,0 +1,280 @@
1
+ const {
2
+ kDefaultDays,
3
+ kDayMs,
4
+ kUtcTimeZone,
5
+ coerceInt,
6
+ toDayKey,
7
+ toTimeZoneDayKey,
8
+ getPeriodRange,
9
+ getUsageMetricsFromEventRow,
10
+ parseAgentAndSourceFromSessionRef,
11
+ } = require("./shared");
12
+
13
/**
 * Aggregates usage event rows into per-agent token/cost totals, each with a
 * per-source (chat / hooks / cron) breakdown.
 *
 * @param {object} options
 * @param {Array<object>} [options.eventsRows] - raw `usage_events` rows.
 * @param {string} [options.startDay] - inclusive day-key lower bound; rows on
 *   earlier days are skipped.
 * @param {string} [options.timeZone] - IANA zone used to bucket timestamps
 *   into day keys (UTC takes a fast path).
 * @returns {{ agents: Array<object>, totals: { totalCost: number, totalTokens: number, turnCount: number } }}
 *   `agents` is sorted by totalCost descending.
 */
const getAgentCostDistribution = ({
  eventsRows = [],
  startDay = "",
  timeZone = kUtcTimeZone,
}) => {
  // Order matters: it is also the order of each agent's sourceBreakdown array.
  const kSourceNames = ["chat", "hooks", "cron"];

  // Zeroed token/cost accumulator shared by agent totals and source buckets
  // (replaces four hand-duplicated copies of the same literal).
  const createMetricsAccumulator = () => ({
    inputTokens: 0,
    outputTokens: 0,
    cacheReadTokens: 0,
    cacheWriteTokens: 0,
    totalTokens: 0,
    totalCost: 0,
    turnCount: 0,
  });

  // Fold one event's metrics into an accumulator; each event counts as a turn.
  const addMetrics = (accumulator, metrics) => {
    accumulator.inputTokens += metrics.inputTokens;
    accumulator.outputTokens += metrics.outputTokens;
    accumulator.cacheReadTokens += metrics.cacheReadTokens;
    accumulator.cacheWriteTokens += metrics.cacheWriteTokens;
    accumulator.totalTokens += metrics.totalTokens;
    accumulator.totalCost += metrics.totalCost;
    accumulator.turnCount += 1;
  };

  const byAgent = new Map();
  const ensureAgentBucket = (agent) => {
    const existing = byAgent.get(agent);
    if (existing) return existing;
    const bucket = {
      agent,
      ...createMetricsAccumulator(),
      sourceBreakdown: Object.fromEntries(
        kSourceNames.map((source) => [
          source,
          { source, ...createMetricsAccumulator() },
        ]),
      ),
    };
    byAgent.set(agent, bucket);
    return bucket;
  };

  for (const eventRow of eventsRows) {
    const timestamp = coerceInt(eventRow.timestamp);
    const dayKey = timeZone === kUtcTimeZone
      ? toDayKey(timestamp)
      : toTimeZoneDayKey(timestamp, timeZone);
    if (dayKey < startDay) continue;

    const metrics = getUsageMetricsFromEventRow(eventRow);
    // session_key is preferred; legacy rows may only carry session_id.
    const sessionRef = String(eventRow.session_key || eventRow.session_id || "");
    const { agent, source } = parseAgentAndSourceFromSessionRef(sessionRef);
    const agentBucket = ensureAgentBucket(agent);
    addMetrics(agentBucket, metrics);
    // Defensive: if the parsed source is not chat/hooks/cron, keep the agent
    // totals but skip source attribution instead of throwing on undefined.
    const sourceBucket = agentBucket.sourceBreakdown[source];
    if (sourceBucket) addMetrics(sourceBucket, metrics);
  }

  const agents = Array.from(byAgent.values())
    .map(({ sourceBreakdown, ...bucket }) => ({
      ...bucket,
      sourceBreakdown: kSourceNames.map((source) => sourceBreakdown[source]),
    }))
    .sort((a, b) => b.totalCost - a.totalCost);

  return {
    agents,
    totals: agents.reduce(
      (acc, agentBucket) => {
        acc.totalCost += Number(agentBucket.totalCost || 0);
        acc.totalTokens += Number(agentBucket.totalTokens || 0);
        acc.turnCount += Number(agentBucket.turnCount || 0);
        return acc;
      },
      { totalCost: 0, totalTokens: 0, turnCount: 0 },
    ),
  };
};
126
+
127
/**
 * Builds the daily usage summary for the last `days` days.
 *
 * Reads raw `usage_events` rows once, buckets them by (day, model), then
 * derives:
 *   - `daily`: one entry per day (date-ascending) with per-model rows and
 *     per-day aggregate totals,
 *   - `totals`: grand totals across the whole period,
 *   - `costByAgent`: per-agent cost distribution over the same rows.
 *
 * @param {object} options
 * @param {object} options.database - node:sqlite handle exposing `prepare(...).all(...)`.
 * @param {number} [options.days] - requested period length.
 * @param {string} [options.timeZone] - IANA zone used to assign events to days.
 */
const getDailySummary = ({ database, days = kDefaultDays, timeZone = kUtcTimeZone } = {}) => {
  // getPeriodRange presumably clamps `days` and normalizes the zone; the
  // normalized values are what all the bucketing below uses -- TODO confirm
  // against ./shared.
  const { now, safeDays, startDay, timeZone: normalizedTimeZone } = getPeriodRange(
    days,
    timeZone,
  );
  // Fetch two extra days of events so day boundaries shifted by the requested
  // time zone are not cut off; exact per-day filtering happens per-row below.
  const lookbackMs = now - (safeDays + 2) * kDayMs;
  const eventsRows = database
    .prepare(`
      SELECT
        timestamp,
        session_id,
        session_key,
        provider,
        model,
        input_tokens,
        output_tokens,
        cache_read_tokens,
        cache_write_tokens,
        total_tokens
      FROM usage_events
      WHERE timestamp >= $lookbackMs
      ORDER BY timestamp ASC
    `)
    .all({ $lookbackMs: lookbackMs });
  // Composite aggregation keyed by (day, model).
  const byDateModel = new Map();
  for (const eventRow of eventsRows) {
    const timestamp = coerceInt(eventRow.timestamp);
    // UTC has a cheaper fast path than arbitrary-zone day-key derivation.
    const dayKey = normalizedTimeZone === kUtcTimeZone
      ? toDayKey(timestamp)
      : toTimeZoneDayKey(timestamp, normalizedTimeZone);
    // Drop rows that fall before the period start (the 2-day buffer above).
    if (dayKey < startDay) continue;
    const model = String(eventRow.model || "unknown");
    // NUL separator cannot appear in a day key or model name, so the
    // composite key is unambiguous.
    const mapKey = `${dayKey}\u0000${model}`;
    if (!byDateModel.has(mapKey)) {
      byDateModel.set(mapKey, {
        date: dayKey,
        model,
        provider: String(eventRow.provider || "unknown"),
        inputTokens: 0,
        outputTokens: 0,
        cacheReadTokens: 0,
        cacheWriteTokens: 0,
        totalTokens: 0,
        turnCount: 0,
        totalCost: 0,
        inputCost: 0,
        outputCost: 0,
        cacheReadCost: 0,
        cacheWriteCost: 0,
        pricingFound: false,
      });
    }
    const aggregate = byDateModel.get(mapKey);
    const metrics = getUsageMetricsFromEventRow(eventRow);
    aggregate.inputTokens += metrics.inputTokens;
    aggregate.outputTokens += metrics.outputTokens;
    aggregate.cacheReadTokens += metrics.cacheReadTokens;
    aggregate.cacheWriteTokens += metrics.cacheWriteTokens;
    aggregate.totalTokens += metrics.totalTokens;
    aggregate.turnCount += 1;
    aggregate.totalCost += metrics.totalCost;
    aggregate.inputCost += metrics.inputCost;
    aggregate.outputCost += metrics.outputCost;
    aggregate.cacheReadCost += metrics.cacheReadCost;
    aggregate.cacheWriteCost += metrics.cacheWriteCost;
    // Sticky flag: true if pricing was found for any event in the bucket.
    aggregate.pricingFound = aggregate.pricingFound || metrics.pricingFound;
    // NOTE(review): `provider` is initialized to "unknown" (truthy) above, so
    // this backfill branch appears unreachable -- confirm intent.
    if (!aggregate.provider && eventRow.provider) {
      aggregate.provider = String(eventRow.provider || "unknown");
    }
  }
  // Date ascending; within a day, heaviest model (by tokens) first.
  const enriched = Array.from(byDateModel.values()).sort((a, b) => {
    if (a.date === b.date) return b.totalTokens - a.totalTokens;
    return a.date.localeCompare(b.date);
  });
  // Reuses the already-fetched rows with the same period filter.
  const costByAgent = getAgentCostDistribution({
    eventsRows,
    startDay,
    timeZone: normalizedTimeZone,
  });
  // Regroup the sorted (day, model) rows by day; insertion order of `byDate`
  // therefore stays date-ascending.
  const byDate = new Map();
  for (const row of enriched) {
    if (!byDate.has(row.date)) byDate.set(row.date, []);
    byDate.get(row.date).push({
      model: row.model,
      provider: row.provider,
      inputTokens: row.inputTokens,
      outputTokens: row.outputTokens,
      cacheReadTokens: row.cacheReadTokens,
      cacheWriteTokens: row.cacheWriteTokens,
      totalTokens: row.totalTokens,
      turnCount: row.turnCount,
      totalCost: row.totalCost,
      inputCost: row.inputCost,
      outputCost: row.outputCost,
      cacheReadCost: row.cacheReadCost,
      cacheWriteCost: row.cacheWriteCost,
      pricingFound: row.pricingFound,
    });
  }
  const daily = [];
  const totals = {
    inputTokens: 0,
    outputTokens: 0,
    cacheReadTokens: 0,
    cacheWriteTokens: 0,
    totalTokens: 0,
    totalCost: 0,
    turnCount: 0,
    modelCount: 0,
  };
  for (const [date, modelRows] of byDate.entries()) {
    // Per-day aggregate across that day's model rows.
    const aggregate = modelRows.reduce(
      (acc, row) => ({
        inputTokens: acc.inputTokens + row.inputTokens,
        outputTokens: acc.outputTokens + row.outputTokens,
        cacheReadTokens: acc.cacheReadTokens + row.cacheReadTokens,
        cacheWriteTokens: acc.cacheWriteTokens + row.cacheWriteTokens,
        totalTokens: acc.totalTokens + row.totalTokens,
        totalCost: acc.totalCost + row.totalCost,
        turnCount: acc.turnCount + row.turnCount,
      }),
      {
        inputTokens: 0,
        outputTokens: 0,
        cacheReadTokens: 0,
        cacheWriteTokens: 0,
        totalTokens: 0,
        totalCost: 0,
        turnCount: 0,
      },
    );
    daily.push({ date, ...aggregate, models: modelRows });
    totals.inputTokens += aggregate.inputTokens;
    totals.outputTokens += aggregate.outputTokens;
    totals.cacheReadTokens += aggregate.cacheReadTokens;
    totals.cacheWriteTokens += aggregate.cacheWriteTokens;
    totals.totalTokens += aggregate.totalTokens;
    totals.totalCost += aggregate.totalCost;
    totals.turnCount += aggregate.turnCount;
    // Counts (day, model) pairs, not distinct models across the period.
    totals.modelCount += modelRows.length;
  }
  return {
    updatedAt: Date.now(),
    days: safeDays,
    timeZone: normalizedTimeZone,
    daily,
    totals,
    costByAgent,
  };
};
277
+
278
// Public API of this module: the daily usage summary aggregation.
module.exports = {
  getDailySummary,
};
@@ -0,0 +1,64 @@
1
+ const {
2
+ kDefaultMaxPoints,
3
+ kMaxMaxPoints,
4
+ coerceInt,
5
+ clampInt,
6
+ getUsageMetricsFromEventRow,
7
+ downsamplePoints,
8
+ } = require("./shared");
9
+
10
/**
 * Builds a cumulative token/cost time series for one session.
 *
 * @param {object} options
 * @param {object} options.database - node:sqlite handle exposing `prepare(...).all(...)`.
 * @param {string} options.sessionId - session key (or legacy raw session id) to chart.
 * @param {number} [options.maxPoints] - cap on returned points; the series is
 *   downsampled to at most this many points.
 * @returns {{ sessionId: string, points: Array<object> }}
 */
const getSessionTimeSeries = ({
  database,
  sessionId,
  maxPoints = kDefaultMaxPoints,
}) => {
  const sessionRef = String(sessionId || "").trim();
  if (!sessionRef) return { sessionId: sessionRef, points: [] };

  // Match the normalized session_key when present, falling back to the
  // legacy raw session_id for older rows.
  const rows = database
    .prepare(`
      SELECT
        timestamp,
        session_key,
        session_id,
        model,
        input_tokens,
        output_tokens,
        cache_read_tokens,
        cache_write_tokens,
        total_tokens
      FROM usage_events
      WHERE COALESCE(NULLIF(session_key, ''), NULLIF(session_id, '')) = $sessionRef
      ORDER BY timestamp ASC
    `)
    .all({ $sessionRef: sessionRef });

  let runningTokens = 0;
  let runningCost = 0;
  const points = [];
  for (const row of rows) {
    const metrics = getUsageMetricsFromEventRow(row);
    runningTokens += metrics.totalTokens;
    runningCost += metrics.totalCost;
    points.push({
      timestamp: coerceInt(row.timestamp),
      sessionKey: String(row.session_key || ""),
      rawSessionId: String(row.session_id || ""),
      model: String(row.model || ""),
      inputTokens: metrics.inputTokens,
      outputTokens: metrics.outputTokens,
      cacheReadTokens: metrics.cacheReadTokens,
      cacheWriteTokens: metrics.cacheWriteTokens,
      totalTokens: metrics.totalTokens,
      cost: metrics.totalCost,
      cumulativeTokens: runningTokens,
      cumulativeCost: runningCost,
    });
  }

  const pointCap = clampInt(maxPoints, 10, kMaxMaxPoints, kDefaultMaxPoints);
  return {
    sessionId: sessionRef,
    points: downsamplePoints(points, pointCap),
  };
};
61
+
62
// Public API of this module: per-session cumulative usage series.
module.exports = {
  getSessionTimeSeries,
};
@@ -1,6 +1,7 @@
1
1
  const fs = require("fs");
2
2
  const path = require("path");
3
3
  const { DatabaseSync } = require("node:sqlite");
4
+ const { createSchema } = require("./schema");
4
5
 
5
6
  let db = null;
6
7
  let pruneTimer = null;
@@ -14,24 +15,6 @@ const ensureDb = () => {
14
15
  return db;
15
16
  };
16
17
 
17
- const createSchema = (database) => {
18
- database.exec(`
19
- CREATE TABLE IF NOT EXISTS watchdog_events (
20
- id INTEGER PRIMARY KEY AUTOINCREMENT,
21
- event_type TEXT NOT NULL,
22
- source TEXT NOT NULL,
23
- status TEXT NOT NULL,
24
- details TEXT,
25
- correlation_id TEXT,
26
- created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
27
- );
28
- `);
29
- database.exec(`
30
- CREATE INDEX IF NOT EXISTS idx_watchdog_events_ts
31
- ON watchdog_events(created_at DESC);
32
- `);
33
- };
34
-
35
18
  const initWatchdogDb = ({ rootDir, pruneDays = 30 }) => {
36
19
  const dbDir = path.join(rootDir, "db");
37
20
  fs.mkdirSync(dbDir, { recursive: true });
@@ -0,0 +1,21 @@
1
/**
 * Creates the `watchdog_events` table and its timestamp index if they do not
 * already exist. Idempotent; safe to run on every startup.
 *
 * @param {object} db - open database handle exposing `exec(sql)`
 *   (node:sqlite DatabaseSync).
 */
function createSchema(db) {
  // Append-only event log; created_at stores an ISO-8601 UTC timestamp.
  db.exec(`
    CREATE TABLE IF NOT EXISTS watchdog_events (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      event_type TEXT NOT NULL,
      source TEXT NOT NULL,
      status TEXT NOT NULL,
      details TEXT,
      correlation_id TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
    );
  `);
  // Newest-first reads and time-based pruning both scan by created_at.
  db.exec(`
    CREATE INDEX IF NOT EXISTS idx_watchdog_events_ts
      ON watchdog_events(created_at DESC);
  `);
}
18
+
19
// Consumed by db/watchdog/index.js when opening the watchdog database.
module.exports = {
  createSchema,
};
@@ -1,6 +1,7 @@
1
1
  const fs = require("fs");
2
2
  const path = require("path");
3
3
  const { DatabaseSync } = require("node:sqlite");
4
+ const { createSchema } = require("./schema");
4
5
 
5
6
  let db = null;
6
7
  let pruneTimer = null;
@@ -14,28 +15,6 @@ const ensureDb = () => {
14
15
  return db;
15
16
  };
16
17
 
17
- const createSchema = (database) => {
18
- database.exec(`
19
- CREATE TABLE IF NOT EXISTS webhook_requests (
20
- id INTEGER PRIMARY KEY AUTOINCREMENT,
21
- hook_name TEXT NOT NULL,
22
- method TEXT,
23
- headers TEXT,
24
- payload TEXT,
25
- payload_truncated INTEGER DEFAULT 0,
26
- payload_size INTEGER,
27
- source_ip TEXT,
28
- gateway_status INTEGER,
29
- gateway_body TEXT,
30
- created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
31
- );
32
- `);
33
- database.exec(`
34
- CREATE INDEX IF NOT EXISTS idx_webhook_requests_hook_ts
35
- ON webhook_requests(hook_name, created_at DESC);
36
- `);
37
- };
38
-
39
18
  const initWebhooksDb = ({ rootDir, pruneDays = 30 }) => {
40
19
  const dbDir = path.join(rootDir, "db");
41
20
  fs.mkdirSync(dbDir, { recursive: true });
@@ -0,0 +1,25 @@
1
/**
 * Creates the `webhook_requests` table and its lookup index if they do not
 * already exist. Idempotent; safe to run on every startup.
 *
 * @param {object} database - open database handle exposing `exec(sql)`
 *   (node:sqlite DatabaseSync).
 */
const createSchema = (database) => {
  const schemaStatements = [
    // Request log: one row per received webhook call, including the gateway's
    // response; created_at stores an ISO-8601 UTC timestamp.
    `
    CREATE TABLE IF NOT EXISTS webhook_requests (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      hook_name TEXT NOT NULL,
      method TEXT,
      headers TEXT,
      payload TEXT,
      payload_truncated INTEGER DEFAULT 0,
      payload_size INTEGER,
      source_ip TEXT,
      gateway_status INTEGER,
      gateway_body TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
    );
    `,
    // Per-hook, newest-first listing is the hot query path.
    `
    CREATE INDEX IF NOT EXISTS idx_webhook_requests_hook_ts
      ON webhook_requests(hook_name, created_at DESC);
    `,
  ];
  for (const sql of schemaStatements) {
    database.exec(sql);
  }
};
+ };
22
+
23
+ module.exports = {
24
+ createSchema,
25
+ };
@@ -1,4 +1,14 @@
1
1
  const http = require("http");
2
+ const { parsePositiveInt } = require("./utils/number");
3
+
4
// How long a Gmail push event's dedupe key is remembered: events whose key was
// seen within this window are dropped as duplicates. Overridable via
// GMAIL_PUSH_DEDUPE_WINDOW_MS; defaults to 24 hours. (parsePositiveInt
// presumably falls back to the second argument on missing/invalid input --
// see ./utils/number.)
const kGmailPushDedupeWindowMs = parsePositiveInt(
  process.env.GMAIL_PUSH_DEDUPE_WINDOW_MS,
  24 * 60 * 60 * 1000,
);
// Hard cap on remembered dedupe keys to bound memory; the oldest entries are
// evicted first. Overridable via GMAIL_PUSH_DEDUPE_MAX_ENTRIES.
const kGmailPushDedupeMaxEntries = parsePositiveInt(
  process.env.GMAIL_PUSH_DEDUPE_MAX_ENTRIES,
  50000,
);
2
12
 
3
13
  const extractBodyBuffer = (body) => {
4
14
  if (Buffer.isBuffer(body)) return body;
@@ -21,6 +31,68 @@ const parsePushEnvelope = (bodyBuffer) => {
21
31
  };
22
32
  };
23
33
 
34
/**
 * Derives a stable dedupe key for a Gmail push notification.
 *
 * Preference order: Pub/Sub message id ("msg:<id>"), then the decoded
 * payload's email + historyId ("hist:<email>:<historyId>"), then historyId
 * alone ("hist:<historyId>"). Returns "" when no key can be derived.
 *
 * @param {{ envelope: object, payload: object }} args
 * @returns {string}
 */
const createPushEventDedupeKey = ({ envelope, payload }) => {
  const messageId = String(
    envelope?.message?.messageId || envelope?.message?.message_id || "",
  ).trim();
  if (messageId !== "") return `msg:${messageId}`;

  const normalizedEmail = String(payload?.emailAddress || "")
    .trim()
    .toLowerCase();
  const historyId = String(payload?.historyId || "").trim();
  if (historyId === "") return "";
  return normalizedEmail === ""
    ? `hist:${historyId}`
    : `hist:${normalizedEmail}:${historyId}`;
};

/**
 * Creates an in-memory deduper for Gmail push events.
 *
 * Returns a predicate `shouldProcessPushEvent({ envelope, payload, receivedAt })`
 * that answers whether the event is new; it carries a `markProcessed(...)`
 * method to record an event after it has been handled successfully. Events
 * with no derivable key are always allowed through.
 *
 * @param {{ ttlMs?: number, maxEntries?: number }} [options]
 */
const createGmailPushEventDeduper = ({
  ttlMs = kGmailPushDedupeWindowMs,
  maxEntries = kGmailPushDedupeMaxEntries,
} = {}) => {
  // dedupe key -> timestamp at which it was recorded (insertion-ordered).
  const recentKeys = new Map();

  // Normalize caller-supplied timestamps; anything non-finite means "now".
  const toTimestamp = (receivedAt) =>
    Number.isFinite(receivedAt) ? receivedAt : Date.now();

  // Drop entries older than the TTL, then enforce the size cap. Map insertion
  // order tracks arrival time, so the TTL sweep can stop at the first entry
  // that is still fresh; cap eviction removes oldest-first.
  const evictStale = (nowMs) => {
    const oldestAllowed = nowMs - ttlMs;
    for (const [key, seenAt] of recentKeys) {
      if (seenAt > oldestAllowed) break;
      recentKeys.delete(key);
    }
    while (recentKeys.size > maxEntries) {
      const oldestKey = recentKeys.keys().next().value;
      if (!oldestKey) break;
      recentKeys.delete(oldestKey);
    }
  };

  const shouldProcessPushEvent = ({ envelope, payload, receivedAt = Date.now() }) => {
    evictStale(toTimestamp(receivedAt));
    const eventKey = createPushEventDedupeKey({ envelope, payload });
    return eventKey === "" || !recentKeys.has(eventKey);
  };

  shouldProcessPushEvent.markProcessed = ({
    envelope,
    payload,
    receivedAt = Date.now(),
  }) => {
    const seenAt = toTimestamp(receivedAt);
    evictStale(seenAt);
    const eventKey = createPushEventDedupeKey({ envelope, payload });
    if (eventKey !== "") recentKeys.set(eventKey, seenAt);
    return true;
  };

  return shouldProcessPushEvent;
};
90
+
91
/**
 * True when an upstream proxy response status is in the 2xx range.
 * Accepts numbers or numeric strings; missing or unparsable values count
 * as unsuccessful.
 *
 * @param {number|string} statusCode
 * @returns {boolean}
 */
function isSuccessfulProxyStatus(statusCode) {
  const status = Number.parseInt(String(statusCode || 0), 10);
  return !(Number.isNaN(status) || status < 200 || status >= 300);
}
95
+
24
96
  const proxyPushToServe = async ({
25
97
  port,
26
98
  bodyBuffer,
@@ -58,6 +130,8 @@ const createGmailPushHandler = ({
58
130
  resolvePushToken,
59
131
  resolveTargetByEmail,
60
132
  markPushReceived,
133
+ shouldProcessPushEvent = createGmailPushEventDeduper(),
134
+ proxyPushToServeImpl = proxyPushToServe,
61
135
  }) =>
62
136
  async (req, res) => {
63
137
  try {
@@ -68,11 +142,24 @@ const createGmailPushHandler = ({
68
142
  }
69
143
 
70
144
  const bodyBuffer = extractBodyBuffer(req.body);
71
- const { payload } = parsePushEnvelope(bodyBuffer);
145
+ const { envelope, payload } = parsePushEnvelope(bodyBuffer);
72
146
  const email = String(payload?.emailAddress || "").trim().toLowerCase();
73
147
  if (!email) {
74
148
  return res.status(200).json({ ok: true, ignored: true, reason: "missing_email" });
75
149
  }
150
+ if (
151
+ !shouldProcessPushEvent({
152
+ envelope,
153
+ payload,
154
+ receivedAt: Date.now(),
155
+ })
156
+ ) {
157
+ return res.status(200).json({
158
+ ok: true,
159
+ ignored: true,
160
+ reason: "duplicate_event",
161
+ });
162
+ }
76
163
 
77
164
  const target = resolveTargetByEmail?.(email);
78
165
  if (!target?.port) {
@@ -80,15 +167,22 @@ const createGmailPushHandler = ({
80
167
  }
81
168
 
82
169
  try {
83
- const proxied = await proxyPushToServe({
170
+ const proxied = await proxyPushToServeImpl({
84
171
  port: target.port,
85
172
  bodyBuffer,
86
173
  headers: req.headers || {},
87
174
  });
88
- await markPushReceived?.({
89
- accountId: target.accountId,
90
- at: Date.now(),
91
- });
175
+ if (isSuccessfulProxyStatus(proxied.statusCode)) {
176
+ shouldProcessPushEvent.markProcessed?.({
177
+ envelope,
178
+ payload,
179
+ receivedAt: Date.now(),
180
+ });
181
+ await markPushReceived?.({
182
+ accountId: target.accountId,
183
+ at: Date.now(),
184
+ });
185
+ }
92
186
  return res
93
187
  .status(proxied.statusCode)
94
188
  .send(proxied.body || "");
@@ -106,4 +200,6 @@ const createGmailPushHandler = ({
106
200
 
107
201
  module.exports = {
108
202
  createGmailPushHandler,
203
+ createGmailPushEventDeduper,
204
+ createPushEventDedupeKey,
109
205
  };
@@ -14,29 +14,14 @@ const {
14
14
  allocateServePort,
15
15
  } = require("./google-state");
16
16
  const { createGmailServeManager } = require("./gmail-serve");
17
+ const { parseJsonObjectFromNoisyOutput, parseJsonSafe } = require("./utils/json");
17
18
  const { createWebhook } = require("./webhooks");
18
-
19
- const quoteShellArg = (value) =>
20
- `"${String(value || "").replace(/(["\\$`])/g, "\\$1")}"`;
21
-
22
- const parseJsonMaybe = (raw) => {
23
- const text = String(raw || "").trim();
24
- if (!text) return null;
25
- try {
26
- return JSON.parse(text);
27
- } catch {}
28
- const firstBrace = text.indexOf("{");
29
- const lastBrace = text.lastIndexOf("}");
30
- if (firstBrace >= 0 && lastBrace > firstBrace) {
31
- try {
32
- return JSON.parse(text.slice(firstBrace, lastBrace + 1));
33
- } catch {}
34
- }
35
- return null;
36
- };
19
+ const { quoteShellArg } = require("./utils/shell");
37
20
 
38
21
  const parseExpirationFromOutput = (raw) => {
39
- const parsed = parseJsonMaybe(raw);
22
+ const parsed =
23
+ parseJsonSafe(raw, null, { trim: true }) ||
24
+ parseJsonObjectFromNoisyOutput(raw);
40
25
  if (parsed?.expiration) {
41
26
  const numeric = Number.parseInt(String(parsed.expiration), 10);
42
27
  if (Number.isFinite(numeric) && numeric > 0) return numeric;
@@ -5,6 +5,9 @@ const {
5
5
  kOnboardingModelProviders,
6
6
  gogClientCredentialsPath,
7
7
  } = require("./constants");
8
+ const { isTruthyFlag } = require("./utils/boolean");
9
+ const { parseJsonObjectFromNoisyOutput } = require("./utils/json");
10
+ const { normalizeIp } = require("./utils/network");
8
11
 
9
12
  const normalizeOpenclawVersion = (rawVersion) => {
10
13
  if (!rawVersion) return null;
@@ -32,19 +35,7 @@ const compareVersionParts = (a, b) => {
32
35
  return 0;
33
36
  };
34
37
 
35
- const parseJsonFromNoisyOutput = (raw) => {
36
- const text = String(raw || "");
37
- const firstBrace = text.indexOf("{");
38
- const lastBrace = text.lastIndexOf("}");
39
- if (firstBrace === -1 || lastBrace === -1 || lastBrace <= firstBrace) {
40
- return null;
41
- }
42
- try {
43
- return JSON.parse(text.slice(firstBrace, lastBrace + 1));
44
- } catch {
45
- return null;
46
- }
47
- };
38
+ const parseJsonFromNoisyOutput = (raw) => parseJsonObjectFromNoisyOutput(raw);
48
39
 
49
40
  const parseJwtPayload = (token) => {
50
41
  try {
@@ -63,14 +54,7 @@ const getCodexAccountId = (accessToken) => {
63
54
  return typeof accountId === "string" && accountId ? accountId : null;
64
55
  };
65
56
 
66
- const normalizeIp = (ip) => String(ip || "").replace(/^::ffff:/, "");
67
-
68
- const isTruthyEnvFlag = (value) =>
69
- ["1", "true", "yes", "on"].includes(
70
- String(value || "")
71
- .trim()
72
- .toLowerCase(),
73
- );
57
+ const isTruthyEnvFlag = (value) => isTruthyFlag(value);
74
58
  const isDebugEnabled = () =>
75
59
  isTruthyEnvFlag(process.env.ALPHACLAW_DEBUG) ||
76
60
  isTruthyEnvFlag(process.env.DEBUG);