@naisys/hub 3.0.0-beta.38 → 3.0.0-beta.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -46,7 +46,9 @@ The hub owns mail, context logs and [attachments](../../docs/011-mail-attachment
46
46
 
47
47
  - `Authorization: Bearer` header auth with a rotatable access key
48
48
  - Hardened spawning: no shell interpretation, timeouts on `execFileSync`
49
- - API keys read from headers, not query params
49
+ - API keys stored as SHA-256 hashes; per-user keys read from headers, not query params
50
+ - Dynamic runtime API keys minted per agent and re-issued on hub restart
51
+ - Redaction service scrubs sensitive variables and runtime keys from logs and mail before they hit the DB
50
52
  - Hub socket served at `/hub` for reverse-proxy friendliness (TLS terminated at the proxy)
51
53
 
52
54
  ### Deployment
@@ -1,29 +1,7 @@
1
- import { hashToken } from "@naisys/common-node";
2
1
  import { AgentPeekRequestSchema, AgentRunCommandRequestSchema, AgentRunPauseRequestSchema, AgentStartInboundSchema, AgentStopRequestSchema, HubEvents, } from "@naisys/hub-protocol";
3
- import { randomBytes } from "crypto";
4
2
  /** Handles agent_start requests by routing them to the least-loaded eligible host */
5
- export function createHubAgentService(naisysServer, { hubDb }, logService, heartbeatService, sendMailService, hostRegistrar) {
6
- /**
7
- * Mint a fresh runtime API key for a user, rotating any prior key. Plaintext
8
- * is returned only here and travels once over the AGENT_START message; the
9
- * DB only stores the hash. Keys persist across hub restarts/crashes/updates;
10
- * revocation lives on the user disable/archive/delete paths and on graceful
11
- * AGENT_STOP.
12
- */
13
- async function issueRuntimeApiKey(userId) {
14
- const token = randomBytes(32).toString("hex");
15
- await hubDb.users.update({
16
- where: { id: userId },
17
- data: { api_key_hash: hashToken(token) },
18
- });
19
- return token;
20
- }
21
- async function revokeRuntimeApiKey(userId) {
22
- await hubDb.users.update({
23
- where: { id: userId },
24
- data: { api_key_hash: null },
25
- });
26
- }
3
+ export function createHubAgentService(naisysServer, { hubDb }, logService, heartbeatService, sendMailService, hostRegistrar, runtimeKeyService) {
4
+ const { issueRuntimeApiKey, revokeRuntimeApiKey } = runtimeKeyService;
27
5
  /** Find the least-loaded eligible host for a given user */
28
6
  async function findBestHost(startUserId) {
29
7
  // Look up which hosts this user is assigned to
@@ -93,22 +71,102 @@ export function createHubAgentService(naisysServer, { hubDb }, logService, heart
93
71
  return { kind: "go", bestHostId };
94
72
  }
95
73
  /**
96
- * Issue a runtime key and send AGENT_START. Stranded keys from a failed
97
- * send or failed response stay in the DB until the next AGENT_START for
98
- * that user rotates them, or the user is disabled/archived/deleted — the
99
- * agent process that would have used the key never started, so there's
100
- * nobody to authenticate with it in the meantime.
74
+ * Mint and ship a key with AGENT_START so the agent's authenticated from
75
+ * spawn time, avoiding the one-RTT window heartbeat-only would create.
76
+ * Heartbeat-driven reissue covers later hash mismatches.
77
+ *
78
+ * The run_session row is written up front with model_name="" so the
79
+ * agent's command loop (which starts before the host acks) can FK-safely
80
+ * write logs/costs. model_name is patched in from the ack; the session
81
+ * is pushed to supervisors and runId/sessionId returned to the caller
82
+ * only on success. Any failure rolls back the placeholder.
101
83
  */
102
84
  async function dispatchAgentStart(args) {
103
85
  const { bestHostId, payload, onResponse } = args;
104
86
  const startUserId = payload.startUserId;
105
87
  const runtimeApiKey = await issueRuntimeApiKey(startUserId);
106
- const sent = naisysServer.sendMessage(bestHostId, HubEvents.AGENT_START, { ...payload, runtimeApiKey }, (response) => {
107
- if (response.success) {
108
- heartbeatService.addStartedAgent(bestHostId, startUserId);
88
+ const lastRun = await hubDb.run_session.findFirst({
89
+ select: { run_id: true },
90
+ orderBy: { run_id: "desc" },
91
+ });
92
+ const runId = lastRun ? lastRun.run_id + 1 : 1;
93
+ const sessionId = 1;
94
+ const subagentId = 0;
95
+ const now = new Date().toISOString();
96
+ const rowWhere = {
97
+ user_id: startUserId,
98
+ run_id: runId,
99
+ subagent_id: subagentId,
100
+ session_id: sessionId,
101
+ };
102
+ await hubDb.run_session.create({
103
+ data: {
104
+ ...rowWhere,
105
+ host_id: bestHostId,
106
+ model_name: "",
107
+ created_at: now,
108
+ last_active: now,
109
+ },
110
+ });
111
+ async function rollbackPlaceholder(reason) {
112
+ try {
113
+ await hubDb.run_session.deleteMany({ where: rowWhere });
114
+ }
115
+ catch (err) {
116
+ logService.error(`[Hub:Agents] Failed to roll back run_session row for run ${runId} (${reason}): ${err}`);
117
+ }
118
+ }
119
+ const sent = naisysServer.sendMessage(bestHostId, HubEvents.AGENT_START, { ...payload, runtimeApiKey, runId, sessionId }, async (response) => {
120
+ if (response.success && response.modelName) {
121
+ const modelName = response.modelName;
122
+ try {
123
+ await hubDb.run_session.updateMany({
124
+ where: rowWhere,
125
+ data: { model_name: modelName },
126
+ });
127
+ heartbeatService.addStartedAgent(bestHostId, startUserId);
128
+ naisysServer.broadcastToSupervisors(HubEvents.SESSION_PUSH, {
129
+ session: {
130
+ userId: startUserId,
131
+ runId,
132
+ sessionId,
133
+ modelName,
134
+ createdAt: now,
135
+ lastActive: now,
136
+ latestLogId: 0,
137
+ totalLines: 0,
138
+ totalCost: 0,
139
+ },
140
+ });
141
+ onResponse({ ...response, runId, sessionId });
142
+ }
143
+ catch (err) {
144
+ logService.error(`[Hub:Agents] Failed to finalize run_session row for run ${runId}: ${err}`);
145
+ await rollbackPlaceholder("update failed");
146
+ onResponse({
147
+ success: false,
148
+ error: `Failed to finalize run_session row: ${err}`,
149
+ hostname: response.hostname,
150
+ });
151
+ }
152
+ }
153
+ else {
154
+ if (response.success && !response.modelName) {
155
+ logService.error(`[Hub:Agents] Host ${bestHostId} acked agent_start without modelName for user ${startUserId}; treating as failure`);
156
+ }
157
+ await rollbackPlaceholder(response.success ? "missing modelName" : "host failure");
158
+ onResponse(response.success
159
+ ? {
160
+ success: false,
161
+ error: "Host did not return modelName",
162
+ hostname: response.hostname,
163
+ }
164
+ : response);
109
165
  }
110
- onResponse(response);
111
166
  });
167
+ if (!sent) {
168
+ await rollbackPlaceholder("send failed");
169
+ }
112
170
  return { sent };
113
171
  }
114
172
  /** Try to start an agent on the best available host (fire-and-forget) */
@@ -256,6 +314,7 @@ export function createHubAgentService(naisysServer, { hubDb }, logService, heart
256
314
  const sent = naisysServer.sendMessage(targetHostId, event, {
257
315
  userId: parsed.userId,
258
316
  runId: parsed.runId,
317
+ subagentId: parsed.subagentId,
259
318
  sessionId: parsed.sessionId,
260
319
  sourceHostId: hostId,
261
320
  }, (response) => {
@@ -300,6 +359,7 @@ export function createHubAgentService(naisysServer, { hubDb }, logService, heart
300
359
  const sent = naisysServer.sendMessage(targetHostId, HubEvents.AGENT_RUN_COMMAND, {
301
360
  userId: parsed.userId,
302
361
  runId: parsed.runId,
362
+ subagentId: parsed.subagentId,
303
363
  sessionId: parsed.sessionId,
304
364
  command: parsed.command,
305
365
  sourceHostId: hostId,
@@ -1,4 +1,4 @@
1
- import { buildClientConfig, builtInImageModels, builtInLlmModels, } from "@naisys/common";
1
+ import { buildClientConfig, builtInImageModels, builtInLlmModels, OPENAI_CODEX_ACCESS_TOKEN_VAR, OPENAI_CODEX_REFRESH_TOKEN_VAR, } from "@naisys/common";
2
2
  import { HubEvents } from "@naisys/hub-protocol";
3
3
  import dotenv from "dotenv";
4
4
  /** Pushes the global config to NAISYS instances when they connect or when variables change */
@@ -7,10 +7,13 @@ export async function createHubConfigService(naisysServer, { hubDb }, logService
7
7
  success: false,
8
8
  error: "Not yet loaded",
9
9
  };
10
- // API key variable names referenced by built-in models — always sensitive
10
+ // API key variable names referenced by built-in models — always sensitive.
11
+ // EXPIRES_AT is a timestamp, not a credential — keep it out so nearby
12
+ // millisecond values in logs don't get rewritten as redactions.
11
13
  const sensitiveKeys = new Set([...builtInLlmModels, ...builtInImageModels]
12
14
  .map((m) => m.apiKeyVar)
13
- .filter(Boolean));
15
+ .filter(Boolean)
16
+ .concat([OPENAI_CODEX_ACCESS_TOKEN_VAR, OPENAI_CODEX_REFRESH_TOKEN_VAR]));
14
17
  // Seed DB from .env on first run
15
18
  const existing = await hubDb.variables.findMany();
16
19
  if (existing.length > 0) {
@@ -40,6 +43,7 @@ export async function createHubConfigService(naisysServer, { hubDb }, logService
40
43
  { key: "SPEND_LIMIT_DOLLARS" },
41
44
  { key: "SPEND_LIMIT_HOURS" },
42
45
  { key: "TARGET_VERSION" },
46
+ { key: "MAIL_ENABLED" },
43
47
  ]);
44
48
  /** Read variables from DB and build a ConfigResponse */
45
49
  async function buildConfigPayload() {
@@ -92,6 +96,7 @@ export async function createHubConfigService(naisysServer, { hubDb }, logService
92
96
  await buildConfigPayload();
93
97
  return {
94
98
  getConfig: () => cachedConfig,
99
+ broadcastConfig,
95
100
  };
96
101
  }
97
102
  /** Create variable placeholders if they don't already exist.
@@ -3,9 +3,10 @@ import { CostWriteRequestSchema, HubEvents, } from "@naisys/hub-protocol";
3
3
  const SPEND_LIMIT_CHECK_INTERVAL_MS = 10_000;
4
4
  /** Handles cost_write events from NAISYS instances (fire-and-forget) */
5
5
  export function createHubCostService(naisysServer, { hubDb }, logService, heartbeatService, configService) {
6
- // Track which users have been suspended due to spend limit overrun
7
- const suspendedByGlobal = new Set();
8
- const suspendedByAgent = new Set();
6
+ // Suspended users' original suspension reasons. Stored so a defensive
7
+ // re-send (when a suspended user keeps writing costs) can use the same text.
8
+ const suspendedByGlobal = new Map();
9
+ const suspendedByAgent = new Map();
9
10
  naisysServer.registerEvent(HubEvents.COST_WRITE, async (hostId, data, ack) => {
10
11
  try {
11
12
  const parsed = CostWriteRequestSchema.parse(data);
@@ -14,10 +15,14 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
14
15
  const costPushMap = new Map();
15
16
  const userCostTotals = new Map();
16
17
  for (const entry of parsed.entries) {
18
+ const subagentId = entry.subagentId ?? 0;
19
+ // Wire format: undefined for parent (subagent_id 0 in DB), number otherwise
20
+ const wireSubagentId = subagentId === 0 ? undefined : subagentId;
17
21
  await hubDb.costs.create({
18
22
  data: {
19
23
  user_id: entry.userId,
20
24
  run_id: entry.runId,
25
+ subagent_id: subagentId,
21
26
  session_id: entry.sessionId,
22
27
  host_id: hostId,
23
28
  source: entry.source,
@@ -34,13 +39,14 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
34
39
  where: {
35
40
  user_id: entry.userId,
36
41
  run_id: entry.runId,
42
+ subagent_id: subagentId,
37
43
  session_id: entry.sessionId,
38
44
  },
39
45
  data: {
40
46
  total_cost: { increment: entry.cost },
41
47
  },
42
48
  });
43
- const key = `${entry.userId}:${entry.runId}:${entry.sessionId}`;
49
+ const key = `${entry.userId}:${entry.runId}:${subagentId}:${entry.sessionId}`;
44
50
  const existing = costPushMap.get(key);
45
51
  if (existing) {
46
52
  existing.costDelta += entry.cost;
@@ -49,6 +55,7 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
49
55
  costPushMap.set(key, {
50
56
  userId: entry.userId,
51
57
  runId: entry.runId,
58
+ subagentId: wireSubagentId,
52
59
  sessionId: entry.sessionId,
53
60
  costDelta: entry.cost,
54
61
  });
@@ -61,10 +68,12 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
61
68
  entries: Array.from(costPushMap.values()),
62
69
  });
63
70
  }
64
- // Re-send cost_control to any suspended users still writing costs
71
+ // Re-send cost_control to suspended users still writing costs.
72
+ // Per-agent reason wins: a per-agent limit overrides the global one.
65
73
  for (const userId of userCostTotals.keys()) {
66
- if (suspendedByGlobal.has(userId) || suspendedByAgent.has(userId)) {
67
- sendCostControl(userId, false, "Spend limit exceeded");
74
+ const reason = suspendedByAgent.get(userId) ?? suspendedByGlobal.get(userId);
75
+ if (reason !== undefined) {
76
+ sendCostControl(userId, false, reason);
68
77
  }
69
78
  }
70
79
  // Decrement budget_left and return updated values
@@ -83,9 +92,9 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
83
92
  async function checkSpendLimits(candidateUserIds) {
84
93
  const activeUserIds = heartbeatService.getActiveUserIds();
85
94
  const usersToCheck = new Set(activeUserIds);
86
- for (const userId of suspendedByGlobal)
95
+ for (const userId of suspendedByGlobal.keys())
87
96
  usersToCheck.add(userId);
88
- for (const userId of suspendedByAgent)
97
+ for (const userId of suspendedByAgent.keys())
89
98
  usersToCheck.add(userId);
90
99
  if (candidateUserIds) {
91
100
  for (const userId of candidateUserIds)
@@ -209,7 +218,7 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
209
218
  const reason = `Global spend limit of $${spendLimit} reached (total: $${totalCost.toFixed(2)})`;
210
219
  logService.log(`[Hub:Costs] Suspending user ${userId} (global limit): ${reason}`);
211
220
  sendCostControl(userId, false, reason);
212
- suspendedByGlobal.add(userId);
221
+ suspendedByGlobal.set(userId, reason);
213
222
  await setCostSuspendedReason(hubDb, userId, reason);
214
223
  }
215
224
  else if (!isOverLimit && wasSuspended) {
@@ -232,7 +241,7 @@ export function createHubCostService(naisysServer, { hubDb }, logService, heartb
232
241
  const reason = `Spend limit of $${spendLimit} reached (current: $${periodCost.toFixed(2)})`;
233
242
  logService.log(`[Hub:Costs] Suspending user ${userId}: ${reason}`);
234
243
  sendCostControl(userId, false, reason);
235
- suspendedByAgent.add(userId);
244
+ suspendedByAgent.set(userId, reason);
236
245
  await setCostSuspendedReason(hubDb, userId, reason);
237
246
  }
238
247
  else if (!isOverLimit && wasSuspended) {
@@ -0,0 +1,263 @@
1
+ import { buildDefaultAgentConfig } from "@naisys/common";
2
+ import { HubEvents } from "@naisys/hub-protocol";
3
+ import { describe, expect, test, vi } from "vitest";
4
+ import { createHubCostService } from "./hubCostService.js";
5
+ function createServerHarness() {
6
+ const handlers = new Map();
7
+ const server = {
8
+ registerEvent: vi.fn((event, handler) => {
9
+ handlers.set(event, handler);
10
+ }),
11
+ broadcastToSupervisors: vi.fn(),
12
+ sendMessage: vi.fn(() => true),
13
+ };
14
+ async function emitCostWrite(hostId, data) {
15
+ const handler = handlers.get(HubEvents.COST_WRITE);
16
+ if (!handler)
17
+ throw new Error("COST_WRITE handler was not registered");
18
+ let ackResponse;
19
+ await handler(hostId, data, (response) => {
20
+ ackResponse = response;
21
+ });
22
+ return ackResponse;
23
+ }
24
+ return { server, emitCostWrite };
25
+ }
26
+ function createHubDb() {
27
+ const budgetLeft = new Map([[1, 5]]);
28
+ const hubDb = {
29
+ costs: {
30
+ create: vi.fn(() => Promise.resolve({})),
31
+ aggregate: vi.fn(() => Promise.resolve({ _sum: { cost: 0 } })),
32
+ },
33
+ run_session: {
34
+ updateMany: vi.fn(() => Promise.resolve({})),
35
+ },
36
+ users: {
37
+ findMany: vi.fn(() => Promise.resolve([])),
38
+ },
39
+ user_notifications: {
40
+ findUnique: vi.fn(({ where }) => {
41
+ const value = budgetLeft.get(where.user_id);
42
+ return Promise.resolve(value === undefined ? null : { budget_left: value });
43
+ }),
44
+ update: vi.fn(({ where, data, }) => {
45
+ budgetLeft.set(where.user_id, data.budget_left);
46
+ return Promise.resolve({ budget_left: data.budget_left });
47
+ }),
48
+ updateMany: vi.fn(() => Promise.resolve({})),
49
+ },
50
+ };
51
+ return { hubDb, budgetLeft };
52
+ }
53
+ function createLogger() {
54
+ return {
55
+ log: vi.fn(),
56
+ error: vi.fn(),
57
+ disableConsole: vi.fn(),
58
+ };
59
+ }
60
+ function createHeartbeatService(activeUserIds, hostIdsByUser = new Map([[1, [101]]])) {
61
+ return {
62
+ getActiveUserIds: vi.fn(() => activeUserIds),
63
+ findHostsForAgent: vi.fn((userId) => hostIdsByUser.get(userId) ?? []),
64
+ };
65
+ }
66
+ function createConfigService(config) {
67
+ return {
68
+ getConfig: vi.fn(() => ({ config })),
69
+ };
70
+ }
71
+ function userRow(id, configOverrides, spendLimitResetAt) {
72
+ return {
73
+ id,
74
+ config: JSON.stringify({
75
+ ...buildDefaultAgentConfig(`agent-${id}`),
76
+ ...configOverrides,
77
+ }),
78
+ user_notifications: {
79
+ spend_limit_reset_at: spendLimitResetAt,
80
+ },
81
+ };
82
+ }
83
+ describe("hubCostService", () => {
84
+ test("persists subagent cost entries, pushes scoped deltas, and decrements budget", async () => {
85
+ const { server, emitCostWrite } = createServerHarness();
86
+ const { hubDb } = createHubDb();
87
+ const logger = createLogger();
88
+ const heartbeatService = createHeartbeatService([]);
89
+ const configService = createConfigService({});
90
+ const service = createHubCostService(server, { hubDb }, logger, heartbeatService, configService);
91
+ const ack = await emitCostWrite(42, {
92
+ entries: [
93
+ {
94
+ userId: 1,
95
+ runId: 7,
96
+ subagentId: -1,
97
+ sessionId: 1,
98
+ source: "genimg",
99
+ model: "mock-image",
100
+ cost: 0.5,
101
+ inputTokens: 0,
102
+ outputTokens: 0,
103
+ cacheWriteTokens: 0,
104
+ cacheReadTokens: 0,
105
+ },
106
+ {
107
+ userId: 1,
108
+ runId: 7,
109
+ subagentId: -1,
110
+ sessionId: 1,
111
+ source: "genimg",
112
+ model: "mock-image",
113
+ cost: 0.25,
114
+ inputTokens: 0,
115
+ outputTokens: 0,
116
+ cacheWriteTokens: 0,
117
+ cacheReadTokens: 0,
118
+ },
119
+ ],
120
+ });
121
+ expect(hubDb.costs.create).toHaveBeenCalledWith({
122
+ data: expect.objectContaining({
123
+ user_id: 1,
124
+ run_id: 7,
125
+ subagent_id: -1,
126
+ session_id: 1,
127
+ host_id: 42,
128
+ cost: 0.5,
129
+ }),
130
+ });
131
+ expect(hubDb.run_session.updateMany).toHaveBeenCalledWith({
132
+ where: {
133
+ user_id: 1,
134
+ run_id: 7,
135
+ subagent_id: -1,
136
+ session_id: 1,
137
+ },
138
+ data: {
139
+ total_cost: { increment: 0.5 },
140
+ },
141
+ });
142
+ expect(server.broadcastToSupervisors).toHaveBeenCalledWith(HubEvents.COST_PUSH, {
143
+ entries: [
144
+ {
145
+ userId: 1,
146
+ runId: 7,
147
+ subagentId: -1,
148
+ sessionId: 1,
149
+ costDelta: 0.75,
150
+ },
151
+ ],
152
+ });
153
+ expect(hubDb.user_notifications.update).toHaveBeenCalledWith({
154
+ where: { user_id: 1 },
155
+ data: { budget_left: 4.25 },
156
+ });
157
+ expect(ack).toEqual({ budgets: [{ userId: 1, budgetLeft: 4.25 }] });
158
+ service.cleanup();
159
+ });
160
+ test("normalizes parent-agent subagentId to undefined on the wire (DB row stays 0)", async () => {
161
+ const { server, emitCostWrite } = createServerHarness();
162
+ const { hubDb } = createHubDb();
163
+ const logger = createLogger();
164
+ const heartbeatService = createHeartbeatService([]);
165
+ const configService = createConfigService({});
166
+ const service = createHubCostService(server, { hubDb }, logger, heartbeatService, configService);
167
+ await emitCostWrite(42, {
168
+ entries: [
169
+ {
170
+ userId: 1,
171
+ runId: 7,
172
+ // subagentId omitted — represents the parent agent
173
+ sessionId: 1,
174
+ source: "console",
175
+ model: "mock",
176
+ cost: 0.1,
177
+ inputTokens: 0,
178
+ outputTokens: 0,
179
+ cacheWriteTokens: 0,
180
+ cacheReadTokens: 0,
181
+ },
182
+ ],
183
+ });
184
+ expect(hubDb.costs.create).toHaveBeenCalledWith({
185
+ data: expect.objectContaining({ subagent_id: 0 }),
186
+ });
187
+ expect(server.broadcastToSupervisors).toHaveBeenCalledWith(HubEvents.COST_PUSH, {
188
+ entries: [
189
+ {
190
+ userId: 1,
191
+ runId: 7,
192
+ subagentId: undefined,
193
+ sessionId: 1,
194
+ costDelta: 0.1,
195
+ },
196
+ ],
197
+ });
198
+ service.cleanup();
199
+ });
200
+ test("suspends, re-sends, and resumes per-agent cost control", async () => {
201
+ const { server, emitCostWrite } = createServerHarness();
202
+ const { hubDb } = createHubDb();
203
+ const logger = createLogger();
204
+ const heartbeatService = createHeartbeatService([1]);
205
+ const configService = createConfigService({});
206
+ vi.mocked(hubDb.users.findMany).mockResolvedValue([
207
+ userRow(1, { spendLimitDollars: 1 }),
208
+ ]);
209
+ vi.mocked(hubDb.costs.aggregate)
210
+ .mockResolvedValueOnce({ _sum: { cost: 1.25 } })
211
+ .mockResolvedValueOnce({ _sum: { cost: 0.25 } });
212
+ const service = createHubCostService(server, { hubDb }, logger, heartbeatService, configService);
213
+ await service.checkSpendLimits();
214
+ const suspendReason = "Spend limit of $1 reached (current: $1.25)";
215
+ expect(server.sendMessage).toHaveBeenCalledWith(101, HubEvents.COST_CONTROL, {
216
+ userId: 1,
217
+ enabled: false,
218
+ reason: suspendReason,
219
+ });
220
+ expect(service.isUserSpendSuspended(1)).toBe(true);
221
+ expect(hubDb.user_notifications.updateMany).toHaveBeenCalledWith({
222
+ where: { user_id: 1 },
223
+ data: { cost_suspended_reason: suspendReason },
224
+ });
225
+ vi.mocked(server.sendMessage).mockClear();
226
+ await emitCostWrite(42, {
227
+ entries: [
228
+ {
229
+ userId: 1,
230
+ runId: 7,
231
+ subagentId: -1,
232
+ sessionId: 1,
233
+ source: "genimg",
234
+ model: "mock-image",
235
+ cost: 0.1,
236
+ inputTokens: 0,
237
+ outputTokens: 0,
238
+ cacheWriteTokens: 0,
239
+ cacheReadTokens: 0,
240
+ },
241
+ ],
242
+ });
243
+ expect(server.sendMessage).toHaveBeenCalledWith(101, HubEvents.COST_CONTROL, {
244
+ userId: 1,
245
+ enabled: false,
246
+ reason: suspendReason,
247
+ });
248
+ vi.mocked(server.sendMessage).mockClear();
249
+ await service.checkSpendLimits();
250
+ expect(server.sendMessage).toHaveBeenCalledWith(101, HubEvents.COST_CONTROL, {
251
+ userId: 1,
252
+ enabled: true,
253
+ reason: "Spend limit period reset (current: $0.25, limit: $1)",
254
+ });
255
+ expect(service.isUserSpendSuspended(1)).toBe(false);
256
+ expect(hubDb.user_notifications.updateMany).toHaveBeenCalledWith({
257
+ where: { user_id: 1 },
258
+ data: { cost_suspended_reason: null },
259
+ });
260
+ service.cleanup();
261
+ });
262
+ });
263
+ //# sourceMappingURL=hubCostService.test.js.map