@checkstack/backend 0.4.2 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,45 @@
  # @checkstack/backend

+ ## 0.4.4
+
+ ### Patch Changes
+
+ - 18fa8e3: Add notification suppression toggle for maintenance windows
+
+ **New Feature:** When creating or editing a maintenance window, you can now enable "Suppress health notifications" to prevent health status change notifications from being sent for affected systems while the maintenance is active (in_progress status). This is useful for planned downtime where health alerts are expected and would otherwise create noise.
+
+ **Changes:**
+
+ - Added `suppressNotifications` field to maintenance schema
+ - Added new service-to-service API `hasActiveMaintenanceWithSuppression`
+ - Healthcheck queue executor now checks for suppression before sending notifications
+ - MaintenanceEditor UI includes new toggle checkbox
+
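A minimal TypeScript sketch of the behaviour described above: a notification path that consults the suppression check before sending. Only `hasActiveMaintenanceWithSuppression` and the `suppressNotifications` idea come from this changelog entry; the interfaces and function names are illustrative assumptions, not code from this package.

```ts
// Hypothetical sketch, not the actual executor code. Assumes a maintenance
// client exposing the new service-to-service API and a generic notifier.
interface MaintenanceClient {
  // True when an in_progress maintenance window covers the system and has
  // "Suppress health notifications" (the suppressNotifications field) enabled.
  hasActiveMaintenanceWithSuppression(systemId: string): Promise<boolean>;
}

interface Notifier {
  send(systemId: string, message: string): Promise<void>;
}

async function notifyHealthChange(
  maintenance: MaintenanceClient,
  notifier: Notifier,
  systemId: string,
  message: string,
): Promise<void> {
  // Skip the notification entirely while a suppressing window is active.
  if (await maintenance.hasActiveMaintenanceWithSuppression(systemId)) {
    return;
  }
  await notifier.send(systemId, message);
}
```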
+ **Bug Fix:** Fixed migration system to correctly set PostgreSQL search_path when running plugin migrations. Previously, migrations could fail with "relation does not exist" errors because the schema context wasn't properly set.
+
+ - db9b37c: Fixed 500 errors on healthcheck `getHistory` and `getDetailedHistory` endpoints caused by the scoped database proxy not handling Drizzle's `$count()` utility method.
+
+ **Root Cause:** The `$count()` method returns a Promise directly (not a query builder), bypassing the chain-replay mechanism used for schema isolation. This caused queries to run without the proper `search_path`, resulting in database errors.
+
+ **Changes:**
+
+ - Added explicit `$count` method handling in `scoped-db.ts` to wrap count operations in transactions with proper schema isolation
+ - Wrapped `$count` return values with `Number()` in healthcheck service to handle BigInt serialization
+
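A usage-level sketch of the fix described above, assuming a Drizzle version that provides `db.$count()`. The table definition is invented for the example; only the `$count()` call and the `Number()` wrapping reflect the changelog entry.

```ts
// Illustrative only: the table below is made up for the example.
import { eq } from "drizzle-orm";
import type { NodePgDatabase } from "drizzle-orm/node-postgres";
import { pgTable, serial, text } from "drizzle-orm/pg-core";

const healthcheckRuns = pgTable("healthcheck_runs", {
  id: serial("id").primaryKey(),
  systemId: text("system_id").notNull(),
});

async function countRuns(db: NodePgDatabase, systemId: string): Promise<number> {
  // db.$count() resolves to a count; wrapping it with Number() keeps the value
  // JSON-serializable even if the driver hands back a BigInt-like result.
  const total = await db.$count(
    healthcheckRuns,
    eq(healthcheckRuns.systemId, systemId),
  );
  return Number(total);
}
```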
+ ## 0.4.3
+
+ ### Patch Changes
+
+ - Updated dependencies [83557c7]
+ - Updated dependencies [83557c7]
+ - @checkstack/backend-api@0.4.0
+ - @checkstack/common@0.4.0
+ - @checkstack/queue-api@0.1.2
+ - @checkstack/signal-backend@0.1.4
+ - @checkstack/api-docs-common@0.1.2
+ - @checkstack/auth-common@0.5.1
+ - @checkstack/signal-common@0.1.2
+
  ## 0.4.2

  ### Patch Changes
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@checkstack/backend",
- "version": "0.4.2",
+ "version": "0.4.4",
  "type": "module",
  "scripts": {
  "dev": "bun --env-file=../../.env --watch src/index.ts",
@@ -2,7 +2,7 @@ import { migrate } from "drizzle-orm/node-postgres/migrator";
  import path from "node:path";
  import fs from "node:fs";
  import type { Hono } from "hono";
- import { eq, and } from "drizzle-orm";
+ import { eq, and, sql } from "drizzle-orm";
  import type { NodePgDatabase } from "drizzle-orm/node-postgres";
  import {
  coreServices,
@@ -77,7 +77,7 @@ export function registerPlugin({
  rootLogger.warn(
  `Plugin ${
  backendPlugin?.metadata?.pluginId || "unknown"
- } is not using new API. Skipping.`
+ } is not using new API. Skipping.`,
  );
  return;
  }
@@ -91,13 +91,13 @@ export function registerPlugin({
  backendPlugin.register({
  registerInit: <
  D extends Deps,
- S extends Record<string, unknown> | undefined = undefined
+ S extends Record<string, unknown> | undefined = undefined,
  >(args: {
  deps: D;
  schema?: S;
  init: (deps: ResolvedDeps<D> & DatabaseDeps<S>) => Promise<void>;
  afterPluginsReady?: (
- deps: ResolvedDeps<D> & DatabaseDeps<S> & AfterPluginsReadyContext
+ deps: ResolvedDeps<D> & DatabaseDeps<S> & AfterPluginsReadyContext,
  ) => Promise<void>;
  }) => {
  pendingInits.push({
@@ -129,12 +129,12 @@ export function registerPlugin({
  }));
  deps.registeredAccessRules.push(...prefixed);
  rootLogger.debug(
- ` -> Registered ${prefixed.length} access rules for ${pluginId}`
+ ` -> Registered ${prefixed.length} access rules for ${pluginId}`,
  );
  },
  registerRouter: (
  router: Router<AnyContractRouter, RpcContext>,
- contract: AnyContractRouter
+ contract: AnyContractRouter,
  ) => {
  deps.pluginRpcRouters.set(pluginId, router);
  deps.pluginContractRegistry.set(pluginId, contract);
@@ -184,7 +184,7 @@ export async function loadPlugins({
  });

  rootLogger.debug(
- ` -> Found ${localPlugins.length} local backend plugin(s) in workspace`
+ ` -> Found ${localPlugins.length} local backend plugin(s) in workspace`,
  );
  rootLogger.debug(" -> Discovered plugins:");
  for (const p of localPlugins) {
@@ -201,7 +201,7 @@ export async function loadPlugins({
  .where(and(eq(plugins.enabled, true), eq(plugins.type, "backend")));

  rootLogger.debug(
- ` -> ${allPlugins.length} enabled backend plugins in database:`
+ ` -> ${allPlugins.length} enabled backend plugins in database:`,
  );
  for (const p of allPlugins) {
  rootLogger.debug(` • ${p.name}`);
@@ -226,7 +226,7 @@ export async function loadPlugins({
  pluginModule = await import(plugin.name);
  } catch {
  rootLogger.debug(
- ` -> Package name import failed, trying path: ${plugin.path}`
+ ` -> Package name import failed, trying path: ${plugin.path}`,
  );
  pluginModule = await import(plugin.path);
  }
@@ -278,10 +278,58 @@ export async function loadPlugins({
  rootLogger.info(`🚀 Initializing ${p.metadata.pluginId}...`);

  try {
- const pluginDb = await deps.registry.get(
- coreServices.database,
- p.metadata
- );
+ /**
+ * =======================================================================
+ * PLUGIN MIGRATIONS WITH SCHEMA ISOLATION
+ * =======================================================================
+ *
+ * Each plugin's database objects live in a dedicated PostgreSQL schema
+ * (e.g., "plugin_maintenance", "plugin_healthcheck"). This isolation is
+ * achieved through PostgreSQL's `search_path` mechanism.
+ *
+ * ## Why SET search_path is Required for Migrations
+ *
+ * Drizzle's `migrate()` function reads SQL files and executes them directly.
+ * These SQL files contain unqualified table names like:
+ *
+ * ALTER TABLE "maintenances" ADD COLUMN "foo" boolean;
+ *
+ * Without setting search_path, PostgreSQL defaults to the `public` schema,
+ * causing "relation does not exist" errors since the tables are actually in
+ * the plugin's schema (e.g., `plugin_maintenance.maintenances`).
+ *
+ * ## Session-Level vs Transaction-Level search_path
+ *
+ * We use **session-level** `SET search_path` (not `SET LOCAL`) here because:
+ * - `migrate()` runs multiple statements and may manage its own transactions
+ * - `SET LOCAL` only persists within a single transaction
+ * - Session-level SET persists until explicitly changed or session ends
+ *
+ * ## Why This Doesn't Affect Runtime Queries
+ *
+ * After migrations complete, plugins receive their database via
+ * `createScopedDb()` which wraps every query in a transaction with
+ * `SET LOCAL search_path`. This ensures runtime queries always use the
+ * correct schema, regardless of the session-level search_path.
+ *
+ * ## Potential Hazards
+ *
+ * 1. **Error During Migration**: If a migration fails, the search_path may
+ * remain set to that plugin's schema. The next plugin's migration would
+ * fail visibly (wrong schema), which is better than silent data corruption.
+ *
+ * 2. **Parallel Migration Execution**: This code assumes sequential plugin
+ * initialization (which is enforced by the topologically-sorted loop).
+ * If migrations ever run in parallel, search_path conflicts would occur.
+ *
+ * 3. **Connection Pool Pollution**: `SET` without `LOCAL` affects the entire
+ * session. However, we reset to `public` after each plugin's migrations,
+ * and runtime queries use `SET LOCAL` anyway, so this is safe.
+ *
+ * @see createScopedDb in ../utils/scoped-db.ts for runtime query isolation
+ * @see getPluginSchemaName in @checkstack/drizzle-helper for schema naming
+ * =======================================================================
+ */

  // Run Migrations
  const migrationsFolder = path.join(p.pluginPath, "drizzle");
@@ -291,13 +339,24 @@ export async function loadPlugins({
  // Strip "public". schema references from migration SQL at runtime
  stripPublicSchemaFromMigrations(migrationsFolder);
  rootLogger.debug(
- ` -> Running migrations for ${p.metadata.pluginId} from ${migrationsFolder}`
+ ` -> Running migrations for ${p.metadata.pluginId} from ${migrationsFolder}`,
+ );
+
+ // Set search_path to plugin schema before running migrations.
+ // Uses session-level SET (not SET LOCAL) because migrate() may run
+ // multiple statements across transaction boundaries.
+ await deps.db.execute(
+ sql.raw(`SET search_path = "${migrationsSchema}", public`),
  );
- await migrate(pluginDb, { migrationsFolder, migrationsSchema });
+ await migrate(deps.db, { migrationsFolder, migrationsSchema });
+
+ // Reset search_path to public after migrations complete.
+ // This prevents search_path leaking into subsequent plugin migrations.
+ await deps.db.execute(sql.raw(`SET search_path = public`));
  } catch (error) {
  rootLogger.error(
  `❌ Failed migration of plugin ${p.metadata.pluginId}:`,
- error
+ error,
  );
  throw new Error(`Failed to migrate plugin ${p.metadata.pluginId}`, {
  cause: error,
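The session-level `SET` versus `SET LOCAL` distinction that the comment block and migration code above rely on can be illustrated with a short sketch. This is not code from the package, and the schema name is a placeholder.

```ts
// Not package code: a compact illustration of the search_path semantics the
// comment block above describes. "plugin_example" is a made-up schema name.
import { sql } from "drizzle-orm";
import type { NodePgDatabase } from "drizzle-orm/node-postgres";

async function searchPathDemo(db: NodePgDatabase): Promise<void> {
  // Session-level SET: stays in effect on this database session until it is
  // changed again, so it survives the many statements migrate() executes.
  await db.execute(sql.raw(`SET search_path = "plugin_example", public`));

  await db.transaction(async (tx) => {
    // Transaction-level SET LOCAL: applies only inside this transaction and
    // reverts automatically on commit or rollback, which is the model
    // createScopedDb uses for runtime queries.
    await tx.execute(
      sql.raw(`SET LOCAL search_path = "plugin_example", public`),
    );
    // Unqualified table names here resolve against plugin_example first.
  });

  // Explicit reset, mirroring what the loader does after each plugin's
  // migrations so the setting does not leak into the next plugin.
  await db.execute(sql.raw(`SET search_path = public`));
}
```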
@@ -305,7 +364,7 @@ export async function loadPlugins({
  }
  } else {
  rootLogger.debug(
- ` -> No migrations found for ${p.metadata.pluginId} (skipping)`
+ ` -> No migrations found for ${p.metadata.pluginId} (skipping)`,
  );
  }

@@ -314,7 +373,7 @@ export async function loadPlugins({
  for (const [key, ref] of Object.entries(p.deps)) {
  resolvedDeps[key] = await deps.registry.get(
  ref as ServiceRef<unknown>,
- p.metadata
+ p.metadata,
  );
  }

@@ -330,7 +389,7 @@ export async function loadPlugins({
  } catch (error) {
  rootLogger.error(
  `❌ Failed to initialize ${p.metadata.pluginId}:`,
- error
+ error,
  );
  throw new Error(`Failed to initialize plugin ${p.metadata.pluginId}`, {
  cause: error,
@@ -339,7 +398,7 @@ export async function loadPlugins({
  } catch (error) {
  rootLogger.error(
  `❌ Critical error loading plugin ${p.metadata.pluginId}:`,
- error
+ error,
  );
  throw new Error(`Critical error loading plugin ${p.metadata.pluginId}`, {
  cause: error,
@@ -360,7 +419,7 @@ export async function loadPlugins({
  } catch (error) {
  rootLogger.error(
  `Failed to emit pluginInitialized hook for ${p.metadata.pluginId}:`,
- error
+ error,
  );
  }
  }
@@ -386,7 +445,7 @@ export async function loadPlugins({
  } catch (error) {
  rootLogger.error(
  `Failed to emit accessRulesRegistered hook for ${pluginId}:`,
- error
+ error,
  );
  }
  }
@@ -397,7 +456,7 @@ export async function loadPlugins({
  for (const [key, ref] of Object.entries(p.deps)) {
  resolvedDeps[key] = await deps.registry.get(
  ref as ServiceRef<unknown>,
- p.metadata
+ p.metadata,
  );
  }

@@ -412,36 +471,36 @@ export async function loadPlugins({
  resolvedDeps["onHook"] = <T>(
  hook: { id: string },
  listener: (payload: T) => Promise<void>,
- options?: HookSubscribeOptions
+ options?: HookSubscribeOptions,
  ) => {
  return eventBus.subscribe(
  p.metadata.pluginId,
  hook,
  listener,
- options
+ options,
  );
  };
  resolvedDeps["emitHook"] = async <T>(
  hook: { id: string },
- payload: T
+ payload: T,
  ) => {
  await eventBus.emit(hook, payload);
  };

  await p.afterPluginsReady(resolvedDeps);
  rootLogger.debug(
- ` -> ${p.metadata.pluginId} afterPluginsReady complete`
+ ` -> ${p.metadata.pluginId} afterPluginsReady complete`,
  );
  } catch (error) {
  rootLogger.error(
  `❌ Failed afterPluginsReady for ${p.metadata.pluginId}:`,
- error
+ error,
  );
  throw new Error(
  `Failed afterPluginsReady for plugin ${p.metadata.pluginId}`,
  {
  cause: error,
- }
+ },
  );
  }
  }
@@ -183,7 +183,7 @@ export type ScopedDatabase<TSchema extends Record<string, unknown>> = Omit<
  */
  export function createScopedDb<TSchema extends Record<string, unknown>>(
  baseDb: NodePgDatabase<Record<string, unknown>>,
- schemaName: string
+ schemaName: string,
  ): ScopedDatabase<TSchema> {
  const wrappedDb = baseDb as NodePgDatabase<TSchema>;

@@ -225,7 +225,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  builder: T,
  initialMethod: string,
  initialArgs: unknown[],
- chain: Array<{ method: string; args: unknown[] }> = []
+ chain: Array<{ method: string; args: unknown[] }> = [],
  ): T {
  // Store chain info for this builder instance
  pendingChains.set(builder, {
@@ -253,7 +253,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  if (prop === "then" && typeof value === "function") {
  return (
  onFulfilled?: (value: unknown) => unknown,
- onRejected?: (reason: unknown) => unknown
+ onRejected?: (reason: unknown) => unknown,
  ) => {
  const chainInfo = pendingChains.get(builder);
  if (!chainInfo) {
@@ -262,7 +262,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  return (value as Function).call(
  builderTarget,
  onFulfilled,
- onRejected
+ onRejected,
  );
  }

@@ -271,7 +271,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  // Set the schema search_path for this transaction
  // SET LOCAL ensures it only affects this transaction
  await tx.execute(
- sql.raw(`SET LOCAL search_path = "${schemaName}", public`)
+ sql.raw(`SET LOCAL search_path = "${schemaName}", public`),
  );

  // Rebuild the query on the transaction connection
@@ -284,7 +284,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  // Replay all the chained method calls (from, where, orderBy, etc.)
  for (const call of chainInfo.chain) {
  txQuery = (txQuery as Record<string, TxMethod>)[call.method](
- ...call.args
+ ...call.args,
  );
  }

@@ -310,13 +310,13 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  if (!chainInfo) {
  return (value as (...a: unknown[]) => Promise<unknown>).apply(
  builderTarget,
- args
+ args,
  );
  }

  return baseDb.transaction(async (tx) => {
  await tx.execute(
- sql.raw(`SET LOCAL search_path = "${schemaName}", public`)
+ sql.raw(`SET LOCAL search_path = "${schemaName}", public`),
  );

  type TxMethod = (...args: unknown[]) => unknown;
@@ -326,7 +326,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(

  for (const call of chainInfo.chain) {
  txQuery = (txQuery as Record<string, TxMethod>)[call.method](
- ...call.args
+ ...call.args,
  );
  }

@@ -352,7 +352,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  // Call the original method
  const result = (value as (...a: unknown[]) => unknown).apply(
  builderTarget,
- args
+ args,
  );

  // If it returns an object (likely another builder), wrap it
@@ -367,7 +367,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  result as object,
  chainInfo?.method || initialMethod,
  chainInfo?.args || initialArgs,
- newChain
+ newChain,
  );
  }
  return result;
@@ -410,7 +410,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  `isolation. Use the standard query builder API instead:\n` +
  ` - db.select().from(table) instead of db.query.table.findMany()\n` +
  ` - db.select().from(table).where(...).limit(1) instead of db.query.table.findFirst()\n` +
- `Current schema: "${schemaName}"`
+ `Current schema: "${schemaName}"`,
  );
  }

@@ -424,12 +424,12 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  */
  if (prop === "transaction") {
  return async <T>(
- callback: (tx: ScopedDatabase<TSchema>) => Promise<T>
+ callback: (tx: ScopedDatabase<TSchema>) => Promise<T>,
  ): Promise<T> => {
  return target.transaction(async (tx) => {
  // Set search_path once at transaction start
  await tx.execute(
- sql.raw(`SET LOCAL search_path = "${schemaName}", public`)
+ sql.raw(`SET LOCAL search_path = "${schemaName}", public`),
  );
  // User's callback runs with the correct schema
  return callback(tx as ScopedDatabase<TSchema>);
@@ -446,11 +446,33 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  return async (...args: unknown[]) => {
  return target.transaction(async (tx) => {
  await tx.execute(
- sql.raw(`SET LOCAL search_path = "${schemaName}", public`)
+ sql.raw(`SET LOCAL search_path = "${schemaName}", public`),
  );
  return (tx.execute as (...a: unknown[]) => Promise<unknown>).apply(
  tx,
- args
+ args,
+ );
+ });
+ };
+ }
+
+ /**
+ * Handle db.$count() calls.
+ *
+ * The $count utility is a newer Drizzle method that returns a Promise
+ * directly (not a query builder), so it's not caught by the entityKind
+ * detection for query builders. We need to explicitly wrap it in a
+ * transaction with the search_path set.
+ */
+ if (prop === "$count" && typeof value === "function") {
+ return async (...args: unknown[]) => {
+ return target.transaction(async (tx) => {
+ await tx.execute(
+ sql.raw(`SET LOCAL search_path = "${schemaName}", public`),
+ );
+ return (tx.$count as (...a: unknown[]) => Promise<unknown>).apply(
+ tx,
+ args,
  );
  });
  };
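From a caller's perspective, the `$count` branch above makes `$count()` on the scoped proxy behave roughly like the hand-written transaction below. This is an illustrative sketch, not package code; the `schemaName` and `table` parameters are placeholders.

```ts
// Not package code: roughly what the $count branch above gives callers.
import { sql } from "drizzle-orm";
import type { NodePgDatabase } from "drizzle-orm/node-postgres";
import type { PgTable } from "drizzle-orm/pg-core";

async function countWithSchema(
  db: NodePgDatabase,
  schemaName: string,
  table: PgTable,
): Promise<number> {
  const raw = await db.transaction(async (tx) => {
    // Same SET LOCAL the proxy issues, so unqualified names resolve to the
    // plugin schema only for the duration of this transaction.
    await tx.execute(
      sql.raw(`SET LOCAL search_path = "${schemaName}", public`),
    );
    return tx.$count(table);
  });
  // The healthcheck service additionally wraps results in Number() so any
  // BigInt-like value serializes cleanly to JSON.
  return Number(raw);
}
```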
@@ -470,7 +492,7 @@ export function createScopedDb<TSchema extends Record<string, unknown>>(
  return (...args: unknown[]) => {
  const result = (value as (...a: unknown[]) => unknown).apply(
  target,
- args
+ args,
  );

  // Check if the result is a query builder that needs wrapping