@concavejs/core 0.0.1-alpha.6 → 0.0.1-alpha.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -36,6 +36,11 @@ export type JWTValidationConfig = {
  secret?: string;
  skipVerification?: boolean;
  clockTolerance?: number;
+ /**
+ * Optional TTL (in milliseconds) for cached remote JWKS resolvers.
+ * Defaults to 5 minutes when omitted.
+ */
+ jwksCacheTtlMs?: number;
  };
  export type JWTValidationErrorCode = "MISSING_CONFIG" | "INVALID_SIGNATURE" | "TOKEN_EXPIRED" | "TOKEN_NOT_ACTIVE" | "CLAIM_VALIDATION_FAILED" | "MISSING_SUBJECT" | "MISSING_ISSUER" | "INVALID_TOKEN";
  export declare class JWTValidationError extends Error {
package/dist/auth/jwt.js CHANGED
@@ -123,12 +123,17 @@ export const WELL_KNOWN_JWKS_URLS = {
  firebase: () => "https://www.googleapis.com/service_accounts/v1/jwk/securetoken@system.gserviceaccount.com",
  };
  const DEFAULT_CLOCK_TOLERANCE_SECONDS = 60;
+ const DEFAULT_JWKS_CACHE_TTL_MS = 5 * 60 * 1000;
+ const MAX_JWKS_CACHE_TTL_MS = 24 * 60 * 60 * 1000;
  const JWKS_CACHE = new Map();
  let defaultValidationConfig;
  let adminAuthConfig;
  let systemAuthConfig;
  export function setJwtValidationConfig(config) {
  defaultValidationConfig = config;
+ // Config updates can change issuer/audience/JWKS source semantics.
+ // Clearing resolver cache avoids stale long-lived entries across reconfiguration.
+ JWKS_CACHE.clear();
  }
  export function getJwtValidationConfig() {
  return defaultValidationConfig;
@@ -268,7 +273,15 @@ export function resolveJwtValidationConfigFromEnv(env) {
  parseBoolean(getEnvValue("CONCAVE_JWT_SKIP_VERIFICATION", env));
  const clockTolerance = parseNumber(getEnvValue("AUTH_CLOCK_TOLERANCE", env)) ??
  parseNumber(getEnvValue("CONCAVE_JWT_CLOCK_TOLERANCE", env));
- if (!jwksUrl && !issuer && !audience && !secret && skipVerification === undefined && clockTolerance === undefined) {
+ const jwksCacheTtlMs = parseNumber(getEnvValue("AUTH_JWKS_CACHE_TTL_MS", env)) ??
+ parseNumber(getEnvValue("CONCAVE_JWT_JWKS_CACHE_TTL_MS", env));
+ if (!jwksUrl &&
+ !issuer &&
+ !audience &&
+ !secret &&
+ skipVerification === undefined &&
+ clockTolerance === undefined &&
+ jwksCacheTtlMs === undefined) {
  return undefined;
  }
  return {
@@ -278,6 +291,7 @@ export function resolveJwtValidationConfigFromEnv(env) {
  secret,
  skipVerification,
  clockTolerance,
+ jwksCacheTtlMs,
  };
  }
  function normalizeList(value) {
@@ -324,15 +338,33 @@ function validateClaims(claims, config) {
  throw new JWTValidationError("CLAIM_VALIDATION_FAILED", "JWT claim validation failed: aud");
  }
  }
- function getRemoteJwks(jwksUrl) {
+ function getRemoteJwks(jwksUrl, config) {
+ const now = Date.now();
  const cached = JWKS_CACHE.get(jwksUrl);
+ if (cached && cached.expiresAtMs > now) {
+ return cached.resolver;
+ }
  if (cached) {
- return cached;
+ JWKS_CACHE.delete(jwksUrl);
  }
  const jwks = createRemoteJWKSet(new URL(jwksUrl));
- JWKS_CACHE.set(jwksUrl, jwks);
+ const configuredTtl = config?.jwksCacheTtlMs ?? defaultValidationConfig?.jwksCacheTtlMs;
+ const ttlMs = resolveJwksCacheTtlMs(configuredTtl);
+ JWKS_CACHE.set(jwksUrl, {
+ resolver: jwks,
+ expiresAtMs: now + ttlMs,
+ });
  return jwks;
  }
+ function resolveJwksCacheTtlMs(configuredTtl) {
+ if (configuredTtl === undefined) {
+ return DEFAULT_JWKS_CACHE_TTL_MS;
+ }
+ if (!Number.isFinite(configuredTtl)) {
+ return DEFAULT_JWKS_CACHE_TTL_MS;
+ }
+ return Math.max(0, Math.min(MAX_JWKS_CACHE_TTL_MS, Math.floor(configuredTtl)));
+ }
  export function decodeJwtUnsafe(token) {
  if (!token)
  return null;
@@ -366,7 +398,7 @@ export async function verifyJwt(token, config) {
  ({ payload } = await jwtVerify(token, key, options));
  }
  else {
- ({ payload } = await jwtVerify(token, getRemoteJwks(effectiveConfig.jwksUrl), options));
+ ({ payload } = await jwtVerify(token, getRemoteJwks(effectiveConfig.jwksUrl, effectiveConfig), options));
  }
  const claims = payload;
  validateClaims(claims, effectiveConfig);
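The new TTL can be supplied either in code or through the environment variables read by resolveJwtValidationConfigFromEnv above. A minimal sketch, assuming setJwtValidationConfig and the config shape shown in this diff are reachable from the package entry point; the URL and values are illustrative:

// Sketch: cache remote JWKS resolvers for 10 minutes instead of the 5-minute default.
import { setJwtValidationConfig } from "@concavejs/core";

setJwtValidationConfig({
  jwksUrl: "https://auth.example.com/.well-known/jwks.json", // placeholder
  issuer: "https://auth.example.com/",                       // placeholder
  // New in alpha.7: TTL for cached remote JWKS resolvers, clamped to at most 24h.
  jwksCacheTtlMs: 10 * 60 * 1000,
});

// Equivalent environment configuration (either variable is honored):
//   AUTH_JWKS_CACHE_TTL_MS=600000
//   CONCAVE_JWT_JWKS_CACHE_TTL_MS=600000

Note that setJwtValidationConfig now also clears the resolver cache, so reconfiguring at runtime takes effect on the next verification.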
@@ -7,6 +7,7 @@ export interface FunctionExecutionParams {
  args: Record<string, any>;
  auth?: AuthContext | UserIdentityAttributes;
  componentPath?: string;
+ snapshotTimestamp?: bigint;
  request: Request;
  }
  export interface FunctionExecutionResult {
@@ -36,6 +37,7 @@ export interface CoreHttpApiOptions {
  notifyWrites?: (writtenRanges?: SerializedKeyRange[], writtenTables?: string[], commitTimestamp?: bigint) => Promise<void> | void;
  storage?: StorageAdapter;
  corsHeaders?: Record<string, string>;
+ getSnapshotTimestamp?: (request: Request) => Promise<bigint> | bigint;
  }
  export interface CoreHttpApiResult {
  handled: boolean;
@@ -81,6 +81,40 @@ export async function resolveAuthContext(bodyAuth, headerToken, headerIdentity)
  }
  return bodyAuth;
  }
+ function parseTimestampInput(value) {
+ if (value === undefined || value === null) {
+ return undefined;
+ }
+ if (typeof value === "bigint") {
+ return value >= 0n ? value : undefined;
+ }
+ if (typeof value === "number") {
+ if (!Number.isFinite(value) || !Number.isInteger(value) || value < 0) {
+ return undefined;
+ }
+ return BigInt(value);
+ }
+ if (typeof value === "string") {
+ const trimmed = value.trim();
+ if (!/^\d+$/.test(trimmed)) {
+ return undefined;
+ }
+ try {
+ return BigInt(trimmed);
+ }
+ catch {
+ return undefined;
+ }
+ }
+ return undefined;
+ }
+ async function resolveSnapshotTimestamp(options, request) {
+ const fromCallback = options.getSnapshotTimestamp ? await options.getSnapshotTimestamp(request) : undefined;
+ if (typeof fromCallback === "bigint") {
+ return fromCallback;
+ }
+ return BigInt(Date.now());
+ }
  export async function handleCoreHttpApiRequest(request, options) {
  const url = new URL(request.url);
  const segments = url.pathname.split("/").filter(Boolean);
@@ -124,6 +158,19 @@ export async function handleCoreHttpApiRequest(request, options) {
  throw error;
  }
  const route = routeSegments[0];
+ if (route === "query_ts") {
+ if (request.method !== "POST") {
+ return {
+ handled: true,
+ response: apply(Response.json({ error: "Method not allowed" }, { status: 405 })),
+ };
+ }
+ const snapshotTimestamp = await resolveSnapshotTimestamp(options, request);
+ return {
+ handled: true,
+ response: apply(Response.json({ ts: snapshotTimestamp.toString() })),
+ };
+ }
  if (route === "storage") {
  if (!options.storage) {
  return {
@@ -183,7 +230,7 @@ export async function handleCoreHttpApiRequest(request, options) {
  }
  }
  }
- if (route === "query" || route === "mutation" || route === "action") {
+ if (route === "query" || route === "mutation" || route === "action" || route === "query_at_ts") {
  if (request.method !== "POST") {
  return {
  handled: true,
@@ -207,7 +254,7 @@ export async function handleCoreHttpApiRequest(request, options) {
  response: apply(Response.json({ error: "Invalid request body" }, { status: 400 })),
  };
  }
- const { path, args, format, auth: bodyAuth, componentPath } = body;
+ const { path, args, format, auth: bodyAuth, componentPath, ts } = body;
  if (!path || typeof path !== "string") {
  return {
  handled: true,
@@ -236,6 +283,14 @@ export async function handleCoreHttpApiRequest(request, options) {
  };
  }
  const jsonArgs = rawArgs ?? {};
+ const executionType = route === "query_at_ts" ? "query" : route;
+ const snapshotTimestamp = route === "query_at_ts" ? parseTimestampInput(ts) : undefined;
+ if (route === "query_at_ts" && snapshotTimestamp === undefined) {
+ return {
+ handled: true,
+ response: apply(Response.json({ error: "Invalid or missing ts" }, { status: 400 })),
+ };
+ }
  let authForExecution;
  try {
  authForExecution = await resolveAuthContext(bodyAuth, headerToken, headerIdentity);
@@ -252,11 +307,12 @@ export async function handleCoreHttpApiRequest(request, options) {
  throw error;
  }
  const executionParams = {
- type: route,
+ type: executionType,
  path,
  args: jsonArgs,
  auth: authForExecution,
  componentPath,
+ snapshotTimestamp,
  request,
  };
  try {
@@ -273,7 +329,7 @@ export async function handleCoreHttpApiRequest(request, options) {
  }
  const result = await options.executeFunction(executionParams);
  if (options.notifyWrites &&
- (route === "mutation" || route === "action") &&
+ (executionType === "mutation" || executionType === "action") &&
  (result.writtenRanges?.length || result.writtenTables?.length)) {
  await options.notifyWrites(result.writtenRanges, result.writtenTables ?? writtenTablesFromRanges(result.writtenRanges));
  }
@@ -20,6 +20,8 @@ function isReservedApiPath(pathname) {
  normalizedPath === "/api/sync" ||
  normalizedPath === "/api/reset-test-state" ||
  normalizedPath === "/api/query" ||
+ normalizedPath === "/api/query_ts" ||
+ normalizedPath === "/api/query_at_ts" ||
  normalizedPath === "/api/mutation" ||
  normalizedPath === "/api/action") {
  return true;
@@ -108,7 +110,7 @@ export class HttpHandler {
  }
  };
  const coreResult = await handleCoreHttpApiRequest(request, {
- executeFunction: async ({ type, path, args, auth, componentPath }) => this.adapter.executeUdf(path, args, type, auth, componentPath),
+ executeFunction: async ({ type, path, args, auth, componentPath, snapshotTimestamp }) => this.adapter.executeUdf(path, args, type, auth, componentPath, undefined, snapshotTimestamp),
  notifyWrites,
  storage: this.docstore && this.blobstore
  ? {
@@ -130,6 +132,19 @@ export class HttpHandler {
  }
  : undefined,
  corsHeaders,
+ getSnapshotTimestamp: () => {
+ const oracle = this.docstore?.timestampOracle;
+ const oracleTimestamp = typeof oracle?.beginSnapshot === "function"
+ ? oracle.beginSnapshot()
+ : typeof oracle?.getCurrentTimestamp === "function"
+ ? oracle.getCurrentTimestamp()
+ : undefined;
+ const wallClock = BigInt(Date.now());
+ if (typeof oracleTimestamp === "bigint" && oracleTimestamp > wallClock) {
+ return oracleTimestamp;
+ }
+ return wallClock;
+ },
  });
  if (coreResult?.handled) {
  return coreResult.response;
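For orientation, a hedged sketch of how a client could use the two new HTTP routes; the request and response shapes follow the handler code above (ts travels as a decimal string so bigint values survive JSON), while the base URL and function path are placeholders:

// Sketch: fetch a snapshot timestamp, then run a query pinned to it.
const base = "https://my-deployment.example.com"; // placeholder

const tsRes = await fetch(`${base}/api/query_ts`, { method: "POST" });
const { ts } = await tsRes.json(); // stringified bigint, e.g. "1718000000000"

const queryRes = await fetch(`${base}/api/query_at_ts`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    path: "messages:list",        // placeholder function path
    args: { channel: "general" }, // placeholder args
    ts,                           // required; missing or non-numeric ts returns 400
  }),
});

Both routes are POST-only (other methods return 405), and query_at_ts is executed as an ordinary query with the parsed ts passed through as snapshotTimestamp.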
@@ -6,7 +6,7 @@
  *
  * Compatible with Convex's cron system - parses cron specs from convex/crons.ts
  */
- import type { DocStore } from "../docstore";
+ import { type DocStore } from "../docstore";
  import type { SerializedKeyRange } from "../queryengine";
  import type { UdfExec } from "../udf";
  /**
@@ -71,6 +71,8 @@ export interface CronExecutorOptions {
  allocateTimestamp?: () => bigint;
  now?: () => number;
  logger?: Pick<typeof console, "info" | "warn" | "error">;
+ maxConcurrentJobs?: number;
+ scanPageSize?: number;
  }
  export interface CronExecutionResult {
  executed: number;
@@ -83,6 +85,8 @@ export declare class CronExecutor {
  private readonly allocateTimestamp?;
  private readonly now;
  private readonly logger;
+ private readonly maxConcurrentJobs;
+ private readonly scanPageSize;
  constructor(options: CronExecutorOptions);
  /**
  * Sync cron definitions from convex/crons.ts to the _crons table
@@ -96,7 +100,7 @@ export declare class CronExecutor {
  * Get the next scheduled execution time across all cron jobs
  */
  getNextScheduledTime(): Promise<number | null>;
- private computeNextScheduledTime;
+ private forEachCronJob;
  private executeJob;
  private updateJob;
  }
@@ -6,6 +6,7 @@
  *
  * Compatible with Convex's cron system - parses cron specs from convex/crons.ts
  */
+ import { Order } from "../docstore";
  import { stringToHex, writtenTablesFromRanges } from "../utils";
  import { runAsServerCall } from "../udf/module-loader/call-context";
  const CRONS_TABLE = "_crons";
@@ -16,6 +17,8 @@ export class CronExecutor {
  allocateTimestamp;
  now;
  logger;
+ maxConcurrentJobs;
+ scanPageSize;
  constructor(options) {
  this.docstore = options.docstore;
  this.udfExecutor = options.udfExecutor;
@@ -23,6 +26,8 @@ export class CronExecutor {
  this.allocateTimestamp = options.allocateTimestamp;
  this.now = options.now ?? (() => Date.now());
  this.logger = options.logger ?? console;
+ this.maxConcurrentJobs = Math.max(1, options.maxConcurrentJobs ?? 8);
+ this.scanPageSize = Math.max(1, options.scanPageSize ?? 256);
  }
  /**
  * Sync cron definitions from convex/crons.ts to the _crons table
@@ -119,40 +124,63 @@ export class CronExecutor {
  */
  async runDueJobs() {
  const tableId = stringToHex(CRONS_TABLE);
- const allJobs = await this.docstore.scan(tableId);
  const now = this.now();
- const dueJobs = allJobs.filter((doc) => {
- const value = doc.value?.value;
- return value && value.nextRun <= now;
- });
- if (dueJobs.length === 0) {
- return {
- executed: 0,
- nextScheduledTime: this.computeNextScheduledTime(allJobs),
- };
- }
- await Promise.all(dueJobs.map((job) => this.executeJob(job, tableId)));
- // Re-scan to get updated next times
- const updatedJobs = await this.docstore.scan(tableId);
- return {
- executed: dueJobs.length,
- nextScheduledTime: this.computeNextScheduledTime(updatedJobs),
+ let executed = 0;
+ const inFlight = new Set();
+ const schedule = async (job) => {
+ const run = this.executeJob(job, tableId)
+ .then(() => {
+ executed += 1;
+ })
+ .finally(() => {
+ inFlight.delete(run);
+ });
+ inFlight.add(run);
+ if (inFlight.size >= this.maxConcurrentJobs) {
+ await Promise.race(inFlight);
+ }
  };
+ await this.forEachCronJob(async (job) => {
+ const value = job.value?.value;
+ if (!value || typeof value.nextRun !== "number") {
+ return;
+ }
+ if (value.nextRun <= now) {
+ await schedule(job);
+ }
+ });
+ await Promise.all(inFlight);
+ return { executed, nextScheduledTime: await this.getNextScheduledTime() };
  }
  /**
  * Get the next scheduled execution time across all cron jobs
  */
  async getNextScheduledTime() {
- const tableId = stringToHex(CRONS_TABLE);
- const allJobs = await this.docstore.scan(tableId);
- return this.computeNextScheduledTime(allJobs);
+ let nextScheduledTime = null;
+ await this.forEachCronJob(async (job) => {
+ const nextRun = job.value?.value?.nextRun;
+ if (typeof nextRun !== "number") {
+ return;
+ }
+ if (nextScheduledTime === null || nextRun < nextScheduledTime) {
+ nextScheduledTime = nextRun;
+ }
+ });
+ return nextScheduledTime;
  }
- computeNextScheduledTime(allJobs) {
- const nextTimes = allJobs
- .map((doc) => doc.value?.value?.nextRun)
- .filter((time) => typeof time === "number")
- .sort((a, b) => a - b);
- return nextTimes.length > 0 ? nextTimes[0] : null;
+ async forEachCronJob(visitor) {
+ const tableId = stringToHex(CRONS_TABLE);
+ let cursor = null;
+ while (true) {
+ const page = await this.docstore.scanPaginated(tableId, cursor, this.scanPageSize, Order.Asc);
+ for (const job of page.documents) {
+ await visitor(job);
+ }
+ if (!page.hasMore || !page.nextCursor) {
+ break;
+ }
+ cursor = page.nextCursor;
+ }
  }
  async executeJob(job, tableId) {
  const value = job.value?.value;
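The rewritten runDueJobs replaces the single full-table scan with a paginated walk plus a bounded pool of in-flight executions. A standalone sketch of that concurrency pattern, with a generic runJob standing in for executeJob (names here are illustrative, not part of the package API):

// Sketch: process items with at most `limit` executions in flight,
// mirroring the inFlight/Promise.race approach in CronExecutor.runDueJobs.
async function runWithConcurrencyLimit<T>(
  items: AsyncIterable<T>,
  limit: number,
  runJob: (item: T) => Promise<void>,
): Promise<number> {
  let executed = 0;
  const inFlight = new Set<Promise<void>>();
  for await (const item of items) {
    const run = runJob(item)
      .then(() => {
        executed += 1;
      })
      .finally(() => {
        inFlight.delete(run); // free the slot whether the job succeeded or failed
      });
    inFlight.add(run);
    if (inFlight.size >= limit) {
      await Promise.race(inFlight); // block until at least one job settles
    }
  }
  await Promise.all(inFlight); // drain the remaining jobs
  return executed;
}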
@@ -1,4 +1,4 @@
- import type { DocStore } from "../docstore";
+ import { type DocStore } from "../docstore";
  import type { SerializedKeyRange } from "../queryengine";
  import type { UdfExec } from "../udf";
  export interface ScheduledFunctionExecutorOptions {
@@ -10,6 +10,8 @@ export interface ScheduledFunctionExecutorOptions {
  logger?: Pick<typeof console, "info" | "warn" | "error">;
  runMutationInTransaction?: <T>(operation: () => Promise<T>) => Promise<T>;
  tableName?: string;
+ maxConcurrentJobs?: number;
+ scanPageSize?: number;
  }
  export interface ScheduledExecutionResult {
  executed: number;
@@ -24,10 +26,12 @@ export declare class ScheduledFunctionExecutor {
  private readonly logger;
  private readonly runMutationInTransaction?;
  private readonly tableName;
+ private readonly maxConcurrentJobs;
+ private readonly scanPageSize;
  constructor(options: ScheduledFunctionExecutorOptions);
  runDueJobs(): Promise<ScheduledExecutionResult>;
  getNextScheduledTime(): Promise<number | null>;
- private computeNextScheduledTime;
+ private forEachScheduledJob;
  private executeJob;
  private updateJobState;
  private getDocumentId;
@@ -1,3 +1,4 @@
+ import { Order } from "../docstore";
  import { deserializeDeveloperId, stringToHex, writtenTablesFromRanges } from "../utils";
  import { runAsServerCall } from "../udf/module-loader/call-context";
  const SCHEDULED_FUNCTIONS_TABLE = "_scheduled_functions";
@@ -10,6 +11,8 @@ export class ScheduledFunctionExecutor {
  logger;
  runMutationInTransaction;
  tableName;
+ maxConcurrentJobs;
+ scanPageSize;
  constructor(options) {
  this.docstore = options.docstore;
  this.udfExecutor = options.udfExecutor;
@@ -19,51 +22,70 @@ export class ScheduledFunctionExecutor {
  this.logger = options.logger ?? console;
  this.runMutationInTransaction = options.runMutationInTransaction;
  this.tableName = options.tableName ?? SCHEDULED_FUNCTIONS_TABLE;
+ this.maxConcurrentJobs = Math.max(1, options.maxConcurrentJobs ?? 8);
+ this.scanPageSize = Math.max(1, options.scanPageSize ?? 256);
  }
  async runDueJobs() {
  const tableId = stringToHex(this.tableName);
- const allJobs = await this.docstore.scan(tableId);
  const now = this.now();
- const pendingJobs = allJobs.filter((doc) => {
- const value = doc.value?.value;
- if (!value || typeof value !== "object") {
- return false;
+ let executed = 0;
+ const inFlight = new Set();
+ const schedule = async (jobValue) => {
+ const run = this.executeJob(jobValue, tableId)
+ .then(() => {
+ executed += 1;
+ })
+ .finally(() => {
+ inFlight.delete(run);
+ });
+ inFlight.add(run);
+ if (inFlight.size >= this.maxConcurrentJobs) {
+ await Promise.race(inFlight);
  }
- const state = value.state;
- const scheduledTime = value.scheduledTime;
- return state?.kind === "pending" && typeof scheduledTime === "number" && scheduledTime <= now;
- });
- if (pendingJobs.length === 0) {
- return {
- executed: 0,
- nextScheduledTime: this.computeNextScheduledTime(allJobs),
- };
- }
- await Promise.all(pendingJobs.map((job) => {
- const jobValue = job.value?.value;
- if (!jobValue) {
- throw new Error("Job value unexpectedly missing after filter");
- }
- return this.executeJob(jobValue, tableId);
- }));
- return {
- executed: pendingJobs.length,
- nextScheduledTime: this.computeNextScheduledTime(await this.docstore.scan(tableId)),
  };
+ await this.forEachScheduledJob(async (jobValue) => {
+ const state = jobValue.state;
+ const scheduledTime = jobValue.scheduledTime;
+ if (state?.kind !== "pending" || typeof scheduledTime !== "number") {
+ return;
+ }
+ if (scheduledTime <= now) {
+ await schedule(jobValue);
+ }
+ });
+ await Promise.all(inFlight);
+ return { executed, nextScheduledTime: await this.getNextScheduledTime() };
  }
  async getNextScheduledTime() {
- const tableId = stringToHex(this.tableName);
- const allJobs = await this.docstore.scan(tableId);
- return this.computeNextScheduledTime(allJobs);
+ let nextScheduledTime = null;
+ await this.forEachScheduledJob(async (jobValue) => {
+ const state = jobValue.state;
+ const scheduledTime = jobValue.scheduledTime;
+ if (state?.kind !== "pending" || typeof scheduledTime !== "number") {
+ return;
+ }
+ if (nextScheduledTime === null || scheduledTime < nextScheduledTime) {
+ nextScheduledTime = scheduledTime;
+ }
+ });
+ return nextScheduledTime;
  }
- computeNextScheduledTime(allJobs) {
- const pending = allJobs
- .map((doc) => doc.value?.value)
- .filter((value) => !!value && value.state?.kind === "pending")
- .map((value) => value.scheduledTime)
- .filter((time) => typeof time === "number")
- .sort((a, b) => a - b);
- return pending.length > 0 ? pending[0] : null;
+ async forEachScheduledJob(visitor) {
+ const tableId = stringToHex(this.tableName);
+ let cursor = null;
+ while (true) {
+ const page = await this.docstore.scanPaginated(tableId, cursor, this.scanPageSize, Order.Asc);
+ for (const doc of page.documents) {
+ const value = doc.value?.value;
+ if (value && typeof value === "object") {
+ await visitor(value);
+ }
+ }
+ if (!page.hasMore || !page.nextCursor) {
+ break;
+ }
+ cursor = page.nextCursor;
+ }
  }
  async executeJob(jobValue, tableId) {
  const jobId = jobValue?._id;
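Both forEachCronJob and forEachScheduledJob lean on the same docstore paging contract. A sketch of the page shape this code assumes, derived only from the fields read above; the real DocStore typings (including the cursor type) may differ:

// Assumed result shape of docstore.scanPaginated(tableId, cursor, pageSize, Order.Asc).
interface ScanPage<Doc> {
  documents: Doc[];          // documents in the requested order
  hasMore: boolean;          // false once the last page has been returned
  nextCursor: string | null; // fed back as `cursor` on the next call; treated as opaque here
}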
@@ -98,15 +98,37 @@ export interface RoutedMessage {
  */
  export interface SyncProtocolOptions {
  isDev?: boolean;
+ /**
+ * Maximum number of client messages accepted per session per rolling window.
+ */
+ maxMessagesPerWindow?: number;
+ /**
+ * Rolling window duration for message rate limiting.
+ */
+ rateLimitWindowMs?: number;
+ /**
+ * Timeout for a single locked operation.
+ * This does not cancel the underlying work; it bounds client-visible wait time.
+ */
+ operationTimeoutMs?: number;
+ /**
+ * Maximum number of active subscribed queries a single session can hold.
+ */
+ maxActiveQueriesPerSession?: number;
  }
  export declare class SyncProtocolHandler {
  private readonly udfExecutor;
  private sessions;
+ private rateLimitStates;
  private subscriptionManager;
  private readonly instanceName;
  private readonly backpressureController;
  private readonly heartbeatController;
  private readonly isDev;
+ private readonly maxMessagesPerWindow;
+ private readonly rateLimitWindowMs;
+ private readonly operationTimeoutMs;
+ private readonly maxActiveQueriesPerSession;
  constructor(instanceName: string, udfExecutor: SyncUdfExecutor, options?: SyncProtocolOptions);
  /**
  * Create a new client session
@@ -156,6 +178,9 @@ export declare class SyncProtocolHandler {
  private rerunSpecificQueries;
  private logRanges;
  private withSessionLock;
+ private withOperationTimeout;
+ private consumeRateLimit;
+ private computeProjectedActiveQueryCount;
  private sendPing;
  /**
  * Send a message to a client with backpressure handling.
@@ -60,6 +60,11 @@ export const WEBSOCKET_READY_STATE_OPEN = 1;
  const BACKPRESSURE_HIGH_WATER_MARK = 100; // Max messages in queue before dropping
  const BACKPRESSURE_BUFFER_LIMIT = 1024 * 1024; // 1MB buffer limit
  const SLOW_CLIENT_TIMEOUT_MS = 30000; // 30 seconds to drain queue before disconnect
+ const DEFAULT_RATE_LIMIT_WINDOW_MS = 5000;
+ const DEFAULT_MAX_MESSAGES_PER_WINDOW = 1000;
+ const DEFAULT_OPERATION_TIMEOUT_MS = 15000;
+ const DEFAULT_MAX_ACTIVE_QUERIES_PER_SESSION = 1_000;
+ const RATE_LIMIT_HARD_MULTIPLIER = 5;
  /**
  * Session state for a connected client
  */
@@ -96,15 +101,27 @@ export class SyncSession {
  export class SyncProtocolHandler {
  udfExecutor;
  sessions = new Map();
+ rateLimitStates = new Map();
  subscriptionManager;
  instanceName;
  backpressureController;
  heartbeatController;
  isDev;
+ maxMessagesPerWindow;
+ rateLimitWindowMs;
+ operationTimeoutMs;
+ maxActiveQueriesPerSession;
  constructor(instanceName, udfExecutor, options) {
  this.udfExecutor = udfExecutor;
  this.instanceName = instanceName;
  this.isDev = options?.isDev ?? true;
+ this.maxMessagesPerWindow = Math.max(1, options?.maxMessagesPerWindow ?? DEFAULT_MAX_MESSAGES_PER_WINDOW);
+ this.rateLimitWindowMs = Math.max(1, options?.rateLimitWindowMs ?? DEFAULT_RATE_LIMIT_WINDOW_MS);
+ const configuredOperationTimeout = options?.operationTimeoutMs ?? DEFAULT_OPERATION_TIMEOUT_MS;
+ this.operationTimeoutMs = Number.isFinite(configuredOperationTimeout)
+ ? Math.max(0, Math.floor(configuredOperationTimeout))
+ : DEFAULT_OPERATION_TIMEOUT_MS;
+ this.maxActiveQueriesPerSession = Math.max(1, options?.maxActiveQueriesPerSession ?? DEFAULT_MAX_ACTIVE_QUERIES_PER_SESSION);
  this.subscriptionManager = new SubscriptionManager();
  this.backpressureController = new SessionBackpressureController({
  websocketReadyStateOpen: WEBSOCKET_READY_STATE_OPEN,
@@ -125,6 +142,10 @@ export class SyncProtocolHandler {
  createSession(sessionId, websocket) {
  const session = new SyncSession(websocket);
  this.sessions.set(sessionId, session);
+ this.rateLimitStates.set(sessionId, {
+ windowStartedAt: Date.now(),
+ messagesInWindow: 0,
+ });
  return session;
  }
  /**
@@ -146,6 +167,7 @@ export class SyncProtocolHandler {
  session.isDraining = false;
  this.subscriptionManager.unsubscribeAll(sessionId);
  this.sessions.delete(sessionId);
+ this.rateLimitStates.delete(sessionId);
  }
  }
  /**
@@ -156,6 +178,11 @@ export class SyncProtocolHandler {
  if (session) {
  this.sessions.delete(oldSessionId);
  this.sessions.set(newSessionId, session);
+ const rateLimitState = this.rateLimitStates.get(oldSessionId);
+ if (rateLimitState) {
+ this.rateLimitStates.delete(oldSessionId);
+ this.rateLimitStates.set(newSessionId, rateLimitState);
+ }
  // Update subscription manager mappings
  this.subscriptionManager.updateSessionId(oldSessionId, newSessionId);
  }
@@ -169,6 +196,32 @@ export class SyncProtocolHandler {
  if (!session && message.type !== "Connect") {
  throw new Error("Session not found");
  }
+ if (session) {
+ const rateLimitDecision = this.consumeRateLimit(sessionId);
+ if (rateLimitDecision === "reject") {
+ return [
+ {
+ type: "FatalError",
+ error: "Rate limit exceeded, retry shortly",
+ },
+ ];
+ }
+ if (rateLimitDecision === "close") {
+ try {
+ session.websocket.close(1013, "Rate limit exceeded");
+ }
+ catch {
+ // Ignore close errors and still destroy local session state.
+ }
+ this.destroySession(sessionId);
+ return [
+ {
+ type: "FatalError",
+ error: "Rate limit exceeded",
+ },
+ ];
+ }
+ }
  switch (message.type) {
  case "Connect":
  return this.handleConnect(sessionId, message);
@@ -241,6 +294,15 @@ export class SyncProtocolHandler {
  return [fatalError];
  }
  const startVersion = makeStateVersion(session.querySetVersion, session.identityVersion, session.timestamp);
+ const projectedActiveQueryCount = this.computeProjectedActiveQueryCount(session, message);
+ if (projectedActiveQueryCount > this.maxActiveQueriesPerSession) {
+ return [
+ {
+ type: "FatalError",
+ error: `Too many active queries: ${projectedActiveQueryCount} exceeds limit ${this.maxActiveQueriesPerSession}`,
+ },
+ ];
+ }
  // Update version before async work
  session.querySetVersion = message.newVersion;
  const modifications = [];
@@ -614,7 +676,55 @@ export class SyncProtocolHandler {
  }
  });
  session.lock = run.then(() => undefined, () => undefined);
- return run;
+ return this.withOperationTimeout(run);
+ }
+ withOperationTimeout(promise) {
+ if (this.operationTimeoutMs <= 0) {
+ return promise;
+ }
+ let timeoutHandle;
+ const timeoutPromise = new Promise((_, reject) => {
+ timeoutHandle = setTimeout(() => {
+ reject(new Error(`Sync operation timed out after ${this.operationTimeoutMs}ms`));
+ }, this.operationTimeoutMs);
+ });
+ return Promise.race([promise, timeoutPromise]).finally(() => {
+ if (timeoutHandle) {
+ clearTimeout(timeoutHandle);
+ }
+ });
+ }
+ consumeRateLimit(sessionId) {
+ const state = this.rateLimitStates.get(sessionId);
+ if (!state) {
+ return "allow";
+ }
+ const now = Date.now();
+ if (now - state.windowStartedAt >= this.rateLimitWindowMs) {
+ state.windowStartedAt = now;
+ state.messagesInWindow = 0;
+ }
+ state.messagesInWindow += 1;
+ if (state.messagesInWindow <= this.maxMessagesPerWindow) {
+ return "allow";
+ }
+ const hardLimit = Math.max(this.maxMessagesPerWindow + 1, this.maxMessagesPerWindow * RATE_LIMIT_HARD_MULTIPLIER);
+ if (state.messagesInWindow >= hardLimit) {
+ return "close";
+ }
+ return "reject";
+ }
+ computeProjectedActiveQueryCount(session, message) {
+ const projected = new Set(session.activeQueries.keys());
+ for (const mod of message.modifications) {
+ if (mod.type === "Add") {
+ projected.add(mod.queryId);
+ }
+ else if (mod.type === "Remove") {
+ projected.delete(mod.queryId);
+ }
+ }
+ return projected.size;
  }
  sendPing(session) {
  if (!session.websocket || session.websocket.readyState !== WEBSOCKET_READY_STATE_OPEN) {
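The new limits all have defaults (1000 messages per 5 s window, a 15 s operation timeout, 1000 active queries per session) and are only overridden via SyncProtocolOptions. A hedged construction sketch; instanceName and udfExecutor are assumed to exist in the host application, and the values are illustrative:

// Sketch: tightening the new sync-protocol limits.
const handler = new SyncProtocolHandler(instanceName, udfExecutor, {
  isDev: false,
  maxMessagesPerWindow: 500,   // soft limit: excess messages get a FatalError reply
  rateLimitWindowMs: 5_000,    // rolling window; 5x the soft limit closes the socket (1013)
  operationTimeoutMs: 10_000,  // 0 disables the per-operation timeout
  maxActiveQueriesPerSession: 200,
});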
@@ -42,7 +42,7 @@ export declare class UdfExecutionAdapter {
  * @param requestId - Optional request ID for tracing
  * @returns UDF execution result
  */
- executeUdf(path: string, jsonArgs: JsonArgs, type: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string): Promise<UdfResult>;
+ executeUdf(path: string, jsonArgs: JsonArgs, type: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint): Promise<UdfResult>;
  }
  /**
  * Factory function to create a client-side adapter (for HTTP/WebSocket requests)
@@ -58,7 +58,7 @@ export class UdfExecutionAdapter {
  * @param requestId - Optional request ID for tracing
  * @returns UDF execution result
  */
- async executeUdf(path, jsonArgs, type, auth, componentPath, requestId) {
+ async executeUdf(path, jsonArgs, type, auth, componentPath, requestId, snapshotTimestamp) {
  // Step 1: Convert args with type safety
  const convexArgs = convertClientArgs(jsonArgs);
  const target = normalizeExecutionTarget(path, componentPath);
@@ -104,7 +104,7 @@ export class UdfExecutionAdapter {
  const executeWithContext = this.callType === "client" ? runAsClientCall : runAsServerCall;
  // Step 4: Execute with all context properly set
  return executeWithContext(async () => {
- return await this.executor.execute(target.path, convexArgs, type, authContext ?? userIdentity, normalizeComponentPath(target.componentPath), requestId);
+ return await this.executor.execute(target.path, convexArgs, type, authContext ?? userIdentity, normalizeComponentPath(target.componentPath), requestId, snapshotTimestamp);
  });
  });
  }
@@ -22,7 +22,7 @@ export declare class InlineUdfExecutor implements UdfExec {
  private moduleRegistry?;
  private readonly logSink?;
  constructor(options: InlineUdfExecutorOptions);
- execute(functionPath: string, args: Record<string, any>, udfType: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string): Promise<UdfResult>;
+ execute(functionPath: string, args: Record<string, any>, udfType: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint): Promise<UdfResult>;
  executeHttp(request: Request, auth?: AuthContext | UserIdentityAttributes, requestId?: string): Promise<Response>;
  setModuleRegistry(moduleRegistry?: ModuleRegistry): void;
  protected loadModule(moduleName: string, componentPath?: string): Promise<any>;
@@ -21,7 +21,7 @@ export class InlineUdfExecutor {
  this.moduleRegistry = options.moduleRegistry;
  this.logSink = options.logSink;
  }
- async execute(functionPath, args, udfType, auth, componentPath, requestId) {
+ async execute(functionPath, args, udfType, auth, componentPath, requestId, snapshotTimestamp) {
  const [moduleName, functionName] = this.parseUdfPath(functionPath);
  const finalRequestId = requestId ?? this.requestIdFactory(udfType, functionPath);
  // Skip logging for system functions to avoid recursion
@@ -47,7 +47,7 @@ export class InlineUdfExecutor {
  const runWithType = () => {
  switch (udfType) {
  case "query":
- return runUdfQuery(this.docstore, runUdf, auth, this.blobstore, finalRequestId, this, componentPath);
+ return runUdfQuery(this.docstore, runUdf, auth, this.blobstore, finalRequestId, this, componentPath, snapshotTimestamp);
  case "mutation":
  return runUdfMutation(this.docstore, runUdf, auth, this.blobstore, finalRequestId, this, componentPath);
  case "action":
@@ -106,7 +106,7 @@ export class InlineUdfExecutor {
  async executeHttp(request, auth, requestId) {
  const url = new URL(request.url);
  const runHttpUdf = async () => {
- const httpModule = await this.loadModule("http", request.url);
+ const httpModule = await this.loadModule("http");
  const router = httpModule?.default;
  if (!router?.isRouter || typeof router.lookup !== "function") {
  throw new Error("convex/http.ts must export a default httpRouter()");
@@ -3,6 +3,6 @@ import type { AuthContext } from "../../sync/protocol-handler";
  import type { UserIdentityAttributes } from "convex/server";
  export type { UdfResult };
  export interface UdfExec {
- execute(path: string, args: Record<string, unknown>, type: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string): Promise<UdfResult>;
+ execute(path: string, args: Record<string, unknown>, type: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint): Promise<UdfResult>;
  executeHttp(request: Request, auth?: AuthContext | UserIdentityAttributes, requestId?: string): Promise<Response>;
  }
@@ -24,7 +24,7 @@ export interface UdfRuntimeOps {
  console: typeof console | null;
  convex: ConvexInterface;
  }
- export declare function runUdfQuery(docstore: DocStore, fn: () => Promise<JSONValue>, auth?: any, storage?: BlobStore, requestId?: string, udfExecutor?: any, componentPath?: string): Promise<UdfResult>;
+ export declare function runUdfQuery(docstore: DocStore, fn: () => Promise<JSONValue>, auth?: any, storage?: BlobStore, requestId?: string, udfExecutor?: any, componentPath?: string, snapshotOverride?: bigint): Promise<UdfResult>;
  export declare function runUdfMutation(docstore: DocStore, fn: () => Promise<JSONValue>, auth?: any, storage?: BlobStore, requestId?: string, udfExecutor?: any, componentPath?: string): Promise<UdfResult>;
  export declare function runUdfAction(docstore: DocStore, fn: () => Promise<JSONValue>, auth?: any, storage?: BlobStore, requestId?: string, udfExecutor?: any, componentPath?: string): Promise<UdfResult>;
  export declare function runUdfHttpAction(docstore: DocStore, fn: () => Promise<Response>, auth?: any, storage?: BlobStore, requestId?: string, udfExecutor?: any): Promise<Response>;
@@ -116,7 +116,7 @@ class ForbiddenInQueriesOrMutations extends Error {
  }
  }
  async function runUdfAndGetLogs(docstore, fn, ops, auth, // Still accept for backwards compatibility, but prefer ambient context
- udfType, storage, deterministicSeed, mutationTransaction, udfExecutor, componentPath) {
+ udfType, storage, deterministicSeed, mutationTransaction, udfExecutor, componentPath, snapshotOverride) {
  // Get auth from ambient context (set by execution adapter) or fallback to explicit param
  const ambientIdentity = getAuthContext();
  let effectiveAuth;
@@ -133,7 +133,7 @@ udfType, storage, deterministicSeed, mutationTransaction, udfExecutor, component
  const inheritedSnapshot = snapshotContext.getStore() ?? null;
  const existingIdGenerator = idGeneratorContext.getStore() ?? undefined;
  const idGenerator = existingIdGenerator ?? (deterministicSeed ? createDeterministicIdGenerator(deterministicSeed) : undefined);
- const convex = new UdfKernel(docstore, effectiveAuth, storage, inheritedSnapshot, mutationTransaction, udfExecutor, componentPath, idGenerator);
+ const convex = new UdfKernel(docstore, effectiveAuth, storage, snapshotOverride ?? inheritedSnapshot, mutationTransaction, udfExecutor, componentPath, idGenerator);
  convex.clearAccessLogs();
  const logLines = [];
  const logger = (level) => {
@@ -196,7 +196,7 @@ udfType, storage, deterministicSeed, mutationTransaction, udfExecutor, component
  // Globals are unpatched in runWithUdfRuntime
  }
  }
- export function runUdfQuery(docstore, fn, auth, storage, requestId, udfExecutor, componentPath) {
+ export function runUdfQuery(docstore, fn, auth, storage, requestId, udfExecutor, componentPath, snapshotOverride) {
  const tnow = Date.now();
  const seed = resolveSeed("query", requestId, tnow);
  const rng = udfRng(seed);
@@ -210,7 +210,7 @@ export function runUdfQuery(docstore, fn, auth, storage, requestId, udfExecutor,
  fetch: forbiddenAsyncOp("fetch"),
  setInterval: forbiddenAsyncOp("setInterval"),
  setTimeout: forbiddenAsyncOp("setTimeout"),
- }, auth, "query", storage, seed, undefined, udfExecutor, componentPath);
+ }, auth, "query", storage, seed, undefined, udfExecutor, componentPath, snapshotOverride);
  }
  export function runUdfMutation(docstore, fn, auth, storage, requestId, udfExecutor, componentPath) {
  const tnow = Date.now();
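The snapshot timestamp is threaded as a new trailing parameter along the whole query path rather than through ambient context. A hedged sketch of the call chain, using placeholder values; requestId is passed as undefined exactly as the HttpHandler wiring above does:

// Sketch: pinning a query to a snapshot timestamp end to end.
const snapshotTs = 1718000000000n; // e.g. the value returned by /api/query_ts

const result = await adapter.executeUdf(
  "messages:list",        // placeholder function path
  { channel: "general" }, // placeholder args
  "query",
  auth,                   // optional AuthContext | UserIdentityAttributes
  undefined,              // componentPath
  undefined,              // requestId
  snapshotTs,             // new trailing snapshotTimestamp parameter
);

// Internally this flows as:
//   executor.execute(path, args, "query", auth, componentPath, requestId, snapshotTs)
//     -> runUdfQuery(docstore, fn, auth, storage, requestId, executor, componentPath, snapshotTs)
//       -> new UdfKernel(docstore, auth, storage, snapshotTs ?? inheritedSnapshot, ...)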
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@concavejs/core",
- "version": "0.0.1-alpha.6",
+ "version": "0.0.1-alpha.7",
  "license": "FSL-1.1-Apache-2.0",
  "publishConfig": {
  "access": "public"