@optimystic/db-core 0.5.2 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. package/dist/src/btree/btree.d.ts +2 -0
  2. package/dist/src/btree/btree.d.ts.map +1 -1
  3. package/dist/src/btree/btree.js +72 -52
  4. package/dist/src/btree/btree.js.map +1 -1
  5. package/dist/src/cluster/structs.d.ts +13 -0
  6. package/dist/src/cluster/structs.d.ts.map +1 -1
  7. package/dist/src/collection/collection.d.ts +10 -0
  8. package/dist/src/collection/collection.d.ts.map +1 -1
  9. package/dist/src/collection/collection.js +34 -0
  10. package/dist/src/collection/collection.js.map +1 -1
  11. package/dist/src/index.d.ts +1 -0
  12. package/dist/src/index.d.ts.map +1 -1
  13. package/dist/src/index.js +1 -0
  14. package/dist/src/index.js.map +1 -1
  15. package/dist/src/log/log.js +1 -1
  16. package/dist/src/log/log.js.map +1 -1
  17. package/dist/src/logger.d.ts +4 -0
  18. package/dist/src/logger.d.ts.map +1 -0
  19. package/dist/src/logger.js +8 -0
  20. package/dist/src/logger.js.map +1 -0
  21. package/dist/src/transaction/coordinator.d.ts +31 -8
  22. package/dist/src/transaction/coordinator.d.ts.map +1 -1
  23. package/dist/src/transaction/coordinator.js +206 -53
  24. package/dist/src/transaction/coordinator.js.map +1 -1
  25. package/dist/src/transaction/index.d.ts +2 -2
  26. package/dist/src/transaction/index.d.ts.map +1 -1
  27. package/dist/src/transaction/index.js +1 -1
  28. package/dist/src/transaction/index.js.map +1 -1
  29. package/dist/src/transaction/session.d.ts +11 -7
  30. package/dist/src/transaction/session.d.ts.map +1 -1
  31. package/dist/src/transaction/session.js +27 -14
  32. package/dist/src/transaction/session.js.map +1 -1
  33. package/dist/src/transaction/transaction.d.ts +9 -3
  34. package/dist/src/transaction/transaction.d.ts.map +1 -1
  35. package/dist/src/transaction/transaction.js +14 -7
  36. package/dist/src/transaction/transaction.js.map +1 -1
  37. package/dist/src/transaction/validator.d.ts +9 -2
  38. package/dist/src/transaction/validator.d.ts.map +1 -1
  39. package/dist/src/transaction/validator.js +26 -6
  40. package/dist/src/transaction/validator.js.map +1 -1
  41. package/dist/src/transactor/network-transactor.d.ts.map +1 -1
  42. package/dist/src/transactor/network-transactor.js +84 -9
  43. package/dist/src/transactor/network-transactor.js.map +1 -1
  44. package/dist/src/transactor/transactor-source.d.ts +4 -0
  45. package/dist/src/transactor/transactor-source.d.ts.map +1 -1
  46. package/dist/src/transactor/transactor-source.js +25 -9
  47. package/dist/src/transactor/transactor-source.js.map +1 -1
  48. package/dist/src/transform/atomic-proxy.d.ts +26 -0
  49. package/dist/src/transform/atomic-proxy.d.ts.map +1 -0
  50. package/dist/src/transform/atomic-proxy.js +47 -0
  51. package/dist/src/transform/atomic-proxy.js.map +1 -0
  52. package/dist/src/transform/cache-source.d.ts +3 -2
  53. package/dist/src/transform/cache-source.d.ts.map +1 -1
  54. package/dist/src/transform/cache-source.js +15 -3
  55. package/dist/src/transform/cache-source.js.map +1 -1
  56. package/dist/src/transform/index.d.ts +1 -0
  57. package/dist/src/transform/index.d.ts.map +1 -1
  58. package/dist/src/transform/index.js +1 -0
  59. package/dist/src/transform/index.js.map +1 -1
  60. package/dist/src/utility/batch-coordinator.d.ts +2 -0
  61. package/dist/src/utility/batch-coordinator.d.ts.map +1 -1
  62. package/dist/src/utility/batch-coordinator.js +6 -1
  63. package/dist/src/utility/batch-coordinator.js.map +1 -1
  64. package/dist/src/utility/hash-string.d.ts +3 -6
  65. package/dist/src/utility/hash-string.d.ts.map +1 -1
  66. package/dist/src/utility/hash-string.js +8 -11
  67. package/dist/src/utility/hash-string.js.map +1 -1
  68. package/dist/src/utility/lru-map.d.ts +18 -0
  69. package/dist/src/utility/lru-map.d.ts.map +1 -0
  70. package/dist/src/utility/lru-map.js +52 -0
  71. package/dist/src/utility/lru-map.js.map +1 -0
  72. package/package.json +15 -8
  73. package/src/btree/btree.ts +71 -50
  74. package/src/cluster/structs.ts +11 -0
  75. package/src/collection/collection.ts +44 -0
  76. package/src/index.ts +1 -0
  77. package/src/log/log.ts +1 -1
  78. package/src/logger.ts +10 -0
  79. package/src/transaction/coordinator.ts +244 -57
  80. package/src/transaction/index.ts +4 -2
  81. package/src/transaction/session.ts +38 -14
  82. package/src/transaction/transaction.ts +23 -10
  83. package/src/transaction/validator.ts +34 -7
  84. package/src/transactor/network-transactor.ts +94 -13
  85. package/src/transactor/transactor-source.ts +28 -9
  86. package/src/transform/atomic-proxy.ts +49 -0
  87. package/src/transform/cache-source.ts +18 -4
  88. package/src/transform/index.ts +1 -0
  89. package/src/utility/batch-coordinator.ts +9 -1
  90. package/src/utility/hash-string.ts +14 -17
  91. package/src/utility/lru-map.ts +55 -0
  92. package/dist/index.min.js +0 -9
  93. package/dist/index.min.js.map +0 -7
@@ -21,6 +21,9 @@ export type TransactionStamp = {
21
21
  /** Which engine (e.g., 'quereus@0.5.3', 'actions@1.0.0') */
22
22
  engineId: string;
23
23
 
24
+ /** Absolute ms epoch after which transaction is invalid */
25
+ expiration: number;
26
+
24
27
  /** Hash of the stamp fields (computed) - stable identifier throughout transaction */
25
28
  id: string;
26
29
  };
@@ -67,32 +70,42 @@ export type ReadDependency = {
67
70
  */
68
71
  export type TransactionRef = string; // The transaction ID
69
72
 
73
+ /** Default transaction time-to-live in milliseconds (30 seconds). */
74
+ export const DEFAULT_TRANSACTION_TTL_MS = 30_000;
75
+
76
+ /** Check whether a transaction stamp has expired. */
77
+ export function isTransactionExpired(stamp: TransactionStamp): boolean {
78
+ return Date.now() > stamp.expiration;
79
+ }
80
+
70
81
  /**
71
82
  * Create a transaction stamp with computed id.
72
- * The id is a hash of the stamp fields.
83
+ * The id is a hash of the stamp fields (including expiration).
73
84
  */
74
- export function createTransactionStamp(
85
+ export async function createTransactionStamp(
75
86
  peerId: string,
76
87
  timestamp: number,
77
88
  schemaHash: string,
78
- engineId: string
79
- ): TransactionStamp {
80
- const stampData = JSON.stringify({ peerId, timestamp, schemaHash, engineId });
81
- const id = `stamp:${hashString(stampData)}`;
82
- return { peerId, timestamp, schemaHash, engineId, id };
89
+ engineId: string,
90
+ ttlMs: number = DEFAULT_TRANSACTION_TTL_MS
91
+ ): Promise<TransactionStamp> {
92
+ const expiration = timestamp + ttlMs;
93
+ const stampData = JSON.stringify({ peerId, timestamp, schemaHash, engineId, expiration });
94
+ const id = `stamp:${await hashString(stampData)}`;
95
+ return { peerId, timestamp, schemaHash, engineId, expiration, id };
83
96
  }
84
97
 
85
98
  /**
86
99
  * Create a transaction id from stamp id, statements, and reads.
87
100
  * This is the final transaction identity used in logs.
88
101
  */
89
- export function createTransactionId(
102
+ export async function createTransactionId(
90
103
  stampId: string,
91
104
  statements: string[],
92
105
  reads: ReadDependency[]
93
- ): string {
106
+ ): Promise<string> {
94
107
  const txData = JSON.stringify({ stampId, statements, reads });
95
- return `tx:${hashString(txData)}`;
108
+ return `tx:${await hashString(txData)}`;
96
109
  }
97
110
 
98
111
 
@@ -1,5 +1,7 @@
1
1
  import type { BlockId, CollectionId, IBlock, BlockOperations, Transforms, ITransactor } from '../index.js';
2
- import type { Transaction, ITransactionEngine, ITransactionValidator, ValidationResult, CollectionActions } from './transaction.js';
2
+ import type { Transaction, ITransactionEngine, ITransactionValidator, ValidationResult, CollectionActions, ReadDependency } from './transaction.js';
3
+ import type { BlockActionState } from '../network/struct.js';
4
+ import { isTransactionExpired } from './transaction.js';
3
5
  import type { Collection } from '../collection/collection.js';
4
6
  import { Tracker } from '../transform/tracker.js';
5
7
  import { hashString } from '../utility/hash-string.js';
@@ -36,6 +38,12 @@ export type ValidationCoordinatorFactory = () => {
36
38
  dispose(): void;
37
39
  };
38
40
 
41
+ /**
42
+ * Provides current block state for read dependency validation.
43
+ * Returns the latest BlockActionState for a given block, or undefined if the block doesn't exist.
44
+ */
45
+ export type BlockStateProvider = (blockId: BlockId) => Promise<BlockActionState | undefined>;
46
+
39
47
  /**
40
48
  * Transaction validator implementation.
41
49
  *
@@ -45,12 +53,21 @@ export type ValidationCoordinatorFactory = () => {
45
53
  export class TransactionValidator implements ITransactionValidator {
46
54
  constructor(
47
55
  private readonly engines: Map<string, EngineRegistration>,
48
- private readonly createValidationCoordinator: ValidationCoordinatorFactory
56
+ private readonly createValidationCoordinator: ValidationCoordinatorFactory,
57
+ private readonly blockStateProvider?: BlockStateProvider
49
58
  ) {}
50
59
 
51
60
  async validate(transaction: Transaction, operationsHash: string): Promise<ValidationResult> {
52
61
  const { stamp, statements } = transaction;
53
62
 
63
+ // 0. Check expiration before any other work
64
+ if (isTransactionExpired(stamp)) {
65
+ return {
66
+ valid: false,
67
+ reason: `Transaction expired at ${stamp.expiration}`
68
+ };
69
+ }
70
+
54
71
  // 1. Verify engine exists
55
72
  const registration = this.engines.get(stamp.engineId);
56
73
  if (!registration) {
@@ -70,8 +87,18 @@ export class TransactionValidator implements ITransactionValidator {
70
87
  }
71
88
 
72
89
  // 3. Verify read dependencies (optimistic concurrency)
73
- // TODO: Implement read dependency validation
74
- // For now, we skip this check - will be implemented with proper block versioning
90
+ if (this.blockStateProvider && transaction.reads.length > 0) {
91
+ for (const read of transaction.reads) {
92
+ const currentState = await this.blockStateProvider(read.blockId);
93
+ const currentRev = currentState?.latest?.rev ?? 0;
94
+ if (currentRev !== read.revision) {
95
+ return {
96
+ valid: false,
97
+ reason: `Stale read: block ${read.blockId} was at revision ${read.revision} but is now at ${currentRev}`
98
+ };
99
+ }
100
+ }
101
+ }
75
102
 
76
103
  // 4. Create isolated validation coordinator
77
104
  const validationCoordinator = this.createValidationCoordinator();
@@ -96,7 +123,7 @@ export class TransactionValidator implements ITransactionValidator {
96
123
  const allOperations = this.collectOperations(transforms);
97
124
 
98
125
  // 8. Compute hash
99
- const computedHash = this.hashOperations(allOperations);
126
+ const computedHash = await this.hashOperations(allOperations);
100
127
 
101
128
  // 9. Compare with sender's hash
102
129
  if (computedHash !== operationsHash) {
@@ -139,9 +166,9 @@ export class TransactionValidator implements ITransactionValidator {
139
166
  * Compute hash of all operations.
140
167
  * Must match TransactionCoordinator.hashOperations for consistent validation.
141
168
  */
142
- private hashOperations(operations: readonly Operation[]): string {
169
+ private async hashOperations(operations: readonly Operation[]): Promise<string> {
143
170
  const operationsData = JSON.stringify(operations);
144
- return `ops:${hashString(operationsData)}`;
171
+ return `ops:${await hashString(operationsData)}`;
145
172
  }
146
173
  }
147
174
 
@@ -5,6 +5,9 @@ import { transformForBlockId, groupBy, concatTransforms, concatTransform, transf
5
5
  import { blockIdToBytes } from "../utility/block-id-to-bytes.js";
6
6
  import { isRecordEmpty } from "../utility/is-record-empty.js";
7
7
  import { type CoordinatorBatch, makeBatchesByPeer, incompleteBatches, everyBatch, allBatches, mergeBlocks, processBatches, createBatchesForPayload } from "../utility/batch-coordinator.js";
8
+ import { createLogger, verbose } from "../logger.js";
9
+
10
+ const log = createLogger('network-transactor');
8
11
 
9
12
  type NetworkTransactorInit = {
10
13
  timeoutMs: number;
@@ -31,6 +34,8 @@ export class NetworkTransactor implements ITransactor {
31
34
  async get(blockGets: BlockGets): Promise<GetBlockResults> {
32
35
  // Group by block id
33
36
  const distinctBlockIds = Array.from(new Set(blockGets.blockIds));
37
+ const t0 = Date.now();
38
+ log('get blockIds=%d', distinctBlockIds.length);
34
39
 
35
40
  const batches = await this.batchesForPayload<BlockId[], GetBlockResults>(
36
41
  distinctBlockIds,
@@ -77,6 +82,7 @@ export class NetworkTransactor implements ITransactor {
77
82
  ) as CoordinatorBatch<BlockId[], GetBlockResults>[];
78
83
 
79
84
  if (retryable.length > 0 && Date.now() < expiration) {
85
+ log('get:retry retryable=%d', retryable.length);
80
86
  try {
81
87
  const excludedByRoot = new Map<CoordinatorBatch<BlockId[], GetBlockResults>, Set<PeerId>>();
82
88
  for (const b of retryable) {
@@ -128,6 +134,7 @@ export class NetworkTransactor implements ITransactor {
128
134
  // Ensure we have at least one response per requested block id
129
135
  const missingIds = distinctBlockIds.filter(bid => !resultEntries.has(bid));
130
136
  if (missingIds.length > 0) {
137
+ log('get:missing blockIds=%o', missingIds);
131
138
  const details = this.formatBatchStatuses(batches,
132
139
  b => (b.request?.isResponse as boolean) ?? false,
133
140
  b => {
@@ -139,6 +146,7 @@ export class NetworkTransactor implements ITransactor {
139
146
  throw aggregate;
140
147
  }
141
148
 
149
+ log('get:done blockIds=%d ms=%d', distinctBlockIds.length, Date.now() - t0);
142
150
  return Object.fromEntries(resultEntries) as GetBlockResults;
143
151
  }
144
152
 
@@ -181,24 +189,77 @@ export class NetworkTransactor implements ITransactor {
181
189
  transforms: Transforms,
182
190
  transformForBlock: (payload: Transforms, blockId: BlockId, mergeWith?: Transforms) => Transforms
183
191
  ): Promise<CoordinatorBatch<Transforms, PendResult>[]> {
184
- const blockCoordinators = await Promise.all(
185
- blockIds.map(async bid => ({
192
+ // Use cluster intersections to minimize the number of coordinators.
193
+ // For each block, find its full cluster, then greedily assign blocks to
194
+ // peers that appear in the most clusters — reducing round trips when
195
+ // blocks share cluster members.
196
+
197
+ // Step 1: Get cluster peer sets for each block
198
+ const blockClusterPeerIds: Map<BlockId, Set<string>> = new Map();
199
+ const fallbackBlocks: BlockId[] = [];
200
+
201
+ await Promise.all(blockIds.map(async bid => {
202
+ try {
203
+ const clusterPeers = await this.keyNetwork.findCluster(await blockIdToBytes(bid));
204
+ blockClusterPeerIds.set(bid, new Set(Object.keys(clusterPeers)));
205
+ } catch {
206
+ fallbackBlocks.push(bid);
207
+ }
208
+ }));
209
+
210
+ // Step 2: Build peer → blocks index (which blocks each peer can coordinate)
211
+ const peerBlocks = new Map<string, BlockId[]>();
212
+ for (const [blockId, peerIds] of blockClusterPeerIds) {
213
+ for (const peerId of peerIds) {
214
+ const blocks = peerBlocks.get(peerId) ?? [];
215
+ blocks.push(blockId);
216
+ peerBlocks.set(peerId, blocks);
217
+ }
218
+ }
219
+
220
+ // Step 3: Greedy set cover — assign blocks to peers covering the most uncovered blocks
221
+ const uncovered = new Set(blockClusterPeerIds.keys());
222
+ const assignments = new Map<string, BlockId[]>(); // peerIdStr → assigned blockIds
223
+
224
+ while (uncovered.size > 0) {
225
+ let bestPeer: string | undefined;
226
+ let bestCount = 0;
227
+
228
+ for (const [peerId, blocks] of peerBlocks) {
229
+ const coverCount = blocks.filter(bid => uncovered.has(bid)).length;
230
+ if (coverCount > bestCount) {
231
+ bestCount = coverCount;
232
+ bestPeer = peerId;
233
+ }
234
+ }
235
+
236
+ if (!bestPeer || bestCount === 0) break;
237
+
238
+ const covered = peerBlocks.get(bestPeer)!.filter(bid => uncovered.has(bid));
239
+ assignments.set(bestPeer, covered);
240
+ for (const bid of covered) uncovered.delete(bid);
241
+ }
242
+
243
+ // Step 4: Any remaining uncovered blocks fall back to findCoordinator
244
+ for (const bid of uncovered) fallbackBlocks.push(bid);
245
+
246
+ const fallbackCoordinators = await Promise.all(
247
+ fallbackBlocks.map(async bid => ({
186
248
  blockId: bid,
187
249
  coordinator: await this.keyNetwork.findCoordinator(await blockIdToBytes(bid), { excludedPeers: [] })
188
250
  }))
189
251
  );
190
-
191
- const byCoordinator = new Map<string, BlockId[]>();
192
- for (const { blockId, coordinator } of blockCoordinators) {
252
+ for (const { blockId, coordinator } of fallbackCoordinators) {
193
253
  const key = coordinator.toString();
194
- const blocks = byCoordinator.get(key) ?? [];
195
- blocks.push(blockId);
196
- byCoordinator.set(key, blocks);
254
+ const existing = assignments.get(key) ?? [];
255
+ existing.push(blockId);
256
+ assignments.set(key, existing);
197
257
  }
198
258
 
259
+ // Step 5: Convert assignments to batches
199
260
  const batches: CoordinatorBatch<Transforms, PendResult>[] = [];
200
- for (const [coordinatorStr, consolidatedBlocks] of byCoordinator) {
201
- const coordinator = blockCoordinators.find(bc => bc.coordinator.toString() === coordinatorStr)!.coordinator;
261
+ for (const [peerIdStr, consolidatedBlocks] of assignments) {
262
+ const peerId = peerIdFromString(peerIdStr);
202
263
 
203
264
  let batchTransforms: Transforms = { inserts: {}, updates: {}, deletes: [] };
204
265
  for (const bid of consolidatedBlocks) {
@@ -207,18 +268,19 @@ export class NetworkTransactor implements ITransactor {
207
268
  }
208
269
 
209
270
  batches.push({
210
- peerId: coordinator,
271
+ peerId,
211
272
  payload: batchTransforms,
212
273
  blockId: consolidatedBlocks[0]!,
213
274
  coordinatingBlockIds: consolidatedBlocks,
214
275
  excludedPeers: []
215
- } as any);
276
+ });
216
277
  }
217
278
 
218
279
  return batches;
219
280
  }
220
281
 
221
282
  async pend(blockAction: PendRequest): Promise<PendResult> {
283
+ const t0 = Date.now();
222
284
  const transformForBlock = (payload: Transforms, blockId: BlockId, mergeWithPayload: Transforms | undefined): Transforms => {
223
285
  const filteredTransform = transformForBlockId(payload, blockId);
224
286
  return mergeWithPayload
@@ -227,6 +289,17 @@ export class NetworkTransactor implements ITransactor {
227
289
  };
228
290
  const blockIds = blockIdsForTransforms(blockAction.transforms);
229
291
  const batches = await this.consolidateCoordinators(blockIds, blockAction.transforms, transformForBlock);
292
+ log('pend actionId=%s blockIds=%d batches=%d', blockAction.actionId, blockIds.length, batches.length);
293
+ if (verbose) {
294
+ const batchSummary = batches.map(b => ({
295
+ peer: b.peerId.toString().substring(0, 12),
296
+ blocks: b.coordinatingBlockIds ?? [b.blockId],
297
+ inserts: Object.keys(b.payload.inserts ?? {}).length,
298
+ updates: Object.keys(b.payload.updates ?? {}).length,
299
+ deletes: b.payload.deletes?.length ?? 0
300
+ }));
301
+ log('pend:batches actionId=%s detail=%o', blockAction.actionId, batchSummary);
302
+ }
230
303
  const expiration = Date.now() + this.timeoutMs;
231
304
 
232
305
  let error: Error | undefined;
@@ -238,7 +311,7 @@ export class NetworkTransactor implements ITransactor {
238
311
  { ...blockAction, transforms: batch.payload },
239
312
  {
240
313
  expiration,
241
- coordinatingBlockIds: (batch as any).coordinatingBlockIds
314
+ coordinatingBlockIds: batch.coordinatingBlockIds
242
315
  } as any
243
316
  ),
244
317
  batch => blockIdsForTransforms(batch.payload),
@@ -274,9 +347,11 @@ export class NetworkTransactor implements ITransactor {
274
347
  }
275
348
 
276
349
  if (error) { // If any failures, cancel all pending actions as background microtask
350
+ log('pend:cancel actionId=%s', blockAction.actionId);
277
351
  void Promise.resolve().then(() => this.cancelBatch(batches, { blockIds, actionId: blockAction.actionId }));
278
352
  const stale = Array.from(allBatches(batches, b => b.request?.isResponse as boolean && !b.request!.response!.success));
279
353
  if (stale.length > 0) { // Any active stale failures should preempt reporting connection or other potential transient errors (we have information)
354
+ log('pend:stale actionId=%s staleCount=%d', blockAction.actionId, stale.length);
280
355
  return {
281
356
  success: false,
282
357
  missing: distinctBlockActionTransforms(stale.flatMap(b => (b.request!.response! as StaleFailure).missing).filter((x): x is ActionTransforms => x !== undefined)),
@@ -287,6 +362,7 @@ export class NetworkTransactor implements ITransactor {
287
362
 
288
363
  // Collect replies back into result structure
289
364
  const completed = Array.from(allBatches(batches, b => b.request?.isResponse as boolean && b.request!.response!.success));
365
+ log('pend:done actionId=%s ms=%d batches=%d', blockAction.actionId, Date.now() - t0, batches.length);
290
366
  return {
291
367
  success: true,
292
368
  pending: completed.flatMap(b => (b.request!.response! as PendSuccess).pending),
@@ -295,6 +371,7 @@ export class NetworkTransactor implements ITransactor {
295
371
  }
296
372
 
297
373
  async cancel(actionRef: ActionBlocks): Promise<void> {
374
+ log('cancel actionId=%s blockIds=%d', actionRef.actionId, actionRef.blockIds.length);
298
375
  const batches = await this.batchesForPayload<BlockId[], void>(
299
376
  actionRef.blockIds,
300
377
  actionRef.blockIds,
@@ -320,6 +397,8 @@ export class NetworkTransactor implements ITransactor {
320
397
  }
321
398
 
322
399
  async commit(request: CommitRequest): Promise<CommitResult> {
400
+ const t0 = Date.now();
401
+ log('commit actionId=%s rev=%d blockIds=%d', request.actionId, request.rev, request.blockIds.length);
323
402
  const allBlockIds = [...new Set([...request.blockIds, request.tailId])];
324
403
 
325
404
  // Commit the header block if provided and not already in blockIds
@@ -350,6 +429,7 @@ export class NetworkTransactor implements ITransactor {
350
429
  }
351
430
  }
352
431
 
432
+ log('commit:done actionId=%s ms=%d', request.actionId, Date.now() - t0);
353
433
  return { success: true };
354
434
  }
355
435
 
@@ -372,6 +452,7 @@ export class NetworkTransactor implements ITransactor {
372
452
  private async commitBlocks({ blockIds, actionId, rev }: RepoCommitRequest) {
373
453
  const expiration = Date.now() + this.timeoutMs;
374
454
  const batches = await this.batchesForPayload<BlockId[], CommitResult>(blockIds, blockIds, mergeBlocks, []);
455
+ log('commitBlocks actionId=%s rev=%d batches=%d', actionId, rev, batches.length);
375
456
  let error: Error | undefined;
376
457
  try {
377
458
  await processBatches(
@@ -1,8 +1,11 @@
1
1
  import { randomBytes } from '@noble/hashes/utils.js'
2
2
  import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
3
3
  import type { IBlock, BlockId, BlockHeader, ITransactor, ActionId, StaleFailure, ActionContext, BlockType, BlockSource, Transforms } from "../index.js";
4
+ import type { ReadDependency } from "../transaction/transaction.js";
4
5
 
5
6
  export class TransactorSource<TBlock extends IBlock> implements BlockSource<TBlock> {
7
+ private readDependencies: ReadDependency[] = [];
8
+
6
9
  constructor(
7
10
  private readonly collectionId: BlockId,
8
11
  private readonly transactor: ITransactor,
@@ -26,12 +29,22 @@ export class TransactorSource<TBlock extends IBlock> implements BlockSource<TBlo
26
29
  const result = await this.transactor.get({ blockIds: [id], context: this.actionContext });
27
30
  if (result) {
28
31
  const { block, state } = result[id]!;
32
+ // Record read dependency for optimistic concurrency control
33
+ this.readDependencies.push({ blockId: id, revision: state.latest?.rev ?? 0 });
29
34
  // TODO: if the state reports that there is a pending action, record this so that we are sure to update before syncing
30
35
  //state.pendings
31
36
  return block as TBlock;
32
37
  }
33
38
  }
34
39
 
40
+ getReadDependencies(): ReadDependency[] {
41
+ return this.readDependencies;
42
+ }
43
+
44
+ clearReadDependencies(): void {
45
+ this.readDependencies = [];
46
+ }
47
+
35
48
  /**
36
49
  * Attempts to apply the given transforms in a transactional manner.
37
50
  * @param transform - The transforms to apply.
@@ -50,15 +63,21 @@ export class TransactorSource<TBlock extends IBlock> implements BlockSource<TBlo
50
63
  return pendResult;
51
64
  }
52
65
  const isNew = transform.inserts && Object.hasOwn(transform.inserts, headerId);
53
- const commitResult = await this.transactor.commit({
54
- headerId: isNew ? headerId : undefined,
55
- tailId,
56
- blockIds: pendResult.blockIds,
57
- actionId,
58
- rev
59
- });
60
- if (!commitResult.success) {
61
- return commitResult;
66
+ try {
67
+ const commitResult = await this.transactor.commit({
68
+ headerId: isNew ? headerId : undefined,
69
+ tailId,
70
+ blockIds: pendResult.blockIds,
71
+ actionId,
72
+ rev
73
+ });
74
+ if (!commitResult.success) {
75
+ await this.transactor.cancel({ actionId, blockIds: pendResult.blockIds });
76
+ return commitResult;
77
+ }
78
+ } catch (e) {
79
+ await this.transactor.cancel({ actionId, blockIds: pendResult.blockIds });
80
+ throw e;
62
81
  }
63
82
  }
64
83
  }
@@ -0,0 +1,49 @@
1
+ import { Atomic } from './atomic.js';
2
+ import type { IBlock, BlockId, BlockStore, BlockType, BlockHeader, BlockOperation } from '../index.js';
3
+
4
+ /**
5
+ * A BlockStore proxy that enables scoped atomic operations.
6
+ * Operations normally delegate directly to the underlying store,
7
+ * but during an `atomic()` call, they route through an Atomic tracker
8
+ * that commits all-or-nothing on success, or rolls back on error.
9
+ *
10
+ * Both the BTree and its trunk should share the same AtomicProxy instance
11
+ * so that all mutations (including root pointer updates) are part of the
12
+ * same atomic batch.
13
+ */
14
+ export class AtomicProxy<T extends IBlock> implements BlockStore<T> {
15
+ private _base: BlockStore<T>;
16
+ private _active: BlockStore<T>;
17
+
18
+ constructor(store: BlockStore<T>) {
19
+ this._base = store;
20
+ this._active = store;
21
+ }
22
+
23
+ async tryGet(id: BlockId): Promise<T | undefined> { return this._active.tryGet(id); }
24
+ insert(block: T): void { this._active.insert(block); }
25
+ update(blockId: BlockId, op: BlockOperation): void { this._active.update(blockId, op); }
26
+ delete(blockId: BlockId): void { this._active.delete(blockId); }
27
+ generateId(): BlockId { return this._active.generateId(); }
28
+ createBlockHeader(type: BlockType, newId?: BlockId): BlockHeader { return this._active.createBlockHeader(type, newId); }
29
+
30
+ /** Execute fn within an atomic scope. All store mutations are collected
31
+ * and committed on success, or discarded on error. Re-entrant safe. */
32
+ async atomic<R>(fn: () => Promise<R>): Promise<R> {
33
+ if (this._active !== this._base) {
34
+ return fn(); // Already in atomic context
35
+ }
36
+ const atomic = new Atomic<T>(this._base);
37
+ this._active = atomic;
38
+ try {
39
+ const result = await fn();
40
+ atomic.commit();
41
+ return result;
42
+ } catch (e) {
43
+ atomic.reset();
44
+ throw e;
45
+ } finally {
46
+ this._active = this._base;
47
+ }
48
+ }
49
+ }
@@ -1,19 +1,33 @@
1
1
  import type { IBlock, BlockHeader, BlockId, BlockSource, BlockType, Transforms } from "../index.js";
2
2
  import { applyOperation } from "../index.js";
3
+ import { LruMap } from "../utility/lru-map.js";
4
+ import { createLogger } from "../logger.js";
5
+
6
+ const log = createLogger('cache');
7
+
8
+ const DefaultMaxSize = 128;
3
9
 
4
10
  export class CacheSource<T extends IBlock> implements BlockSource<T> {
5
- protected cache = new Map<BlockId, T>();
11
+ protected cache: LruMap<BlockId, T>;
6
12
 
7
13
  constructor(
8
- protected readonly source: BlockSource<T>
9
- ) { }
14
+ protected readonly source: BlockSource<T>,
15
+ maxSize = DefaultMaxSize
16
+ ) {
17
+ this.cache = new LruMap(maxSize);
18
+ }
10
19
 
11
20
  async tryGet(id: BlockId): Promise<T | undefined> {
12
21
  let block = this.cache.get(id);
13
- if (!block) {
22
+ if (block) {
23
+ log('hit id=%s', id);
24
+ } else {
14
25
  block = await this.source.tryGet(id);
15
26
  if (block) {
16
27
  this.cache.set(id, block);
28
+ log('miss:loaded id=%s cacheSize=%d', id, this.cache.size);
29
+ } else {
30
+ log('miss:absent id=%s', id);
17
31
  }
18
32
  }
19
33
  return structuredClone(block);
@@ -1,4 +1,5 @@
1
1
  export * from "./atomic.js";
2
+ export * from "./atomic-proxy.js";
2
3
  export * from "./cache-source.js";
3
4
  export * from "./helpers.js";
4
5
  export * from "./struct.js";
@@ -1,6 +1,9 @@
1
1
  import type { PeerId } from "../network/types.js";
2
2
  import type { BlockId } from "../index.js";
3
3
  import { Pending } from "./pending.js";
4
+ import { createLogger } from "../logger.js";
5
+
6
+ const log = createLogger('batch-coordinator');
4
7
 
5
8
  /**
6
9
  * Represents a batch of operations for a specific block coordinated by a peer
@@ -14,6 +17,8 @@ export type CoordinatorBatch<TPayload, TResponse> = {
14
17
  subsumedBy?: CoordinatorBatch<TPayload, TResponse>[];
15
18
  /** Peers that have already been tried (and failed) */
16
19
  excludedPeers?: PeerId[];
20
+ /** When a batch coordinates multiple blocks (via cluster intersection), lists all block IDs */
21
+ coordinatingBlockIds?: BlockId[];
17
22
  }
18
23
 
19
24
  /**
@@ -122,6 +127,7 @@ export async function processBatches<TPayload, TResponse>(
122
127
  .catch(async e => {
123
128
  if (expiration > Date.now()) {
124
129
  const excludedPeers = [batch.peerId, ...(batch.excludedPeers ?? [])];
130
+ log('retry peer=%s excluded=%d', batch.peerId.toString(), excludedPeers.length);
125
131
  const retries = await createBatchesForPayload<TPayload, TResponse>(
126
132
  getBlockIds(batch),
127
133
  batch.payload,
@@ -170,5 +176,7 @@ export async function createBatchesForPayload<TPayload, TResponse>(
170
176
  );
171
177
 
172
178
  // Group blocks around their coordinating peers
173
- return makeBatchesByPeer<TPayload, TResponse>(blockIdPeerId, payload, getBlockPayload, excludedPeers);
179
+ const batches = makeBatchesByPeer<TPayload, TResponse>(blockIdPeerId, payload, getBlockPayload, excludedPeers);
180
+ log('createBatches blockIds=%d batches=%d excluded=%d', distinctBlockIds.size, batches.length, excludedPeers.length);
181
+ return batches;
174
182
  }
@@ -1,17 +1,14 @@
1
- /**
2
- * Simple djb2 string hash function.
3
- *
4
- * This is a non-cryptographic hash suitable for generating short identifiers.
5
- * For security-critical hashing, use SHA-256 from multiformats/hashes/sha2.
6
- *
7
- * @param str - The string to hash
8
- * @returns A base-36 encoded hash string
9
- */
10
- export function hashString(str: string): string {
11
- let hash = 5381;
12
- for (let i = 0; i < str.length; i++) {
13
- hash = ((hash << 5) + hash + str.charCodeAt(i)) | 0;
14
- }
15
- return Math.abs(hash).toString(36);
16
- }
17
-
1
+ import { sha256 } from 'multiformats/hashes/sha2';
2
+ import { toString } from 'uint8arrays/to-string';
3
+
4
+ /**
5
+ * SHA-256 string hash function.
6
+ *
7
+ * @param str - The string to hash
8
+ * @returns A base64url-encoded SHA-256 hash string
9
+ */
10
+ export async function hashString(str: string): Promise<string> {
11
+ const input = new TextEncoder().encode(str);
12
+ const mh = await sha256.digest(input);
13
+ return toString(mh.digest, 'base64url');
14
+ }
@@ -0,0 +1,55 @@
1
+ /**
2
+ * A simple LRU (Least Recently Used) map backed by JavaScript's Map insertion order.
3
+ * Accessing or setting an entry refreshes it to the most-recently-used position.
4
+ * When the map exceeds maxSize, the least-recently-used entry is evicted.
5
+ */
6
+ export class LruMap<K, V> {
7
+ private readonly map = new Map<K, V>();
8
+
9
+ constructor(private readonly maxSize: number) {
10
+ if (maxSize < 1) throw new Error('LruMap maxSize must be >= 1');
11
+ }
12
+
13
+ get(key: K): V | undefined {
14
+ const value = this.map.get(key);
15
+ if (value !== undefined) {
16
+ // Refresh: delete and re-insert to move to end (most recent)
17
+ this.map.delete(key);
18
+ this.map.set(key, value);
19
+ }
20
+ return value;
21
+ }
22
+
23
+ set(key: K, value: V): this {
24
+ // If already present, delete first to refresh position
25
+ if (this.map.has(key)) {
26
+ this.map.delete(key);
27
+ } else if (this.map.size >= this.maxSize) {
28
+ // Evict the oldest (first) entry
29
+ const oldest = this.map.keys().next().value!;
30
+ this.map.delete(oldest);
31
+ }
32
+ this.map.set(key, value);
33
+ return this;
34
+ }
35
+
36
+ has(key: K): boolean {
37
+ return this.map.has(key);
38
+ }
39
+
40
+ delete(key: K): boolean {
41
+ return this.map.delete(key);
42
+ }
43
+
44
+ clear(): void {
45
+ this.map.clear();
46
+ }
47
+
48
+ get size(): number {
49
+ return this.map.size;
50
+ }
51
+
52
+ [Symbol.iterator](): IterableIterator<[K, V]> {
53
+ return this.map[Symbol.iterator]();
54
+ }
55
+ }