@rocicorp/zero 0.26.0 → 0.26.1-canary.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. package/out/analyze-query/src/run-ast.d.ts.map +1 -1
  2. package/out/analyze-query/src/run-ast.js +4 -1
  3. package/out/analyze-query/src/run-ast.js.map +1 -1
  4. package/out/replicache/src/btree/node.js +4 -4
  5. package/out/replicache/src/btree/node.js.map +1 -1
  6. package/out/replicache/src/btree/write.js +2 -2
  7. package/out/replicache/src/btree/write.js.map +1 -1
  8. package/out/replicache/src/dag/gc.js +5 -2
  9. package/out/replicache/src/dag/gc.js.map +1 -1
  10. package/out/replicache/src/db/write.d.ts.map +1 -1
  11. package/out/replicache/src/db/write.js +21 -6
  12. package/out/replicache/src/db/write.js.map +1 -1
  13. package/out/replicache/src/error-responses.d.ts.map +1 -1
  14. package/out/replicache/src/error-responses.js +4 -1
  15. package/out/replicache/src/error-responses.js.map +1 -1
  16. package/out/replicache/src/persist/clients.d.ts.map +1 -1
  17. package/out/replicache/src/persist/clients.js +4 -1
  18. package/out/replicache/src/persist/clients.js.map +1 -1
  19. package/out/replicache/src/persist/collect-idb-databases.d.ts.map +1 -1
  20. package/out/replicache/src/persist/collect-idb-databases.js +2 -1
  21. package/out/replicache/src/persist/collect-idb-databases.js.map +1 -1
  22. package/out/replicache/src/persist/idb-databases-store.d.ts.map +1 -1
  23. package/out/replicache/src/persist/idb-databases-store.js +4 -1
  24. package/out/replicache/src/persist/idb-databases-store.js.map +1 -1
  25. package/out/replicache/src/process-scheduler.js +4 -1
  26. package/out/replicache/src/process-scheduler.js.map +1 -1
  27. package/out/replicache/src/replicache-impl.js +2 -2
  28. package/out/replicache/src/replicache-impl.js.map +1 -1
  29. package/out/replicache/src/subscriptions.d.ts.map +1 -1
  30. package/out/replicache/src/subscriptions.js +5 -2
  31. package/out/replicache/src/subscriptions.js.map +1 -1
  32. package/out/replicache/src/sync/diff.d.ts.map +1 -1
  33. package/out/replicache/src/sync/diff.js +4 -1
  34. package/out/replicache/src/sync/diff.js.map +1 -1
  35. package/out/replicache/src/sync/pull.d.ts.map +1 -1
  36. package/out/replicache/src/sync/pull.js +4 -1
  37. package/out/replicache/src/sync/pull.js.map +1 -1
  38. package/out/replicache/src/sync/push.d.ts.map +1 -1
  39. package/out/replicache/src/sync/push.js +5 -2
  40. package/out/replicache/src/sync/push.js.map +1 -1
  41. package/out/shared/src/asserts.d.ts +1 -1
  42. package/out/shared/src/asserts.d.ts.map +1 -1
  43. package/out/shared/src/asserts.js +1 -1
  44. package/out/shared/src/asserts.js.map +1 -1
  45. package/out/z2s/src/compiler.d.ts.map +1 -1
  46. package/out/z2s/src/compiler.js +8 -2
  47. package/out/z2s/src/compiler.js.map +1 -1
  48. package/out/zero/package.json.js +1 -1
  49. package/out/zero-cache/src/config/zero-config.d.ts +4 -0
  50. package/out/zero-cache/src/config/zero-config.d.ts.map +1 -1
  51. package/out/zero-cache/src/config/zero-config.js +17 -0
  52. package/out/zero-cache/src/config/zero-config.js.map +1 -1
  53. package/out/zero-cache/src/db/transaction-pool.d.ts.map +1 -1
  54. package/out/zero-cache/src/db/transaction-pool.js +17 -11
  55. package/out/zero-cache/src/db/transaction-pool.js.map +1 -1
  56. package/out/zero-cache/src/observability/events.d.ts.map +1 -1
  57. package/out/zero-cache/src/observability/events.js +28 -9
  58. package/out/zero-cache/src/observability/events.js.map +1 -1
  59. package/out/zero-cache/src/server/change-streamer.d.ts.map +1 -1
  60. package/out/zero-cache/src/server/change-streamer.js +3 -1
  61. package/out/zero-cache/src/server/change-streamer.js.map +1 -1
  62. package/out/zero-cache/src/services/analyze.js +1 -0
  63. package/out/zero-cache/src/services/analyze.js.map +1 -1
  64. package/out/zero-cache/src/services/change-source/pg/backfill-stream.d.ts.map +1 -1
  65. package/out/zero-cache/src/services/change-source/pg/backfill-stream.js +29 -14
  66. package/out/zero-cache/src/services/change-source/pg/backfill-stream.js.map +1 -1
  67. package/out/zero-cache/src/services/change-source/pg/initial-sync.d.ts +6 -1
  68. package/out/zero-cache/src/services/change-source/pg/initial-sync.d.ts.map +1 -1
  69. package/out/zero-cache/src/services/change-source/pg/initial-sync.js +69 -25
  70. package/out/zero-cache/src/services/change-source/pg/initial-sync.js.map +1 -1
  71. package/out/zero-cache/src/services/change-source/pg/schema/ddl.d.ts.map +1 -1
  72. package/out/zero-cache/src/services/change-source/pg/schema/ddl.js +6 -1
  73. package/out/zero-cache/src/services/change-source/pg/schema/ddl.js.map +1 -1
  74. package/out/zero-cache/src/services/change-source/pg/schema/init.d.ts.map +1 -1
  75. package/out/zero-cache/src/services/change-source/pg/schema/init.js +12 -8
  76. package/out/zero-cache/src/services/change-source/pg/schema/init.js.map +1 -1
  77. package/out/zero-cache/src/services/change-source/protocol/current/data.d.ts +26 -0
  78. package/out/zero-cache/src/services/change-source/protocol/current/data.d.ts.map +1 -1
  79. package/out/zero-cache/src/services/change-source/protocol/current/data.js +15 -3
  80. package/out/zero-cache/src/services/change-source/protocol/current/data.js.map +1 -1
  81. package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts +30 -0
  82. package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts.map +1 -1
  83. package/out/zero-cache/src/services/change-source/protocol/current.js +2 -1
  84. package/out/zero-cache/src/services/change-streamer/broadcast.d.ts +100 -0
  85. package/out/zero-cache/src/services/change-streamer/broadcast.d.ts.map +1 -0
  86. package/out/zero-cache/src/services/change-streamer/broadcast.js +171 -0
  87. package/out/zero-cache/src/services/change-streamer/broadcast.js.map +1 -0
  88. package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts +1 -1
  89. package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts.map +1 -1
  90. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js +22 -9
  91. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js.map +1 -1
  92. package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts +10 -0
  93. package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts.map +1 -1
  94. package/out/zero-cache/src/services/change-streamer/forwarder.d.ts +17 -1
  95. package/out/zero-cache/src/services/change-streamer/forwarder.d.ts.map +1 -1
  96. package/out/zero-cache/src/services/change-streamer/forwarder.js +52 -4
  97. package/out/zero-cache/src/services/change-streamer/forwarder.js.map +1 -1
  98. package/out/zero-cache/src/services/change-streamer/subscriber.d.ts +18 -0
  99. package/out/zero-cache/src/services/change-streamer/subscriber.d.ts.map +1 -1
  100. package/out/zero-cache/src/services/change-streamer/subscriber.js +68 -12
  101. package/out/zero-cache/src/services/change-streamer/subscriber.js.map +1 -1
  102. package/out/zero-cache/src/services/replicator/change-processor.d.ts +2 -0
  103. package/out/zero-cache/src/services/replicator/change-processor.d.ts.map +1 -1
  104. package/out/zero-cache/src/services/replicator/change-processor.js +8 -6
  105. package/out/zero-cache/src/services/replicator/change-processor.js.map +1 -1
  106. package/out/zero-cache/src/services/replicator/incremental-sync.d.ts.map +1 -1
  107. package/out/zero-cache/src/services/replicator/incremental-sync.js +39 -1
  108. package/out/zero-cache/src/services/replicator/incremental-sync.js.map +1 -1
  109. package/out/zero-cache/src/services/replicator/replication-status.d.ts +4 -3
  110. package/out/zero-cache/src/services/replicator/replication-status.d.ts.map +1 -1
  111. package/out/zero-cache/src/services/replicator/replication-status.js +25 -10
  112. package/out/zero-cache/src/services/replicator/replication-status.js.map +1 -1
  113. package/out/zero-cache/src/services/run-ast.d.ts.map +1 -1
  114. package/out/zero-cache/src/services/run-ast.js +22 -2
  115. package/out/zero-cache/src/services/run-ast.js.map +1 -1
  116. package/out/zero-cache/src/services/running-state.d.ts +1 -0
  117. package/out/zero-cache/src/services/running-state.d.ts.map +1 -1
  118. package/out/zero-cache/src/services/running-state.js +4 -0
  119. package/out/zero-cache/src/services/running-state.js.map +1 -1
  120. package/out/zero-cache/src/services/view-syncer/cvr.d.ts.map +1 -1
  121. package/out/zero-cache/src/services/view-syncer/cvr.js +8 -2
  122. package/out/zero-cache/src/services/view-syncer/cvr.js.map +1 -1
  123. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
  124. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js +10 -1
  125. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
  126. package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts +1 -1
  127. package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts.map +1 -1
  128. package/out/zero-cache/src/services/view-syncer/snapshotter.js +15 -7
  129. package/out/zero-cache/src/services/view-syncer/snapshotter.js.map +1 -1
  130. package/out/zero-cache/src/types/subscription.d.ts +3 -1
  131. package/out/zero-cache/src/types/subscription.d.ts.map +1 -1
  132. package/out/zero-cache/src/types/subscription.js +21 -9
  133. package/out/zero-cache/src/types/subscription.js.map +1 -1
  134. package/out/zero-client/src/client/http-string.js.map +1 -1
  135. package/out/zero-client/src/client/version.js +1 -1
  136. package/out/zero-client/src/client/zero.js.map +1 -1
  137. package/out/zero-events/src/status.d.ts +8 -0
  138. package/out/zero-events/src/status.d.ts.map +1 -1
  139. package/out/zero-schema/src/permissions.d.ts.map +1 -1
  140. package/out/zero-schema/src/permissions.js +4 -1
  141. package/out/zero-schema/src/permissions.js.map +1 -1
  142. package/out/zero-server/src/process-mutations.d.ts.map +1 -1
  143. package/out/zero-server/src/process-mutations.js +13 -19
  144. package/out/zero-server/src/process-mutations.js.map +1 -1
  145. package/out/zql/src/builder/filter.d.ts.map +1 -1
  146. package/out/zql/src/builder/filter.js +5 -2
  147. package/out/zql/src/builder/filter.js.map +1 -1
  148. package/out/zql/src/ivm/constraint.js.map +1 -1
  149. package/package.json +1 -1
package/out/analyze-query/src/run-ast.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"run-ast.d.ts","sourceRoot":"","sources":["../../../../analyze-query/src/run-ast.ts"],"names":[],"mappings":"…"}
+ {"version":3,"file":"run-ast.d.ts","sourceRoot":"","sources":["../../../../analyze-query/src/run-ast.ts"],"names":[],"mappings":"…"}
  (single-line declaration map; the old and new "mappings" strings are omitted here, as they differ only in the final VLQ segment)
package/out/analyze-query/src/run-ast.js
@@ -56,7 +56,10 @@ async function runAst(lc, clientSchema, ast, isTransformed, options) {
    if (rowChange === "yield") {
      continue;
    }
-   assert(rowChange.type === "add");
+   assert(
+     rowChange.type === "add",
+     () => `Expected rowChange type 'add', got '${rowChange.type}'`
+   );
    let rows = rowsByTable[rowChange.table];
    const s = rowChange.table + "." + JSON.stringify(rowChange.row);
    if (seenByTable.has(s)) {
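The shared/src/asserts entries in the file list above (+1 -1 each) track this change: assert now carries an optional message, passed either eagerly as a string or lazily as a thunk, matching the two call shapes in these hunks. A minimal sketch of such a helper follows, assuming (not confirmed from the package) this is roughly what shared/src/asserts.ts exposes.

// Minimal sketch of an assert helper compatible with both call shapes in
// this diff: a plain string (node.js hunks below) and a lazy thunk
// (run-ast.js hunk above). The real shared/src/asserts.ts may differ.
function assert(
  condition: unknown,
  message: string | (() => string) = 'Assertion failed',
): asserts condition {
  if (!condition) {
    // Resolve the message lazily so callers only pay for string
    // interpolation when the assertion actually fails.
    throw new Error(typeof message === 'function' ? message() : message);
  }
}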
package/out/analyze-query/src/run-ast.js.map
@@ -1 +1 @@
- {"version":3,"file":"run-ast.js","sources":["../../../../analyze-query/src/run-ast.ts"],"sourcesContent":["…"],"names":[],"mappings":"…"}
+ {"version":3,"file":"run-ast.js","sources":["../../../../analyze-query/src/run-ast.ts"],"sourcesContent":["…"],"names":[],"mappings":"…"}
  (single-line source map; the embedded TypeScript source and the mappings are omitted here; they change only to pick up the new assert message shown in the run-ast.js hunk above)
package/out/replicache/src/btree/node.js
@@ -46,7 +46,7 @@ function parseBTreeNode(v, formatVersion, getSizeOfEntry) {
  }
  assertArray(v);
  assertDeepFrozen(v);
- assert(v.length >= 2);
+ assert(v.length >= 2, "Expected node array to have at least 2 elements");
  const [level, entries] = v;
  assertNumber(level);
  assertArray(entries);
@@ -62,14 +62,14 @@ function parseBTreeNode(v, formatVersion, getSizeOfEntry) {
  }
  function assertEntry(entry, f) {
    assertArray(entry);
-   assert(entry.length >= 3);
+   assert(entry.length >= 3, "Expected entry array to have at least 3 elements");
    assertString(entry[0]);
    f(entry[1]);
    assertNumber(entry[2]);
  }
  function convertNonV7Entry(entry, f, getSizeOfEntry) {
    assertArray(entry);
-   assert(entry.length >= 2);
+   assert(entry.length >= 2, "Expected entry array to have at least 2 elements");
    assertString(entry[0]);
    f(entry[1]);
    const entrySize = getSizeOfEntry(entry[0], entry[1]);
@@ -310,7 +310,7 @@ class InternalNodeImpl extends NodeImpl {
    }
    return new InternalNodeImpl(entries2, newRandomHash(), level - 1, true);
  }
- assert(level === 1);
+ assert(level === 1, "Expected level to be 1");
  const entries = [];
  for (const child of output) {
    entries.push(...child.entries);
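Note the split in message styles: the node.js hunks pass constant strings, while the run-ast.js hunk wraps an interpolated message in a thunk. A hypothetical usage sketch (the names below are invented for illustration) of why the thunk form suits hot paths:

import {assert} from '../../shared/src/asserts.ts'; // relative path style used inside this package

type RowChange = {type: string; table: string};

// Hypothetical hot loop: the thunk builds the interpolated message only
// on the failure path, so passing asserts cost no string allocation.
function checkChanges(changes: RowChange[]): void {
  for (const change of changes) {
    assert(
      change.type === 'add',
      () => `Expected rowChange type 'add', got '${change.type}'`,
    );
  }
}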
package/out/replicache/src/btree/node.js.map
@@ -1 +1 @@
- {"version":3,"file":"node.js","sources":["../../../../../replicache/src/btree/node.ts"],"sourcesContent":["…"],"names":["…"],"mappings":"…"}
+ {"version":3,"file":"node.js","sources":["../../../../../replicache/src/btree/node.ts"],"sourcesContent":["…"],"names":["…"],"mappings":"…"}
  (single-line source map; the embedded TypeScript source and the mappings are omitted here; they change only to pick up the four new assert messages shown in the node.js hunks above)
tree.newDataNodeImpl(entries);\n }\n\n del(key: string, tree: BTreeWrite): Promise<DataNodeImpl> {\n const i = binarySearch(key, this.entries);\n if (!binarySearchFound(i, this.entries, key)) {\n // Not found. Return this without changes.\n return Promise.resolve(this);\n }\n\n // Found. Create new node or mutate existing one.\n return Promise.resolve(this.#splice(tree, i, 1));\n }\n\n async *keys(_tree: BTreeRead): AsyncGenerator<string, void> {\n for (const entry of this.entries) {\n yield entry[0];\n }\n }\n\n async *entriesIter(\n _tree: BTreeRead,\n ): AsyncGenerator<Entry<FrozenJSONValue>, void> {\n for (const entry of this.entries) {\n yield entry;\n }\n }\n}\n\nfunction readonlySplice<T>(\n array: ReadonlyArray<T>,\n start: number,\n deleteCount: number,\n ...items: T[]\n): T[] {\n const arr = array.slice(0, start);\n for (let i = 0; i < items.length; i++) {\n arr.push(items[i]);\n }\n for (let i = start + deleteCount; i < array.length; i++) {\n arr.push(array[i]);\n }\n return arr;\n}\n\nexport class InternalNodeImpl extends NodeImpl<Hash> {\n readonly level: number;\n\n constructor(\n entries: Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n ) {\n super(entries, hash, isMutable);\n this.level = level;\n }\n\n async set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<InternalNodeImpl> {\n let i = binarySearch(key, this.entries);\n if (i === this.entries.length) {\n // We are going to insert into last (right most) leaf.\n i--;\n }\n\n const childHash = this.entries[i][1];\n const oldChildNode = await tree.getNode(childHash);\n\n const childNode = await oldChildNode.set(key, value, entrySize, tree);\n\n const childNodeSize = childNode.getChildNodeSize(tree);\n if (childNodeSize > tree.maxSize || childNodeSize < tree.minSize) {\n return this.#mergeAndPartition(tree, i, childNode);\n }\n\n const newEntry = createNewInternalEntryForNode(\n childNode,\n tree.getEntrySize,\n );\n return this.#replaceChild(tree, i, newEntry);\n }\n\n /**\n * This merges the child node entries with previous or next sibling and then\n * partitions the merged entries.\n */\n async #mergeAndPartition(\n tree: BTreeWrite,\n i: number,\n childNode: DataNodeImpl | InternalNodeImpl,\n ): Promise<InternalNodeImpl> {\n const level = this.level - 1;\n const thisEntries = this.entries;\n\n type IterableHashEntries = Iterable<Entry<Hash>>;\n\n let values: IterableHashEntries;\n let startIndex: number;\n let removeCount: number;\n if (i > 0) {\n const hash = thisEntries[i - 1][1];\n const previousSibling = await tree.getNode(hash);\n values = joinIterables(\n previousSibling.entries as IterableHashEntries,\n childNode.entries as IterableHashEntries,\n );\n startIndex = i - 1;\n removeCount = 2;\n } else if (i < thisEntries.length - 1) {\n const hash = thisEntries[i + 1][1];\n const nextSibling = await tree.getNode(hash);\n values = joinIterables(\n childNode.entries as IterableHashEntries,\n nextSibling.entries as IterableHashEntries,\n );\n startIndex = i;\n removeCount = 2;\n } else {\n values = childNode.entries as IterableHashEntries;\n startIndex = i;\n removeCount = 1;\n }\n\n const partitions = partition(\n values,\n value => value[2],\n tree.minSize - tree.chunkHeaderSize,\n tree.maxSize - tree.chunkHeaderSize,\n );\n\n // TODO: There are cases where we can reuse the old nodes. 
Creating new ones\n // means more memory churn but also more writes to the underlying KV store.\n const newEntries: Entry<Hash>[] = [];\n for (const entries of partitions) {\n const node = tree.newNodeImpl(entries, level);\n const newHashEntry = createNewInternalEntryForNode(\n node,\n tree.getEntrySize,\n );\n newEntries.push(newHashEntry);\n }\n\n if (this.isMutable) {\n this.entries.splice(startIndex, removeCount, ...newEntries);\n this._updateNode(tree);\n return this;\n }\n\n const entries = readonlySplice(\n thisEntries,\n startIndex,\n removeCount,\n ...newEntries,\n );\n\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n #replaceChild(\n tree: BTreeWrite,\n index: number,\n newEntry: Entry<Hash>,\n ): InternalNodeImpl {\n if (this.isMutable) {\n this.entries.splice(index, 1, newEntry);\n this._updateNode(tree);\n return this;\n }\n const entries = readonlySplice(this.entries, index, 1, newEntry);\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n async del(\n key: string,\n tree: BTreeWrite,\n ): Promise<InternalNodeImpl | DataNodeImpl> {\n const i = binarySearch(key, this.entries);\n if (i === this.entries.length) {\n // Key is larger than maxKey of rightmost entry so it is not present.\n return this;\n }\n\n const childHash = this.entries[i][1];\n const oldChildNode = await tree.getNode(childHash);\n const oldHash = oldChildNode.hash;\n\n const childNode = await oldChildNode.del(key, tree);\n if (childNode.hash === oldHash) {\n // Not changed so not found.\n return this;\n }\n\n if (childNode.entries.length === 0) {\n // Subtree is now empty. Remove internal node.\n const entries = readonlySplice(this.entries, i, 1);\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n if (i === 0 && this.entries.length === 1) {\n // There was only one node at this level and it was removed. 
We can return\n // the modified subtree.\n return childNode;\n }\n\n // The child node is still a good size.\n if (childNode.getChildNodeSize(tree) > tree.minSize) {\n // No merging needed.\n const entry = createNewInternalEntryForNode(childNode, tree.getEntrySize);\n return this.#replaceChild(tree, i, entry);\n }\n\n // Child node size is too small.\n return this.#mergeAndPartition(tree, i, childNode);\n }\n\n async *keys(tree: BTreeRead): AsyncGenerator<string, void> {\n for (const entry of this.entries) {\n const childNode = await tree.getNode(entry[1]);\n yield* childNode.keys(tree);\n }\n }\n\n async *entriesIter(\n tree: BTreeRead,\n ): AsyncGenerator<Entry<FrozenJSONValue>, void> {\n for (const entry of this.entries) {\n const childNode = await tree.getNode(entry[1]);\n yield* childNode.entriesIter(tree);\n }\n }\n\n getChildren(\n start: number,\n length: number,\n tree: BTreeRead,\n ): Promise<Array<InternalNodeImpl | DataNodeImpl>> {\n const ps: Promise<DataNodeImpl | InternalNodeImpl>[] = [];\n for (let i = start; i < length && i < this.entries.length; i++) {\n ps.push(tree.getNode(this.entries[i][1]));\n }\n return Promise.all(ps);\n }\n\n async getCompositeChildren(\n start: number,\n length: number,\n tree: BTreeRead,\n ): Promise<InternalNodeImpl | DataNodeImpl> {\n const {level} = this;\n\n if (length === 0) {\n return new InternalNodeImpl([], newRandomHash(), level - 1, true);\n }\n\n const output = await this.getChildren(start, start + length, tree);\n\n if (level > 1) {\n const entries: Entry<Hash>[] = [];\n for (const child of output as InternalNodeImpl[]) {\n entries.push(...child.entries);\n }\n return new InternalNodeImpl(entries, newRandomHash(), level - 1, true);\n }\n\n assert(level === 1, 'Expected level to be 1');\n const entries: Entry<FrozenJSONValue>[] = [];\n for (const child of output as DataNodeImpl[]) {\n entries.push(...child.entries);\n }\n return new DataNodeImpl(entries, newRandomHash(), true);\n }\n}\n\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): InternalNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>> | Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl | InternalNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>> | Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl | InternalNodeImpl {\n if (level === 0) {\n return new DataNodeImpl(\n entries as Entry<FrozenJSONValue>[],\n hash,\n isMutable,\n );\n }\n return new InternalNodeImpl(entries as Entry<Hash>[], hash, level, isMutable);\n}\n\nexport function isDataNodeImpl(\n node: DataNodeImpl | InternalNodeImpl,\n): node is DataNodeImpl {\n return node.level === 0;\n}\n\nexport function partition<T>(\n values: Iterable<T>,\n // This is the size of each Entry\n getSizeOfEntry: (v: T) => number,\n min: number,\n max: number,\n): T[][] {\n const partitions: T[][] = [];\n const sizes: number[] = [];\n let sum = 0;\n let accum: T[] = [];\n for (const value of values) {\n const size = getSizeOfEntry(value);\n if (size >= max) {\n if (accum.length > 0) {\n partitions.push(accum);\n sizes.push(sum);\n }\n partitions.push([value]);\n sizes.push(size);\n sum = 0;\n accum = [];\n } else if (sum + size >= min) {\n accum.push(value);\n partitions.push(accum);\n 
sizes.push(sum + size);\n sum = 0;\n accum = [];\n } else {\n sum += size;\n accum.push(value);\n }\n }\n\n if (sum > 0) {\n if (sizes.length > 0 && sum + sizes[sizes.length - 1] <= max) {\n partitions[partitions.length - 1].push(...accum);\n } else {\n partitions.push(accum);\n }\n }\n\n return partitions;\n}\n\nexport const emptyDataNode = makeNodeChunkData<ReadonlyJSONValue>(\n 0,\n [],\n FormatVersion.Latest,\n);\nexport const emptyDataNodeImpl = new DataNodeImpl([], emptyHash, false);\n\nexport function createNewInternalEntryForNode(\n node: NodeImpl<unknown>,\n getSizeOfEntry: <K, V>(k: K, v: V) => number,\n): [string, Hash, number] {\n const key = node.maxKey();\n const value = node.hash;\n const size = getSizeOfEntry(key, value);\n return [key, value, size];\n}\n"],"names":["FormatVersion.V7","binarySearchWithFunc","skipBTreeNodeAsserts","entries","FormatVersion.Latest"],"mappings":";;;;;;;;;AAgCO,MAAM,aAAa;AACnB,MAAM,eAAe;AAYrB,SAAS,kBACd,OACA,SACA,eACa;AACb,SAAO,WAAW;AAAA,IAChB;AAAA,IACC,iBAAiBA,KACd,UACA,QAAQ,IAAI,CAAA,MAAK,EAAE,MAAM,GAAG,CAAC,CAAC;AAAA,EAAA,CACnC;AACH;AAsEA,eAAsB,SACpB,KACA,MACA,QACA,kBACuB;AACvB,QAAM,OAAO,MAAM,OAAO,QAAQ,IAAI;AAEtC,MAAI,qBAAqB,OAAO,UAAU;AACxC,WAAO,SAAS,KAAK,OAAO,UAAU,QAAQ,OAAO,QAAQ;AAAA,EAC/D;AACA,MAAI,eAAe,IAAI,GAAG;AACxB,WAAO;AAAA,EACT;AACA,QAAM,EAAC,YAAW;AAClB,MAAI,IAAI,aAAa,KAAK,OAAO;AACjC,MAAI,MAAM,QAAQ,QAAQ;AACxB;AAAA,EACF;AACA,QAAM,QAAQ,QAAQ,CAAC;AACvB,SAAO,SAAS,KAAK,MAAM,CAAC,GAAG,QAAQ,gBAAgB;AACzD;AAYO,SAAS,aACd,KACA,SACQ;AACR,SAAOC;AAAAA,IAAqB,QAAQ;AAAA,IAAQ,OAC1C,YAAY,KAAK,QAAQ,CAAC,EAAE,CAAC,CAAC;AAAA,EAAA;AAElC;AAEO,SAAS,kBACd,GACA,SACA,KACS;AACT,SAAO,MAAM,QAAQ,UAAU,QAAQ,CAAC,EAAE,CAAC,MAAM;AACnD;AAEO,SAAS,eACd,GACA,eACA,gBACyB;AACzB,MAAIC,UAAwB,iBAAiBF,IAAkB;AAC7D,WAAO;AAAA,EACT;AAEA,cAAY,CAAC;AACb,mBAAiB,CAAC;AAElB,SAAO,EAAE,UAAU,GAAG,iDAAiD;AACvE,QAAM,CAAC,OAAO,OAAO,IAAI;AACzB,eAAa,KAAK;AAClB,cAAY,OAAO;AAEnB,QAAM,IAAI,QAAQ,IAAI,eAAe;AAGrC,MAAI,iBAAiBA,IAAkB;AACrC,eAAW,KAAK,SAAS;AACvB,kBAAY,GAAG,CAAC;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,QAAQ,IAAI,CAAA,MAAK,kBAAkB,GAAG,GAAG,cAAc,CAAC;AAC3E,SAAO,CAAC,OAAO,UAAU;AAC3B;AAEA,SAAS,YACP,OACA,GAG0C;AAC1C,cAAY,KAAK;AAEjB,SAAO,MAAM,UAAU,GAAG,kDAAkD;AAC5E,eAAa,MAAM,CAAC,CAAC;AACrB,IAAE,MAAM,CAAC,CAAC;AACV,eAAa,MAAM,CAAC,CAAC;AACvB;AAMA,SAAS,kBACP,OACA,GAGA,gBACyB;AACzB,cAAY,KAAK;AACjB,SAAO,MAAM,UAAU,GAAG,kDAAkD;AAC5E,eAAa,MAAM,CAAC,CAAC;AACrB,IAAE,MAAM,CAAC,CAAC;AACV,QAAM,YAAY,eAAe,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC;AACnD,SAAO,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,GAAG,SAAS;AACvC;AAMA,MAAe,SAAgB;AAAA,EAC7B;AAAA,EACA;AAAA,EAES;AAAA,EAET,iBAAiB;AAAA,EAEjB,YAAY,SAA8B,MAAY,WAAoB;AACxE,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,YAAY;AAAA,EACnB;AAAA,EAcA,SAAiB;AACf,WAAO,KAAK,QAAQ,KAAK,QAAQ,SAAS,CAAC,EAAE,CAAC;AAAA,EAChD;AAAA,EAEA,iBAAiB,MAAyB;AACxC,QAAI,KAAK,mBAAmB,IAAI;AAC9B,aAAO,KAAK;AAAA,IACd;AAEA,QAAI,MAAM,KAAK;AACf,eAAW,SAAS,KAAK,SAAS;AAChC,aAAO,MAAM,CAAC;AAAA,IAChB;AACA,WAAQ,KAAK,iBAAiB;AAAA,EAChC;AAAA,EAEU,YAAY,MAAkB;AACtC,SAAK,iBAAiB;AACtB,SAAK;AAAA,MACH;AAAA,IAAA;AAAA,EAEJ;AACF;AAEO,SAAS,YACd,MACA,eACa;AACb,SAAO,kBAAkB,KAAK,OAAO,KAAK,SAAS,aAAa;AAClE;AAEO,MAAM,qBAAqB,SAA0B;AAAA,EACjD,QAAQ;AAAA,EAEjB,IACE,KACA,OACA,WACA,MACuB;AACvB,QAAI;AACJ,UAAM,IAAI,aAAa,KAAK,KAAK,OAAO;AACxC,QAAI,CAAC,kBAAkB,GAAG,KAAK,SAAS,GAAG,GAAG;AAE5C,oBAAc;AAAA,IAChB,OAAO;AACL,oBAAc;AAAA,IAChB;AAEA,WAAO,QAAQ;AAAA,MACb,KAAK,QAAQ,MAAM,GAAG,aAAa,CAAC,KAAK,OAAO,SAAS,CAAC;AAAA,IAAA;AAAA,EAE9D;AAAA,EAEA,QACE,MACA,OACA,gBACG,OACW;AACd,QAAI,KAAK,WAAW;AAClB,WAAK,QAAQ,OAAO,OAAO,aAAa,GAAG,KAAK;AAChD,WAAK,YAAY,IAAI;AACrB,aAAO;AAAA,IACT;AAEA,
UAAM,UAAU,eAAe,KAAK,SAAS,OAAO,aAAa,GAAG,KAAK;AACzE,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,IAAI,KAAa,MAAyC;AACxD,UAAM,IAAI,aAAa,KAAK,KAAK,OAAO;AACxC,QAAI,CAAC,kBAAkB,GAAG,KAAK,SAAS,GAAG,GAAG;AAE5C,aAAO,QAAQ,QAAQ,IAAI;AAAA,IAC7B;AAGA,WAAO,QAAQ,QAAQ,KAAK,QAAQ,MAAM,GAAG,CAAC,CAAC;AAAA,EACjD;AAAA,EAEA,OAAO,KAAK,OAAgD;AAC1D,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,MAAM,CAAC;AAAA,IACf;AAAA,EACF;AAAA,EAEA,OAAO,YACL,OAC8C;AAC9C,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,SAAS,eACP,OACA,OACA,gBACG,OACE;AACL,QAAM,MAAM,MAAM,MAAM,GAAG,KAAK;AAChC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI,KAAK,MAAM,CAAC,CAAC;AAAA,EACnB;AACA,WAAS,IAAI,QAAQ,aAAa,IAAI,MAAM,QAAQ,KAAK;AACvD,QAAI,KAAK,MAAM,CAAC,CAAC;AAAA,EACnB;AACA,SAAO;AACT;AAEO,MAAM,yBAAyB,SAAe;AAAA,EAC1C;AAAA,EAET,YACE,SACA,MACA,OACA,WACA;AACA,UAAM,SAAS,MAAM,SAAS;AAC9B,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,MAAM,IACJ,KACA,OACA,WACA,MAC2B;AAC3B,QAAI,IAAI,aAAa,KAAK,KAAK,OAAO;AACtC,QAAI,MAAM,KAAK,QAAQ,QAAQ;AAE7B;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,QAAQ,CAAC,EAAE,CAAC;AACnC,UAAM,eAAe,MAAM,KAAK,QAAQ,SAAS;AAEjD,UAAM,YAAY,MAAM,aAAa,IAAI,KAAK,OAAO,WAAW,IAAI;AAEpE,UAAM,gBAAgB,UAAU,iBAAiB,IAAI;AACrD,QAAI,gBAAgB,KAAK,WAAW,gBAAgB,KAAK,SAAS;AAChE,aAAO,KAAK,mBAAmB,MAAM,GAAG,SAAS;AAAA,IACnD;AAEA,UAAM,WAAW;AAAA,MACf;AAAA,MACA,KAAK;AAAA,IAAA;AAEP,WAAO,KAAK,cAAc,MAAM,GAAG,QAAQ;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,mBACJ,MACA,GACA,WAC2B;AAC3B,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,cAAc,KAAK;AAIzB,QAAI;AACJ,QAAI;AACJ,QAAI;AACJ,QAAI,IAAI,GAAG;AACT,YAAM,OAAO,YAAY,IAAI,CAAC,EAAE,CAAC;AACjC,YAAM,kBAAkB,MAAM,KAAK,QAAQ,IAAI;AAC/C,eAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,UAAU;AAAA,MAAA;AAEZ,mBAAa,IAAI;AACjB,oBAAc;AAAA,IAChB,WAAW,IAAI,YAAY,SAAS,GAAG;AACrC,YAAM,OAAO,YAAY,IAAI,CAAC,EAAE,CAAC;AACjC,YAAM,cAAc,MAAM,KAAK,QAAQ,IAAI;AAC3C,eAAS;AAAA,QACP,UAAU;AAAA,QACV,YAAY;AAAA,MAAA;AAEd,mBAAa;AACb,oBAAc;AAAA,IAChB,OAAO;AACL,eAAS,UAAU;AACnB,mBAAa;AACb,oBAAc;AAAA,IAChB;AAEA,UAAM,aAAa;AAAA,MACjB;AAAA,MACA,CAAA,UAAS,MAAM,CAAC;AAAA,MAChB,KAAK,UAAU,KAAK;AAAA,MACpB,KAAK,UAAU,KAAK;AAAA,IAAA;AAKtB,UAAM,aAA4B,CAAA;AAClC,eAAWG,YAAW,YAAY;AAChC,YAAM,OAAO,KAAK,YAAYA,UAAS,KAAK;AAC5C,YAAM,eAAe;AAAA,QACnB;AAAA,QACA,KAAK;AAAA,MAAA;AAEP,iBAAW,KAAK,YAAY;AAAA,IAC9B;AAEA,QAAI,KAAK,WAAW;AAClB,WAAK,QAAQ,OAAO,YAAY,aAAa,GAAG,UAAU;AAC1D,WAAK,YAAY,IAAI;AACrB,aAAO;AAAA,IACT;AAEA,UAAM,UAAU;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IAAA;AAGL,WAAO,KAAK,oBAAoB,SAAS,KAAK,KAAK;AAAA,EACrD;AAAA,EAEA,cACE,MACA,OACA,UACkB;AAClB,QAAI,KAAK,WAAW;AAClB,WAAK,QAAQ,OAAO,OAAO,GAAG,QAAQ;AACtC,WAAK,YAAY,IAAI;AACrB,aAAO;AAAA,IACT;AACA,UAAM,UAAU,eAAe,KAAK,SAAS,OAAO,GAAG,QAAQ;AAC/D,WAAO,KAAK,oBAAoB,SAAS,KAAK,KAAK;AAAA,EACrD;AAAA,EAEA,MAAM,IACJ,KACA,MAC0C;AAC1C,UAAM,IAAI,aAAa,KAAK,KAAK,OAAO;AACxC,QAAI,MAAM,KAAK,QAAQ,QAAQ;AAE7B,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,KAAK,QAAQ,CAAC,EAAE,CAAC;AACnC,UAAM,eAAe,MAAM,KAAK,QAAQ,SAAS;AACjD,UAAM,UAAU,aAAa;AAE7B,UAAM,YAAY,MAAM,aAAa,IAAI,KAAK,IAAI;AAClD,QAAI,UAAU,SAAS,SAAS;AAE9B,aAAO;AAAA,IACT;AAEA,QAAI,UAAU,QAAQ,WAAW,GAAG;AAElC,YAAM,UAAU,eAAe,KAAK,SAAS,GAAG,CAAC;AACjD,aAAO,KAAK,oBAAoB,SAAS,KAAK,KAAK;AAAA,IACrD;AAEA,QAAI,MAAM,KAAK,KAAK,QAAQ,WAAW,GAAG;AAGxC,aAAO;AAAA,IACT;AAGA,QAAI,UAAU,iBAAiB,IAAI,IAAI,KAAK,SAAS;AAEnD,YAAM,QAAQ,8BAA8B,WAAW,KAAK,YAAY;AACxE,aAAO,KAAK,cAAc,MAAM,GAAG,KAAK;AAAA,IAC1C;AAGA,WAAO,KAAK,mBAAmB,MAAM,GAAG,SAAS;AAAA,EACnD;AAAA,EAEA,OAAO,KAAK,MAA+C;AACzD,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,YAAY,MAAM,KAAK,QAAQ,MAAM,CAAC,CAAC;AAC7C,aAAO,UAAU,KAAK,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA,EAEA,OAAO,YACL,MAC8C;AAC9C,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,YAAY,MAAM,KAAK,QAAQ,MAAM,CAAC,CAAC;AAC7C,aAAO,UAAU,YAAY,IAAI;AAAA,IAC
nC;AAAA,EACF;AAAA,EAEA,YACE,OACA,QACA,MACiD;AACjD,UAAM,KAAiD,CAAA;AACvD,aAAS,IAAI,OAAO,IAAI,UAAU,IAAI,KAAK,QAAQ,QAAQ,KAAK;AAC9D,SAAG,KAAK,KAAK,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;AAAA,IAC1C;AACA,WAAO,QAAQ,IAAI,EAAE;AAAA,EACvB;AAAA,EAEA,MAAM,qBACJ,OACA,QACA,MAC0C;AAC1C,UAAM,EAAC,UAAS;AAEhB,QAAI,WAAW,GAAG;AAChB,aAAO,IAAI,iBAAiB,IAAI,iBAAiB,QAAQ,GAAG,IAAI;AAAA,IAClE;AAEA,UAAM,SAAS,MAAM,KAAK,YAAY,OAAO,QAAQ,QAAQ,IAAI;AAEjE,QAAI,QAAQ,GAAG;AACb,YAAMA,WAAyB,CAAA;AAC/B,iBAAW,SAAS,QAA8B;AAChDA,iBAAQ,KAAK,GAAG,MAAM,OAAO;AAAA,MAC/B;AACA,aAAO,IAAI,iBAAiBA,UAAS,iBAAiB,QAAQ,GAAG,IAAI;AAAA,IACvE;AAEA,WAAO,UAAU,GAAG,wBAAwB;AAC5C,UAAM,UAAoC,CAAA;AAC1C,eAAW,SAAS,QAA0B;AAC5C,cAAQ,KAAK,GAAG,MAAM,OAAO;AAAA,IAC/B;AACA,WAAO,IAAI,aAAa,SAAS,cAAA,GAAiB,IAAI;AAAA,EACxD;AACF;AAoBO,SAAS,YACd,SACA,MACA,OACA,WACiC;AACjC,MAAI,UAAU,GAAG;AACf,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AACA,SAAO,IAAI,iBAAiB,SAA0B,MAAM,OAAO,SAAS;AAC9E;AAEO,SAAS,eACd,MACsB;AACtB,SAAO,KAAK,UAAU;AACxB;AAEO,SAAS,UACd,QAEA,gBACA,KACA,KACO;AACP,QAAM,aAAoB,CAAA;AAC1B,QAAM,QAAkB,CAAA;AACxB,MAAI,MAAM;AACV,MAAI,QAAa,CAAA;AACjB,aAAW,SAAS,QAAQ;AAC1B,UAAM,OAAO,eAAe,KAAK;AACjC,QAAI,QAAQ,KAAK;AACf,UAAI,MAAM,SAAS,GAAG;AACpB,mBAAW,KAAK,KAAK;AACrB,cAAM,KAAK,GAAG;AAAA,MAChB;AACA,iBAAW,KAAK,CAAC,KAAK,CAAC;AACvB,YAAM,KAAK,IAAI;AACf,YAAM;AACN,cAAQ,CAAA;AAAA,IACV,WAAW,MAAM,QAAQ,KAAK;AAC5B,YAAM,KAAK,KAAK;AAChB,iBAAW,KAAK,KAAK;AACrB,YAAM,KAAK,MAAM,IAAI;AACrB,YAAM;AACN,cAAQ,CAAA;AAAA,IACV,OAAO;AACL,aAAO;AACP,YAAM,KAAK,KAAK;AAAA,IAClB;AAAA,EACF;AAEA,MAAI,MAAM,GAAG;AACX,QAAI,MAAM,SAAS,KAAK,MAAM,MAAM,MAAM,SAAS,CAAC,KAAK,KAAK;AAC5D,iBAAW,WAAW,SAAS,CAAC,EAAE,KAAK,GAAG,KAAK;AAAA,IACjD,OAAO;AACL,iBAAW,KAAK,KAAK;AAAA,IACvB;AAAA,EACF;AAEA,SAAO;AACT;AAEO,MAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA,CAAA;AAAA,EACAC;AACF;AACO,MAAM,oBAAoB,IAAI,aAAa,CAAA,GAAI,WAAW,KAAK;AAE/D,SAAS,8BACd,MACA,gBACwB;AACxB,QAAM,MAAM,KAAK,OAAA;AACjB,QAAM,QAAQ,KAAK;AACnB,QAAM,OAAO,eAAe,KAAK,KAAK;AACtC,SAAO,CAAC,KAAK,OAAO,IAAI;AAC1B;"}
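Note: the node.ts source embedded in the map above includes the `partition` helper that groups B+Tree entries into chunks between a minimum and maximum byte size. The following is a standalone sketch; the helper is restated here only so the example runs on its own, and node.ts remains the authoritative version.

```ts
// Standalone restatement of the partition helper shown in node.ts above.
function partition<T>(
  values: Iterable<T>,
  getSize: (v: T) => number,
  min: number,
  max: number,
): T[][] {
  const partitions: T[][] = [];
  const sizes: number[] = [];
  let sum = 0;
  let accum: T[] = [];
  for (const value of values) {
    const size = getSize(value);
    if (size >= max) {
      // Oversized entries get a partition of their own.
      if (accum.length > 0) {
        partitions.push(accum);
        sizes.push(sum);
      }
      partitions.push([value]);
      sizes.push(size);
      sum = 0;
      accum = [];
    } else if (sum + size >= min) {
      // Reached the minimum chunk size: flush the accumulator.
      accum.push(value);
      partitions.push(accum);
      sizes.push(sum + size);
      sum = 0;
      accum = [];
    } else {
      sum += size;
      accum.push(value);
    }
  }
  if (sum > 0) {
    // Fold the trailing remainder into the last partition when it still fits.
    if (sizes.length > 0 && sum + sizes[sizes.length - 1] <= max) {
      partitions[partitions.length - 1].push(...accum);
    } else {
      partitions.push(accum);
    }
  }
  return partitions;
}

// With min = 8 and max = 16, sizes [4, 4, 4, 10, 2] group as
// [[4, 4], [4, 10, 2]]: the trailing 2 folds into the last chunk (14 + 2 <= 16).
console.log(partition([4, 4, 4, 10, 2], s => s, 8, 16));
```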
@@ -32,12 +32,12 @@ class BTreeWrite extends BTreeRead {
  this.maxSize = maxSize;
  }
  #addToModified(node) {
- assert(node.isMutable);
+ assert(node.isMutable, "Expected node to be mutable");
  this.#modified.set(node.hash, node);
  this._cache.set(node.hash, node);
  }
  updateNode(node) {
- assert(node.isMutable);
+ assert(node.isMutable, "Expected node to be mutable");
  this.#modified.delete(node.hash);
  node.hash = newRandomHash();
  this.#addToModified(node);
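Note: the only change in write.js is that both `assert` calls now carry a message. Assuming the helper in shared/src/asserts.ts follows the usual asserts-function shape (its exact signature is in that file's own diff), the effect is a more descriptive failure:

```ts
// Hedged sketch of an assert helper with a message parameter; the real
// implementation lives in shared/src/asserts.ts and may differ.
function assert(b: unknown, msg = 'Assertion failed'): asserts b {
  if (!b) {
    throw new Error(msg);
  }
}

declare const node: {isMutable: boolean};

// Before this diff: a failure surfaced only as a generic "Assertion failed".
assert(node.isMutable);
// After: the thrown error names the violated invariant.
assert(node.isMutable, 'Expected node to be mutable');
```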
@@ -1 +1 @@
- {"version":3,"file":"write.js","sources":["../../../../../replicache/src/btree/write.ts"],"sourcesContent":["import {Lock} from '@rocicorp/lock';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {getSizeOfEntry} from '../../../shared/src/size-of-value.ts';\nimport {type Chunk, type CreateChunk, toRefs} from '../dag/chunk.ts';\nimport type {Write} from '../dag/store.ts';\nimport type * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash, newRandomHash} from '../hash.ts';\nimport {\n DataNodeImpl,\n type Entry,\n InternalNodeImpl,\n createNewInternalEntryForNode,\n emptyDataNode,\n isDataNodeImpl,\n newNodeImpl,\n partition,\n toChunkData,\n} from './node.ts';\nimport {BTreeRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class BTreeWrite extends BTreeRead {\n /**\n * This rw lock is used to ensure we do not mutate the btree in parallel. It\n * would be a problem if we didn't have the lock in cases like this:\n *\n * ```ts\n * const p1 = tree.put('a', 0);\n * const p2 = tree.put('b', 1);\n * await p1;\n * await p2;\n * ```\n *\n * because both `p1` and `p2` would start from the old root hash but a put\n * changes the root hash so the two concurrent puts would lead to only one of\n * them actually working, and it is not deterministic which one would finish\n * last.\n */\n readonly #lock = new Lock();\n readonly #modified: Map<Hash, DataNodeImpl | InternalNodeImpl> = new Map();\n\n declare protected _dagRead: Write;\n\n readonly minSize: number;\n readonly maxSize: number;\n\n constructor(\n dagWrite: Write,\n formatVersion: FormatVersion,\n root: Hash = emptyHash,\n minSize = 8 * 1024,\n maxSize = 16 * 1024,\n getEntrySize: <K, V>(k: K, v: V) => number = getSizeOfEntry,\n chunkHeaderSize?: number,\n ) {\n super(dagWrite, formatVersion, root, getEntrySize, chunkHeaderSize);\n\n this.minSize = minSize;\n this.maxSize = maxSize;\n }\n\n #addToModified(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable);\n this.#modified.set(node.hash, node);\n this._cache.set(node.hash, node);\n }\n\n updateNode(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable);\n this.#modified.delete(node.hash);\n node.hash = newRandomHash();\n this.#addToModified(node);\n }\n\n newInternalNodeImpl(\n entries: Array<Entry<Hash>>,\n level: number,\n ): InternalNodeImpl {\n const n = new InternalNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n newDataNodeImpl(entries: Entry<FrozenJSONValue>[]): DataNodeImpl {\n const n = new DataNodeImpl(entries, newRandomHash(), true);\n this.#addToModified(n);\n return n;\n }\n\n newNodeImpl(entries: Entry<FrozenJSONValue>[], level: number): DataNodeImpl;\n newNodeImpl(entries: Entry<Hash>[], level: number): InternalNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl {\n const n = newNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n put(key: string, value: FrozenJSONValue): Promise<void> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const 
entrySize = this.getEntrySize(key, value);\n const rootNode = await oldRootNode.set(key, value, entrySize, this);\n\n // We do the rebalancing in the parent so we need to do it here as well.\n if (rootNode.getChildNodeSize(this) > this.maxSize) {\n const headerSize = this.chunkHeaderSize;\n const partitions = partition(\n rootNode.entries,\n value => value[2],\n this.minSize - headerSize,\n this.maxSize - headerSize,\n );\n const {level} = rootNode;\n const entries: Entry<Hash>[] = partitions.map(entries => {\n const node = this.newNodeImpl(entries, level);\n return createNewInternalEntryForNode(node, this.getEntrySize);\n });\n const newRoot = this.newInternalNodeImpl(entries, level + 1);\n this.rootHash = newRoot.hash;\n return;\n }\n\n this.rootHash = rootNode.hash;\n });\n }\n\n del(key: string): Promise<boolean> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const newRootNode = await oldRootNode.del(key, this);\n\n // No need to rebalance here since if root gets too small there is nothing\n // we can do about that.\n const found = this.rootHash !== newRootNode.hash;\n if (found) {\n // Flatten one layer.\n if (newRootNode.level > 0 && newRootNode.entries.length === 1) {\n this.rootHash = (newRootNode as InternalNodeImpl).entries[0][1];\n } else {\n this.rootHash = newRootNode.hash;\n }\n }\n\n return found;\n });\n }\n\n clear(): Promise<void> {\n return this.#lock.withLock(() => {\n this.#modified.clear();\n this.rootHash = emptyHash;\n });\n }\n\n flush(): Promise<Hash> {\n return this.#lock.withLock(async () => {\n const dagWrite = this._dagRead;\n\n if (this.rootHash === emptyHash) {\n // Write a chunk for the empty tree.\n const chunk = dagWrite.createChunk(emptyDataNode, []);\n await dagWrite.putChunk(chunk as Chunk<ReadonlyJSONValue>);\n return chunk.hash;\n }\n\n const newChunks: Chunk[] = [];\n const newRoot = gatherNewChunks(\n this.rootHash,\n newChunks,\n dagWrite.createChunk,\n this.#modified,\n this._formatVersion,\n );\n await Promise.all(newChunks.map(chunk => dagWrite.putChunk(chunk)));\n this.#modified.clear();\n this.rootHash = newRoot;\n return newRoot;\n });\n }\n}\n\nfunction gatherNewChunks(\n hash: Hash,\n newChunks: Chunk[],\n createChunk: CreateChunk,\n modified: Map<Hash, DataNodeImpl | InternalNodeImpl>,\n formatVersion: FormatVersion,\n): Hash {\n const node = modified.get(hash);\n if (node === undefined) {\n // Not modified, use the original.\n return hash;\n }\n\n if (isDataNodeImpl(node)) {\n const chunk = createChunk(toChunkData(node, formatVersion), []);\n newChunks.push(chunk);\n return chunk.hash;\n }\n\n // The BTree cannot have duplicate keys so the child entry hashes are unique.\n // No need fot a set to dedupe here.\n const refs: Hash[] = [];\n const {entries} = node;\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i];\n const childHash = entry[1];\n const newChildHash = gatherNewChunks(\n childHash,\n newChunks,\n createChunk,\n modified,\n formatVersion,\n );\n if (newChildHash !== childHash) {\n // MUTATES the entries!\n // Hashes do not change the size of the entry because all hashes have the same length\n entries[i] = [entry[0], newChildHash, entry[2]];\n }\n refs.push(newChildHash);\n }\n const chunk = createChunk(toChunkData(node, formatVersion), toRefs(refs));\n newChunks.push(chunk);\n return 
chunk.hash;\n}\n"],"names":["value","entries","chunk"],"mappings":";;;;;;;AAyBO,MAAM,mBAAmB,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiB/B,QAAQ,IAAI,KAAA;AAAA,EACZ,gCAA4D,IAAA;AAAA,EAI5D;AAAA,EACA;AAAA,EAET,YACE,UACA,eACA,OAAa,WACb,UAAU,IAAI,MACd,UAAU,KAAK,MACf,eAA6C,gBAC7C,iBACA;AACA,UAAM,UAAU,eAAe,MAAM,cAAc,eAAe;AAElE,SAAK,UAAU;AACf,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,eAAe,MAA6C;AAC1D,WAAO,KAAK,SAAS;AACrB,SAAK,UAAU,IAAI,KAAK,MAAM,IAAI;AAClC,SAAK,OAAO,IAAI,KAAK,MAAM,IAAI;AAAA,EACjC;AAAA,EAEA,WAAW,MAA6C;AACtD,WAAO,KAAK,SAAS;AACrB,SAAK,UAAU,OAAO,KAAK,IAAI;AAC/B,SAAK,OAAO,cAAA;AACZ,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,oBACE,SACA,OACkB;AAClB,UAAM,IAAI,IAAI,iBAAiB,SAAS,cAAA,GAAiB,OAAO,IAAI;AACpE,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,gBAAgB,SAAiD;AAC/D,UAAM,IAAI,IAAI,aAAa,SAAS,cAAA,GAAiB,IAAI;AACzD,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAQA,YACE,SACA,OACiC;AACjC,UAAM,IAAI,YAAY,SAAS,cAAA,GAAiB,OAAO,IAAI;AAC3D,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,KAAa,OAAuC;AACtD,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,cAAc,MAAM,KAAK,QAAQ,KAAK,QAAQ;AACpD,YAAM,YAAY,KAAK,aAAa,KAAK,KAAK;AAC9C,YAAM,WAAW,MAAM,YAAY,IAAI,KAAK,OAAO,WAAW,IAAI;AAGlE,UAAI,SAAS,iBAAiB,IAAI,IAAI,KAAK,SAAS;AAClD,cAAM,aAAa,KAAK;AACxB,cAAM,aAAa;AAAA,UACjB,SAAS;AAAA,UACT,CAAAA,WAASA,OAAM,CAAC;AAAA,UAChB,KAAK,UAAU;AAAA,UACf,KAAK,UAAU;AAAA,QAAA;AAEjB,cAAM,EAAC,UAAS;AAChB,cAAM,UAAyB,WAAW,IAAI,CAAAC,aAAW;AACvD,gBAAM,OAAO,KAAK,YAAYA,UAAS,KAAK;AAC5C,iBAAO,8BAA8B,MAAM,KAAK,YAAY;AAAA,QAC9D,CAAC;AACD,cAAM,UAAU,KAAK,oBAAoB,SAAS,QAAQ,CAAC;AAC3D,aAAK,WAAW,QAAQ;AACxB;AAAA,MACF;AAEA,WAAK,WAAW,SAAS;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,KAA+B;AACjC,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,cAAc,MAAM,KAAK,QAAQ,KAAK,QAAQ;AACpD,YAAM,cAAc,MAAM,YAAY,IAAI,KAAK,IAAI;AAInD,YAAM,QAAQ,KAAK,aAAa,YAAY;AAC5C,UAAI,OAAO;AAET,YAAI,YAAY,QAAQ,KAAK,YAAY,QAAQ,WAAW,GAAG;AAC7D,eAAK,WAAY,YAAiC,QAAQ,CAAC,EAAE,CAAC;AAAA,QAChE,OAAO;AACL,eAAK,WAAW,YAAY;AAAA,QAC9B;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,MAAM,SAAS,MAAM;AAC/B,WAAK,UAAU,MAAA;AACf,WAAK,WAAW;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,WAAW,KAAK;AAEtB,UAAI,KAAK,aAAa,WAAW;AAE/B,cAAM,QAAQ,SAAS,YAAY,eAAe,CAAA,CAAE;AACpD,cAAM,SAAS,SAAS,KAAiC;AACzD,eAAO,MAAM;AAAA,MACf;AAEA,YAAM,YAAqB,CAAA;AAC3B,YAAM,UAAU;AAAA,QACd,KAAK;AAAA,QACL;AAAA,QACA,SAAS;AAAA,QACT,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAEP,YAAM,QAAQ,IAAI,UAAU,IAAI,WAAS,SAAS,SAAS,KAAK,CAAC,CAAC;AAClE,WAAK,UAAU,MAAA;AACf,WAAK,WAAW;AAChB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEA,SAAS,gBACP,MACA,WACA,aACA,UACA,eACM;AACN,QAAM,OAAO,SAAS,IAAI,IAAI;AAC9B,MAAI,SAAS,QAAW;AAEtB,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,IAAI,GAAG;AACxB,UAAMC,SAAQ,YAAY,YAAY,MAAM,aAAa,GAAG,EAAE;AAC9D,cAAU,KAAKA,MAAK;AACpB,WAAOA,OAAM;AAAA,EACf;AAIA,QAAM,OAAe,CAAA;AACrB,QAAM,EAAC,YAAW;AAClB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,QAAQ,QAAQ,CAAC;AACvB,UAAM,YAAY,MAAM,CAAC;AACzB,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAEF,QAAI,iBAAiB,WAAW;AAG9B,cAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,cAAc,MAAM,CAAC,CAAC;AAAA,IAChD;AACA,SAAK,KAAK,YAAY;AAAA,EACxB;AACA,QAAM,QAAQ,YAAY,YAAY,MAAM,aAAa,GAAG,OAAO,IAAI,CAAC;AACxE,YAAU,KAAK,KAAK;AACpB,SAAO,MAAM;AACf;"}
+ {"version":3,"file":"write.js","sources":["../../../../../replicache/src/btree/write.ts"],"sourcesContent":["import {Lock} from '@rocicorp/lock';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {getSizeOfEntry} from '../../../shared/src/size-of-value.ts';\nimport {type Chunk, type CreateChunk, toRefs} from '../dag/chunk.ts';\nimport type {Write} from '../dag/store.ts';\nimport type * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash, newRandomHash} from '../hash.ts';\nimport {\n DataNodeImpl,\n type Entry,\n InternalNodeImpl,\n createNewInternalEntryForNode,\n emptyDataNode,\n isDataNodeImpl,\n newNodeImpl,\n partition,\n toChunkData,\n} from './node.ts';\nimport {BTreeRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class BTreeWrite extends BTreeRead {\n /**\n * This rw lock is used to ensure we do not mutate the btree in parallel. It\n * would be a problem if we didn't have the lock in cases like this:\n *\n * ```ts\n * const p1 = tree.put('a', 0);\n * const p2 = tree.put('b', 1);\n * await p1;\n * await p2;\n * ```\n *\n * because both `p1` and `p2` would start from the old root hash but a put\n * changes the root hash so the two concurrent puts would lead to only one of\n * them actually working, and it is not deterministic which one would finish\n * last.\n */\n readonly #lock = new Lock();\n readonly #modified: Map<Hash, DataNodeImpl | InternalNodeImpl> = new Map();\n\n declare protected _dagRead: Write;\n\n readonly minSize: number;\n readonly maxSize: number;\n\n constructor(\n dagWrite: Write,\n formatVersion: FormatVersion,\n root: Hash = emptyHash,\n minSize = 8 * 1024,\n maxSize = 16 * 1024,\n getEntrySize: <K, V>(k: K, v: V) => number = getSizeOfEntry,\n chunkHeaderSize?: number,\n ) {\n super(dagWrite, formatVersion, root, getEntrySize, chunkHeaderSize);\n\n this.minSize = minSize;\n this.maxSize = maxSize;\n }\n\n #addToModified(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable, 'Expected node to be mutable');\n this.#modified.set(node.hash, node);\n this._cache.set(node.hash, node);\n }\n\n updateNode(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable, 'Expected node to be mutable');\n this.#modified.delete(node.hash);\n node.hash = newRandomHash();\n this.#addToModified(node);\n }\n\n newInternalNodeImpl(\n entries: Array<Entry<Hash>>,\n level: number,\n ): InternalNodeImpl {\n const n = new InternalNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n newDataNodeImpl(entries: Entry<FrozenJSONValue>[]): DataNodeImpl {\n const n = new DataNodeImpl(entries, newRandomHash(), true);\n this.#addToModified(n);\n return n;\n }\n\n newNodeImpl(entries: Entry<FrozenJSONValue>[], level: number): DataNodeImpl;\n newNodeImpl(entries: Entry<Hash>[], level: number): InternalNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl {\n const n = newNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n put(key: string, value: FrozenJSONValue): Promise<void> {\n return this.#lock.withLock(async () => {\n const 
oldRootNode = await this.getNode(this.rootHash);\n const entrySize = this.getEntrySize(key, value);\n const rootNode = await oldRootNode.set(key, value, entrySize, this);\n\n // We do the rebalancing in the parent so we need to do it here as well.\n if (rootNode.getChildNodeSize(this) > this.maxSize) {\n const headerSize = this.chunkHeaderSize;\n const partitions = partition(\n rootNode.entries,\n value => value[2],\n this.minSize - headerSize,\n this.maxSize - headerSize,\n );\n const {level} = rootNode;\n const entries: Entry<Hash>[] = partitions.map(entries => {\n const node = this.newNodeImpl(entries, level);\n return createNewInternalEntryForNode(node, this.getEntrySize);\n });\n const newRoot = this.newInternalNodeImpl(entries, level + 1);\n this.rootHash = newRoot.hash;\n return;\n }\n\n this.rootHash = rootNode.hash;\n });\n }\n\n del(key: string): Promise<boolean> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const newRootNode = await oldRootNode.del(key, this);\n\n // No need to rebalance here since if root gets too small there is nothing\n // we can do about that.\n const found = this.rootHash !== newRootNode.hash;\n if (found) {\n // Flatten one layer.\n if (newRootNode.level > 0 && newRootNode.entries.length === 1) {\n this.rootHash = (newRootNode as InternalNodeImpl).entries[0][1];\n } else {\n this.rootHash = newRootNode.hash;\n }\n }\n\n return found;\n });\n }\n\n clear(): Promise<void> {\n return this.#lock.withLock(() => {\n this.#modified.clear();\n this.rootHash = emptyHash;\n });\n }\n\n flush(): Promise<Hash> {\n return this.#lock.withLock(async () => {\n const dagWrite = this._dagRead;\n\n if (this.rootHash === emptyHash) {\n // Write a chunk for the empty tree.\n const chunk = dagWrite.createChunk(emptyDataNode, []);\n await dagWrite.putChunk(chunk as Chunk<ReadonlyJSONValue>);\n return chunk.hash;\n }\n\n const newChunks: Chunk[] = [];\n const newRoot = gatherNewChunks(\n this.rootHash,\n newChunks,\n dagWrite.createChunk,\n this.#modified,\n this._formatVersion,\n );\n await Promise.all(newChunks.map(chunk => dagWrite.putChunk(chunk)));\n this.#modified.clear();\n this.rootHash = newRoot;\n return newRoot;\n });\n }\n}\n\nfunction gatherNewChunks(\n hash: Hash,\n newChunks: Chunk[],\n createChunk: CreateChunk,\n modified: Map<Hash, DataNodeImpl | InternalNodeImpl>,\n formatVersion: FormatVersion,\n): Hash {\n const node = modified.get(hash);\n if (node === undefined) {\n // Not modified, use the original.\n return hash;\n }\n\n if (isDataNodeImpl(node)) {\n const chunk = createChunk(toChunkData(node, formatVersion), []);\n newChunks.push(chunk);\n return chunk.hash;\n }\n\n // The BTree cannot have duplicate keys so the child entry hashes are unique.\n // No need fot a set to dedupe here.\n const refs: Hash[] = [];\n const {entries} = node;\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i];\n const childHash = entry[1];\n const newChildHash = gatherNewChunks(\n childHash,\n newChunks,\n createChunk,\n modified,\n formatVersion,\n );\n if (newChildHash !== childHash) {\n // MUTATES the entries!\n // Hashes do not change the size of the entry because all hashes have the same length\n entries[i] = [entry[0], newChildHash, entry[2]];\n }\n refs.push(newChildHash);\n }\n const chunk = createChunk(toChunkData(node, formatVersion), toRefs(refs));\n newChunks.push(chunk);\n return 
chunk.hash;\n}\n"],"names":["value","entries","chunk"],"mappings":";;;;;;;AAyBO,MAAM,mBAAmB,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiB/B,QAAQ,IAAI,KAAA;AAAA,EACZ,gCAA4D,IAAA;AAAA,EAI5D;AAAA,EACA;AAAA,EAET,YACE,UACA,eACA,OAAa,WACb,UAAU,IAAI,MACd,UAAU,KAAK,MACf,eAA6C,gBAC7C,iBACA;AACA,UAAM,UAAU,eAAe,MAAM,cAAc,eAAe;AAElE,SAAK,UAAU;AACf,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,eAAe,MAA6C;AAC1D,WAAO,KAAK,WAAW,6BAA6B;AACpD,SAAK,UAAU,IAAI,KAAK,MAAM,IAAI;AAClC,SAAK,OAAO,IAAI,KAAK,MAAM,IAAI;AAAA,EACjC;AAAA,EAEA,WAAW,MAA6C;AACtD,WAAO,KAAK,WAAW,6BAA6B;AACpD,SAAK,UAAU,OAAO,KAAK,IAAI;AAC/B,SAAK,OAAO,cAAA;AACZ,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,oBACE,SACA,OACkB;AAClB,UAAM,IAAI,IAAI,iBAAiB,SAAS,cAAA,GAAiB,OAAO,IAAI;AACpE,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,gBAAgB,SAAiD;AAC/D,UAAM,IAAI,IAAI,aAAa,SAAS,cAAA,GAAiB,IAAI;AACzD,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAQA,YACE,SACA,OACiC;AACjC,UAAM,IAAI,YAAY,SAAS,cAAA,GAAiB,OAAO,IAAI;AAC3D,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,KAAa,OAAuC;AACtD,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,cAAc,MAAM,KAAK,QAAQ,KAAK,QAAQ;AACpD,YAAM,YAAY,KAAK,aAAa,KAAK,KAAK;AAC9C,YAAM,WAAW,MAAM,YAAY,IAAI,KAAK,OAAO,WAAW,IAAI;AAGlE,UAAI,SAAS,iBAAiB,IAAI,IAAI,KAAK,SAAS;AAClD,cAAM,aAAa,KAAK;AACxB,cAAM,aAAa;AAAA,UACjB,SAAS;AAAA,UACT,CAAAA,WAASA,OAAM,CAAC;AAAA,UAChB,KAAK,UAAU;AAAA,UACf,KAAK,UAAU;AAAA,QAAA;AAEjB,cAAM,EAAC,UAAS;AAChB,cAAM,UAAyB,WAAW,IAAI,CAAAC,aAAW;AACvD,gBAAM,OAAO,KAAK,YAAYA,UAAS,KAAK;AAC5C,iBAAO,8BAA8B,MAAM,KAAK,YAAY;AAAA,QAC9D,CAAC;AACD,cAAM,UAAU,KAAK,oBAAoB,SAAS,QAAQ,CAAC;AAC3D,aAAK,WAAW,QAAQ;AACxB;AAAA,MACF;AAEA,WAAK,WAAW,SAAS;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,KAA+B;AACjC,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,cAAc,MAAM,KAAK,QAAQ,KAAK,QAAQ;AACpD,YAAM,cAAc,MAAM,YAAY,IAAI,KAAK,IAAI;AAInD,YAAM,QAAQ,KAAK,aAAa,YAAY;AAC5C,UAAI,OAAO;AAET,YAAI,YAAY,QAAQ,KAAK,YAAY,QAAQ,WAAW,GAAG;AAC7D,eAAK,WAAY,YAAiC,QAAQ,CAAC,EAAE,CAAC;AAAA,QAChE,OAAO;AACL,eAAK,WAAW,YAAY;AAAA,QAC9B;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,MAAM,SAAS,MAAM;AAC/B,WAAK,UAAU,MAAA;AACf,WAAK,WAAW;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,WAAW,KAAK;AAEtB,UAAI,KAAK,aAAa,WAAW;AAE/B,cAAM,QAAQ,SAAS,YAAY,eAAe,CAAA,CAAE;AACpD,cAAM,SAAS,SAAS,KAAiC;AACzD,eAAO,MAAM;AAAA,MACf;AAEA,YAAM,YAAqB,CAAA;AAC3B,YAAM,UAAU;AAAA,QACd,KAAK;AAAA,QACL;AAAA,QACA,SAAS;AAAA,QACT,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAEP,YAAM,QAAQ,IAAI,UAAU,IAAI,WAAS,SAAS,SAAS,KAAK,CAAC,CAAC;AAClE,WAAK,UAAU,MAAA;AACf,WAAK,WAAW;AAChB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEA,SAAS,gBACP,MACA,WACA,aACA,UACA,eACM;AACN,QAAM,OAAO,SAAS,IAAI,IAAI;AAC9B,MAAI,SAAS,QAAW;AAEtB,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,IAAI,GAAG;AACxB,UAAMC,SAAQ,YAAY,YAAY,MAAM,aAAa,GAAG,EAAE;AAC9D,cAAU,KAAKA,MAAK;AACpB,WAAOA,OAAM;AAAA,EACf;AAIA,QAAM,OAAe,CAAA;AACrB,QAAM,EAAC,YAAW;AAClB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,QAAQ,QAAQ,CAAC;AACvB,UAAM,YAAY,MAAM,CAAC;AACzB,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAEF,QAAI,iBAAiB,WAAW;AAG9B,cAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,cAAc,MAAM,CAAC,CAAC;AAAA,IAChD;AACA,SAAK,KAAK,YAAY;AAAA,EACxB;AACA,QAAM,QAAQ,YAAY,YAAY,MAAM,aAAa,GAAG,OAAO,IAAI,CAAC;AACxE,YAAU,KAAK,KAAK;AACpB,SAAO,MAAM;AACf;"}
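Note: the write.ts source embedded above documents why BTreeWrite wraps every mutation in a lock: two overlapping `put` calls would each read the old root hash and one update would be silently lost. A minimal sketch of that serialization, assuming a promise-queue lock similar in spirit to @rocicorp/lock (the names here are illustrative, not the library's API surface):

```ts
// Illustrative promise-queue lock: each withLock callback runs only after
// the previous one has settled.
class SimpleLock {
  #tail: Promise<unknown> = Promise.resolve();
  withLock<T>(fn: () => Promise<T> | T): Promise<T> {
    const run = this.#tail.then(fn);
    // Keep the chain alive even if fn rejects, so later callers still run.
    this.#tail = run.catch(() => undefined);
    return run;
  }
}

const lock = new SimpleLock();
// Both puts are issued immediately, but the second body only runs after the
// first completes, so each sees the root hash the previous write produced.
const p1 = lock.withLock(async () => {/* tree.put('a', 0) */});
const p2 = lock.withLock(async () => {/* tree.put('b', 1) */});
void Promise.all([p1, p2]);
```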
@@ -42,8 +42,11 @@ class RefCountUpdates {
  )
  );
  if (this.#isLazyDelegate) {
- assert(this.#delegate.areRefsCounted);
- assert(this.#refsCounted);
+ assert(
+ this.#delegate.areRefsCounted,
+ "Expected delegate.areRefsCounted to be defined"
+ );
+ assert(this.#refsCounted, "Expected refsCounted to be defined");
  let refCountsUpdated;
  do {
  refCountsUpdated = false;
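Note: in gc.js the same pattern does double duty. Besides producing a clearer error, an asserts-typed helper lets TypeScript narrow the optional `areRefsCounted` delegate method (and the nullable `#refsCounted` set) for the loop that follows. A hedged illustration of the narrowing, with a simplified stand-in for the real RefCountUpdatesDelegate interface in dag/gc.ts:

```ts
// Illustrative only: how an asserts-style helper narrows an optional member.
interface LazyDelegate {
  areRefsCounted?: (hash: string) => boolean;
}

function assert(b: unknown, msg: string): asserts b {
  if (!b) {
    throw new Error(msg);
  }
}

function countRefs(delegate: LazyDelegate, hash: string): boolean {
  assert(
    delegate.areRefsCounted,
    'Expected delegate.areRefsCounted to be defined',
  );
  // After the assert, the optional method is narrowed to non-undefined,
  // so calling it type-checks without a non-null assertion.
  return delegate.areRefsCounted(hash);
}
```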
@@ -1 +1 @@
- {"version":3,"file":"gc.js","sources":["../../../../../replicache/src/dag/gc.ts"],"sourcesContent":["import {assert, assertNumber} from '../../../shared/src/asserts.ts';\nimport type {MaybePromise} from '../../../shared/src/types.ts';\nimport {skipGCAsserts} from '../config.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\n\nexport type HeadChange = {\n new: Hash | undefined;\n old: Hash | undefined;\n};\n\ntype LoadedRefCountPromises = Map<Hash, Promise<number>>;\n\nexport interface RefCountUpdatesDelegate {\n getRefCount: (hash: Hash) => MaybePromise<number | undefined>;\n getRefs: (hash: Hash) => MaybePromise<readonly Hash[] | undefined>;\n /**\n * Should be implemented if the store lazily loads refs, returning whether\n * or not the chunks refs have already been counted (i.e. are reflected\n * in `getRefCount`).\n *\n * If defined then:\n * - `getRefs` should return undefined for refs that have not been loaded,\n * but should never return undefined for hashes in `putChunks`.\n * - it is assumed that chunks in `putChunks` may have been reachable before\n * the write, but may not have been counted. This method is used to\n * determine if they have been counted or not. If they have not been\n * counted, and are reachable with the write applied, the returned\n * ref count updates will include updates for counting them.\n *\n * If undefined then:\n * - `getRefs` should never return undefined\n * - it is assumed that the refs of any chunks which were reachable before\n * the write are already counted\n */\n areRefsCounted?: (hash: Hash) => boolean;\n}\n\n/**\n * Computes how ref counts should be updated when a dag write is committed.\n * Does not modify the dag store.\n * @param headChanges Heads that were changed by the dag write.\n * @param putChunks Chunks that were put by the dag write.\n * @param delegate Delegate used for getting ref information from the dag store.\n * @returns Map from chunk Hash to changed ref counts. Chunks with a new ref\n * count of 0 should be deleted. 
All hashes in `putChunks` will have an entry\n * (which will be zero if the newly put chunk is not reachable from any head).\n */\nexport function computeRefCountUpdates(\n headChanges: Iterable<HeadChange>,\n putChunks: ReadonlySet<Hash>,\n delegate: RefCountUpdatesDelegate,\n): Promise<Map<Hash, number>> {\n return new RefCountUpdates(headChanges, putChunks, delegate).compute();\n}\n\nclass RefCountUpdates {\n readonly #newHeads: Hash[];\n readonly #oldHeads: Hash[];\n readonly #putChunks: ReadonlySet<Hash>;\n readonly #delegate: RefCountUpdatesDelegate;\n readonly #refsCounted: Set<Hash> | null;\n readonly #refCountUpdates: Map<Hash, number>;\n readonly #loadedRefCountPromises: LoadedRefCountPromises;\n readonly #isLazyDelegate: boolean;\n\n constructor(\n headChanges: Iterable<HeadChange>,\n putChunks: ReadonlySet<Hash>,\n delegate: RefCountUpdatesDelegate,\n ) {\n const newHeads: Hash[] = [];\n const oldHeads: Hash[] = [];\n for (const changedHead of headChanges) {\n if (changedHead.old !== changedHead.new) {\n changedHead.old && oldHeads.push(changedHead.old);\n changedHead.new && newHeads.push(changedHead.new);\n }\n }\n this.#newHeads = newHeads;\n this.#oldHeads = oldHeads;\n this.#putChunks = putChunks;\n this.#delegate = delegate;\n this.#refCountUpdates = new Map();\n // This map is used to ensure we do not load the ref count key more than once.\n // Once it is loaded we only operate on a cache of the ref counts.\n this.#loadedRefCountPromises = new Map();\n this.#isLazyDelegate = delegate.areRefsCounted !== undefined;\n this.#refsCounted = this.#isLazyDelegate ? new Set() : null;\n }\n\n async compute(): Promise<Map<Hash, number>> {\n for (const n of this.#newHeads) {\n await this.#changeRefCount(n, 1);\n }\n\n // Now go through the put chunks to ensure each has an entry in\n // refCountUpdates (zero for new chunks which are not reachable from\n // newHeads).\n await Promise.all(\n Array.from(this.#putChunks.values(), hash =>\n this.#ensureRefCountLoaded(hash),\n ),\n );\n\n if (this.#isLazyDelegate) {\n assert(this.#delegate.areRefsCounted);\n assert(this.#refsCounted);\n let refCountsUpdated;\n do {\n refCountsUpdated = false;\n for (const hash of this.#putChunks.values()) {\n if (\n !this.#delegate.areRefsCounted(hash) &&\n !this.#refsCounted.has(hash) &&\n this.#refCountUpdates.get(hash) !== 0\n ) {\n await this.#updateRefsCounts(hash, 1);\n refCountsUpdated = true;\n break;\n }\n }\n } while (refCountsUpdated);\n }\n\n for (const o of this.#oldHeads) {\n await this.#changeRefCount(o, -1);\n }\n\n if (!skipGCAsserts) {\n for (const [hash, update] of this.#refCountUpdates) {\n assert(\n update >= 0,\n `ref count update must be non-negative. ${hash}:${update}`,\n );\n }\n }\n\n return this.#refCountUpdates;\n }\n\n async #changeRefCount(hash: Hash, delta: number): Promise<void> {\n // First make sure that we have the ref count in the cache. 
This is async\n // because it might need to load the ref count from the store (via the delegate).\n //\n // Once we have loaded the ref count all the updates to it are sync to\n // prevent race conditions.\n await this.#ensureRefCountLoaded(hash);\n if (this.#updateRefCount(hash, delta)) {\n await this.#updateRefsCounts(hash, delta);\n }\n }\n\n async #updateRefsCounts(hash: Hash, delta: number) {\n if (hash === emptyHash) {\n return;\n }\n const refs = await this.#delegate.getRefs(hash);\n if (!skipGCAsserts) {\n assert(\n refs || (this.#isLazyDelegate && !this.#putChunks.has(hash)),\n 'refs must be defined',\n );\n }\n\n if (refs !== undefined) {\n this.#refsCounted?.add(hash);\n const ps = refs.map(ref => this.#changeRefCount(ref, delta));\n await Promise.all(ps);\n }\n }\n\n #ensureRefCountLoaded(hash: Hash): Promise<number> {\n // Only get the ref count once.\n let p = this.#loadedRefCountPromises.get(hash);\n if (p === undefined) {\n p = (async () => {\n const value = (await this.#delegate.getRefCount(hash)) || 0;\n this.#refCountUpdates.set(hash, value);\n return value;\n })();\n this.#loadedRefCountPromises.set(hash, p);\n }\n return p;\n }\n\n #updateRefCount(hash: Hash, delta: number): boolean {\n const oldCount = this.#refCountUpdates.get(hash);\n assertNumber(oldCount);\n this.#refCountUpdates.set(hash, oldCount + delta);\n return (oldCount === 0 && delta === 1) || (oldCount === 1 && delta === -1);\n }\n}\n"],"names":["skipGCAsserts"],"mappings":";;;AA+CO,SAAS,uBACd,aACA,WACA,UAC4B;AAC5B,SAAO,IAAI,gBAAgB,aAAa,WAAW,QAAQ,EAAE,QAAA;AAC/D;AAEA,MAAM,gBAAgB;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,aACA,WACA,UACA;AACA,UAAM,WAAmB,CAAA;AACzB,UAAM,WAAmB,CAAA;AACzB,eAAW,eAAe,aAAa;AACrC,UAAI,YAAY,QAAQ,YAAY,KAAK;AACvC,oBAAY,OAAO,SAAS,KAAK,YAAY,GAAG;AAChD,oBAAY,OAAO,SAAS,KAAK,YAAY,GAAG;AAAA,MAClD;AAAA,IACF;AACA,SAAK,YAAY;AACjB,SAAK,YAAY;AACjB,SAAK,aAAa;AAClB,SAAK,YAAY;AACjB,SAAK,uCAAuB,IAAA;AAG5B,SAAK,8CAA8B,IAAA;AACnC,SAAK,kBAAkB,SAAS,mBAAmB;AACnD,SAAK,eAAe,KAAK,kBAAkB,oBAAI,QAAQ;AAAA,EACzD;AAAA,EAEA,MAAM,UAAsC;AAC1C,eAAW,KAAK,KAAK,WAAW;AAC9B,YAAM,KAAK,gBAAgB,GAAG,CAAC;AAAA,IACjC;AAKA,UAAM,QAAQ;AAAA,MACZ,MAAM;AAAA,QAAK,KAAK,WAAW,OAAA;AAAA,QAAU,CAAA,SACnC,KAAK,sBAAsB,IAAI;AAAA,MAAA;AAAA,IACjC;AAGF,QAAI,KAAK,iBAAiB;AACxB,aAAO,KAAK,UAAU,cAAc;AACpC,aAAO,KAAK,YAAY;AACxB,UAAI;AACJ,SAAG;AACD,2BAAmB;AACnB,mBAAW,QAAQ,KAAK,WAAW,OAAA,GAAU;AAC3C,cACE,CAAC,KAAK,UAAU,eAAe,IAAI,KACnC,CAAC,KAAK,aAAa,IAAI,IAAI,KAC3B,KAAK,iBAAiB,IAAI,IAAI,MAAM,GACpC;AACA,kBAAM,KAAK,kBAAkB,MAAM,CAAC;AACpC,+BAAmB;AACnB;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS;AAAA,IACX;AAEA,eAAW,KAAK,KAAK,WAAW;AAC9B,YAAM,KAAK,gBAAgB,GAAG,EAAE;AAAA,IAClC;AAEA,QAAI,CAACA,QAAe;AAClB,iBAAW,CAAC,MAAM,MAAM,KAAK,KAAK,kBAAkB;AAClD;AAAA,UACE,UAAU;AAAA,UACV,0CAA0C,IAAI,IAAI,MAAM;AAAA,QAAA;AAAA,MAE5D;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,gBAAgB,MAAY,OAA8B;AAM9D,UAAM,KAAK,sBAAsB,IAAI;AACrC,QAAI,KAAK,gBAAgB,MAAM,KAAK,GAAG;AACrC,YAAM,KAAK,kBAAkB,MAAM,KAAK;AAAA,IAC1C;AAAA,EACF;AAAA,EAEA,MAAM,kBAAkB,MAAY,OAAe;AACjD,QAAI,SAAS,WAAW;AACtB;AAAA,IACF;AACA,UAAM,OAAO,MAAM,KAAK,UAAU,QAAQ,IAAI;AAC9C,QAAI,CAACA,QAAe;AAClB;AAAA,QACE,QAAS,KAAK,mBAAmB,CAAC,KAAK,WAAW,IAAI,IAAI;AAAA,QAC1D;AAAA,MAAA;AAAA,IAEJ;AAEA,QAAI,SAAS,QAAW;AACtB,WAAK,cAAc,IAAI,IAAI;AAC3B,YAAM,KAAK,KAAK,IAAI,CAAA,QAAO,KAAK,gBAAgB,KAAK,KAAK,CAAC;AAC3D,YAAM,QAAQ,IAAI,EAAE;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,sBAAsB,MAA6B;AAEjD,QAAI,IAAI,KAAK,wBAAwB,IAAI,IAAI;AAC7C,QAAI,MAAM,QAAW;AACnB,WAAK,YAAY;AACf,cAAM,QAAS,MAAM,KAAK,UAAU,YAAY,IAAI,KAAM;AAC1D,aAAK,iBA
AiB,IAAI,MAAM,KAAK;AACrC,eAAO;AAAA,MACT,GAAA;AACA,WAAK,wBAAwB,IAAI,MAAM,CAAC;AAAA,IAC1C;AACA,WAAO;AAAA,EACT;AAAA,EAEA,gBAAgB,MAAY,OAAwB;AAClD,UAAM,WAAW,KAAK,iBAAiB,IAAI,IAAI;AAC/C,iBAAa,QAAQ;AACrB,SAAK,iBAAiB,IAAI,MAAM,WAAW,KAAK;AAChD,WAAQ,aAAa,KAAK,UAAU,KAAO,aAAa,KAAK,UAAU;AAAA,EACzE;AACF;"}
+ {"version":3,"file":"gc.js","sources":["../../../../../replicache/src/dag/gc.ts"],"sourcesContent":["import {assert, assertNumber} from '../../../shared/src/asserts.ts';\nimport type {MaybePromise} from '../../../shared/src/types.ts';\nimport {skipGCAsserts} from '../config.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\n\nexport type HeadChange = {\n new: Hash | undefined;\n old: Hash | undefined;\n};\n\ntype LoadedRefCountPromises = Map<Hash, Promise<number>>;\n\nexport interface RefCountUpdatesDelegate {\n getRefCount: (hash: Hash) => MaybePromise<number | undefined>;\n getRefs: (hash: Hash) => MaybePromise<readonly Hash[] | undefined>;\n /**\n * Should be implemented if the store lazily loads refs, returning whether\n * or not the chunks refs have already been counted (i.e. are reflected\n * in `getRefCount`).\n *\n * If defined then:\n * - `getRefs` should return undefined for refs that have not been loaded,\n * but should never return undefined for hashes in `putChunks`.\n * - it is assumed that chunks in `putChunks` may have been reachable before\n * the write, but may not have been counted. This method is used to\n * determine if they have been counted or not. If they have not been\n * counted, and are reachable with the write applied, the returned\n * ref count updates will include updates for counting them.\n *\n * If undefined then:\n * - `getRefs` should never return undefined\n * - it is assumed that the refs of any chunks which were reachable before\n * the write are already counted\n */\n areRefsCounted?: (hash: Hash) => boolean;\n}\n\n/**\n * Computes how ref counts should be updated when a dag write is committed.\n * Does not modify the dag store.\n * @param headChanges Heads that were changed by the dag write.\n * @param putChunks Chunks that were put by the dag write.\n * @param delegate Delegate used for getting ref information from the dag store.\n * @returns Map from chunk Hash to changed ref counts. Chunks with a new ref\n * count of 0 should be deleted. 
All hashes in `putChunks` will have an entry\n * (which will be zero if the newly put chunk is not reachable from any head).\n */\nexport function computeRefCountUpdates(\n headChanges: Iterable<HeadChange>,\n putChunks: ReadonlySet<Hash>,\n delegate: RefCountUpdatesDelegate,\n): Promise<Map<Hash, number>> {\n return new RefCountUpdates(headChanges, putChunks, delegate).compute();\n}\n\nclass RefCountUpdates {\n readonly #newHeads: Hash[];\n readonly #oldHeads: Hash[];\n readonly #putChunks: ReadonlySet<Hash>;\n readonly #delegate: RefCountUpdatesDelegate;\n readonly #refsCounted: Set<Hash> | null;\n readonly #refCountUpdates: Map<Hash, number>;\n readonly #loadedRefCountPromises: LoadedRefCountPromises;\n readonly #isLazyDelegate: boolean;\n\n constructor(\n headChanges: Iterable<HeadChange>,\n putChunks: ReadonlySet<Hash>,\n delegate: RefCountUpdatesDelegate,\n ) {\n const newHeads: Hash[] = [];\n const oldHeads: Hash[] = [];\n for (const changedHead of headChanges) {\n if (changedHead.old !== changedHead.new) {\n changedHead.old && oldHeads.push(changedHead.old);\n changedHead.new && newHeads.push(changedHead.new);\n }\n }\n this.#newHeads = newHeads;\n this.#oldHeads = oldHeads;\n this.#putChunks = putChunks;\n this.#delegate = delegate;\n this.#refCountUpdates = new Map();\n // This map is used to ensure we do not load the ref count key more than once.\n // Once it is loaded we only operate on a cache of the ref counts.\n this.#loadedRefCountPromises = new Map();\n this.#isLazyDelegate = delegate.areRefsCounted !== undefined;\n this.#refsCounted = this.#isLazyDelegate ? new Set() : null;\n }\n\n async compute(): Promise<Map<Hash, number>> {\n for (const n of this.#newHeads) {\n await this.#changeRefCount(n, 1);\n }\n\n // Now go through the put chunks to ensure each has an entry in\n // refCountUpdates (zero for new chunks which are not reachable from\n // newHeads).\n await Promise.all(\n Array.from(this.#putChunks.values(), hash =>\n this.#ensureRefCountLoaded(hash),\n ),\n );\n\n if (this.#isLazyDelegate) {\n assert(\n this.#delegate.areRefsCounted,\n 'Expected delegate.areRefsCounted to be defined',\n );\n assert(this.#refsCounted, 'Expected refsCounted to be defined');\n let refCountsUpdated;\n do {\n refCountsUpdated = false;\n for (const hash of this.#putChunks.values()) {\n if (\n !this.#delegate.areRefsCounted(hash) &&\n !this.#refsCounted.has(hash) &&\n this.#refCountUpdates.get(hash) !== 0\n ) {\n await this.#updateRefsCounts(hash, 1);\n refCountsUpdated = true;\n break;\n }\n }\n } while (refCountsUpdated);\n }\n\n for (const o of this.#oldHeads) {\n await this.#changeRefCount(o, -1);\n }\n\n if (!skipGCAsserts) {\n for (const [hash, update] of this.#refCountUpdates) {\n assert(\n update >= 0,\n `ref count update must be non-negative. ${hash}:${update}`,\n );\n }\n }\n\n return this.#refCountUpdates;\n }\n\n async #changeRefCount(hash: Hash, delta: number): Promise<void> {\n // First make sure that we have the ref count in the cache. 
This is async\n // because it might need to load the ref count from the store (via the delegate).\n //\n // Once we have loaded the ref count all the updates to it are sync to\n // prevent race conditions.\n await this.#ensureRefCountLoaded(hash);\n if (this.#updateRefCount(hash, delta)) {\n await this.#updateRefsCounts(hash, delta);\n }\n }\n\n async #updateRefsCounts(hash: Hash, delta: number) {\n if (hash === emptyHash) {\n return;\n }\n const refs = await this.#delegate.getRefs(hash);\n if (!skipGCAsserts) {\n assert(\n refs || (this.#isLazyDelegate && !this.#putChunks.has(hash)),\n 'refs must be defined',\n );\n }\n\n if (refs !== undefined) {\n this.#refsCounted?.add(hash);\n const ps = refs.map(ref => this.#changeRefCount(ref, delta));\n await Promise.all(ps);\n }\n }\n\n #ensureRefCountLoaded(hash: Hash): Promise<number> {\n // Only get the ref count once.\n let p = this.#loadedRefCountPromises.get(hash);\n if (p === undefined) {\n p = (async () => {\n const value = (await this.#delegate.getRefCount(hash)) || 0;\n this.#refCountUpdates.set(hash, value);\n return value;\n })();\n this.#loadedRefCountPromises.set(hash, p);\n }\n return p;\n }\n\n #updateRefCount(hash: Hash, delta: number): boolean {\n const oldCount = this.#refCountUpdates.get(hash);\n assertNumber(oldCount);\n this.#refCountUpdates.set(hash, oldCount + delta);\n return (oldCount === 0 && delta === 1) || (oldCount === 1 && delta === -1);\n }\n}\n"],"names":["skipGCAsserts"],"mappings":";;;AA+CO,SAAS,uBACd,aACA,WACA,UAC4B;AAC5B,SAAO,IAAI,gBAAgB,aAAa,WAAW,QAAQ,EAAE,QAAA;AAC/D;AAEA,MAAM,gBAAgB;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,aACA,WACA,UACA;AACA,UAAM,WAAmB,CAAA;AACzB,UAAM,WAAmB,CAAA;AACzB,eAAW,eAAe,aAAa;AACrC,UAAI,YAAY,QAAQ,YAAY,KAAK;AACvC,oBAAY,OAAO,SAAS,KAAK,YAAY,GAAG;AAChD,oBAAY,OAAO,SAAS,KAAK,YAAY,GAAG;AAAA,MAClD;AAAA,IACF;AACA,SAAK,YAAY;AACjB,SAAK,YAAY;AACjB,SAAK,aAAa;AAClB,SAAK,YAAY;AACjB,SAAK,uCAAuB,IAAA;AAG5B,SAAK,8CAA8B,IAAA;AACnC,SAAK,kBAAkB,SAAS,mBAAmB;AACnD,SAAK,eAAe,KAAK,kBAAkB,oBAAI,QAAQ;AAAA,EACzD;AAAA,EAEA,MAAM,UAAsC;AAC1C,eAAW,KAAK,KAAK,WAAW;AAC9B,YAAM,KAAK,gBAAgB,GAAG,CAAC;AAAA,IACjC;AAKA,UAAM,QAAQ;AAAA,MACZ,MAAM;AAAA,QAAK,KAAK,WAAW,OAAA;AAAA,QAAU,CAAA,SACnC,KAAK,sBAAsB,IAAI;AAAA,MAAA;AAAA,IACjC;AAGF,QAAI,KAAK,iBAAiB;AACxB;AAAA,QACE,KAAK,UAAU;AAAA,QACf;AAAA,MAAA;AAEF,aAAO,KAAK,cAAc,oCAAoC;AAC9D,UAAI;AACJ,SAAG;AACD,2BAAmB;AACnB,mBAAW,QAAQ,KAAK,WAAW,OAAA,GAAU;AAC3C,cACE,CAAC,KAAK,UAAU,eAAe,IAAI,KACnC,CAAC,KAAK,aAAa,IAAI,IAAI,KAC3B,KAAK,iBAAiB,IAAI,IAAI,MAAM,GACpC;AACA,kBAAM,KAAK,kBAAkB,MAAM,CAAC;AACpC,+BAAmB;AACnB;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS;AAAA,IACX;AAEA,eAAW,KAAK,KAAK,WAAW;AAC9B,YAAM,KAAK,gBAAgB,GAAG,EAAE;AAAA,IAClC;AAEA,QAAI,CAACA,QAAe;AAClB,iBAAW,CAAC,MAAM,MAAM,KAAK,KAAK,kBAAkB;AAClD;AAAA,UACE,UAAU;AAAA,UACV,0CAA0C,IAAI,IAAI,MAAM;AAAA,QAAA;AAAA,MAE5D;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,gBAAgB,MAAY,OAA8B;AAM9D,UAAM,KAAK,sBAAsB,IAAI;AACrC,QAAI,KAAK,gBAAgB,MAAM,KAAK,GAAG;AACrC,YAAM,KAAK,kBAAkB,MAAM,KAAK;AAAA,IAC1C;AAAA,EACF;AAAA,EAEA,MAAM,kBAAkB,MAAY,OAAe;AACjD,QAAI,SAAS,WAAW;AACtB;AAAA,IACF;AACA,UAAM,OAAO,MAAM,KAAK,UAAU,QAAQ,IAAI;AAC9C,QAAI,CAACA,QAAe;AAClB;AAAA,QACE,QAAS,KAAK,mBAAmB,CAAC,KAAK,WAAW,IAAI,IAAI;AAAA,QAC1D;AAAA,MAAA;AAAA,IAEJ;AAEA,QAAI,SAAS,QAAW;AACtB,WAAK,cAAc,IAAI,IAAI;AAC3B,YAAM,KAAK,KAAK,IAAI,CAAA,QAAO,KAAK,gBAAgB,KAAK,KAAK,CAAC;AAC3D,YAAM,QAAQ,IAAI,EAAE;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,sBAAsB,MAA6B;AAEjD,QAAI,IAAI,KAAK,wBAAwB,IAAI,IAAI;AAC7C,QAAI,MAAM,QAAW;AACnB,WAAK,YAAY;AACf,cAAM,QAAS,MAAM,KAAK,UAAU,YAA
Y,IAAI,KAAM;AAC1D,aAAK,iBAAiB,IAAI,MAAM,KAAK;AACrC,eAAO;AAAA,MACT,GAAA;AACA,WAAK,wBAAwB,IAAI,MAAM,CAAC;AAAA,IAC1C;AACA,WAAO;AAAA,EACT;AAAA,EAEA,gBAAgB,MAAY,OAAwB;AAClD,UAAM,WAAW,KAAK,iBAAiB,IAAI,IAAI;AAC/C,iBAAa,QAAQ;AACrB,SAAK,iBAAiB,IAAI,MAAM,WAAW,KAAK;AAChD,WAAQ,aAAa,KAAK,UAAU,KAAO,aAAa,KAAK,UAAU;AAAA,EACzE;AACF;"}
@@ -1 +1 @@
- {"version":3,"file":"write.d.ts","sourceRoot":"","sources":["../../../../../replicache/src/db/write.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,6BAA6B,CAAC;AAGtD,OAAO,EAAC,SAAS,EAAmB,MAAM,kBAAkB,CAAC;AAC7D,OAAO,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,eAAe,CAAC;AAChD,OAAO,KAAK,EAAC,KAAK,IAAI,QAAQ,EAAC,MAAM,iBAAiB,CAAC;AACvD,OAAO,KAAK,aAAa,MAAM,2BAA2B,CAAC;AAC3D,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC;AACvD,OAAO,EAAC,KAAK,IAAI,EAAY,MAAM,YAAY,CAAC;AAEhD,OAAO,KAAK,EAAC,qBAAqB,EAAC,MAAM,iBAAiB,CAAC;AAC3D,OAAO,EAAC,QAAQ,EAAC,MAAM,iBAAiB,CAAC;AACzC,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACxC,OAAO,EACL,KAAK,IAAI,IAAI,UAAU,EAQxB,MAAM,aAAa,CAAC;AAGrB,OAAO,EAAC,UAAU,EAAa,MAAM,YAAY,CAAC;AAElD,OAAO,EAAC,IAAI,EAAqB,MAAM,WAAW,CAAC;AAEnD,KAAK,aAAa,GAAG,IAAI,CAAC,OAAO,aAAa,CAAC,CAAC;AAEhD,qBAAa,KAAM,SAAQ,IAAI;;IAKrB,GAAG,EAAE,UAAU,CAAC;IAExB,SAAiB,OAAO,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;gBAKhD,QAAQ,EAAE,QAAQ,EAClB,GAAG,EAAE,UAAU,EACf,KAAK,EAAE,MAAM,CAAC,UAAU,CAAC,GAAG,SAAS,EACrC,IAAI,EAAE,UAAU,EAChB,OAAO,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,EAChC,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa;IAkB9B;;;OAGG;IACG,GAAG,CACP,EAAE,EAAE,UAAU,EACd,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,eAAe,GACrB,OAAO,CAAC,IAAI,CAAC;IAOhB,aAAa,IAAI,OAAO,CAAC,MAAM,CAAC;IAI1B,GAAG,CAAC,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IASlD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAStB,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;IA6DxC,MAAM,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQvC,eAAe,CACnB,QAAQ,EAAE,MAAM,EAChB,UAAU,EAAE,qBAAqB,GAChC,OAAO,CAAC,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IAgE5B,KAAK,IAAI,IAAI;CAGd;AAED,wBAAsB,aAAa,CACjC,SAAS,EAAE,IAAI,EACf,WAAW,EAAE,MAAM,EACnB,eAAe,EAAE,eAAe,EAChC,YAAY,EAAE,IAAI,GAAG,IAAI,EACzB,QAAQ,EAAE,QAAQ,EAClB,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,KAAK,CAAC,CA0BhB;AAED,wBAAsB,oBAAoB,CACxC,SAAS,EAAE,IAAI,EACf,eAAe,EAAE,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,EACzC,UAAU,EAAE,YAAY,EACxB,QAAQ,EAAE,QAAQ,EAClB,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,KAAK,CAAC,CAYhB;AAED,wBAAsB,aAAa,CACjC,EAAE,EAAE,UAAU,EACd,OAAO,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,EAChC,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,MAAM,OAAO,CAAC,eAAe,GAAG,SAAS,CAAC,EACxD,MAAM,EAAE,eAAe,GAAG,SAAS,GAClC,OAAO,CAAC,IAAI,CAAC,CAmCf;AAED,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,MAAM,CAAC,UAAU,CAAC,EAC1B,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa,GAC3B,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,CAYzB;AAED,wBAAsB,gBAAgB,CACpC,EAAE,EAAE,UAAU,EACd,QAAQ,EAAE,QAAQ,EAClB,QAAQ,EAAE,SAAS,EACnB,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,MAAM,EACnB,UAAU,EAAE,OAAO,EACnB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,UAAU,CAAC,CAkBrB"}
+ {"version":3,"file":"write.d.ts","sourceRoot":"","sources":["../../../../../replicache/src/db/write.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,6BAA6B,CAAC;AAGtD,OAAO,EAAC,SAAS,EAAmB,MAAM,kBAAkB,CAAC;AAC7D,OAAO,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,eAAe,CAAC;AAChD,OAAO,KAAK,EAAC,KAAK,IAAI,QAAQ,EAAC,MAAM,iBAAiB,CAAC;AACvD,OAAO,KAAK,aAAa,MAAM,2BAA2B,CAAC;AAC3D,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC;AACvD,OAAO,EAAC,KAAK,IAAI,EAAY,MAAM,YAAY,CAAC;AAEhD,OAAO,KAAK,EAAC,qBAAqB,EAAC,MAAM,iBAAiB,CAAC;AAC3D,OAAO,EAAC,QAAQ,EAAC,MAAM,iBAAiB,CAAC;AACzC,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACxC,OAAO,EACL,KAAK,IAAI,IAAI,UAAU,EAQxB,MAAM,aAAa,CAAC;AAGrB,OAAO,EAAC,UAAU,EAAa,MAAM,YAAY,CAAC;AAElD,OAAO,EAAC,IAAI,EAAqB,MAAM,WAAW,CAAC;AAEnD,KAAK,aAAa,GAAG,IAAI,CAAC,OAAO,aAAa,CAAC,CAAC;AAEhD,qBAAa,KAAM,SAAQ,IAAI;;IAKrB,GAAG,EAAE,UAAU,CAAC;IAExB,SAAiB,OAAO,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;gBAKhD,QAAQ,EAAE,QAAQ,EAClB,GAAG,EAAE,UAAU,EACf,KAAK,EAAE,MAAM,CAAC,UAAU,CAAC,GAAG,SAAS,EACrC,IAAI,EAAE,UAAU,EAChB,OAAO,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,EAChC,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa;IAwB9B;;;OAGG;IACG,GAAG,CACP,EAAE,EAAE,UAAU,EACd,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,eAAe,GACrB,OAAO,CAAC,IAAI,CAAC;IAOhB,aAAa,IAAI,OAAO,CAAC,MAAM,CAAC;IAI1B,GAAG,CAAC,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IASlD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAStB,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;IAmExC,MAAM,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQvC,eAAe,CACnB,QAAQ,EAAE,MAAM,EAChB,UAAU,EAAE,qBAAqB,GAChC,OAAO,CAAC,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IAgE5B,KAAK,IAAI,IAAI;CAGd;AAED,wBAAsB,aAAa,CACjC,SAAS,EAAE,IAAI,EACf,WAAW,EAAE,MAAM,EACnB,eAAe,EAAE,eAAe,EAChC,YAAY,EAAE,IAAI,GAAG,IAAI,EACzB,QAAQ,EAAE,QAAQ,EAClB,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,KAAK,CAAC,CA6BhB;AAED,wBAAsB,oBAAoB,CACxC,SAAS,EAAE,IAAI,EACf,eAAe,EAAE,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,EACzC,UAAU,EAAE,YAAY,EACxB,QAAQ,EAAE,QAAQ,EAClB,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,KAAK,CAAC,CAYhB;AAED,wBAAsB,aAAa,CACjC,EAAE,EAAE,UAAU,EACd,OAAO,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,EAChC,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,MAAM,OAAO,CAAC,eAAe,GAAG,SAAS,CAAC,EACxD,MAAM,EAAE,eAAe,GAAG,SAAS,GAClC,OAAO,CAAC,IAAI,CAAC,CAmCf;AAED,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,MAAM,CAAC,UAAU,CAAC,EAC1B,QAAQ,EAAE,QAAQ,EAClB,aAAa,EAAE,aAAa,GAC3B,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,CAYzB;AAED,wBAAsB,gBAAgB,CACpC,EAAE,EAAE,UAAU,EACd,QAAQ,EAAE,QAAQ,EAClB,QAAQ,EAAE,SAAS,EACnB,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,MAAM,EACnB,UAAU,EAAE,OAAO,EACnB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,UAAU,CAAC,CAkBrB"}
@@ -25,9 +25,15 @@ class Write extends Read {
     this.#clientID = clientID;
     this.#formatVersion = formatVersion;
     if (basis === void 0) {
-      assert(meta.basisHash === emptyHash);
+      assert(
+        meta.basisHash === emptyHash,
+        "Expected basisHash to be emptyHash when basis is undefined"
+      );
     } else {
-      assert(meta.basisHash === basis.chunk.hash);
+      assert(
+        meta.basisHash === basis.chunk.hash,
+        "Expected meta.basisHash to equal basis.chunk.hash"
+      );
     }
   }
   /**
@@ -72,7 +78,10 @@ class Write extends Read {
     const meta = this.#meta;
     switch (meta.type) {
       case LocalDD31: {
-        assert(this.#formatVersion >= DD31);
+        assert(
+          this.#formatVersion >= DD31,
+          "Expected formatVersion >= DD31 for LocalDD31 commit"
+        );
         const {
           basisHash,
           mutationID,
@@ -97,7 +106,10 @@ class Write extends Read {
         break;
       }
       case SnapshotDD31: {
-        assert(this.#formatVersion > DD31);
+        assert(
+          this.#formatVersion > DD31,
+          "Expected formatVersion > DD31 for SnapshotDD31 commit"
+        );
         const { basisHash, lastMutationIDs, cookieJSON } = meta;
         commit = newSnapshotDD31(
           this.#dagWrite.createChunk,
@@ -159,7 +171,7 @@ class Write extends Read {
         continue;
       }
       const basisIndex = basisIndexes.get(name);
-      assert(index !== basisIndex);
+      assert(index !== basisIndex, "Expected index to differ from basisIndex");
       const indexDiffResult = await (basisIndex ? diff(basisIndex.map, index.map) : (
         // No basis. All keys are new.
         allEntriesAsDiff(index.map, "add")
@@ -183,7 +195,10 @@ async function newWriteLocal(basisHash, mutatorName, mutatorArgsJSON, originalHa
   const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);
   const mutationID = await basis.getNextMutationID(clientID, dagWrite);
   const indexes = readIndexesForWrite(basis, dagWrite, formatVersion);
-  assert(formatVersion >= DD31);
+  assert(
+    formatVersion >= DD31,
+    "Expected formatVersion >= DD31 for newWriteLocal"
+  );
   return new Write(
     dagWrite,
     bTreeWrite,
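
Every write.js hunk above makes the same mechanical change: a bare assert(condition) becomes assert(condition, message), using the assert helper that the embedded sources import from shared/src/asserts.ts. A minimal sketch of such a helper, assuming a plain optional string message (illustrative only, not necessarily the package's actual implementation):

// Illustrative sketch only; assumes an optional string message.
// The real shared/src/asserts.ts helper may differ.
export function assert(b: unknown, msg?: string): asserts b {
  if (!b) {
    // A bare call can only throw a generic error; the messages added
    // in this diff name the specific expectation that failed.
    throw new Error(msg ?? 'Assertion failed');
  }
}
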
@@ -1 +1 @@
- {"version":3,"file":"write.js","sources":["../../../../../replicache/src/db/write.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {diff} from '../btree/diff.ts';\nimport type {InternalDiff} from '../btree/node.ts';\nimport {BTreeRead, allEntriesAsDiff} from '../btree/read.ts';\nimport {BTreeWrite} from '../btree/write.ts';\nimport type {FrozenCookie} from '../cookies.ts';\nimport type {Write as DagWrite} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\nimport {lazy} from '../lazy.ts';\nimport type {DiffComputationConfig} from '../sync/diff.ts';\nimport {DiffsMap} from '../sync/diff.ts';\nimport type {ClientID} from '../sync/ids.ts';\nimport type {Commit} from './commit.ts';\nimport {\n type Meta as CommitMeta,\n type IndexRecord,\n type Meta,\n baseSnapshotHashFromHash,\n commitFromHash,\n newLocalDD31 as commitNewLocalDD31,\n newSnapshotDD31 as commitNewSnapshotDD31,\n getMutationID,\n} from './commit.ts';\nimport * as IndexOperation from './index-operation-enum.ts';\nimport type {IndexRead} from './index.ts';\nimport {IndexWrite, indexValue} from './index.ts';\nimport * as MetaType from './meta-type-enum.ts';\nimport {Read, readIndexesForRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class Write extends Read {\n readonly #dagWrite: DagWrite;\n readonly #basis: Commit<CommitMeta> | undefined;\n readonly #meta: CommitMeta;\n\n declare map: BTreeWrite;\n\n declare readonly indexes: Map<string, IndexWrite>;\n readonly #clientID: ClientID;\n readonly #formatVersion: FormatVersion;\n\n constructor(\n dagWrite: DagWrite,\n map: BTreeWrite,\n basis: Commit<CommitMeta> | undefined,\n meta: CommitMeta,\n indexes: Map<string, IndexWrite>,\n clientID: ClientID,\n formatVersion: FormatVersion,\n ) {\n // TypeScript has trouble\n super(dagWrite, map, indexes);\n this.#dagWrite = dagWrite;\n this.#basis = basis;\n this.#meta = meta;\n this.#clientID = clientID;\n this.#formatVersion = formatVersion;\n\n // TODO(arv): if (DEBUG) { ...\n if (basis === undefined) {\n assert(meta.basisHash === emptyHash);\n } else {\n assert(meta.basisHash === basis.chunk.hash);\n }\n }\n\n /**\n * The value needs to be frozen since it is kept in memory and used later for\n * comparison as well as returned in `get`.\n */\n async put(\n lc: LogContext,\n key: string,\n value: FrozenJSONValue,\n ): Promise<void> {\n const oldVal = lazy(() => this.map.get(key));\n await updateIndexes(lc, this.indexes, key, oldVal, value);\n\n await this.map.put(key, value);\n }\n\n getMutationID(): Promise<number> {\n return getMutationID(this.#clientID, this.#dagWrite, this.#meta);\n }\n\n async del(lc: LogContext, key: string): Promise<boolean> {\n // TODO(arv): This does the binary search twice. 
We can do better.\n const oldVal = lazy(() => this.map.get(key));\n if (oldVal !== undefined) {\n await updateIndexes(lc, this.indexes, key, oldVal, undefined);\n }\n return this.map.del(key);\n }\n\n async clear(): Promise<void> {\n await this.map.clear();\n const ps = [];\n for (const idx of this.indexes.values()) {\n ps.push(idx.clear());\n }\n await Promise.all(ps);\n }\n\n async putCommit(): Promise<Commit<CommitMeta>> {\n const valueHash = await this.map.flush();\n const indexRecords: IndexRecord[] = [];\n\n for (const index of this.indexes.values()) {\n const valueHash = await index.flush();\n const indexRecord: IndexRecord = {\n definition: index.meta.definition,\n valueHash,\n };\n indexRecords.push(indexRecord);\n }\n\n let commit: Commit<Meta>;\n const meta = this.#meta;\n switch (meta.type) {\n case MetaType.LocalDD31: {\n assert(this.#formatVersion >= FormatVersion.DD31);\n const {\n basisHash,\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n timestamp,\n } = meta;\n commit = commitNewLocalDD31(\n this.#dagWrite.createChunk,\n basisHash,\n await baseSnapshotHashFromHash(basisHash, this.#dagWrite),\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n valueHash,\n indexRecords,\n timestamp,\n this.#clientID,\n );\n break;\n }\n\n case MetaType.SnapshotDD31: {\n assert(this.#formatVersion > FormatVersion.DD31);\n const {basisHash, lastMutationIDs, cookieJSON} = meta;\n commit = commitNewSnapshotDD31(\n this.#dagWrite.createChunk,\n basisHash,\n lastMutationIDs,\n cookieJSON,\n valueHash,\n indexRecords,\n );\n break;\n }\n }\n await this.#dagWrite.putChunk(commit.chunk);\n return commit;\n }\n\n // Return value is the hash of the new commit.\n async commit(headName: string): Promise<Hash> {\n const commit = await this.putCommit();\n const commitHash = commit.chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return commitHash;\n }\n\n async commitWithDiffs(\n headName: string,\n diffConfig: DiffComputationConfig,\n ): Promise<[Hash, DiffsMap]> {\n const commit = this.putCommit();\n const diffMap = await this.#generateDiffs(diffConfig);\n const commitHash = (await commit).chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return [commitHash, diffMap];\n }\n\n async #generateDiffs(diffConfig: DiffComputationConfig): Promise<DiffsMap> {\n const diffsMap = new DiffsMap();\n if (!diffConfig.shouldComputeDiffs()) {\n return diffsMap;\n }\n\n let valueDiff: InternalDiff = [];\n if (this.#basis) {\n const basisMap = new BTreeRead(\n this.#dagWrite,\n this.#formatVersion,\n this.#basis.valueHash,\n );\n valueDiff = await diff(basisMap, this.map);\n }\n diffsMap.set('', valueDiff);\n let basisIndexes: Map<string, IndexRead>;\n if (this.#basis) {\n basisIndexes = readIndexesForRead(\n this.#basis,\n this.#dagWrite,\n this.#formatVersion,\n );\n } else {\n basisIndexes = new Map();\n }\n\n for (const [name, index] of this.indexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(name)) {\n continue;\n }\n const basisIndex = basisIndexes.get(name);\n assert(index !== basisIndex);\n\n const indexDiffResult = await (basisIndex\n ? diff(basisIndex.map, index.map)\n : // No basis. All keys are new.\n allEntriesAsDiff(index.map, 'add'));\n diffsMap.set(name, indexDiffResult);\n }\n\n // Handle indexes in basisIndex but not in this.indexes. 
All keys are\n // deleted.\n for (const [name, basisIndex] of basisIndexes) {\n if (\n !this.indexes.has(name) &&\n diffConfig.shouldComputeDiffsForIndex(name)\n ) {\n const indexDiffResult = await allEntriesAsDiff(basisIndex.map, 'del');\n diffsMap.set(name, indexDiffResult);\n }\n }\n return diffsMap;\n }\n\n close(): void {\n this.#dagWrite.release();\n }\n}\n\nexport async function newWriteLocal(\n basisHash: Hash,\n mutatorName: string,\n mutatorArgsJSON: FrozenJSONValue,\n originalHash: Hash | null,\n dagWrite: DagWrite,\n timestamp: number,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n const mutationID = await basis.getNextMutationID(clientID, dagWrite);\n const indexes = readIndexesForWrite(basis, dagWrite, formatVersion);\n assert(formatVersion >= FormatVersion.DD31);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n\n {\n type: MetaType.LocalDD31,\n basisHash,\n baseSnapshotHash: await baseSnapshotHashFromHash(basisHash, dagWrite),\n mutatorName,\n mutatorArgsJSON,\n mutationID,\n originalHash,\n timestamp,\n clientID,\n },\n indexes,\n clientID,\n formatVersion,\n );\n}\n\nexport async function newWriteSnapshotDD31(\n basisHash: Hash,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n dagWrite: DagWrite,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n {basisHash, type: MetaType.SnapshotDD31, lastMutationIDs, cookieJSON},\n readIndexesForWrite(basis, dagWrite, formatVersion),\n clientID,\n formatVersion,\n );\n}\n\nexport async function updateIndexes(\n lc: LogContext,\n indexes: Map<string, IndexWrite>,\n key: string,\n oldValGetter: () => Promise<FrozenJSONValue | undefined>,\n newVal: FrozenJSONValue | undefined,\n): Promise<void> {\n const ps: Promise<void>[] = [];\n for (const idx of indexes.values()) {\n const {keyPrefix} = idx.meta.definition;\n if (!keyPrefix || key.startsWith(keyPrefix)) {\n const oldVal = await oldValGetter();\n if (oldVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Remove,\n key,\n oldVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? false,\n ),\n );\n }\n if (newVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Add,\n key,\n newVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? 
false,\n ),\n );\n }\n }\n }\n await Promise.all(ps);\n}\n\nexport function readIndexesForWrite(\n commit: Commit<CommitMeta>,\n dagWrite: DagWrite,\n formatVersion: FormatVersion,\n): Map<string, IndexWrite> {\n const m = new Map();\n for (const index of commit.indexes) {\n m.set(\n index.definition.name,\n new IndexWrite(\n index,\n new BTreeWrite(dagWrite, formatVersion, index.valueHash),\n ),\n );\n }\n return m;\n}\n\nexport async function createIndexBTree(\n lc: LogContext,\n dagWrite: DagWrite,\n valueMap: BTreeRead,\n prefix: string,\n jsonPointer: string,\n allowEmpty: boolean,\n formatVersion: FormatVersion,\n): Promise<BTreeWrite> {\n const indexMap = new BTreeWrite(dagWrite, formatVersion);\n for await (const entry of valueMap.scan(prefix)) {\n const key = entry[0];\n if (!key.startsWith(prefix)) {\n break;\n }\n await indexValue(\n lc,\n indexMap,\n IndexOperation.Add,\n key,\n entry[1],\n jsonPointer,\n allowEmpty,\n );\n }\n return indexMap;\n}\n"],"names":["valueHash","MetaType.LocalDD31","FormatVersion.DD31","commitNewLocalDD31","MetaType.SnapshotDD31","commitNewSnapshotDD31","IndexOperation.Remove","IndexOperation.Add"],"mappings":";;;;;;;;;;;;;AAmCO,MAAM,cAAc,KAAK;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EAKA;AAAA,EACA;AAAA,EAET,YACE,UACA,KACA,OACA,MACA,SACA,UACA,eACA;AAEA,UAAM,UAAU,KAAK,OAAO;AAC5B,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,QAAQ;AACb,SAAK,YAAY;AACjB,SAAK,iBAAiB;AAGtB,QAAI,UAAU,QAAW;AACvB,aAAO,KAAK,cAAc,SAAS;AAAA,IACrC,OAAO;AACL,aAAO,KAAK,cAAc,MAAM,MAAM,IAAI;AAAA,IAC5C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,IACJ,IACA,KACA,OACe;AACf,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAC3C,UAAM,cAAc,IAAI,KAAK,SAAS,KAAK,QAAQ,KAAK;AAExD,UAAM,KAAK,IAAI,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EAEA,gBAAiC;AAC/B,WAAO,cAAc,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK;AAAA,EACjE;AAAA,EAEA,MAAM,IAAI,IAAgB,KAA+B;AAEvD,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAC3C,QAAI,WAAW,QAAW;AACxB,YAAM,cAAc,IAAI,KAAK,SAAS,KAAK,QAAQ,MAAS;AAAA,IAC9D;AACA,WAAO,KAAK,IAAI,IAAI,GAAG;AAAA,EACzB;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,IAAI,MAAA;AACf,UAAM,KAAK,CAAA;AACX,eAAW,OAAO,KAAK,QAAQ,OAAA,GAAU;AACvC,SAAG,KAAK,IAAI,OAAO;AAAA,IACrB;AACA,UAAM,QAAQ,IAAI,EAAE;AAAA,EACtB;AAAA,EAEA,MAAM,YAAyC;AAC7C,UAAM,YAAY,MAAM,KAAK,IAAI,MAAA;AACjC,UAAM,eAA8B,CAAA;AAEpC,eAAW,SAAS,KAAK,QAAQ,OAAA,GAAU;AACzC,YAAMA,aAAY,MAAM,MAAM,MAAA;AAC9B,YAAM,cAA2B;AAAA,QAC/B,YAAY,MAAM,KAAK;AAAA,QACvB,WAAAA;AAAAA,MAAA;AAEF,mBAAa,KAAK,WAAW;AAAA,IAC/B;AAEA,QAAI;AACJ,UAAM,OAAO,KAAK;AAClB,YAAQ,KAAK,MAAA;AAAA,MACX,KAAKC,WAAoB;AACvB,eAAO,KAAK,kBAAkBC,IAAkB;AAChD,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA,IACE;AACJ,iBAASC;AAAAA,UACP,KAAK,UAAU;AAAA,UACf;AAAA,UACA,MAAM,yBAAyB,WAAW,KAAK,SAAS;AAAA,UACxD;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,KAAK;AAAA,QAAA;AAEP;AAAA,MACF;AAAA,MAEA,KAAKC,cAAuB;AAC1B,eAAO,KAAK,iBAAiBF,IAAkB;AAC/C,cAAM,EAAC,WAAW,iBAAiB,WAAA,IAAc;AACjD,iBAASG;AAAAA,UACP,KAAK,UAAU;AAAA,UACf;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA;AAEF;AAAA,MACF;AAAA,IAAA;AAEF,UAAM,KAAK,UAAU,SAAS,OAAO,KAAK;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAO,UAAiC;AAC5C,UAAM,SAAS,MAAM,KAAK,UAAA;AAC1B,UAAM,aAAa,OAAO,MAAM;AAChC,UAAM,KAAK,UAAU,QAAQ,UAAU,UAAU;AACjD,UAAM,KAAK,UAAU,OAAA;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,gBACJ,UACA,YAC2B;AAC3B,UAAM,SAAS,KAAK,UAAA;AACpB,UAAM,UAAU,MAAM,KAAK,eAAe,UAAU;AACpD,UAAM,cAAc,MAAM,QAAQ,MAAM;AACxC,UAAM,KAAK,UAAU,QAAQ,UAAU,UAAU;AACjD,UAAM,KAAK,UAAU,OAAA;AACrB,WAAO,CAAC,YAAY,OAAO;AAAA,EAC7B;AAAA,EAEA,MAAM,eAAe,YAAsD;AACzE,UAAM,WAAW,IAAI,SAAA;AACrB,QAAI,CAAC,WAAW,sBAAsB;AACpC,aAA
O;AAAA,IACT;AAEA,QAAI,YAA0B,CAAA;AAC9B,QAAI,KAAK,QAAQ;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,OAAO;AAAA,MAAA;AAEd,kBAAY,MAAM,KAAK,UAAU,KAAK,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,IAAI,SAAS;AAC1B,QAAI;AACJ,QAAI,KAAK,QAAQ;AACf,qBAAe;AAAA,QACb,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAAA,IAET,OAAO;AACL,yCAAmB,IAAA;AAAA,IACrB;AAEA,eAAW,CAAC,MAAM,KAAK,KAAK,KAAK,SAAS;AACxC,UAAI,CAAC,WAAW,2BAA2B,IAAI,GAAG;AAChD;AAAA,MACF;AACA,YAAM,aAAa,aAAa,IAAI,IAAI;AACxC,aAAO,UAAU,UAAU;AAE3B,YAAM,kBAAkB,OAAO,aAC3B,KAAK,WAAW,KAAK,MAAM,GAAG;AAAA;AAAA,QAE9B,iBAAiB,MAAM,KAAK,KAAK;AAAA;AACrC,eAAS,IAAI,MAAM,eAAe;AAAA,IACpC;AAIA,eAAW,CAAC,MAAM,UAAU,KAAK,cAAc;AAC7C,UACE,CAAC,KAAK,QAAQ,IAAI,IAAI,KACtB,WAAW,2BAA2B,IAAI,GAC1C;AACA,cAAM,kBAAkB,MAAM,iBAAiB,WAAW,KAAK,KAAK;AACpE,iBAAS,IAAI,MAAM,eAAe;AAAA,MACpC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,UAAU,QAAA;AAAA,EACjB;AACF;AAEA,eAAsB,cACpB,WACA,aACA,iBACA,cACA,UACA,WACA,UACA,eACgB;AAChB,QAAM,QAAQ,MAAM,eAAe,WAAW,QAAQ;AACtD,QAAM,aAAa,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAC1E,QAAM,aAAa,MAAM,MAAM,kBAAkB,UAAU,QAAQ;AACnE,QAAM,UAAU,oBAAoB,OAAO,UAAU,aAAa;AAClE,SAAO,iBAAiBH,IAAkB;AAC1C,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IAEA;AAAA,MACE,MAAMD;AAAAA,MACN;AAAA,MACA,kBAAkB,MAAM,yBAAyB,WAAW,QAAQ;AAAA,MACpE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,IAEF;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;AAEA,eAAsB,qBACpB,WACA,iBACA,YACA,UACA,UACA,eACgB;AAChB,QAAM,QAAQ,MAAM,eAAe,WAAW,QAAQ;AACtD,QAAM,aAAa,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAC1E,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA,EAAC,WAAW,MAAMG,cAAuB,iBAAiB,WAAA;AAAA,IAC1D,oBAAoB,OAAO,UAAU,aAAa;AAAA,IAClD;AAAA,IACA;AAAA,EAAA;AAEJ;AAEA,eAAsB,cACpB,IACA,SACA,KACA,cACA,QACe;AACf,QAAM,KAAsB,CAAA;AAC5B,aAAW,OAAO,QAAQ,UAAU;AAClC,UAAM,EAAC,UAAA,IAAa,IAAI,KAAK;AAC7B,QAAI,CAAC,aAAa,IAAI,WAAW,SAAS,GAAG;AAC3C,YAAM,SAAS,MAAM,aAAA;AACrB,UAAI,WAAW,QAAW;AACxB,WAAG;AAAA,UACD;AAAA,YACE;AAAA,YACA,IAAI;AAAA,YACJE;AAAAA,YACA;AAAA,YACA;AAAA,YACA,IAAI,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,cAAc;AAAA,UAAA;AAAA,QACpC;AAAA,MAEJ;AACA,UAAI,WAAW,QAAW;AACxB,WAAG;AAAA,UACD;AAAA,YACE;AAAA,YACA,IAAI;AAAA,YACJC;AAAAA,YACA;AAAA,YACA;AAAA,YACA,IAAI,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,cAAc;AAAA,UAAA;AAAA,QACpC;AAAA,MAEJ;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,EAAE;AACtB;AAEO,SAAS,oBACd,QACA,UACA,eACyB;AACzB,QAAM,wBAAQ,IAAA;AACd,aAAW,SAAS,OAAO,SAAS;AAClC,MAAE;AAAA,MACA,MAAM,WAAW;AAAA,MACjB,IAAI;AAAA,QACF;AAAA,QACA,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAAA,MAAA;AAAA,IACzD;AAAA,EAEJ;AACA,SAAO;AACT;AAEA,eAAsB,iBACpB,IACA,UACA,UACA,QACA,aACA,YACA,eACqB;AACrB,QAAM,WAAW,IAAI,WAAW,UAAU,aAAa;AACvD,mBAAiB,SAAS,SAAS,KAAK,MAAM,GAAG;AAC/C,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,CAAC,IAAI,WAAW,MAAM,GAAG;AAC3B;AAAA,IACF;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACAA;AAAAA,MACA;AAAA,MACA,MAAM,CAAC;AAAA,MACP;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AACA,SAAO;AACT;"}
+ {"version":3,"file":"write.js","sources":["../../../../../replicache/src/db/write.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {diff} from '../btree/diff.ts';\nimport type {InternalDiff} from '../btree/node.ts';\nimport {BTreeRead, allEntriesAsDiff} from '../btree/read.ts';\nimport {BTreeWrite} from '../btree/write.ts';\nimport type {FrozenCookie} from '../cookies.ts';\nimport type {Write as DagWrite} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\nimport {lazy} from '../lazy.ts';\nimport type {DiffComputationConfig} from '../sync/diff.ts';\nimport {DiffsMap} from '../sync/diff.ts';\nimport type {ClientID} from '../sync/ids.ts';\nimport type {Commit} from './commit.ts';\nimport {\n type Meta as CommitMeta,\n type IndexRecord,\n type Meta,\n baseSnapshotHashFromHash,\n commitFromHash,\n newLocalDD31 as commitNewLocalDD31,\n newSnapshotDD31 as commitNewSnapshotDD31,\n getMutationID,\n} from './commit.ts';\nimport * as IndexOperation from './index-operation-enum.ts';\nimport type {IndexRead} from './index.ts';\nimport {IndexWrite, indexValue} from './index.ts';\nimport * as MetaType from './meta-type-enum.ts';\nimport {Read, readIndexesForRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class Write extends Read {\n readonly #dagWrite: DagWrite;\n readonly #basis: Commit<CommitMeta> | undefined;\n readonly #meta: CommitMeta;\n\n declare map: BTreeWrite;\n\n declare readonly indexes: Map<string, IndexWrite>;\n readonly #clientID: ClientID;\n readonly #formatVersion: FormatVersion;\n\n constructor(\n dagWrite: DagWrite,\n map: BTreeWrite,\n basis: Commit<CommitMeta> | undefined,\n meta: CommitMeta,\n indexes: Map<string, IndexWrite>,\n clientID: ClientID,\n formatVersion: FormatVersion,\n ) {\n // TypeScript has trouble\n super(dagWrite, map, indexes);\n this.#dagWrite = dagWrite;\n this.#basis = basis;\n this.#meta = meta;\n this.#clientID = clientID;\n this.#formatVersion = formatVersion;\n\n // TODO(arv): if (DEBUG) { ...\n if (basis === undefined) {\n assert(\n meta.basisHash === emptyHash,\n 'Expected basisHash to be emptyHash when basis is undefined',\n );\n } else {\n assert(\n meta.basisHash === basis.chunk.hash,\n 'Expected meta.basisHash to equal basis.chunk.hash',\n );\n }\n }\n\n /**\n * The value needs to be frozen since it is kept in memory and used later for\n * comparison as well as returned in `get`.\n */\n async put(\n lc: LogContext,\n key: string,\n value: FrozenJSONValue,\n ): Promise<void> {\n const oldVal = lazy(() => this.map.get(key));\n await updateIndexes(lc, this.indexes, key, oldVal, value);\n\n await this.map.put(key, value);\n }\n\n getMutationID(): Promise<number> {\n return getMutationID(this.#clientID, this.#dagWrite, this.#meta);\n }\n\n async del(lc: LogContext, key: string): Promise<boolean> {\n // TODO(arv): This does the binary search twice. 
We can do better.\n const oldVal = lazy(() => this.map.get(key));\n if (oldVal !== undefined) {\n await updateIndexes(lc, this.indexes, key, oldVal, undefined);\n }\n return this.map.del(key);\n }\n\n async clear(): Promise<void> {\n await this.map.clear();\n const ps = [];\n for (const idx of this.indexes.values()) {\n ps.push(idx.clear());\n }\n await Promise.all(ps);\n }\n\n async putCommit(): Promise<Commit<CommitMeta>> {\n const valueHash = await this.map.flush();\n const indexRecords: IndexRecord[] = [];\n\n for (const index of this.indexes.values()) {\n const valueHash = await index.flush();\n const indexRecord: IndexRecord = {\n definition: index.meta.definition,\n valueHash,\n };\n indexRecords.push(indexRecord);\n }\n\n let commit: Commit<Meta>;\n const meta = this.#meta;\n switch (meta.type) {\n case MetaType.LocalDD31: {\n assert(\n this.#formatVersion >= FormatVersion.DD31,\n 'Expected formatVersion >= DD31 for LocalDD31 commit',\n );\n const {\n basisHash,\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n timestamp,\n } = meta;\n commit = commitNewLocalDD31(\n this.#dagWrite.createChunk,\n basisHash,\n await baseSnapshotHashFromHash(basisHash, this.#dagWrite),\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n valueHash,\n indexRecords,\n timestamp,\n this.#clientID,\n );\n break;\n }\n\n case MetaType.SnapshotDD31: {\n assert(\n this.#formatVersion > FormatVersion.DD31,\n 'Expected formatVersion > DD31 for SnapshotDD31 commit',\n );\n const {basisHash, lastMutationIDs, cookieJSON} = meta;\n commit = commitNewSnapshotDD31(\n this.#dagWrite.createChunk,\n basisHash,\n lastMutationIDs,\n cookieJSON,\n valueHash,\n indexRecords,\n );\n break;\n }\n }\n await this.#dagWrite.putChunk(commit.chunk);\n return commit;\n }\n\n // Return value is the hash of the new commit.\n async commit(headName: string): Promise<Hash> {\n const commit = await this.putCommit();\n const commitHash = commit.chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return commitHash;\n }\n\n async commitWithDiffs(\n headName: string,\n diffConfig: DiffComputationConfig,\n ): Promise<[Hash, DiffsMap]> {\n const commit = this.putCommit();\n const diffMap = await this.#generateDiffs(diffConfig);\n const commitHash = (await commit).chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return [commitHash, diffMap];\n }\n\n async #generateDiffs(diffConfig: DiffComputationConfig): Promise<DiffsMap> {\n const diffsMap = new DiffsMap();\n if (!diffConfig.shouldComputeDiffs()) {\n return diffsMap;\n }\n\n let valueDiff: InternalDiff = [];\n if (this.#basis) {\n const basisMap = new BTreeRead(\n this.#dagWrite,\n this.#formatVersion,\n this.#basis.valueHash,\n );\n valueDiff = await diff(basisMap, this.map);\n }\n diffsMap.set('', valueDiff);\n let basisIndexes: Map<string, IndexRead>;\n if (this.#basis) {\n basisIndexes = readIndexesForRead(\n this.#basis,\n this.#dagWrite,\n this.#formatVersion,\n );\n } else {\n basisIndexes = new Map();\n }\n\n for (const [name, index] of this.indexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(name)) {\n continue;\n }\n const basisIndex = basisIndexes.get(name);\n assert(index !== basisIndex, 'Expected index to differ from basisIndex');\n\n const indexDiffResult = await (basisIndex\n ? diff(basisIndex.map, index.map)\n : // No basis. 
All keys are new.\n allEntriesAsDiff(index.map, 'add'));\n diffsMap.set(name, indexDiffResult);\n }\n\n // Handle indexes in basisIndex but not in this.indexes. All keys are\n // deleted.\n for (const [name, basisIndex] of basisIndexes) {\n if (\n !this.indexes.has(name) &&\n diffConfig.shouldComputeDiffsForIndex(name)\n ) {\n const indexDiffResult = await allEntriesAsDiff(basisIndex.map, 'del');\n diffsMap.set(name, indexDiffResult);\n }\n }\n return diffsMap;\n }\n\n close(): void {\n this.#dagWrite.release();\n }\n}\n\nexport async function newWriteLocal(\n basisHash: Hash,\n mutatorName: string,\n mutatorArgsJSON: FrozenJSONValue,\n originalHash: Hash | null,\n dagWrite: DagWrite,\n timestamp: number,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n const mutationID = await basis.getNextMutationID(clientID, dagWrite);\n const indexes = readIndexesForWrite(basis, dagWrite, formatVersion);\n assert(\n formatVersion >= FormatVersion.DD31,\n 'Expected formatVersion >= DD31 for newWriteLocal',\n );\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n\n {\n type: MetaType.LocalDD31,\n basisHash,\n baseSnapshotHash: await baseSnapshotHashFromHash(basisHash, dagWrite),\n mutatorName,\n mutatorArgsJSON,\n mutationID,\n originalHash,\n timestamp,\n clientID,\n },\n indexes,\n clientID,\n formatVersion,\n );\n}\n\nexport async function newWriteSnapshotDD31(\n basisHash: Hash,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n dagWrite: DagWrite,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n {basisHash, type: MetaType.SnapshotDD31, lastMutationIDs, cookieJSON},\n readIndexesForWrite(basis, dagWrite, formatVersion),\n clientID,\n formatVersion,\n );\n}\n\nexport async function updateIndexes(\n lc: LogContext,\n indexes: Map<string, IndexWrite>,\n key: string,\n oldValGetter: () => Promise<FrozenJSONValue | undefined>,\n newVal: FrozenJSONValue | undefined,\n): Promise<void> {\n const ps: Promise<void>[] = [];\n for (const idx of indexes.values()) {\n const {keyPrefix} = idx.meta.definition;\n if (!keyPrefix || key.startsWith(keyPrefix)) {\n const oldVal = await oldValGetter();\n if (oldVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Remove,\n key,\n oldVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? false,\n ),\n );\n }\n if (newVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Add,\n key,\n newVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? 
false,\n ),\n );\n }\n }\n }\n await Promise.all(ps);\n}\n\nexport function readIndexesForWrite(\n commit: Commit<CommitMeta>,\n dagWrite: DagWrite,\n formatVersion: FormatVersion,\n): Map<string, IndexWrite> {\n const m = new Map();\n for (const index of commit.indexes) {\n m.set(\n index.definition.name,\n new IndexWrite(\n index,\n new BTreeWrite(dagWrite, formatVersion, index.valueHash),\n ),\n );\n }\n return m;\n}\n\nexport async function createIndexBTree(\n lc: LogContext,\n dagWrite: DagWrite,\n valueMap: BTreeRead,\n prefix: string,\n jsonPointer: string,\n allowEmpty: boolean,\n formatVersion: FormatVersion,\n): Promise<BTreeWrite> {\n const indexMap = new BTreeWrite(dagWrite, formatVersion);\n for await (const entry of valueMap.scan(prefix)) {\n const key = entry[0];\n if (!key.startsWith(prefix)) {\n break;\n }\n await indexValue(\n lc,\n indexMap,\n IndexOperation.Add,\n key,\n entry[1],\n jsonPointer,\n allowEmpty,\n );\n }\n return indexMap;\n}\n"],"names":["valueHash","MetaType.LocalDD31","FormatVersion.DD31","commitNewLocalDD31","MetaType.SnapshotDD31","commitNewSnapshotDD31","IndexOperation.Remove","IndexOperation.Add"],"mappings":";;;;;;;;;;;;;AAmCO,MAAM,cAAc,KAAK;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EAKA;AAAA,EACA;AAAA,EAET,YACE,UACA,KACA,OACA,MACA,SACA,UACA,eACA;AAEA,UAAM,UAAU,KAAK,OAAO;AAC5B,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,QAAQ;AACb,SAAK,YAAY;AACjB,SAAK,iBAAiB;AAGtB,QAAI,UAAU,QAAW;AACvB;AAAA,QACE,KAAK,cAAc;AAAA,QACnB;AAAA,MAAA;AAAA,IAEJ,OAAO;AACL;AAAA,QACE,KAAK,cAAc,MAAM,MAAM;AAAA,QAC/B;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,IACJ,IACA,KACA,OACe;AACf,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAC3C,UAAM,cAAc,IAAI,KAAK,SAAS,KAAK,QAAQ,KAAK;AAExD,UAAM,KAAK,IAAI,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EAEA,gBAAiC;AAC/B,WAAO,cAAc,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK;AAAA,EACjE;AAAA,EAEA,MAAM,IAAI,IAAgB,KAA+B;AAEvD,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAC3C,QAAI,WAAW,QAAW;AACxB,YAAM,cAAc,IAAI,KAAK,SAAS,KAAK,QAAQ,MAAS;AAAA,IAC9D;AACA,WAAO,KAAK,IAAI,IAAI,GAAG;AAAA,EACzB;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,IAAI,MAAA;AACf,UAAM,KAAK,CAAA;AACX,eAAW,OAAO,KAAK,QAAQ,OAAA,GAAU;AACvC,SAAG,KAAK,IAAI,OAAO;AAAA,IACrB;AACA,UAAM,QAAQ,IAAI,EAAE;AAAA,EACtB;AAAA,EAEA,MAAM,YAAyC;AAC7C,UAAM,YAAY,MAAM,KAAK,IAAI,MAAA;AACjC,UAAM,eAA8B,CAAA;AAEpC,eAAW,SAAS,KAAK,QAAQ,OAAA,GAAU;AACzC,YAAMA,aAAY,MAAM,MAAM,MAAA;AAC9B,YAAM,cAA2B;AAAA,QAC/B,YAAY,MAAM,KAAK;AAAA,QACvB,WAAAA;AAAAA,MAAA;AAEF,mBAAa,KAAK,WAAW;AAAA,IAC/B;AAEA,QAAI;AACJ,UAAM,OAAO,KAAK;AAClB,YAAQ,KAAK,MAAA;AAAA,MACX,KAAKC,WAAoB;AACvB;AAAA,UACE,KAAK,kBAAkBC;AAAAA,UACvB;AAAA,QAAA;AAEF,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA,IACE;AACJ,iBAASC;AAAAA,UACP,KAAK,UAAU;AAAA,UACf;AAAA,UACA,MAAM,yBAAyB,WAAW,KAAK,SAAS;AAAA,UACxD;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,KAAK;AAAA,QAAA;AAEP;AAAA,MACF;AAAA,MAEA,KAAKC,cAAuB;AAC1B;AAAA,UACE,KAAK,iBAAiBF;AAAAA,UACtB;AAAA,QAAA;AAEF,cAAM,EAAC,WAAW,iBAAiB,WAAA,IAAc;AACjD,iBAASG;AAAAA,UACP,KAAK,UAAU;AAAA,UACf;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA;AAEF;AAAA,MACF;AAAA,IAAA;AAEF,UAAM,KAAK,UAAU,SAAS,OAAO,KAAK;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAO,UAAiC;AAC5C,UAAM,SAAS,MAAM,KAAK,UAAA;AAC1B,UAAM,aAAa,OAAO,MAAM;AAChC,UAAM,KAAK,UAAU,QAAQ,UAAU,UAAU;AACjD,UAAM,KAAK,UAAU,OAAA;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,gBACJ,UACA,YAC2B;AAC3B,UAAM,SAAS,KAAK,UAAA;AACpB,UAAM,UAAU,MAAM,KAAK,eAAe,UAAU;AACpD,UAAM,cAAc,MAAM,QAAQ,MAAM;AACxC,UAAM,KAAK,UAAU,QAAQ,UAAU,UAAU;AACjD,UAAM,KAAK,UAAU,OAAA;AACrB,WAAO,CAAC,YAAY,OAAO;AAAA,EAC7B;AAAA,EAEA
,MAAM,eAAe,YAAsD;AACzE,UAAM,WAAW,IAAI,SAAA;AACrB,QAAI,CAAC,WAAW,sBAAsB;AACpC,aAAO;AAAA,IACT;AAEA,QAAI,YAA0B,CAAA;AAC9B,QAAI,KAAK,QAAQ;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,OAAO;AAAA,MAAA;AAEd,kBAAY,MAAM,KAAK,UAAU,KAAK,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,IAAI,SAAS;AAC1B,QAAI;AACJ,QAAI,KAAK,QAAQ;AACf,qBAAe;AAAA,QACb,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAAA,IAET,OAAO;AACL,yCAAmB,IAAA;AAAA,IACrB;AAEA,eAAW,CAAC,MAAM,KAAK,KAAK,KAAK,SAAS;AACxC,UAAI,CAAC,WAAW,2BAA2B,IAAI,GAAG;AAChD;AAAA,MACF;AACA,YAAM,aAAa,aAAa,IAAI,IAAI;AACxC,aAAO,UAAU,YAAY,0CAA0C;AAEvE,YAAM,kBAAkB,OAAO,aAC3B,KAAK,WAAW,KAAK,MAAM,GAAG;AAAA;AAAA,QAE9B,iBAAiB,MAAM,KAAK,KAAK;AAAA;AACrC,eAAS,IAAI,MAAM,eAAe;AAAA,IACpC;AAIA,eAAW,CAAC,MAAM,UAAU,KAAK,cAAc;AAC7C,UACE,CAAC,KAAK,QAAQ,IAAI,IAAI,KACtB,WAAW,2BAA2B,IAAI,GAC1C;AACA,cAAM,kBAAkB,MAAM,iBAAiB,WAAW,KAAK,KAAK;AACpE,iBAAS,IAAI,MAAM,eAAe;AAAA,MACpC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,UAAU,QAAA;AAAA,EACjB;AACF;AAEA,eAAsB,cACpB,WACA,aACA,iBACA,cACA,UACA,WACA,UACA,eACgB;AAChB,QAAM,QAAQ,MAAM,eAAe,WAAW,QAAQ;AACtD,QAAM,aAAa,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAC1E,QAAM,aAAa,MAAM,MAAM,kBAAkB,UAAU,QAAQ;AACnE,QAAM,UAAU,oBAAoB,OAAO,UAAU,aAAa;AAClE;AAAA,IACE,iBAAiBH;AAAAA,IACjB;AAAA,EAAA;AAEF,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IAEA;AAAA,MACE,MAAMD;AAAAA,MACN;AAAA,MACA,kBAAkB,MAAM,yBAAyB,WAAW,QAAQ;AAAA,MACpE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,IAEF;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;AAEA,eAAsB,qBACpB,WACA,iBACA,YACA,UACA,UACA,eACgB;AAChB,QAAM,QAAQ,MAAM,eAAe,WAAW,QAAQ;AACtD,QAAM,aAAa,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAC1E,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA,EAAC,WAAW,MAAMG,cAAuB,iBAAiB,WAAA;AAAA,IAC1D,oBAAoB,OAAO,UAAU,aAAa;AAAA,IAClD;AAAA,IACA;AAAA,EAAA;AAEJ;AAEA,eAAsB,cACpB,IACA,SACA,KACA,cACA,QACe;AACf,QAAM,KAAsB,CAAA;AAC5B,aAAW,OAAO,QAAQ,UAAU;AAClC,UAAM,EAAC,UAAA,IAAa,IAAI,KAAK;AAC7B,QAAI,CAAC,aAAa,IAAI,WAAW,SAAS,GAAG;AAC3C,YAAM,SAAS,MAAM,aAAA;AACrB,UAAI,WAAW,QAAW;AACxB,WAAG;AAAA,UACD;AAAA,YACE;AAAA,YACA,IAAI;AAAA,YACJE;AAAAA,YACA;AAAA,YACA;AAAA,YACA,IAAI,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,cAAc;AAAA,UAAA;AAAA,QACpC;AAAA,MAEJ;AACA,UAAI,WAAW,QAAW;AACxB,WAAG;AAAA,UACD;AAAA,YACE;AAAA,YACA,IAAI;AAAA,YACJC;AAAAA,YACA;AAAA,YACA;AAAA,YACA,IAAI,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,cAAc;AAAA,UAAA;AAAA,QACpC;AAAA,MAEJ;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,EAAE;AACtB;AAEO,SAAS,oBACd,QACA,UACA,eACyB;AACzB,QAAM,wBAAQ,IAAA;AACd,aAAW,SAAS,OAAO,SAAS;AAClC,MAAE;AAAA,MACA,MAAM,WAAW;AAAA,MACjB,IAAI;AAAA,QACF;AAAA,QACA,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAAA,MAAA;AAAA,IACzD;AAAA,EAEJ;AACA,SAAO;AACT;AAEA,eAAsB,iBACpB,IACA,UACA,UACA,QACA,aACA,YACA,eACqB;AACrB,QAAM,WAAW,IAAI,WAAW,UAAU,aAAa;AACvD,mBAAiB,SAAS,SAAS,KAAK,MAAM,GAAG;AAC/C,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,CAAC,IAAI,WAAW,MAAM,GAAG;AAC3B;AAAA,IACF;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACAA;AAAAA,MACA;AAAA,MACA,MAAM,CAAC;AAAA,MACP;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AACA,SAAO;AACT;"}
@@ -1 +1 @@
- {"version":3,"file":"error-responses.d.ts","sourceRoot":"","sources":["../../../../replicache/src/error-responses.ts"],"names":[],"mappings":"AAUA,KAAK,aAAa,GAAG;IAAC,KAAK,EAAE,MAAM,CAAA;CAAC,CAAC;AAErC,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,IAAI,aAAa,CAEjE;AAED;;;;GAIG;AACH,MAAM,MAAM,2BAA2B,GAAG;IACxC,KAAK,EAAE,qBAAqB,CAAC;CAC9B,CAAC;AAEF,wBAAgB,6BAA6B,CAC3C,CAAC,EAAE,OAAO,GACT,CAAC,IAAI,2BAA2B,CAElC;AAED;;;GAGG;AACH,MAAM,MAAM,2BAA2B,GAAG;IACxC,KAAK,EAAE,qBAAqB,CAAC;IAC7B,WAAW,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,GAAG,SAAS,CAAC;CACtD,CAAC;AAEF,wBAAgB,6BAA6B,CAC3C,CAAC,EAAE,OAAO,GACT,CAAC,IAAI,2BAA2B,CAelC;AAED,wBAAgB,iCAAiC,CAC/C,CAAC,EAAE,OAAO,GACT,OAAO,CAAC,CAAC,IAAI,2BAA2B,CAE1C"}
+ {"version":3,"file":"error-responses.d.ts","sourceRoot":"","sources":["../../../../replicache/src/error-responses.ts"],"names":[],"mappings":"AAUA,KAAK,aAAa,GAAG;IAAC,KAAK,EAAE,MAAM,CAAA;CAAC,CAAC;AAErC,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,IAAI,aAAa,CAEjE;AAED;;;;GAIG;AACH,MAAM,MAAM,2BAA2B,GAAG;IACxC,KAAK,EAAE,qBAAqB,CAAC;CAC9B,CAAC;AAEF,wBAAgB,6BAA6B,CAC3C,CAAC,EAAE,OAAO,GACT,CAAC,IAAI,2BAA2B,CAElC;AAED;;;GAGG;AACH,MAAM,MAAM,2BAA2B,GAAG;IACxC,KAAK,EAAE,qBAAqB,CAAC;IAC7B,WAAW,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,GAAG,SAAS,CAAC;CACtD,CAAC;AAEF,wBAAgB,6BAA6B,CAC3C,CAAC,EAAE,OAAO,GACT,CAAC,IAAI,2BAA2B,CAelC;AAED,wBAAgB,iCAAiC,CAC/C,CAAC,EAAE,OAAO,GACT,OAAO,CAAC,CAAC,IAAI,2BAA2B,CAK1C"}
@@ -23,7 +23,10 @@ function isVersionNotSupportedResponse(v) {
   return false;
 }
 function assertVersionNotSupportedResponse(v) {
-  assert(isVersionNotSupportedResponse(v));
+  assert(
+    isVersionNotSupportedResponse(v),
+    "Expected a VersionNotSupportedResponse"
+  );
 }
 export {
   assertVersionNotSupportedResponse,
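
In the TypeScript source embedded in the map below, assertVersionNotSupportedResponse is an assertion function (asserts v is VersionNotSupportedResponse), so a call that returns narrows the argument's type for the caller. A hedged usage sketch; the handleError wrapper is hypothetical:

import {assertVersionNotSupportedResponse} from './error-responses.ts';

// Hypothetical caller: once the assertion returns, TypeScript treats
// `resp` as VersionNotSupportedResponse, so `versionType` is typed.
function handleError(resp: unknown): string {
  assertVersionNotSupportedResponse(resp);
  return resp.versionType ?? 'unspecified';
}
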
@@ -1 +1 @@
- {"version":3,"file":"error-responses.js","sources":["../../../../replicache/src/error-responses.ts"],"sourcesContent":["import {assert} from '../../shared/src/asserts.ts';\n\nfunction isError(obj: unknown, type: string): boolean {\n return (\n typeof obj === 'object' &&\n obj !== null &&\n (obj as {error: unknown}).error === type\n );\n}\n\ntype ErrorResponse = {error: string};\n\nexport function isErrorResponse(obj: object): obj is ErrorResponse {\n return typeof (obj as {error: unknown}).error === 'string';\n}\n\n/**\n * In certain scenarios the server can signal that it does not know about the\n * client. For example, the server might have lost all of its state (this might\n * happen during the development of the server).\n */\nexport type ClientStateNotFoundResponse = {\n error: 'ClientStateNotFound';\n};\n\nexport function isClientStateNotFoundResponse(\n v: unknown,\n): v is ClientStateNotFoundResponse {\n return isError(v, 'ClientStateNotFound');\n}\n\n/**\n * The server endpoint may respond with a `VersionNotSupported` error if it does\n * not know how to handle the pull, push or schema version.\n */\nexport type VersionNotSupportedResponse = {\n error: 'VersionNotSupported';\n versionType?: 'pull' | 'push' | 'schema' | undefined;\n};\n\nexport function isVersionNotSupportedResponse(\n v: unknown,\n): v is VersionNotSupportedResponse {\n if (!isError(v, 'VersionNotSupported')) {\n return false;\n }\n\n const {versionType} = v as Record<string, unknown>;\n switch (versionType) {\n case undefined:\n case 'pull':\n case 'push':\n case 'schema':\n return true;\n }\n\n return false;\n}\n\nexport function assertVersionNotSupportedResponse(\n v: unknown,\n): asserts v is VersionNotSupportedResponse {\n assert(isVersionNotSupportedResponse(v));\n}\n"],"names":[],"mappings":";AAEA,SAAS,QAAQ,KAAc,MAAuB;AACpD,SACE,OAAO,QAAQ,YACf,QAAQ,QACP,IAAyB,UAAU;AAExC;AAIO,SAAS,gBAAgB,KAAmC;AACjE,SAAO,OAAQ,IAAyB,UAAU;AACpD;AAWO,SAAS,8BACd,GACkC;AAClC,SAAO,QAAQ,GAAG,qBAAqB;AACzC;AAWO,SAAS,8BACd,GACkC;AAClC,MAAI,CAAC,QAAQ,GAAG,qBAAqB,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,EAAC,gBAAe;AACtB,UAAQ,aAAA;AAAA,IACN,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,EAAA;AAGX,SAAO;AACT;AAEO,SAAS,kCACd,GAC0C;AAC1C,SAAO,8BAA8B,CAAC,CAAC;AACzC;"}
+ {"version":3,"file":"error-responses.js","sources":["../../../../replicache/src/error-responses.ts"],"sourcesContent":["import {assert} from '../../shared/src/asserts.ts';\n\nfunction isError(obj: unknown, type: string): boolean {\n return (\n typeof obj === 'object' &&\n obj !== null &&\n (obj as {error: unknown}).error === type\n );\n}\n\ntype ErrorResponse = {error: string};\n\nexport function isErrorResponse(obj: object): obj is ErrorResponse {\n return typeof (obj as {error: unknown}).error === 'string';\n}\n\n/**\n * In certain scenarios the server can signal that it does not know about the\n * client. For example, the server might have lost all of its state (this might\n * happen during the development of the server).\n */\nexport type ClientStateNotFoundResponse = {\n error: 'ClientStateNotFound';\n};\n\nexport function isClientStateNotFoundResponse(\n v: unknown,\n): v is ClientStateNotFoundResponse {\n return isError(v, 'ClientStateNotFound');\n}\n\n/**\n * The server endpoint may respond with a `VersionNotSupported` error if it does\n * not know how to handle the pull, push or schema version.\n */\nexport type VersionNotSupportedResponse = {\n error: 'VersionNotSupported';\n versionType?: 'pull' | 'push' | 'schema' | undefined;\n};\n\nexport function isVersionNotSupportedResponse(\n v: unknown,\n): v is VersionNotSupportedResponse {\n if (!isError(v, 'VersionNotSupported')) {\n return false;\n }\n\n const {versionType} = v as Record<string, unknown>;\n switch (versionType) {\n case undefined:\n case 'pull':\n case 'push':\n case 'schema':\n return true;\n }\n\n return false;\n}\n\nexport function assertVersionNotSupportedResponse(\n v: unknown,\n): asserts v is VersionNotSupportedResponse {\n assert(\n isVersionNotSupportedResponse(v),\n 'Expected a VersionNotSupportedResponse',\n );\n}\n"],"names":[],"mappings":";AAEA,SAAS,QAAQ,KAAc,MAAuB;AACpD,SACE,OAAO,QAAQ,YACf,QAAQ,QACP,IAAyB,UAAU;AAExC;AAIO,SAAS,gBAAgB,KAAmC;AACjE,SAAO,OAAQ,IAAyB,UAAU;AACpD;AAWO,SAAS,8BACd,GACkC;AAClC,SAAO,QAAQ,GAAG,qBAAqB;AACzC;AAWO,SAAS,8BACd,GACkC;AAClC,MAAI,CAAC,QAAQ,GAAG,qBAAqB,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,EAAC,gBAAe;AACtB,UAAQ,aAAA;AAAA,IACN,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,EAAA;AAGX,SAAO;AACT;AAEO,SAAS,kCACd,GAC0C;AAC1C;AAAA,IACE,8BAA8B,CAAC;AAAA,IAC/B;AAAA,EAAA;AAEJ;"}
@@ -1 +1 @@
- {"version":3,"file":"clients.d.ts","sourceRoot":"","sources":["../../../../../replicache/src/persist/clients.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,6BAA6B,CAAC;AAEtD,OAAO,KAAK,MAAM,MAAM,+BAA+B,CAAC;AAKxD,OAAO,KAAK,EAAC,IAAI,EAAE,KAAK,EAAE,KAAK,EAAC,MAAM,iBAAiB,CAAC;AACxD,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAGL,KAAK,gBAAgB,EAOtB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,KAAK,EAAC,cAAc,EAAC,MAAM,uBAAuB,CAAC;AAC1D,OAAO,KAAK,KAAK,aAAa,MAAM,2BAA2B,CAAC;AAEhE,OAAO,EAAC,KAAK,IAAI,EAAa,MAAM,YAAY,CAAC;AACjD,OAAO,EAAC,KAAK,gBAAgB,EAAwB,MAAM,kBAAkB,CAAC;AAC9E,OAAO,EACL,KAAK,aAAa,EAClB,KAAK,QAAQ,EAEd,MAAM,gBAAgB,CAAC;AAExB,OAAO,EACL,KAAK,WAAW,EAKjB,MAAM,oBAAoB,CAAC;AAG5B,KAAK,aAAa,GAAG,IAAI,CAAC,OAAO,aAAa,CAAC,CAAC;AAEhD,MAAM,MAAM,SAAS,GAAG,WAAW,CAAC,QAAQ,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;AAEnE,QAAA,MAAM,cAAc;;;IAKlB;;;OAGG;;IAGH;;;OAGG;;cAEH,CAAC;AAEH,MAAM,MAAM,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,cAAc,CAAC,CAAC;AAE3D,QAAA,MAAM,cAAc;;IAGlB;;;;;;;;;;;OAWG;;IAGH;;;OAGG;;IAGH;;;OAGG;;cAEH,CAAC;AAEH,MAAM,MAAM,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,cAAc,CAAC,CAAC;AAE3D,MAAM,MAAM,MAAM,GAAG,QAAQ,GAAG,QAAQ,CAAC;AAMzC,eAAO,MAAM,iBAAiB,YAAY,CAAC;AAQ3C,wBAAgB,cAAc,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,KAAK,IAAI,QAAQ,CAExE;AAqCD,wBAAsB,UAAU,CAAC,OAAO,EAAE,IAAI,GAAG,OAAO,CAAC,SAAS,CAAC,CAGlE;AAaD;;GAEG;AACH,qBAAa,wBAAyB,SAAQ,KAAK;IACjD,IAAI,SAA8B;IAClC,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;gBACR,EAAE,EAAE,QAAQ;CAIzB;AAED;;GAEG;AACH,wBAAsB,oBAAoB,CACxC,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,IAAI,CAAC,CAIf;AAED,wBAAsB,cAAc,CAClC,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,OAAO,CAAC,CAElB;AAED,wBAAsB,SAAS,CAC7B,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAG7B;AAED,wBAAsB,aAAa,CACjC,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,MAAM,CAAC,CAMjB;AAED,KAAK,kBAAkB,GAAG;IACxB,MAAM,EAAE,QAAQ;IAChB,IAAI,EAAE,IAAI;IACV,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,OAAO;CACxB,CAAC;AAEF,wBAAgB,YAAY,CAC1B,WAAW,EAAE,QAAQ,EACrB,EAAE,EAAE,UAAU,EACd,MAAM,EAAE,KAAK,EACb,YAAY,EAAE,MAAM,EAAE,EACtB,OAAO,EAAE,gBAAgB,EACzB,aAAa,EAAE,aAAa,EAC5B,wBAAwB,EAAE,OAAO,GAChC,OAAO,CAAC,kBAAkB,CAAC,CAuJ7B;AAWD,eAAO,MAAM,6BAA6B,IAAI,CAAC;AAC/C,eAAO,MAAM,8BAA8B,IAAI,CAAC;AAChD,eAAO,MAAM,8BAA8B,IAAI,CAAC;AAEhD,MAAM,MAAM,wBAAwB,GAChC;IACE,IAAI,EAAE,OAAO,6BAA6B,CAAC;CAC5C,GACD;IACE,IAAI,EAAE,OAAO,8BAA8B,CAAC;IAC5C,QAAQ,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAC;CACpC,GACD;IACE,IAAI,EAAE,OAAO,8BAA8B,CAAC;IAC5C,aAAa,EAAE,aAAa,CAAC;IAC7B,QAAQ,EAAE,IAAI,CAAC;CAChB,CAAC;AAEN,wBAAsB,kBAAkB,CACtC,OAAO,EAAE,IAAI,EACb,YAAY,EAAE,MAAM,EAAE,EACtB,OAAO,EAAE,gBAAgB,GACxB,OAAO,CAAC,wBAAwB,CAAC,CA4CnC;AAsBD,wBAAsB,uBAAuB,CAC3C,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,IAAI,GACT,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC,CAMlC;AAED,wBAAsB,yBAAyB,CAC7C,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,IAAI,GACT,OAAO,CAAC,aAAa,GAAG,SAAS,CAAC,CAGpC;AAED;;;GAGG;AACH,wBAAsB,SAAS,CAC7B,QAAQ,EAAE,QAAQ,EAClB,MAAM,EAAE,MAAM,EACd,QAAQ,EAAE,KAAK,GACd,OAAO,CAAC,IAAI,CAAC,CAIf;AAED;;;GAGG;AACH,wBAAsB,UAAU,CAC9B,OAAO,EAAE,SAAS,EAClB,QAAQ,EAAE,KAAK,GACd,OAAO,CAAC,IAAI,CAAC,CAMf;AAED;;GAEG;AACH,MAAM,MAAM,gBAAgB,GAAG,CAC7B,cAAc,EAAE,cAAc,KAC3B,OAAO,CAAC,IAAI,CAAC,CAAC"}
+ {"version":3,"file":"clients.d.ts","sourceRoot":"","sources":["../../../../../replicache/src/persist/clients.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,6BAA6B,CAAC;AAEtD,OAAO,KAAK,MAAM,MAAM,+BAA+B,CAAC;AAKxD,OAAO,KAAK,EAAC,IAAI,EAAE,KAAK,EAAE,KAAK,EAAC,MAAM,iBAAiB,CAAC;AACxD,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAGL,KAAK,gBAAgB,EAOtB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,KAAK,EAAC,cAAc,EAAC,MAAM,uBAAuB,CAAC;AAC1D,OAAO,KAAK,KAAK,aAAa,MAAM,2BAA2B,CAAC;AAEhE,OAAO,EAAC,KAAK,IAAI,EAAa,MAAM,YAAY,CAAC;AACjD,OAAO,EAAC,KAAK,gBAAgB,EAAwB,MAAM,kBAAkB,CAAC;AAC9E,OAAO,EACL,KAAK,aAAa,EAClB,KAAK,QAAQ,EAEd,MAAM,gBAAgB,CAAC;AAExB,OAAO,EACL,KAAK,WAAW,EAKjB,MAAM,oBAAoB,CAAC;AAG5B,KAAK,aAAa,GAAG,IAAI,CAAC,OAAO,aAAa,CAAC,CAAC;AAEhD,MAAM,MAAM,SAAS,GAAG,WAAW,CAAC,QAAQ,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;AAEnE,QAAA,MAAM,cAAc;;;IAKlB;;;OAGG;;IAGH;;;OAGG;;cAEH,CAAC;AAEH,MAAM,MAAM,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,cAAc,CAAC,CAAC;AAE3D,QAAA,MAAM,cAAc;;IAGlB;;;;;;;;;;;OAWG;;IAGH;;;OAGG;;IAGH;;;OAGG;;cAEH,CAAC;AAEH,MAAM,MAAM,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,cAAc,CAAC,CAAC;AAE3D,MAAM,MAAM,MAAM,GAAG,QAAQ,GAAG,QAAQ,CAAC;AAMzC,eAAO,MAAM,iBAAiB,YAAY,CAAC;AAQ3C,wBAAgB,cAAc,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,KAAK,IAAI,QAAQ,CAExE;AAqCD,wBAAsB,UAAU,CAAC,OAAO,EAAE,IAAI,GAAG,OAAO,CAAC,SAAS,CAAC,CAGlE;AAaD;;GAEG;AACH,qBAAa,wBAAyB,SAAQ,KAAK;IACjD,IAAI,SAA8B;IAClC,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;gBACR,EAAE,EAAE,QAAQ;CAIzB;AAED;;GAEG;AACH,wBAAsB,oBAAoB,CACxC,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,IAAI,CAAC,CAIf;AAED,wBAAsB,cAAc,CAClC,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,OAAO,CAAC,CAElB;AAED,wBAAsB,SAAS,CAC7B,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAG7B;AAED,wBAAsB,aAAa,CACjC,EAAE,EAAE,QAAQ,EACZ,OAAO,EAAE,IAAI,GACZ,OAAO,CAAC,MAAM,CAAC,CAMjB;AAED,KAAK,kBAAkB,GAAG;IACxB,MAAM,EAAE,QAAQ;IAChB,IAAI,EAAE,IAAI;IACV,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,OAAO;CACxB,CAAC;AAEF,wBAAgB,YAAY,CAC1B,WAAW,EAAE,QAAQ,EACrB,EAAE,EAAE,UAAU,EACd,MAAM,EAAE,KAAK,EACb,YAAY,EAAE,MAAM,EAAE,EACtB,OAAO,EAAE,gBAAgB,EACzB,aAAa,EAAE,aAAa,EAC5B,wBAAwB,EAAE,OAAO,GAChC,OAAO,CAAC,kBAAkB,CAAC,CA0J7B;AAWD,eAAO,MAAM,6BAA6B,IAAI,CAAC;AAC/C,eAAO,MAAM,8BAA8B,IAAI,CAAC;AAChD,eAAO,MAAM,8BAA8B,IAAI,CAAC;AAEhD,MAAM,MAAM,wBAAwB,GAChC;IACE,IAAI,EAAE,OAAO,6BAA6B,CAAC;CAC5C,GACD;IACE,IAAI,EAAE,OAAO,8BAA8B,CAAC;IAC5C,QAAQ,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAC;CACpC,GACD;IACE,IAAI,EAAE,OAAO,8BAA8B,CAAC;IAC5C,aAAa,EAAE,aAAa,CAAC;IAC7B,QAAQ,EAAE,IAAI,CAAC;CAChB,CAAC;AAEN,wBAAsB,kBAAkB,CACtC,OAAO,EAAE,IAAI,EACb,YAAY,EAAE,MAAM,EAAE,EACtB,OAAO,EAAE,gBAAgB,GACxB,OAAO,CAAC,wBAAwB,CAAC,CA4CnC;AAsBD,wBAAsB,uBAAuB,CAC3C,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,IAAI,GACT,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC,CAMlC;AAED,wBAAsB,yBAAyB,CAC7C,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,IAAI,GACT,OAAO,CAAC,aAAa,GAAG,SAAS,CAAC,CAGpC;AAED;;;GAGG;AACH,wBAAsB,SAAS,CAC7B,QAAQ,EAAE,QAAQ,EAClB,MAAM,EAAE,MAAM,EACd,QAAQ,EAAE,KAAK,GACd,OAAO,CAAC,IAAI,CAAC,CAIf;AAED;;;GAGG;AACH,wBAAsB,UAAU,CAC9B,OAAO,EAAE,SAAS,EAClB,QAAQ,EAAE,KAAK,GACd,OAAO,CAAC,IAAI,CAAC,CAMf;AAED;;GAEG;AACH,MAAM,MAAM,gBAAgB,GAAG,CAC7B,cAAc,EAAE,cAAc,KAC3B,OAAO,CAAC,IAAI,CAAC,CAAC"}
@@ -206,7 +206,10 @@ function initClientV6(newClientID, lc, perdag, mutatorNames, indexes, formatVers
       indexRecords2
     );
   }
-  assert(res.type === FIND_MATCHING_CLIENT_TYPE_FORK);
+  assert(
+    res.type === FIND_MATCHING_CLIENT_TYPE_FORK,
+    "Expected result type to be FORK"
+  );
   const { snapshot } = res;
   const indexRecords = [];
   const { valueHash, indexes: oldIndexes } = snapshot;
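
The clients.js change follows the same pattern as the rest of this diff. As a before/after illustration of what the added message buys at runtime (all values below are hypothetical stand-ins; the real FIND_MATCHING_CLIENT_TYPE_FORK constant is defined elsewhere in this module):

// Hypothetical stand-ins for illustration only.
const FIND_MATCHING_CLIENT_TYPE_FORK = 'fork';
const res = {type: 'head'};

function assert(b: unknown, msg?: string): asserts b {
  if (!b) throw new Error(msg ?? 'Assertion failed');
}

try {
  // Before this diff the failing call threw a generic "Assertion failed".
  assert(
    res.type === FIND_MATCHING_CLIENT_TYPE_FORK,
    'Expected result type to be FORK',
  );
} catch (e) {
  console.error((e as Error).message); // "Expected result type to be FORK"
}
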