@tldraw/store 4.1.0-canary.54e71ea20e0d → 4.1.0-canary.5d5610599458

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/dist-cjs/index.d.ts +1884 -153
  2. package/dist-cjs/index.js +1 -1
  3. package/dist-cjs/lib/AtomMap.js +241 -1
  4. package/dist-cjs/lib/AtomMap.js.map +2 -2
  5. package/dist-cjs/lib/BaseRecord.js.map +2 -2
  6. package/dist-cjs/lib/ImmutableMap.js +141 -0
  7. package/dist-cjs/lib/ImmutableMap.js.map +2 -2
  8. package/dist-cjs/lib/IncrementalSetConstructor.js +45 -5
  9. package/dist-cjs/lib/IncrementalSetConstructor.js.map +2 -2
  10. package/dist-cjs/lib/RecordType.js +116 -21
  11. package/dist-cjs/lib/RecordType.js.map +2 -2
  12. package/dist-cjs/lib/RecordsDiff.js.map +2 -2
  13. package/dist-cjs/lib/Store.js +233 -39
  14. package/dist-cjs/lib/Store.js.map +2 -2
  15. package/dist-cjs/lib/StoreQueries.js +135 -22
  16. package/dist-cjs/lib/StoreQueries.js.map +2 -2
  17. package/dist-cjs/lib/StoreSchema.js +207 -2
  18. package/dist-cjs/lib/StoreSchema.js.map +2 -2
  19. package/dist-cjs/lib/StoreSideEffects.js +102 -10
  20. package/dist-cjs/lib/StoreSideEffects.js.map +2 -2
  21. package/dist-cjs/lib/executeQuery.js.map +2 -2
  22. package/dist-cjs/lib/migrate.js.map +2 -2
  23. package/dist-cjs/lib/setUtils.js.map +2 -2
  24. package/dist-esm/index.d.mts +1884 -153
  25. package/dist-esm/index.mjs +1 -1
  26. package/dist-esm/lib/AtomMap.mjs +241 -1
  27. package/dist-esm/lib/AtomMap.mjs.map +2 -2
  28. package/dist-esm/lib/BaseRecord.mjs.map +2 -2
  29. package/dist-esm/lib/ImmutableMap.mjs +141 -0
  30. package/dist-esm/lib/ImmutableMap.mjs.map +2 -2
  31. package/dist-esm/lib/IncrementalSetConstructor.mjs +45 -5
  32. package/dist-esm/lib/IncrementalSetConstructor.mjs.map +2 -2
  33. package/dist-esm/lib/RecordType.mjs +116 -21
  34. package/dist-esm/lib/RecordType.mjs.map +2 -2
  35. package/dist-esm/lib/RecordsDiff.mjs.map +2 -2
  36. package/dist-esm/lib/Store.mjs +233 -39
  37. package/dist-esm/lib/Store.mjs.map +2 -2
  38. package/dist-esm/lib/StoreQueries.mjs +135 -22
  39. package/dist-esm/lib/StoreQueries.mjs.map +2 -2
  40. package/dist-esm/lib/StoreSchema.mjs +207 -2
  41. package/dist-esm/lib/StoreSchema.mjs.map +2 -2
  42. package/dist-esm/lib/StoreSideEffects.mjs +102 -10
  43. package/dist-esm/lib/StoreSideEffects.mjs.map +2 -2
  44. package/dist-esm/lib/executeQuery.mjs.map +2 -2
  45. package/dist-esm/lib/migrate.mjs.map +2 -2
  46. package/dist-esm/lib/setUtils.mjs.map +2 -2
  47. package/package.json +3 -3
  48. package/src/lib/AtomMap.ts +241 -1
  49. package/src/lib/BaseRecord.test.ts +44 -0
  50. package/src/lib/BaseRecord.ts +118 -4
  51. package/src/lib/ImmutableMap.test.ts +103 -0
  52. package/src/lib/ImmutableMap.ts +212 -0
  53. package/src/lib/IncrementalSetConstructor.test.ts +111 -0
  54. package/src/lib/IncrementalSetConstructor.ts +63 -6
  55. package/src/lib/RecordType.ts +149 -25
  56. package/src/lib/RecordsDiff.test.ts +144 -0
  57. package/src/lib/RecordsDiff.ts +145 -10
  58. package/src/lib/Store.test.ts +827 -0
  59. package/src/lib/Store.ts +533 -67
  60. package/src/lib/StoreQueries.test.ts +627 -0
  61. package/src/lib/StoreQueries.ts +194 -27
  62. package/src/lib/StoreSchema.test.ts +226 -0
  63. package/src/lib/StoreSchema.ts +386 -8
  64. package/src/lib/StoreSideEffects.test.ts +239 -19
  65. package/src/lib/StoreSideEffects.ts +266 -19
  66. package/src/lib/devFreeze.test.ts +137 -0
  67. package/src/lib/executeQuery.test.ts +481 -0
  68. package/src/lib/executeQuery.ts +80 -2
  69. package/src/lib/migrate.test.ts +400 -0
  70. package/src/lib/migrate.ts +187 -14
  71. package/src/lib/setUtils.test.ts +105 -0
  72. package/src/lib/setUtils.ts +44 -4
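The hunks shown below are largely TSDoc additions to the compiled output and sources. As a minimal sketch of the workflow those new docs describe — the `Book` record type and handler are illustrative placeholders taken from the doc examples in the diff, not exports of @tldraw/store:

```ts
import { BaseRecord, RecordId, StoreSchema, createRecordType } from '@tldraw/store'

// Illustrative record type, mirroring the `Book` used throughout the new doc examples.
interface Book extends BaseRecord<'book', RecordId<Book>> {
	title: string
	author: string
}
const Book = createRecordType<Book>('book', { scope: 'document' })

// Schema creation as described by the newly documented StoreSchema.create().
const schema = StoreSchema.create(
	{ book: Book },
	{
		// Keep the offending record unchanged when validation fails.
		onValidationFailure: (failure) => failure.record,
	}
)

// serialize() captures the latest version of each migration sequence; the new docs
// suggest persisting it alongside the data for future migrations.
const serialized = schema.serialize()
console.log(serialized.sequences)
```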
@@ -49,12 +49,70 @@ class StoreSchema {
  }
  }
  }
+ /**
+ * Creates a new StoreSchema with the given record types and options.
+ *
+ * This static factory method is the recommended way to create a StoreSchema.
+ * It ensures type safety while providing a clean API for schema definition.
+ *
+ * @param types - Object mapping type names to their RecordType definitions
+ * @param options - Optional configuration for migrations, validation, and integrity checking
+ * @returns A new StoreSchema instance
+ *
+ * @example
+ * ```ts
+ * const Book = createRecordType<Book>('book', { scope: 'document' })
+ * const Author = createRecordType<Author>('author', { scope: 'document' })
+ *
+ * const schema = StoreSchema.create(
+ * {
+ * book: Book,
+ * author: Author
+ * },
+ * {
+ * migrations: [bookMigrations],
+ * onValidationFailure: (failure) => failure.record
+ * }
+ * )
+ * ```
+ *
+ * @public
+ */
  static create(types, options) {
  return new StoreSchema(types, options ?? {});
  }
  migrations = {};
  sortedMigrations;
  migrationCache = /* @__PURE__ */ new WeakMap();
+ /**
+ * Validates a record using its corresponding RecordType validator.
+ *
+ * This method ensures that records conform to their type definitions before
+ * being stored. If validation fails and an onValidationFailure handler is
+ * provided, it will be called to potentially recover from the error.
+ *
+ * @param store - The store instance where validation is occurring
+ * @param record - The record to validate
+ * @param phase - The lifecycle phase where validation is happening
+ * @param recordBefore - The previous version of the record (for updates)
+ * @returns The validated record, potentially modified by validation failure handler
+ *
+ * @example
+ * ```ts
+ * try {
+ * const validatedBook = schema.validateRecord(
+ * store,
+ * { id: 'book:1', typeName: 'book', title: '', author: 'Jane Doe' },
+ * 'createRecord',
+ * null
+ * )
+ * } catch (error) {
+ * console.error('Record validation failed:', error)
+ * }
+ * ```
+ *
+ * @public
+ */
  validateRecord(store, record, phase, recordBefore) {
  try {
  const recordType = getOwnProperty(this.types, record.typeName);
@@ -76,6 +134,34 @@ class StoreSchema {
  }
  }
  }
+ /**
+ * Gets all migrations that need to be applied to upgrade from a persisted schema
+ * to the current schema version.
+ *
+ * This method compares the persisted schema with the current schema and determines
+ * which migrations need to be applied to bring the data up to date. It handles
+ * both regular migrations and retroactive migrations, and caches results for
+ * performance.
+ *
+ * @param persistedSchema - The schema version that was previously persisted
+ * @returns A Result containing the list of migrations to apply, or an error message
+ *
+ * @example
+ * ```ts
+ * const persistedSchema = {
+ * schemaVersion: 2,
+ * sequences: { 'com.tldraw.book': 1, 'com.tldraw.author': 0 }
+ * }
+ *
+ * const migrationsResult = schema.getMigrationsSince(persistedSchema)
+ * if (migrationsResult.ok) {
+ * console.log('Migrations to apply:', migrationsResult.value.length)
+ * // Apply each migration to bring data up to date
+ * }
+ * ```
+ *
+ * @public
+ */
  getMigrationsSince(persistedSchema) {
  const cached = this.migrationCache.get(persistedSchema);
  if (cached) {
@@ -127,6 +213,34 @@ class StoreSchema {
  this.migrationCache.set(persistedSchema, result);
  return result;
  }
+ /**
+ * Migrates a single persisted record to match the current schema version.
+ *
+ * This method applies the necessary migrations to transform a record from an
+ * older (or newer) schema version to the current version. It supports both
+ * forward ('up') and backward ('down') migrations.
+ *
+ * @param record - The record to migrate
+ * @param persistedSchema - The schema version the record was persisted with
+ * @param direction - Direction to migrate ('up' for newer, 'down' for older)
+ * @returns A MigrationResult containing the migrated record or an error
+ *
+ * @example
+ * ```ts
+ * const oldRecord = { id: 'book:1', typeName: 'book', title: 'Old Title', publishDate: '2020-01-01' }
+ * const oldSchema = { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } }
+ *
+ * const result = schema.migratePersistedRecord(oldRecord, oldSchema, 'up')
+ * if (result.type === 'success') {
+ * console.log('Migrated record:', result.value)
+ * // Record now has publishedYear instead of publishDate
+ * } else {
+ * console.error('Migration failed:', result.reason)
+ * }
+ * ```
+ *
+ * @public
+ */
  migratePersistedRecord(record, persistedSchema, direction = "up") {
  const migrations = this.getMigrationsSince(persistedSchema);
  if (!migrations.ok) {
@@ -171,6 +285,37 @@ class StoreSchema {
  }
  return { type: "success", value: record };
  }
+ /**
+ * Migrates an entire store snapshot to match the current schema version.
+ *
+ * This method applies all necessary migrations to bring a persisted store
+ * snapshot up to the current schema version. It handles both record-level
+ * and store-level migrations, and can optionally mutate the input store
+ * for performance.
+ *
+ * @param snapshot - The store snapshot containing data and schema information
+ * @param opts - Options controlling migration behavior
+ * - mutateInputStore - Whether to modify the input store directly (default: false)
+ * @returns A MigrationResult containing the migrated store or an error
+ *
+ * @example
+ * ```ts
+ * const snapshot = {
+ * schema: { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } },
+ * store: {
+ * 'book:1': { id: 'book:1', typeName: 'book', title: 'Old Book', publishDate: '2020-01-01' }
+ * }
+ * }
+ *
+ * const result = schema.migrateStoreSnapshot(snapshot)
+ * if (result.type === 'success') {
+ * console.log('Migrated store:', result.value)
+ * // All records are now at current schema version
+ * }
+ * ```
+ *
+ * @public
+ */
  migrateStoreSnapshot(snapshot, opts) {
  let { store } = snapshot;
  const migrations = this.getMigrationsSince(snapshot.schema);
@@ -211,10 +356,49 @@ class StoreSchema {
  }
  return { type: "success", value: store };
  }
- /** @internal */
+ /**
+ * Creates an integrity checker function for the given store.
+ *
+ * This method calls the createIntegrityChecker option if provided, allowing
+ * custom integrity checking logic to be set up for the store. The integrity
+ * checker is used to validate store consistency and catch data corruption.
+ *
+ * @param store - The store instance to create an integrity checker for
+ * @returns An integrity checker function, or undefined if none is configured
+ *
+ * @internal
+ */
  createIntegrityChecker(store) {
  return this.options.createIntegrityChecker?.(store) ?? void 0;
  }
+ /**
+ * Serializes the current schema to a SerializedSchemaV2 format.
+ *
+ * This method creates a serialized representation of the current schema,
+ * capturing the latest version number for each migration sequence.
+ * The result can be persisted and later used to determine what migrations
+ * need to be applied when loading data.
+ *
+ * @returns A SerializedSchemaV2 object representing the current schema state
+ *
+ * @example
+ * ```ts
+ * const serialized = schema.serialize()
+ * console.log(serialized)
+ * // {
+ * // schemaVersion: 2,
+ * // sequences: {
+ * // 'com.tldraw.book': 3,
+ * // 'com.tldraw.author': 2
+ * // }
+ * // }
+ *
+ * // Store this with your data for future migrations
+ * localStorage.setItem('schema', JSON.stringify(serialized))
+ * ```
+ *
+ * @public
+ */
  serialize() {
  return {
  schemaVersion: 2,
@@ -227,6 +411,14 @@ class StoreSchema {
  };
  }
  /**
+ * Serializes a schema representing the earliest possible version.
+ *
+ * This method creates a serialized schema where all migration sequences
+ * are set to version 0, representing the state before any migrations
+ * have been applied. This is used in specific legacy scenarios.
+ *
+ * @returns A SerializedSchema with all sequences set to version 0
+ *
  * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!
  * @internal
  */
@@ -238,7 +430,20 @@ class StoreSchema {
  )
  };
  }
- /** @internal */
+ /**
+ * Gets the RecordType definition for a given type name.
+ *
+ * This method retrieves the RecordType associated with the specified
+ * type name, which contains the record's validation, creation, and
+ * other behavioral logic.
+ *
+ * @param typeName - The name of the record type to retrieve
+ * @returns The RecordType definition for the specified type
+ *
+ * @throws Will throw an error if the record type does not exist
+ *
+ * @internal
+ */
  getType(typeName) {
  const type = getOwnProperty(this.types, typeName);
  assert(type, "record type does not exists");
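For the migration methods documented in the hunks above, a minimal sketch of loading an older snapshot could look like this. It reuses the illustrative `schema` from the sketch after the file list, and the snapshot contents are placeholders modeled on the doc examples, not real persisted data:

```ts
// A snapshot saved by an older build, shaped like the examples in the new docs.
const snapshot = {
	schema: { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } },
	store: {
		'book:1': { id: 'book:1', typeName: 'book', title: 'Old Book', author: 'Jane Doe' },
	},
}

const result = schema.migrateStoreSnapshot(snapshot as any)
if (result.type === 'success') {
	// Every record in result.value is now at the current schema version.
	console.log(Object.keys(result.value))
} else {
	// result.reason is a MigrationFailureReason, e.g. MigrationFailureReason.MigrationError.
	console.error('Could not migrate snapshot:', result.reason)
}
```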
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/lib/StoreSchema.ts"],
- "sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/** @public */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/** @public */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/** @public */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/** @public */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/** @public */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/** @public */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? 
{})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\tprivate readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\t// Check cache first\n\t\tconst cached = this.migrationCache.get(persistedSchema)\n\t\tif (cached) {\n\t\t\treturn cached\n\t\t}\n\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\t// Cache the error result\n\t\t\tthis.migrationCache.set(persistedSchema, upgradeResult)\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\tconst result = Result.ok([])\n\t\t\t// Cache the empty result\n\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\treturn result\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = 
`${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\tconst result = Result.err('Incompatible schema?')\n\t\t\t\t// Cache the error result\n\t\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\t\treturn result\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\tconst result = Result.ok(\n\t\t\tthis.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))\n\t\t)\n\t\t// Cache the result\n\t\tthis.migrationCache.set(persistedSchema, result)\n\t\treturn result\n\t}\n\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\tmigrateStoreSnapshot(\n\t\tsnapshot: StoreSnapshot<R>,\n\t\topts?: { mutateInputStore?: boolean }\n\t): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tif (!opts?.mutateInputStore) {\n\t\t\tstore = structuredClone(store)\n\t\t}\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? 
migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = result as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = result as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/** @internal */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? undefined\n\t}\n\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t * @internal\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/** @internal */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
- "mappings": "AAAA;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAIP;AAAA,EAEC;AAAA,EAIA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAqCA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,OAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,OAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,OAAO,GAAG,MAAM;AACxB;AAqBO,MAAM,YAAkD;AAAA,EAetD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,aAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,yBAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,mBAAmB,eAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,eAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA,EAnCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EACQ,iBAAiB,oBAAI,QAAuD;AAAA,EAyB7F,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,aAAa,eAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA,EAEO,mBAAmB,iBAAgE;AAEzF,UAAM,SAAS,KAAK,eAAe,IAAI,eAAe;AACtD,QAAI,QAAQ;AACX,aAAO;AAAA,IACR;AAEA,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AAEtB,WAAK,eAAe,IAAI,iBAAiB,aAAa;AACtD,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,YAAMA,UAAS,OAAO,GAAG,CAAC,CAAC;AAE3B,WAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,aAAOA;AAAA,IACR;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,cAAMA,UAAS,OAAO,IAAI,sBAAsB;AAEhD,aAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,eAAOA;AAAA,MACR;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,UAAM,SAAS,OAAO;AAAA,MACrB,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC;AAAA,IACxE;AAEA,SAAK,eAAe,IAAI,iBAAiB,MAAM;AAC/C,WAAO;AAAA,EACR;AAAA,EAEA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,uBAAuB,sBACvB,uBAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA
,UACN,QAAQ,uBAAuB;AAAA,QAChC;AAAA,MACD;AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,aAAS,gBAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,MAAM;AAC3C,YAAI,QAAQ;AACX,mBAAS,gBAAgB,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA,EAEA,qBACC,UACA,MACsC;AACtC,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,QAAI,CAAC,MAAM,kBAAkB;AAC5B,cAAQ,gBAAgB,KAAK;AAAA,IAC9B;AAEA,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,IAAI;AAAA,YACnC;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,oBAAQ;AAAA,UACT;AAAA,QACD,OAAO;AACN,gCAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA,EAGA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA,EAEA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,SAAS,iBAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGA,QAAQ,UAAkB;AACzB,UAAM,OAAO,eAAe,KAAK,OAAO,QAAQ;AAChD,WAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
+ "sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/**\n * Version 1 format for serialized store schema information.\n *\n * This is the legacy format used before schema version 2. Version 1 schemas\n * separate store-level versioning from record-level versioning, and support\n * subtypes for complex record types like shapes.\n *\n * @example\n * ```ts\n * const schemaV1: SerializedSchemaV1 = {\n * schemaVersion: 1,\n * storeVersion: 2,\n * recordVersions: {\n * book: { version: 3 },\n * shape: {\n * version: 2,\n * subTypeVersions: { rectangle: 1, circle: 2 },\n * subTypeKey: 'type'\n * }\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/**\n * Version 2 format for serialized store schema information.\n *\n * This is the current format that uses a unified sequence-based approach\n * for tracking versions across all migration sequences. 
Each sequence ID\n * maps to the latest version number for that sequence.\n *\n * @example\n * ```ts\n * const schemaV2: SerializedSchemaV2 = {\n * schemaVersion: 2,\n * sequences: {\n * 'com.tldraw.store': 3,\n * 'com.tldraw.book': 2,\n * 'com.tldraw.shape': 4,\n * 'com.tldraw.shape.rectangle': 1\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/**\n * Union type representing all supported serialized schema formats.\n *\n * This type allows the store to handle both legacy (V1) and current (V2)\n * schema formats during deserialization and migration.\n *\n * @example\n * ```ts\n * function handleSchema(schema: SerializedSchema) {\n * if (schema.schemaVersion === 1) {\n * // Handle V1 format\n * console.log('Store version:', schema.storeVersion)\n * } else {\n * // Handle V2 format\n * console.log('Sequences:', schema.sequences)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\n/**\n * Upgrades a serialized schema from version 1 to version 2 format.\n *\n * Version 1 schemas use separate `storeVersion` and `recordVersions` fields,\n * while version 2 schemas use a unified `sequences` object with sequence IDs.\n *\n * @param schema - The serialized schema to upgrade\n * @returns A Result containing the upgraded schema or an error message\n *\n * @example\n * ```ts\n * const v1Schema = {\n * schemaVersion: 1,\n * storeVersion: 1,\n * recordVersions: {\n * book: { version: 2 },\n * author: { version: 1, subTypeVersions: { fiction: 1 }, subTypeKey: 'genre' }\n * }\n * }\n *\n * const result = upgradeSchema(v1Schema)\n * if (result.ok) {\n * console.log(result.value.sequences)\n * // { 'com.tldraw.store': 1, 'com.tldraw.book': 2, 'com.tldraw.author': 1, 'com.tldraw.author.fiction': 1 }\n * }\n * ```\n *\n * @public\n */\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/**\n * Information about a record validation failure that occurred in the store.\n *\n * This interface provides context about validation errors, including the failed\n * record, the store state, and the operation phase where the failure occurred.\n * It's used by validation failure handlers to implement recovery strategies.\n *\n * @example\n * ```ts\n * const schema = StoreSchema.create(\n * { book: Book },\n * {\n * onValidationFailure: (failure: StoreValidationFailure<Book>) => {\n * console.error(`Validation failed during ${failure.phase}:`, failure.error)\n * console.log('Failed record:', failure.record)\n * console.log('Previous record:', failure.recordBefore)\n *\n * // Return a corrected version of the record\n * return { ...failure.record, title: failure.record.title || 
'Untitled' }\n * }\n * }\n * )\n * ```\n *\n * @public\n */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/**\n * Configuration options for creating a StoreSchema.\n *\n * These options control migration behavior, validation error handling,\n * and integrity checking for the store schema.\n *\n * @example\n * ```ts\n * const options: StoreSchemaOptions<MyRecord, MyProps> = {\n * migrations: [bookMigrations, authorMigrations],\n * onValidationFailure: (failure) => {\n * // Log the error and return a corrected record\n * console.error('Validation failed:', failure.error)\n * return sanitizeRecord(failure.record)\n * },\n * createIntegrityChecker: (store) => {\n * // Set up integrity checking logic\n * return setupIntegrityChecks(store)\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/**\n * Manages the schema definition, validation, and migration system for a Store.\n *\n * StoreSchema coordinates record types, handles data migrations between schema\n * versions, validates records, and provides the foundational structure for\n * reactive stores. It acts as the central authority for data consistency\n * and evolution within the store system.\n *\n * @example\n * ```ts\n * // Define record types\n * const Book = createRecordType<Book>('book', { scope: 'document' })\n * const Author = createRecordType<Author>('author', { scope: 'document' })\n *\n * // Create schema with migrations\n * const schema = StoreSchema.create(\n * { book: Book, author: Author },\n * {\n * migrations: [bookMigrations, authorMigrations],\n * onValidationFailure: (failure) => {\n * console.warn('Validation failed, using default:', failure.error)\n * return failure.record // or return a corrected version\n * }\n * }\n * )\n *\n * // Use with store\n * const store = new Store({ schema })\n * ```\n *\n * @public\n */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\t/**\n\t * Creates a new StoreSchema with the given record types and options.\n\t *\n\t * This static factory method is the recommended way to create a StoreSchema.\n\t * It ensures type safety while providing a clean API for schema definition.\n\t *\n\t * @param types - Object mapping type names to their RecordType definitions\n\t * @param options - Optional configuration for migrations, validation, and integrity checking\n\t * @returns A new StoreSchema instance\n\t *\n\t * @example\n\t * ```ts\n\t * const Book = createRecordType<Book>('book', { scope: 'document' })\n\t * const Author = createRecordType<Author>('author', { scope: 'document' })\n\t *\n\t * const schema = StoreSchema.create(\n\t * {\n\t * book: Book,\n\t * author: Author\n\t * },\n\t * {\n\t * migrations: [bookMigrations],\n\t * onValidationFailure: (failure) => failure.record\n\t * }\n\t * )\n\t * ```\n\t *\n\t * @public\n\t */\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in 
R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? {})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\tprivate readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Validates a record using its corresponding RecordType validator.\n\t *\n\t * This method ensures that records conform to their type definitions before\n\t * being stored. If validation fails and an onValidationFailure handler is\n\t * provided, it will be called to potentially recover from the error.\n\t *\n\t * @param store - The store instance where validation is occurring\n\t * @param record - The record to validate\n\t * @param phase - The lifecycle phase where validation is happening\n\t * @param recordBefore - The previous version of the record (for updates)\n\t * @returns The validated record, potentially modified by validation failure handler\n\t *\n\t * @example\n\t * ```ts\n\t * try {\n\t * const validatedBook = schema.validateRecord(\n\t * store,\n\t * { id: 'book:1', typeName: 'book', title: '', author: 'Jane Doe' },\n\t * 'createRecord',\n\t * null\n\t * )\n\t * } catch (error) {\n\t * console.error('Record validation failed:', error)\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Gets all migrations that need to be applied to upgrade from a persisted schema\n\t * to the current schema version.\n\t *\n\t * This method compares the persisted schema with the current schema and determines\n\t * which migrations need to be applied to bring the data up to date. 
It handles\n\t * both regular migrations and retroactive migrations, and caches results for\n\t * performance.\n\t *\n\t * @param persistedSchema - The schema version that was previously persisted\n\t * @returns A Result containing the list of migrations to apply, or an error message\n\t *\n\t * @example\n\t * ```ts\n\t * const persistedSchema = {\n\t * schemaVersion: 2,\n\t * sequences: { 'com.tldraw.book': 1, 'com.tldraw.author': 0 }\n\t * }\n\t *\n\t * const migrationsResult = schema.getMigrationsSince(persistedSchema)\n\t * if (migrationsResult.ok) {\n\t * console.log('Migrations to apply:', migrationsResult.value.length)\n\t * // Apply each migration to bring data up to date\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\t// Check cache first\n\t\tconst cached = this.migrationCache.get(persistedSchema)\n\t\tif (cached) {\n\t\t\treturn cached\n\t\t}\n\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\t// Cache the error result\n\t\t\tthis.migrationCache.set(persistedSchema, upgradeResult)\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\tconst result = Result.ok([])\n\t\t\t// Cache the empty result\n\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\treturn result\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = `${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\tconst result = Result.err('Incompatible schema?')\n\t\t\t\t// Cache the error result\n\t\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\t\treturn result\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\tconst result = Result.ok(\n\t\t\tthis.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))\n\t\t)\n\t\t// Cache the result\n\t\tthis.migrationCache.set(persistedSchema, result)\n\t\treturn result\n\t}\n\n\t/**\n\t * Migrates a single persisted record to match the current schema version.\n\t *\n\t * This method applies the necessary migrations to transform a record from an\n\t * older (or newer) schema version to the current version. 
It supports both\n\t * forward ('up') and backward ('down') migrations.\n\t *\n\t * @param record - The record to migrate\n\t * @param persistedSchema - The schema version the record was persisted with\n\t * @param direction - Direction to migrate ('up' for newer, 'down' for older)\n\t * @returns A MigrationResult containing the migrated record or an error\n\t *\n\t * @example\n\t * ```ts\n\t * const oldRecord = { id: 'book:1', typeName: 'book', title: 'Old Title', publishDate: '2020-01-01' }\n\t * const oldSchema = { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } }\n\t *\n\t * const result = schema.migratePersistedRecord(oldRecord, oldSchema, 'up')\n\t * if (result.type === 'success') {\n\t * console.log('Migrated record:', result.value)\n\t * // Record now has publishedYear instead of publishDate\n\t * } else {\n\t * console.error('Migration failed:', result.reason)\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\t/**\n\t * Migrates an entire store snapshot to match the current schema version.\n\t *\n\t * This method applies all necessary migrations to bring a persisted store\n\t * snapshot up to the current schema version. 
It handles both record-level\n\t * and store-level migrations, and can optionally mutate the input store\n\t * for performance.\n\t *\n\t * @param snapshot - The store snapshot containing data and schema information\n\t * @param opts - Options controlling migration behavior\n\t * - mutateInputStore - Whether to modify the input store directly (default: false)\n\t * @returns A MigrationResult containing the migrated store or an error\n\t *\n\t * @example\n\t * ```ts\n\t * const snapshot = {\n\t * schema: { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } },\n\t * store: {\n\t * 'book:1': { id: 'book:1', typeName: 'book', title: 'Old Book', publishDate: '2020-01-01' }\n\t * }\n\t * }\n\t *\n\t * const result = schema.migrateStoreSnapshot(snapshot)\n\t * if (result.type === 'success') {\n\t * console.log('Migrated store:', result.value)\n\t * // All records are now at current schema version\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tmigrateStoreSnapshot(\n\t\tsnapshot: StoreSnapshot<R>,\n\t\topts?: { mutateInputStore?: boolean }\n\t): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tif (!opts?.mutateInputStore) {\n\t\t\tstore = structuredClone(store)\n\t\t}\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = result as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = result as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/**\n\t * Creates an integrity checker function for the given store.\n\t *\n\t * This method calls the createIntegrityChecker option if provided, allowing\n\t * custom integrity checking logic to be set up for the store. The integrity\n\t * checker is used to validate store consistency and catch data corruption.\n\t *\n\t * @param store - The store instance to create an integrity checker for\n\t * @returns An integrity checker function, or undefined if none is configured\n\t *\n\t * @internal\n\t */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? 
undefined\n\t}\n\n\t/**\n\t * Serializes the current schema to a SerializedSchemaV2 format.\n\t *\n\t * This method creates a serialized representation of the current schema,\n\t * capturing the latest version number for each migration sequence.\n\t * The result can be persisted and later used to determine what migrations\n\t * need to be applied when loading data.\n\t *\n\t * @returns A SerializedSchemaV2 object representing the current schema state\n\t *\n\t * @example\n\t * ```ts\n\t * const serialized = schema.serialize()\n\t * console.log(serialized)\n\t * // {\n\t * // schemaVersion: 2,\n\t * // sequences: {\n\t * // 'com.tldraw.book': 3,\n\t * // 'com.tldraw.author': 2\n\t * // }\n\t * // }\n\t *\n\t * // Store this with your data for future migrations\n\t * localStorage.setItem('schema', JSON.stringify(serialized))\n\t * ```\n\t *\n\t * @public\n\t */\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * Serializes a schema representing the earliest possible version.\n\t *\n\t * This method creates a serialized schema where all migration sequences\n\t * are set to version 0, representing the state before any migrations\n\t * have been applied. This is used in specific legacy scenarios.\n\t *\n\t * @returns A SerializedSchema with all sequences set to version 0\n\t *\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t * @internal\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * Gets the RecordType definition for a given type name.\n\t *\n\t * This method retrieves the RecordType associated with the specified\n\t * type name, which contains the record's validation, creation, and\n\t * other behavioral logic.\n\t *\n\t * @param typeName - The name of the record type to retrieve\n\t * @returns The RecordType definition for the specified type\n\t *\n\t * @throws Will throw an error if the record type does not exist\n\t *\n\t * @internal\n\t */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
+ "mappings": "AAAA;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAIP;AAAA,EAEC;AAAA,EAIA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAmIA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,OAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,OAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,OAAO,GAAG,MAAM;AACxB;AAoGO,MAAM,YAAkD;AAAA,EA4CtD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,aAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,yBAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,mBAAmB,eAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,eAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAnCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EACQ,iBAAiB,oBAAI,QAAuD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsD7F,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,aAAa,eAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BO,mBAAmB,iBAAgE;AAEzF,UAAM,SAAS,KAAK,eAAe,IAAI,eAAe;AACtD,QAAI,QAAQ;AACX,aAAO;AAAA,IACR;AAEA,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AAEtB,WAAK,eAAe,IAAI,iBAAiB,aAAa;AACtD,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,YAAMA,UAAS,OAAO,GAAG,CAAC,CAAC;AAE3B,WAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,aAAOA;AAAA,IACR;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,cAAMA,UAAS,OAAO,IAAI,sBAAsB;AAEhD,aAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,eAAOA;AAAA,MACR;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,UAAM,SAAS,OAAO;AAAA,MACrB,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC;AAAA,IACxE;AAEA,SAAK,eAAe,IAAI,iBAAiB,MAAM;AAC/C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,uBAAuB,sBACvB,uBAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA,UACN,QAAQ,uBAAuB;AAAA,QAChC;AAAA,MACD;AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,aAAS,gBAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,MAAM;AAC3C,YAAI,QAAQ;AACX,mBAAS,gBAAgB,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiCA,qBACC,UACA,MACsC;AACtC,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,QAAI,CAAC,MAAM,kBAAkB;AAC5B,cAAQ,gBAAgB,KAAK;AAAA,IAC9B;AAEA,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,IAAI;AAAA,YACnC;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,oBAAQ;AAAA,UACT;AAAA,QACD,OAAO;AACN,gCAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,SAAS,iBAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,QAAQ,UAAkB;AACzB,UAAM,OAAO,eAAe,KAAK,OAAO,QAAQ;AAChD,WAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
  "names": ["result"]
  }
@@ -1,4 +1,9 @@
  class StoreSideEffects {
+ /**
+ * Creates a new side effects manager for the given store.
+ *
+ * store - The store instance to manage side effects for
+ */
  constructor(store) {
  this.store = store;
  }
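The constructor above isn't normally called by hand: a `Store` creates its own manager and exposes it as `store.sideEffects`. A minimal sketch of that setup, assuming this package's public `createRecordType`, `StoreSchema`, and `Store` APIs and a hypothetical `Book` record type used by the sketches that follow:

```ts
import {
	BaseRecord,
	RecordId,
	Store,
	StoreSchema,
	createRecordType,
} from '@tldraw/store'

// Hypothetical record type, for illustration only.
interface Book extends BaseRecord<'book', RecordId<Book>> {
	title: string
	pageCount: number
}

const Book = createRecordType<Book>('book', {
	scope: 'document',
	validator: { validate: (record) => record as Book },
})

const schema = StoreSchema.create<Book>({ book: Book })
const store = new Store({ schema, props: {} })

// The side effects manager described in this diff.
const sideEffects = store.sideEffects
```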
@@ -10,15 +15,35 @@ class StoreSideEffects {
  _afterDeleteHandlers = {};
  _operationCompleteHandlers = [];
  _isEnabled = true;
- /** @internal */
+ /**
+ * Checks whether side effects are currently enabled.
+ * When disabled, all side effect handlers are bypassed.
+ *
+ * @returns `true` if side effects are enabled, `false` otherwise
+ * @internal
+ */
  isEnabled() {
  return this._isEnabled;
  }
- /** @internal */
+ /**
+ * Enables or disables side effects processing.
+ * When disabled, no side effect handlers will be called.
+ *
+ * @param enabled - Whether to enable or disable side effects
+ * @internal
+ */
  setIsEnabled(enabled) {
  this._isEnabled = enabled;
  }
- /** @internal */
+ /**
+ * Processes all registered 'before create' handlers for a record.
+ * Handlers are called in registration order and can transform the record.
+ *
+ * @param record - The record about to be created
+ * @param source - Whether the change originated from 'user' or 'remote'
+ * @returns The potentially modified record to actually create
+ * @internal
+ */
  handleBeforeCreate(record, source) {
  if (!this._isEnabled) return record;
  const handlers = this._beforeCreateHandlers[record.typeName];
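Continuing the sketch above: the usual way to feed `handleBeforeCreate` is the store's registration API (assumed here to be `registerBeforeCreateHandler`); whatever the handler returns is what actually gets created.

```ts
// Normalize records on their way into the store.
const removeBeforeCreate = sideEffects.registerBeforeCreateHandler('book', (book, source) => {
	// Applies to both 'user' and 'remote' sources.
	if (book.pageCount < 1) return { ...book, pageCount: 1 }
	return book
})
```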
@@ -31,7 +56,14 @@ class StoreSideEffects {
  }
  return record;
  }
- /** @internal */
+ /**
+ * Processes all registered 'after create' handlers for a record.
+ * Handlers are called in registration order after the record is created.
+ *
+ * @param record - The record that was created
+ * @param source - Whether the change originated from 'user' or 'remote'
+ * @internal
+ */
  handleAfterCreate(record, source) {
  if (!this._isEnabled) return;
  const handlers = this._afterCreateHandlers[record.typeName];
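Continuing the same sketch, an 'after create' handler reacts once the record is already in the store (again assuming a public `registerAfterCreateHandler`):

```ts
const removeAfterCreate = sideEffects.registerAfterCreateHandler('book', (book, source) => {
	console.log(`book created via ${source}:`, book.id)
})

store.put([Book.create({ title: 'Annihilation', pageCount: 208 })])
// logs: book created via user: book:...
removeAfterCreate()
```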
@@ -41,7 +73,16 @@ class StoreSideEffects {
  }
  }
  }
- /** @internal */
+ /**
+ * Processes all registered 'before change' handlers for a record.
+ * Handlers are called in registration order and can modify or block the change.
+ *
+ * @param prev - The current version of the record
+ * @param next - The proposed new version of the record
+ * @param source - Whether the change originated from 'user' or 'remote'
+ * @returns The potentially modified record to actually store
+ * @internal
+ */
  handleBeforeChange(prev, next, source) {
  if (!this._isEnabled) return next;
  const handlers = this._beforeChangeHandlers[next.typeName];
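Continuing the sketch, a 'before change' handler can rewrite the proposed next version before it is stored (assuming a public `registerBeforeChangeHandler`):

```ts
const removeBeforeChange = sideEffects.registerBeforeChangeHandler('book', (prev, next, source) => {
	// Keep the same invariant as the create handler on every update.
	return next.pageCount < 1 ? { ...next, pageCount: 1 } : next
})

const id = Book.createId('borne')
store.put([Book.create({ id, title: 'Borne', pageCount: 336 })])
store.update(id, (book) => ({ ...book, pageCount: 0 }))
store.get(id)?.pageCount // -> 1
```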
@@ -54,7 +95,15 @@ class StoreSideEffects {
  }
  return next;
  }
- /** @internal */
+ /**
+ * Processes all registered 'after change' handlers for a record.
+ * Handlers are called in registration order after the record is updated.
+ *
+ * @param prev - The previous version of the record
+ * @param next - The new version of the record that was stored
+ * @param source - Whether the change originated from 'user' or 'remote'
+ * @internal
+ */
  handleAfterChange(prev, next, source) {
  if (!this._isEnabled) return;
  const handlers = this._afterChangeHandlers[next.typeName];
@@ -64,7 +113,15 @@ class StoreSideEffects {
  }
  }
  }
- /** @internal */
+ /**
+ * Processes all registered 'before delete' handlers for a record.
+ * If any handler returns `false`, the deletion is prevented.
+ *
+ * @param record - The record about to be deleted
+ * @param source - Whether the change originated from 'user' or 'remote'
+ * @returns `true` to allow deletion, `false` to prevent it
+ * @internal
+ */
  handleBeforeDelete(record, source) {
  if (!this._isEnabled) return true;
  const handlers = this._beforeDeleteHandlers[record.typeName];
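Continuing the sketch, a 'before delete' handler can veto a deletion by returning `false` (assuming a public `registerBeforeDeleteHandler`):

```ts
const removeBeforeDelete = sideEffects.registerBeforeDeleteHandler('book', (book, source) => {
	// Block local deletions of this record; remote deletions pass through.
	if (source === 'user' && book.title === 'Annihilation') return false
})
```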
@@ -77,7 +134,14 @@ class StoreSideEffects {
  }
  return true;
  }
- /** @internal */
+ /**
+ * Processes all registered 'after delete' handlers for a record.
+ * Handlers are called in registration order after the record is deleted.
+ *
+ * @param record - The record that was deleted
+ * @param source - Whether the change originated from 'user' or 'remote'
+ * @internal
+ */
  handleAfterDelete(record, source) {
  if (!this._isEnabled) return;
  const handlers = this._afterDeleteHandlers[record.typeName];
@@ -87,7 +151,13 @@ class StoreSideEffects {
  }
  }
  }
- /** @internal */
+ /**
+ * Processes all registered operation complete handlers.
+ * Called after an atomic store operation finishes.
+ *
+ * @param source - Whether the operation originated from 'user' or 'remote'
+ * @internal
+ */
  handleOperationComplete(source) {
  if (!this._isEnabled) return;
  for (const handler of this._operationCompleteHandlers) {
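Finally, continuing the sketch, an operation-complete handler runs once per atomic store operation rather than once per record; the registration method name (`registerOperationCompleteHandler`) is assumed here:

```ts
const removeOnComplete = sideEffects.registerOperationCompleteHandler((source) => {
	console.log(`store operation finished (${source})`)
})

store.put([Book.create({ title: 'Dead Astronauts', pageCount: 323 })])
// per-record handlers fire first, then: store operation finished (user)
removeOnComplete()
```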
@@ -95,7 +165,29 @@ class StoreSideEffects {
  }
  }
  /**
- * Internal helper for registering a bunch of side effects at once and keeping them organized.
+ * Internal helper for registering multiple side effect handlers at once and keeping them organized.
+ * This provides a convenient way to register handlers for multiple record types and lifecycle events
+ * in a single call, returning a single cleanup function.
+ *
+ * @param handlersByType - An object mapping record type names to their respective handlers
+ * @returns A function that removes all registered handlers when called
+ *
+ * @example
+ * ```ts
+ * const cleanup = sideEffects.register({
+ * shape: {
+ * afterDelete: (shape) => console.log('Shape deleted:', shape.id),
+ * beforeChange: (prev, next) => ({ ...next, lastModified: Date.now() })
+ * },
+ * arrow: {
+ * afterCreate: (arrow) => updateConnectedShapes(arrow)
+ * }
+ * })
+ *
+ * // Later, remove all handlers
+ * cleanup()
+ * ```
+ *
  * @internal
  */
  register(handlersByType) {