@tldraw/store 4.2.2 → 4.2.3
This diff represents the content of publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
- package/dist-cjs/index.d.ts +1 -49
- package/dist-cjs/index.js +1 -3
- package/dist-cjs/index.js.map +2 -2
- package/dist-cjs/lib/ImmutableMap.js +23 -25
- package/dist-cjs/lib/ImmutableMap.js.map +2 -2
- package/dist-cjs/lib/StoreSchema.js +24 -80
- package/dist-cjs/lib/StoreSchema.js.map +2 -2
- package/dist-cjs/lib/StoreSideEffects.js +1 -1
- package/dist-cjs/lib/StoreSideEffects.js.map +1 -1
- package/dist-cjs/lib/devFreeze.js +3 -5
- package/dist-cjs/lib/devFreeze.js.map +2 -2
- package/dist-cjs/lib/migrate.js.map +2 -2
- package/dist-esm/index.d.mts +1 -49
- package/dist-esm/index.mjs +1 -3
- package/dist-esm/index.mjs.map +2 -2
- package/dist-esm/lib/ImmutableMap.mjs +23 -25
- package/dist-esm/lib/ImmutableMap.mjs.map +2 -2
- package/dist-esm/lib/StoreSchema.mjs +25 -83
- package/dist-esm/lib/StoreSchema.mjs.map +2 -2
- package/dist-esm/lib/StoreSideEffects.mjs +1 -1
- package/dist-esm/lib/StoreSideEffects.mjs.map +1 -1
- package/dist-esm/lib/devFreeze.mjs +3 -5
- package/dist-esm/lib/devFreeze.mjs.map +2 -2
- package/dist-esm/lib/migrate.mjs.map +2 -2
- package/package.json +4 -4
- package/src/index.ts +0 -3
- package/src/lib/ImmutableMap.ts +33 -25
- package/src/lib/StoreSchema.ts +30 -90
- package/src/lib/StoreSideEffects.ts +1 -1
- package/src/lib/devFreeze.test.ts +2 -6
- package/src/lib/devFreeze.ts +3 -7
- package/src/lib/migrate.ts +0 -29
- package/src/lib/test/recordStore.test.ts +0 -182
- package/dist-cjs/lib/AtomSet.js +0 -68
- package/dist-cjs/lib/AtomSet.js.map +0 -7
- package/dist-cjs/lib/isDev.js +0 -37
- package/dist-cjs/lib/isDev.js.map +0 -7
- package/dist-esm/lib/AtomSet.mjs +0 -48
- package/dist-esm/lib/AtomSet.mjs.map +0 -7
- package/dist-esm/lib/isDev.mjs +0 -16
- package/dist-esm/lib/isDev.mjs.map +0 -7
- package/src/lib/AtomSet.ts +0 -52
- package/src/lib/isDev.ts +0 -20
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/lib/StoreSchema.ts"],
-
"sourcesContent": ["import {\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tisEqual,\n\tobjectMapEntries,\n\tResult,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { devFreeze } from './devFreeze'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tSynchronousStorage,\n\tvalidateMigrations,\n} from './migrate'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\n\n/**\n * Version 1 format for serialized store schema information.\n *\n * This is the legacy format used before schema version 2. Version 1 schemas\n * separate store-level versioning from record-level versioning, and support\n * subtypes for complex record types like shapes.\n *\n * @example\n * ```ts\n * const schemaV1: SerializedSchemaV1 = {\n * schemaVersion: 1,\n * storeVersion: 2,\n * recordVersions: {\n * book: { version: 3 },\n * shape: {\n * version: 2,\n * subTypeVersions: { rectangle: 1, circle: 2 },\n * subTypeKey: 'type'\n * }\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/**\n * Version 2 format for serialized store schema information.\n *\n * This is the current format that uses a unified sequence-based approach\n * for tracking versions across all migration sequences. 
Each sequence ID\n * maps to the latest version number for that sequence.\n *\n * @example\n * ```ts\n * const schemaV2: SerializedSchemaV2 = {\n * schemaVersion: 2,\n * sequences: {\n * 'com.tldraw.store': 3,\n * 'com.tldraw.book': 2,\n * 'com.tldraw.shape': 4,\n * 'com.tldraw.shape.rectangle': 1\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/**\n * Union type representing all supported serialized schema formats.\n *\n * This type allows the store to handle both legacy (V1) and current (V2)\n * schema formats during deserialization and migration.\n *\n * @example\n * ```ts\n * function handleSchema(schema: SerializedSchema) {\n * if (schema.schemaVersion === 1) {\n * // Handle V1 format\n * console.log('Store version:', schema.storeVersion)\n * } else {\n * // Handle V2 format\n * console.log('Sequences:', schema.sequences)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\n/**\n * Upgrades a serialized schema from version 1 to version 2 format.\n *\n * Version 1 schemas use separate `storeVersion` and `recordVersions` fields,\n * while version 2 schemas use a unified `sequences` object with sequence IDs.\n *\n * @param schema - The serialized schema to upgrade\n * @returns A Result containing the upgraded schema or an error message\n *\n * @example\n * ```ts\n * const v1Schema = {\n * schemaVersion: 1,\n * storeVersion: 1,\n * recordVersions: {\n * book: { version: 2 },\n * author: { version: 1, subTypeVersions: { fiction: 1 }, subTypeKey: 'genre' }\n * }\n * }\n *\n * const result = upgradeSchema(v1Schema)\n * if (result.ok) {\n * console.log(result.value.sequences)\n * // { 'com.tldraw.store': 1, 'com.tldraw.book': 2, 'com.tldraw.author': 1, 'com.tldraw.author.fiction': 1 }\n * }\n * ```\n *\n * @public\n */\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/**\n * Information about a record validation failure that occurred in the store.\n *\n * This interface provides context about validation errors, including the failed\n * record, the store state, and the operation phase where the failure occurred.\n * It's used by validation failure handlers to implement recovery strategies.\n *\n * @example\n * ```ts\n * const schema = StoreSchema.create(\n * { book: Book },\n * {\n * onValidationFailure: (failure: StoreValidationFailure<Book>) => {\n * console.error(`Validation failed during ${failure.phase}:`, failure.error)\n * console.log('Failed record:', failure.record)\n * console.log('Previous record:', failure.recordBefore)\n *\n * // Return a corrected version of the record\n * return { ...failure.record, title: failure.record.title || 
'Untitled' }\n * }\n * }\n * )\n * ```\n *\n * @public\n */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/**\n * Configuration options for creating a StoreSchema.\n *\n * These options control migration behavior, validation error handling,\n * and integrity checking for the store schema.\n *\n * @example\n * ```ts\n * const options: StoreSchemaOptions<MyRecord, MyProps> = {\n * migrations: [bookMigrations, authorMigrations],\n * onValidationFailure: (failure) => {\n * // Log the error and return a corrected record\n * console.error('Validation failed:', failure.error)\n * return sanitizeRecord(failure.record)\n * },\n * createIntegrityChecker: (store) => {\n * // Set up integrity checking logic\n * return setupIntegrityChecks(store)\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/**\n * Manages the schema definition, validation, and migration system for a Store.\n *\n * StoreSchema coordinates record types, handles data migrations between schema\n * versions, validates records, and provides the foundational structure for\n * reactive stores. It acts as the central authority for data consistency\n * and evolution within the store system.\n *\n * @example\n * ```ts\n * // Define record types\n * const Book = createRecordType<Book>('book', { scope: 'document' })\n * const Author = createRecordType<Author>('author', { scope: 'document' })\n *\n * // Create schema with migrations\n * const schema = StoreSchema.create(\n * { book: Book, author: Author },\n * {\n * migrations: [bookMigrations, authorMigrations],\n * onValidationFailure: (failure) => {\n * console.warn('Validation failed, using default:', failure.error)\n * return failure.record // or return a corrected version\n * }\n * }\n * )\n *\n * // Use with store\n * const store = new Store({ schema })\n * ```\n *\n * @public\n */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\t/**\n\t * Creates a new StoreSchema with the given record types and options.\n\t *\n\t * This static factory method is the recommended way to create a StoreSchema.\n\t * It ensures type safety while providing a clean API for schema definition.\n\t *\n\t * @param types - Object mapping type names to their RecordType definitions\n\t * @param options - Optional configuration for migrations, validation, and integrity checking\n\t * @returns A new StoreSchema instance\n\t *\n\t * @example\n\t * ```ts\n\t * const Book = createRecordType<Book>('book', { scope: 'document' })\n\t * const Author = createRecordType<Author>('author', { scope: 'document' })\n\t *\n\t * const schema = StoreSchema.create(\n\t * {\n\t * book: Book,\n\t * author: Author\n\t * },\n\t * {\n\t * migrations: [bookMigrations],\n\t * onValidationFailure: (failure) => failure.record\n\t * }\n\t * )\n\t * ```\n\t *\n\t * @public\n\t */\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in 
R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? {})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\tprivate readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Validates a record using its corresponding RecordType validator.\n\t *\n\t * This method ensures that records conform to their type definitions before\n\t * being stored. If validation fails and an onValidationFailure handler is\n\t * provided, it will be called to potentially recover from the error.\n\t *\n\t * @param store - The store instance where validation is occurring\n\t * @param record - The record to validate\n\t * @param phase - The lifecycle phase where validation is happening\n\t * @param recordBefore - The previous version of the record (for updates)\n\t * @returns The validated record, potentially modified by validation failure handler\n\t *\n\t * @example\n\t * ```ts\n\t * try {\n\t * const validatedBook = schema.validateRecord(\n\t * store,\n\t * { id: 'book:1', typeName: 'book', title: '', author: 'Jane Doe' },\n\t * 'createRecord',\n\t * null\n\t * )\n\t * } catch (error) {\n\t * console.error('Record validation failed:', error)\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Gets all migrations that need to be applied to upgrade from a persisted schema\n\t * to the current schema version.\n\t *\n\t * This method compares the persisted schema with the current schema and determines\n\t * which migrations need to be applied to bring the data up to date. 
It handles\n\t * both regular migrations and retroactive migrations, and caches results for\n\t * performance.\n\t *\n\t * @param persistedSchema - The schema version that was previously persisted\n\t * @returns A Result containing the list of migrations to apply, or an error message\n\t *\n\t * @example\n\t * ```ts\n\t * const persistedSchema = {\n\t * schemaVersion: 2,\n\t * sequences: { 'com.tldraw.book': 1, 'com.tldraw.author': 0 }\n\t * }\n\t *\n\t * const migrationsResult = schema.getMigrationsSince(persistedSchema)\n\t * if (migrationsResult.ok) {\n\t * console.log('Migrations to apply:', migrationsResult.value.length)\n\t * // Apply each migration to bring data up to date\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\t// Check cache first\n\t\tconst cached = this.migrationCache.get(persistedSchema)\n\t\tif (cached) {\n\t\t\treturn cached\n\t\t}\n\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\t// Cache the error result\n\t\t\tthis.migrationCache.set(persistedSchema, upgradeResult)\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\tconst result = Result.ok([])\n\t\t\t// Cache the empty result\n\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\treturn result\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = `${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\tconst result = Result.err('Incompatible schema?')\n\t\t\t\t// Cache the error result\n\t\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\t\treturn result\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\tconst result = Result.ok(\n\t\t\tthis.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))\n\t\t)\n\t\t// Cache the result\n\t\tthis.migrationCache.set(persistedSchema, result)\n\t\treturn result\n\t}\n\n\t/**\n\t * Migrates a single persisted record to match the current schema version.\n\t *\n\t * This method applies the necessary migrations to transform a record from an\n\t * older (or newer) schema version to the current version. 
It supports both\n\t * forward ('up') and backward ('down') migrations.\n\t *\n\t * @param record - The record to migrate\n\t * @param persistedSchema - The schema version the record was persisted with\n\t * @param direction - Direction to migrate ('up' for newer, 'down' for older)\n\t * @returns A MigrationResult containing the migrated record or an error\n\t *\n\t * @example\n\t * ```ts\n\t * const oldRecord = { id: 'book:1', typeName: 'book', title: 'Old Title', publishDate: '2020-01-01' }\n\t * const oldSchema = { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } }\n\t *\n\t * const result = schema.migratePersistedRecord(oldRecord, oldSchema, 'up')\n\t * if (result.type === 'success') {\n\t * console.log('Migrated record:', result.value)\n\t * // Record now has publishedYear instead of publishDate\n\t * } else {\n\t * console.error('Migration failed:', result.reason)\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (!migrationsToApply.every((m) => m.scope === 'record')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.scope === 'record' && m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tif (migration.scope === 'storage') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\tmigrateStorage(storage: SynchronousStorage<R>) {\n\t\tconst schema = storage.getSchema()\n\t\tassert(schema, 'Schema is missing.')\n\n\t\tconst migrations = this.getMigrationsSince(schema)\n\t\tif (!migrations.ok) {\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\tthrow new Error(migrations.error)\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn\n\t\t}\n\n\t\tstorage.setSchema(this.serialize())\n\n\t\tfor (const migration of migrationsToApply) {\n\t\t\tif (migration.scope === 'record') {\n\t\t\t\tfor (const [id, state] of storage.entries()) {\n\t\t\t\t\tconst shouldApply = migration.filter ? 
migration.filter(state) : true\n\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\tconst record = structuredClone(state)\n\t\t\t\t\tconst result = migration.up!(record as any) ?? record\n\t\t\t\t\tif (!isEqual(result, state)) {\n\t\t\t\t\t\tstorage.set(id, result as R)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t// legacy\n\t\t\t\tconst prevStore = Object.fromEntries(storage.entries())\n\t\t\t\tlet nextStore = structuredClone(prevStore)\n\t\t\t\tnextStore = (migration.up!(nextStore) as any) ?? nextStore\n\t\t\t\tfor (const [id, state] of Object.entries(nextStore)) {\n\t\t\t\t\tif (!state) continue // these will be deleted in the next loop\n\t\t\t\t\tif (!isEqual(state, prevStore[id])) {\n\t\t\t\t\t\tstorage.set(id, state)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tfor (const id of Object.keys(prevStore)) {\n\t\t\t\t\tif (!nextStore[id]) {\n\t\t\t\t\t\tstorage.delete(id)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else if (migration.scope === 'storage') {\n\t\t\t\tmigration.up!(storage)\n\t\t\t} else {\n\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t}\n\t\t}\n\t\t// Clean up by filtering out any non-document records.\n\t\t// This is mainly legacy support for extremely early days tldraw.\n\t\tfor (const [id, state] of storage.entries()) {\n\t\t\tif (this.getType(state.typeName).scope !== 'document') {\n\t\t\t\tstorage.delete(id)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Migrates an entire store snapshot to match the current schema version.\n\t *\n\t * This method applies all necessary migrations to bring a persisted store\n\t * snapshot up to the current schema version. It handles both record-level\n\t * and store-level migrations, and can optionally mutate the input store\n\t * for performance.\n\t *\n\t * @param snapshot - The store snapshot containing data and schema information\n\t * @param opts - Options controlling migration behavior\n\t * - mutateInputStore - Whether to modify the input store directly (default: false)\n\t * @returns A MigrationResult containing the migrated store or an error\n\t *\n\t * @example\n\t * ```ts\n\t * const snapshot = {\n\t * schema: { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } },\n\t * store: {\n\t * 'book:1': { id: 'book:1', typeName: 'book', title: 'Old Book', publishDate: '2020-01-01' }\n\t * }\n\t * }\n\t *\n\t * const result = schema.migrateStoreSnapshot(snapshot)\n\t * if (result.type === 'success') {\n\t * console.log('Migrated store:', result.value)\n\t * // All records are now at current schema version\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tmigrateStoreSnapshot(\n\t\tsnapshot: StoreSnapshot<R>,\n\t\topts?: { mutateInputStore?: boolean }\n\t): MigrationResult<SerializedStore<R>> {\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: snapshot.store }\n\t\t}\n\t\tconst store = Object.assign(\n\t\t\tnew Map<string, R>(objectMapEntries(snapshot.store).map(devFreeze)),\n\t\t\t{\n\t\t\t\tgetSchema: () => snapshot.schema,\n\t\t\t\tsetSchema: (_: SerializedSchema) => {},\n\t\t\t}\n\t\t)\n\t\ttry {\n\t\t\tthis.migrateStorage(store)\n\t\t\tif (opts?.mutateInputStore) {\n\t\t\t\tfor (const [id, record] of store.entries()) {\n\t\t\t\t\tsnapshot.store[id as keyof typeof snapshot.store] = 
record\n\t\t\t\t}\n\t\t\t\tfor (const id of Object.keys(snapshot.store)) {\n\t\t\t\t\tif (!store.has(id)) {\n\t\t\t\t\t\tdelete snapshot.store[id as keyof typeof snapshot.store]\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn { type: 'success', value: snapshot.store }\n\t\t\t} else {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'success',\n\t\t\t\t\tvalue: Object.fromEntries(store.entries()) as SerializedStore<R>,\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t}\n\n\t/**\n\t * Creates an integrity checker function for the given store.\n\t *\n\t * This method calls the createIntegrityChecker option if provided, allowing\n\t * custom integrity checking logic to be set up for the store. The integrity\n\t * checker is used to validate store consistency and catch data corruption.\n\t *\n\t * @param store - The store instance to create an integrity checker for\n\t * @returns An integrity checker function, or undefined if none is configured\n\t *\n\t * @internal\n\t */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? undefined\n\t}\n\n\t/**\n\t * Serializes the current schema to a SerializedSchemaV2 format.\n\t *\n\t * This method creates a serialized representation of the current schema,\n\t * capturing the latest version number for each migration sequence.\n\t * The result can be persisted and later used to determine what migrations\n\t * need to be applied when loading data.\n\t *\n\t * @returns A SerializedSchemaV2 object representing the current schema state\n\t *\n\t * @example\n\t * ```ts\n\t * const serialized = schema.serialize()\n\t * console.log(serialized)\n\t * // {\n\t * // schemaVersion: 2,\n\t * // sequences: {\n\t * // 'com.tldraw.book': 3,\n\t * // 'com.tldraw.author': 2\n\t * // }\n\t * // }\n\t *\n\t * // Store this with your data for future migrations\n\t * localStorage.setItem('schema', JSON.stringify(serialized))\n\t * ```\n\t *\n\t * @public\n\t */\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * Serializes a schema representing the earliest possible version.\n\t *\n\t * This method creates a serialized schema where all migration sequences\n\t * are set to version 0, representing the state before any migrations\n\t * have been applied. 
This is used in specific legacy scenarios.\n\t *\n\t * @returns A SerializedSchema with all sequences set to version 0\n\t *\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t * @internal\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * Gets the RecordType definition for a given type name.\n\t *\n\t * This method retrieves the RecordType associated with the specified\n\t * type name, which contains the record's validation, creation, and\n\t * other behavioral logic.\n\t *\n\t * @param typeName - The name of the record type to retrieve\n\t * @returns The RecordType definition for the specified type\n\t *\n\t * @throws Will throw an error if the record type does not exist\n\t *\n\t * @internal\n\t */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
-
"mappings": "AAAA;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,
+
"sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/**\n * Version 1 format for serialized store schema information.\n *\n * This is the legacy format used before schema version 2. Version 1 schemas\n * separate store-level versioning from record-level versioning, and support\n * subtypes for complex record types like shapes.\n *\n * @example\n * ```ts\n * const schemaV1: SerializedSchemaV1 = {\n * schemaVersion: 1,\n * storeVersion: 2,\n * recordVersions: {\n * book: { version: 3 },\n * shape: {\n * version: 2,\n * subTypeVersions: { rectangle: 1, circle: 2 },\n * subTypeKey: 'type'\n * }\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/**\n * Version 2 format for serialized store schema information.\n *\n * This is the current format that uses a unified sequence-based approach\n * for tracking versions across all migration sequences. 
Each sequence ID\n * maps to the latest version number for that sequence.\n *\n * @example\n * ```ts\n * const schemaV2: SerializedSchemaV2 = {\n * schemaVersion: 2,\n * sequences: {\n * 'com.tldraw.store': 3,\n * 'com.tldraw.book': 2,\n * 'com.tldraw.shape': 4,\n * 'com.tldraw.shape.rectangle': 1\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/**\n * Union type representing all supported serialized schema formats.\n *\n * This type allows the store to handle both legacy (V1) and current (V2)\n * schema formats during deserialization and migration.\n *\n * @example\n * ```ts\n * function handleSchema(schema: SerializedSchema) {\n * if (schema.schemaVersion === 1) {\n * // Handle V1 format\n * console.log('Store version:', schema.storeVersion)\n * } else {\n * // Handle V2 format\n * console.log('Sequences:', schema.sequences)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\n/**\n * Upgrades a serialized schema from version 1 to version 2 format.\n *\n * Version 1 schemas use separate `storeVersion` and `recordVersions` fields,\n * while version 2 schemas use a unified `sequences` object with sequence IDs.\n *\n * @param schema - The serialized schema to upgrade\n * @returns A Result containing the upgraded schema or an error message\n *\n * @example\n * ```ts\n * const v1Schema = {\n * schemaVersion: 1,\n * storeVersion: 1,\n * recordVersions: {\n * book: { version: 2 },\n * author: { version: 1, subTypeVersions: { fiction: 1 }, subTypeKey: 'genre' }\n * }\n * }\n *\n * const result = upgradeSchema(v1Schema)\n * if (result.ok) {\n * console.log(result.value.sequences)\n * // { 'com.tldraw.store': 1, 'com.tldraw.book': 2, 'com.tldraw.author': 1, 'com.tldraw.author.fiction': 1 }\n * }\n * ```\n *\n * @public\n */\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/**\n * Information about a record validation failure that occurred in the store.\n *\n * This interface provides context about validation errors, including the failed\n * record, the store state, and the operation phase where the failure occurred.\n * It's used by validation failure handlers to implement recovery strategies.\n *\n * @example\n * ```ts\n * const schema = StoreSchema.create(\n * { book: Book },\n * {\n * onValidationFailure: (failure: StoreValidationFailure<Book>) => {\n * console.error(`Validation failed during ${failure.phase}:`, failure.error)\n * console.log('Failed record:', failure.record)\n * console.log('Previous record:', failure.recordBefore)\n *\n * // Return a corrected version of the record\n * return { ...failure.record, title: failure.record.title || 
'Untitled' }\n * }\n * }\n * )\n * ```\n *\n * @public\n */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/**\n * Configuration options for creating a StoreSchema.\n *\n * These options control migration behavior, validation error handling,\n * and integrity checking for the store schema.\n *\n * @example\n * ```ts\n * const options: StoreSchemaOptions<MyRecord, MyProps> = {\n * migrations: [bookMigrations, authorMigrations],\n * onValidationFailure: (failure) => {\n * // Log the error and return a corrected record\n * console.error('Validation failed:', failure.error)\n * return sanitizeRecord(failure.record)\n * },\n * createIntegrityChecker: (store) => {\n * // Set up integrity checking logic\n * return setupIntegrityChecks(store)\n * }\n * }\n * ```\n *\n * @public\n */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/**\n * Manages the schema definition, validation, and migration system for a Store.\n *\n * StoreSchema coordinates record types, handles data migrations between schema\n * versions, validates records, and provides the foundational structure for\n * reactive stores. It acts as the central authority for data consistency\n * and evolution within the store system.\n *\n * @example\n * ```ts\n * // Define record types\n * const Book = createRecordType<Book>('book', { scope: 'document' })\n * const Author = createRecordType<Author>('author', { scope: 'document' })\n *\n * // Create schema with migrations\n * const schema = StoreSchema.create(\n * { book: Book, author: Author },\n * {\n * migrations: [bookMigrations, authorMigrations],\n * onValidationFailure: (failure) => {\n * console.warn('Validation failed, using default:', failure.error)\n * return failure.record // or return a corrected version\n * }\n * }\n * )\n *\n * // Use with store\n * const store = new Store({ schema })\n * ```\n *\n * @public\n */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\t/**\n\t * Creates a new StoreSchema with the given record types and options.\n\t *\n\t * This static factory method is the recommended way to create a StoreSchema.\n\t * It ensures type safety while providing a clean API for schema definition.\n\t *\n\t * @param types - Object mapping type names to their RecordType definitions\n\t * @param options - Optional configuration for migrations, validation, and integrity checking\n\t * @returns A new StoreSchema instance\n\t *\n\t * @example\n\t * ```ts\n\t * const Book = createRecordType<Book>('book', { scope: 'document' })\n\t * const Author = createRecordType<Author>('author', { scope: 'document' })\n\t *\n\t * const schema = StoreSchema.create(\n\t * {\n\t * book: Book,\n\t * author: Author\n\t * },\n\t * {\n\t * migrations: [bookMigrations],\n\t * onValidationFailure: (failure) => failure.record\n\t * }\n\t * )\n\t * ```\n\t *\n\t * @public\n\t */\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in 
R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? {})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\tprivate readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Validates a record using its corresponding RecordType validator.\n\t *\n\t * This method ensures that records conform to their type definitions before\n\t * being stored. If validation fails and an onValidationFailure handler is\n\t * provided, it will be called to potentially recover from the error.\n\t *\n\t * @param store - The store instance where validation is occurring\n\t * @param record - The record to validate\n\t * @param phase - The lifecycle phase where validation is happening\n\t * @param recordBefore - The previous version of the record (for updates)\n\t * @returns The validated record, potentially modified by validation failure handler\n\t *\n\t * @example\n\t * ```ts\n\t * try {\n\t * const validatedBook = schema.validateRecord(\n\t * store,\n\t * { id: 'book:1', typeName: 'book', title: '', author: 'Jane Doe' },\n\t * 'createRecord',\n\t * null\n\t * )\n\t * } catch (error) {\n\t * console.error('Record validation failed:', error)\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Gets all migrations that need to be applied to upgrade from a persisted schema\n\t * to the current schema version.\n\t *\n\t * This method compares the persisted schema with the current schema and determines\n\t * which migrations need to be applied to bring the data up to date. 
It handles\n\t * both regular migrations and retroactive migrations, and caches results for\n\t * performance.\n\t *\n\t * @param persistedSchema - The schema version that was previously persisted\n\t * @returns A Result containing the list of migrations to apply, or an error message\n\t *\n\t * @example\n\t * ```ts\n\t * const persistedSchema = {\n\t * schemaVersion: 2,\n\t * sequences: { 'com.tldraw.book': 1, 'com.tldraw.author': 0 }\n\t * }\n\t *\n\t * const migrationsResult = schema.getMigrationsSince(persistedSchema)\n\t * if (migrationsResult.ok) {\n\t * console.log('Migrations to apply:', migrationsResult.value.length)\n\t * // Apply each migration to bring data up to date\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\t// Check cache first\n\t\tconst cached = this.migrationCache.get(persistedSchema)\n\t\tif (cached) {\n\t\t\treturn cached\n\t\t}\n\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\t// Cache the error result\n\t\t\tthis.migrationCache.set(persistedSchema, upgradeResult)\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\tconst result = Result.ok([])\n\t\t\t// Cache the empty result\n\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\treturn result\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = `${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\tconst result = Result.err('Incompatible schema?')\n\t\t\t\t// Cache the error result\n\t\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\t\treturn result\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\tconst result = Result.ok(\n\t\t\tthis.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))\n\t\t)\n\t\t// Cache the result\n\t\tthis.migrationCache.set(persistedSchema, result)\n\t\treturn result\n\t}\n\n\t/**\n\t * Migrates a single persisted record to match the current schema version.\n\t *\n\t * This method applies the necessary migrations to transform a record from an\n\t * older (or newer) schema version to the current version. 
It supports both\n\t * forward ('up') and backward ('down') migrations.\n\t *\n\t * @param record - The record to migrate\n\t * @param persistedSchema - The schema version the record was persisted with\n\t * @param direction - Direction to migrate ('up' for newer, 'down' for older)\n\t * @returns A MigrationResult containing the migrated record or an error\n\t *\n\t * @example\n\t * ```ts\n\t * const oldRecord = { id: 'book:1', typeName: 'book', title: 'Old Title', publishDate: '2020-01-01' }\n\t * const oldSchema = { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } }\n\t *\n\t * const result = schema.migratePersistedRecord(oldRecord, oldSchema, 'up')\n\t * if (result.type === 'success') {\n\t * console.log('Migrated record:', result.value)\n\t * // Record now has publishedYear instead of publishDate\n\t * } else {\n\t * console.error('Migration failed:', result.reason)\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\t/**\n\t * Migrates an entire store snapshot to match the current schema version.\n\t *\n\t * This method applies all necessary migrations to bring a persisted store\n\t * snapshot up to the current schema version. 
It handles both record-level\n\t * and store-level migrations, and can optionally mutate the input store\n\t * for performance.\n\t *\n\t * @param snapshot - The store snapshot containing data and schema information\n\t * @param opts - Options controlling migration behavior\n\t * - mutateInputStore - Whether to modify the input store directly (default: false)\n\t * @returns A MigrationResult containing the migrated store or an error\n\t *\n\t * @example\n\t * ```ts\n\t * const snapshot = {\n\t * schema: { schemaVersion: 2, sequences: { 'com.tldraw.book': 1 } },\n\t * store: {\n\t * 'book:1': { id: 'book:1', typeName: 'book', title: 'Old Book', publishDate: '2020-01-01' }\n\t * }\n\t * }\n\t *\n\t * const result = schema.migrateStoreSnapshot(snapshot)\n\t * if (result.type === 'success') {\n\t * console.log('Migrated store:', result.value)\n\t * // All records are now at current schema version\n\t * }\n\t * ```\n\t *\n\t * @public\n\t */\n\tmigrateStoreSnapshot(\n\t\tsnapshot: StoreSnapshot<R>,\n\t\topts?: { mutateInputStore?: boolean }\n\t): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tif (!opts?.mutateInputStore) {\n\t\t\tstore = structuredClone(store)\n\t\t}\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = result as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = result as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/**\n\t * Creates an integrity checker function for the given store.\n\t *\n\t * This method calls the createIntegrityChecker option if provided, allowing\n\t * custom integrity checking logic to be set up for the store. The integrity\n\t * checker is used to validate store consistency and catch data corruption.\n\t *\n\t * @param store - The store instance to create an integrity checker for\n\t * @returns An integrity checker function, or undefined if none is configured\n\t *\n\t * @internal\n\t */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? 
undefined\n\t}\n\n\t/**\n\t * Serializes the current schema to a SerializedSchemaV2 format.\n\t *\n\t * This method creates a serialized representation of the current schema,\n\t * capturing the latest version number for each migration sequence.\n\t * The result can be persisted and later used to determine what migrations\n\t * need to be applied when loading data.\n\t *\n\t * @returns A SerializedSchemaV2 object representing the current schema state\n\t *\n\t * @example\n\t * ```ts\n\t * const serialized = schema.serialize()\n\t * console.log(serialized)\n\t * // {\n\t * // schemaVersion: 2,\n\t * // sequences: {\n\t * // 'com.tldraw.book': 3,\n\t * // 'com.tldraw.author': 2\n\t * // }\n\t * // }\n\t *\n\t * // Store this with your data for future migrations\n\t * localStorage.setItem('schema', JSON.stringify(serialized))\n\t * ```\n\t *\n\t * @public\n\t */\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * Serializes a schema representing the earliest possible version.\n\t *\n\t * This method creates a serialized schema where all migration sequences\n\t * are set to version 0, representing the state before any migrations\n\t * have been applied. This is used in specific legacy scenarios.\n\t *\n\t * @returns A SerializedSchema with all sequences set to version 0\n\t *\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t * @internal\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * Gets the RecordType definition for a given type name.\n\t *\n\t * This method retrieves the RecordType associated with the specified\n\t * type name, which contains the record's validation, creation, and\n\t * other behavioral logic.\n\t *\n\t * @param typeName - The name of the record type to retrieve\n\t * @returns The RecordType definition for the specified type\n\t *\n\t * @throws Will throw an error if the record type does not exist\n\t *\n\t * @internal\n\t */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
+
"mappings": "AAAA;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAIP;AAAA,EAEC;AAAA,EAIA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAmIA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,OAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,OAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,OAAO,GAAG,MAAM;AACxB;AAoGO,MAAM,YAAkD;AAAA,EA4CtD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,aAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,yBAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,mBAAmB,eAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,eAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAnCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EACQ,iBAAiB,oBAAI,QAAuD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsD7F,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,aAAa,eAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BO,mBAAmB,iBAAgE;AAEzF,UAAM,SAAS,KAAK,eAAe,IAAI,eAAe;AACtD,QAAI,QAAQ;AACX,aAAO;AAAA,IACR;AAEA,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AAEtB,WAAK,eAAe,IAAI,iBAAiB,aAAa;AACtD,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,YAAMA,UAAS,OAAO,GAAG,CAAC,CAAC;AAE3B,WAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,aAAOA;AAAA,IACR;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,cAAMA,UAAS,OAAO,IAAI,sBAAsB;AAEhD,aAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,eAAOA;AAAA,MACR;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,UAAM,SAAS,OAAO;AAAA,MACrB,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC;AAAA,IACxE;AAEA,SAAK,eAAe,IAAI,iBAAiB,MAAM;AAC/C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,uBAAuB,sBACvB,uBAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA,UACN,QAAQ,uBAAuB;AAAA,QAChC;AAAA,MACD;AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,aAAS,gBAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,MAAM;AAC3C,YAAI,QAAQ;AACX,mBAAS,gBAAgB,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiCA,qBACC,UACA,MACsC;AACtC,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,QAAI,CAAC,MAAM,kBAAkB;AAC5B,cAAQ,gBAAgB,KAAK;AAAA,IAC9B;AAEA,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,IAAI;AAAA,YACnC;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,oBAAQ;AAAA,UACT;AAAA,QACD,OAAO;AACN,gCAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,SAAS,iBAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,QAAQ,UAAkB;AACzB,UAAM,OAAO,eAAe,KAAK,OAAO,QAAQ;AAChD,WAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
 "names": ["result"]
 }
@@ -260,7 +260,7 @@ class StoreSideEffects {
  * ```ts
  * editor.sideEffects.registerAfterCreateHandler('page', (page, source) => {
  * // Automatically create a shape when a page is created
- * editor.createShape({
+ * editor.createShape<TLTextShape>({
  * id: createShapeId(),
  * type: 'text',
  * props: { richText: toRichText(page.name) },
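The example above uses tldraw's editor-level wrapper; the hook itself ships in this package on `StoreSideEffects`. A minimal sketch of the store-level registration, assuming an existing `Store` instance and a `'book'` record type (both illustrative, not part of this diff):

```ts
import { Store, StoreSideEffects, UnknownRecord } from '@tldraw/store'

// Assumed to exist elsewhere in the app; constructing a real Store requires a schema.
declare const store: Store<UnknownRecord>

// The class doc embedded in the sourcemap below constructs the manager the same way.
const sideEffects = new StoreSideEffects(store)

// Runs after every 'book' record is created; the return value removes the handler again.
const dispose = sideEffects.registerAfterCreateHandler('book', (record, source) => {
	if (source === 'user') {
		console.log('book created:', record.id)
	}
})

// Later, when the side effect is no longer needed:
dispose()
```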
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/lib/StoreSideEffects.ts"],
-
"sourcesContent": ["import { UnknownRecord } from './BaseRecord'\nimport { Store } from './Store'\n\n/**\n * Handler function called before a record is created in the store.\n * The handler receives the record to be created and can return a modified version.\n * Use this to validate, transform, or modify records before they are added to the store.\n *\n * @param record - The record about to be created\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n * @returns The record to actually create (may be modified)\n *\n * @example\n * ```ts\n * const handler: StoreBeforeCreateHandler<MyRecord> = (record, source) => {\n * // Ensure all user-created records have a timestamp\n * if (source === 'user' && !record.createdAt) {\n * return { ...record, createdAt: Date.now() }\n * }\n * return record\n * }\n * ```\n *\n * @public\n */\nexport type StoreBeforeCreateHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => R\n/**\n * Handler function called after a record has been successfully created in the store.\n * Use this for side effects that should happen after record creation, such as updating\n * related records or triggering notifications.\n *\n * @param record - The record that was created\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreAfterCreateHandler<BookRecord> = (book, source) => {\n * if (source === 'user') {\n * console.log(`New book added: ${book.title}`)\n * updateAuthorBookCount(book.authorId)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type StoreAfterCreateHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => void\n/**\n * Handler function called before a record is updated in the store.\n * The handler receives the current and new versions of the record and can return\n * a modified version or the original to prevent the change.\n *\n * @param prev - The current version of the record in the store\n * @param next - The proposed new version of the record\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n * @returns The record version to actually store (may be modified or the original to block change)\n *\n * @example\n * ```ts\n * const handler: StoreBeforeChangeHandler<ShapeRecord> = (prev, next, source) => {\n * // Prevent shapes from being moved outside the canvas bounds\n * if (next.x < 0 || next.y < 0) {\n * return prev // Block the change\n * }\n * return next\n * }\n * ```\n *\n * @public\n */\nexport type StoreBeforeChangeHandler<R extends UnknownRecord> = (\n\tprev: R,\n\tnext: R,\n\tsource: 'remote' | 'user'\n) => R\n/**\n * Handler function called after a record has been successfully updated in the store.\n * Use this for side effects that should happen after record changes, such as\n * updating related records or maintaining consistency constraints.\n *\n * @param prev - The previous version of the record\n * @param next - The new version of the record that was stored\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreAfterChangeHandler<ShapeRecord> = (prev, next, source) => {\n * // Update connected arrows when a shape moves\n * if (prev.x !== next.x || prev.y !== next.y) {\n * updateConnectedArrows(next.id)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type StoreAfterChangeHandler<R extends 
UnknownRecord> = (\n\tprev: R,\n\tnext: R,\n\tsource: 'remote' | 'user'\n) => void\n/**\n * Handler function called before a record is deleted from the store.\n * The handler can return `false` to prevent the deletion from occurring.\n *\n * @param record - The record about to be deleted\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n * @returns `false` to prevent deletion, `void` or any other value to allow it\n *\n * @example\n * ```ts\n * const handler: StoreBeforeDeleteHandler<BookRecord> = (book, source) => {\n * // Prevent deletion of books that are currently checked out\n * if (book.isCheckedOut) {\n * console.warn('Cannot delete checked out book')\n * return false\n * }\n * // Allow deletion for other books\n * }\n * ```\n *\n * @public\n */\nexport type StoreBeforeDeleteHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => void | false\n/**\n * Handler function called after a record has been successfully deleted from the store.\n * Use this for cleanup operations and maintaining referential integrity.\n *\n * @param record - The record that was deleted\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreAfterDeleteHandler<ShapeRecord> = (shape, source) => {\n * // Clean up arrows that were connected to this shape\n * const connectedArrows = findArrowsConnectedTo(shape.id)\n * store.remove(connectedArrows.map(arrow => arrow.id))\n * }\n * ```\n *\n * @public\n */\nexport type StoreAfterDeleteHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => void\n\n/**\n * Handler function called when a store operation (atomic transaction) completes.\n * This is useful for performing actions after a batch of changes has been applied,\n * such as triggering saves or sending notifications.\n *\n * @param source - Whether the operation originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreOperationCompleteHandler = (source) => {\n * if (source === 'user') {\n * // Auto-save after user operations complete\n * saveStoreSnapshot()\n * }\n * }\n * ```\n *\n * @public\n */\nexport type StoreOperationCompleteHandler = (source: 'remote' | 'user') => void\n\n/**\n * The side effect manager (aka a \"correct state enforcer\") is responsible\n * for making sure that the store's state is always correct and consistent. 
This includes\n * things like: deleting a shape if its parent is deleted; unbinding\n * arrows when their binding target is deleted; maintaining referential integrity; etc.\n *\n * Side effects are organized into lifecycle hooks that run before and after\n * record operations (create, change, delete), allowing you to validate data,\n * transform records, and maintain business rules.\n *\n * @example\n * ```ts\n * const sideEffects = new StoreSideEffects(store)\n *\n * // Ensure arrows are deleted when their target shape is deleted\n * sideEffects.registerAfterDeleteHandler('shape', (shape) => {\n * const arrows = store.query.records('arrow', () => ({\n * toId: { eq: shape.id }\n * })).get()\n * store.remove(arrows.map(arrow => arrow.id))\n * })\n * ```\n *\n * @public\n */\nexport class StoreSideEffects<R extends UnknownRecord> {\n\t/**\n\t * Creates a new side effects manager for the given store.\n\t *\n\t * store - The store instance to manage side effects for\n\t */\n\tconstructor(private readonly store: Store<R>) {}\n\n\tprivate _beforeCreateHandlers: { [K in string]?: StoreBeforeCreateHandler<any>[] } = {}\n\tprivate _afterCreateHandlers: { [K in string]?: StoreAfterCreateHandler<any>[] } = {}\n\tprivate _beforeChangeHandlers: { [K in string]?: StoreBeforeChangeHandler<any>[] } = {}\n\tprivate _afterChangeHandlers: { [K in string]?: StoreAfterChangeHandler<any>[] } = {}\n\tprivate _beforeDeleteHandlers: { [K in string]?: StoreBeforeDeleteHandler<any>[] } = {}\n\tprivate _afterDeleteHandlers: { [K in string]?: StoreAfterDeleteHandler<any>[] } = {}\n\tprivate _operationCompleteHandlers: StoreOperationCompleteHandler[] = []\n\n\tprivate _isEnabled = true\n\t/**\n\t * Checks whether side effects are currently enabled.\n\t * When disabled, all side effect handlers are bypassed.\n\t *\n\t * @returns `true` if side effects are enabled, `false` otherwise\n\t * @internal\n\t */\n\tisEnabled() {\n\t\treturn this._isEnabled\n\t}\n\t/**\n\t * Enables or disables side effects processing.\n\t * When disabled, no side effect handlers will be called.\n\t *\n\t * @param enabled - Whether to enable or disable side effects\n\t * @internal\n\t */\n\tsetIsEnabled(enabled: boolean) {\n\t\tthis._isEnabled = enabled\n\t}\n\n\t/**\n\t * Processes all registered 'before create' handlers for a record.\n\t * Handlers are called in registration order and can transform the record.\n\t *\n\t * @param record - The record about to be created\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @returns The potentially modified record to actually create\n\t * @internal\n\t */\n\thandleBeforeCreate(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return record\n\n\t\tconst handlers = this._beforeCreateHandlers[record.typeName] as StoreBeforeCreateHandler<R>[]\n\t\tif (handlers) {\n\t\t\tlet r = record\n\t\t\tfor (const handler of handlers) {\n\t\t\t\tr = handler(r, source)\n\t\t\t}\n\t\t\treturn r\n\t\t}\n\n\t\treturn record\n\t}\n\n\t/**\n\t * Processes all registered 'after create' handlers for a record.\n\t * Handlers are called in registration order after the record is created.\n\t *\n\t * @param record - The record that was created\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleAfterCreate(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tconst handlers = this._afterCreateHandlers[record.typeName] as StoreAfterCreateHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of 
handlers) {\n\t\t\t\thandler(record, source)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Processes all registered 'before change' handlers for a record.\n\t * Handlers are called in registration order and can modify or block the change.\n\t *\n\t * @param prev - The current version of the record\n\t * @param next - The proposed new version of the record\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @returns The potentially modified record to actually store\n\t * @internal\n\t */\n\thandleBeforeChange(prev: R, next: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return next\n\n\t\tconst handlers = this._beforeChangeHandlers[next.typeName] as StoreBeforeChangeHandler<R>[]\n\t\tif (handlers) {\n\t\t\tlet r = next\n\t\t\tfor (const handler of handlers) {\n\t\t\t\tr = handler(prev, r, source)\n\t\t\t}\n\t\t\treturn r\n\t\t}\n\n\t\treturn next\n\t}\n\n\t/**\n\t * Processes all registered 'after change' handlers for a record.\n\t * Handlers are called in registration order after the record is updated.\n\t *\n\t * @param prev - The previous version of the record\n\t * @param next - The new version of the record that was stored\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleAfterChange(prev: R, next: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tconst handlers = this._afterChangeHandlers[next.typeName] as StoreAfterChangeHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of handlers) {\n\t\t\t\thandler(prev, next, source)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Processes all registered 'before delete' handlers for a record.\n\t * If any handler returns `false`, the deletion is prevented.\n\t *\n\t * @param record - The record about to be deleted\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @returns `true` to allow deletion, `false` to prevent it\n\t * @internal\n\t */\n\thandleBeforeDelete(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return true\n\n\t\tconst handlers = this._beforeDeleteHandlers[record.typeName] as StoreBeforeDeleteHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of handlers) {\n\t\t\t\tif (handler(record, source) === false) {\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn true\n\t}\n\n\t/**\n\t * Processes all registered 'after delete' handlers for a record.\n\t * Handlers are called in registration order after the record is deleted.\n\t *\n\t * @param record - The record that was deleted\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleAfterDelete(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tconst handlers = this._afterDeleteHandlers[record.typeName] as StoreAfterDeleteHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of handlers) {\n\t\t\t\thandler(record, source)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Processes all registered operation complete handlers.\n\t * Called after an atomic store operation finishes.\n\t *\n\t * @param source - Whether the operation originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleOperationComplete(source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tfor (const handler of this._operationCompleteHandlers) {\n\t\t\thandler(source)\n\t\t}\n\t}\n\n\t/**\n\t * Internal helper for registering multiple side effect handlers at once and keeping them organized.\n\t * This provides a convenient way to register 
handlers for multiple record types and lifecycle events\n\t * in a single call, returning a single cleanup function.\n\t *\n\t * @param handlersByType - An object mapping record type names to their respective handlers\n\t * @returns A function that removes all registered handlers when called\n\t *\n\t * @example\n\t * ```ts\n\t * const cleanup = sideEffects.register({\n\t * shape: {\n\t * afterDelete: (shape) => console.log('Shape deleted:', shape.id),\n\t * beforeChange: (prev, next) => ({ ...next, lastModified: Date.now() })\n\t * },\n\t * arrow: {\n\t * afterCreate: (arrow) => updateConnectedShapes(arrow)\n\t * }\n\t * })\n\t *\n\t * // Later, remove all handlers\n\t * cleanup()\n\t * ```\n\t *\n\t * @internal\n\t */\n\tregister(handlersByType: {\n\t\t[T in R as T['typeName']]?: {\n\t\t\tbeforeCreate?: StoreBeforeCreateHandler<T>\n\t\t\tafterCreate?: StoreAfterCreateHandler<T>\n\t\t\tbeforeChange?: StoreBeforeChangeHandler<T>\n\t\t\tafterChange?: StoreAfterChangeHandler<T>\n\t\t\tbeforeDelete?: StoreBeforeDeleteHandler<T>\n\t\t\tafterDelete?: StoreAfterDeleteHandler<T>\n\t\t}\n\t}) {\n\t\tconst disposes: (() => void)[] = []\n\t\tfor (const [type, handlers] of Object.entries(handlersByType) as any) {\n\t\t\tif (handlers?.beforeCreate) {\n\t\t\t\tdisposes.push(this.registerBeforeCreateHandler(type, handlers.beforeCreate))\n\t\t\t}\n\t\t\tif (handlers?.afterCreate) {\n\t\t\t\tdisposes.push(this.registerAfterCreateHandler(type, handlers.afterCreate))\n\t\t\t}\n\t\t\tif (handlers?.beforeChange) {\n\t\t\t\tdisposes.push(this.registerBeforeChangeHandler(type, handlers.beforeChange))\n\t\t\t}\n\t\t\tif (handlers?.afterChange) {\n\t\t\t\tdisposes.push(this.registerAfterChangeHandler(type, handlers.afterChange))\n\t\t\t}\n\t\t\tif (handlers?.beforeDelete) {\n\t\t\t\tdisposes.push(this.registerBeforeDeleteHandler(type, handlers.beforeDelete))\n\t\t\t}\n\t\t\tif (handlers?.afterDelete) {\n\t\t\t\tdisposes.push(this.registerAfterDeleteHandler(type, handlers.afterDelete))\n\t\t\t}\n\t\t}\n\t\treturn () => {\n\t\t\tfor (const dispose of disposes) dispose()\n\t\t}\n\t}\n\n\t/**\n\t * Register a handler to be called before a record of a certain type is created. Return a\n\t * modified record from the handler to change the record that will be created.\n\t *\n\t * Use this handle only to modify the creation of the record itself. If you want to trigger a\n\t * side-effect on a different record (for example, moving one shape when another is created),\n\t * use {@link StoreSideEffects.registerAfterCreateHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerBeforeCreateHandler('shape', (shape, source) => {\n\t * // only modify shapes created by the user\n\t * if (source !== 'user') return shape\n\t *\n\t * //by default, arrow shapes have no label. 
Let's make sure they always have a label.\n\t * if (shape.type === 'arrow') {\n\t * return {...shape, props: {...shape.props, text: 'an arrow'}}\n\t * }\n\t *\n\t * // other shapes get returned unmodified\n\t * return shape\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterBeforeCreateHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreBeforeCreateHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._beforeCreateHandlers[typeName] as StoreBeforeCreateHandler<any>[]\n\t\tif (!handlers) this._beforeCreateHandlers[typeName] = []\n\t\tthis._beforeCreateHandlers[typeName]!.push(handler)\n\t\treturn () => remove(this._beforeCreateHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called after a record is created. This is useful for side-effects\n\t * that would update _other_ records. If you want to modify the record being created use\n\t * {@link StoreSideEffects.registerBeforeCreateHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerAfterCreateHandler('page', (page, source) => {\n\t * // Automatically create a shape when a page is created\n\t * editor.createShape({\n\t * id: createShapeId(),\n\t * type: 'text',\n\t * props: { richText: toRichText(page.name) },\n\t * })\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterAfterCreateHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreAfterCreateHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._afterCreateHandlers[typeName] as StoreAfterCreateHandler<any>[]\n\t\tif (!handlers) this._afterCreateHandlers[typeName] = []\n\t\tthis._afterCreateHandlers[typeName]!.push(handler)\n\t\treturn () => remove(this._afterCreateHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called before a record is changed. The handler is given the old and\n\t * new record - you can return a modified record to apply a different update, or the old record\n\t * to block the update entirely.\n\t *\n\t * Use this handler only for intercepting updates to the record itself. If you want to update\n\t * other records in response to a change, use\n\t * {@link StoreSideEffects.registerAfterChangeHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerBeforeChangeHandler('shape', (prev, next, source) => {\n\t * if (next.isLocked && !prev.isLocked) {\n\t * // prevent shapes from ever being locked:\n\t * return prev\n\t * }\n\t * // other types of change are allowed\n\t * return next\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterBeforeChangeHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreBeforeChangeHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._beforeChangeHandlers[typeName] as StoreBeforeChangeHandler<any>[]\n\t\tif (!handlers) this._beforeChangeHandlers[typeName] = []\n\t\tthis._beforeChangeHandlers[typeName]!.push(handler)\n\t\treturn () => remove(this._beforeChangeHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called after a record is changed. 
This is useful for side-effects\n\t * that would update _other_ records - if you want to modify the record being changed, use\n\t * {@link StoreSideEffects.registerBeforeChangeHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerAfterChangeHandler('shape', (prev, next, source) => {\n\t * if (next.props.color === 'red') {\n\t * // there can only be one red shape at a time:\n\t * const otherRedShapes = editor.getCurrentPageShapes().filter(s => s.props.color === 'red' && s.id !== next.id)\n\t * editor.updateShapes(otherRedShapes.map(s => ({...s, props: {...s.props, color: 'blue'}})))\n\t * }\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterAfterChangeHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreAfterChangeHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._afterChangeHandlers[typeName] as StoreAfterChangeHandler<any>[]\n\t\tif (!handlers) this._afterChangeHandlers[typeName] = []\n\t\tthis._afterChangeHandlers[typeName]!.push(handler as StoreAfterChangeHandler<any>)\n\t\treturn () => remove(this._afterChangeHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called before a record is deleted. The handler can return `false` to\n\t * prevent the deletion.\n\t *\n\t * Use this handler only for intercepting deletions of the record itself. If you want to do\n\t * something to other records in response to a deletion, use\n\t * {@link StoreSideEffects.registerAfterDeleteHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerBeforeDeleteHandler('shape', (shape, source) => {\n\t * if (shape.props.color === 'red') {\n\t * // prevent red shapes from being deleted\n\t * \t return false\n\t * }\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterBeforeDeleteHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreBeforeDeleteHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._beforeDeleteHandlers[typeName] as StoreBeforeDeleteHandler<any>[]\n\t\tif (!handlers) this._beforeDeleteHandlers[typeName] = []\n\t\tthis._beforeDeleteHandlers[typeName]!.push(handler as StoreBeforeDeleteHandler<any>)\n\t\treturn () => remove(this._beforeDeleteHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called after a record is deleted. 
This is useful for side-effects\n\t * that would update _other_ records - if you want to block the deletion of the record itself,\n\t * use {@link StoreSideEffects.registerBeforeDeleteHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerAfterDeleteHandler('shape', (shape, source) => {\n\t * // if the last shape in a frame is deleted, delete the frame too:\n\t * const parentFrame = editor.getShape(shape.parentId)\n\t * if (!parentFrame || parentFrame.type !== 'frame') return\n\t *\n\t * const siblings = editor.getSortedChildIdsForParent(parentFrame)\n\t * if (siblings.length === 0) {\n\t * editor.deleteShape(parentFrame.id)\n\t * }\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterAfterDeleteHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreAfterDeleteHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._afterDeleteHandlers[typeName] as StoreAfterDeleteHandler<any>[]\n\t\tif (!handlers) this._afterDeleteHandlers[typeName] = []\n\t\tthis._afterDeleteHandlers[typeName]!.push(handler as StoreAfterDeleteHandler<any>)\n\t\treturn () => remove(this._afterDeleteHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called when a store completes an atomic operation.\n\t *\n\t * @example\n\t * ```ts\n\t * let count = 0\n\t *\n\t * editor.sideEffects.registerOperationCompleteHandler(() => count++)\n\t *\n\t * editor.selectAll()\n\t * expect(count).toBe(1)\n\t *\n\t * editor.store.atomic(() => {\n\t *\teditor.selectNone()\n\t * \teditor.selectAll()\n\t * })\n\t *\n\t * expect(count).toBe(2)\n\t * ```\n\t *\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t *\n\t * @public\n\t */\n\tregisterOperationCompleteHandler(handler: StoreOperationCompleteHandler) {\n\t\tthis._operationCompleteHandlers.push(handler)\n\t\treturn () => remove(this._operationCompleteHandlers, handler)\n\t}\n}\n\nfunction remove(array: any[], item: any) {\n\tconst index = array.indexOf(item)\n\tif (index >= 0) {\n\t\tarray.splice(index, 1)\n\t}\n}\n"],
+
"sourcesContent": ["import { UnknownRecord } from './BaseRecord'\nimport { Store } from './Store'\n\n/**\n * Handler function called before a record is created in the store.\n * The handler receives the record to be created and can return a modified version.\n * Use this to validate, transform, or modify records before they are added to the store.\n *\n * @param record - The record about to be created\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n * @returns The record to actually create (may be modified)\n *\n * @example\n * ```ts\n * const handler: StoreBeforeCreateHandler<MyRecord> = (record, source) => {\n * // Ensure all user-created records have a timestamp\n * if (source === 'user' && !record.createdAt) {\n * return { ...record, createdAt: Date.now() }\n * }\n * return record\n * }\n * ```\n *\n * @public\n */\nexport type StoreBeforeCreateHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => R\n/**\n * Handler function called after a record has been successfully created in the store.\n * Use this for side effects that should happen after record creation, such as updating\n * related records or triggering notifications.\n *\n * @param record - The record that was created\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreAfterCreateHandler<BookRecord> = (book, source) => {\n * if (source === 'user') {\n * console.log(`New book added: ${book.title}`)\n * updateAuthorBookCount(book.authorId)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type StoreAfterCreateHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => void\n/**\n * Handler function called before a record is updated in the store.\n * The handler receives the current and new versions of the record and can return\n * a modified version or the original to prevent the change.\n *\n * @param prev - The current version of the record in the store\n * @param next - The proposed new version of the record\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n * @returns The record version to actually store (may be modified or the original to block change)\n *\n * @example\n * ```ts\n * const handler: StoreBeforeChangeHandler<ShapeRecord> = (prev, next, source) => {\n * // Prevent shapes from being moved outside the canvas bounds\n * if (next.x < 0 || next.y < 0) {\n * return prev // Block the change\n * }\n * return next\n * }\n * ```\n *\n * @public\n */\nexport type StoreBeforeChangeHandler<R extends UnknownRecord> = (\n\tprev: R,\n\tnext: R,\n\tsource: 'remote' | 'user'\n) => R\n/**\n * Handler function called after a record has been successfully updated in the store.\n * Use this for side effects that should happen after record changes, such as\n * updating related records or maintaining consistency constraints.\n *\n * @param prev - The previous version of the record\n * @param next - The new version of the record that was stored\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreAfterChangeHandler<ShapeRecord> = (prev, next, source) => {\n * // Update connected arrows when a shape moves\n * if (prev.x !== next.x || prev.y !== next.y) {\n * updateConnectedArrows(next.id)\n * }\n * }\n * ```\n *\n * @public\n */\nexport type StoreAfterChangeHandler<R extends 
UnknownRecord> = (\n\tprev: R,\n\tnext: R,\n\tsource: 'remote' | 'user'\n) => void\n/**\n * Handler function called before a record is deleted from the store.\n * The handler can return `false` to prevent the deletion from occurring.\n *\n * @param record - The record about to be deleted\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n * @returns `false` to prevent deletion, `void` or any other value to allow it\n *\n * @example\n * ```ts\n * const handler: StoreBeforeDeleteHandler<BookRecord> = (book, source) => {\n * // Prevent deletion of books that are currently checked out\n * if (book.isCheckedOut) {\n * console.warn('Cannot delete checked out book')\n * return false\n * }\n * // Allow deletion for other books\n * }\n * ```\n *\n * @public\n */\nexport type StoreBeforeDeleteHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => void | false\n/**\n * Handler function called after a record has been successfully deleted from the store.\n * Use this for cleanup operations and maintaining referential integrity.\n *\n * @param record - The record that was deleted\n * @param source - Whether the change originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreAfterDeleteHandler<ShapeRecord> = (shape, source) => {\n * // Clean up arrows that were connected to this shape\n * const connectedArrows = findArrowsConnectedTo(shape.id)\n * store.remove(connectedArrows.map(arrow => arrow.id))\n * }\n * ```\n *\n * @public\n */\nexport type StoreAfterDeleteHandler<R extends UnknownRecord> = (\n\trecord: R,\n\tsource: 'remote' | 'user'\n) => void\n\n/**\n * Handler function called when a store operation (atomic transaction) completes.\n * This is useful for performing actions after a batch of changes has been applied,\n * such as triggering saves or sending notifications.\n *\n * @param source - Whether the operation originated from 'user' interaction or 'remote' synchronization\n *\n * @example\n * ```ts\n * const handler: StoreOperationCompleteHandler = (source) => {\n * if (source === 'user') {\n * // Auto-save after user operations complete\n * saveStoreSnapshot()\n * }\n * }\n * ```\n *\n * @public\n */\nexport type StoreOperationCompleteHandler = (source: 'remote' | 'user') => void\n\n/**\n * The side effect manager (aka a \"correct state enforcer\") is responsible\n * for making sure that the store's state is always correct and consistent. 
This includes\n * things like: deleting a shape if its parent is deleted; unbinding\n * arrows when their binding target is deleted; maintaining referential integrity; etc.\n *\n * Side effects are organized into lifecycle hooks that run before and after\n * record operations (create, change, delete), allowing you to validate data,\n * transform records, and maintain business rules.\n *\n * @example\n * ```ts\n * const sideEffects = new StoreSideEffects(store)\n *\n * // Ensure arrows are deleted when their target shape is deleted\n * sideEffects.registerAfterDeleteHandler('shape', (shape) => {\n * const arrows = store.query.records('arrow', () => ({\n * toId: { eq: shape.id }\n * })).get()\n * store.remove(arrows.map(arrow => arrow.id))\n * })\n * ```\n *\n * @public\n */\nexport class StoreSideEffects<R extends UnknownRecord> {\n\t/**\n\t * Creates a new side effects manager for the given store.\n\t *\n\t * store - The store instance to manage side effects for\n\t */\n\tconstructor(private readonly store: Store<R>) {}\n\n\tprivate _beforeCreateHandlers: { [K in string]?: StoreBeforeCreateHandler<any>[] } = {}\n\tprivate _afterCreateHandlers: { [K in string]?: StoreAfterCreateHandler<any>[] } = {}\n\tprivate _beforeChangeHandlers: { [K in string]?: StoreBeforeChangeHandler<any>[] } = {}\n\tprivate _afterChangeHandlers: { [K in string]?: StoreAfterChangeHandler<any>[] } = {}\n\tprivate _beforeDeleteHandlers: { [K in string]?: StoreBeforeDeleteHandler<any>[] } = {}\n\tprivate _afterDeleteHandlers: { [K in string]?: StoreAfterDeleteHandler<any>[] } = {}\n\tprivate _operationCompleteHandlers: StoreOperationCompleteHandler[] = []\n\n\tprivate _isEnabled = true\n\t/**\n\t * Checks whether side effects are currently enabled.\n\t * When disabled, all side effect handlers are bypassed.\n\t *\n\t * @returns `true` if side effects are enabled, `false` otherwise\n\t * @internal\n\t */\n\tisEnabled() {\n\t\treturn this._isEnabled\n\t}\n\t/**\n\t * Enables or disables side effects processing.\n\t * When disabled, no side effect handlers will be called.\n\t *\n\t * @param enabled - Whether to enable or disable side effects\n\t * @internal\n\t */\n\tsetIsEnabled(enabled: boolean) {\n\t\tthis._isEnabled = enabled\n\t}\n\n\t/**\n\t * Processes all registered 'before create' handlers for a record.\n\t * Handlers are called in registration order and can transform the record.\n\t *\n\t * @param record - The record about to be created\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @returns The potentially modified record to actually create\n\t * @internal\n\t */\n\thandleBeforeCreate(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return record\n\n\t\tconst handlers = this._beforeCreateHandlers[record.typeName] as StoreBeforeCreateHandler<R>[]\n\t\tif (handlers) {\n\t\t\tlet r = record\n\t\t\tfor (const handler of handlers) {\n\t\t\t\tr = handler(r, source)\n\t\t\t}\n\t\t\treturn r\n\t\t}\n\n\t\treturn record\n\t}\n\n\t/**\n\t * Processes all registered 'after create' handlers for a record.\n\t * Handlers are called in registration order after the record is created.\n\t *\n\t * @param record - The record that was created\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleAfterCreate(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tconst handlers = this._afterCreateHandlers[record.typeName] as StoreAfterCreateHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of 
handlers) {\n\t\t\t\thandler(record, source)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Processes all registered 'before change' handlers for a record.\n\t * Handlers are called in registration order and can modify or block the change.\n\t *\n\t * @param prev - The current version of the record\n\t * @param next - The proposed new version of the record\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @returns The potentially modified record to actually store\n\t * @internal\n\t */\n\thandleBeforeChange(prev: R, next: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return next\n\n\t\tconst handlers = this._beforeChangeHandlers[next.typeName] as StoreBeforeChangeHandler<R>[]\n\t\tif (handlers) {\n\t\t\tlet r = next\n\t\t\tfor (const handler of handlers) {\n\t\t\t\tr = handler(prev, r, source)\n\t\t\t}\n\t\t\treturn r\n\t\t}\n\n\t\treturn next\n\t}\n\n\t/**\n\t * Processes all registered 'after change' handlers for a record.\n\t * Handlers are called in registration order after the record is updated.\n\t *\n\t * @param prev - The previous version of the record\n\t * @param next - The new version of the record that was stored\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleAfterChange(prev: R, next: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tconst handlers = this._afterChangeHandlers[next.typeName] as StoreAfterChangeHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of handlers) {\n\t\t\t\thandler(prev, next, source)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Processes all registered 'before delete' handlers for a record.\n\t * If any handler returns `false`, the deletion is prevented.\n\t *\n\t * @param record - The record about to be deleted\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @returns `true` to allow deletion, `false` to prevent it\n\t * @internal\n\t */\n\thandleBeforeDelete(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return true\n\n\t\tconst handlers = this._beforeDeleteHandlers[record.typeName] as StoreBeforeDeleteHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of handlers) {\n\t\t\t\tif (handler(record, source) === false) {\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn true\n\t}\n\n\t/**\n\t * Processes all registered 'after delete' handlers for a record.\n\t * Handlers are called in registration order after the record is deleted.\n\t *\n\t * @param record - The record that was deleted\n\t * @param source - Whether the change originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleAfterDelete(record: R, source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tconst handlers = this._afterDeleteHandlers[record.typeName] as StoreAfterDeleteHandler<R>[]\n\t\tif (handlers) {\n\t\t\tfor (const handler of handlers) {\n\t\t\t\thandler(record, source)\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Processes all registered operation complete handlers.\n\t * Called after an atomic store operation finishes.\n\t *\n\t * @param source - Whether the operation originated from 'user' or 'remote'\n\t * @internal\n\t */\n\thandleOperationComplete(source: 'remote' | 'user') {\n\t\tif (!this._isEnabled) return\n\n\t\tfor (const handler of this._operationCompleteHandlers) {\n\t\t\thandler(source)\n\t\t}\n\t}\n\n\t/**\n\t * Internal helper for registering multiple side effect handlers at once and keeping them organized.\n\t * This provides a convenient way to register 
handlers for multiple record types and lifecycle events\n\t * in a single call, returning a single cleanup function.\n\t *\n\t * @param handlersByType - An object mapping record type names to their respective handlers\n\t * @returns A function that removes all registered handlers when called\n\t *\n\t * @example\n\t * ```ts\n\t * const cleanup = sideEffects.register({\n\t * shape: {\n\t * afterDelete: (shape) => console.log('Shape deleted:', shape.id),\n\t * beforeChange: (prev, next) => ({ ...next, lastModified: Date.now() })\n\t * },\n\t * arrow: {\n\t * afterCreate: (arrow) => updateConnectedShapes(arrow)\n\t * }\n\t * })\n\t *\n\t * // Later, remove all handlers\n\t * cleanup()\n\t * ```\n\t *\n\t * @internal\n\t */\n\tregister(handlersByType: {\n\t\t[T in R as T['typeName']]?: {\n\t\t\tbeforeCreate?: StoreBeforeCreateHandler<T>\n\t\t\tafterCreate?: StoreAfterCreateHandler<T>\n\t\t\tbeforeChange?: StoreBeforeChangeHandler<T>\n\t\t\tafterChange?: StoreAfterChangeHandler<T>\n\t\t\tbeforeDelete?: StoreBeforeDeleteHandler<T>\n\t\t\tafterDelete?: StoreAfterDeleteHandler<T>\n\t\t}\n\t}) {\n\t\tconst disposes: (() => void)[] = []\n\t\tfor (const [type, handlers] of Object.entries(handlersByType) as any) {\n\t\t\tif (handlers?.beforeCreate) {\n\t\t\t\tdisposes.push(this.registerBeforeCreateHandler(type, handlers.beforeCreate))\n\t\t\t}\n\t\t\tif (handlers?.afterCreate) {\n\t\t\t\tdisposes.push(this.registerAfterCreateHandler(type, handlers.afterCreate))\n\t\t\t}\n\t\t\tif (handlers?.beforeChange) {\n\t\t\t\tdisposes.push(this.registerBeforeChangeHandler(type, handlers.beforeChange))\n\t\t\t}\n\t\t\tif (handlers?.afterChange) {\n\t\t\t\tdisposes.push(this.registerAfterChangeHandler(type, handlers.afterChange))\n\t\t\t}\n\t\t\tif (handlers?.beforeDelete) {\n\t\t\t\tdisposes.push(this.registerBeforeDeleteHandler(type, handlers.beforeDelete))\n\t\t\t}\n\t\t\tif (handlers?.afterDelete) {\n\t\t\t\tdisposes.push(this.registerAfterDeleteHandler(type, handlers.afterDelete))\n\t\t\t}\n\t\t}\n\t\treturn () => {\n\t\t\tfor (const dispose of disposes) dispose()\n\t\t}\n\t}\n\n\t/**\n\t * Register a handler to be called before a record of a certain type is created. Return a\n\t * modified record from the handler to change the record that will be created.\n\t *\n\t * Use this handle only to modify the creation of the record itself. If you want to trigger a\n\t * side-effect on a different record (for example, moving one shape when another is created),\n\t * use {@link StoreSideEffects.registerAfterCreateHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerBeforeCreateHandler('shape', (shape, source) => {\n\t * // only modify shapes created by the user\n\t * if (source !== 'user') return shape\n\t *\n\t * //by default, arrow shapes have no label. 
Let's make sure they always have a label.\n\t * if (shape.type === 'arrow') {\n\t * return {...shape, props: {...shape.props, text: 'an arrow'}}\n\t * }\n\t *\n\t * // other shapes get returned unmodified\n\t * return shape\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterBeforeCreateHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreBeforeCreateHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._beforeCreateHandlers[typeName] as StoreBeforeCreateHandler<any>[]\n\t\tif (!handlers) this._beforeCreateHandlers[typeName] = []\n\t\tthis._beforeCreateHandlers[typeName]!.push(handler)\n\t\treturn () => remove(this._beforeCreateHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called after a record is created. This is useful for side-effects\n\t * that would update _other_ records. If you want to modify the record being created use\n\t * {@link StoreSideEffects.registerBeforeCreateHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerAfterCreateHandler('page', (page, source) => {\n\t * // Automatically create a shape when a page is created\n\t * editor.createShape<TLTextShape>({\n\t * id: createShapeId(),\n\t * type: 'text',\n\t * props: { richText: toRichText(page.name) },\n\t * })\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterAfterCreateHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreAfterCreateHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._afterCreateHandlers[typeName] as StoreAfterCreateHandler<any>[]\n\t\tif (!handlers) this._afterCreateHandlers[typeName] = []\n\t\tthis._afterCreateHandlers[typeName]!.push(handler)\n\t\treturn () => remove(this._afterCreateHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called before a record is changed. The handler is given the old and\n\t * new record - you can return a modified record to apply a different update, or the old record\n\t * to block the update entirely.\n\t *\n\t * Use this handler only for intercepting updates to the record itself. If you want to update\n\t * other records in response to a change, use\n\t * {@link StoreSideEffects.registerAfterChangeHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerBeforeChangeHandler('shape', (prev, next, source) => {\n\t * if (next.isLocked && !prev.isLocked) {\n\t * // prevent shapes from ever being locked:\n\t * return prev\n\t * }\n\t * // other types of change are allowed\n\t * return next\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterBeforeChangeHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreBeforeChangeHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._beforeChangeHandlers[typeName] as StoreBeforeChangeHandler<any>[]\n\t\tif (!handlers) this._beforeChangeHandlers[typeName] = []\n\t\tthis._beforeChangeHandlers[typeName]!.push(handler)\n\t\treturn () => remove(this._beforeChangeHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called after a record is changed. 
This is useful for side-effects\n\t * that would update _other_ records - if you want to modify the record being changed, use\n\t * {@link StoreSideEffects.registerBeforeChangeHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerAfterChangeHandler('shape', (prev, next, source) => {\n\t * if (next.props.color === 'red') {\n\t * // there can only be one red shape at a time:\n\t * const otherRedShapes = editor.getCurrentPageShapes().filter(s => s.props.color === 'red' && s.id !== next.id)\n\t * editor.updateShapes(otherRedShapes.map(s => ({...s, props: {...s.props, color: 'blue'}})))\n\t * }\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterAfterChangeHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreAfterChangeHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._afterChangeHandlers[typeName] as StoreAfterChangeHandler<any>[]\n\t\tif (!handlers) this._afterChangeHandlers[typeName] = []\n\t\tthis._afterChangeHandlers[typeName]!.push(handler as StoreAfterChangeHandler<any>)\n\t\treturn () => remove(this._afterChangeHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called before a record is deleted. The handler can return `false` to\n\t * prevent the deletion.\n\t *\n\t * Use this handler only for intercepting deletions of the record itself. If you want to do\n\t * something to other records in response to a deletion, use\n\t * {@link StoreSideEffects.registerAfterDeleteHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerBeforeDeleteHandler('shape', (shape, source) => {\n\t * if (shape.props.color === 'red') {\n\t * // prevent red shapes from being deleted\n\t * \t return false\n\t * }\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterBeforeDeleteHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreBeforeDeleteHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._beforeDeleteHandlers[typeName] as StoreBeforeDeleteHandler<any>[]\n\t\tif (!handlers) this._beforeDeleteHandlers[typeName] = []\n\t\tthis._beforeDeleteHandlers[typeName]!.push(handler as StoreBeforeDeleteHandler<any>)\n\t\treturn () => remove(this._beforeDeleteHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called after a record is deleted. 
This is useful for side-effects\n\t * that would update _other_ records - if you want to block the deletion of the record itself,\n\t * use {@link StoreSideEffects.registerBeforeDeleteHandler} instead.\n\t *\n\t * @example\n\t * ```ts\n\t * editor.sideEffects.registerAfterDeleteHandler('shape', (shape, source) => {\n\t * // if the last shape in a frame is deleted, delete the frame too:\n\t * const parentFrame = editor.getShape(shape.parentId)\n\t * if (!parentFrame || parentFrame.type !== 'frame') return\n\t *\n\t * const siblings = editor.getSortedChildIdsForParent(parentFrame)\n\t * if (siblings.length === 0) {\n\t * editor.deleteShape(parentFrame.id)\n\t * }\n\t * })\n\t * ```\n\t *\n\t * @param typeName - The type of record to listen for\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t */\n\tregisterAfterDeleteHandler<T extends R['typeName']>(\n\t\ttypeName: T,\n\t\thandler: StoreAfterDeleteHandler<R & { typeName: T }>\n\t) {\n\t\tconst handlers = this._afterDeleteHandlers[typeName] as StoreAfterDeleteHandler<any>[]\n\t\tif (!handlers) this._afterDeleteHandlers[typeName] = []\n\t\tthis._afterDeleteHandlers[typeName]!.push(handler as StoreAfterDeleteHandler<any>)\n\t\treturn () => remove(this._afterDeleteHandlers[typeName]!, handler)\n\t}\n\n\t/**\n\t * Register a handler to be called when a store completes an atomic operation.\n\t *\n\t * @example\n\t * ```ts\n\t * let count = 0\n\t *\n\t * editor.sideEffects.registerOperationCompleteHandler(() => count++)\n\t *\n\t * editor.selectAll()\n\t * expect(count).toBe(1)\n\t *\n\t * editor.store.atomic(() => {\n\t *\teditor.selectNone()\n\t * \teditor.selectAll()\n\t * })\n\t *\n\t * expect(count).toBe(2)\n\t * ```\n\t *\n\t * @param handler - The handler to call\n\t *\n\t * @returns A callback that removes the handler.\n\t *\n\t * @public\n\t */\n\tregisterOperationCompleteHandler(handler: StoreOperationCompleteHandler) {\n\t\tthis._operationCompleteHandlers.push(handler)\n\t\treturn () => remove(this._operationCompleteHandlers, handler)\n\t}\n}\n\nfunction remove(array: any[], item: any) {\n\tconst index = array.indexOf(item)\n\tif (index >= 0) {\n\t\tarray.splice(index, 1)\n\t}\n}\n"],
"mappings": "AA0MO,MAAM,iBAA0C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMtD,YAA6B,OAAiB;AAAjB;AAAA,EAAkB;AAAA,EAEvC,wBAA6E,CAAC;AAAA,EAC9E,uBAA2E,CAAC;AAAA,EAC5E,wBAA6E,CAAC;AAAA,EAC9E,uBAA2E,CAAC;AAAA,EAC5E,wBAA6E,CAAC;AAAA,EAC9E,uBAA2E,CAAC;AAAA,EAC5E,6BAA8D,CAAC;AAAA,EAE/D,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQrB,YAAY;AACX,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,SAAkB;AAC9B,SAAK,aAAa;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,mBAAmB,QAAW,QAA2B;AACxD,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,UAAM,WAAW,KAAK,sBAAsB,OAAO,QAAQ;AAC3D,QAAI,UAAU;AACb,UAAI,IAAI;AACR,iBAAW,WAAW,UAAU;AAC/B,YAAI,QAAQ,GAAG,MAAM;AAAA,MACtB;AACA,aAAO;AAAA,IACR;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,kBAAkB,QAAW,QAA2B;AACvD,QAAI,CAAC,KAAK,WAAY;AAEtB,UAAM,WAAW,KAAK,qBAAqB,OAAO,QAAQ;AAC1D,QAAI,UAAU;AACb,iBAAW,WAAW,UAAU;AAC/B,gBAAQ,QAAQ,MAAM;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,mBAAmB,MAAS,MAAS,QAA2B;AAC/D,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,UAAM,WAAW,KAAK,sBAAsB,KAAK,QAAQ;AACzD,QAAI,UAAU;AACb,UAAI,IAAI;AACR,iBAAW,WAAW,UAAU;AAC/B,YAAI,QAAQ,MAAM,GAAG,MAAM;AAAA,MAC5B;AACA,aAAO;AAAA,IACR;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,kBAAkB,MAAS,MAAS,QAA2B;AAC9D,QAAI,CAAC,KAAK,WAAY;AAEtB,UAAM,WAAW,KAAK,qBAAqB,KAAK,QAAQ;AACxD,QAAI,UAAU;AACb,iBAAW,WAAW,UAAU;AAC/B,gBAAQ,MAAM,MAAM,MAAM;AAAA,MAC3B;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,mBAAmB,QAAW,QAA2B;AACxD,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,UAAM,WAAW,KAAK,sBAAsB,OAAO,QAAQ;AAC3D,QAAI,UAAU;AACb,iBAAW,WAAW,UAAU;AAC/B,YAAI,QAAQ,QAAQ,MAAM,MAAM,OAAO;AACtC,iBAAO;AAAA,QACR;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,kBAAkB,QAAW,QAA2B;AACvD,QAAI,CAAC,KAAK,WAAY;AAEtB,UAAM,WAAW,KAAK,qBAAqB,OAAO,QAAQ;AAC1D,QAAI,UAAU;AACb,iBAAW,WAAW,UAAU;AAC/B,gBAAQ,QAAQ,MAAM;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,wBAAwB,QAA2B;AAClD,QAAI,CAAC,KAAK,WAAY;AAEtB,eAAW,WAAW,KAAK,4BAA4B;AACtD,cAAQ,MAAM;AAAA,IACf;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA4BA,SAAS,gBASN;AACF,UAAM,WAA2B,CAAC;AAClC,eAAW,CAAC,MAAM,QAAQ,KAAK,OAAO,QAAQ,cAAc,GAAU;AACrE,UAAI,UAAU,cAAc;AAC3B,iBAAS,KAAK,KAAK,4BAA4B,MAAM,SAAS,YAAY,CAAC;AAAA,MAC5E;AACA,UAAI,UAAU,aAAa;AAC1B,iBAAS,KAAK,KAAK,2BAA2B,MAAM,SAAS,WAAW,CAAC;AAAA,MAC1E;AACA,UAAI,UAAU,cAAc;AAC3B,iBAAS,KAAK,KAAK,4BAA4B,MAAM,SAAS,YAAY,CAAC;AAAA,MAC5E;AACA,UAAI,UAAU,aAAa;AAC1B,iBAAS,KAAK,KAAK,2BAA2B,MAAM,SAAS,WAAW,CAAC;AAAA,MAC1E;AACA,UAAI,UAAU,cAAc;AAC3B,iBAAS,KAAK,KAAK,4BAA4B,MAAM,SAAS,YAAY,CAAC;AAAA,MAC5E;AACA,UAAI,UAAU,aAAa;AAC1B,iBAAS,KAAK,KAAK,2BAA2B,MAAM,SAAS,WAAW,CAAC;AAAA,MAC1E;AAAA,IACD;AACA,WAAO,MAAM;AACZ,iBAAW,WAAW,SAAU,SAAQ;AAAA,IACzC;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA+BA,4BACC,UACA,SACC;AACD,UAAM,WAAW,KAAK,sBAAsB,QAAQ;AACpD,QAAI,CAAC,SAAU,MAAK,sBAAsB,QAAQ,IAAI,CAAC;AACvD,SAAK,sBAAsB,QAAQ,EAAG,KAAK,OAAO;AAClD,WAAO,MAAM,OAAO,KAAK,sBAAsB,QAAQ,GAAI,OAAO;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,2BACC,UACA,SACC;AACD,UAAM,WAAW,KAAK,qBAAqB,QAAQ;AACnD,QAAI,CAAC,SAAU,MAAK,qBAAqB,QAAQ,IAAI,CAAC;AACtD,SAAK,qBAAqB,QAAQ,EAAG,KAAK,OAAO;AACjD,WAAO,MAAM,OAAO,KAAK,qBAAqB,QAAQ,GAAI,OAAO;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA4BA,4BACC,UACA,SACC;AACD,UAAM,WAAW,KAAK,sBAAsB,QAAQ;AACpD,QAAI,CAAC,SAAU,MAAK,sBAAsB,QAAQ,IAAI,CAAC;AACvD,SAAK,sBAAsB,QAAQ,EAAG,KAAK,OAAO;AAClD,WAAO,MAAM,OAAO,KAAK,sBAAsB,QAAQ,GAAI,OAAO;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,2BACC,UACA,SACC;AACD,UAAM,WAAW,KAAK,qBAAqB,QAAQ;AACnD,QAAI,CAAC,SAAU,MAAK,qBAAqB,QAAQ,IAAI,CAAC;AACtD,SAAK,qBAAqB,QAAQ,EAAG,KAAK,OAAuC;AACjF,WAAO,MAAM,OAAO,KAAK,qBAAqB,QAAQ,GAAI,OAAO;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBA,4BACC,UACA,SACC;AACD,UAAM,WAAW,KAAK,sBAAsB,QAAQ;AACpD,QAAI,CAAC,SAAU,MAAK,sBAAsB,QAAQ,IAAI,CAAC;AACvD,SAAK,sBAAsB,QAAQ,EAAG,KAAK,OAAwC;AACnF,WAAO,MAAM,OAAO,KAAK,sBAAsB,QAAQ,GAAI,OAAO;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0BA,2BACC,UACA,SACC;AACD,UAAM,WAAW,KAAK,qBAAqB,QAAQ;AACnD,QAAI,CAAC,SAAU,MAAK,qBAAqB,QAAQ,IAAI,CAAC;AACtD,SAAK,qBAAqB,QAAQ,EAAG,KAAK,OAAuC;AACjF,WAAO,MAAM,OAAO,KAAK,qBAAqB,QAAQ,GAAI,OAAO;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA4BA,iCAAiC,SAAwC;AACxE,SAAK,2BAA2B,KAAK,OAAO;AAC5C,WAAO,MAAM,OAAO,KAAK,4BAA4B,OAAO;AAAA,EAC7D;AACD;AAEA,SAAS,OAAO,OAAc,MAAW;AACxC,QAAM,QAAQ,MAAM,QAAQ,IAAI;AAChC,MAAI,SAAS,GAAG;AACf,UAAM,OAAO,OAAO,CAAC;AAAA,EACtB;AACD;",
"names": []
}

@@ -1,15 +1,13 @@
import { STRUCTURED_CLONE_OBJECT_PROTOTYPE } from "@tldraw/utils";
-import { isDev } from "./isDev.mjs";
function devFreeze(object) {
-  if (
+  if (process.env.NODE_ENV === "production") {
+    return object;
+  }
  const proto = Object.getPrototypeOf(object);
  if (proto && !(Array.isArray(object) || proto === Object.prototype || proto === null || proto === STRUCTURED_CLONE_OBJECT_PROTOTYPE)) {
    console.error("cannot include non-js data in a record", object);
    throw new Error("cannot include non-js data in a record");
  }
-  if (Object.isFrozen(object)) {
-    return object;
-  }
  const propNames = Object.getOwnPropertyNames(object);
  for (const name of propNames) {
    const value = object[name];

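For context, this hunk (the ESM devFreeze build, judging by the `./isDev.mjs` import it removes) replaces the old `isDev`-based gate with an inline `process.env.NODE_ENV` check and drops the separate early return for already-frozen objects. A minimal sketch of the new gate follows; it is not the shipped implementation (which also validates prototypes and recursively freezes nested values, as visible in the devFreeze.ts sourcesContent later in this diff):

```ts
// Sketch only: mirrors the production early-return introduced in this release.
// `process` is declared locally so the snippet stands alone without Node typings.
declare const process: { env: { NODE_ENV?: string } }

function devFreezeSketch<T>(object: T): T {
	if (process.env.NODE_ENV === 'production') {
		// Production builds skip freezing (and all related checks) entirely.
		return object
	}
	// Development builds still freeze; the real code first recurses into
	// object-valued properties, then calls Object.freeze on the outer object.
	return Object.freeze(object)
}
```
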
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/lib/devFreeze.ts"],
-"sourcesContent": ["import { STRUCTURED_CLONE_OBJECT_PROTOTYPE } from '@tldraw/utils'\
-"mappings": "AAAA,SAAS,yCAAyC;
+
"sourcesContent": ["import { STRUCTURED_CLONE_OBJECT_PROTOTYPE } from '@tldraw/utils'\n\n/**\n * Freeze an object when in development mode. Copied from\n * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze\n *\n * @example\n *\n * ```ts\n * const frozen = devFreeze({ a: 1 })\n * ```\n *\n * @param object - The object to freeze.\n * @returns The frozen object when in development mode, or else the object when in other modes.\n * @public\n */\nexport function devFreeze<T>(object: T): T {\n\tif (process.env.NODE_ENV === 'production') {\n\t\treturn object\n\t}\n\tconst proto = Object.getPrototypeOf(object)\n\tif (\n\t\tproto &&\n\t\t!(\n\t\t\tArray.isArray(object) ||\n\t\t\tproto === Object.prototype ||\n\t\t\tproto === null ||\n\t\t\tproto === STRUCTURED_CLONE_OBJECT_PROTOTYPE\n\t\t)\n\t) {\n\t\tconsole.error('cannot include non-js data in a record', object)\n\t\tthrow new Error('cannot include non-js data in a record')\n\t}\n\n\t// Retrieve the property names defined on object\n\tconst propNames = Object.getOwnPropertyNames(object)\n\n\t// Recursively freeze properties before freezing self\n\tfor (const name of propNames) {\n\t\tconst value = (object as any)[name]\n\n\t\tif (value && typeof value === 'object') {\n\t\t\tdevFreeze(value)\n\t\t}\n\t}\n\n\treturn Object.freeze(object)\n}\n"],
+
"mappings": "AAAA,SAAS,yCAAyC;AAgB3C,SAAS,UAAa,QAAc;AAC1C,MAAI,QAAQ,IAAI,aAAa,cAAc;AAC1C,WAAO;AAAA,EACR;AACA,QAAM,QAAQ,OAAO,eAAe,MAAM;AAC1C,MACC,SACA,EACC,MAAM,QAAQ,MAAM,KACpB,UAAU,OAAO,aACjB,UAAU,QACV,UAAU,oCAEV;AACD,YAAQ,MAAM,0CAA0C,MAAM;AAC9D,UAAM,IAAI,MAAM,wCAAwC;AAAA,EACzD;AAGA,QAAM,YAAY,OAAO,oBAAoB,MAAM;AAGnD,aAAW,QAAQ,WAAW;AAC7B,UAAM,QAAS,OAAe,IAAI;AAElC,QAAI,SAAS,OAAO,UAAU,UAAU;AACvC,gBAAU,KAAK;AAAA,IAChB;AAAA,EACD;AAEA,SAAO,OAAO,OAAO,MAAM;AAC5B;",
"names": []
}

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/lib/migrate.ts"],
-
"sourcesContent": ["import { assert, objectMapEntries } from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { SerializedStore } from './Store'\nimport { SerializedSchema } from './StoreSchema'\n\nfunction squashDependsOn(sequence: Array<Migration | StandaloneDependsOn>): Migration[] {\n\tconst result: Migration[] = []\n\tfor (let i = sequence.length - 1; i >= 0; i--) {\n\t\tconst elem = sequence[i]\n\t\tif (!('id' in elem)) {\n\t\t\tconst dependsOn = elem.dependsOn\n\t\t\tconst prev = result[0]\n\t\t\tif (prev) {\n\t\t\t\tresult[0] = {\n\t\t\t\t\t...prev,\n\t\t\t\t\tdependsOn: dependsOn.concat(prev.dependsOn ?? []),\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tresult.unshift(elem)\n\t\t}\n\t}\n\treturn result\n}\n\n/**\n * Creates a migration sequence that defines how to transform data as your schema evolves.\n *\n * A migration sequence contains a series of migrations that are applied in order to transform\n * data from older versions to newer versions. Each migration is identified by a unique ID\n * and can operate at either the record level (transforming individual records) or store level\n * (transforming the entire store structure).\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param options - Configuration for the migration sequence\n * - sequenceId - Unique identifier for this migration sequence (e.g., 'com.myapp.book')\n * - sequence - Array of migrations or dependency declarations to include in the sequence\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added (defaults to true)\n * @returns A validated migration sequence that can be included in a store schema\n * @example\n * ```ts\n * const bookMigrations = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [\n * {\n * id: 'com.myapp.book/1',\n * scope: 'record',\n * up: (record) => ({ ...record, newField: 'default' })\n * }\n * ]\n * })\n * ```\n * @public\n */\nexport function createMigrationSequence({\n\tsequence,\n\tsequenceId,\n\tretroactive = true,\n}: {\n\tsequenceId: string\n\tretroactive?: boolean\n\tsequence: Array<Migration | StandaloneDependsOn>\n}): MigrationSequence {\n\tconst migrations: MigrationSequence = {\n\t\tsequenceId,\n\t\tretroactive,\n\t\tsequence: squashDependsOn(sequence),\n\t}\n\tvalidateMigrations(migrations)\n\treturn migrations\n}\n\n/**\n * Creates a named set of migration IDs from version numbers and a sequence ID.\n *\n * This utility function helps generate properly formatted migration IDs that follow\n * the required `sequenceId/version` pattern. 
It takes a sequence ID and a record\n * of named versions, returning migration IDs that can be used in migration definitions.\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param sequenceId - The sequence identifier (e.g., 'com.myapp.book')\n * @param versions - Record mapping version names to numbers\n * @returns Record mapping version names to properly formatted migration IDs\n * @example\n * ```ts\n * const migrationIds = createMigrationIds('com.myapp.book', {\n * addGenre: 1,\n * addPublisher: 2,\n * removeOldField: 3\n * })\n * // Result: {\n * // addGenre: 'com.myapp.book/1',\n * // addPublisher: 'com.myapp.book/2',\n * // removeOldField: 'com.myapp.book/3'\n * // }\n * ```\n * @public\n */\nexport function createMigrationIds<\n\tconst ID extends string,\n\tconst Versions extends Record<string, number>,\n>(sequenceId: ID, versions: Versions): { [K in keyof Versions]: `${ID}/${Versions[K]}` } {\n\treturn Object.fromEntries(\n\t\tobjectMapEntries(versions).map(([key, version]) => [key, `${sequenceId}/${version}`] as const)\n\t) as any\n}\n\n/**\n * Creates a migration sequence specifically for record-level migrations.\n *\n * This is a convenience function that creates a migration sequence where all migrations\n * operate at the record scope and are automatically filtered to apply only to records\n * of a specific type. Each migration in the sequence will be enhanced with the record\n * scope and appropriate filtering logic.\n * @param opts - Configuration for the record migration sequence\n * - recordType - The record type name these migrations should apply to\n * - filter - Optional additional filter function to determine which records to migrate\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added\n * - sequenceId - Unique identifier for this migration sequence\n * - sequence - Array of record migration definitions (scope will be added automatically)\n * @returns A migration sequence configured for record-level operations\n * @internal\n */\nexport function createRecordMigrationSequence(opts: {\n\trecordType: string\n\tfilter?(record: UnknownRecord): boolean\n\tretroactive?: boolean\n\tsequenceId: string\n\tsequence: Omit<Extract<Migration, { scope: 'record' }>, 'scope'>[]\n}): MigrationSequence {\n\tconst sequenceId = opts.sequenceId\n\treturn createMigrationSequence({\n\t\tsequenceId,\n\t\tretroactive: opts.retroactive ?? true,\n\t\tsequence: opts.sequence.map((m) =>\n\t\t\t'id' in m\n\t\t\t\t? {\n\t\t\t\t\t\t...m,\n\t\t\t\t\t\tscope: 'record',\n\t\t\t\t\t\tfilter: (r: UnknownRecord) =>\n\t\t\t\t\t\t\tr.typeName === opts.recordType &&\n\t\t\t\t\t\t\t(m.filter?.(r) ?? true) &&\n\t\t\t\t\t\t\t(opts.filter?.(r) ?? true),\n\t\t\t\t\t}\n\t\t\t\t: m\n\t\t),\n\t})\n}\n\n/**\n * Legacy migration interface for backward compatibility.\n *\n * This interface represents the old migration format that included both `up` and `down`\n * transformation functions. 
While still supported, new code should use the `Migration`\n * type which provides more flexibility and better integration with the current system.\n * @public\n */\nexport interface LegacyMigration<Before = any, After = any> {\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tup: (oldState: Before) => After\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tdown: (newState: After) => Before\n}\n\n/**\n * Unique identifier for a migration in the format `sequenceId/version`.\n *\n * Migration IDs follow a specific pattern where the sequence ID identifies the migration\n * sequence and the version number indicates the order within that sequence. For example:\n * 'com.myapp.book/1', 'com.myapp.book/2', etc.\n * @public\n */\nexport type MigrationId = `${string}/${number}`\n\n/**\n * Declares dependencies for migrations without being a migration itself.\n *\n * This interface allows you to specify that future migrations in a sequence depend on\n * migrations from other sequences, without defining an actual migration transformation.\n * It's used to establish cross-sequence dependencies in the migration graph.\n * @public\n */\nexport interface StandaloneDependsOn {\n\treadonly dependsOn: readonly MigrationId[]\n}\n\n/**\n * Defines a single migration that transforms data from one schema version to another.\n *\n * A migration can operate at two different scopes:\n * - `record`: Transforms individual records, with optional filtering to target specific records\n * - `store`: Transforms the entire serialized store structure\n *\n * Each migration has a unique ID and can declare dependencies on other migrations that must\n * be applied first. The `up` function performs the forward transformation, while the optional\n * `down` function can reverse the migration if needed.\n * @public\n */\nexport type Migration = {\n\treadonly id: MigrationId\n\treadonly dependsOn?: readonly MigrationId[] | undefined\n} & (\n\t| {\n\t\t\treadonly scope: 'record'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly filter?: (record: UnknownRecord) => boolean\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (oldState: UnknownRecord) => void | UnknownRecord\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (newState: UnknownRecord) => void | UnknownRecord\n\t }\n\t| {\n\t\t\treadonly scope: 'store'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (\n\t\t\t\toldState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (\n\t\t\t\tnewState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t }\n\t| {\n\t\t\treadonly scope: 'storage'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (storage: SynchronousRecordStorage<UnknownRecord>) => void\n\t\t\treadonly down?: never\n\t }\n)\n\n/**\n * Abstraction over the store that can be used to perform migrations.\n * @public\n */\nexport interface SynchronousRecordStorage<R extends UnknownRecord> {\n\tget(id: string): R | undefined\n\tset(id: string, record: R): void\n\tdelete(id: string): void\n\tkeys(): Iterable<string>\n\tvalues(): Iterable<R>\n\tentries(): Iterable<[string, R]>\n}\n\n/**\n * Abstraction over the storage that can be used 
to perform migrations.\n * @public\n */\nexport interface SynchronousStorage<R extends UnknownRecord> extends SynchronousRecordStorage<R> {\n\tgetSchema(): SerializedSchema\n\tsetSchema(schema: SerializedSchema): void\n}\n\n/**\n * Base interface for legacy migration information.\n *\n * Contains the basic structure used by the legacy migration system, including version\n * range information and the migration functions indexed by version number. This is\n * maintained for backward compatibility with older migration definitions.\n * @public\n */\nexport interface LegacyBaseMigrationsInfo {\n\tfirstVersion: number\n\tcurrentVersion: number\n\tmigrators: { [version: number]: LegacyMigration }\n}\n\n/**\n * Legacy migration configuration with support for sub-type migrations.\n *\n * This interface extends the base legacy migration info to support migrations that\n * vary based on a sub-type key within records. This allows different migration paths\n * for different variants of the same record type, which was useful in older migration\n * systems but is now handled more elegantly by the current Migration system.\n * @public\n */\nexport interface LegacyMigrations extends LegacyBaseMigrationsInfo {\n\tsubTypeKey?: string\n\tsubTypeMigrations?: Record<string, LegacyBaseMigrationsInfo>\n}\n\n/**\n * A complete sequence of migrations that can be applied to transform data.\n *\n * A migration sequence represents a series of ordered migrations that belong together,\n * typically for a specific part of your schema. The sequence includes metadata about\n * whether it should be applied retroactively to existing data and contains the actual\n * migration definitions in execution order.\n * @public\n */\nexport interface MigrationSequence {\n\tsequenceId: string\n\t/**\n\t * retroactive should be true if the migrations should be applied to snapshots that were created before\n\t * this migration sequence was added to the schema.\n\t *\n\t * In general:\n\t *\n\t * - retroactive should be true when app developers create their own new migration sequences.\n\t * - retroactive should be false when library developers ship a migration sequence. When you install a library for the first time, any migrations that were added in the library before that point should generally _not_ be applied to your existing data.\n\t */\n\tretroactive: boolean\n\tsequence: Migration[]\n}\n\n/**\n * Sorts migrations using a distance-minimizing topological sort.\n *\n * This function respects two types of dependencies:\n * 1. Implicit sequence dependencies (foo/1 must come before foo/2)\n * 2. 
Explicit dependencies via `dependsOn` property\n *\n * The algorithm minimizes the total distance between migrations and their explicit\n * dependencies in the final ordering, while maintaining topological correctness.\n * This means when migration A depends on migration B, A will be scheduled as close\n * as possible to B (while respecting all constraints).\n *\n * Implementation uses Kahn's algorithm with priority scoring:\n * - Builds dependency graph and calculates in-degrees\n * - Uses priority queue that prioritizes migrations which unblock explicit dependencies\n * - Processes migrations in urgency order while maintaining topological constraints\n * - Detects cycles by ensuring all migrations are processed\n *\n * @param migrations - Array of migrations to sort\n * @returns Sorted array of migrations in execution order\n * @throws Assertion error if circular dependencies are detected\n * @example\n * ```ts\n * const sorted = sortMigrations([\n * { id: 'app/2', scope: 'record', up: (r) => r },\n * { id: 'app/1', scope: 'record', up: (r) => r },\n * { id: 'lib/1', scope: 'record', up: (r) => r, dependsOn: ['app/1'] }\n * ])\n * // Result: [app/1, app/2, lib/1] (respects both sequence and explicit deps)\n * ```\n * @public\n */\nexport function sortMigrations(migrations: Migration[]): Migration[] {\n\tif (migrations.length === 0) return []\n\n\t// Build dependency graph and calculate in-degrees\n\tconst byId = new Map(migrations.map((m) => [m.id, m]))\n\tconst dependents = new Map<MigrationId, Set<MigrationId>>() // who depends on this\n\tconst inDegree = new Map<MigrationId, number>()\n\tconst explicitDeps = new Map<MigrationId, Set<MigrationId>>() // explicit dependsOn relationships\n\n\t// Initialize\n\tfor (const m of migrations) {\n\t\tinDegree.set(m.id, 0)\n\t\tdependents.set(m.id, new Set())\n\t\texplicitDeps.set(m.id, new Set())\n\t}\n\n\t// Add implicit sequence dependencies and explicit dependencies\n\tfor (const m of migrations) {\n\t\tconst { version, sequenceId } = parseMigrationId(m.id)\n\n\t\t// Implicit dependency on previous in sequence\n\t\tconst prevId = `${sequenceId}/${version - 1}` as MigrationId\n\t\tif (byId.has(prevId)) {\n\t\t\tdependents.get(prevId)!.add(m.id)\n\t\t\tinDegree.set(m.id, inDegree.get(m.id)! + 1)\n\t\t}\n\n\t\t// Explicit dependencies\n\t\tif (m.dependsOn) {\n\t\t\tfor (const depId of m.dependsOn) {\n\t\t\t\tif (byId.has(depId)) {\n\t\t\t\t\tdependents.get(depId)!.add(m.id)\n\t\t\t\t\texplicitDeps.get(m.id)!.add(depId)\n\t\t\t\t\tinDegree.set(m.id, inDegree.get(m.id)! 
+ 1)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Priority queue: migrations ready to process (in-degree 0)\n\tconst ready = migrations.filter((m) => inDegree.get(m.id) === 0)\n\tconst result: Migration[] = []\n\tconst processed = new Set<MigrationId>()\n\n\twhile (ready.length > 0) {\n\t\t// Calculate urgency scores for ready migrations and pick the best one\n\t\tlet bestCandidate: Migration | undefined\n\t\tlet bestCandidateScore = -Infinity\n\n\t\tfor (const m of ready) {\n\t\t\tlet urgencyScore = 0\n\n\t\t\tfor (const depId of dependents.get(m.id) || []) {\n\t\t\t\tif (!processed.has(depId)) {\n\t\t\t\t\t// Priority 1: Count all unprocessed dependents (to break ties)\n\t\t\t\t\turgencyScore += 1\n\n\t\t\t\t\t// Priority 2: If this migration is explicitly depended on by others, boost priority\n\t\t\t\t\tif (explicitDeps.get(depId)!.has(m.id)) {\n\t\t\t\t\t\turgencyScore += 100\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (\n\t\t\t\turgencyScore > bestCandidateScore ||\n\t\t\t\t// Tiebreaker: prefer lower sequence/version\n\t\t\t\t(urgencyScore === bestCandidateScore && m.id.localeCompare(bestCandidate?.id ?? '') < 0)\n\t\t\t) {\n\t\t\t\tbestCandidate = m\n\t\t\t\tbestCandidateScore = urgencyScore\n\t\t\t}\n\t\t}\n\n\t\tconst nextMigration = bestCandidate!\n\t\tready.splice(ready.indexOf(nextMigration), 1)\n\n\t\t// Cycle detection - if we have processed everything and still have items left, there's a cycle\n\t\t// This is handled by Kahn's algorithm naturally - if we finish with items unprocessed, there's a cycle\n\n\t\t// Process this migration\n\t\tresult.push(nextMigration)\n\t\tprocessed.add(nextMigration.id)\n\n\t\t// Update in-degrees and add newly ready migrations\n\t\tfor (const depId of dependents.get(nextMigration.id) || []) {\n\t\t\tif (!processed.has(depId)) {\n\t\t\t\tinDegree.set(depId, inDegree.get(depId)! - 1)\n\t\t\t\tif (inDegree.get(depId) === 0) {\n\t\t\t\t\tready.push(byId.get(depId)!)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Check for cycles - if we didn't process all migrations, there's a cycle\n\tif (result.length !== migrations.length) {\n\t\tconst unprocessed = migrations.filter((m) => !processed.has(m.id))\n\t\tassert(false, `Circular dependency in migrations: ${unprocessed[0].id}`)\n\t}\n\n\treturn result\n}\n\n/**\n * Parses a migration ID to extract the sequence ID and version number.\n *\n * Migration IDs follow the format `sequenceId/version`, and this function splits\n * them into their component parts. This is used internally for sorting migrations\n * and understanding their relationships.\n * @param id - The migration ID to parse\n * @returns Object containing the sequence ID and numeric version\n * @example\n * ```ts\n * const { sequenceId, version } = parseMigrationId('com.myapp.book/5')\n * // sequenceId: 'com.myapp.book', version: 5\n * ```\n * @internal\n */\nexport function parseMigrationId(id: MigrationId): { sequenceId: string; version: number } {\n\tconst [sequenceId, version] = id.split('/')\n\treturn { sequenceId, version: parseInt(version) }\n}\n\nfunction validateMigrationId(id: string, expectedSequenceId?: string) {\n\tif (expectedSequenceId) {\n\t\tassert(\n\t\t\tid.startsWith(expectedSequenceId + '/'),\n\t\t\t`Every migration in sequence '${expectedSequenceId}' must have an id starting with '${expectedSequenceId}/'. 
Got invalid id: '${id}'`\n\t\t)\n\t}\n\n\tassert(id.match(/^(.*?)\\/(0|[1-9]\\d*)$/), `Invalid migration id: '${id}'`)\n}\n\n/**\n * Validates that a migration sequence is correctly structured.\n *\n * Performs several validation checks to ensure the migration sequence is valid:\n * - Sequence ID doesn't contain invalid characters\n * - All migration IDs belong to the expected sequence\n * - Migration versions start at 1 and increment by 1\n * - Migration IDs follow the correct format\n * @param migrations - The migration sequence to validate\n * @throws Assertion error if any validation checks fail\n * @example\n * ```ts\n * const sequence = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [{ id: 'com.myapp.book/1', scope: 'record', up: (r) => r }]\n * })\n * validateMigrations(sequence) // Passes validation\n * ```\n * @public\n */\nexport function validateMigrations(migrations: MigrationSequence) {\n\tassert(\n\t\t!migrations.sequenceId.includes('/'),\n\t\t`sequenceId cannot contain a '/', got ${migrations.sequenceId}`\n\t)\n\tassert(migrations.sequenceId.length, 'sequenceId must be a non-empty string')\n\n\tif (migrations.sequence.length === 0) {\n\t\treturn\n\t}\n\n\tvalidateMigrationId(migrations.sequence[0].id, migrations.sequenceId)\n\tlet n = parseMigrationId(migrations.sequence[0].id).version\n\tassert(\n\t\tn === 1,\n\t\t`Expected the first migrationId to be '${migrations.sequenceId}/1' but got '${migrations.sequence[0].id}'`\n\t)\n\tfor (let i = 1; i < migrations.sequence.length; i++) {\n\t\tconst id = migrations.sequence[i].id\n\t\tvalidateMigrationId(id, migrations.sequenceId)\n\t\tconst m = parseMigrationId(id).version\n\t\tassert(\n\t\t\tm === n + 1,\n\t\t\t`Migration id numbers must increase in increments of 1, expected ${migrations.sequenceId}/${n + 1} but got '${migrations.sequence[i].id}'`\n\t\t)\n\t\tn = m\n\t}\n}\n\n/**\n * Result type returned by migration operations.\n *\n * Migration operations can either succeed and return the transformed value,\n * or fail with a specific reason. This discriminated union type allows for\n * safe handling of both success and error cases when applying migrations.\n * @public\n */\nexport type MigrationResult<T> =\n\t| { type: 'success'; value: T }\n\t| { type: 'error'; reason: MigrationFailureReason }\n\n/**\n * Enumeration of possible reasons why a migration might fail.\n *\n * These reasons help identify what went wrong during migration processing,\n * allowing applications to handle different failure scenarios appropriately.\n * Common failures include incompatible data formats, unknown record types,\n * and version mismatches between the data and available migrations.\n * @public\n */\nexport enum MigrationFailureReason {\n\tIncompatibleSubtype = 'incompatible-subtype',\n\tUnknownType = 'unknown-type',\n\tTargetVersionTooNew = 'target-version-too-new',\n\tTargetVersionTooOld = 'target-version-too-old',\n\tMigrationError = 'migration-error',\n\tUnrecognizedSubtype = 'unrecognized-subtype',\n}\n"],
-"mappings": "AAAA,SAAS,QAAQ,wBAAwB;
+
"sourcesContent": ["import { assert, objectMapEntries } from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { SerializedStore } from './Store'\n\nfunction squashDependsOn(sequence: Array<Migration | StandaloneDependsOn>): Migration[] {\n\tconst result: Migration[] = []\n\tfor (let i = sequence.length - 1; i >= 0; i--) {\n\t\tconst elem = sequence[i]\n\t\tif (!('id' in elem)) {\n\t\t\tconst dependsOn = elem.dependsOn\n\t\t\tconst prev = result[0]\n\t\t\tif (prev) {\n\t\t\t\tresult[0] = {\n\t\t\t\t\t...prev,\n\t\t\t\t\tdependsOn: dependsOn.concat(prev.dependsOn ?? []),\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tresult.unshift(elem)\n\t\t}\n\t}\n\treturn result\n}\n\n/**\n * Creates a migration sequence that defines how to transform data as your schema evolves.\n *\n * A migration sequence contains a series of migrations that are applied in order to transform\n * data from older versions to newer versions. Each migration is identified by a unique ID\n * and can operate at either the record level (transforming individual records) or store level\n * (transforming the entire store structure).\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param options - Configuration for the migration sequence\n * - sequenceId - Unique identifier for this migration sequence (e.g., 'com.myapp.book')\n * - sequence - Array of migrations or dependency declarations to include in the sequence\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added (defaults to true)\n * @returns A validated migration sequence that can be included in a store schema\n * @example\n * ```ts\n * const bookMigrations = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [\n * {\n * id: 'com.myapp.book/1',\n * scope: 'record',\n * up: (record) => ({ ...record, newField: 'default' })\n * }\n * ]\n * })\n * ```\n * @public\n */\nexport function createMigrationSequence({\n\tsequence,\n\tsequenceId,\n\tretroactive = true,\n}: {\n\tsequenceId: string\n\tretroactive?: boolean\n\tsequence: Array<Migration | StandaloneDependsOn>\n}): MigrationSequence {\n\tconst migrations: MigrationSequence = {\n\t\tsequenceId,\n\t\tretroactive,\n\t\tsequence: squashDependsOn(sequence),\n\t}\n\tvalidateMigrations(migrations)\n\treturn migrations\n}\n\n/**\n * Creates a named set of migration IDs from version numbers and a sequence ID.\n *\n * This utility function helps generate properly formatted migration IDs that follow\n * the required `sequenceId/version` pattern. 
It takes a sequence ID and a record\n * of named versions, returning migration IDs that can be used in migration definitions.\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param sequenceId - The sequence identifier (e.g., 'com.myapp.book')\n * @param versions - Record mapping version names to numbers\n * @returns Record mapping version names to properly formatted migration IDs\n * @example\n * ```ts\n * const migrationIds = createMigrationIds('com.myapp.book', {\n * addGenre: 1,\n * addPublisher: 2,\n * removeOldField: 3\n * })\n * // Result: {\n * // addGenre: 'com.myapp.book/1',\n * // addPublisher: 'com.myapp.book/2',\n * // removeOldField: 'com.myapp.book/3'\n * // }\n * ```\n * @public\n */\nexport function createMigrationIds<\n\tconst ID extends string,\n\tconst Versions extends Record<string, number>,\n>(sequenceId: ID, versions: Versions): { [K in keyof Versions]: `${ID}/${Versions[K]}` } {\n\treturn Object.fromEntries(\n\t\tobjectMapEntries(versions).map(([key, version]) => [key, `${sequenceId}/${version}`] as const)\n\t) as any\n}\n\n/**\n * Creates a migration sequence specifically for record-level migrations.\n *\n * This is a convenience function that creates a migration sequence where all migrations\n * operate at the record scope and are automatically filtered to apply only to records\n * of a specific type. Each migration in the sequence will be enhanced with the record\n * scope and appropriate filtering logic.\n * @param opts - Configuration for the record migration sequence\n * - recordType - The record type name these migrations should apply to\n * - filter - Optional additional filter function to determine which records to migrate\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added\n * - sequenceId - Unique identifier for this migration sequence\n * - sequence - Array of record migration definitions (scope will be added automatically)\n * @returns A migration sequence configured for record-level operations\n * @internal\n */\nexport function createRecordMigrationSequence(opts: {\n\trecordType: string\n\tfilter?(record: UnknownRecord): boolean\n\tretroactive?: boolean\n\tsequenceId: string\n\tsequence: Omit<Extract<Migration, { scope: 'record' }>, 'scope'>[]\n}): MigrationSequence {\n\tconst sequenceId = opts.sequenceId\n\treturn createMigrationSequence({\n\t\tsequenceId,\n\t\tretroactive: opts.retroactive ?? true,\n\t\tsequence: opts.sequence.map((m) =>\n\t\t\t'id' in m\n\t\t\t\t? {\n\t\t\t\t\t\t...m,\n\t\t\t\t\t\tscope: 'record',\n\t\t\t\t\t\tfilter: (r: UnknownRecord) =>\n\t\t\t\t\t\t\tr.typeName === opts.recordType &&\n\t\t\t\t\t\t\t(m.filter?.(r) ?? true) &&\n\t\t\t\t\t\t\t(opts.filter?.(r) ?? true),\n\t\t\t\t\t}\n\t\t\t\t: m\n\t\t),\n\t})\n}\n\n/**\n * Legacy migration interface for backward compatibility.\n *\n * This interface represents the old migration format that included both `up` and `down`\n * transformation functions. 
While still supported, new code should use the `Migration`\n * type which provides more flexibility and better integration with the current system.\n * @public\n */\nexport interface LegacyMigration<Before = any, After = any> {\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tup: (oldState: Before) => After\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tdown: (newState: After) => Before\n}\n\n/**\n * Unique identifier for a migration in the format `sequenceId/version`.\n *\n * Migration IDs follow a specific pattern where the sequence ID identifies the migration\n * sequence and the version number indicates the order within that sequence. For example:\n * 'com.myapp.book/1', 'com.myapp.book/2', etc.\n * @public\n */\nexport type MigrationId = `${string}/${number}`\n\n/**\n * Declares dependencies for migrations without being a migration itself.\n *\n * This interface allows you to specify that future migrations in a sequence depend on\n * migrations from other sequences, without defining an actual migration transformation.\n * It's used to establish cross-sequence dependencies in the migration graph.\n * @public\n */\nexport interface StandaloneDependsOn {\n\treadonly dependsOn: readonly MigrationId[]\n}\n\n/**\n * Defines a single migration that transforms data from one schema version to another.\n *\n * A migration can operate at two different scopes:\n * - `record`: Transforms individual records, with optional filtering to target specific records\n * - `store`: Transforms the entire serialized store structure\n *\n * Each migration has a unique ID and can declare dependencies on other migrations that must\n * be applied first. The `up` function performs the forward transformation, while the optional\n * `down` function can reverse the migration if needed.\n * @public\n */\nexport type Migration = {\n\treadonly id: MigrationId\n\treadonly dependsOn?: readonly MigrationId[] | undefined\n} & (\n\t| {\n\t\t\treadonly scope: 'record'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly filter?: (record: UnknownRecord) => boolean\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (oldState: UnknownRecord) => void | UnknownRecord\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (newState: UnknownRecord) => void | UnknownRecord\n\t }\n\t| {\n\t\t\treadonly scope: 'store'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (\n\t\t\t\toldState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (\n\t\t\t\tnewState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t }\n)\n\n/**\n * Base interface for legacy migration information.\n *\n * Contains the basic structure used by the legacy migration system, including version\n * range information and the migration functions indexed by version number. 
This is\n * maintained for backward compatibility with older migration definitions.\n * @public\n */\nexport interface LegacyBaseMigrationsInfo {\n\tfirstVersion: number\n\tcurrentVersion: number\n\tmigrators: { [version: number]: LegacyMigration }\n}\n\n/**\n * Legacy migration configuration with support for sub-type migrations.\n *\n * This interface extends the base legacy migration info to support migrations that\n * vary based on a sub-type key within records. This allows different migration paths\n * for different variants of the same record type, which was useful in older migration\n * systems but is now handled more elegantly by the current Migration system.\n * @public\n */\nexport interface LegacyMigrations extends LegacyBaseMigrationsInfo {\n\tsubTypeKey?: string\n\tsubTypeMigrations?: Record<string, LegacyBaseMigrationsInfo>\n}\n\n/**\n * A complete sequence of migrations that can be applied to transform data.\n *\n * A migration sequence represents a series of ordered migrations that belong together,\n * typically for a specific part of your schema. The sequence includes metadata about\n * whether it should be applied retroactively to existing data and contains the actual\n * migration definitions in execution order.\n * @public\n */\nexport interface MigrationSequence {\n\tsequenceId: string\n\t/**\n\t * retroactive should be true if the migrations should be applied to snapshots that were created before\n\t * this migration sequence was added to the schema.\n\t *\n\t * In general:\n\t *\n\t * - retroactive should be true when app developers create their own new migration sequences.\n\t * - retroactive should be false when library developers ship a migration sequence. When you install a library for the first time, any migrations that were added in the library before that point should generally _not_ be applied to your existing data.\n\t */\n\tretroactive: boolean\n\tsequence: Migration[]\n}\n\n/**\n * Sorts migrations using a distance-minimizing topological sort.\n *\n * This function respects two types of dependencies:\n * 1. Implicit sequence dependencies (foo/1 must come before foo/2)\n * 2. 
Explicit dependencies via `dependsOn` property\n *\n * The algorithm minimizes the total distance between migrations and their explicit\n * dependencies in the final ordering, while maintaining topological correctness.\n * This means when migration A depends on migration B, A will be scheduled as close\n * as possible to B (while respecting all constraints).\n *\n * Implementation uses Kahn's algorithm with priority scoring:\n * - Builds dependency graph and calculates in-degrees\n * - Uses priority queue that prioritizes migrations which unblock explicit dependencies\n * - Processes migrations in urgency order while maintaining topological constraints\n * - Detects cycles by ensuring all migrations are processed\n *\n * @param migrations - Array of migrations to sort\n * @returns Sorted array of migrations in execution order\n * @throws Assertion error if circular dependencies are detected\n * @example\n * ```ts\n * const sorted = sortMigrations([\n * { id: 'app/2', scope: 'record', up: (r) => r },\n * { id: 'app/1', scope: 'record', up: (r) => r },\n * { id: 'lib/1', scope: 'record', up: (r) => r, dependsOn: ['app/1'] }\n * ])\n * // Result: [app/1, app/2, lib/1] (respects both sequence and explicit deps)\n * ```\n * @public\n */\nexport function sortMigrations(migrations: Migration[]): Migration[] {\n\tif (migrations.length === 0) return []\n\n\t// Build dependency graph and calculate in-degrees\n\tconst byId = new Map(migrations.map((m) => [m.id, m]))\n\tconst dependents = new Map<MigrationId, Set<MigrationId>>() // who depends on this\n\tconst inDegree = new Map<MigrationId, number>()\n\tconst explicitDeps = new Map<MigrationId, Set<MigrationId>>() // explicit dependsOn relationships\n\n\t// Initialize\n\tfor (const m of migrations) {\n\t\tinDegree.set(m.id, 0)\n\t\tdependents.set(m.id, new Set())\n\t\texplicitDeps.set(m.id, new Set())\n\t}\n\n\t// Add implicit sequence dependencies and explicit dependencies\n\tfor (const m of migrations) {\n\t\tconst { version, sequenceId } = parseMigrationId(m.id)\n\n\t\t// Implicit dependency on previous in sequence\n\t\tconst prevId = `${sequenceId}/${version - 1}` as MigrationId\n\t\tif (byId.has(prevId)) {\n\t\t\tdependents.get(prevId)!.add(m.id)\n\t\t\tinDegree.set(m.id, inDegree.get(m.id)! + 1)\n\t\t}\n\n\t\t// Explicit dependencies\n\t\tif (m.dependsOn) {\n\t\t\tfor (const depId of m.dependsOn) {\n\t\t\t\tif (byId.has(depId)) {\n\t\t\t\t\tdependents.get(depId)!.add(m.id)\n\t\t\t\t\texplicitDeps.get(m.id)!.add(depId)\n\t\t\t\t\tinDegree.set(m.id, inDegree.get(m.id)! 
+ 1)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Priority queue: migrations ready to process (in-degree 0)\n\tconst ready = migrations.filter((m) => inDegree.get(m.id) === 0)\n\tconst result: Migration[] = []\n\tconst processed = new Set<MigrationId>()\n\n\twhile (ready.length > 0) {\n\t\t// Calculate urgency scores for ready migrations and pick the best one\n\t\tlet bestCandidate: Migration | undefined\n\t\tlet bestCandidateScore = -Infinity\n\n\t\tfor (const m of ready) {\n\t\t\tlet urgencyScore = 0\n\n\t\t\tfor (const depId of dependents.get(m.id) || []) {\n\t\t\t\tif (!processed.has(depId)) {\n\t\t\t\t\t// Priority 1: Count all unprocessed dependents (to break ties)\n\t\t\t\t\turgencyScore += 1\n\n\t\t\t\t\t// Priority 2: If this migration is explicitly depended on by others, boost priority\n\t\t\t\t\tif (explicitDeps.get(depId)!.has(m.id)) {\n\t\t\t\t\t\turgencyScore += 100\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (\n\t\t\t\turgencyScore > bestCandidateScore ||\n\t\t\t\t// Tiebreaker: prefer lower sequence/version\n\t\t\t\t(urgencyScore === bestCandidateScore && m.id.localeCompare(bestCandidate?.id ?? '') < 0)\n\t\t\t) {\n\t\t\t\tbestCandidate = m\n\t\t\t\tbestCandidateScore = urgencyScore\n\t\t\t}\n\t\t}\n\n\t\tconst nextMigration = bestCandidate!\n\t\tready.splice(ready.indexOf(nextMigration), 1)\n\n\t\t// Cycle detection - if we have processed everything and still have items left, there's a cycle\n\t\t// This is handled by Kahn's algorithm naturally - if we finish with items unprocessed, there's a cycle\n\n\t\t// Process this migration\n\t\tresult.push(nextMigration)\n\t\tprocessed.add(nextMigration.id)\n\n\t\t// Update in-degrees and add newly ready migrations\n\t\tfor (const depId of dependents.get(nextMigration.id) || []) {\n\t\t\tif (!processed.has(depId)) {\n\t\t\t\tinDegree.set(depId, inDegree.get(depId)! - 1)\n\t\t\t\tif (inDegree.get(depId) === 0) {\n\t\t\t\t\tready.push(byId.get(depId)!)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Check for cycles - if we didn't process all migrations, there's a cycle\n\tif (result.length !== migrations.length) {\n\t\tconst unprocessed = migrations.filter((m) => !processed.has(m.id))\n\t\tassert(false, `Circular dependency in migrations: ${unprocessed[0].id}`)\n\t}\n\n\treturn result\n}\n\n/**\n * Parses a migration ID to extract the sequence ID and version number.\n *\n * Migration IDs follow the format `sequenceId/version`, and this function splits\n * them into their component parts. This is used internally for sorting migrations\n * and understanding their relationships.\n * @param id - The migration ID to parse\n * @returns Object containing the sequence ID and numeric version\n * @example\n * ```ts\n * const { sequenceId, version } = parseMigrationId('com.myapp.book/5')\n * // sequenceId: 'com.myapp.book', version: 5\n * ```\n * @internal\n */\nexport function parseMigrationId(id: MigrationId): { sequenceId: string; version: number } {\n\tconst [sequenceId, version] = id.split('/')\n\treturn { sequenceId, version: parseInt(version) }\n}\n\nfunction validateMigrationId(id: string, expectedSequenceId?: string) {\n\tif (expectedSequenceId) {\n\t\tassert(\n\t\t\tid.startsWith(expectedSequenceId + '/'),\n\t\t\t`Every migration in sequence '${expectedSequenceId}' must have an id starting with '${expectedSequenceId}/'. 
Got invalid id: '${id}'`\n\t\t)\n\t}\n\n\tassert(id.match(/^(.*?)\\/(0|[1-9]\\d*)$/), `Invalid migration id: '${id}'`)\n}\n\n/**\n * Validates that a migration sequence is correctly structured.\n *\n * Performs several validation checks to ensure the migration sequence is valid:\n * - Sequence ID doesn't contain invalid characters\n * - All migration IDs belong to the expected sequence\n * - Migration versions start at 1 and increment by 1\n * - Migration IDs follow the correct format\n * @param migrations - The migration sequence to validate\n * @throws Assertion error if any validation checks fail\n * @example\n * ```ts\n * const sequence = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [{ id: 'com.myapp.book/1', scope: 'record', up: (r) => r }]\n * })\n * validateMigrations(sequence) // Passes validation\n * ```\n * @public\n */\nexport function validateMigrations(migrations: MigrationSequence) {\n\tassert(\n\t\t!migrations.sequenceId.includes('/'),\n\t\t`sequenceId cannot contain a '/', got ${migrations.sequenceId}`\n\t)\n\tassert(migrations.sequenceId.length, 'sequenceId must be a non-empty string')\n\n\tif (migrations.sequence.length === 0) {\n\t\treturn\n\t}\n\n\tvalidateMigrationId(migrations.sequence[0].id, migrations.sequenceId)\n\tlet n = parseMigrationId(migrations.sequence[0].id).version\n\tassert(\n\t\tn === 1,\n\t\t`Expected the first migrationId to be '${migrations.sequenceId}/1' but got '${migrations.sequence[0].id}'`\n\t)\n\tfor (let i = 1; i < migrations.sequence.length; i++) {\n\t\tconst id = migrations.sequence[i].id\n\t\tvalidateMigrationId(id, migrations.sequenceId)\n\t\tconst m = parseMigrationId(id).version\n\t\tassert(\n\t\t\tm === n + 1,\n\t\t\t`Migration id numbers must increase in increments of 1, expected ${migrations.sequenceId}/${n + 1} but got '${migrations.sequence[i].id}'`\n\t\t)\n\t\tn = m\n\t}\n}\n\n/**\n * Result type returned by migration operations.\n *\n * Migration operations can either succeed and return the transformed value,\n * or fail with a specific reason. This discriminated union type allows for\n * safe handling of both success and error cases when applying migrations.\n * @public\n */\nexport type MigrationResult<T> =\n\t| { type: 'success'; value: T }\n\t| { type: 'error'; reason: MigrationFailureReason }\n\n/**\n * Enumeration of possible reasons why a migration might fail.\n *\n * These reasons help identify what went wrong during migration processing,\n * allowing applications to handle different failure scenarios appropriately.\n * Common failures include incompatible data formats, unknown record types,\n * and version mismatches between the data and available migrations.\n * @public\n */\nexport enum MigrationFailureReason {\n\tIncompatibleSubtype = 'incompatible-subtype',\n\tUnknownType = 'unknown-type',\n\tTargetVersionTooNew = 'target-version-too-new',\n\tTargetVersionTooOld = 'target-version-too-old',\n\tMigrationError = 'migration-error',\n\tUnrecognizedSubtype = 'unrecognized-subtype',\n}\n"],
+
"mappings": "AAAA,SAAS,QAAQ,wBAAwB;AAIzC,SAAS,gBAAgB,UAA+D;AACvF,QAAM,SAAsB,CAAC;AAC7B,WAAS,IAAI,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AAC9C,UAAM,OAAO,SAAS,CAAC;AACvB,QAAI,EAAE,QAAQ,OAAO;AACpB,YAAM,YAAY,KAAK;AACvB,YAAM,OAAO,OAAO,CAAC;AACrB,UAAI,MAAM;AACT,eAAO,CAAC,IAAI;AAAA,UACX,GAAG;AAAA,UACH,WAAW,UAAU,OAAO,KAAK,aAAa,CAAC,CAAC;AAAA,QACjD;AAAA,MACD;AAAA,IACD,OAAO;AACN,aAAO,QAAQ,IAAI;AAAA,IACpB;AAAA,EACD;AACA,SAAO;AACR;AA+BO,SAAS,wBAAwB;AAAA,EACvC;AAAA,EACA;AAAA,EACA,cAAc;AACf,GAIsB;AACrB,QAAM,aAAgC;AAAA,IACrC;AAAA,IACA;AAAA,IACA,UAAU,gBAAgB,QAAQ;AAAA,EACnC;AACA,qBAAmB,UAAU;AAC7B,SAAO;AACR;AA4BO,SAAS,mBAGd,YAAgB,UAAuE;AACxF,SAAO,OAAO;AAAA,IACb,iBAAiB,QAAQ,EAAE,IAAI,CAAC,CAAC,KAAK,OAAO,MAAM,CAAC,KAAK,GAAG,UAAU,IAAI,OAAO,EAAE,CAAU;AAAA,EAC9F;AACD;AAkBO,SAAS,8BAA8B,MAMxB;AACrB,QAAM,aAAa,KAAK;AACxB,SAAO,wBAAwB;AAAA,IAC9B;AAAA,IACA,aAAa,KAAK,eAAe;AAAA,IACjC,UAAU,KAAK,SAAS;AAAA,MAAI,CAAC,MAC5B,QAAQ,IACL;AAAA,QACA,GAAG;AAAA,QACH,OAAO;AAAA,QACP,QAAQ,CAAC,MACR,EAAE,aAAa,KAAK,eACnB,EAAE,SAAS,CAAC,KAAK,UACjB,KAAK,SAAS,CAAC,KAAK;AAAA,MACvB,IACC;AAAA,IACJ;AAAA,EACD,CAAC;AACF;AAiKO,SAAS,eAAe,YAAsC;AACpE,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAGrC,QAAM,OAAO,IAAI,IAAI,WAAW,IAAI,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC;AACrD,QAAM,aAAa,oBAAI,IAAmC;AAC1D,QAAM,WAAW,oBAAI,IAAyB;AAC9C,QAAM,eAAe,oBAAI,IAAmC;AAG5D,aAAW,KAAK,YAAY;AAC3B,aAAS,IAAI,EAAE,IAAI,CAAC;AACpB,eAAW,IAAI,EAAE,IAAI,oBAAI,IAAI,CAAC;AAC9B,iBAAa,IAAI,EAAE,IAAI,oBAAI,IAAI,CAAC;AAAA,EACjC;AAGA,aAAW,KAAK,YAAY;AAC3B,UAAM,EAAE,SAAS,WAAW,IAAI,iBAAiB,EAAE,EAAE;AAGrD,UAAM,SAAS,GAAG,UAAU,IAAI,UAAU,CAAC;AAC3C,QAAI,KAAK,IAAI,MAAM,GAAG;AACrB,iBAAW,IAAI,MAAM,EAAG,IAAI,EAAE,EAAE;AAChC,eAAS,IAAI,EAAE,IAAI,SAAS,IAAI,EAAE,EAAE,IAAK,CAAC;AAAA,IAC3C;AAGA,QAAI,EAAE,WAAW;AAChB,iBAAW,SAAS,EAAE,WAAW;AAChC,YAAI,KAAK,IAAI,KAAK,GAAG;AACpB,qBAAW,IAAI,KAAK,EAAG,IAAI,EAAE,EAAE;AAC/B,uBAAa,IAAI,EAAE,EAAE,EAAG,IAAI,KAAK;AACjC,mBAAS,IAAI,EAAE,IAAI,SAAS,IAAI,EAAE,EAAE,IAAK,CAAC;AAAA,QAC3C;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,QAAM,QAAQ,WAAW,OAAO,CAAC,MAAM,SAAS,IAAI,EAAE,EAAE,MAAM,CAAC;AAC/D,QAAM,SAAsB,CAAC;AAC7B,QAAM,YAAY,oBAAI,IAAiB;AAEvC,SAAO,MAAM,SAAS,GAAG;AAExB,QAAI;AACJ,QAAI,qBAAqB;AAEzB,eAAW,KAAK,OAAO;AACtB,UAAI,eAAe;AAEnB,iBAAW,SAAS,WAAW,IAAI,EAAE,EAAE,KAAK,CAAC,GAAG;AAC/C,YAAI,CAAC,UAAU,IAAI,KAAK,GAAG;AAE1B,0BAAgB;AAGhB,cAAI,aAAa,IAAI,KAAK,EAAG,IAAI,EAAE,EAAE,GAAG;AACvC,4BAAgB;AAAA,UACjB;AAAA,QACD;AAAA,MACD;AAEA,UACC,eAAe;AAAA,MAEd,iBAAiB,sBAAsB,EAAE,GAAG,cAAc,eAAe,MAAM,EAAE,IAAI,GACrF;AACD,wBAAgB;AAChB,6BAAqB;AAAA,MACtB;AAAA,IACD;AAEA,UAAM,gBAAgB;AACtB,UAAM,OAAO,MAAM,QAAQ,aAAa,GAAG,CAAC;AAM5C,WAAO,KAAK,aAAa;AACzB,cAAU,IAAI,cAAc,EAAE;AAG9B,eAAW,SAAS,WAAW,IAAI,cAAc,EAAE,KAAK,CAAC,GAAG;AAC3D,UAAI,CAAC,UAAU,IAAI,KAAK,GAAG;AAC1B,iBAAS,IAAI,OAAO,SAAS,IAAI,KAAK,IAAK,CAAC;AAC5C,YAAI,SAAS,IAAI,KAAK,MAAM,GAAG;AAC9B,gBAAM,KAAK,KAAK,IAAI,KAAK,CAAE;AAAA,QAC5B;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,MAAI,OAAO,WAAW,WAAW,QAAQ;AACxC,UAAM,cAAc,WAAW,OAAO,CAAC,MAAM,CAAC,UAAU,IAAI,EAAE,EAAE,CAAC;AACjE,WAAO,OAAO,sCAAsC,YAAY,CAAC,EAAE,EAAE,EAAE;AAAA,EACxE;AAEA,SAAO;AACR;AAiBO,SAAS,iBAAiB,IAA0D;AAC1F,QAAM,CAAC,YAAY,OAAO,IAAI,GAAG,MAAM,GAAG;AAC1C,SAAO,EAAE,YAAY,SAAS,SAAS,OAAO,EAAE;AACjD;AAEA,SAAS,oBAAoB,IAAY,oBAA6B;AACrE,MAAI,oBAAoB;AACvB;AAAA,MACC,GAAG,WAAW,qBAAqB,GAAG;AAAA,MACtC,gCAAgC,kBAAkB,oCAAoC,kBAAkB,wBAAwB,EAAE;AAAA,IACnI;AAAA,EACD;AAEA,SAAO,GAAG,MAAM,uBAAuB,GAAG,0BAA0B,EAAE,GAAG;AAC1E;AAsBO,SAAS,mBAAmB,YAA+B;AACjE;AAAA,IACC,CAAC,WAAW,WAAW,SAAS,GAAG;AAAA,IACnC,wCAAwC,WAAW,UAAU;AAAA,EAC9D;AACA,SAAO,WAAW,WAAW,QAAQ,uCAAuC;AAE5E,MAAI,WAAW,SAAS,WAAW,GAAG;AACrC;AAAA,EACD;AAEA,sBAAoB,WAAW,SAAS,CAAC,EAAE,IAAI,WAAW,UAAU;
AACpE,MAAI,IAAI,iBAAiB,WAAW,SAAS,CAAC,EAAE,EAAE,EAAE;AACpD;AAAA,IACC,MAAM;AAAA,IACN,yCAAyC,WAAW,UAAU,gBAAgB,WAAW,SAAS,CAAC,EAAE,EAAE;AAAA,EACxG;AACA,WAAS,IAAI,GAAG,IAAI,WAAW,SAAS,QAAQ,KAAK;AACpD,UAAM,KAAK,WAAW,SAAS,CAAC,EAAE;AAClC,wBAAoB,IAAI,WAAW,UAAU;AAC7C,UAAM,IAAI,iBAAiB,EAAE,EAAE;AAC/B;AAAA,MACC,MAAM,IAAI;AAAA,MACV,mEAAmE,WAAW,UAAU,IAAI,IAAI,CAAC,aAAa,WAAW,SAAS,CAAC,EAAE,EAAE;AAAA,IACxI;AACA,QAAI;AAAA,EACL;AACD;AAuBO,IAAK,yBAAL,kBAAKA,4BAAL;AACN,EAAAA,wBAAA,yBAAsB;AACtB,EAAAA,wBAAA,iBAAc;AACd,EAAAA,wBAAA,yBAAsB;AACtB,EAAAA,wBAAA,yBAAsB;AACtB,EAAAA,wBAAA,oBAAiB;AACjB,EAAAA,wBAAA,yBAAsB;AANX,SAAAA;AAAA,GAAA;",
"names": ["MigrationFailureReason"]
}

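Comparing the removed and added `sourcesContent` strings in this hunk, the migrate.ts source loses its `SerializedSchema` import, the `'storage'` migration scope, and the `SynchronousRecordStorage`/`SynchronousStorage` interfaces; the record- and store-scoped migration API is unchanged. As a reminder of that surviving API, here is the usage example from the `createMigrationSequence` JSDoc retained in the file (the `com.myapp.book` sequence id is the docs' placeholder):

```ts
import { createMigrationSequence } from '@tldraw/store'

// A single record-scoped migration, as shown in the JSDoc kept in migrate.ts.
const bookMigrations = createMigrationSequence({
	sequenceId: 'com.myapp.book',
	sequence: [
		{
			id: 'com.myapp.book/1',
			scope: 'record',
			// Forward transformation: give each record a default value for the new field.
			up: (record) => ({ ...record, newField: 'default' }),
		},
	],
})
```
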
package/package.json CHANGED

@@ -1,7 +1,7 @@
{
	"name": "@tldraw/store",
	"description": "tldraw infinite canvas SDK (store).",
-	"version": "4.2.2",
+	"version": "4.2.3",
	"author": {
		"name": "tldraw Inc.",
		"email": "hello@tldraw.com"
@@ -44,11 +44,11 @@
		"context": "yarn run -T tsx ../../internal/scripts/context.ts"
	},
	"dependencies": {
-		"@tldraw/state": "4.2.
-		"@tldraw/utils": "4.2.
+		"@tldraw/state": "4.2.3",
+		"@tldraw/utils": "4.2.3"
	},
	"peerDependencies": {
-		"react": "^18.2.0 || ^19.
+		"react": "^18.2.0 || ^19.0.0"
	},
	"devDependencies": {
		"@peculiar/webcrypto": "^1.5.0",

package/src/index.ts CHANGED

@@ -1,6 +1,5 @@
import { registerTldrawLibraryVersion } from '@tldraw/utils'
export { AtomMap } from './lib/AtomMap'
-export { AtomSet } from './lib/AtomSet'
export type { BaseRecord, IdOf, RecordId, UnknownRecord } from './lib/BaseRecord'
export { devFreeze } from './lib/devFreeze'
export { type QueryExpression, type QueryValueMatcher } from './lib/executeQuery'
@@ -19,8 +18,6 @@ export {
	type MigrationResult,
	type MigrationSequence,
	type StandaloneDependsOn,
-	type SynchronousRecordStorage,
-	type SynchronousStorage,
} from './lib/migrate'
export {
	createEmptyRecordsDiff,