@tldraw/store 4.3.0-next.82cfddd7ee89 → 4.3.0-next.842fb21476f2
This diff shows the changes between these two publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist-cjs/index.d.ts +49 -1
- package/dist-cjs/index.js +3 -1
- package/dist-cjs/index.js.map +2 -2
- package/dist-cjs/lib/AtomSet.js +68 -0
- package/dist-cjs/lib/AtomSet.js.map +7 -0
- package/dist-cjs/lib/ImmutableMap.js +25 -23
- package/dist-cjs/lib/ImmutableMap.js.map +2 -2
- package/dist-cjs/lib/StoreSchema.js +80 -24
- package/dist-cjs/lib/StoreSchema.js.map +2 -2
- package/dist-cjs/lib/StoreSideEffects.js +1 -1
- package/dist-cjs/lib/StoreSideEffects.js.map +1 -1
- package/dist-cjs/lib/devFreeze.js +5 -3
- package/dist-cjs/lib/devFreeze.js.map +2 -2
- package/dist-cjs/lib/isDev.js +37 -0
- package/dist-cjs/lib/isDev.js.map +7 -0
- package/dist-cjs/lib/migrate.js.map +2 -2
- package/dist-esm/index.d.mts +49 -1
- package/dist-esm/index.mjs +3 -1
- package/dist-esm/index.mjs.map +2 -2
- package/dist-esm/lib/AtomSet.mjs +48 -0
- package/dist-esm/lib/AtomSet.mjs.map +7 -0
- package/dist-esm/lib/ImmutableMap.mjs +25 -23
- package/dist-esm/lib/ImmutableMap.mjs.map +2 -2
- package/dist-esm/lib/StoreSchema.mjs +83 -25
- package/dist-esm/lib/StoreSchema.mjs.map +2 -2
- package/dist-esm/lib/StoreSideEffects.mjs +1 -1
- package/dist-esm/lib/StoreSideEffects.mjs.map +1 -1
- package/dist-esm/lib/devFreeze.mjs +5 -3
- package/dist-esm/lib/devFreeze.mjs.map +2 -2
- package/dist-esm/lib/isDev.mjs +16 -0
- package/dist-esm/lib/isDev.mjs.map +7 -0
- package/dist-esm/lib/migrate.mjs.map +2 -2
- package/package.json +4 -4
- package/src/index.ts +3 -0
- package/src/lib/AtomSet.ts +52 -0
- package/src/lib/ImmutableMap.ts +25 -33
- package/src/lib/StoreSchema.ts +90 -30
- package/src/lib/StoreSideEffects.ts +1 -1
- package/src/lib/devFreeze.test.ts +6 -2
- package/src/lib/devFreeze.ts +7 -3
- package/src/lib/isDev.ts +20 -0
- package/src/lib/migrate.ts +29 -0
- package/src/lib/test/recordStore.test.ts +182 -0
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/lib/migrate.ts"],
-
"sourcesContent": ["import { assert, objectMapEntries } from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { SerializedStore } from './Store'\n\nfunction squashDependsOn(sequence: Array<Migration | StandaloneDependsOn>): Migration[] {\n\tconst result: Migration[] = []\n\tfor (let i = sequence.length - 1; i >= 0; i--) {\n\t\tconst elem = sequence[i]\n\t\tif (!('id' in elem)) {\n\t\t\tconst dependsOn = elem.dependsOn\n\t\t\tconst prev = result[0]\n\t\t\tif (prev) {\n\t\t\t\tresult[0] = {\n\t\t\t\t\t...prev,\n\t\t\t\t\tdependsOn: dependsOn.concat(prev.dependsOn ?? []),\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tresult.unshift(elem)\n\t\t}\n\t}\n\treturn result\n}\n\n/**\n * Creates a migration sequence that defines how to transform data as your schema evolves.\n *\n * A migration sequence contains a series of migrations that are applied in order to transform\n * data from older versions to newer versions. Each migration is identified by a unique ID\n * and can operate at either the record level (transforming individual records) or store level\n * (transforming the entire store structure).\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param options - Configuration for the migration sequence\n * - sequenceId - Unique identifier for this migration sequence (e.g., 'com.myapp.book')\n * - sequence - Array of migrations or dependency declarations to include in the sequence\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added (defaults to true)\n * @returns A validated migration sequence that can be included in a store schema\n * @example\n * ```ts\n * const bookMigrations = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [\n * {\n * id: 'com.myapp.book/1',\n * scope: 'record',\n * up: (record) => ({ ...record, newField: 'default' })\n * }\n * ]\n * })\n * ```\n * @public\n */\nexport function createMigrationSequence({\n\tsequence,\n\tsequenceId,\n\tretroactive = true,\n}: {\n\tsequenceId: string\n\tretroactive?: boolean\n\tsequence: Array<Migration | StandaloneDependsOn>\n}): MigrationSequence {\n\tconst migrations: MigrationSequence = {\n\t\tsequenceId,\n\t\tretroactive,\n\t\tsequence: squashDependsOn(sequence),\n\t}\n\tvalidateMigrations(migrations)\n\treturn migrations\n}\n\n/**\n * Creates a named set of migration IDs from version numbers and a sequence ID.\n *\n * This utility function helps generate properly formatted migration IDs that follow\n * the required `sequenceId/version` pattern. 
It takes a sequence ID and a record\n * of named versions, returning migration IDs that can be used in migration definitions.\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param sequenceId - The sequence identifier (e.g., 'com.myapp.book')\n * @param versions - Record mapping version names to numbers\n * @returns Record mapping version names to properly formatted migration IDs\n * @example\n * ```ts\n * const migrationIds = createMigrationIds('com.myapp.book', {\n * addGenre: 1,\n * addPublisher: 2,\n * removeOldField: 3\n * })\n * // Result: {\n * // addGenre: 'com.myapp.book/1',\n * // addPublisher: 'com.myapp.book/2',\n * // removeOldField: 'com.myapp.book/3'\n * // }\n * ```\n * @public\n */\nexport function createMigrationIds<\n\tconst ID extends string,\n\tconst Versions extends Record<string, number>,\n>(sequenceId: ID, versions: Versions): { [K in keyof Versions]: `${ID}/${Versions[K]}` } {\n\treturn Object.fromEntries(\n\t\tobjectMapEntries(versions).map(([key, version]) => [key, `${sequenceId}/${version}`] as const)\n\t) as any\n}\n\n/**\n * Creates a migration sequence specifically for record-level migrations.\n *\n * This is a convenience function that creates a migration sequence where all migrations\n * operate at the record scope and are automatically filtered to apply only to records\n * of a specific type. Each migration in the sequence will be enhanced with the record\n * scope and appropriate filtering logic.\n * @param opts - Configuration for the record migration sequence\n * - recordType - The record type name these migrations should apply to\n * - filter - Optional additional filter function to determine which records to migrate\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added\n * - sequenceId - Unique identifier for this migration sequence\n * - sequence - Array of record migration definitions (scope will be added automatically)\n * @returns A migration sequence configured for record-level operations\n * @internal\n */\nexport function createRecordMigrationSequence(opts: {\n\trecordType: string\n\tfilter?(record: UnknownRecord): boolean\n\tretroactive?: boolean\n\tsequenceId: string\n\tsequence: Omit<Extract<Migration, { scope: 'record' }>, 'scope'>[]\n}): MigrationSequence {\n\tconst sequenceId = opts.sequenceId\n\treturn createMigrationSequence({\n\t\tsequenceId,\n\t\tretroactive: opts.retroactive ?? true,\n\t\tsequence: opts.sequence.map((m) =>\n\t\t\t'id' in m\n\t\t\t\t? {\n\t\t\t\t\t\t...m,\n\t\t\t\t\t\tscope: 'record',\n\t\t\t\t\t\tfilter: (r: UnknownRecord) =>\n\t\t\t\t\t\t\tr.typeName === opts.recordType &&\n\t\t\t\t\t\t\t(m.filter?.(r) ?? true) &&\n\t\t\t\t\t\t\t(opts.filter?.(r) ?? true),\n\t\t\t\t\t}\n\t\t\t\t: m\n\t\t),\n\t})\n}\n\n/**\n * Legacy migration interface for backward compatibility.\n *\n * This interface represents the old migration format that included both `up` and `down`\n * transformation functions. 
While still supported, new code should use the `Migration`\n * type which provides more flexibility and better integration with the current system.\n * @public\n */\nexport interface LegacyMigration<Before = any, After = any> {\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tup: (oldState: Before) => After\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tdown: (newState: After) => Before\n}\n\n/**\n * Unique identifier for a migration in the format `sequenceId/version`.\n *\n * Migration IDs follow a specific pattern where the sequence ID identifies the migration\n * sequence and the version number indicates the order within that sequence. For example:\n * 'com.myapp.book/1', 'com.myapp.book/2', etc.\n * @public\n */\nexport type MigrationId = `${string}/${number}`\n\n/**\n * Declares dependencies for migrations without being a migration itself.\n *\n * This interface allows you to specify that future migrations in a sequence depend on\n * migrations from other sequences, without defining an actual migration transformation.\n * It's used to establish cross-sequence dependencies in the migration graph.\n * @public\n */\nexport interface StandaloneDependsOn {\n\treadonly dependsOn: readonly MigrationId[]\n}\n\n/**\n * Defines a single migration that transforms data from one schema version to another.\n *\n * A migration can operate at two different scopes:\n * - `record`: Transforms individual records, with optional filtering to target specific records\n * - `store`: Transforms the entire serialized store structure\n *\n * Each migration has a unique ID and can declare dependencies on other migrations that must\n * be applied first. The `up` function performs the forward transformation, while the optional\n * `down` function can reverse the migration if needed.\n * @public\n */\nexport type Migration = {\n\treadonly id: MigrationId\n\treadonly dependsOn?: readonly MigrationId[] | undefined\n} & (\n\t| {\n\t\t\treadonly scope: 'record'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly filter?: (record: UnknownRecord) => boolean\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (oldState: UnknownRecord) => void | UnknownRecord\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (newState: UnknownRecord) => void | UnknownRecord\n\t }\n\t| {\n\t\t\treadonly scope: 'store'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (\n\t\t\t\toldState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (\n\t\t\t\tnewState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t }\n)\n\n/**\n * Base interface for legacy migration information.\n *\n * Contains the basic structure used by the legacy migration system, including version\n * range information and the migration functions indexed by version number. 
This is\n * maintained for backward compatibility with older migration definitions.\n * @public\n */\nexport interface LegacyBaseMigrationsInfo {\n\tfirstVersion: number\n\tcurrentVersion: number\n\tmigrators: { [version: number]: LegacyMigration }\n}\n\n/**\n * Legacy migration configuration with support for sub-type migrations.\n *\n * This interface extends the base legacy migration info to support migrations that\n * vary based on a sub-type key within records. This allows different migration paths\n * for different variants of the same record type, which was useful in older migration\n * systems but is now handled more elegantly by the current Migration system.\n * @public\n */\nexport interface LegacyMigrations extends LegacyBaseMigrationsInfo {\n\tsubTypeKey?: string\n\tsubTypeMigrations?: Record<string, LegacyBaseMigrationsInfo>\n}\n\n/**\n * A complete sequence of migrations that can be applied to transform data.\n *\n * A migration sequence represents a series of ordered migrations that belong together,\n * typically for a specific part of your schema. The sequence includes metadata about\n * whether it should be applied retroactively to existing data and contains the actual\n * migration definitions in execution order.\n * @public\n */\nexport interface MigrationSequence {\n\tsequenceId: string\n\t/**\n\t * retroactive should be true if the migrations should be applied to snapshots that were created before\n\t * this migration sequence was added to the schema.\n\t *\n\t * In general:\n\t *\n\t * - retroactive should be true when app developers create their own new migration sequences.\n\t * - retroactive should be false when library developers ship a migration sequence. When you install a library for the first time, any migrations that were added in the library before that point should generally _not_ be applied to your existing data.\n\t */\n\tretroactive: boolean\n\tsequence: Migration[]\n}\n\n/**\n * Sorts migrations using a distance-minimizing topological sort.\n *\n * This function respects two types of dependencies:\n * 1. Implicit sequence dependencies (foo/1 must come before foo/2)\n * 2. 
Explicit dependencies via `dependsOn` property\n *\n * The algorithm minimizes the total distance between migrations and their explicit\n * dependencies in the final ordering, while maintaining topological correctness.\n * This means when migration A depends on migration B, A will be scheduled as close\n * as possible to B (while respecting all constraints).\n *\n * Implementation uses Kahn's algorithm with priority scoring:\n * - Builds dependency graph and calculates in-degrees\n * - Uses priority queue that prioritizes migrations which unblock explicit dependencies\n * - Processes migrations in urgency order while maintaining topological constraints\n * - Detects cycles by ensuring all migrations are processed\n *\n * @param migrations - Array of migrations to sort\n * @returns Sorted array of migrations in execution order\n * @throws Assertion error if circular dependencies are detected\n * @example\n * ```ts\n * const sorted = sortMigrations([\n * { id: 'app/2', scope: 'record', up: (r) => r },\n * { id: 'app/1', scope: 'record', up: (r) => r },\n * { id: 'lib/1', scope: 'record', up: (r) => r, dependsOn: ['app/1'] }\n * ])\n * // Result: [app/1, app/2, lib/1] (respects both sequence and explicit deps)\n * ```\n * @public\n */\nexport function sortMigrations(migrations: Migration[]): Migration[] {\n\tif (migrations.length === 0) return []\n\n\t// Build dependency graph and calculate in-degrees\n\tconst byId = new Map(migrations.map((m) => [m.id, m]))\n\tconst dependents = new Map<MigrationId, Set<MigrationId>>() // who depends on this\n\tconst inDegree = new Map<MigrationId, number>()\n\tconst explicitDeps = new Map<MigrationId, Set<MigrationId>>() // explicit dependsOn relationships\n\n\t// Initialize\n\tfor (const m of migrations) {\n\t\tinDegree.set(m.id, 0)\n\t\tdependents.set(m.id, new Set())\n\t\texplicitDeps.set(m.id, new Set())\n\t}\n\n\t// Add implicit sequence dependencies and explicit dependencies\n\tfor (const m of migrations) {\n\t\tconst { version, sequenceId } = parseMigrationId(m.id)\n\n\t\t// Implicit dependency on previous in sequence\n\t\tconst prevId = `${sequenceId}/${version - 1}` as MigrationId\n\t\tif (byId.has(prevId)) {\n\t\t\tdependents.get(prevId)!.add(m.id)\n\t\t\tinDegree.set(m.id, inDegree.get(m.id)! + 1)\n\t\t}\n\n\t\t// Explicit dependencies\n\t\tif (m.dependsOn) {\n\t\t\tfor (const depId of m.dependsOn) {\n\t\t\t\tif (byId.has(depId)) {\n\t\t\t\t\tdependents.get(depId)!.add(m.id)\n\t\t\t\t\texplicitDeps.get(m.id)!.add(depId)\n\t\t\t\t\tinDegree.set(m.id, inDegree.get(m.id)! 
+ 1)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Priority queue: migrations ready to process (in-degree 0)\n\tconst ready = migrations.filter((m) => inDegree.get(m.id) === 0)\n\tconst result: Migration[] = []\n\tconst processed = new Set<MigrationId>()\n\n\twhile (ready.length > 0) {\n\t\t// Calculate urgency scores for ready migrations and pick the best one\n\t\tlet bestCandidate: Migration | undefined\n\t\tlet bestCandidateScore = -Infinity\n\n\t\tfor (const m of ready) {\n\t\t\tlet urgencyScore = 0\n\n\t\t\tfor (const depId of dependents.get(m.id) || []) {\n\t\t\t\tif (!processed.has(depId)) {\n\t\t\t\t\t// Priority 1: Count all unprocessed dependents (to break ties)\n\t\t\t\t\turgencyScore += 1\n\n\t\t\t\t\t// Priority 2: If this migration is explicitly depended on by others, boost priority\n\t\t\t\t\tif (explicitDeps.get(depId)!.has(m.id)) {\n\t\t\t\t\t\turgencyScore += 100\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (\n\t\t\t\turgencyScore > bestCandidateScore ||\n\t\t\t\t// Tiebreaker: prefer lower sequence/version\n\t\t\t\t(urgencyScore === bestCandidateScore && m.id.localeCompare(bestCandidate?.id ?? '') < 0)\n\t\t\t) {\n\t\t\t\tbestCandidate = m\n\t\t\t\tbestCandidateScore = urgencyScore\n\t\t\t}\n\t\t}\n\n\t\tconst nextMigration = bestCandidate!\n\t\tready.splice(ready.indexOf(nextMigration), 1)\n\n\t\t// Cycle detection - if we have processed everything and still have items left, there's a cycle\n\t\t// This is handled by Kahn's algorithm naturally - if we finish with items unprocessed, there's a cycle\n\n\t\t// Process this migration\n\t\tresult.push(nextMigration)\n\t\tprocessed.add(nextMigration.id)\n\n\t\t// Update in-degrees and add newly ready migrations\n\t\tfor (const depId of dependents.get(nextMigration.id) || []) {\n\t\t\tif (!processed.has(depId)) {\n\t\t\t\tinDegree.set(depId, inDegree.get(depId)! - 1)\n\t\t\t\tif (inDegree.get(depId) === 0) {\n\t\t\t\t\tready.push(byId.get(depId)!)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Check for cycles - if we didn't process all migrations, there's a cycle\n\tif (result.length !== migrations.length) {\n\t\tconst unprocessed = migrations.filter((m) => !processed.has(m.id))\n\t\tassert(false, `Circular dependency in migrations: ${unprocessed[0].id}`)\n\t}\n\n\treturn result\n}\n\n/**\n * Parses a migration ID to extract the sequence ID and version number.\n *\n * Migration IDs follow the format `sequenceId/version`, and this function splits\n * them into their component parts. This is used internally for sorting migrations\n * and understanding their relationships.\n * @param id - The migration ID to parse\n * @returns Object containing the sequence ID and numeric version\n * @example\n * ```ts\n * const { sequenceId, version } = parseMigrationId('com.myapp.book/5')\n * // sequenceId: 'com.myapp.book', version: 5\n * ```\n * @internal\n */\nexport function parseMigrationId(id: MigrationId): { sequenceId: string; version: number } {\n\tconst [sequenceId, version] = id.split('/')\n\treturn { sequenceId, version: parseInt(version) }\n}\n\nfunction validateMigrationId(id: string, expectedSequenceId?: string) {\n\tif (expectedSequenceId) {\n\t\tassert(\n\t\t\tid.startsWith(expectedSequenceId + '/'),\n\t\t\t`Every migration in sequence '${expectedSequenceId}' must have an id starting with '${expectedSequenceId}/'. 
Got invalid id: '${id}'`\n\t\t)\n\t}\n\n\tassert(id.match(/^(.*?)\\/(0|[1-9]\\d*)$/), `Invalid migration id: '${id}'`)\n}\n\n/**\n * Validates that a migration sequence is correctly structured.\n *\n * Performs several validation checks to ensure the migration sequence is valid:\n * - Sequence ID doesn't contain invalid characters\n * - All migration IDs belong to the expected sequence\n * - Migration versions start at 1 and increment by 1\n * - Migration IDs follow the correct format\n * @param migrations - The migration sequence to validate\n * @throws Assertion error if any validation checks fail\n * @example\n * ```ts\n * const sequence = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [{ id: 'com.myapp.book/1', scope: 'record', up: (r) => r }]\n * })\n * validateMigrations(sequence) // Passes validation\n * ```\n * @public\n */\nexport function validateMigrations(migrations: MigrationSequence) {\n\tassert(\n\t\t!migrations.sequenceId.includes('/'),\n\t\t`sequenceId cannot contain a '/', got ${migrations.sequenceId}`\n\t)\n\tassert(migrations.sequenceId.length, 'sequenceId must be a non-empty string')\n\n\tif (migrations.sequence.length === 0) {\n\t\treturn\n\t}\n\n\tvalidateMigrationId(migrations.sequence[0].id, migrations.sequenceId)\n\tlet n = parseMigrationId(migrations.sequence[0].id).version\n\tassert(\n\t\tn === 1,\n\t\t`Expected the first migrationId to be '${migrations.sequenceId}/1' but got '${migrations.sequence[0].id}'`\n\t)\n\tfor (let i = 1; i < migrations.sequence.length; i++) {\n\t\tconst id = migrations.sequence[i].id\n\t\tvalidateMigrationId(id, migrations.sequenceId)\n\t\tconst m = parseMigrationId(id).version\n\t\tassert(\n\t\t\tm === n + 1,\n\t\t\t`Migration id numbers must increase in increments of 1, expected ${migrations.sequenceId}/${n + 1} but got '${migrations.sequence[i].id}'`\n\t\t)\n\t\tn = m\n\t}\n}\n\n/**\n * Result type returned by migration operations.\n *\n * Migration operations can either succeed and return the transformed value,\n * or fail with a specific reason. This discriminated union type allows for\n * safe handling of both success and error cases when applying migrations.\n * @public\n */\nexport type MigrationResult<T> =\n\t| { type: 'success'; value: T }\n\t| { type: 'error'; reason: MigrationFailureReason }\n\n/**\n * Enumeration of possible reasons why a migration might fail.\n *\n * These reasons help identify what went wrong during migration processing,\n * allowing applications to handle different failure scenarios appropriately.\n * Common failures include incompatible data formats, unknown record types,\n * and version mismatches between the data and available migrations.\n * @public\n */\nexport enum MigrationFailureReason {\n\tIncompatibleSubtype = 'incompatible-subtype',\n\tUnknownType = 'unknown-type',\n\tTargetVersionTooNew = 'target-version-too-new',\n\tTargetVersionTooOld = 'target-version-too-old',\n\tMigrationError = 'migration-error',\n\tUnrecognizedSubtype = 'unrecognized-subtype',\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyC;
+
"sourcesContent": ["import { assert, objectMapEntries } from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { SerializedStore } from './Store'\nimport { SerializedSchema } from './StoreSchema'\n\nfunction squashDependsOn(sequence: Array<Migration | StandaloneDependsOn>): Migration[] {\n\tconst result: Migration[] = []\n\tfor (let i = sequence.length - 1; i >= 0; i--) {\n\t\tconst elem = sequence[i]\n\t\tif (!('id' in elem)) {\n\t\t\tconst dependsOn = elem.dependsOn\n\t\t\tconst prev = result[0]\n\t\t\tif (prev) {\n\t\t\t\tresult[0] = {\n\t\t\t\t\t...prev,\n\t\t\t\t\tdependsOn: dependsOn.concat(prev.dependsOn ?? []),\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tresult.unshift(elem)\n\t\t}\n\t}\n\treturn result\n}\n\n/**\n * Creates a migration sequence that defines how to transform data as your schema evolves.\n *\n * A migration sequence contains a series of migrations that are applied in order to transform\n * data from older versions to newer versions. Each migration is identified by a unique ID\n * and can operate at either the record level (transforming individual records) or store level\n * (transforming the entire store structure).\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param options - Configuration for the migration sequence\n * - sequenceId - Unique identifier for this migration sequence (e.g., 'com.myapp.book')\n * - sequence - Array of migrations or dependency declarations to include in the sequence\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added (defaults to true)\n * @returns A validated migration sequence that can be included in a store schema\n * @example\n * ```ts\n * const bookMigrations = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [\n * {\n * id: 'com.myapp.book/1',\n * scope: 'record',\n * up: (record) => ({ ...record, newField: 'default' })\n * }\n * ]\n * })\n * ```\n * @public\n */\nexport function createMigrationSequence({\n\tsequence,\n\tsequenceId,\n\tretroactive = true,\n}: {\n\tsequenceId: string\n\tretroactive?: boolean\n\tsequence: Array<Migration | StandaloneDependsOn>\n}): MigrationSequence {\n\tconst migrations: MigrationSequence = {\n\t\tsequenceId,\n\t\tretroactive,\n\t\tsequence: squashDependsOn(sequence),\n\t}\n\tvalidateMigrations(migrations)\n\treturn migrations\n}\n\n/**\n * Creates a named set of migration IDs from version numbers and a sequence ID.\n *\n * This utility function helps generate properly formatted migration IDs that follow\n * the required `sequenceId/version` pattern. 
It takes a sequence ID and a record\n * of named versions, returning migration IDs that can be used in migration definitions.\n *\n * See the [migration guide](https://tldraw.dev/docs/persistence#Migrations) for more info on how to use this API.\n * @param sequenceId - The sequence identifier (e.g., 'com.myapp.book')\n * @param versions - Record mapping version names to numbers\n * @returns Record mapping version names to properly formatted migration IDs\n * @example\n * ```ts\n * const migrationIds = createMigrationIds('com.myapp.book', {\n * addGenre: 1,\n * addPublisher: 2,\n * removeOldField: 3\n * })\n * // Result: {\n * // addGenre: 'com.myapp.book/1',\n * // addPublisher: 'com.myapp.book/2',\n * // removeOldField: 'com.myapp.book/3'\n * // }\n * ```\n * @public\n */\nexport function createMigrationIds<\n\tconst ID extends string,\n\tconst Versions extends Record<string, number>,\n>(sequenceId: ID, versions: Versions): { [K in keyof Versions]: `${ID}/${Versions[K]}` } {\n\treturn Object.fromEntries(\n\t\tobjectMapEntries(versions).map(([key, version]) => [key, `${sequenceId}/${version}`] as const)\n\t) as any\n}\n\n/**\n * Creates a migration sequence specifically for record-level migrations.\n *\n * This is a convenience function that creates a migration sequence where all migrations\n * operate at the record scope and are automatically filtered to apply only to records\n * of a specific type. Each migration in the sequence will be enhanced with the record\n * scope and appropriate filtering logic.\n * @param opts - Configuration for the record migration sequence\n * - recordType - The record type name these migrations should apply to\n * - filter - Optional additional filter function to determine which records to migrate\n * - retroactive - Whether migrations should apply to snapshots created before this sequence was added\n * - sequenceId - Unique identifier for this migration sequence\n * - sequence - Array of record migration definitions (scope will be added automatically)\n * @returns A migration sequence configured for record-level operations\n * @internal\n */\nexport function createRecordMigrationSequence(opts: {\n\trecordType: string\n\tfilter?(record: UnknownRecord): boolean\n\tretroactive?: boolean\n\tsequenceId: string\n\tsequence: Omit<Extract<Migration, { scope: 'record' }>, 'scope'>[]\n}): MigrationSequence {\n\tconst sequenceId = opts.sequenceId\n\treturn createMigrationSequence({\n\t\tsequenceId,\n\t\tretroactive: opts.retroactive ?? true,\n\t\tsequence: opts.sequence.map((m) =>\n\t\t\t'id' in m\n\t\t\t\t? {\n\t\t\t\t\t\t...m,\n\t\t\t\t\t\tscope: 'record',\n\t\t\t\t\t\tfilter: (r: UnknownRecord) =>\n\t\t\t\t\t\t\tr.typeName === opts.recordType &&\n\t\t\t\t\t\t\t(m.filter?.(r) ?? true) &&\n\t\t\t\t\t\t\t(opts.filter?.(r) ?? true),\n\t\t\t\t\t}\n\t\t\t\t: m\n\t\t),\n\t})\n}\n\n/**\n * Legacy migration interface for backward compatibility.\n *\n * This interface represents the old migration format that included both `up` and `down`\n * transformation functions. 
While still supported, new code should use the `Migration`\n * type which provides more flexibility and better integration with the current system.\n * @public\n */\nexport interface LegacyMigration<Before = any, After = any> {\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tup: (oldState: Before) => After\n\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\tdown: (newState: After) => Before\n}\n\n/**\n * Unique identifier for a migration in the format `sequenceId/version`.\n *\n * Migration IDs follow a specific pattern where the sequence ID identifies the migration\n * sequence and the version number indicates the order within that sequence. For example:\n * 'com.myapp.book/1', 'com.myapp.book/2', etc.\n * @public\n */\nexport type MigrationId = `${string}/${number}`\n\n/**\n * Declares dependencies for migrations without being a migration itself.\n *\n * This interface allows you to specify that future migrations in a sequence depend on\n * migrations from other sequences, without defining an actual migration transformation.\n * It's used to establish cross-sequence dependencies in the migration graph.\n * @public\n */\nexport interface StandaloneDependsOn {\n\treadonly dependsOn: readonly MigrationId[]\n}\n\n/**\n * Defines a single migration that transforms data from one schema version to another.\n *\n * A migration can operate at two different scopes:\n * - `record`: Transforms individual records, with optional filtering to target specific records\n * - `store`: Transforms the entire serialized store structure\n *\n * Each migration has a unique ID and can declare dependencies on other migrations that must\n * be applied first. The `up` function performs the forward transformation, while the optional\n * `down` function can reverse the migration if needed.\n * @public\n */\nexport type Migration = {\n\treadonly id: MigrationId\n\treadonly dependsOn?: readonly MigrationId[] | undefined\n} & (\n\t| {\n\t\t\treadonly scope: 'record'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly filter?: (record: UnknownRecord) => boolean\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (oldState: UnknownRecord) => void | UnknownRecord\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (newState: UnknownRecord) => void | UnknownRecord\n\t }\n\t| {\n\t\t\treadonly scope: 'store'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (\n\t\t\t\toldState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly down?: (\n\t\t\t\tnewState: SerializedStore<UnknownRecord>\n\t\t\t) => void | SerializedStore<UnknownRecord>\n\t }\n\t| {\n\t\t\treadonly scope: 'storage'\n\t\t\t// eslint-disable-next-line @typescript-eslint/method-signature-style\n\t\t\treadonly up: (storage: SynchronousRecordStorage<UnknownRecord>) => void\n\t\t\treadonly down?: never\n\t }\n)\n\n/**\n * Abstraction over the store that can be used to perform migrations.\n * @public\n */\nexport interface SynchronousRecordStorage<R extends UnknownRecord> {\n\tget(id: string): R | undefined\n\tset(id: string, record: R): void\n\tdelete(id: string): void\n\tkeys(): Iterable<string>\n\tvalues(): Iterable<R>\n\tentries(): Iterable<[string, R]>\n}\n\n/**\n * Abstraction over the storage that can be used 
to perform migrations.\n * @public\n */\nexport interface SynchronousStorage<R extends UnknownRecord> extends SynchronousRecordStorage<R> {\n\tgetSchema(): SerializedSchema\n\tsetSchema(schema: SerializedSchema): void\n}\n\n/**\n * Base interface for legacy migration information.\n *\n * Contains the basic structure used by the legacy migration system, including version\n * range information and the migration functions indexed by version number. This is\n * maintained for backward compatibility with older migration definitions.\n * @public\n */\nexport interface LegacyBaseMigrationsInfo {\n\tfirstVersion: number\n\tcurrentVersion: number\n\tmigrators: { [version: number]: LegacyMigration }\n}\n\n/**\n * Legacy migration configuration with support for sub-type migrations.\n *\n * This interface extends the base legacy migration info to support migrations that\n * vary based on a sub-type key within records. This allows different migration paths\n * for different variants of the same record type, which was useful in older migration\n * systems but is now handled more elegantly by the current Migration system.\n * @public\n */\nexport interface LegacyMigrations extends LegacyBaseMigrationsInfo {\n\tsubTypeKey?: string\n\tsubTypeMigrations?: Record<string, LegacyBaseMigrationsInfo>\n}\n\n/**\n * A complete sequence of migrations that can be applied to transform data.\n *\n * A migration sequence represents a series of ordered migrations that belong together,\n * typically for a specific part of your schema. The sequence includes metadata about\n * whether it should be applied retroactively to existing data and contains the actual\n * migration definitions in execution order.\n * @public\n */\nexport interface MigrationSequence {\n\tsequenceId: string\n\t/**\n\t * retroactive should be true if the migrations should be applied to snapshots that were created before\n\t * this migration sequence was added to the schema.\n\t *\n\t * In general:\n\t *\n\t * - retroactive should be true when app developers create their own new migration sequences.\n\t * - retroactive should be false when library developers ship a migration sequence. When you install a library for the first time, any migrations that were added in the library before that point should generally _not_ be applied to your existing data.\n\t */\n\tretroactive: boolean\n\tsequence: Migration[]\n}\n\n/**\n * Sorts migrations using a distance-minimizing topological sort.\n *\n * This function respects two types of dependencies:\n * 1. Implicit sequence dependencies (foo/1 must come before foo/2)\n * 2. 
Explicit dependencies via `dependsOn` property\n *\n * The algorithm minimizes the total distance between migrations and their explicit\n * dependencies in the final ordering, while maintaining topological correctness.\n * This means when migration A depends on migration B, A will be scheduled as close\n * as possible to B (while respecting all constraints).\n *\n * Implementation uses Kahn's algorithm with priority scoring:\n * - Builds dependency graph and calculates in-degrees\n * - Uses priority queue that prioritizes migrations which unblock explicit dependencies\n * - Processes migrations in urgency order while maintaining topological constraints\n * - Detects cycles by ensuring all migrations are processed\n *\n * @param migrations - Array of migrations to sort\n * @returns Sorted array of migrations in execution order\n * @throws Assertion error if circular dependencies are detected\n * @example\n * ```ts\n * const sorted = sortMigrations([\n * { id: 'app/2', scope: 'record', up: (r) => r },\n * { id: 'app/1', scope: 'record', up: (r) => r },\n * { id: 'lib/1', scope: 'record', up: (r) => r, dependsOn: ['app/1'] }\n * ])\n * // Result: [app/1, app/2, lib/1] (respects both sequence and explicit deps)\n * ```\n * @public\n */\nexport function sortMigrations(migrations: Migration[]): Migration[] {\n\tif (migrations.length === 0) return []\n\n\t// Build dependency graph and calculate in-degrees\n\tconst byId = new Map(migrations.map((m) => [m.id, m]))\n\tconst dependents = new Map<MigrationId, Set<MigrationId>>() // who depends on this\n\tconst inDegree = new Map<MigrationId, number>()\n\tconst explicitDeps = new Map<MigrationId, Set<MigrationId>>() // explicit dependsOn relationships\n\n\t// Initialize\n\tfor (const m of migrations) {\n\t\tinDegree.set(m.id, 0)\n\t\tdependents.set(m.id, new Set())\n\t\texplicitDeps.set(m.id, new Set())\n\t}\n\n\t// Add implicit sequence dependencies and explicit dependencies\n\tfor (const m of migrations) {\n\t\tconst { version, sequenceId } = parseMigrationId(m.id)\n\n\t\t// Implicit dependency on previous in sequence\n\t\tconst prevId = `${sequenceId}/${version - 1}` as MigrationId\n\t\tif (byId.has(prevId)) {\n\t\t\tdependents.get(prevId)!.add(m.id)\n\t\t\tinDegree.set(m.id, inDegree.get(m.id)! + 1)\n\t\t}\n\n\t\t// Explicit dependencies\n\t\tif (m.dependsOn) {\n\t\t\tfor (const depId of m.dependsOn) {\n\t\t\t\tif (byId.has(depId)) {\n\t\t\t\t\tdependents.get(depId)!.add(m.id)\n\t\t\t\t\texplicitDeps.get(m.id)!.add(depId)\n\t\t\t\t\tinDegree.set(m.id, inDegree.get(m.id)! 
+ 1)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Priority queue: migrations ready to process (in-degree 0)\n\tconst ready = migrations.filter((m) => inDegree.get(m.id) === 0)\n\tconst result: Migration[] = []\n\tconst processed = new Set<MigrationId>()\n\n\twhile (ready.length > 0) {\n\t\t// Calculate urgency scores for ready migrations and pick the best one\n\t\tlet bestCandidate: Migration | undefined\n\t\tlet bestCandidateScore = -Infinity\n\n\t\tfor (const m of ready) {\n\t\t\tlet urgencyScore = 0\n\n\t\t\tfor (const depId of dependents.get(m.id) || []) {\n\t\t\t\tif (!processed.has(depId)) {\n\t\t\t\t\t// Priority 1: Count all unprocessed dependents (to break ties)\n\t\t\t\t\turgencyScore += 1\n\n\t\t\t\t\t// Priority 2: If this migration is explicitly depended on by others, boost priority\n\t\t\t\t\tif (explicitDeps.get(depId)!.has(m.id)) {\n\t\t\t\t\t\turgencyScore += 100\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (\n\t\t\t\turgencyScore > bestCandidateScore ||\n\t\t\t\t// Tiebreaker: prefer lower sequence/version\n\t\t\t\t(urgencyScore === bestCandidateScore && m.id.localeCompare(bestCandidate?.id ?? '') < 0)\n\t\t\t) {\n\t\t\t\tbestCandidate = m\n\t\t\t\tbestCandidateScore = urgencyScore\n\t\t\t}\n\t\t}\n\n\t\tconst nextMigration = bestCandidate!\n\t\tready.splice(ready.indexOf(nextMigration), 1)\n\n\t\t// Cycle detection - if we have processed everything and still have items left, there's a cycle\n\t\t// This is handled by Kahn's algorithm naturally - if we finish with items unprocessed, there's a cycle\n\n\t\t// Process this migration\n\t\tresult.push(nextMigration)\n\t\tprocessed.add(nextMigration.id)\n\n\t\t// Update in-degrees and add newly ready migrations\n\t\tfor (const depId of dependents.get(nextMigration.id) || []) {\n\t\t\tif (!processed.has(depId)) {\n\t\t\t\tinDegree.set(depId, inDegree.get(depId)! - 1)\n\t\t\t\tif (inDegree.get(depId) === 0) {\n\t\t\t\t\tready.push(byId.get(depId)!)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Check for cycles - if we didn't process all migrations, there's a cycle\n\tif (result.length !== migrations.length) {\n\t\tconst unprocessed = migrations.filter((m) => !processed.has(m.id))\n\t\tassert(false, `Circular dependency in migrations: ${unprocessed[0].id}`)\n\t}\n\n\treturn result\n}\n\n/**\n * Parses a migration ID to extract the sequence ID and version number.\n *\n * Migration IDs follow the format `sequenceId/version`, and this function splits\n * them into their component parts. This is used internally for sorting migrations\n * and understanding their relationships.\n * @param id - The migration ID to parse\n * @returns Object containing the sequence ID and numeric version\n * @example\n * ```ts\n * const { sequenceId, version } = parseMigrationId('com.myapp.book/5')\n * // sequenceId: 'com.myapp.book', version: 5\n * ```\n * @internal\n */\nexport function parseMigrationId(id: MigrationId): { sequenceId: string; version: number } {\n\tconst [sequenceId, version] = id.split('/')\n\treturn { sequenceId, version: parseInt(version) }\n}\n\nfunction validateMigrationId(id: string, expectedSequenceId?: string) {\n\tif (expectedSequenceId) {\n\t\tassert(\n\t\t\tid.startsWith(expectedSequenceId + '/'),\n\t\t\t`Every migration in sequence '${expectedSequenceId}' must have an id starting with '${expectedSequenceId}/'. 
Got invalid id: '${id}'`\n\t\t)\n\t}\n\n\tassert(id.match(/^(.*?)\\/(0|[1-9]\\d*)$/), `Invalid migration id: '${id}'`)\n}\n\n/**\n * Validates that a migration sequence is correctly structured.\n *\n * Performs several validation checks to ensure the migration sequence is valid:\n * - Sequence ID doesn't contain invalid characters\n * - All migration IDs belong to the expected sequence\n * - Migration versions start at 1 and increment by 1\n * - Migration IDs follow the correct format\n * @param migrations - The migration sequence to validate\n * @throws Assertion error if any validation checks fail\n * @example\n * ```ts\n * const sequence = createMigrationSequence({\n * sequenceId: 'com.myapp.book',\n * sequence: [{ id: 'com.myapp.book/1', scope: 'record', up: (r) => r }]\n * })\n * validateMigrations(sequence) // Passes validation\n * ```\n * @public\n */\nexport function validateMigrations(migrations: MigrationSequence) {\n\tassert(\n\t\t!migrations.sequenceId.includes('/'),\n\t\t`sequenceId cannot contain a '/', got ${migrations.sequenceId}`\n\t)\n\tassert(migrations.sequenceId.length, 'sequenceId must be a non-empty string')\n\n\tif (migrations.sequence.length === 0) {\n\t\treturn\n\t}\n\n\tvalidateMigrationId(migrations.sequence[0].id, migrations.sequenceId)\n\tlet n = parseMigrationId(migrations.sequence[0].id).version\n\tassert(\n\t\tn === 1,\n\t\t`Expected the first migrationId to be '${migrations.sequenceId}/1' but got '${migrations.sequence[0].id}'`\n\t)\n\tfor (let i = 1; i < migrations.sequence.length; i++) {\n\t\tconst id = migrations.sequence[i].id\n\t\tvalidateMigrationId(id, migrations.sequenceId)\n\t\tconst m = parseMigrationId(id).version\n\t\tassert(\n\t\t\tm === n + 1,\n\t\t\t`Migration id numbers must increase in increments of 1, expected ${migrations.sequenceId}/${n + 1} but got '${migrations.sequence[i].id}'`\n\t\t)\n\t\tn = m\n\t}\n}\n\n/**\n * Result type returned by migration operations.\n *\n * Migration operations can either succeed and return the transformed value,\n * or fail with a specific reason. This discriminated union type allows for\n * safe handling of both success and error cases when applying migrations.\n * @public\n */\nexport type MigrationResult<T> =\n\t| { type: 'success'; value: T }\n\t| { type: 'error'; reason: MigrationFailureReason }\n\n/**\n * Enumeration of possible reasons why a migration might fail.\n *\n * These reasons help identify what went wrong during migration processing,\n * allowing applications to handle different failure scenarios appropriately.\n * Common failures include incompatible data formats, unknown record types,\n * and version mismatches between the data and available migrations.\n * @public\n */\nexport enum MigrationFailureReason {\n\tIncompatibleSubtype = 'incompatible-subtype',\n\tUnknownType = 'unknown-type',\n\tTargetVersionTooNew = 'target-version-too-new',\n\tTargetVersionTooOld = 'target-version-too-old',\n\tMigrationError = 'migration-error',\n\tUnrecognizedSubtype = 'unrecognized-subtype',\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyC;AAKzC,SAAS,gBAAgB,UAA+D;AACvF,QAAM,SAAsB,CAAC;AAC7B,WAAS,IAAI,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AAC9C,UAAM,OAAO,SAAS,CAAC;AACvB,QAAI,EAAE,QAAQ,OAAO;AACpB,YAAM,YAAY,KAAK;AACvB,YAAM,OAAO,OAAO,CAAC;AACrB,UAAI,MAAM;AACT,eAAO,CAAC,IAAI;AAAA,UACX,GAAG;AAAA,UACH,WAAW,UAAU,OAAO,KAAK,aAAa,CAAC,CAAC;AAAA,QACjD;AAAA,MACD;AAAA,IACD,OAAO;AACN,aAAO,QAAQ,IAAI;AAAA,IACpB;AAAA,EACD;AACA,SAAO;AACR;AA+BO,SAAS,wBAAwB;AAAA,EACvC;AAAA,EACA;AAAA,EACA,cAAc;AACf,GAIsB;AACrB,QAAM,aAAgC;AAAA,IACrC;AAAA,IACA;AAAA,IACA,UAAU,gBAAgB,QAAQ;AAAA,EACnC;AACA,qBAAmB,UAAU;AAC7B,SAAO;AACR;AA4BO,SAAS,mBAGd,YAAgB,UAAuE;AACxF,SAAO,OAAO;AAAA,QACb,+BAAiB,QAAQ,EAAE,IAAI,CAAC,CAAC,KAAK,OAAO,MAAM,CAAC,KAAK,GAAG,UAAU,IAAI,OAAO,EAAE,CAAU;AAAA,EAC9F;AACD;AAkBO,SAAS,8BAA8B,MAMxB;AACrB,QAAM,aAAa,KAAK;AACxB,SAAO,wBAAwB;AAAA,IAC9B;AAAA,IACA,aAAa,KAAK,eAAe;AAAA,IACjC,UAAU,KAAK,SAAS;AAAA,MAAI,CAAC,MAC5B,QAAQ,IACL;AAAA,QACA,GAAG;AAAA,QACH,OAAO;AAAA,QACP,QAAQ,CAAC,MACR,EAAE,aAAa,KAAK,eACnB,EAAE,SAAS,CAAC,KAAK,UACjB,KAAK,SAAS,CAAC,KAAK;AAAA,MACvB,IACC;AAAA,IACJ;AAAA,EACD,CAAC;AACF;AA6LO,SAAS,eAAe,YAAsC;AACpE,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAGrC,QAAM,OAAO,IAAI,IAAI,WAAW,IAAI,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC;AACrD,QAAM,aAAa,oBAAI,IAAmC;AAC1D,QAAM,WAAW,oBAAI,IAAyB;AAC9C,QAAM,eAAe,oBAAI,IAAmC;AAG5D,aAAW,KAAK,YAAY;AAC3B,aAAS,IAAI,EAAE,IAAI,CAAC;AACpB,eAAW,IAAI,EAAE,IAAI,oBAAI,IAAI,CAAC;AAC9B,iBAAa,IAAI,EAAE,IAAI,oBAAI,IAAI,CAAC;AAAA,EACjC;AAGA,aAAW,KAAK,YAAY;AAC3B,UAAM,EAAE,SAAS,WAAW,IAAI,iBAAiB,EAAE,EAAE;AAGrD,UAAM,SAAS,GAAG,UAAU,IAAI,UAAU,CAAC;AAC3C,QAAI,KAAK,IAAI,MAAM,GAAG;AACrB,iBAAW,IAAI,MAAM,EAAG,IAAI,EAAE,EAAE;AAChC,eAAS,IAAI,EAAE,IAAI,SAAS,IAAI,EAAE,EAAE,IAAK,CAAC;AAAA,IAC3C;AAGA,QAAI,EAAE,WAAW;AAChB,iBAAW,SAAS,EAAE,WAAW;AAChC,YAAI,KAAK,IAAI,KAAK,GAAG;AACpB,qBAAW,IAAI,KAAK,EAAG,IAAI,EAAE,EAAE;AAC/B,uBAAa,IAAI,EAAE,EAAE,EAAG,IAAI,KAAK;AACjC,mBAAS,IAAI,EAAE,IAAI,SAAS,IAAI,EAAE,EAAE,IAAK,CAAC;AAAA,QAC3C;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,QAAM,QAAQ,WAAW,OAAO,CAAC,MAAM,SAAS,IAAI,EAAE,EAAE,MAAM,CAAC;AAC/D,QAAM,SAAsB,CAAC;AAC7B,QAAM,YAAY,oBAAI,IAAiB;AAEvC,SAAO,MAAM,SAAS,GAAG;AAExB,QAAI;AACJ,QAAI,qBAAqB;AAEzB,eAAW,KAAK,OAAO;AACtB,UAAI,eAAe;AAEnB,iBAAW,SAAS,WAAW,IAAI,EAAE,EAAE,KAAK,CAAC,GAAG;AAC/C,YAAI,CAAC,UAAU,IAAI,KAAK,GAAG;AAE1B,0BAAgB;AAGhB,cAAI,aAAa,IAAI,KAAK,EAAG,IAAI,EAAE,EAAE,GAAG;AACvC,4BAAgB;AAAA,UACjB;AAAA,QACD;AAAA,MACD;AAEA,UACC,eAAe;AAAA,MAEd,iBAAiB,sBAAsB,EAAE,GAAG,cAAc,eAAe,MAAM,EAAE,IAAI,GACrF;AACD,wBAAgB;AAChB,6BAAqB;AAAA,MACtB;AAAA,IACD;AAEA,UAAM,gBAAgB;AACtB,UAAM,OAAO,MAAM,QAAQ,aAAa,GAAG,CAAC;AAM5C,WAAO,KAAK,aAAa;AACzB,cAAU,IAAI,cAAc,EAAE;AAG9B,eAAW,SAAS,WAAW,IAAI,cAAc,EAAE,KAAK,CAAC,GAAG;AAC3D,UAAI,CAAC,UAAU,IAAI,KAAK,GAAG;AAC1B,iBAAS,IAAI,OAAO,SAAS,IAAI,KAAK,IAAK,CAAC;AAC5C,YAAI,SAAS,IAAI,KAAK,MAAM,GAAG;AAC9B,gBAAM,KAAK,KAAK,IAAI,KAAK,CAAE;AAAA,QAC5B;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,MAAI,OAAO,WAAW,WAAW,QAAQ;AACxC,UAAM,cAAc,WAAW,OAAO,CAAC,MAAM,CAAC,UAAU,IAAI,EAAE,EAAE,CAAC;AACjE,6BAAO,OAAO,sCAAsC,YAAY,CAAC,EAAE,EAAE,EAAE;AAAA,EACxE;AAEA,SAAO;AACR;AAiBO,SAAS,iBAAiB,IAA0D;AAC1F,QAAM,CAAC,YAAY,OAAO,IAAI,GAAG,MAAM,GAAG;AAC1C,SAAO,EAAE,YAAY,SAAS,SAAS,OAAO,EAAE;AACjD;AAEA,SAAS,oBAAoB,IAAY,oBAA6B;AACrE,MAAI,oBAAoB;AACvB;AAAA,MACC,GAAG,WAAW,qBAAqB,GAAG;AAAA,MACtC,gCAAgC,kBAAkB,oCAAoC,kBAAkB,wBAAwB,EAAE;AAAA,IACnI;AAAA,EACD;AAEA,2BAAO,GAAG,MAAM,uBAAuB,GAAG,0BAA0B,EAAE,GAAG;AAC1E;AAsBO,SAAS,mBAAmB,YAA+B;AACjE;AAAA,IACC,CAAC,WAAW,WAAW,SAAS,GAAG;AAAA,IACnC,wCAAwC,WAAW,UAAU;AAAA,EAC9D;AACA,2BAAO,WAAW,WAAW,QAAQ,uCAAuC;AAE5E,MAAI,WAAW,SAAS,WAAW,GA
AG;AACrC;AAAA,EACD;AAEA,sBAAoB,WAAW,SAAS,CAAC,EAAE,IAAI,WAAW,UAAU;AACpE,MAAI,IAAI,iBAAiB,WAAW,SAAS,CAAC,EAAE,EAAE,EAAE;AACpD;AAAA,IACC,MAAM;AAAA,IACN,yCAAyC,WAAW,UAAU,gBAAgB,WAAW,SAAS,CAAC,EAAE,EAAE;AAAA,EACxG;AACA,WAAS,IAAI,GAAG,IAAI,WAAW,SAAS,QAAQ,KAAK;AACpD,UAAM,KAAK,WAAW,SAAS,CAAC,EAAE;AAClC,wBAAoB,IAAI,WAAW,UAAU;AAC7C,UAAM,IAAI,iBAAiB,EAAE,EAAE;AAC/B;AAAA,MACC,MAAM,IAAI;AAAA,MACV,mEAAmE,WAAW,UAAU,IAAI,IAAI,CAAC,aAAa,WAAW,SAAS,CAAC,EAAE,EAAE;AAAA,IACxI;AACA,QAAI;AAAA,EACL;AACD;AAuBO,IAAK,yBAAL,kBAAKA,4BAAL;AACN,EAAAA,wBAAA,yBAAsB;AACtB,EAAAA,wBAAA,iBAAc;AACd,EAAAA,wBAAA,yBAAsB;AACtB,EAAAA,wBAAA,yBAAsB;AACtB,EAAAA,wBAAA,oBAAiB;AACjB,EAAAA,wBAAA,yBAAsB;AANX,SAAAA;AAAA,GAAA;",
"names": ["MigrationFailureReason"]
}

package/dist-esm/index.d.mts
CHANGED

@@ -281,6 +281,27 @@ export declare class AtomMap<K, V> implements Map<K, V> {
     [Symbol.toStringTag]: string;
 }
 
+/**
+ * A drop-in replacement for Set that stores values in atoms and can be used in reactive contexts.
+ * @public
+ */
+export declare class AtomSet<T> {
+    private readonly name;
+    private readonly map;
+    constructor(name: string, keys?: Iterable<T>);
+    add(value: T): this;
+    clear(): void;
+    delete(value: T): boolean;
+    forEach(callbackfn: (value: T, value2: T, set: AtomSet<T>) => void, thisArg?: any): void;
+    has(value: T): boolean;
+    get size(): number;
+    entries(): Generator<[T, T], undefined, unknown>;
+    keys(): Generator<T, undefined, unknown>;
+    values(): Generator<T, undefined, unknown>;
+    [Symbol.iterator](): Generator<T, undefined, unknown>;
+    [Symbol.toStringTag]: string;
+}
+
 /**
  * The base record interface that all records in the store must extend.
  * This interface provides the fundamental structure required for all records: a unique ID and a type name.
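
For orientation, here is a minimal usage sketch of the new AtomSet. It is not taken from the package: the `computed` and `react` helpers are assumed to come from the companion `@tldraw/state` package, and only the `AtomSet` API shown in the hunk above is from this diff.

```ts
import { AtomSet } from '@tldraw/store'
// Assumption: the reactive helpers come from the companion @tldraw/state package.
import { computed, react } from '@tldraw/state'

// AtomSet behaves like a Set, but reads are tracked in reactive contexts.
const selectedIds = new AtomSet<string>('selectedIds', ['shape:a'])

const selectionCount = computed('selectionCount', () => selectedIds.size)

react('log selection', () => {
	console.log('selected:', [...selectedIds], 'count:', selectionCount.get())
})

selectedIds.add('shape:b') // re-runs the reaction
selectedIds.delete('shape:a') // re-runs it again
```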
@@ -680,6 +701,10 @@ export declare type Migration = {
     readonly filter?: (record: UnknownRecord) => boolean;
     readonly scope: 'record';
     readonly up: (oldState: UnknownRecord) => UnknownRecord | void;
+} | {
+    readonly down?: never;
+    readonly scope: 'storage';
+    readonly up: (storage: SynchronousRecordStorage<UnknownRecord>) => void;
 });
 
 /**
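
The new 'storage' scope lets a migration walk records imperatively through the SynchronousRecordStorage abstraction instead of receiving a serialized snapshot. A hedged sketch of what such a migration could look like (the sequence id, record type, and field are illustrative, not from the package):

```ts
import { createMigrationSequence } from '@tldraw/store'

const bookMigrations = createMigrationSequence({
	sequenceId: 'com.myapp.book',
	sequence: [
		{
			id: 'com.myapp.book/1',
			scope: 'storage',
			// `storage` exposes get/set/delete/keys/values/entries, per the
			// SynchronousRecordStorage interface added later in this diff.
			up: (storage) => {
				for (const [id, record] of storage.entries()) {
					if (record.typeName !== 'book') continue
					storage.set(id, { ...record, newField: 'default' })
				}
			},
		},
	],
})
```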
@@ -2397,6 +2422,7 @@ export declare class StoreSchema<R extends UnknownRecord, P = unknown> {
      * @public
      */
     migratePersistedRecord(record: R, persistedSchema: SerializedSchema, direction?: 'down' | 'up'): MigrationResult<R>;
+    migrateStorage(storage: SynchronousStorage<R>): void;
     /**
      * Migrates an entire store snapshot to match the current schema version.
      *
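
A small sketch of a call site for the new migrateStorage method; `mySchema` and `storage` are placeholders, and `storage` must implement the SynchronousStorage interface added further down in this file (an in-memory sketch of it follows that hunk):

```ts
// `storage` exposes the persisted schema via getSchema()/setSchema() plus
// record access; migrateStorage returns void, so the records are presumably
// brought up to date in place.
mySchema.migrateStorage(storage)
```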
@@ -2588,7 +2614,7 @@ export declare class StoreSideEffects<R extends UnknownRecord> {
      * ```ts
      * editor.sideEffects.registerAfterCreateHandler('page', (page, source) => {
      *   // Automatically create a shape when a page is created
-     *   editor.createShape
+     *   editor.createShape({
      *     id: createShapeId(),
      *     type: 'text',
      *     props: { richText: toRichText(page.name) },
@@ -2850,6 +2876,28 @@ export declare type StoreValidators<R extends UnknownRecord> = {
     }>>;
 };
 
+/**
+ * Abstraction over the store that can be used to perform migrations.
+ * @public
+ */
+export declare interface SynchronousRecordStorage<R extends UnknownRecord> {
+    get(id: string): R | undefined;
+    set(id: string, record: R): void;
+    delete(id: string): void;
+    keys(): Iterable<string>;
+    values(): Iterable<R>;
+    entries(): Iterable<[string, R]>;
+}
+
+/**
+ * Abstraction over the storage that can be used to perform migrations.
+ * @public
+ */
+export declare interface SynchronousStorage<R extends UnknownRecord> extends SynchronousRecordStorage<R> {
+    getSchema(): SerializedSchema;
+    setSchema(schema: SerializedSchema): void;
+}
+
 /**
  * A generic type representing any record that extends BaseRecord.
  * This is useful for type constraints when you need to work with records of unknown types,
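
To make the new interfaces concrete, here is a minimal in-memory SynchronousStorage sketch that could be handed to StoreSchema.migrateStorage. MemoryStorage is illustrative only and not part of the package:

```ts
import type { SerializedSchema, SynchronousStorage, UnknownRecord } from '@tldraw/store'

// A Map-backed SynchronousStorage, for illustration. A real adapter would wrap
// whatever persistence layer (IndexedDB, SQLite, ...) actually holds the records.
class MemoryStorage<R extends UnknownRecord> implements SynchronousStorage<R> {
	private records = new Map<string, R>()
	constructor(private schema: SerializedSchema) {}

	get(id: string) {
		return this.records.get(id)
	}
	set(id: string, record: R) {
		this.records.set(id, record)
	}
	delete(id: string) {
		this.records.delete(id)
	}
	keys() {
		return this.records.keys()
	}
	values() {
		return this.records.values()
	}
	entries() {
		return this.records.entries()
	}
	getSchema() {
		return this.schema
	}
	setSchema(schema: SerializedSchema) {
		this.schema = schema
	}
}

// Usage sketch (placeholder names): migrate persisted records to the current schema.
// const storage = new MemoryStorage<MyRecord>(persistedSchema)
// mySchema.migrateStorage(storage)
```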

package/dist-esm/index.mjs
CHANGED

@@ -1,5 +1,6 @@
 import { registerTldrawLibraryVersion } from "@tldraw/utils";
 import { AtomMap } from "./lib/AtomMap.mjs";
+import { AtomSet } from "./lib/AtomSet.mjs";
 import { devFreeze } from "./lib/devFreeze.mjs";
 import { IncrementalSetConstructor } from "./lib/IncrementalSetConstructor.mjs";
 import {
@@ -28,11 +29,12 @@ import {
 } from "./lib/StoreSideEffects.mjs";
 registerTldrawLibraryVersion(
   "@tldraw/store",
-  "4.3.0-next.
+  "4.3.0-next.842fb21476f2",
   "esm"
 );
 export {
   AtomMap,
+  AtomSet,
   IncrementalSetConstructor,
   MigrationFailureReason,
   RecordType,

package/dist-esm/index.mjs.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../src/index.ts"],
-
"sourcesContent": ["import { registerTldrawLibraryVersion } from '@tldraw/utils'\nexport { AtomMap } from './lib/AtomMap'\nexport type { BaseRecord, IdOf, RecordId, UnknownRecord } from './lib/BaseRecord'\nexport { devFreeze } from './lib/devFreeze'\nexport { type QueryExpression, type QueryValueMatcher } from './lib/executeQuery'\nexport { IncrementalSetConstructor } from './lib/IncrementalSetConstructor'\nexport {\n\tcreateMigrationIds,\n\tcreateMigrationSequence,\n\tcreateRecordMigrationSequence,\n\tMigrationFailureReason,\n\tparseMigrationId,\n\ttype LegacyBaseMigrationsInfo,\n\ttype LegacyMigration,\n\ttype LegacyMigrations,\n\ttype Migration,\n\ttype MigrationId,\n\ttype MigrationResult,\n\ttype MigrationSequence,\n\ttype StandaloneDependsOn,\n} from './lib/migrate'\nexport {\n\tcreateEmptyRecordsDiff,\n\tisRecordsDiffEmpty,\n\treverseRecordsDiff,\n\tsquashRecordDiffs,\n\tsquashRecordDiffsMutable,\n\ttype RecordsDiff,\n} from './lib/RecordsDiff'\nexport { assertIdType, createRecordType, RecordType, type RecordScope } from './lib/RecordType'\nexport {\n\tcreateComputedCache,\n\tStore,\n\ttype ChangeSource,\n\ttype CollectionDiff,\n\ttype ComputedCache,\n\ttype CreateComputedCacheOpts,\n\ttype HistoryEntry,\n\ttype RecordFromId,\n\ttype SerializedStore,\n\ttype StoreError,\n\ttype StoreListener,\n\ttype StoreListenerFilters,\n\ttype StoreObject,\n\ttype StoreObjectRecordType,\n\ttype StoreRecord,\n\ttype StoreSnapshot,\n\ttype StoreValidator,\n\ttype StoreValidators,\n} from './lib/Store'\nexport { StoreQueries, type RSIndex, type RSIndexDiff, type RSIndexMap } from './lib/StoreQueries'\nexport { StoreSchema, type StoreValidationFailure } from './lib/StoreSchema'\nexport type {\n\tSerializedSchema,\n\tSerializedSchemaV1,\n\tSerializedSchemaV2,\n\tStoreSchemaOptions,\n} from './lib/StoreSchema'\nexport {\n\tStoreSideEffects,\n\ttype StoreAfterChangeHandler,\n\ttype StoreAfterCreateHandler,\n\ttype StoreAfterDeleteHandler,\n\ttype StoreBeforeChangeHandler,\n\ttype StoreBeforeCreateHandler,\n\ttype StoreBeforeDeleteHandler,\n\ttype StoreOperationCompleteHandler,\n} from './lib/StoreSideEffects'\n\nregisterTldrawLibraryVersion(\n\t(globalThis as any).TLDRAW_LIBRARY_NAME,\n\t(globalThis as any).TLDRAW_LIBRARY_VERSION,\n\t(globalThis as any).TLDRAW_LIBRARY_MODULES\n)\n"],
-
"mappings": "AAAA,SAAS,oCAAoC;AAC7C,SAAS,eAAe;AAExB,SAAS,iBAAiB;AAE1B,SAAS,iCAAiC;AAC1C;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,
+
"sourcesContent": ["import { registerTldrawLibraryVersion } from '@tldraw/utils'\nexport { AtomMap } from './lib/AtomMap'\nexport { AtomSet } from './lib/AtomSet'\nexport type { BaseRecord, IdOf, RecordId, UnknownRecord } from './lib/BaseRecord'\nexport { devFreeze } from './lib/devFreeze'\nexport { type QueryExpression, type QueryValueMatcher } from './lib/executeQuery'\nexport { IncrementalSetConstructor } from './lib/IncrementalSetConstructor'\nexport {\n\tcreateMigrationIds,\n\tcreateMigrationSequence,\n\tcreateRecordMigrationSequence,\n\tMigrationFailureReason,\n\tparseMigrationId,\n\ttype LegacyBaseMigrationsInfo,\n\ttype LegacyMigration,\n\ttype LegacyMigrations,\n\ttype Migration,\n\ttype MigrationId,\n\ttype MigrationResult,\n\ttype MigrationSequence,\n\ttype StandaloneDependsOn,\n\ttype SynchronousRecordStorage,\n\ttype SynchronousStorage,\n} from './lib/migrate'\nexport {\n\tcreateEmptyRecordsDiff,\n\tisRecordsDiffEmpty,\n\treverseRecordsDiff,\n\tsquashRecordDiffs,\n\tsquashRecordDiffsMutable,\n\ttype RecordsDiff,\n} from './lib/RecordsDiff'\nexport { assertIdType, createRecordType, RecordType, type RecordScope } from './lib/RecordType'\nexport {\n\tcreateComputedCache,\n\tStore,\n\ttype ChangeSource,\n\ttype CollectionDiff,\n\ttype ComputedCache,\n\ttype CreateComputedCacheOpts,\n\ttype HistoryEntry,\n\ttype RecordFromId,\n\ttype SerializedStore,\n\ttype StoreError,\n\ttype StoreListener,\n\ttype StoreListenerFilters,\n\ttype StoreObject,\n\ttype StoreObjectRecordType,\n\ttype StoreRecord,\n\ttype StoreSnapshot,\n\ttype StoreValidator,\n\ttype StoreValidators,\n} from './lib/Store'\nexport { StoreQueries, type RSIndex, type RSIndexDiff, type RSIndexMap } from './lib/StoreQueries'\nexport { StoreSchema, type StoreValidationFailure } from './lib/StoreSchema'\nexport type {\n\tSerializedSchema,\n\tSerializedSchemaV1,\n\tSerializedSchemaV2,\n\tStoreSchemaOptions,\n} from './lib/StoreSchema'\nexport {\n\tStoreSideEffects,\n\ttype StoreAfterChangeHandler,\n\ttype StoreAfterCreateHandler,\n\ttype StoreAfterDeleteHandler,\n\ttype StoreBeforeChangeHandler,\n\ttype StoreBeforeCreateHandler,\n\ttype StoreBeforeDeleteHandler,\n\ttype StoreOperationCompleteHandler,\n} from './lib/StoreSideEffects'\n\nregisterTldrawLibraryVersion(\n\t(globalThis as any).TLDRAW_LIBRARY_NAME,\n\t(globalThis as any).TLDRAW_LIBRARY_VERSION,\n\t(globalThis as any).TLDRAW_LIBRARY_MODULES\n)\n"],
|
|
5
|
+
"mappings": "AAAA,SAAS,oCAAoC;AAC7C,SAAS,eAAe;AACxB,SAAS,eAAe;AAExB,SAAS,iBAAiB;AAE1B,SAAS,iCAAiC;AAC1C;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAWM;AACP;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEM;AACP,SAAS,cAAc,kBAAkB,kBAAoC;AAC7E;AAAA,EACC;AAAA,EACA;AAAA,OAiBM;AACP,SAAS,oBAAqE;AAC9E,SAAS,mBAAgD;AAOzD;AAAA,EACC;AAAA,OAQM;AAEP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
|
@@ -0,0 +1,48 @@
+import { AtomMap } from "./AtomMap.mjs";
+class AtomSet {
+  constructor(name, keys) {
+    this.name = name;
+    const entries = keys ? Array.from(keys, (k) => [k, k]) : void 0;
+    this.map = new AtomMap(name, entries);
+  }
+  map;
+  add(value) {
+    this.map.set(value, value);
+    return this;
+  }
+  clear() {
+    this.map.clear();
+  }
+  delete(value) {
+    return this.map.delete(value);
+  }
+  forEach(callbackfn, thisArg) {
+    for (const value of this) {
+      callbackfn.call(thisArg, value, value, this);
+    }
+  }
+  has(value) {
+    return this.map.has(value);
+  }
+  // eslint-disable-next-line no-restricted-syntax
+  get size() {
+    return this.map.size;
+  }
+  entries() {
+    return this.map.entries();
+  }
+  keys() {
+    return this.map.keys();
+  }
+  values() {
+    return this.map.keys();
+  }
+  [Symbol.iterator]() {
+    return this.map.keys();
+  }
+  [Symbol.toStringTag] = "AtomSet";
+}
+export {
+  AtomSet
+};
+//# sourceMappingURL=AtomSet.mjs.map
@@ -0,0 +1,7 @@
+{
+"version": 3,
+"sources": ["../../src/lib/AtomSet.ts"],
+"sourcesContent": ["import { AtomMap } from './AtomMap'\n\n/**\n * A drop-in replacement for Set that stores values in atoms and can be used in reactive contexts.\n * @public\n */\nexport class AtomSet<T> {\n\tprivate readonly map: AtomMap<T, T>\n\tconstructor(\n\t\tprivate readonly name: string,\n\t\tkeys?: Iterable<T>\n\t) {\n\t\tconst entries = keys ? Array.from(keys, (k) => [k, k] as const) : undefined\n\t\tthis.map = new AtomMap(name, entries)\n\t}\n\n\tadd(value: T): this {\n\t\tthis.map.set(value, value)\n\t\treturn this\n\t}\n\tclear(): void {\n\t\tthis.map.clear()\n\t}\n\tdelete(value: T): boolean {\n\t\treturn this.map.delete(value)\n\t}\n\tforEach(callbackfn: (value: T, value2: T, set: AtomSet<T>) => void, thisArg?: any): void {\n\t\tfor (const value of this) {\n\t\t\tcallbackfn.call(thisArg, value, value, this)\n\t\t}\n\t}\n\thas(value: T): boolean {\n\t\treturn this.map.has(value)\n\t}\n\t// eslint-disable-next-line no-restricted-syntax\n\tget size(): number {\n\t\treturn this.map.size\n\t}\n\tentries(): Generator<[T, T], undefined, unknown> {\n\t\treturn this.map.entries()\n\t}\n\tkeys(): Generator<T, undefined, unknown> {\n\t\treturn this.map.keys()\n\t}\n\tvalues(): Generator<T, undefined, unknown> {\n\t\treturn this.map.keys()\n\t}\n\t[Symbol.iterator](): Generator<T, undefined, unknown> {\n\t\treturn this.map.keys()\n\t}\n\t[Symbol.toStringTag]: string = 'AtomSet'\n}\n"],
+"mappings": "AAAA,SAAS,eAAe;AAMjB,MAAM,QAAW;AAAA,EAEvB,YACkB,MACjB,MACC;AAFgB;AAGjB,UAAM,UAAU,OAAO,MAAM,KAAK,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAU,IAAI;AAClE,SAAK,MAAM,IAAI,QAAQ,MAAM,OAAO;AAAA,EACrC;AAAA,EAPiB;AAAA,EASjB,IAAI,OAAgB;AACnB,SAAK,IAAI,IAAI,OAAO,KAAK;AACzB,WAAO;AAAA,EACR;AAAA,EACA,QAAc;AACb,SAAK,IAAI,MAAM;AAAA,EAChB;AAAA,EACA,OAAO,OAAmB;AACzB,WAAO,KAAK,IAAI,OAAO,KAAK;AAAA,EAC7B;AAAA,EACA,QAAQ,YAA4D,SAAqB;AACxF,eAAW,SAAS,MAAM;AACzB,iBAAW,KAAK,SAAS,OAAO,OAAO,IAAI;AAAA,IAC5C;AAAA,EACD;AAAA,EACA,IAAI,OAAmB;AACtB,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC1B;AAAA;AAAA,EAEA,IAAI,OAAe;AAClB,WAAO,KAAK,IAAI;AAAA,EACjB;AAAA,EACA,UAAiD;AAChD,WAAO,KAAK,IAAI,QAAQ;AAAA,EACzB;AAAA,EACA,OAAyC;AACxC,WAAO,KAAK,IAAI,KAAK;AAAA,EACtB;AAAA,EACA,SAA2C;AAC1C,WAAO,KAAK,IAAI,KAAK;AAAA,EACtB;AAAA,EACA,CAAC,OAAO,QAAQ,IAAsC;AACrD,WAAO,KAAK,IAAI,KAAK;AAAA,EACtB;AAAA,EACA,CAAC,OAAO,WAAW,IAAY;AAChC;",
+"names": []
+}
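
The hunks above add the compiled `AtomSet` class and its source map. As the inlined source comment describes it, `AtomSet` is a drop-in, reactive replacement for `Set` backed by `AtomMap`. The sketch below is illustrative only: it assumes the `react` effect helper exported by `@tldraw/state` (which returns a stop function), and the set name and ids are made up.

```ts
import { react } from '@tldraw/state' // assumed effect helper from the tldraw state library
import { AtomSet } from '@tldraw/store'

// Behaves like a Set, but reads inside reactive contexts are tracked.
const selectedIds = new AtomSet<string>('selectedIds', ['shape:a'])

const stop = react('log-selection-size', () => {
	// Re-runs whenever the set's contents change.
	console.log('selected count:', selectedIds.size)
})

selectedIds.add('shape:b') // effect re-runs: "selected count: 2"
selectedIds.delete('shape:a') // effect re-runs: "selected count: 1"
stop()
```
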
@@ -124,16 +124,9 @@ function SetRef(ref) {
     ref.value = true;
   }
 }
-function arrCopy(arr, offset) {
-
-  const len = Math.max(0, arr.length - offset);
-  const newArr = new Array(len);
-  for (let ii = 0; ii < len; ii++) {
-    newArr[ii] = arr[ii + offset];
-  }
-  return newArr;
+function arrCopy(arr, offset = 0) {
+  return arr.slice(offset);
 }
-const is = Object.is;
 class OwnerID {
 }
 class ImmutableMap {
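
The hunk above collapses `arrCopy`'s hand-rolled copy loop into `Array.prototype.slice`, which produces the same shallow copy from `offset` onward and defaults `offset` to 0. A standalone sketch of the equivalence (not tldraw code):

```ts
// Old approach: copy element by element starting at `offset`.
function arrCopyLoop<T>(arr: readonly T[], offset = 0): T[] {
	const len = Math.max(0, arr.length - offset)
	const out = new Array<T>(len)
	for (let ii = 0; ii < len; ii++) out[ii] = arr[ii + offset]
	return out
}

const letters = ['a', 'b', 'c', 'd']
console.log(arrCopyLoop(letters, 1)) // [ 'b', 'c', 'd' ]
console.log(letters.slice(1)) // [ 'b', 'c', 'd' ] (same result, one line)
```
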
@@ -358,7 +351,7 @@ class ArrayMapNode {
   get(_shift, _keyHash, key, notSetValue) {
     const entries = this.entries;
     for (let ii = 0, len = entries.length; ii < len; ii++) {
-      if (is(key, entries[ii][0])) {
+      if (Object.is(key, entries[ii][0])) {
         return entries[ii][1];
       }
     }
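
This hunk, and the similar ones below, inline the former `const is = Object.is` alias as direct `Object.is` calls; the key-equality semantics are unchanged. For reference, `Object.is` differs from `===` only for `NaN` and signed zero:

```ts
// Object.is matches === except for two edge cases:
console.log(Object.is(NaN, NaN)) // true  (NaN === NaN is false)
console.log(Object.is(0, -0)) // false (0 === -0 is true)
console.log(Object.is('a', 'a')) // true, same as ===
```
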
@@ -370,7 +363,7 @@ class ArrayMapNode {
     let idx = 0;
     const len = entries.length;
     for (; idx < len; idx++) {
-      if (is(key, entries[idx][0])) {
+      if (Object.is(key, entries[idx][0])) {
         break;
       }
     }
@@ -379,7 +372,7 @@ class ArrayMapNode {
       return this;
     }
     SetRef(didAlter);
-    (removed || !exists)
+    if (removed || !exists) SetRef(didChangeSize);
     if (removed && entries.length === 1) {
       return;
     }
@@ -390,7 +383,11 @@ class ArrayMapNode {
     const newEntries = isEditable ? entries : arrCopy(entries);
     if (exists) {
       if (removed) {
-        idx === len - 1
+        if (idx === len - 1) {
+          newEntries.pop();
+        } else {
+          newEntries[idx] = newEntries.pop();
+        }
       } else {
         newEntries[idx] = [key, value];
       }
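
The added branch above spells out a swap-remove: deleting an entry that is not last overwrites it with the popped last entry, so removal stays O(1) at the cost of entry order. A standalone sketch of the pattern (not tldraw code):

```ts
// Remove the element at `idx` in O(1) by swapping in the last element.
function swapRemove<T>(arr: T[], idx: number): void {
	if (idx === arr.length - 1) {
		arr.pop()
	} else {
		arr[idx] = arr.pop() as T
	}
}

const xs = [10, 20, 30, 40]
swapRemove(xs, 1)
console.log(xs) // [ 10, 40, 30 ]
```
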
@@ -531,7 +528,7 @@ class HashCollisionNode {
   get(shift, keyHash, key, notSetValue) {
     const entries = this.entries;
     for (let ii = 0, len = entries.length; ii < len; ii++) {
-      if (is(key, entries[ii][0])) {
+      if (Object.is(key, entries[ii][0])) {
         return entries[ii][1];
       }
     }
@@ -554,7 +551,7 @@ class HashCollisionNode {
     let idx = 0;
     const len = entries.length;
     for (; idx < len; idx++) {
-      if (is(key, entries[idx][0])) {
+      if (Object.is(key, entries[idx][0])) {
         break;
       }
     }
@@ -563,7 +560,7 @@ class HashCollisionNode {
       return this;
     }
     SetRef(didAlter);
-    (removed || !exists)
+    if (removed || !exists) SetRef(didChangeSize);
     if (removed && len === 2) {
       return new ValueNode(ownerID, this.keyHash, entries[idx ^ 1]);
     }
@@ -571,7 +568,11 @@ class HashCollisionNode {
     const newEntries = isEditable ? entries : arrCopy(entries);
     if (exists) {
       if (removed) {
-        idx === len - 1
+        if (idx === len - 1) {
+          newEntries.pop();
+        } else {
+          newEntries[idx] = newEntries.pop();
+        }
       } else {
         newEntries[idx] = [key, value];
       }
@@ -592,11 +593,11 @@ class ValueNode {
     this.entry = entry;
   }
   get(shift, keyHash, key, notSetValue) {
-    return is(key, this.entry[0]) ? this.entry[1] : notSetValue;
+    return Object.is(key, this.entry[0]) ? this.entry[1] : notSetValue;
   }
   update(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) {
     const removed = value === NOT_SET;
-    const keyMatch = is(key, this.entry[0]);
+    const keyMatch = Object.is(key, this.entry[0]);
     if (keyMatch ? value === this.entry[1] : removed) {
       return this;
     }
@@ -675,10 +676,11 @@ const ITERATE_VALUES = 1;
 const ITERATE_ENTRIES = 2;
 function iteratorValue(type, k, v, iteratorResult) {
   const value = type === ITERATE_KEYS ? k : type === ITERATE_VALUES ? v : [k, v];
-
-  value
-
-
+  if (iteratorResult) {
+    iteratorResult.value = value;
+  } else {
+    iteratorResult = { value, done: false };
+  }
   return iteratorResult;
 }
 function iteratorDone() {
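
The final hunk rewrites `iteratorValue` so the optional reuse of the iterator-result object is explicit: a passed-in `iteratorResult` is mutated in place, otherwise a fresh `{ value, done: false }` object is allocated. A minimal standalone sketch of that allocation-saving pattern (names are illustrative, not from this package):

```ts
interface IterResult<T> {
	value: T
	done: boolean
}

// Reuse a single result object across iteration steps to avoid
// allocating a new one per step.
function yieldValue<T>(value: T, reuse?: IterResult<T>): IterResult<T> {
	if (reuse) {
		reuse.value = value
		return reuse
	}
	return { value, done: false }
}

const shared: IterResult<number> = { value: 0, done: false }
console.log(yieldValue(1, shared) === shared) // true: same object, mutated in place
console.log(yieldValue(2)) // { value: 2, done: false }: fresh allocation
```
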