@tldraw/store 3.16.0-internal.a478398270c6 → 3.16.0-next.15f085081fd5

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -44,7 +44,7 @@ const tolkeinId = Author.createCustomId('tolkein')
44
44
 
45
45
  store.put([
46
46
  Author.create({
47
- id: jrrTolkeinId,
47
+ id: tolkeinId,
48
48
  name: 'J.R.R Tolkein',
49
49
  }),
50
50
  ])
@@ -163,7 +163,7 @@ store.deserialize(serialized)
163
163
 
164
164
  ### `listen(listener: ((entry: HistoryEntry) => void): () => void`
165
165
 
166
- Add a new listener to the store The store will call the function each time the history changes. Returns a function to remove the listener.
166
+ Add a new listener to the store. The store will call the function each time the history changes. Returns a function to remove the listener.
167
167
 
168
168
  ```ts
169
169
  store.listen((entry) => doSomethingWith(entry))
@@ -500,10 +500,13 @@ export declare type SerializedStore<R extends UnknownRecord> = Record<IdOf<R>, R
500
500
  * Squash a collection of diffs into a single diff.
501
501
  *
502
502
  * @param diffs - An array of diffs to squash.
503
+ * @param options - An optional object with a `mutateFirstDiff` property. If `mutateFirstDiff` is true, the first diff in the array will be mutated in-place.
503
504
  * @returns A single diff that represents the squashed diffs.
504
505
  * @public
505
506
  */
506
- export declare function squashRecordDiffs<T extends UnknownRecord>(diffs: RecordsDiff<T>[]): RecordsDiff<T>;
507
+ export declare function squashRecordDiffs<T extends UnknownRecord>(diffs: RecordsDiff<T>[], options?: {
508
+ mutateFirstDiff?: boolean;
509
+ }): RecordsDiff<T>;
507
510
 
508
511
  /* Excluded from this release type: squashRecordDiffsMutable */
509
512
 
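For reference, the new optional `options` argument on `squashRecordDiffs` above is additive: omitting it keeps the previous behaviour. A minimal usage sketch, with hypothetical `diffA`/`diffB` values that are not part of this diff:

```ts
import { squashRecordDiffs, type RecordsDiff, type UnknownRecord } from '@tldraw/store'

// Two diffs produced elsewhere (e.g. collected from store history); declared
// here only so the sketch stays self-contained.
declare const diffA: RecordsDiff<UnknownRecord>
declare const diffB: RecordsDiff<UnknownRecord>

// Default call, unchanged by this release: the input diffs are left untouched
// and a freshly allocated squashed diff is returned.
const squashed = squashRecordDiffs([diffA, diffB])
// `squashed` has the usual { added, updated, removed } shape.
```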
@@ -892,11 +895,14 @@ export declare class StoreSchema<R extends UnknownRecord, P = unknown> {
892
895
  }, options?: StoreSchemaOptions<R, P>): StoreSchema<R, P>;
893
896
  readonly migrations: Record<string, MigrationSequence>;
894
897
  readonly sortedMigrations: readonly Migration[];
898
+ private readonly migrationCache;
895
899
  private constructor();
896
900
  validateRecord(store: Store<R>, record: R, phase: 'createRecord' | 'initialize' | 'tests' | 'updateRecord', recordBefore: null | R): R;
897
901
  getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string>;
898
902
  migratePersistedRecord(record: R, persistedSchema: SerializedSchema, direction?: 'down' | 'up'): MigrationResult<R>;
899
- migrateStoreSnapshot(snapshot: StoreSnapshot<R>): MigrationResult<SerializedStore<R>>;
903
+ migrateStoreSnapshot(snapshot: StoreSnapshot<R>, opts?: {
904
+ mutateInputStore?: boolean;
905
+ }): MigrationResult<SerializedStore<R>>;
900
906
  /* Excluded from this release type: createIntegrityChecker */
901
907
  serialize(): SerializedSchemaV2;
902
908
  /**
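The other public change in this hunk is the optional `opts` argument on `migrateStoreSnapshot`. A minimal sketch of both call styles, assuming a hypothetical `schema` and persisted `snapshot` that are not part of this diff:

```ts
import { StoreSchema, type StoreSnapshot, type UnknownRecord } from '@tldraw/store'

// Hypothetical schema and snapshot, stand-ins for whatever the caller has.
declare const schema: StoreSchema<UnknownRecord>
declare const snapshot: StoreSnapshot<UnknownRecord>

// Default behaviour: when migrations need to run, the snapshot's store is
// cloned first, so `snapshot` itself is left untouched.
const migrated = schema.migrateStoreSnapshot(snapshot)

// New opt-in: skip the clone and let migrations write into `snapshot.store`
// directly. Only sensible when the original snapshot is no longer needed.
const migratedInPlace = schema.migrateStoreSnapshot(snapshot, { mutateInputStore: true })

if (migrated.type === 'success') {
  // migrated.value is the migrated SerializedStore<UnknownRecord>
}
```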
package/dist-cjs/index.js CHANGED
@@ -55,7 +55,7 @@ var import_StoreSchema = require("./lib/StoreSchema");
55
55
  var import_StoreSideEffects = require("./lib/StoreSideEffects");
56
56
  (0, import_utils.registerTldrawLibraryVersion)(
57
57
  "@tldraw/store",
58
- "3.16.0-internal.a478398270c6",
58
+ "3.16.0-next.15f085081fd5",
59
59
  "cjs"
60
60
  );
61
61
  //# sourceMappingURL=index.js.map
@@ -39,9 +39,9 @@ function reverseRecordsDiff(diff) {
39
39
  function isRecordsDiffEmpty(diff) {
40
40
  return Object.keys(diff.added).length === 0 && Object.keys(diff.updated).length === 0 && Object.keys(diff.removed).length === 0;
41
41
  }
42
- function squashRecordDiffs(diffs) {
43
- const result = { added: {}, removed: {}, updated: {} };
44
- squashRecordDiffsMutable(result, diffs);
42
+ function squashRecordDiffs(diffs, options) {
43
+ const result = options?.mutateFirstDiff ? diffs[0] : { added: {}, removed: {}, updated: {} };
44
+ squashRecordDiffsMutable(result, options?.mutateFirstDiff ? diffs.slice(1) : diffs);
45
45
  return result;
46
46
  }
47
47
  function squashRecordDiffsMutable(target, diffs) {
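The compiled change above makes the effect of `mutateFirstDiff` concrete: the first diff in the array becomes the accumulation target and is also the return value. A small sketch of that aliasing, with a hypothetical `diffs` array:

```ts
import { squashRecordDiffs, type RecordsDiff, type UnknownRecord } from '@tldraw/store'

declare const diffs: RecordsDiff<UnknownRecord>[]

// With `mutateFirstDiff: true` no fresh diff is allocated: the remaining diffs
// are folded into `diffs[0]`, and that same (now mutated) object is returned.
const squashed = squashRecordDiffs(diffs, { mutateFirstDiff: true })
console.log(squashed === diffs[0]) // true, assuming `diffs` is non-empty
```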
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/lib/RecordsDiff.ts"],
4
- "sourcesContent": ["import { objectMapEntries } from '@tldraw/utils'\nimport { IdOf, UnknownRecord } from './BaseRecord'\n\n/**\n * A diff describing the changes to a record.\n *\n * @public\n */\nexport interface RecordsDiff<R extends UnknownRecord> {\n\tadded: Record<IdOf<R>, R>\n\tupdated: Record<IdOf<R>, [from: R, to: R]>\n\tremoved: Record<IdOf<R>, R>\n}\n\n/** @internal */\nexport function createEmptyRecordsDiff<R extends UnknownRecord>(): RecordsDiff<R> {\n\treturn { added: {}, updated: {}, removed: {} } as RecordsDiff<R>\n}\n\n/** @public */\nexport function reverseRecordsDiff(diff: RecordsDiff<any>) {\n\tconst result: RecordsDiff<any> = { added: diff.removed, removed: diff.added, updated: {} }\n\tfor (const [from, to] of Object.values(diff.updated)) {\n\t\tresult.updated[from.id] = [to, from]\n\t}\n\treturn result\n}\n\n/**\n * Is a records diff empty?\n * @internal\n */\nexport function isRecordsDiffEmpty<T extends UnknownRecord>(diff: RecordsDiff<T>) {\n\treturn (\n\t\tObject.keys(diff.added).length === 0 &&\n\t\tObject.keys(diff.updated).length === 0 &&\n\t\tObject.keys(diff.removed).length === 0\n\t)\n}\n\n/**\n * Squash a collection of diffs into a single diff.\n *\n * @param diffs - An array of diffs to squash.\n * @returns A single diff that represents the squashed diffs.\n * @public\n */\nexport function squashRecordDiffs<T extends UnknownRecord>(\n\tdiffs: RecordsDiff<T>[]\n): RecordsDiff<T> {\n\tconst result = { added: {}, removed: {}, updated: {} } as RecordsDiff<T>\n\n\tsquashRecordDiffsMutable(result, diffs)\n\treturn result\n}\n\n/**\n * Apply the array `diffs` to the `target` diff, mutating it in-place.\n * @internal\n */\nexport function squashRecordDiffsMutable<T extends UnknownRecord>(\n\ttarget: RecordsDiff<T>,\n\tdiffs: RecordsDiff<T>[]\n): void {\n\tfor (const diff of diffs) {\n\t\tfor (const [id, value] of objectMapEntries(diff.added)) {\n\t\t\tif (target.removed[id]) {\n\t\t\t\tconst original = target.removed[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tif (original !== value) {\n\t\t\t\t\ttarget.updated[id] = [original, value]\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ttarget.added[id] = value\n\t\t\t}\n\t\t}\n\n\t\tfor (const [id, [_from, to]] of objectMapEntries(diff.updated)) {\n\t\t\tif (target.added[id]) {\n\t\t\t\ttarget.added[id] = to\n\t\t\t\tdelete target.updated[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif (target.updated[id]) {\n\t\t\t\ttarget.updated[id] = [target.updated[id][0], to]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\ttarget.updated[id] = diff.updated[id]\n\t\t\tdelete target.removed[id]\n\t\t}\n\n\t\tfor (const [id, value] of objectMapEntries(diff.removed)) {\n\t\t\t// the same record was added in this diff sequence, just drop it\n\t\t\tif (target.added[id]) {\n\t\t\t\tdelete target.added[id]\n\t\t\t} else if (target.updated[id]) {\n\t\t\t\ttarget.removed[id] = target.updated[id][0]\n\t\t\t\tdelete target.updated[id]\n\t\t\t} else {\n\t\t\t\ttarget.removed[id] = value\n\t\t\t}\n\t\t}\n\t}\n}\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAiC;AAe1B,SAAS,yBAAkE;AACjF,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAC9C;AAGO,SAAS,mBAAmB,MAAwB;AAC1D,QAAM,SAA2B,EAAE,OAAO,KAAK,SAAS,SAAS,KAAK,OAAO,SAAS,CAAC,EAAE;AACzF,aAAW,CAAC,MAAM,EAAE,KAAK,OAAO,OAAO,KAAK,OAAO,GAAG;AACrD,WAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI;AAAA,EACpC;AACA,SAAO;AACR;AAMO,SAAS,mBAA4C,MAAsB;AACjF,SACC,OAAO,KAAK,KAAK,KAAK,EAAE,WAAW,KACnC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW,KACrC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW;AAEvC;AASO,SAAS,kBACf,OACiB;AACjB,QAAM,SAAS,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAErD,2BAAyB,QAAQ,KAAK;AACtC,SAAO;AACR;AAMO,SAAS,yBACf,QACA,OACO;AACP,aAAW,QAAQ,OAAO;AACzB,eAAW,CAAC,IAAI,KAAK,SAAK,+BAAiB,KAAK,KAAK,GAAG;AACvD,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,cAAM,WAAW,OAAO,QAAQ,EAAE;AAClC,eAAO,OAAO,QAAQ,EAAE;AACxB,YAAI,aAAa,OAAO;AACvB,iBAAO,QAAQ,EAAE,IAAI,CAAC,UAAU,KAAK;AAAA,QACtC;AAAA,MACD,OAAO;AACN,eAAO,MAAM,EAAE,IAAI;AAAA,MACpB;AAAA,IACD;AAEA,eAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,SAAK,+BAAiB,KAAK,OAAO,GAAG;AAC/D,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,MAAM,EAAE,IAAI;AACnB,eAAO,OAAO,QAAQ,EAAE;AACxB,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AACA,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,eAAO,QAAQ,EAAE,IAAI,CAAC,OAAO,QAAQ,EAAE,EAAE,CAAC,GAAG,EAAE;AAC/C,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AAEA,aAAO,QAAQ,EAAE,IAAI,KAAK,QAAQ,EAAE;AACpC,aAAO,OAAO,QAAQ,EAAE;AAAA,IACzB;AAEA,eAAW,CAAC,IAAI,KAAK,SAAK,+BAAiB,KAAK,OAAO,GAAG;AAEzD,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,OAAO,MAAM,EAAE;AAAA,MACvB,WAAW,OAAO,QAAQ,EAAE,GAAG;AAC9B,eAAO,QAAQ,EAAE,IAAI,OAAO,QAAQ,EAAE,EAAE,CAAC;AACzC,eAAO,OAAO,QAAQ,EAAE;AAAA,MACzB,OAAO;AACN,eAAO,QAAQ,EAAE,IAAI;AAAA,MACtB;AAAA,IACD;AAAA,EACD;AACD;",
4
+ "sourcesContent": ["import { objectMapEntries } from '@tldraw/utils'\nimport { IdOf, UnknownRecord } from './BaseRecord'\n\n/**\n * A diff describing the changes to a record.\n *\n * @public\n */\nexport interface RecordsDiff<R extends UnknownRecord> {\n\tadded: Record<IdOf<R>, R>\n\tupdated: Record<IdOf<R>, [from: R, to: R]>\n\tremoved: Record<IdOf<R>, R>\n}\n\n/** @internal */\nexport function createEmptyRecordsDiff<R extends UnknownRecord>(): RecordsDiff<R> {\n\treturn { added: {}, updated: {}, removed: {} } as RecordsDiff<R>\n}\n\n/** @public */\nexport function reverseRecordsDiff(diff: RecordsDiff<any>) {\n\tconst result: RecordsDiff<any> = { added: diff.removed, removed: diff.added, updated: {} }\n\tfor (const [from, to] of Object.values(diff.updated)) {\n\t\tresult.updated[from.id] = [to, from]\n\t}\n\treturn result\n}\n\n/**\n * Is a records diff empty?\n * @internal\n */\nexport function isRecordsDiffEmpty<T extends UnknownRecord>(diff: RecordsDiff<T>) {\n\treturn (\n\t\tObject.keys(diff.added).length === 0 &&\n\t\tObject.keys(diff.updated).length === 0 &&\n\t\tObject.keys(diff.removed).length === 0\n\t)\n}\n\n/**\n * Squash a collection of diffs into a single diff.\n *\n * @param diffs - An array of diffs to squash.\n * @param options - An optional object with a `mutateFirstDiff` property. If `mutateFirstDiff` is true, the first diff in the array will be mutated in-place.\n * @returns A single diff that represents the squashed diffs.\n * @public\n */\nexport function squashRecordDiffs<T extends UnknownRecord>(\n\tdiffs: RecordsDiff<T>[],\n\toptions?: {\n\t\tmutateFirstDiff?: boolean\n\t}\n): RecordsDiff<T> {\n\tconst result = options?.mutateFirstDiff\n\t\t? diffs[0]\n\t\t: ({ added: {}, removed: {}, updated: {} } as RecordsDiff<T>)\n\n\tsquashRecordDiffsMutable(result, options?.mutateFirstDiff ? diffs.slice(1) : diffs)\n\treturn result\n}\n\n/**\n * Apply the array `diffs` to the `target` diff, mutating it in-place.\n * @internal\n */\nexport function squashRecordDiffsMutable<T extends UnknownRecord>(\n\ttarget: RecordsDiff<T>,\n\tdiffs: RecordsDiff<T>[]\n): void {\n\tfor (const diff of diffs) {\n\t\tfor (const [id, value] of objectMapEntries(diff.added)) {\n\t\t\tif (target.removed[id]) {\n\t\t\t\tconst original = target.removed[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tif (original !== value) {\n\t\t\t\t\ttarget.updated[id] = [original, value]\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ttarget.added[id] = value\n\t\t\t}\n\t\t}\n\n\t\tfor (const [id, [_from, to]] of objectMapEntries(diff.updated)) {\n\t\t\tif (target.added[id]) {\n\t\t\t\ttarget.added[id] = to\n\t\t\t\tdelete target.updated[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif (target.updated[id]) {\n\t\t\t\ttarget.updated[id] = [target.updated[id][0], to]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\ttarget.updated[id] = diff.updated[id]\n\t\t\tdelete target.removed[id]\n\t\t}\n\n\t\tfor (const [id, value] of objectMapEntries(diff.removed)) {\n\t\t\t// the same record was added in this diff sequence, just drop it\n\t\t\tif (target.added[id]) {\n\t\t\t\tdelete target.added[id]\n\t\t\t} else if (target.updated[id]) {\n\t\t\t\ttarget.removed[id] = target.updated[id][0]\n\t\t\t\tdelete target.updated[id]\n\t\t\t} else {\n\t\t\t\ttarget.removed[id] = value\n\t\t\t}\n\t\t}\n\t}\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAiC;AAe1B,SAAS,yBAAkE;AACjF,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAC9C;AAGO,SAAS,mBAAmB,MAAwB;AAC1D,QAAM,SAA2B,EAAE,OAAO,KAAK,SAAS,SAAS,KAAK,OAAO,SAAS,CAAC,EAAE;AACzF,aAAW,CAAC,MAAM,EAAE,KAAK,OAAO,OAAO,KAAK,OAAO,GAAG;AACrD,WAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI;AAAA,EACpC;AACA,SAAO;AACR;AAMO,SAAS,mBAA4C,MAAsB;AACjF,SACC,OAAO,KAAK,KAAK,KAAK,EAAE,WAAW,KACnC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW,KACrC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW;AAEvC;AAUO,SAAS,kBACf,OACA,SAGiB;AACjB,QAAM,SAAS,SAAS,kBACrB,MAAM,CAAC,IACN,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAE1C,2BAAyB,QAAQ,SAAS,kBAAkB,MAAM,MAAM,CAAC,IAAI,KAAK;AAClF,SAAO;AACR;AAMO,SAAS,yBACf,QACA,OACO;AACP,aAAW,QAAQ,OAAO;AACzB,eAAW,CAAC,IAAI,KAAK,SAAK,+BAAiB,KAAK,KAAK,GAAG;AACvD,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,cAAM,WAAW,OAAO,QAAQ,EAAE;AAClC,eAAO,OAAO,QAAQ,EAAE;AACxB,YAAI,aAAa,OAAO;AACvB,iBAAO,QAAQ,EAAE,IAAI,CAAC,UAAU,KAAK;AAAA,QACtC;AAAA,MACD,OAAO;AACN,eAAO,MAAM,EAAE,IAAI;AAAA,MACpB;AAAA,IACD;AAEA,eAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,SAAK,+BAAiB,KAAK,OAAO,GAAG;AAC/D,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,MAAM,EAAE,IAAI;AACnB,eAAO,OAAO,QAAQ,EAAE;AACxB,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AACA,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,eAAO,QAAQ,EAAE,IAAI,CAAC,OAAO,QAAQ,EAAE,EAAE,CAAC,GAAG,EAAE;AAC/C,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AAEA,aAAO,QAAQ,EAAE,IAAI,KAAK,QAAQ,EAAE;AACpC,aAAO,OAAO,QAAQ,EAAE;AAAA,IACzB;AAEA,eAAW,CAAC,IAAI,KAAK,SAAK,+BAAiB,KAAK,OAAO,GAAG;AAEzD,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,OAAO,MAAM,EAAE;AAAA,MACvB,WAAW,OAAO,QAAQ,EAAE,GAAG;AAC9B,eAAO,QAAQ,EAAE,IAAI,OAAO,QAAQ,EAAE,EAAE,CAAC;AACzC,eAAO,OAAO,QAAQ,EAAE;AAAA,MACzB,OAAO;AACN,eAAO,QAAQ,EAAE,IAAI;AAAA,MACtB;AAAA,IACD;AAAA,EACD;AACD;",
6
6
  "names": []
7
7
  }
@@ -67,6 +67,7 @@ class StoreSchema {
67
67
  }
68
68
  migrations = {};
69
69
  sortedMigrations;
70
+ migrationCache = /* @__PURE__ */ new WeakMap();
70
71
  validateRecord(store, record, phase, recordBefore) {
71
72
  try {
72
73
  const recordType = (0, import_utils.getOwnProperty)(this.types, record.typeName);
@@ -88,10 +89,14 @@ class StoreSchema {
88
89
  }
89
90
  }
90
91
  }
91
- // TODO: use a weakmap to store the result of this function
92
92
  getMigrationsSince(persistedSchema) {
93
+ const cached = this.migrationCache.get(persistedSchema);
94
+ if (cached) {
95
+ return cached;
96
+ }
93
97
  const upgradeResult = upgradeSchema(persistedSchema);
94
98
  if (!upgradeResult.ok) {
99
+ this.migrationCache.set(persistedSchema, upgradeResult);
95
100
  return upgradeResult;
96
101
  }
97
102
  const schema = upgradeResult.value;
@@ -105,7 +110,9 @@ class StoreSchema {
105
110
  }
106
111
  }
107
112
  if (sequenceIdsToInclude.size === 0) {
108
- return import_utils.Result.ok([]);
113
+ const result2 = import_utils.Result.ok([]);
114
+ this.migrationCache.set(persistedSchema, result2);
115
+ return result2;
109
116
  }
110
117
  const allMigrationsToInclude = /* @__PURE__ */ new Set();
111
118
  for (const sequenceId of sequenceIdsToInclude) {
@@ -119,13 +126,19 @@ class StoreSchema {
119
126
  const theirVersionId = `${sequenceId}/${theirVersion}`;
120
127
  const idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId);
121
128
  if (idx === -1) {
122
- return import_utils.Result.err("Incompatible schema?");
129
+ const result2 = import_utils.Result.err("Incompatible schema?");
130
+ this.migrationCache.set(persistedSchema, result2);
131
+ return result2;
123
132
  }
124
133
  for (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {
125
134
  allMigrationsToInclude.add(migration.id);
126
135
  }
127
136
  }
128
- return import_utils.Result.ok(this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id)));
137
+ const result = import_utils.Result.ok(
138
+ this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))
139
+ );
140
+ this.migrationCache.set(persistedSchema, result);
141
+ return result;
129
142
  }
130
143
  migratePersistedRecord(record, persistedSchema, direction = "up") {
131
144
  const migrations = this.getMigrationsSince(persistedSchema);
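The hunks above replace the old `// TODO: use a weakmap` comment with a `WeakMap` keyed by the persisted schema object, so every return path caches its `Result` and repeat calls with the same schema instance are answered from the cache. As a standalone sketch of that general pattern (not tldraw API; names are illustrative):

```ts
// A generic sketch of WeakMap memoization: cache results keyed by the input
// object, so identical objects are only processed once and cache entries can
// be garbage-collected together with their keys.
// (Assumes `compute` never returns undefined.)
function memoizeByObject<K extends object, V>(compute: (key: K) => V): (key: K) => V {
  const cache = new WeakMap<K, V>()
  return (key) => {
    const cached = cache.get(key)
    if (cached !== undefined) return cached
    const value = compute(key)
    cache.set(key, value)
    return value
  }
}
```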
@@ -171,7 +184,7 @@ class StoreSchema {
171
184
  }
172
185
  return { type: "success", value: record };
173
186
  }
174
- migrateStoreSnapshot(snapshot) {
187
+ migrateStoreSnapshot(snapshot, opts) {
175
188
  let { store } = snapshot;
176
189
  const migrations = this.getMigrationsSince(snapshot.schema);
177
190
  if (!migrations.ok) {
@@ -182,7 +195,9 @@ class StoreSchema {
182
195
  if (migrationsToApply.length === 0) {
183
196
  return { type: "success", value: store };
184
197
  }
185
- store = (0, import_utils.structuredClone)(store);
198
+ if (!opts?.mutateInputStore) {
199
+ store = (0, import_utils.structuredClone)(store);
200
+ }
186
201
  try {
187
202
  for (const migration of migrationsToApply) {
188
203
  if (migration.scope === "record") {
@@ -191,13 +206,13 @@ class StoreSchema {
191
206
  if (!shouldApply) continue;
192
207
  const result = migration.up(record);
193
208
  if (result) {
194
- store[id] = (0, import_utils.structuredClone)(result);
209
+ store[id] = result;
195
210
  }
196
211
  }
197
212
  } else if (migration.scope === "store") {
198
213
  const result = migration.up(store);
199
214
  if (result) {
200
- store = (0, import_utils.structuredClone)(result);
215
+ store = result;
201
216
  }
202
217
  } else {
203
218
  (0, import_utils.exhaustiveSwitchError)(migration);
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/lib/StoreSchema.ts"],
4
- "sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/** @public */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/** @public */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/** @public */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/** @public */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/** @public */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/** @public */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? 
{})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\t// TODO: use a weakmap to store the result of this function\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\treturn Result.ok([])\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = `${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\treturn Result.err('Incompatible schema?')\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) 
{\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\treturn Result.ok(this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id)))\n\t}\n\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\tmigrateStoreSnapshot(snapshot: StoreSnapshot<R>): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tstore = structuredClone(store)\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? 
migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = structuredClone(result) as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = structuredClone(result) as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/** @internal */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? undefined\n\t}\n\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/** @internal */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAMO;AAIP,qBASO;AAqCA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,oBAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,oBAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,oBAAO,GAAG,MAAM;AACxB;AAqBO,MAAM,YAAkD;AAAA,EActD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,+BAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,6CAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,uBAAmB,+BAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,iCAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA,EAlCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EAyBT,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,iBAAa,6BAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGO,mBAAmB,iBAAgE;AACzF,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AACtB,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,aAAO,oBAAO,GAAG,CAAC,CAAC;AAAA,IACpB;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,eAAO,oBAAO,IAAI,sBAAsB;AAAA,MACzC;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,WAAO,oBAAO,GAAG,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC,CAAC;AAAA,EAC1F;AAAA,EAEA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,sCAAuB,sBACvB,sCAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA,UACN,QAAQ,sCAAuB;AAAA,QAChC;AAAA,MACD;AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,iBAAS,8BAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,MAAM;AAC3C,YAAI,QAAQ;AACX,uBAAS,8BAAgB
,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA,EAEA,qBAAqB,UAAiE;AACrF,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,gBAAQ,8BAAgB,KAAK;AAE7B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,QAAI,8BAAgB,MAAM;AAAA,YACzD;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,wBAAQ,8BAAgB,MAAM;AAAA,UAC/B;AAAA,QACD,OAAO;AACN,kDAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA,EAGA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA,EAEA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,aAAS,iCAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGA,QAAQ,UAAkB;AACzB,UAAM,WAAO,6BAAe,KAAK,OAAO,QAAQ;AAChD,6BAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
6
- "names": []
4
+ "sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/** @public */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/** @public */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/** @public */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/** @public */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/** @public */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/** @public */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? 
{})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\tprivate readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\t// Check cache first\n\t\tconst cached = this.migrationCache.get(persistedSchema)\n\t\tif (cached) {\n\t\t\treturn cached\n\t\t}\n\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\t// Cache the error result\n\t\t\tthis.migrationCache.set(persistedSchema, upgradeResult)\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\tconst result = Result.ok([])\n\t\t\t// Cache the empty result\n\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\treturn result\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = 
`${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\tconst result = Result.err('Incompatible schema?')\n\t\t\t\t// Cache the error result\n\t\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\t\treturn result\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\tconst result = Result.ok(\n\t\t\tthis.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))\n\t\t)\n\t\t// Cache the result\n\t\tthis.migrationCache.set(persistedSchema, result)\n\t\treturn result\n\t}\n\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\tmigrateStoreSnapshot(\n\t\tsnapshot: StoreSnapshot<R>,\n\t\topts?: { mutateInputStore?: boolean }\n\t): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tif (!opts?.mutateInputStore) {\n\t\t\tstore = structuredClone(store)\n\t\t}\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? 
migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = result as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = result as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/** @internal */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? undefined\n\t}\n\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/** @internal */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAMO;AAIP,qBASO;AAqCA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,oBAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,oBAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,oBAAO,GAAG,MAAM;AACxB;AAqBO,MAAM,YAAkD;AAAA,EAetD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,+BAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,6CAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,uBAAmB,+BAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,iCAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA,EAnCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EACQ,iBAAiB,oBAAI,QAAuD;AAAA,EAyB7F,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,iBAAa,6BAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA,EAEO,mBAAmB,iBAAgE;AAEzF,UAAM,SAAS,KAAK,eAAe,IAAI,eAAe;AACtD,QAAI,QAAQ;AACX,aAAO;AAAA,IACR;AAEA,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AAEtB,WAAK,eAAe,IAAI,iBAAiB,aAAa;AACtD,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,YAAMA,UAAS,oBAAO,GAAG,CAAC,CAAC;AAE3B,WAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,aAAOA;AAAA,IACR;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,cAAMA,UAAS,oBAAO,IAAI,sBAAsB;AAEhD,aAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,eAAOA;AAAA,MACR;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,UAAM,SAAS,oBAAO;AAAA,MACrB,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC;AAAA,IACxE;AAEA,SAAK,eAAe,IAAI,iBAAiB,MAAM;AAC/C,WAAO;AAAA,EACR;AAAA,EAEA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,sCAAuB,sBACvB,sCAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA,UACN,QAAQ,sCAAuB;AAAA,QAChC;AAAA,MACD;
AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,iBAAS,8BAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,MAAM;AAC3C,YAAI,QAAQ;AACX,uBAAS,8BAAgB,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA,EAEA,qBACC,UACA,MACsC;AACtC,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,QAAI,CAAC,MAAM,kBAAkB;AAC5B,kBAAQ,8BAAgB,KAAK;AAAA,IAC9B;AAEA,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,IAAI;AAAA,YACnC;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,oBAAQ;AAAA,UACT;AAAA,QACD,OAAO;AACN,kDAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,sCAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA,EAGA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA,EAEA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,aAAS,iCAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGA,QAAQ,UAAkB;AACzB,UAAM,WAAO,6BAAe,KAAK,OAAO,QAAQ;AAChD,6BAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
6
+ "names": ["result"]
7
7
  }
@@ -500,10 +500,13 @@ export declare type SerializedStore<R extends UnknownRecord> = Record<IdOf<R>, R
500
500
  * Squash a collection of diffs into a single diff.
501
501
  *
502
502
  * @param diffs - An array of diffs to squash.
503
+ * @param options - An optional object with a `mutateFirstDiff` property. If `mutateFirstDiff` is true, the first diff in the array will be mutated in-place.
503
504
  * @returns A single diff that represents the squashed diffs.
504
505
  * @public
505
506
  */
506
- export declare function squashRecordDiffs<T extends UnknownRecord>(diffs: RecordsDiff<T>[]): RecordsDiff<T>;
507
+ export declare function squashRecordDiffs<T extends UnknownRecord>(diffs: RecordsDiff<T>[], options?: {
508
+ mutateFirstDiff?: boolean;
509
+ }): RecordsDiff<T>;
507
510
 
508
511
  /* Excluded from this release type: squashRecordDiffsMutable */
509
512
 
@@ -892,11 +895,14 @@ export declare class StoreSchema<R extends UnknownRecord, P = unknown> {
892
895
  }, options?: StoreSchemaOptions<R, P>): StoreSchema<R, P>;
893
896
  readonly migrations: Record<string, MigrationSequence>;
894
897
  readonly sortedMigrations: readonly Migration[];
898
+ private readonly migrationCache;
895
899
  private constructor();
896
900
  validateRecord(store: Store<R>, record: R, phase: 'createRecord' | 'initialize' | 'tests' | 'updateRecord', recordBefore: null | R): R;
897
901
  getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string>;
898
902
  migratePersistedRecord(record: R, persistedSchema: SerializedSchema, direction?: 'down' | 'up'): MigrationResult<R>;
899
- migrateStoreSnapshot(snapshot: StoreSnapshot<R>): MigrationResult<SerializedStore<R>>;
903
+ migrateStoreSnapshot(snapshot: StoreSnapshot<R>, opts?: {
904
+ mutateInputStore?: boolean;
905
+ }): MigrationResult<SerializedStore<R>>;
900
906
  /* Excluded from this release type: createIntegrityChecker */
901
907
  serialize(): SerializedSchemaV2;
902
908
  /**
@@ -29,7 +29,7 @@ import {
29
29
  } from "./lib/StoreSideEffects.mjs";
30
30
  registerTldrawLibraryVersion(
31
31
  "@tldraw/store",
32
- "3.16.0-internal.a478398270c6",
32
+ "3.16.0-next.15f085081fd5",
33
33
  "esm"
34
34
  );
35
35
  export {
@@ -12,9 +12,9 @@ function reverseRecordsDiff(diff) {
12
12
  function isRecordsDiffEmpty(diff) {
13
13
  return Object.keys(diff.added).length === 0 && Object.keys(diff.updated).length === 0 && Object.keys(diff.removed).length === 0;
14
14
  }
15
- function squashRecordDiffs(diffs) {
16
- const result = { added: {}, removed: {}, updated: {} };
17
- squashRecordDiffsMutable(result, diffs);
15
+ function squashRecordDiffs(diffs, options) {
16
+ const result = options?.mutateFirstDiff ? diffs[0] : { added: {}, removed: {}, updated: {} };
17
+ squashRecordDiffsMutable(result, options?.mutateFirstDiff ? diffs.slice(1) : diffs);
18
18
  return result;
19
19
  }
20
20
  function squashRecordDiffsMutable(target, diffs) {
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/lib/RecordsDiff.ts"],
4
- "sourcesContent": ["import { objectMapEntries } from '@tldraw/utils'\nimport { IdOf, UnknownRecord } from './BaseRecord'\n\n/**\n * A diff describing the changes to a record.\n *\n * @public\n */\nexport interface RecordsDiff<R extends UnknownRecord> {\n\tadded: Record<IdOf<R>, R>\n\tupdated: Record<IdOf<R>, [from: R, to: R]>\n\tremoved: Record<IdOf<R>, R>\n}\n\n/** @internal */\nexport function createEmptyRecordsDiff<R extends UnknownRecord>(): RecordsDiff<R> {\n\treturn { added: {}, updated: {}, removed: {} } as RecordsDiff<R>\n}\n\n/** @public */\nexport function reverseRecordsDiff(diff: RecordsDiff<any>) {\n\tconst result: RecordsDiff<any> = { added: diff.removed, removed: diff.added, updated: {} }\n\tfor (const [from, to] of Object.values(diff.updated)) {\n\t\tresult.updated[from.id] = [to, from]\n\t}\n\treturn result\n}\n\n/**\n * Is a records diff empty?\n * @internal\n */\nexport function isRecordsDiffEmpty<T extends UnknownRecord>(diff: RecordsDiff<T>) {\n\treturn (\n\t\tObject.keys(diff.added).length === 0 &&\n\t\tObject.keys(diff.updated).length === 0 &&\n\t\tObject.keys(diff.removed).length === 0\n\t)\n}\n\n/**\n * Squash a collection of diffs into a single diff.\n *\n * @param diffs - An array of diffs to squash.\n * @returns A single diff that represents the squashed diffs.\n * @public\n */\nexport function squashRecordDiffs<T extends UnknownRecord>(\n\tdiffs: RecordsDiff<T>[]\n): RecordsDiff<T> {\n\tconst result = { added: {}, removed: {}, updated: {} } as RecordsDiff<T>\n\n\tsquashRecordDiffsMutable(result, diffs)\n\treturn result\n}\n\n/**\n * Apply the array `diffs` to the `target` diff, mutating it in-place.\n * @internal\n */\nexport function squashRecordDiffsMutable<T extends UnknownRecord>(\n\ttarget: RecordsDiff<T>,\n\tdiffs: RecordsDiff<T>[]\n): void {\n\tfor (const diff of diffs) {\n\t\tfor (const [id, value] of objectMapEntries(diff.added)) {\n\t\t\tif (target.removed[id]) {\n\t\t\t\tconst original = target.removed[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tif (original !== value) {\n\t\t\t\t\ttarget.updated[id] = [original, value]\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ttarget.added[id] = value\n\t\t\t}\n\t\t}\n\n\t\tfor (const [id, [_from, to]] of objectMapEntries(diff.updated)) {\n\t\t\tif (target.added[id]) {\n\t\t\t\ttarget.added[id] = to\n\t\t\t\tdelete target.updated[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif (target.updated[id]) {\n\t\t\t\ttarget.updated[id] = [target.updated[id][0], to]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\ttarget.updated[id] = diff.updated[id]\n\t\t\tdelete target.removed[id]\n\t\t}\n\n\t\tfor (const [id, value] of objectMapEntries(diff.removed)) {\n\t\t\t// the same record was added in this diff sequence, just drop it\n\t\t\tif (target.added[id]) {\n\t\t\t\tdelete target.added[id]\n\t\t\t} else if (target.updated[id]) {\n\t\t\t\ttarget.removed[id] = target.updated[id][0]\n\t\t\t\tdelete target.updated[id]\n\t\t\t} else {\n\t\t\t\ttarget.removed[id] = value\n\t\t\t}\n\t\t}\n\t}\n}\n"],
5
- "mappings": "AAAA,SAAS,wBAAwB;AAe1B,SAAS,yBAAkE;AACjF,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAC9C;AAGO,SAAS,mBAAmB,MAAwB;AAC1D,QAAM,SAA2B,EAAE,OAAO,KAAK,SAAS,SAAS,KAAK,OAAO,SAAS,CAAC,EAAE;AACzF,aAAW,CAAC,MAAM,EAAE,KAAK,OAAO,OAAO,KAAK,OAAO,GAAG;AACrD,WAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI;AAAA,EACpC;AACA,SAAO;AACR;AAMO,SAAS,mBAA4C,MAAsB;AACjF,SACC,OAAO,KAAK,KAAK,KAAK,EAAE,WAAW,KACnC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW,KACrC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW;AAEvC;AASO,SAAS,kBACf,OACiB;AACjB,QAAM,SAAS,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAErD,2BAAyB,QAAQ,KAAK;AACtC,SAAO;AACR;AAMO,SAAS,yBACf,QACA,OACO;AACP,aAAW,QAAQ,OAAO;AACzB,eAAW,CAAC,IAAI,KAAK,KAAK,iBAAiB,KAAK,KAAK,GAAG;AACvD,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,cAAM,WAAW,OAAO,QAAQ,EAAE;AAClC,eAAO,OAAO,QAAQ,EAAE;AACxB,YAAI,aAAa,OAAO;AACvB,iBAAO,QAAQ,EAAE,IAAI,CAAC,UAAU,KAAK;AAAA,QACtC;AAAA,MACD,OAAO;AACN,eAAO,MAAM,EAAE,IAAI;AAAA,MACpB;AAAA,IACD;AAEA,eAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,iBAAiB,KAAK,OAAO,GAAG;AAC/D,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,MAAM,EAAE,IAAI;AACnB,eAAO,OAAO,QAAQ,EAAE;AACxB,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AACA,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,eAAO,QAAQ,EAAE,IAAI,CAAC,OAAO,QAAQ,EAAE,EAAE,CAAC,GAAG,EAAE;AAC/C,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AAEA,aAAO,QAAQ,EAAE,IAAI,KAAK,QAAQ,EAAE;AACpC,aAAO,OAAO,QAAQ,EAAE;AAAA,IACzB;AAEA,eAAW,CAAC,IAAI,KAAK,KAAK,iBAAiB,KAAK,OAAO,GAAG;AAEzD,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,OAAO,MAAM,EAAE;AAAA,MACvB,WAAW,OAAO,QAAQ,EAAE,GAAG;AAC9B,eAAO,QAAQ,EAAE,IAAI,OAAO,QAAQ,EAAE,EAAE,CAAC;AACzC,eAAO,OAAO,QAAQ,EAAE;AAAA,MACzB,OAAO;AACN,eAAO,QAAQ,EAAE,IAAI;AAAA,MACtB;AAAA,IACD;AAAA,EACD;AACD;",
4
+ "sourcesContent": ["import { objectMapEntries } from '@tldraw/utils'\nimport { IdOf, UnknownRecord } from './BaseRecord'\n\n/**\n * A diff describing the changes to a record.\n *\n * @public\n */\nexport interface RecordsDiff<R extends UnknownRecord> {\n\tadded: Record<IdOf<R>, R>\n\tupdated: Record<IdOf<R>, [from: R, to: R]>\n\tremoved: Record<IdOf<R>, R>\n}\n\n/** @internal */\nexport function createEmptyRecordsDiff<R extends UnknownRecord>(): RecordsDiff<R> {\n\treturn { added: {}, updated: {}, removed: {} } as RecordsDiff<R>\n}\n\n/** @public */\nexport function reverseRecordsDiff(diff: RecordsDiff<any>) {\n\tconst result: RecordsDiff<any> = { added: diff.removed, removed: diff.added, updated: {} }\n\tfor (const [from, to] of Object.values(diff.updated)) {\n\t\tresult.updated[from.id] = [to, from]\n\t}\n\treturn result\n}\n\n/**\n * Is a records diff empty?\n * @internal\n */\nexport function isRecordsDiffEmpty<T extends UnknownRecord>(diff: RecordsDiff<T>) {\n\treturn (\n\t\tObject.keys(diff.added).length === 0 &&\n\t\tObject.keys(diff.updated).length === 0 &&\n\t\tObject.keys(diff.removed).length === 0\n\t)\n}\n\n/**\n * Squash a collection of diffs into a single diff.\n *\n * @param diffs - An array of diffs to squash.\n * @param options - An optional object with a `mutateFirstDiff` property. If `mutateFirstDiff` is true, the first diff in the array will be mutated in-place.\n * @returns A single diff that represents the squashed diffs.\n * @public\n */\nexport function squashRecordDiffs<T extends UnknownRecord>(\n\tdiffs: RecordsDiff<T>[],\n\toptions?: {\n\t\tmutateFirstDiff?: boolean\n\t}\n): RecordsDiff<T> {\n\tconst result = options?.mutateFirstDiff\n\t\t? diffs[0]\n\t\t: ({ added: {}, removed: {}, updated: {} } as RecordsDiff<T>)\n\n\tsquashRecordDiffsMutable(result, options?.mutateFirstDiff ? diffs.slice(1) : diffs)\n\treturn result\n}\n\n/**\n * Apply the array `diffs` to the `target` diff, mutating it in-place.\n * @internal\n */\nexport function squashRecordDiffsMutable<T extends UnknownRecord>(\n\ttarget: RecordsDiff<T>,\n\tdiffs: RecordsDiff<T>[]\n): void {\n\tfor (const diff of diffs) {\n\t\tfor (const [id, value] of objectMapEntries(diff.added)) {\n\t\t\tif (target.removed[id]) {\n\t\t\t\tconst original = target.removed[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tif (original !== value) {\n\t\t\t\t\ttarget.updated[id] = [original, value]\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ttarget.added[id] = value\n\t\t\t}\n\t\t}\n\n\t\tfor (const [id, [_from, to]] of objectMapEntries(diff.updated)) {\n\t\t\tif (target.added[id]) {\n\t\t\t\ttarget.added[id] = to\n\t\t\t\tdelete target.updated[id]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif (target.updated[id]) {\n\t\t\t\ttarget.updated[id] = [target.updated[id][0], to]\n\t\t\t\tdelete target.removed[id]\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\ttarget.updated[id] = diff.updated[id]\n\t\t\tdelete target.removed[id]\n\t\t}\n\n\t\tfor (const [id, value] of objectMapEntries(diff.removed)) {\n\t\t\t// the same record was added in this diff sequence, just drop it\n\t\t\tif (target.added[id]) {\n\t\t\t\tdelete target.added[id]\n\t\t\t} else if (target.updated[id]) {\n\t\t\t\ttarget.removed[id] = target.updated[id][0]\n\t\t\t\tdelete target.updated[id]\n\t\t\t} else {\n\t\t\t\ttarget.removed[id] = value\n\t\t\t}\n\t\t}\n\t}\n}\n"],
5
+ "mappings": "AAAA,SAAS,wBAAwB;AAe1B,SAAS,yBAAkE;AACjF,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAC9C;AAGO,SAAS,mBAAmB,MAAwB;AAC1D,QAAM,SAA2B,EAAE,OAAO,KAAK,SAAS,SAAS,KAAK,OAAO,SAAS,CAAC,EAAE;AACzF,aAAW,CAAC,MAAM,EAAE,KAAK,OAAO,OAAO,KAAK,OAAO,GAAG;AACrD,WAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI;AAAA,EACpC;AACA,SAAO;AACR;AAMO,SAAS,mBAA4C,MAAsB;AACjF,SACC,OAAO,KAAK,KAAK,KAAK,EAAE,WAAW,KACnC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW,KACrC,OAAO,KAAK,KAAK,OAAO,EAAE,WAAW;AAEvC;AAUO,SAAS,kBACf,OACA,SAGiB;AACjB,QAAM,SAAS,SAAS,kBACrB,MAAM,CAAC,IACN,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,SAAS,CAAC,EAAE;AAE1C,2BAAyB,QAAQ,SAAS,kBAAkB,MAAM,MAAM,CAAC,IAAI,KAAK;AAClF,SAAO;AACR;AAMO,SAAS,yBACf,QACA,OACO;AACP,aAAW,QAAQ,OAAO;AACzB,eAAW,CAAC,IAAI,KAAK,KAAK,iBAAiB,KAAK,KAAK,GAAG;AACvD,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,cAAM,WAAW,OAAO,QAAQ,EAAE;AAClC,eAAO,OAAO,QAAQ,EAAE;AACxB,YAAI,aAAa,OAAO;AACvB,iBAAO,QAAQ,EAAE,IAAI,CAAC,UAAU,KAAK;AAAA,QACtC;AAAA,MACD,OAAO;AACN,eAAO,MAAM,EAAE,IAAI;AAAA,MACpB;AAAA,IACD;AAEA,eAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,iBAAiB,KAAK,OAAO,GAAG;AAC/D,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,MAAM,EAAE,IAAI;AACnB,eAAO,OAAO,QAAQ,EAAE;AACxB,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AACA,UAAI,OAAO,QAAQ,EAAE,GAAG;AACvB,eAAO,QAAQ,EAAE,IAAI,CAAC,OAAO,QAAQ,EAAE,EAAE,CAAC,GAAG,EAAE;AAC/C,eAAO,OAAO,QAAQ,EAAE;AACxB;AAAA,MACD;AAEA,aAAO,QAAQ,EAAE,IAAI,KAAK,QAAQ,EAAE;AACpC,aAAO,OAAO,QAAQ,EAAE;AAAA,IACzB;AAEA,eAAW,CAAC,IAAI,KAAK,KAAK,iBAAiB,KAAK,OAAO,GAAG;AAEzD,UAAI,OAAO,MAAM,EAAE,GAAG;AACrB,eAAO,OAAO,MAAM,EAAE;AAAA,MACvB,WAAW,OAAO,QAAQ,EAAE,GAAG;AAC9B,eAAO,QAAQ,EAAE,IAAI,OAAO,QAAQ,EAAE,EAAE,CAAC;AACzC,eAAO,OAAO,QAAQ,EAAE;AAAA,MACzB,OAAO;AACN,eAAO,QAAQ,EAAE,IAAI;AAAA,MACtB;AAAA,IACD;AAAA,EACD;AACD;",
6
6
  "names": []
7
7
  }
@@ -54,6 +54,7 @@ class StoreSchema {
54
54
  }
55
55
  migrations = {};
56
56
  sortedMigrations;
57
+ migrationCache = /* @__PURE__ */ new WeakMap();
57
58
  validateRecord(store, record, phase, recordBefore) {
58
59
  try {
59
60
  const recordType = getOwnProperty(this.types, record.typeName);
@@ -75,10 +76,14 @@ class StoreSchema {
75
76
  }
76
77
  }
77
78
  }
78
- // TODO: use a weakmap to store the result of this function
79
79
  getMigrationsSince(persistedSchema) {
80
+ const cached = this.migrationCache.get(persistedSchema);
81
+ if (cached) {
82
+ return cached;
83
+ }
80
84
  const upgradeResult = upgradeSchema(persistedSchema);
81
85
  if (!upgradeResult.ok) {
86
+ this.migrationCache.set(persistedSchema, upgradeResult);
82
87
  return upgradeResult;
83
88
  }
84
89
  const schema = upgradeResult.value;
@@ -92,7 +97,9 @@ class StoreSchema {
92
97
  }
93
98
  }
94
99
  if (sequenceIdsToInclude.size === 0) {
95
- return Result.ok([]);
100
+ const result2 = Result.ok([]);
101
+ this.migrationCache.set(persistedSchema, result2);
102
+ return result2;
96
103
  }
97
104
  const allMigrationsToInclude = /* @__PURE__ */ new Set();
98
105
  for (const sequenceId of sequenceIdsToInclude) {
@@ -106,13 +113,19 @@ class StoreSchema {
106
113
  const theirVersionId = `${sequenceId}/${theirVersion}`;
107
114
  const idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId);
108
115
  if (idx === -1) {
109
- return Result.err("Incompatible schema?");
116
+ const result2 = Result.err("Incompatible schema?");
117
+ this.migrationCache.set(persistedSchema, result2);
118
+ return result2;
110
119
  }
111
120
  for (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {
112
121
  allMigrationsToInclude.add(migration.id);
113
122
  }
114
123
  }
115
- return Result.ok(this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id)));
124
+ const result = Result.ok(
125
+ this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))
126
+ );
127
+ this.migrationCache.set(persistedSchema, result);
128
+ return result;
116
129
  }
117
130
  migratePersistedRecord(record, persistedSchema, direction = "up") {
118
131
  const migrations = this.getMigrationsSince(persistedSchema);
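
The hunks above replace the old TODO with a `WeakMap` cache keyed by the persisted-schema object. A brief sketch of the resulting behaviour (illustrative only; `schema` is assumed to be an existing `StoreSchema` instance):

```ts
import type { StoreSchema, UnknownRecord } from '@tldraw/store'

// Assumed: an existing schema instance for the app's record types.
declare const schema: StoreSchema<UnknownRecord>

// A persisted schema object, e.g. produced by serialize() or read back from storage.
const persisted = schema.serialize()

// First call computes the migration list and caches the Result in the WeakMap,
// keyed by the identity of the `persisted` object.
const first = schema.getMigrationsSince(persisted)

// A second call with the *same object* returns the cached Result; a structurally
// equal but distinct object would miss the cache and recompute.
const second = schema.getMigrationsSince(persisted)
console.log(first === second) // expected: true
```
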
@@ -158,7 +171,7 @@ class StoreSchema {
158
171
  }
159
172
  return { type: "success", value: record };
160
173
  }
161
- migrateStoreSnapshot(snapshot) {
174
+ migrateStoreSnapshot(snapshot, opts) {
162
175
  let { store } = snapshot;
163
176
  const migrations = this.getMigrationsSince(snapshot.schema);
164
177
  if (!migrations.ok) {
@@ -169,7 +182,9 @@ class StoreSchema {
169
182
  if (migrationsToApply.length === 0) {
170
183
  return { type: "success", value: store };
171
184
  }
172
- store = structuredClone(store);
185
+ if (!opts?.mutateInputStore) {
186
+ store = structuredClone(store);
187
+ }
173
188
  try {
174
189
  for (const migration of migrationsToApply) {
175
190
  if (migration.scope === "record") {
@@ -178,13 +193,13 @@ class StoreSchema {
178
193
  if (!shouldApply) continue;
179
194
  const result = migration.up(record);
180
195
  if (result) {
181
- store[id] = structuredClone(result);
196
+ store[id] = result;
182
197
  }
183
198
  }
184
199
  } else if (migration.scope === "store") {
185
200
  const result = migration.up(store);
186
201
  if (result) {
187
- store = structuredClone(result);
202
+ store = result;
188
203
  }
189
204
  } else {
190
205
  exhaustiveSwitchError(migration);
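
For context, a minimal sketch of the new `mutateInputStore` option on `migrateStoreSnapshot` (illustrative only; `schema` and `snapshot` are assumed inputs):

```ts
import type { StoreSchema, StoreSnapshot, UnknownRecord } from '@tldraw/store'

// Assumed inputs: the app's schema and a snapshot loaded from persistence.
declare const schema: StoreSchema<UnknownRecord>
declare const snapshot: StoreSnapshot<UnknownRecord>

// Default: when migrations need to run, the snapshot's store is structuredClone'd
// first, so the input snapshot is left untouched.
const result = schema.migrateStoreSnapshot(snapshot)

// New opt-in: skip the defensive clone and let migrations write into the input
// store directly. Cheaper for large snapshots, but the caller should not rely on
// `snapshot.store` being unchanged afterwards.
const inPlace = schema.migrateStoreSnapshot(snapshot, { mutateInputStore: true })

if (result.type === 'success') {
	// result.value is the migrated SerializedStore<UnknownRecord>
}
```
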
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/lib/StoreSchema.ts"],
4
- "sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/** @public */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/** @public */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/** @public */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/** @public */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/** @public */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/** @public */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? 
{})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\t// TODO: use a weakmap to store the result of this function\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\treturn Result.ok([])\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = `${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\treturn Result.err('Incompatible schema?')\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) 
{\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\treturn Result.ok(this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id)))\n\t}\n\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\tmigrateStoreSnapshot(snapshot: StoreSnapshot<R>): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tstore = structuredClone(store)\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? 
migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = structuredClone(result) as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = structuredClone(result) as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/** @internal */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? undefined\n\t}\n\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/** @internal */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
5
- "mappings": "AAAA;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAIP;AAAA,EAEC;AAAA,EAIA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAqCA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,OAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,OAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,OAAO,GAAG,MAAM;AACxB;AAqBO,MAAM,YAAkD;AAAA,EActD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,aAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,yBAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,mBAAmB,eAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,eAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA,EAlCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EAyBT,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,aAAa,eAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGO,mBAAmB,iBAAgE;AACzF,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AACtB,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,aAAO,OAAO,GAAG,CAAC,CAAC;AAAA,IACpB;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,eAAO,OAAO,IAAI,sBAAsB;AAAA,MACzC;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,WAAO,OAAO,GAAG,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC,CAAC;AAAA,EAC1F;AAAA,EAEA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,uBAAuB,sBACvB,uBAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA,UACN,QAAQ,uBAAuB;AAAA,QAChC;AAAA,MACD;AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,aAAS,gBAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,
MAAM;AAC3C,YAAI,QAAQ;AACX,mBAAS,gBAAgB,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA,EAEA,qBAAqB,UAAiE;AACrF,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,YAAQ,gBAAgB,KAAK;AAE7B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,IAAI,gBAAgB,MAAM;AAAA,YACzD;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,oBAAQ,gBAAgB,MAAM;AAAA,UAC/B;AAAA,QACD,OAAO;AACN,gCAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA,EAGA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA,EAEA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,SAAS,iBAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGA,QAAQ,UAAkB;AACzB,UAAM,OAAO,eAAe,KAAK,OAAO,QAAQ;AAChD,WAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
6
- "names": []
4
+ "sourcesContent": ["import {\n\tResult,\n\tassert,\n\texhaustiveSwitchError,\n\tgetOwnProperty,\n\tstructuredClone,\n} from '@tldraw/utils'\nimport { UnknownRecord } from './BaseRecord'\nimport { RecordType } from './RecordType'\nimport { SerializedStore, Store, StoreSnapshot } from './Store'\nimport {\n\tMigration,\n\tMigrationFailureReason,\n\tMigrationId,\n\tMigrationResult,\n\tMigrationSequence,\n\tparseMigrationId,\n\tsortMigrations,\n\tvalidateMigrations,\n} from './migrate'\n\n/** @public */\nexport interface SerializedSchemaV1 {\n\t/** Schema version is the version for this type you're looking at right now */\n\tschemaVersion: 1\n\t/**\n\t * Store version is the version for the structure of the store. e.g. higher level structure like\n\t * removing or renaming a record type.\n\t */\n\tstoreVersion: number\n\t/** Record versions are the versions for each record type. e.g. adding a new field to a record */\n\trecordVersions: Record<\n\t\tstring,\n\t\t| {\n\t\t\t\tversion: number\n\t\t }\n\t\t| {\n\t\t\t\t// subtypes are used for migrating shape and asset props\n\t\t\t\tversion: number\n\t\t\t\tsubTypeVersions: Record<string, number>\n\t\t\t\tsubTypeKey: string\n\t\t }\n\t>\n}\n\n/** @public */\nexport interface SerializedSchemaV2 {\n\tschemaVersion: 2\n\tsequences: {\n\t\t[sequenceId: string]: number\n\t}\n}\n\n/** @public */\nexport type SerializedSchema = SerializedSchemaV1 | SerializedSchemaV2\n\nexport function upgradeSchema(schema: SerializedSchema): Result<SerializedSchemaV2, string> {\n\tif (schema.schemaVersion > 2 || schema.schemaVersion < 1) return Result.err('Bad schema version')\n\tif (schema.schemaVersion === 2) return Result.ok(schema as SerializedSchemaV2)\n\tconst result: SerializedSchemaV2 = {\n\t\tschemaVersion: 2,\n\t\tsequences: {\n\t\t\t'com.tldraw.store': schema.storeVersion,\n\t\t},\n\t}\n\n\tfor (const [typeName, recordVersion] of Object.entries(schema.recordVersions)) {\n\t\tresult.sequences[`com.tldraw.${typeName}`] = recordVersion.version\n\t\tif ('subTypeKey' in recordVersion) {\n\t\t\tfor (const [subType, version] of Object.entries(recordVersion.subTypeVersions)) {\n\t\t\t\tresult.sequences[`com.tldraw.${typeName}.${subType}`] = version\n\t\t\t}\n\t\t}\n\t}\n\treturn Result.ok(result)\n}\n\n/** @public */\nexport interface StoreValidationFailure<R extends UnknownRecord> {\n\terror: unknown\n\tstore: Store<R>\n\trecord: R\n\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests'\n\trecordBefore: R | null\n}\n\n/** @public */\nexport interface StoreSchemaOptions<R extends UnknownRecord, P> {\n\tmigrations?: MigrationSequence[]\n\t/** @public */\n\tonValidationFailure?(data: StoreValidationFailure<R>): R\n\t/** @internal */\n\tcreateIntegrityChecker?(store: Store<R, P>): void\n}\n\n/** @public */\nexport class StoreSchema<R extends UnknownRecord, P = unknown> {\n\tstatic create<R extends UnknownRecord, P = unknown>(\n\t\t// HACK: making this param work with RecordType is an enormous pain\n\t\t// let's just settle for making sure each typeName has a corresponding RecordType\n\t\t// and accept that this function won't be able to infer the record type from it's arguments\n\t\ttypes: { [TypeName in R['typeName']]: { createId: any } },\n\t\toptions?: StoreSchemaOptions<R, P>\n\t): StoreSchema<R, P> {\n\t\treturn new StoreSchema<R, P>(types as any, options ?? 
{})\n\t}\n\n\treadonly migrations: Record<string, MigrationSequence> = {}\n\treadonly sortedMigrations: readonly Migration[]\n\tprivate readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()\n\n\tprivate constructor(\n\t\tpublic readonly types: {\n\t\t\t[Record in R as Record['typeName']]: RecordType<R, any>\n\t\t},\n\t\tprivate readonly options: StoreSchemaOptions<R, P>\n\t) {\n\t\tfor (const m of options.migrations ?? []) {\n\t\t\tassert(!this.migrations[m.sequenceId], `Duplicate migration sequenceId ${m.sequenceId}`)\n\t\t\tvalidateMigrations(m)\n\t\t\tthis.migrations[m.sequenceId] = m\n\t\t}\n\t\tconst allMigrations = Object.values(this.migrations).flatMap((m) => m.sequence)\n\t\tthis.sortedMigrations = sortMigrations(allMigrations)\n\n\t\tfor (const migration of this.sortedMigrations) {\n\t\t\tif (!migration.dependsOn?.length) continue\n\t\t\tfor (const dep of migration.dependsOn) {\n\t\t\t\tconst depMigration = allMigrations.find((m) => m.id === dep)\n\t\t\t\tassert(depMigration, `Migration '${migration.id}' depends on missing migration '${dep}'`)\n\t\t\t}\n\t\t}\n\t}\n\n\tvalidateRecord(\n\t\tstore: Store<R>,\n\t\trecord: R,\n\t\tphase: 'initialize' | 'createRecord' | 'updateRecord' | 'tests',\n\t\trecordBefore: R | null\n\t): R {\n\t\ttry {\n\t\t\tconst recordType = getOwnProperty(this.types, record.typeName)\n\t\t\tif (!recordType) {\n\t\t\t\tthrow new Error(`Missing definition for record type ${record.typeName}`)\n\t\t\t}\n\t\t\treturn recordType.validate(record, recordBefore ?? undefined)\n\t\t} catch (error: unknown) {\n\t\t\tif (this.options.onValidationFailure) {\n\t\t\t\treturn this.options.onValidationFailure({\n\t\t\t\t\tstore,\n\t\t\t\t\trecord,\n\t\t\t\t\tphase,\n\t\t\t\t\trecordBefore,\n\t\t\t\t\terror,\n\t\t\t\t})\n\t\t\t} else {\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\tpublic getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {\n\t\t// Check cache first\n\t\tconst cached = this.migrationCache.get(persistedSchema)\n\t\tif (cached) {\n\t\t\treturn cached\n\t\t}\n\n\t\tconst upgradeResult = upgradeSchema(persistedSchema)\n\t\tif (!upgradeResult.ok) {\n\t\t\t// Cache the error result\n\t\t\tthis.migrationCache.set(persistedSchema, upgradeResult)\n\t\t\treturn upgradeResult\n\t\t}\n\t\tconst schema = upgradeResult.value\n\t\tconst sequenceIdsToInclude = new Set(\n\t\t\t// start with any shared sequences\n\t\t\tObject.keys(schema.sequences).filter((sequenceId) => this.migrations[sequenceId])\n\t\t)\n\n\t\t// also include any sequences that are not in the persisted schema but are marked as postHoc\n\t\tfor (const sequenceId in this.migrations) {\n\t\t\tif (schema.sequences[sequenceId] === undefined && this.migrations[sequenceId].retroactive) {\n\t\t\t\tsequenceIdsToInclude.add(sequenceId)\n\t\t\t}\n\t\t}\n\n\t\tif (sequenceIdsToInclude.size === 0) {\n\t\t\tconst result = Result.ok([])\n\t\t\t// Cache the empty result\n\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\treturn result\n\t\t}\n\n\t\tconst allMigrationsToInclude = new Set<MigrationId>()\n\t\tfor (const sequenceId of sequenceIdsToInclude) {\n\t\t\tconst theirVersion = schema.sequences[sequenceId]\n\t\t\tif (\n\t\t\t\t(typeof theirVersion !== 'number' && this.migrations[sequenceId].retroactive) ||\n\t\t\t\ttheirVersion === 0\n\t\t\t) {\n\t\t\t\tfor (const migration of this.migrations[sequenceId].sequence) {\n\t\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tconst theirVersionId = 
`${sequenceId}/${theirVersion}`\n\t\t\tconst idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)\n\t\t\t// todo: better error handling\n\t\t\tif (idx === -1) {\n\t\t\t\tconst result = Result.err('Incompatible schema?')\n\t\t\t\t// Cache the error result\n\t\t\t\tthis.migrationCache.set(persistedSchema, result)\n\t\t\t\treturn result\n\t\t\t}\n\t\t\tfor (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {\n\t\t\t\tallMigrationsToInclude.add(migration.id)\n\t\t\t}\n\t\t}\n\n\t\t// collect any migrations\n\t\tconst result = Result.ok(\n\t\t\tthis.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))\n\t\t)\n\t\t// Cache the result\n\t\tthis.migrationCache.set(persistedSchema, result)\n\t\treturn result\n\t}\n\n\tmigratePersistedRecord(\n\t\trecord: R,\n\t\tpersistedSchema: SerializedSchema,\n\t\tdirection: 'up' | 'down' = 'up'\n\t): MigrationResult<R> {\n\t\tconst migrations = this.getMigrationsSince(persistedSchema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating record', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tlet migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: record }\n\t\t}\n\n\t\tif (migrationsToApply.some((m) => m.scope === 'store')) {\n\t\t\treturn {\n\t\t\t\ttype: 'error',\n\t\t\t\treason:\n\t\t\t\t\tdirection === 'down'\n\t\t\t\t\t\t? MigrationFailureReason.TargetVersionTooOld\n\t\t\t\t\t\t: MigrationFailureReason.TargetVersionTooNew,\n\t\t\t}\n\t\t}\n\n\t\tif (direction === 'down') {\n\t\t\tif (!migrationsToApply.every((m) => m.down)) {\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'error',\n\t\t\t\t\treason: MigrationFailureReason.TargetVersionTooOld,\n\t\t\t\t}\n\t\t\t}\n\t\t\tmigrationsToApply = migrationsToApply.slice().reverse()\n\t\t}\n\n\t\trecord = structuredClone(record)\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)\n\t\t\t\tconst shouldApply = migration.filter ? migration.filter(record) : true\n\t\t\t\tif (!shouldApply) continue\n\t\t\t\tconst result = migration[direction]!(record)\n\t\t\t\tif (result) {\n\t\t\t\t\trecord = structuredClone(result) as any\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating record', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: record }\n\t}\n\n\tmigrateStoreSnapshot(\n\t\tsnapshot: StoreSnapshot<R>,\n\t\topts?: { mutateInputStore?: boolean }\n\t): MigrationResult<SerializedStore<R>> {\n\t\tlet { store } = snapshot\n\t\tconst migrations = this.getMigrationsSince(snapshot.schema)\n\t\tif (!migrations.ok) {\n\t\t\t// TODO: better error\n\t\t\tconsole.error('Error migrating store', migrations.error)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\t\tconst migrationsToApply = migrations.value\n\t\tif (migrationsToApply.length === 0) {\n\t\t\treturn { type: 'success', value: store }\n\t\t}\n\n\t\tif (!opts?.mutateInputStore) {\n\t\t\tstore = structuredClone(store)\n\t\t}\n\n\t\ttry {\n\t\t\tfor (const migration of migrationsToApply) {\n\t\t\t\tif (migration.scope === 'record') {\n\t\t\t\t\tfor (const [id, record] of Object.entries(store)) {\n\t\t\t\t\t\tconst shouldApply = migration.filter ? 
migration.filter(record as UnknownRecord) : true\n\t\t\t\t\t\tif (!shouldApply) continue\n\t\t\t\t\t\tconst result = migration.up!(record as any)\n\t\t\t\t\t\tif (result) {\n\t\t\t\t\t\t\tstore[id as keyof typeof store] = result as any\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (migration.scope === 'store') {\n\t\t\t\t\tconst result = migration.up!(store)\n\t\t\t\t\tif (result) {\n\t\t\t\t\t\tstore = result as any\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\texhaustiveSwitchError(migration)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (e) {\n\t\t\tconsole.error('Error migrating store', e)\n\t\t\treturn { type: 'error', reason: MigrationFailureReason.MigrationError }\n\t\t}\n\n\t\treturn { type: 'success', value: store }\n\t}\n\n\t/** @internal */\n\tcreateIntegrityChecker(store: Store<R, P>): (() => void) | undefined {\n\t\treturn this.options.createIntegrityChecker?.(store) ?? undefined\n\t}\n\n\tserialize(): SerializedSchemaV2 {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId, sequence }) => [\n\t\t\t\t\tsequenceId,\n\t\t\t\t\tsequence.length ? parseMigrationId(sequence.at(-1)!.id).version : 0,\n\t\t\t\t])\n\t\t\t),\n\t\t}\n\t}\n\n\t/**\n\t * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!\n\t */\n\tserializeEarliestVersion(): SerializedSchema {\n\t\treturn {\n\t\t\tschemaVersion: 2,\n\t\t\tsequences: Object.fromEntries(\n\t\t\t\tObject.values(this.migrations).map(({ sequenceId }) => [sequenceId, 0])\n\t\t\t),\n\t\t}\n\t}\n\n\t/** @internal */\n\tgetType(typeName: string) {\n\t\tconst type = getOwnProperty(this.types, typeName)\n\t\tassert(type, 'record type does not exists')\n\t\treturn type\n\t}\n}\n"],
5
+ "mappings": "AAAA;AAAA,EACC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAIP;AAAA,EAEC;AAAA,EAIA;AAAA,EACA;AAAA,EACA;AAAA,OACM;AAqCA,SAAS,cAAc,QAA8D;AAC3F,MAAI,OAAO,gBAAgB,KAAK,OAAO,gBAAgB,EAAG,QAAO,OAAO,IAAI,oBAAoB;AAChG,MAAI,OAAO,kBAAkB,EAAG,QAAO,OAAO,GAAG,MAA4B;AAC7E,QAAM,SAA6B;AAAA,IAClC,eAAe;AAAA,IACf,WAAW;AAAA,MACV,oBAAoB,OAAO;AAAA,IAC5B;AAAA,EACD;AAEA,aAAW,CAAC,UAAU,aAAa,KAAK,OAAO,QAAQ,OAAO,cAAc,GAAG;AAC9E,WAAO,UAAU,cAAc,QAAQ,EAAE,IAAI,cAAc;AAC3D,QAAI,gBAAgB,eAAe;AAClC,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,cAAc,eAAe,GAAG;AAC/E,eAAO,UAAU,cAAc,QAAQ,IAAI,OAAO,EAAE,IAAI;AAAA,MACzD;AAAA,IACD;AAAA,EACD;AACA,SAAO,OAAO,GAAG,MAAM;AACxB;AAqBO,MAAM,YAAkD;AAAA,EAetD,YACS,OAGC,SAChB;AAJe;AAGC;AAEjB,eAAW,KAAK,QAAQ,cAAc,CAAC,GAAG;AACzC,aAAO,CAAC,KAAK,WAAW,EAAE,UAAU,GAAG,kCAAkC,EAAE,UAAU,EAAE;AACvF,yBAAmB,CAAC;AACpB,WAAK,WAAW,EAAE,UAAU,IAAI;AAAA,IACjC;AACA,UAAM,gBAAgB,OAAO,OAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAC9E,SAAK,mBAAmB,eAAe,aAAa;AAEpD,eAAW,aAAa,KAAK,kBAAkB;AAC9C,UAAI,CAAC,UAAU,WAAW,OAAQ;AAClC,iBAAW,OAAO,UAAU,WAAW;AACtC,cAAM,eAAe,cAAc,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG;AAC3D,eAAO,cAAc,cAAc,UAAU,EAAE,mCAAmC,GAAG,GAAG;AAAA,MACzF;AAAA,IACD;AAAA,EACD;AAAA,EAnCA,OAAO,OAIN,OACA,SACoB;AACpB,WAAO,IAAI,YAAkB,OAAc,WAAW,CAAC,CAAC;AAAA,EACzD;AAAA,EAES,aAAgD,CAAC;AAAA,EACjD;AAAA,EACQ,iBAAiB,oBAAI,QAAuD;AAAA,EAyB7F,eACC,OACA,QACA,OACA,cACI;AACJ,QAAI;AACH,YAAM,aAAa,eAAe,KAAK,OAAO,OAAO,QAAQ;AAC7D,UAAI,CAAC,YAAY;AAChB,cAAM,IAAI,MAAM,sCAAsC,OAAO,QAAQ,EAAE;AAAA,MACxE;AACA,aAAO,WAAW,SAAS,QAAQ,gBAAgB,MAAS;AAAA,IAC7D,SAAS,OAAgB;AACxB,UAAI,KAAK,QAAQ,qBAAqB;AACrC,eAAO,KAAK,QAAQ,oBAAoB;AAAA,UACvC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA,EAEO,mBAAmB,iBAAgE;AAEzF,UAAM,SAAS,KAAK,eAAe,IAAI,eAAe;AACtD,QAAI,QAAQ;AACX,aAAO;AAAA,IACR;AAEA,UAAM,gBAAgB,cAAc,eAAe;AACnD,QAAI,CAAC,cAAc,IAAI;AAEtB,WAAK,eAAe,IAAI,iBAAiB,aAAa;AACtD,aAAO;AAAA,IACR;AACA,UAAM,SAAS,cAAc;AAC7B,UAAM,uBAAuB,IAAI;AAAA;AAAA,MAEhC,OAAO,KAAK,OAAO,SAAS,EAAE,OAAO,CAAC,eAAe,KAAK,WAAW,UAAU,CAAC;AAAA,IACjF;AAGA,eAAW,cAAc,KAAK,YAAY;AACzC,UAAI,OAAO,UAAU,UAAU,MAAM,UAAa,KAAK,WAAW,UAAU,EAAE,aAAa;AAC1F,6BAAqB,IAAI,UAAU;AAAA,MACpC;AAAA,IACD;AAEA,QAAI,qBAAqB,SAAS,GAAG;AACpC,YAAMA,UAAS,OAAO,GAAG,CAAC,CAAC;AAE3B,WAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,aAAOA;AAAA,IACR;AAEA,UAAM,yBAAyB,oBAAI,IAAiB;AACpD,eAAW,cAAc,sBAAsB;AAC9C,YAAM,eAAe,OAAO,UAAU,UAAU;AAChD,UACE,OAAO,iBAAiB,YAAY,KAAK,WAAW,UAAU,EAAE,eACjE,iBAAiB,GAChB;AACD,mBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,UAAU;AAC7D,iCAAuB,IAAI,UAAU,EAAE;AAAA,QACxC;AACA;AAAA,MACD;AACA,YAAM,iBAAiB,GAAG,UAAU,IAAI,YAAY;AACpD,YAAM,MAAM,KAAK,WAAW,UAAU,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,cAAc;AAEzF,UAAI,QAAQ,IAAI;AACf,cAAMA,UAAS,OAAO,IAAI,sBAAsB;AAEhD,aAAK,eAAe,IAAI,iBAAiBA,OAAM;AAC/C,eAAOA;AAAA,MACR;AACA,iBAAW,aAAa,KAAK,WAAW,UAAU,EAAE,SAAS,MAAM,MAAM,CAAC,GAAG;AAC5E,+BAAuB,IAAI,UAAU,EAAE;AAAA,MACxC;AAAA,IACD;AAGA,UAAM,SAAS,OAAO;AAAA,MACrB,KAAK,iBAAiB,OAAO,CAAC,EAAE,GAAG,MAAM,uBAAuB,IAAI,EAAE,CAAC;AAAA,IACxE;AAEA,SAAK,eAAe,IAAI,iBAAiB,MAAM;AAC/C,WAAO;AAAA,EACR;AAAA,EAEA,uBACC,QACA,iBACA,YAA2B,MACN;AACrB,UAAM,aAAa,KAAK,mBAAmB,eAAe;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,0BAA0B,WAAW,KAAK;AACxD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,QAAI,oBAAoB,WAAW;AACnC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,IACzC;AAEA,QAAI,kBAAkB,KAAK,CAAC,MAAM,EAAE,UAAU,OAAO,GAAG;AACvD,aAAO;AAAA,QACN,MAAM;AAAA,QACN,QACC,cAAc,SACX,uBAAuB,sBACvB,uBAAuB;AAAA,MAC5B;AAAA,IACD;AAEA,QAAI,cAAc,QAAQ;AACzB,UAAI,CAAC,kBAAkB,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC5C,eAAO;AAAA,UACN,MAAM;AAAA
,UACN,QAAQ,uBAAuB;AAAA,QAChC;AAAA,MACD;AACA,0BAAoB,kBAAkB,MAAM,EAAE,QAAQ;AAAA,IACvD;AAEA,aAAS,gBAAgB,MAAM;AAC/B,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,QAAS,OAAM,IAAI;AAAA;AAAA,QAAqC;AAChF,cAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAM,IAAI;AAClE,YAAI,CAAC,YAAa;AAClB,cAAM,SAAS,UAAU,SAAS,EAAG,MAAM;AAC3C,YAAI,QAAQ;AACX,mBAAS,gBAAgB,MAAM;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,0BAA0B,CAAC;AACzC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,OAAO;AAAA,EACzC;AAAA,EAEA,qBACC,UACA,MACsC;AACtC,QAAI,EAAE,MAAM,IAAI;AAChB,UAAM,aAAa,KAAK,mBAAmB,SAAS,MAAM;AAC1D,QAAI,CAAC,WAAW,IAAI;AAEnB,cAAQ,MAAM,yBAAyB,WAAW,KAAK;AACvD,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AACA,UAAM,oBAAoB,WAAW;AACrC,QAAI,kBAAkB,WAAW,GAAG;AACnC,aAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,IACxC;AAEA,QAAI,CAAC,MAAM,kBAAkB;AAC5B,cAAQ,gBAAgB,KAAK;AAAA,IAC9B;AAEA,QAAI;AACH,iBAAW,aAAa,mBAAmB;AAC1C,YAAI,UAAU,UAAU,UAAU;AACjC,qBAAW,CAAC,IAAI,MAAM,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,kBAAM,cAAc,UAAU,SAAS,UAAU,OAAO,MAAuB,IAAI;AACnF,gBAAI,CAAC,YAAa;AAClB,kBAAM,SAAS,UAAU,GAAI,MAAa;AAC1C,gBAAI,QAAQ;AACX,oBAAM,EAAwB,IAAI;AAAA,YACnC;AAAA,UACD;AAAA,QACD,WAAW,UAAU,UAAU,SAAS;AACvC,gBAAM,SAAS,UAAU,GAAI,KAAK;AAClC,cAAI,QAAQ;AACX,oBAAQ;AAAA,UACT;AAAA,QACD,OAAO;AACN,gCAAsB,SAAS;AAAA,QAChC;AAAA,MACD;AAAA,IACD,SAAS,GAAG;AACX,cAAQ,MAAM,yBAAyB,CAAC;AACxC,aAAO,EAAE,MAAM,SAAS,QAAQ,uBAAuB,eAAe;AAAA,IACvE;AAEA,WAAO,EAAE,MAAM,WAAW,OAAO,MAAM;AAAA,EACxC;AAAA;AAAA,EAGA,uBAAuB,OAA8C;AACpE,WAAO,KAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,EACxD;AAAA,EAEA,YAAgC;AAC/B,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,YAAY,SAAS,MAAM;AAAA,UAChE;AAAA,UACA,SAAS,SAAS,iBAAiB,SAAS,GAAG,EAAE,EAAG,EAAE,EAAE,UAAU;AAAA,QACnE,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,2BAA6C;AAC5C,WAAO;AAAA,MACN,eAAe;AAAA,MACf,WAAW,OAAO;AAAA,QACjB,OAAO,OAAO,KAAK,UAAU,EAAE,IAAI,CAAC,EAAE,WAAW,MAAM,CAAC,YAAY,CAAC,CAAC;AAAA,MACvE;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGA,QAAQ,UAAkB;AACzB,UAAM,OAAO,eAAe,KAAK,OAAO,QAAQ;AAChD,WAAO,MAAM,6BAA6B;AAC1C,WAAO;AAAA,EACR;AACD;",
6
+ "names": ["result"]
7
7
  }