@tldraw/store 4.3.0-canary.c5efe11c58e0 → 4.3.0-canary.ce745d1ecc12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dist-cjs/index.d.ts +48 -0
  2. package/dist-cjs/index.js +3 -1
  3. package/dist-cjs/index.js.map +2 -2
  4. package/dist-cjs/lib/AtomSet.js +68 -0
  5. package/dist-cjs/lib/AtomSet.js.map +7 -0
  6. package/dist-cjs/lib/ImmutableMap.js +25 -23
  7. package/dist-cjs/lib/ImmutableMap.js.map +2 -2
  8. package/dist-cjs/lib/StoreSchema.js +84 -24
  9. package/dist-cjs/lib/StoreSchema.js.map +2 -2
  10. package/dist-cjs/lib/devFreeze.js +5 -3
  11. package/dist-cjs/lib/devFreeze.js.map +2 -2
  12. package/dist-cjs/lib/isDev.js +37 -0
  13. package/dist-cjs/lib/isDev.js.map +7 -0
  14. package/dist-cjs/lib/migrate.js.map +2 -2
  15. package/dist-esm/index.d.mts +48 -0
  16. package/dist-esm/index.mjs +3 -1
  17. package/dist-esm/index.mjs.map +2 -2
  18. package/dist-esm/lib/AtomSet.mjs +48 -0
  19. package/dist-esm/lib/AtomSet.mjs.map +7 -0
  20. package/dist-esm/lib/ImmutableMap.mjs +25 -23
  21. package/dist-esm/lib/ImmutableMap.mjs.map +2 -2
  22. package/dist-esm/lib/StoreSchema.mjs +87 -25
  23. package/dist-esm/lib/StoreSchema.mjs.map +2 -2
  24. package/dist-esm/lib/devFreeze.mjs +5 -3
  25. package/dist-esm/lib/devFreeze.mjs.map +2 -2
  26. package/dist-esm/lib/isDev.mjs +16 -0
  27. package/dist-esm/lib/isDev.mjs.map +7 -0
  28. package/dist-esm/lib/migrate.mjs.map +2 -2
  29. package/package.json +4 -4
  30. package/src/index.ts +3 -0
  31. package/src/lib/AtomSet.ts +52 -0
  32. package/src/lib/ImmutableMap.ts +25 -33
  33. package/src/lib/StoreSchema.ts +97 -30
  34. package/src/lib/devFreeze.test.ts +6 -2
  35. package/src/lib/devFreeze.ts +7 -3
  36. package/src/lib/isDev.ts +20 -0
  37. package/src/lib/migrate.ts +29 -0
  38. package/src/lib/test/recordStore.test.ts +182 -0
package/src/lib/StoreSchema.ts
@@ -1,13 +1,14 @@
 import {
-	Result,
 	assert,
 	exhaustiveSwitchError,
 	getOwnProperty,
+	isEqual,
+	objectMapEntries,
+	Result,
 	structuredClone,
 } from '@tldraw/utils'
 import { UnknownRecord } from './BaseRecord'
-import { RecordType } from './RecordType'
-import { SerializedStore, Store, StoreSnapshot } from './Store'
+import { devFreeze } from './devFreeze'
 import {
 	Migration,
 	MigrationFailureReason,
@@ -16,8 +17,11 @@ import {
 	MigrationSequence,
 	parseMigrationId,
 	sortMigrations,
+	SynchronousStorage,
 	validateMigrations,
 } from './migrate'
+import { RecordType } from './RecordType'
+import { SerializedStore, Store, StoreSnapshot } from './Store'
 
 /**
  * Version 1 format for serialized store schema information.
@@ -530,7 +534,7 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 			return { type: 'success', value: record }
 		}
 
-		if (migrationsToApply.some((m) => m.scope === 'store')) {
+		if (!migrationsToApply.every((m) => m.scope === 'record')) {
 			return {
 				type: 'error',
 				reason:
@@ -541,7 +545,7 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 		}
 
 		if (direction === 'down') {
-			if (!migrationsToApply.every((m) => m.down)) {
+			if (!migrationsToApply.every((m) => m.scope === 'record' && m.down)) {
 				return {
 					type: 'error',
 					reason: MigrationFailureReason.TargetVersionTooOld,
@@ -554,6 +558,7 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 		try {
 			for (const migration of migrationsToApply) {
 				if (migration.scope === 'store') throw new Error(/* won't happen, just for TS */)
+				if (migration.scope === 'storage') throw new Error(/* won't happen, just for TS */)
 				const shouldApply = migration.filter ? migration.filter(record) : true
 				if (!shouldApply) continue
 				const result = migration[direction]!(record)
@@ -569,6 +574,71 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 		return { type: 'success', value: record }
 	}
 
+	migrateStorage(storage: SynchronousStorage<R>) {
+		const schema = storage.getSchema()
+		assert(schema, 'Schema is missing.')
+
+		const migrations = this.getMigrationsSince(schema)
+		if (!migrations.ok) {
+			console.error('Error migrating store', migrations.error)
+			throw new Error(migrations.error)
+		}
+		const migrationsToApply = migrations.value
+		if (migrationsToApply.length === 0) {
+			return
+		}
+
+		storage.setSchema(this.serialize())
+
+		for (const migration of migrationsToApply) {
+			if (migration.scope === 'record') {
+				// Collect updates during iteration, then apply them after.
+				// This avoids issues with live iterators (e.g., SQLite) where updating
+				// records during iteration can cause them to be visited multiple times.
+				const updates: [string, R][] = []
+				for (const [id, state] of storage.entries()) {
+					const shouldApply = migration.filter ? migration.filter(state) : true
+					if (!shouldApply) continue
+					const record = structuredClone(state)
+					const result = migration.up!(record as any) ?? record
+					if (!isEqual(result, state)) {
+						updates.push([id, result as R])
+					}
+				}
+				for (const [id, record] of updates) {
+					storage.set(id, record)
+				}
+			} else if (migration.scope === 'store') {
+				// legacy
+				const prevStore = Object.fromEntries(storage.entries())
+				let nextStore = structuredClone(prevStore)
+				nextStore = (migration.up!(nextStore) as any) ?? nextStore
+				for (const [id, state] of Object.entries(nextStore)) {
+					if (!state) continue // these will be deleted in the next loop
+					if (!isEqual(state, prevStore[id])) {
+						storage.set(id, state)
+					}
+				}
+				for (const id of Object.keys(prevStore)) {
+					if (!nextStore[id]) {
+						storage.delete(id)
+					}
+				}
+			} else if (migration.scope === 'storage') {
+				migration.up!(storage)
+			} else {
+				exhaustiveSwitchError(migration)
+			}
+		}
+		// Clean up by filtering out any non-document records.
+		// This is mainly legacy support for extremely early days tldraw.
+		for (const [id, state] of storage.entries()) {
+			if (this.getType(state.typeName).scope !== 'document') {
+				storage.delete(id)
+			}
+		}
+	}
+
 	/**
 	 * Migrates an entire store snapshot to match the current schema version.
 	 *
@@ -604,7 +674,6 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 		snapshot: StoreSnapshot<R>,
 		opts?: { mutateInputStore?: boolean }
 	): MigrationResult<SerializedStore<R>> {
-		let { store } = snapshot
 		const migrations = this.getMigrationsSince(snapshot.schema)
 		if (!migrations.ok) {
 			// TODO: better error
@@ -613,39 +682,37 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 		}
 		const migrationsToApply = migrations.value
 		if (migrationsToApply.length === 0) {
-			return { type: 'success', value: store }
-		}
-
-		if (!opts?.mutateInputStore) {
-			store = structuredClone(store)
+			return { type: 'success', value: snapshot.store }
 		}
-
+		const store = Object.assign(
+			new Map<string, R>(objectMapEntries(snapshot.store).map(devFreeze)),
+			{
+				getSchema: () => snapshot.schema,
+				setSchema: (_: SerializedSchema) => {},
+			}
+		)
 		try {
-			for (const migration of migrationsToApply) {
-				if (migration.scope === 'record') {
-					for (const [id, record] of Object.entries(store)) {
-						const shouldApply = migration.filter ? migration.filter(record as UnknownRecord) : true
-						if (!shouldApply) continue
-						const result = migration.up!(record as any)
-						if (result) {
-							store[id as keyof typeof store] = result as any
-						}
-					}
-				} else if (migration.scope === 'store') {
-					const result = migration.up!(store)
-					if (result) {
-						store = result as any
+			this.migrateStorage(store)
+			if (opts?.mutateInputStore) {
+				for (const [id, record] of store.entries()) {
+					snapshot.store[id as keyof typeof snapshot.store] = record
+				}
+				for (const id of Object.keys(snapshot.store)) {
+					if (!store.has(id)) {
+						delete snapshot.store[id as keyof typeof snapshot.store]
 					}
-				} else {
-					exhaustiveSwitchError(migration)
+				}
+				return { type: 'success', value: snapshot.store }
+			} else {
+				return {
+					type: 'success',
+					value: Object.fromEntries(store.entries()) as SerializedStore<R>,
 				}
 			}
 		} catch (e) {
 			console.error('Error migrating store', e)
 			return { type: 'error', reason: MigrationFailureReason.MigrationError }
 		}
-
-		return { type: 'success', value: store }
 	}
 
 	/**
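
Note: the new migrateStorage method above depends only on the SynchronousStorage contract (defined in the migrate.ts hunk further down), so it can run against any synchronous key-value backend, not just in-memory snapshots. Below is a minimal sketch of a Map-backed adapter, assuming the new SynchronousStorage type is re-exported from the package entry (the index.ts change suggests new exports); the createMapStorage helper is illustrative and not part of the package.

import { SerializedSchema, SerializedStore, SynchronousStorage, UnknownRecord } from '@tldraw/store'

// Illustrative sketch: wrap a serialized snapshot in the SynchronousStorage interface
// so that StoreSchema.migrateStorage can read and write it in place.
function createMapStorage<R extends UnknownRecord>(
	store: SerializedStore<R>,
	schema: SerializedSchema
): SynchronousStorage<R> {
	const map = new Map<string, R>(Object.entries(store) as [string, R][])
	let currentSchema = schema
	return {
		get: (id) => map.get(id),
		set: (id, record) => void map.set(id, record),
		delete: (id) => void map.delete(id),
		keys: () => map.keys(),
		values: () => map.values(),
		entries: () => map.entries(),
		getSchema: () => currentSchema,
		setSchema: (next) => {
			currentSchema = next
		},
	}
}

// Usage sketch (schema: a StoreSchema instance, snapshot: a StoreSnapshot, both hypothetical here):
// schema.migrateStorage(createMapStorage(snapshot.store, snapshot.schema))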
package/src/lib/devFreeze.test.ts
@@ -1,8 +1,12 @@
-import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'
+import { afterAll, beforeEach, describe, expect, it, MockedFunction, vi } from 'vitest'
 import { devFreeze } from './devFreeze'
+import { isDev } from './isDev'
 
 // Mock process.env for testing
 const originalEnv = process.env.NODE_ENV
+vi.mock('./isDev', () => ({
+	isDev: vi.fn(() => true),
+}))
 
 describe('devFreeze', () => {
 	beforeEach(() => {
@@ -13,7 +17,7 @@ describe('devFreeze', () => {
 	describe('production mode behavior', () => {
 		beforeEach(() => {
 			// Mock production environment
-			vi.stubGlobal('process', { env: { NODE_ENV: 'production' } })
+			;(isDev as MockedFunction<typeof isDev>).mockReturnValue(false)
 		})
 
 		it('should return objects unchanged in production mode', () => {
package/src/lib/devFreeze.ts
@@ -1,4 +1,5 @@
 import { STRUCTURED_CLONE_OBJECT_PROTOTYPE } from '@tldraw/utils'
+import { isDev } from './isDev'
 
 /**
  * Freeze an object when in development mode. Copied from
@@ -15,9 +16,8 @@ import { STRUCTURED_CLONE_OBJECT_PROTOTYPE } from '@tldraw/utils'
  * @public
  */
 export function devFreeze<T>(object: T): T {
-	if (process.env.NODE_ENV === 'production') {
-		return object
-	}
+	if (!isDev()) return object
+
 	const proto = Object.getPrototypeOf(object)
 	if (
 		proto &&
@@ -32,6 +32,10 @@ export function devFreeze<T>(object: T): T {
 		throw new Error('cannot include non-js data in a record')
 	}
 
+	if (Object.isFrozen(object)) {
+		return object
+	}
+
 	// Retrieve the property names defined on object
 	const propNames = Object.getOwnPropertyNames(object)
 
package/src/lib/isDev.ts (new file)
@@ -0,0 +1,20 @@
+let _isDev = false
+try {
+	_isDev = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'test'
+} catch (_e) {
+	/* noop */
+}
+try {
+	_isDev =
+		_isDev ||
+		(import.meta as any).env.DEV ||
+		(import.meta as any).env.TEST ||
+		(import.meta as any).env.MODE === 'development' ||
+		(import.meta as any).env.MODE === 'test'
+} catch (_e) {
+	/* noop */
+}
+
+export function isDev() {
+	return _isDev
+}
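
Net effect of the two files above: devFreeze is now gated on isDev(), which reports true for development and test builds under either process.env.NODE_ENV or import.meta.env, rather than on NODE_ENV !== 'production', and objects that are already frozen are returned immediately. A small usage sketch, written the way the function is used inside the package source (imported from './devFreeze'):

import { devFreeze } from './devFreeze'

// In development/test builds the record is deep-frozen; in production builds it is returned untouched.
const record = devFreeze({ typeName: 'book', id: 'book:lotr', numPages: 1000 })

// Freezing again is now an early return thanks to the Object.isFrozen check,
// so repeated devFreeze calls on the same record stay cheap.
const again = devFreeze(record)
console.log(again === record) // true (devFreeze returns its argument)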
package/src/lib/migrate.ts
@@ -1,6 +1,7 @@
 import { assert, objectMapEntries } from '@tldraw/utils'
 import { UnknownRecord } from './BaseRecord'
 import { SerializedStore } from './Store'
+import { SerializedSchema } from './StoreSchema'
 
 function squashDependsOn(sequence: Array<Migration | StandaloneDependsOn>): Migration[] {
 	const result: Migration[] = []
@@ -219,8 +220,36 @@ export type Migration = {
 				newState: SerializedStore<UnknownRecord>
 			) => void | SerializedStore<UnknownRecord>
 	  }
+	| {
+			readonly scope: 'storage'
+			// eslint-disable-next-line @typescript-eslint/method-signature-style
+			readonly up: (storage: SynchronousRecordStorage<UnknownRecord>) => void
+			readonly down?: never
+	  }
 )
 
+/**
+ * Abstraction over the store that can be used to perform migrations.
+ * @public
+ */
+export interface SynchronousRecordStorage<R extends UnknownRecord> {
+	get(id: string): R | undefined
+	set(id: string, record: R): void
+	delete(id: string): void
+	keys(): Iterable<string>
+	values(): Iterable<R>
+	entries(): Iterable<[string, R]>
+}
+
+/**
+ * Abstraction over the storage that can be used to perform migrations.
+ * @public
+ */
+export interface SynchronousStorage<R extends UnknownRecord> extends SynchronousRecordStorage<R> {
+	getSchema(): SerializedSchema
+	setSchema(schema: SerializedSchema): void
+}
+
 /**
  * Base interface for legacy migration information.
 *
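
The new 'storage' scope lets a single migration walk the whole storage once instead of being invoked per record. Below is a sketch of declaring one with createMigrationSequence, modelled on the tests added further down; the sequence id and the numPages field are illustrative, and it assumes SynchronousRecordStorage and UnknownRecord are importable from the package entry.

import { createMigrationSequence, SynchronousRecordStorage, UnknownRecord } from '@tldraw/store'

// Illustrative: a storage-scope migration that rewrites every 'book' record in one pass,
// receiving the SynchronousRecordStorage abstraction rather than individual records.
const bookMigrations = createMigrationSequence({
	sequenceId: 'com.example.books',
	retroactive: true,
	sequence: [
		{
			id: 'com.example.books/1',
			scope: 'storage',
			up(storage: SynchronousRecordStorage<UnknownRecord>) {
				for (const [id, record] of storage.entries()) {
					if (record.typeName !== 'book') continue
					// Backfill a hypothetical numPages field on every book record.
					const next = { ...record, numPages: (record as any).numPages ?? 0 }
					storage.set(id, next)
				}
			},
		},
	],
})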
package/src/lib/test/recordStore.test.ts
@@ -883,6 +883,188 @@ describe('snapshots', () => {
 		expect(up).toHaveBeenCalledTimes(1)
 		expect(store2.get(Book.createId('lotr'))!.numPages).toBe(42)
 	})
+
+	it('migrates the snapshot with storage scope', () => {
+		const snapshot1 = store.getStoreSnapshot()
+		const up = vi.fn((storage: any) => {
+			const book = storage.get('book:lotr')
+			storage.set('book:lotr', { ...book, numPages: 42 })
+		})
+
+		expect((snapshot1.store as any)['book:lotr'].numPages).toBe(1000)
+
+		const store2 = new Store({
+			props: {},
+			schema: StoreSchema.create<Book | Author>(
+				{
+					book: Book,
+					author: Author,
+				},
+				{
+					migrations: [
+						createMigrationSequence({
+							sequenceId: 'com.tldraw',
+							retroactive: true,
+							sequence: [
+								{
+									id: `com.tldraw/1`,
+									scope: 'storage',
+									up,
+								},
+							],
+						}),
+					],
+				}
+			),
+		})
+
+		expect(() => {
+			store2.loadStoreSnapshot(snapshot1)
+		}).not.toThrow()
+
+		expect(up).toHaveBeenCalledTimes(1)
+		expect(store2.get(Book.createId('lotr'))!.numPages).toBe(42)
+	})
+
+	it('storage scope migration can delete records', () => {
+		const snapshot1 = store.getStoreSnapshot()
+		const up = vi.fn((storage: any) => {
+			storage.delete('author:mcavoy')
+		})
+
+		expect((snapshot1.store as any)['author:mcavoy']).toBeDefined()
+
+		const store2 = new Store({
+			props: {},
+			schema: StoreSchema.create<Book | Author>(
+				{
+					book: Book,
+					author: Author,
+				},
+				{
+					migrations: [
+						createMigrationSequence({
+							sequenceId: 'com.tldraw',
+							retroactive: true,
+							sequence: [
+								{
+									id: `com.tldraw/1`,
+									scope: 'storage',
+									up,
+								},
+							],
+						}),
+					],
+				}
+			),
+		})
+
+		expect(() => {
+			store2.loadStoreSnapshot(snapshot1)
+		}).not.toThrow()
+
+		expect(up).toHaveBeenCalledTimes(1)
+		expect(store2.get(Author.createId('mcavoy'))).toBeUndefined()
+	})
+
+	it('storage scope migration can iterate records', () => {
+		const snapshot1 = store.getStoreSnapshot()
+		const up = vi.fn((storage: any) => {
+			for (const [id, record] of storage.entries()) {
+				if (record.typeName === 'book') {
+					storage.set(id, { ...record, numPages: record.numPages + 100 })
+				}
+			}
+		})
+
+		expect((snapshot1.store as any)['book:lotr'].numPages).toBe(1000)
+		expect((snapshot1.store as any)['book:hobbit'].numPages).toBe(300)
+
+		const store2 = new Store({
+			props: {},
+			schema: StoreSchema.create<Book | Author>(
+				{
+					book: Book,
+					author: Author,
+				},
+				{
+					migrations: [
+						createMigrationSequence({
+							sequenceId: 'com.tldraw',
+							retroactive: true,
+							sequence: [
+								{
+									id: `com.tldraw/1`,
+									scope: 'storage',
+									up,
+								},
+							],
+						}),
+					],
+				}
+			),
+		})
+
+		expect(() => {
+			store2.loadStoreSnapshot(snapshot1)
+		}).not.toThrow()
+
+		expect(up).toHaveBeenCalledTimes(1)
+		expect(store2.get(Book.createId('lotr'))!.numPages).toBe(1100)
+		expect(store2.get(Book.createId('hobbit'))!.numPages).toBe(400)
+	})
+
+	it('storage scope migration can use values() and keys()', () => {
+		const snapshot1 = store.getStoreSnapshot()
+		const keysCollected: string[] = []
+		const valuesCollected: any[] = []
+
+		const up = vi.fn((storage: any) => {
+			for (const key of storage.keys()) {
+				keysCollected.push(key)
+			}
+			for (const value of storage.values()) {
+				valuesCollected.push(value)
+			}
+		})
+
+		const store2 = new Store({
+			props: {},
+			schema: StoreSchema.create<Book | Author>(
+				{
+					book: Book,
+					author: Author,
+				},
+				{
+					migrations: [
+						createMigrationSequence({
+							sequenceId: 'com.tldraw',
+							retroactive: true,
+							sequence: [
+								{
+									id: `com.tldraw/1`,
+									scope: 'storage',
+									up,
+								},
+							],
+						}),
+					],
+				}
+			),
+		})
+
+		expect(() => {
+			store2.loadStoreSnapshot(snapshot1)
+		}).not.toThrow()
+
+		expect(up).toHaveBeenCalledTimes(1)
+		expect(keysCollected).toContain('book:lotr')
+		expect(keysCollected).toContain('book:hobbit')
+		expect(keysCollected).toContain('author:tolkein')
+		expect(keysCollected).toContain('author:mcavoy')
+		expect(keysCollected).toContain('author:cassidy')
+		expect(valuesCollected.length).toBe(5)
+	})
 })
 
 describe('diffs', () => {