@tldraw/store 3.16.0-internal.a478398270c6 → 3.16.0-next.15f085081fd5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist-cjs/index.d.ts +8 -2
- package/dist-cjs/index.js +1 -1
- package/dist-cjs/lib/RecordsDiff.js +3 -3
- package/dist-cjs/lib/RecordsDiff.js.map +2 -2
- package/dist-cjs/lib/StoreSchema.js +23 -8
- package/dist-cjs/lib/StoreSchema.js.map +3 -3
- package/dist-esm/index.d.mts +8 -2
- package/dist-esm/index.mjs +1 -1
- package/dist-esm/lib/RecordsDiff.mjs +3 -3
- package/dist-esm/lib/RecordsDiff.mjs.map +2 -2
- package/dist-esm/lib/StoreSchema.mjs +23 -8
- package/dist-esm/lib/StoreSchema.mjs.map +3 -3
- package/package.json +12 -19
- package/src/lib/RecordsDiff.ts +9 -3
- package/src/lib/StoreSchema.ts +32 -8
- package/src/lib/test/AtomMap.test.ts +2 -1
- package/src/lib/test/dependsOn.test.ts +2 -2
- package/src/lib/test/migrationCaching.test.ts +209 -0
- package/src/lib/test/recordStore.test.ts +40 -37
- package/src/lib/test/sortMigrations.test.ts +4 -4
- package/src/lib/test/validateMigrations.test.ts +8 -8
package/package.json
CHANGED

```diff
@@ -1,7 +1,7 @@
 {
   "name": "@tldraw/store",
-  "description": "
-  "version": "3.16.0-internal.a478398270c6",
+  "description": "tldraw infinite canvas SDK (store).",
+  "version": "3.16.0-next.15f085081fd5",
   "author": {
     "name": "tldraw Inc.",
     "email": "hello@tldraw.com"
@@ -17,6 +17,7 @@
   },
   "keywords": [
     "tldraw",
+    "sdk",
     "drawing",
     "app",
     "development",
@@ -31,19 +32,20 @@
     "src"
   ],
   "scripts": {
-    "test-ci": "
-    "test": "yarn run -T
-    "test-coverage": "
+    "test-ci": "yarn run -T vitest run --passWithNoTests",
+    "test": "yarn run -T vitest --passWithNoTests",
+    "test-coverage": "yarn run -T vitest run --coverage --passWithNoTests",
     "build": "yarn run -T tsx ../../internal/scripts/build-package.ts",
     "build-api": "yarn run -T tsx ../../internal/scripts/build-api.ts",
     "prepack": "yarn run -T tsx ../../internal/scripts/prepack.ts",
     "postpack": "../../internal/scripts/postpack.sh",
     "pack-tarball": "yarn pack",
-    "lint": "yarn run -T tsx ../../internal/scripts/lint.ts"
+    "lint": "yarn run -T tsx ../../internal/scripts/lint.ts",
+    "context": "yarn run -T tsx ../../internal/scripts/context.ts"
   },
   "dependencies": {
-    "@tldraw/state": "3.16.0-
-    "@tldraw/utils": "3.16.0-
+    "@tldraw/state": "3.16.0-next.15f085081fd5",
+    "@tldraw/utils": "3.16.0-next.15f085081fd5"
   },
   "peerDependencies": {
     "react": "^18.2.0 || ^19.0.0"
@@ -51,17 +53,8 @@
   "devDependencies": {
     "@peculiar/webcrypto": "^1.5.0",
     "lazyrepo": "0.0.0-alpha.27",
-    "raf": "^3.4.1"
-  },
-  "jest": {
-    "preset": "../../internal/config/jest/node/jest-preset.js",
-    "setupFiles": [
-      "./setupTests.js",
-      "raf/polyfill"
-    ],
-    "moduleNameMapper": {
-      "^~(.*)": "<rootDir>/src/$1"
-    }
+    "raf": "^3.4.1",
+    "vitest": "^3.2.4"
   },
   "module": "dist-esm/index.mjs",
   "source": "src/index.ts",
```
package/src/lib/RecordsDiff.ts
CHANGED

```diff
@@ -42,15 +42,21 @@ export function isRecordsDiffEmpty<T extends UnknownRecord>(diff: RecordsDiff<T>
  * Squash a collection of diffs into a single diff.
  *
  * @param diffs - An array of diffs to squash.
+ * @param options - An optional object with a `mutateFirstDiff` property. If `mutateFirstDiff` is true, the first diff in the array will be mutated in-place.
  * @returns A single diff that represents the squashed diffs.
  * @public
  */
 export function squashRecordDiffs<T extends UnknownRecord>(
-  diffs: RecordsDiff<T>[]
+  diffs: RecordsDiff<T>[],
+  options?: {
+    mutateFirstDiff?: boolean
+  }
 ): RecordsDiff<T> {
-  const result = { added: {}, removed: {}, updated: {} } as RecordsDiff<T>
+  const result = options?.mutateFirstDiff
+    ? diffs[0]
+    : ({ added: {}, removed: {}, updated: {} } as RecordsDiff<T>)
 
-  squashRecordDiffsMutable(result, diffs)
+  squashRecordDiffsMutable(result, options?.mutateFirstDiff ? diffs.slice(1) : diffs)
   return result
 }
 
```
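The new `mutateFirstDiff` option is an opt-in fast path: instead of allocating a fresh accumulator, the first diff in the array is reused and mutated. A minimal usage sketch, assuming the caller owns the diffs it passes in (the `pendingDiffs` value below is an illustrative placeholder, not from the package):

```ts
import { squashRecordDiffs, type RecordsDiff, type UnknownRecord } from '@tldraw/store'

// A batch of diffs the caller owns and may freely mutate (illustrative placeholder).
declare const pendingDiffs: RecordsDiff<UnknownRecord>[]

// Default behaviour: a new diff is allocated and the inputs are left untouched.
const squashedCopy = squashRecordDiffs(pendingDiffs)

// Opt-in behaviour: reuse pendingDiffs[0] as the accumulator and skip the allocation.
// Only safe when the array is non-empty and nothing else holds a reference to that first diff.
const squashedInPlace = squashRecordDiffs(pendingDiffs, { mutateFirstDiff: true })
console.log(squashedInPlace === pendingDiffs[0]) // true
console.log(squashedCopy === pendingDiffs[0]) // false
```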
package/src/lib/StoreSchema.ts
CHANGED

```diff
@@ -107,6 +107,7 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 
   readonly migrations: Record<string, MigrationSequence> = {}
   readonly sortedMigrations: readonly Migration[]
+  private readonly migrationCache = new WeakMap<SerializedSchema, Result<Migration[], string>>()
 
   private constructor(
     public readonly types: {
@@ -158,10 +159,17 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
     }
   }
 
-  // TODO: use a weakmap to store the result of this function
   public getMigrationsSince(persistedSchema: SerializedSchema): Result<Migration[], string> {
+    // Check cache first
+    const cached = this.migrationCache.get(persistedSchema)
+    if (cached) {
+      return cached
+    }
+
     const upgradeResult = upgradeSchema(persistedSchema)
     if (!upgradeResult.ok) {
+      // Cache the error result
+      this.migrationCache.set(persistedSchema, upgradeResult)
       return upgradeResult
     }
     const schema = upgradeResult.value
@@ -178,7 +186,10 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
     }
 
     if (sequenceIdsToInclude.size === 0) {
-      return Result.ok([])
+      const result = Result.ok([])
+      // Cache the empty result
+      this.migrationCache.set(persistedSchema, result)
+      return result
     }
 
     const allMigrationsToInclude = new Set<MigrationId>()
@@ -197,7 +208,10 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
       const idx = this.migrations[sequenceId].sequence.findIndex((m) => m.id === theirVersionId)
       // todo: better error handling
       if (idx === -1) {
-        return Result.err('Incompatible schema?')
+        const result = Result.err('Incompatible schema?')
+        // Cache the error result
+        this.migrationCache.set(persistedSchema, result)
+        return result
       }
       for (const migration of this.migrations[sequenceId].sequence.slice(idx + 1)) {
         allMigrationsToInclude.add(migration.id)
@@ -205,7 +219,12 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
     }
 
     // collect any migrations
-    return Result.ok(this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id)))
+    const result = Result.ok(
+      this.sortedMigrations.filter(({ id }) => allMigrationsToInclude.has(id))
+    )
+    // Cache the result
+    this.migrationCache.set(persistedSchema, result)
+    return result
   }
 
   migratePersistedRecord(
@@ -263,7 +282,10 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
     return { type: 'success', value: record }
   }
 
-  migrateStoreSnapshot(snapshot: StoreSnapshot<R>): MigrationResult<SerializedStore<R>> {
+  migrateStoreSnapshot(
+    snapshot: StoreSnapshot<R>,
+    opts?: { mutateInputStore?: boolean }
+  ): MigrationResult<SerializedStore<R>> {
     let { store } = snapshot
     const migrations = this.getMigrationsSince(snapshot.schema)
     if (!migrations.ok) {
@@ -276,7 +298,9 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
       return { type: 'success', value: store }
     }
 
-    store = structuredClone(store)
+    if (!opts?.mutateInputStore) {
+      store = structuredClone(store)
+    }
 
     try {
       for (const migration of migrationsToApply) {
@@ -286,13 +310,13 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
             if (!shouldApply) continue
            const result = migration.up!(record as any)
            if (result) {
-              store[id as keyof typeof store] =
+              store[id as keyof typeof store] = result as any
            }
          }
        } else if (migration.scope === 'store') {
          const result = migration.up!(store)
          if (result) {
-            store =
+            store = result as any
          }
        } else {
          exhaustiveSwitchError(migration)
```
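Two behavioural consequences of the change above: `getMigrationsSince` now memoizes its result per persisted-schema object in a `WeakMap` (repeated calls with the same `SerializedSchema` object return the same `Result`, including error results), and `migrateStoreSnapshot` accepts an optional `mutateInputStore` flag that skips the defensive `structuredClone`. A minimal sketch, assuming a schema and snapshot supplied by the application (both declared as placeholders here):

```ts
import { StoreSchema, type StoreSnapshot, type UnknownRecord } from '@tldraw/store'

// Placeholders standing in for the application's schema and a persisted snapshot.
declare const schema: StoreSchema<UnknownRecord>
declare const snapshot: StoreSnapshot<UnknownRecord>

// Default: the snapshot's store is cloned before migrations run, so the input is untouched.
const migratedCopy = schema.migrateStoreSnapshot(snapshot)

// Opt-in: migrate the input store in place, skipping the clone. Only appropriate when the
// caller owns snapshot.store and will not read the pre-migration data again afterwards.
const migratedInPlace = schema.migrateStoreSnapshot(snapshot, { mutateInputStore: true })

// Repeated lookups against the same persisted-schema object now hit the WeakMap cache,
// so both calls return the same Result (and, when ok, the same Migration[] reference).
const first = schema.getMigrationsSince(snapshot.schema)
const second = schema.getMigrationsSince(snapshot.schema)
console.log(first === second) // true
```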
package/src/lib/test/AtomMap.test.ts
CHANGED

```diff
@@ -1,4 +1,5 @@
 import { react, transaction } from '@tldraw/state'
+import { vi } from 'vitest'
 import { AtomMap } from '../AtomMap'
 
 describe('AtomMap', () => {
@@ -11,7 +12,7 @@ describe('AtomMap', () => {
   })
 
   function testReactor(name: string, fn: () => any) {
-    const cb = jest.fn(fn)
+    const cb = vi.fn(fn)
     const cleanup = react(name, cb)
     cleanupFns.push(() => cleanup())
     return cb
```
package/src/lib/test/dependsOn.test.ts
CHANGED

```diff
@@ -26,7 +26,7 @@ describe('dependsOn', () => {
       }
     )
   }).toThrowErrorMatchingInlineSnapshot(
-    `
+    `[Error: Migration 'foo/1' depends on missing migration 'bar/1']`
   )
 })
 
@@ -108,7 +108,7 @@ describe('standalone dependsOn', () => {
       }
     )
   }).toThrowErrorMatchingInlineSnapshot(
-    `
+    `[Error: Migration 'foo/1' depends on missing migration 'bar/1']`
   )
 })
 
```
package/src/lib/test/migrationCaching.test.ts
ADDED

```diff
@@ -0,0 +1,209 @@
+/* eslint-disable @typescript-eslint/no-deprecated */
+import { assert } from '@tldraw/utils'
+import {
+  BaseRecord,
+  Migration,
+  RecordId,
+  createMigrationIds,
+  createMigrationSequence,
+  createRecordType,
+} from '../../'
+import { StoreSchema } from '../StoreSchema'
+
+interface TestRecord extends BaseRecord<'test', RecordId<TestRecord>> {
+  name: string
+  version: number
+}
+
+describe('StoreSchema migration caching', () => {
+  // Create migration IDs
+  const TestVersions = createMigrationIds('com.tldraw.test', {
+    AddVersion: 1,
+    UpdateVersion: 2,
+  })
+
+  // Create a simple schema with migrations
+  const createTestSchema = (version: number) => {
+    const TestRecordType = createRecordType<TestRecord>('test', {
+      scope: 'document',
+    })
+
+    const sequence: Migration[] = []
+
+    if (version > 1) {
+      sequence.push({
+        id: TestVersions.AddVersion,
+        scope: 'record',
+        up: (record: any) => {
+          // Mutate the record in place
+          record.version = 2
+          // Don't return anything
+        },
+        down: (record: any) => {
+          record.version = 1
+          // Don't return anything
+        },
+      })
+    }
+
+    if (version > 2) {
+      sequence.push({
+        id: TestVersions.UpdateVersion,
+        scope: 'record',
+        up: (record: any) => {
+          record.version = 3
+          // Don't return anything
+        },
+        down: (record: any) => {
+          record.version = 2
+          // Don't return anything
+        },
+      })
+    }
+
+    const schema = StoreSchema.create(
+      {
+        test: TestRecordType,
+      },
+      {
+        migrations: [createMigrationSequence({ sequenceId: 'com.tldraw.test', sequence })],
+      }
+    )
+
+    return schema
+  }
+
+  it('should cache migration results and return same array reference', () => {
+    const schema = createTestSchema(3)
+    const oldSchema = schema.serializeEarliestVersion()
+
+    // First call should create the migrations array
+    const migrations1 = schema.getMigrationsSince(oldSchema)
+    assert(migrations1.ok)
+    expect(migrations1.value).toHaveLength(2)
+
+    // Second call should return the same array reference (cached)
+    const migrations2 = schema.getMigrationsSince(oldSchema)
+    assert(migrations2.ok)
+    expect(migrations2.value).toBe(migrations1.value) // Same array reference
+
+    // Third call should also return the same array reference
+    const migrations3 = schema.getMigrationsSince(oldSchema)
+    assert(migrations3.ok)
+    expect(migrations3.value).toBe(migrations1.value)
+  })
+
+  it('should not cache when schema versions are different', () => {
+    const schema = createTestSchema(3)
+    const oldSchema = schema.serializeEarliestVersion()
+
+    // Call with original schema
+    const migrations1 = schema.getMigrationsSince(oldSchema)
+    expect(migrations1.ok).toBe(true)
+    if (!migrations1.ok) throw new Error('Expected migrations1 to be ok')
+
+    // Create a different schema version by using a schema with version 2
+    const schemaV2 = createTestSchema(2)
+    const schemaV2Serialized = schemaV2.serializeEarliestVersion()
+    const migrations2 = schema.getMigrationsSince(schemaV2Serialized)
+    expect(migrations2.ok).toBe(true)
+    if (!migrations2.ok) throw new Error('Expected migrations2 to be ok')
+
+    // Should be different arrays (no cache hit)
+    expect(migrations2.value).not.toBe(migrations1.value)
+  })
+
+  it('should handle mutateInputStore: true with migrators that return void', () => {
+    const schema = createTestSchema(3)
+    const oldSchema = schema.serializeEarliestVersion()
+
+    const store = {
+      test1: {
+        id: 'test1',
+        name: 'Test 1',
+        version: 1,
+        typeName: 'test',
+      },
+      test2: {
+        id: 'test2',
+        name: 'Test 2',
+        version: 1,
+        typeName: 'test',
+      },
+    }
+
+    // Test with mutateInputStore: true
+    const result1 = schema.migrateStoreSnapshot(
+      { store, schema: oldSchema },
+      { mutateInputStore: true }
+    )
+
+    assert(result1.type === 'success')
+    expect((result1.value as any).test1.version).toBe(3)
+    expect((result1.value as any).test2.version).toBe(3)
+
+    // The input store should be mutated in place
+    expect(result1.value).toBe(store)
+  })
+
+  it('should handle mutateInputStore: false with migrators that return void', () => {
+    const schema = createTestSchema(3)
+    const oldSchema = schema.serializeEarliestVersion()
+
+    const store = {
+      test1: {
+        id: 'test1',
+        name: 'Test 1',
+        version: 1,
+        typeName: 'test',
+      },
+    }
+
+    // Test with mutateInputStore: false (default)
+    const result = schema.migrateStoreSnapshot({ store, schema: oldSchema })
+
+    assert(result.type === 'success')
+    expect((result.value as any).test1.version).toBe(3)
+
+    // The input store should NOT be mutated
+    expect(store.test1.version).toBe(1)
+  })
+
+  it('should handle empty migration list caching', () => {
+    const schema = createTestSchema(1) // No migrations
+    const oldSchema = schema.serializeEarliestVersion()
+
+    // First call
+    const migrations1 = schema.getMigrationsSince(oldSchema)
+    assert(migrations1.ok)
+
+    expect(migrations1.value).toHaveLength(0)
+
+    // Second call should return same array reference
+    const migrations2 = schema.getMigrationsSince(oldSchema)
+    assert(migrations2.ok)
+    expect(migrations2.value).toBe(migrations1.value)
+    expect(migrations2.value).toHaveLength(0)
+  })
+
+  it('should handle incompatible schema caching', () => {
+    const schema = createTestSchema(3)
+    const incompatibleSchema = {
+      schemaVersion: 1 as const,
+      storeVersion: 1,
+      recordVersions: {
+        test: {
+          version: 999, // Much higher version than what we support
+        },
+      },
+    }
+
+    // First call should fail
+    const migrations1 = schema.getMigrationsSince(incompatibleSchema)
+    expect(migrations1.ok).toBe(false)
+
+    // Second call should also fail (but might be cached)
+    const migrations2 = schema.getMigrationsSince(incompatibleSchema)
+    expect(migrations2.ok).toBe(false)
+  })
+})
```
package/src/lib/test/recordStore.test.ts
CHANGED

```diff
@@ -1,4 +1,5 @@
 import { Computed, react, RESET_VALUE, transact } from '@tldraw/state'
+import { vi } from 'vitest'
 import { BaseRecord, RecordId } from '../BaseRecord'
 import { createMigrationSequence } from '../migrate'
 import { RecordsDiff, reverseRecordsDiff } from '../RecordsDiff'
@@ -206,7 +207,7 @@ describe('Store', () => {
 
   it('allows adding onAfterChange callbacks that see the final state of the world', () => {
     /* ADDING */
-    const onAfterCreate = jest.fn((current) => {
+    const onAfterCreate = vi.fn((current) => {
       expect(current).toEqual(
         Author.create({ name: 'J.R.R Tolkein', id: Author.createId('tolkein') })
       )
@@ -218,7 +219,7 @@ describe('Store', () => {
     expect(onAfterCreate).toHaveBeenCalledTimes(1)
 
     /* UPDATING */
-    const onAfterChange = jest.fn((prev, current) => {
+    const onAfterChange = vi.fn((prev, current) => {
       expect(prev.name).toBe('J.R.R Tolkein')
       expect(current.name).toBe('Butch Cassidy')
 
@@ -231,7 +232,7 @@ describe('Store', () => {
     expect(onAfterChange).toHaveBeenCalledTimes(1)
 
     /* REMOVING */
-    const onAfterDelete = jest.fn((prev) => {
+    const onAfterDelete = vi.fn((prev) => {
       if (prev.typeName === 'author') {
         expect(prev.name).toBe('Butch Cassidy')
       }
@@ -309,7 +310,7 @@ describe('Store', () => {
   })
 
   it('supports listening for changes to the whole store', async () => {
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener)
 
     transact(() => {
@@ -336,7 +337,7 @@ describe('Store', () => {
 
     await new Promise((resolve) => requestAnimationFrame(resolve))
     expect(listener).toHaveBeenCalledTimes(1)
-    expect(listener.mock.lastCall[0]).toMatchInlineSnapshot(`
+    expect(listener.mock.lastCall?.[0]).toMatchInlineSnapshot(`
       {
         "changes": {
           "added": {
@@ -391,7 +392,7 @@ describe('Store', () => {
     await new Promise((resolve) => requestAnimationFrame(resolve))
     expect(listener).toHaveBeenCalledTimes(2)
 
-    expect(listener.mock.lastCall[0]).toMatchInlineSnapshot(`
+    expect(listener.mock.lastCall?.[0]).toMatchInlineSnapshot(`
       {
         "changes": {
           "added": {},
@@ -444,7 +445,7 @@ describe('Store', () => {
     await new Promise((resolve) => requestAnimationFrame(resolve))
     expect(listener).toHaveBeenCalledTimes(3)
 
-    expect(listener.mock.lastCall[0]).toMatchInlineSnapshot(`
+    expect(listener.mock.lastCall?.[0]).toMatchInlineSnapshot(`
       {
         "changes": {
           "added": {},
@@ -480,7 +481,7 @@ describe('Store', () => {
   })
 
   it('supports filtering history by scope', () => {
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener, {
       scope: 'session',
     })
@@ -521,7 +522,7 @@ describe('Store', () => {
   })
 
   it('supports filtering history by scope (2)', () => {
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener, {
       scope: 'document',
     })
@@ -550,7 +551,7 @@ describe('Store', () => {
   })
 
   it('supports filtering history by source', () => {
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener, {
       source: 'remote',
     })
@@ -600,7 +601,7 @@ describe('Store', () => {
   })
 
   it('supports filtering history by source (user)', () => {
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener, {
       source: 'user',
     })
@@ -658,7 +659,7 @@ describe('Store', () => {
     // @ts-expect-error
     globalThis.__FORCE_RAF_IN_TESTS__ = true
     store.put([Author.create({ name: 'J.R.R Tolkein', id: Author.createId('tolkein') })])
-    const firstListener = jest.fn()
+    const firstListener = vi.fn()
     store.listen(firstListener)
     expect(firstListener).toHaveBeenCalledTimes(0)
 
@@ -666,7 +667,7 @@ describe('Store', () => {
 
     expect(firstListener).toHaveBeenCalledTimes(0)
 
-    const secondListener = jest.fn()
+    const secondListener = vi.fn()
 
     store.listen(secondListener)
 
@@ -707,7 +708,7 @@ describe('Store', () => {
     const id = Author.createId('tolkein')
     store.put([Author.create({ name: 'J.R.R Tolkein', id })])
 
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener)
 
     // Return the exact same value that came in
@@ -717,7 +718,7 @@ describe('Store', () => {
   })
 
   it('tells listeners the source of the changes so they can decide if they want to run or not', async () => {
-    const listener = jest.fn()
+    const listener = vi.fn()
     store.listen(listener)
 
     store.put([Author.create({ name: 'Jimmy Beans', id: Author.createId('jimmy') })])
@@ -824,7 +825,7 @@ describe('snapshots', () => {
     expect(() => {
       // @ts-expect-error
       store2.loadStoreSnapshot(snapshot1)
-    }).toThrowErrorMatchingInlineSnapshot(`
+    }).toThrowErrorMatchingInlineSnapshot(`[Error: Missing definition for record type author]`)
   })
 
   it('throws errors when loading a snapshot with a different schema', () => {
@@ -839,12 +840,12 @@ describe('snapshots', () => {
 
     expect(() => {
       store2.loadStoreSnapshot(snapshot1 as any)
-    }).toThrowErrorMatchingInlineSnapshot(`
+    }).toThrowErrorMatchingInlineSnapshot(`[Error: Missing definition for record type author]`)
   })
 
   it('migrates the snapshot', () => {
     const snapshot1 = store.getStoreSnapshot()
-    const up = jest.fn((s: any) => {
+    const up = vi.fn((s: any) => {
       s['book:lotr'].numPages = 42
     })
 
@@ -969,7 +970,7 @@ describe('diffs', () => {
   })
   it('produces diffs from `addHistoryInterceptor`', () => {
     const diffs: any[] = []
-    const interceptor = jest.fn((diff) => diffs.push(diff))
+    const interceptor = vi.fn((diff) => diffs.push(diff))
     store.addHistoryInterceptor(interceptor)
 
     store.put([
@@ -1095,15 +1096,15 @@ describe('callbacks', () => {
     numPages: 1,
   })
 
-  let onAfterCreate: jest.Mock
-  let onAfterChange: jest.Mock
-  let onAfterDelete: jest.Mock
+  let onAfterCreate: ReturnType<typeof vi.fn>
+  let onAfterChange: ReturnType<typeof vi.fn>
+  let onAfterDelete: ReturnType<typeof vi.fn>
 
-  let onBeforeCreate: jest.Mock
-  let onBeforeChange: jest.Mock
-  let onBeforeDelete: jest.Mock
+  let onBeforeCreate: ReturnType<typeof vi.fn>
+  let onBeforeChange: ReturnType<typeof vi.fn>
+  let onBeforeDelete: ReturnType<typeof vi.fn>
 
-  let onOperationComplete: jest.Mock
+  let onOperationComplete: ReturnType<typeof vi.fn>
 
   beforeEach(() => {
     store = new Store({
@@ -1113,15 +1114,15 @@ describe('callbacks', () => {
       }),
    })
 
-    onAfterCreate = jest.fn((record) => callbacks.push({ type: 'create', record }))
-    onAfterChange = jest.fn((from, to) => callbacks.push({ type: 'change', from, to }))
-    onAfterDelete = jest.fn((record) => callbacks.push({ type: 'delete', record }))
+    onAfterCreate = vi.fn((record) => callbacks.push({ type: 'create', record }))
+    onAfterChange = vi.fn((from, to) => callbacks.push({ type: 'change', from, to }))
+    onAfterDelete = vi.fn((record) => callbacks.push({ type: 'delete', record }))
 
-    onBeforeCreate = jest.fn((record) => record)
-    onBeforeChange = jest.fn((_from, to) => to)
-    onBeforeDelete = jest.fn((_record) => {})
+    onBeforeCreate = vi.fn((record) => record)
+    onBeforeChange = vi.fn((_from, to) => to)
+    onBeforeDelete = vi.fn((_record) => {})
 
-    onOperationComplete = jest.fn(() => callbacks.push({ type: 'complete' }))
+    onOperationComplete = vi.fn(() => callbacks.push({ type: 'complete' }))
     callbacks = []
 
     store.sideEffects.registerAfterCreateHandler('book', onAfterCreate)
@@ -1161,12 +1162,12 @@ describe('callbacks', () => {
 
   it('bails out if too many callbacks are fired', () => {
     let limit = 10
-    onAfterCreate.mockImplementation((record) => {
+    onAfterCreate.mockImplementation((record: any) => {
       if (record.numPages < limit) {
        store.put([{ ...record, numPages: record.numPages + 1 }])
      }
    })
-    onAfterChange.mockImplementation((from, to) => {
+    onAfterChange.mockImplementation((from: any, to: any) => {
       if (to.numPages < limit) {
        store.put([{ ...to, numPages: to.numPages + 1 }])
      }
@@ -1181,7 +1182,9 @@ describe('callbacks', () => {
     store.clear()
     expect(() => {
       store.put([book2])
-    }).toThrowErrorMatchingInlineSnapshot(
+    }).toThrowErrorMatchingInlineSnapshot(
+      `[Error: Maximum store update depth exceeded, bailing out]`
+    )
   })
 
   it('keeps firing operation complete callbacks until all are cleared', () => {
@@ -1197,7 +1200,7 @@ describe('callbacks', () => {
 
     store.put([book1])
 
-    onAfterChange.mockImplementation((prev, next) => {
+    onAfterChange.mockImplementation((prev: any, next: any) => {
       if ([0, 1, 2, 5, 6].includes(step)) {
         step++
         store.put([{ ...next, numPages: next.numPages + 1 }])
```