@tldraw/store 3.16.0-canary.dfdf6b7de8c2 → 3.16.0-canary.e1d5c8aeb399
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -11
- package/dist-cjs/index.d.ts +6 -41
- package/dist-cjs/index.js +1 -2
- package/dist-cjs/index.js.map +2 -2
- package/dist-cjs/lib/RecordType.js +0 -16
- package/dist-cjs/lib/RecordType.js.map +2 -2
- package/dist-cjs/lib/RecordsDiff.js +3 -3
- package/dist-cjs/lib/RecordsDiff.js.map +2 -2
- package/dist-cjs/lib/Store.js +1 -20
- package/dist-cjs/lib/Store.js.map +2 -2
- package/dist-cjs/lib/StoreSchema.js +1 -0
- package/dist-cjs/lib/StoreSchema.js.map +2 -2
- package/dist-cjs/lib/migrate.js +57 -43
- package/dist-cjs/lib/migrate.js.map +2 -2
- package/dist-esm/index.d.mts +6 -41
- package/dist-esm/index.mjs +1 -3
- package/dist-esm/index.mjs.map +2 -2
- package/dist-esm/lib/RecordType.mjs +0 -16
- package/dist-esm/lib/RecordType.mjs.map +2 -2
- package/dist-esm/lib/RecordsDiff.mjs +3 -3
- package/dist-esm/lib/RecordsDiff.mjs.map +2 -2
- package/dist-esm/lib/Store.mjs +1 -20
- package/dist-esm/lib/Store.mjs.map +2 -2
- package/dist-esm/lib/StoreSchema.mjs +1 -0
- package/dist-esm/lib/StoreSchema.mjs.map +2 -2
- package/dist-esm/lib/migrate.mjs +57 -43
- package/dist-esm/lib/migrate.mjs.map +2 -2
- package/package.json +10 -18
- package/src/index.ts +0 -1
- package/src/lib/RecordType.ts +0 -17
- package/src/lib/RecordsDiff.ts +9 -3
- package/src/lib/Store.ts +1 -22
- package/src/lib/StoreSchema.ts +1 -0
- package/src/lib/migrate.ts +106 -57
- package/src/lib/test/AtomMap.test.ts +2 -1
- package/src/lib/test/dependsOn.test.ts +2 -2
- package/src/lib/test/recordStore.test.ts +40 -37
- package/src/lib/test/sortMigrations.test.ts +36 -4
- package/src/lib/test/validateMigrations.test.ts +8 -8
- package/src/lib/test/defineMigrations.test.ts +0 -232
package/src/lib/RecordType.ts
CHANGED
@@ -116,23 +116,6 @@ export class RecordType<
 		return (this.typeName + ':' + (customUniquePart ?? uniqueId())) as IdOf<R>
 	}
 
-	/**
-	 * Create a new ID for this record type based on the given ID.
-	 *
-	 * @example
-	 *
-	 * ```ts
-	 * const id = recordType.createCustomId('myId')
-	 * ```
-	 *
-	 * @deprecated - Use `createId` instead.
-	 * @param id - The ID to base the new ID on.
-	 * @returns The new ID.
-	 */
-	createCustomId(id: string): IdOf<R> {
-		return (this.typeName + ':' + id) as IdOf<R>
-	}
-
 	/**
 	 * Takes an id like `user:123` and returns the part after the colon `123`
 	 *
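The removed `createCustomId` produced the same `typeName:id` string that `createId` produces when given a custom unique part, so call sites can switch directly. A minimal sketch of the replacement (the `Book` record type here is invented for illustration):

```ts
import { BaseRecord, createRecordType, RecordId } from '@tldraw/store'

interface Book extends BaseRecord<'book', RecordId<Book>> {
	title: string
}

const Book = createRecordType<Book>('book', { scope: 'document' })

// Before (removed in this version): Book.createCustomId('lotr')
// After: createId accepts the same custom unique part.
const id = Book.createId('lotr') // => 'book:lotr'
```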
package/src/lib/RecordsDiff.ts
CHANGED
@@ -42,15 +42,21 @@ export function isRecordsDiffEmpty<T extends UnknownRecord>(diff: RecordsDiff<T>
  * Squash a collection of diffs into a single diff.
  *
  * @param diffs - An array of diffs to squash.
+ * @param options - An optional object with a `mutateFirstDiff` property. If `mutateFirstDiff` is true, the first diff in the array will be mutated in-place.
  * @returns A single diff that represents the squashed diffs.
  * @public
  */
 export function squashRecordDiffs<T extends UnknownRecord>(
-	diffs: RecordsDiff<T>[]
+	diffs: RecordsDiff<T>[],
+	options?: {
+		mutateFirstDiff?: boolean
+	}
 ): RecordsDiff<T> {
-	const result =
+	const result = options?.mutateFirstDiff
+		? diffs[0]
+		: ({ added: {}, removed: {}, updated: {} } as RecordsDiff<T>)
 
-	squashRecordDiffsMutable(result, diffs)
+	squashRecordDiffsMutable(result, options?.mutateFirstDiff ? diffs.slice(1) : diffs)
 	return result
 }
 
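The new optional second argument avoids allocating a fresh accumulator when the caller is happy to have the first diff mutated. A minimal sketch of both call styles (assuming these symbols are exported from the package entry point):

```ts
import { squashRecordDiffs, type RecordsDiff, type UnknownRecord } from '@tldraw/store'

const diffA: RecordsDiff<UnknownRecord> = { added: {}, removed: {}, updated: {} }
const diffB: RecordsDiff<UnknownRecord> = { added: {}, removed: {}, updated: {} }

// Default: squash into a brand-new diff, leaving both inputs untouched.
const squashed = squashRecordDiffs([diffA, diffB])

// Opt-in: reuse diffA as the accumulator and mutate it in place.
const squashedInPlace = squashRecordDiffs([diffA, diffB], { mutateFirstDiff: true })
// squashedInPlace === diffA
```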
package/src/lib/Store.ts
CHANGED
@@ -501,21 +501,11 @@ export class Store<R extends UnknownRecord = UnknownRecord, Props = unknown> {
 		}
 	}
 
-	/**
-	 * @deprecated use `getSnapshot` from the 'tldraw' package instead.
-	 */
-	getSnapshot(scope: RecordScope | 'all' = 'document') {
-		console.warn(
-			'[tldraw] `Store.getSnapshot` is deprecated and will be removed in a future release. Use `getSnapshot` from the `tldraw` package instead.'
-		)
-		return this.getStoreSnapshot(scope)
-	}
-
 	/**
 	 * Migrate a serialized snapshot of the store and its schema.
 	 *
 	 * ```ts
-	 * const snapshot = store.
+	 * const snapshot = store.getStoreSnapshot()
 	 * store.migrateSnapshot(snapshot)
 	 * ```
 	 *
@@ -566,17 +556,6 @@ export class Store<R extends UnknownRecord = UnknownRecord, Props = unknown> {
 		}
 	}
 
-	/**
-	 * @public
-	 * @deprecated use `loadSnapshot` from the 'tldraw' package instead.
-	 */
-	loadSnapshot(snapshot: StoreSnapshot<R>) {
-		console.warn(
-			"[tldraw] `Store.loadSnapshot` is deprecated and will be removed in a future release. Use `loadSnapshot` from the 'tldraw' package instead."
-		)
-		this.loadStoreSnapshot(snapshot)
-	}
-
 	/**
 	 * Get an array of all values in the store.
 	 *
package/src/lib/StoreSchema.ts
CHANGED
@@ -349,6 +349,7 @@ export class StoreSchema<R extends UnknownRecord, P = unknown> {
 
 	/**
 	 * @deprecated This is only here for legacy reasons, don't use it unless you have david's blessing!
+	 * @internal
 	 */
 	serializeEarliestVersion(): SerializedSchema {
 		return {
package/src/lib/migrate.ts
CHANGED
@@ -2,45 +2,6 @@ import { assert, objectMapEntries } from '@tldraw/utils'
 import { UnknownRecord } from './BaseRecord'
 import { SerializedStore } from './Store'
 
-let didWarn = false
-
-/**
- * @public
- * @deprecated use `createShapePropsMigrationSequence` instead. See [the docs](https://tldraw.dev/docs/persistence#Updating-legacy-shape-migrations-defineMigrations) for how to migrate.
- */
-export function defineMigrations(opts: {
-	firstVersion?: number
-	currentVersion?: number
-	migrators?: Record<number, LegacyMigration>
-	subTypeKey?: string
-	subTypeMigrations?: Record<string, LegacyBaseMigrationsInfo>
-}): LegacyMigrations {
-	const { currentVersion, firstVersion, migrators = {}, subTypeKey, subTypeMigrations } = opts
-	if (!didWarn) {
-		console.warn(
-			`The 'defineMigrations' function is deprecated and will be removed in a future release. Use the new migrations API instead. See the migration guide for more info: https://tldraw.dev/docs/persistence#Updating-legacy-shape-migrations-defineMigrations`
-		)
-		didWarn = true
-	}
-
-	// Some basic guards against impossible version combinations, some of which will be caught by TypeScript
-	if (typeof currentVersion === 'number' && typeof firstVersion === 'number') {
-		if ((currentVersion as number) === (firstVersion as number)) {
-			throw Error(`Current version is equal to initial version.`)
-		} else if (currentVersion < firstVersion) {
-			throw Error(`Current version is lower than initial version.`)
-		}
-	}
-
-	return {
-		firstVersion: (firstVersion as number) ?? 0, // defaults
-		currentVersion: (currentVersion as number) ?? 0, // defaults
-		migrators,
-		subTypeKey,
-		subTypeMigrations,
-	}
-}
-
 function squashDependsOn(sequence: Array<Migration | StandaloneDependsOn>): Migration[] {
 	const result: Migration[] = []
 	for (let i = sequence.length - 1; i >= 0; i--) {
@@ -199,38 +160,126 @@ export interface MigrationSequence {
 	sequence: Migration[]
 }
 
+/**
+ * Sorts migrations using a distance-minimizing topological sort.
+ *
+ * This function respects two types of dependencies:
+ * 1. Implicit sequence dependencies (foo/1 must come before foo/2)
+ * 2. Explicit dependencies via `dependsOn` property
+ *
+ * The algorithm minimizes the total distance between migrations and their explicit
+ * dependencies in the final ordering, while maintaining topological correctness.
+ * This means when migration A depends on migration B, A will be scheduled as close
+ * as possible to B (while respecting all constraints).
+ *
+ * Implementation uses Kahn's algorithm with priority scoring:
+ * - Builds dependency graph and calculates in-degrees
+ * - Uses priority queue that prioritizes migrations which unblock explicit dependencies
+ * - Processes migrations in urgency order while maintaining topological constraints
+ * - Detects cycles by ensuring all migrations are processed
+ *
+ * @param migrations - Array of migrations to sort
+ * @returns Sorted array of migrations in execution order
+ */
 export function sortMigrations(migrations: Migration[]): Migration[] {
-
-	const byId = new Map(migrations.map((m) => [m.id, m]))
-	const isProcessing = new Set<MigrationId>()
+	if (migrations.length === 0) return []
 
-
+	// Build dependency graph and calculate in-degrees
+	const byId = new Map(migrations.map((m) => [m.id, m]))
+	const dependents = new Map<MigrationId, Set<MigrationId>>() // who depends on this
+	const inDegree = new Map<MigrationId, number>()
+	const explicitDeps = new Map<MigrationId, Set<MigrationId>>() // explicit dependsOn relationships
 
-
-
-
+	// Initialize
+	for (const m of migrations) {
+		inDegree.set(m.id, 0)
+		dependents.set(m.id, new Set())
+		explicitDeps.set(m.id, new Set())
+	}
 
+	// Add implicit sequence dependencies and explicit dependencies
+	for (const m of migrations) {
 		const { version, sequenceId } = parseMigrationId(m.id)
-
-
-
+
+		// Implicit dependency on previous in sequence
+		const prevId = `${sequenceId}/${version - 1}` as MigrationId
+		if (byId.has(prevId)) {
+			dependents.get(prevId)!.add(m.id)
+			inDegree.set(m.id, inDegree.get(m.id)! + 1)
 		}
 
+		// Explicit dependencies
 		if (m.dependsOn) {
-			for (const
-
-
-
+			for (const depId of m.dependsOn) {
+				if (byId.has(depId)) {
+					dependents.get(depId)!.add(m.id)
+					explicitDeps.get(m.id)!.add(depId)
+					inDegree.set(m.id, inDegree.get(m.id)! + 1)
+				}
+			}
+		}
+	}
+
+	// Priority queue: migrations ready to process (in-degree 0)
+	const ready = migrations.filter((m) => inDegree.get(m.id) === 0)
+	const result: Migration[] = []
+	const processed = new Set<MigrationId>()
+
+	while (ready.length > 0) {
+		// Calculate urgency scores for ready migrations and pick the best one
+		let bestCandidate: Migration | undefined
+		let bestCandidateScore = -Infinity
+
+		for (const m of ready) {
+			let urgencyScore = 0
+
+			for (const depId of dependents.get(m.id) || []) {
+				if (!processed.has(depId)) {
+					// Priority 1: Count all unprocessed dependents (to break ties)
+					urgencyScore += 1
+
+					// Priority 2: If this migration is explicitly depended on by others, boost priority
+					if (explicitDeps.get(depId)!.has(m.id)) {
+						urgencyScore += 100
+					}
 				}
 			}
+
+			if (
+				urgencyScore > bestCandidateScore ||
+				// Tiebreaker: prefer lower sequence/version
+				(urgencyScore === bestCandidateScore && m.id.localeCompare(bestCandidate?.id ?? '') < 0)
+			) {
+				bestCandidate = m
+				bestCandidateScore = urgencyScore
+			}
 		}
 
-
-
+		const nextMigration = bestCandidate!
+		ready.splice(ready.indexOf(nextMigration), 1)
+
+		// Cycle detection - if we have processed everything and still have items left, there's a cycle
+		// This is handled by Kahn's algorithm naturally - if we finish with items unprocessed, there's a cycle
+
+		// Process this migration
+		result.push(nextMigration)
+		processed.add(nextMigration.id)
+
+		// Update in-degrees and add newly ready migrations
+		for (const depId of dependents.get(nextMigration.id) || []) {
+			if (!processed.has(depId)) {
+				inDegree.set(depId, inDegree.get(depId)! - 1)
+				if (inDegree.get(depId) === 0) {
+					ready.push(byId.get(depId)!)
+				}
+			}
+		}
 	}
 
-	for
-
+	// Check for cycles - if we didn't process all migrations, there's a cycle
+	if (result.length !== migrations.length) {
+		const unprocessed = migrations.filter((m) => !processed.has(m.id))
+		assert(false, `Circular dependency in migrations: ${unprocessed[0].id}`)
 	}
 
 	return result
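The effect of the new distance-minimizing order is easiest to see with a small example that mirrors the tests further down: `bar/3` explicitly depends on `foo/1`, so the whole `bar` sequence is scheduled right after `foo/1` instead of at the end. A sketch (the migration ids are invented, and the import assumes `sortMigrations` and `Migration` are re-exported from the package entry point):

```ts
import { sortMigrations, type Migration } from '@tldraw/store'

// Tiny helper for building record-scoped migrations with optional explicit dependencies.
const m = (id: Migration['id'], dependsOn?: Migration['id'][]): Migration => ({
	id,
	scope: 'record',
	dependsOn,
	up: (record) => record,
})

const sorted = sortMigrations([
	m('foo/2'),
	m('bar/3', ['foo/1']),
	m('foo/1'),
	m('bar/1'),
	m('bar/2'),
])

// Distance-minimizing order: ['foo/1', 'bar/1', 'bar/2', 'bar/3', 'foo/2']
console.log(sorted.map((x) => x.id))
```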
package/src/lib/test/AtomMap.test.ts
CHANGED
@@ -1,4 +1,5 @@
 import { react, transaction } from '@tldraw/state'
+import { vi } from 'vitest'
 import { AtomMap } from '../AtomMap'
 
 describe('AtomMap', () => {
@@ -11,7 +12,7 @@ describe('AtomMap', () => {
 	})
 
 	function testReactor(name: string, fn: () => any) {
-		const cb =
+		const cb = vi.fn(fn)
 		const cleanup = react(name, cb)
 		cleanupFns.push(() => cleanup())
 		return cb
package/src/lib/test/dependsOn.test.ts
CHANGED
@@ -26,7 +26,7 @@ describe('dependsOn', () => {
 			}
 		)
 	}).toThrowErrorMatchingInlineSnapshot(
-		`
+		`[Error: Migration 'foo/1' depends on missing migration 'bar/1']`
 	)
 })
 
@@ -108,7 +108,7 @@ describe('standalone dependsOn', () => {
 			}
 		)
 	}).toThrowErrorMatchingInlineSnapshot(
-		`
+		`[Error: Migration 'foo/1' depends on missing migration 'bar/1']`
 	)
 })
 
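The test-file changes above and below follow one pattern: the suites now import `vi` from `vitest` and build their mocks with `vi.fn`, and inline snapshots are collapsed to Vitest's single-line `[Error: ...]` form. The removed right-hand sides are truncated in this diff (they were presumably `jest.fn(...)` calls, though that is an assumption). A minimal sketch of the mock half of the pattern:

```ts
import { expect, test, vi } from 'vitest'

test('vi.fn records calls like jest.fn did', () => {
	// Before (assumed, truncated in this diff): const cb = jest.fn(fn)
	const fn = () => 42
	const cb = vi.fn(fn)

	expect(cb()).toBe(42)
	expect(cb).toHaveBeenCalledTimes(1)
})
```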
package/src/lib/test/recordStore.test.ts
CHANGED
@@ -1,4 +1,5 @@
 import { Computed, react, RESET_VALUE, transact } from '@tldraw/state'
+import { vi } from 'vitest'
 import { BaseRecord, RecordId } from '../BaseRecord'
 import { createMigrationSequence } from '../migrate'
 import { RecordsDiff, reverseRecordsDiff } from '../RecordsDiff'
@@ -206,7 +207,7 @@ describe('Store', () => {
 
 	it('allows adding onAfterChange callbacks that see the final state of the world', () => {
 		/* ADDING */
-		const onAfterCreate =
+		const onAfterCreate = vi.fn((current) => {
 			expect(current).toEqual(
 				Author.create({ name: 'J.R.R Tolkein', id: Author.createId('tolkein') })
 			)
@@ -218,7 +219,7 @@ describe('Store', () => {
 		expect(onAfterCreate).toHaveBeenCalledTimes(1)
 
 		/* UPDATING */
-		const onAfterChange =
+		const onAfterChange = vi.fn((prev, current) => {
 			expect(prev.name).toBe('J.R.R Tolkein')
 			expect(current.name).toBe('Butch Cassidy')
 
@@ -231,7 +232,7 @@ describe('Store', () => {
 		expect(onAfterChange).toHaveBeenCalledTimes(1)
 
 		/* REMOVING */
-		const onAfterDelete =
+		const onAfterDelete = vi.fn((prev) => {
 			if (prev.typeName === 'author') {
 				expect(prev.name).toBe('Butch Cassidy')
 			}
@@ -309,7 +310,7 @@ describe('Store', () => {
 	})
 
 	it('supports listening for changes to the whole store', async () => {
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener)
 
 		transact(() => {
@@ -336,7 +337,7 @@ describe('Store', () => {
 
 		await new Promise((resolve) => requestAnimationFrame(resolve))
 		expect(listener).toHaveBeenCalledTimes(1)
-		expect(listener.mock.lastCall[0]).toMatchInlineSnapshot(`
+		expect(listener.mock.lastCall?.[0]).toMatchInlineSnapshot(`
 			{
 			  "changes": {
 			    "added": {
@@ -391,7 +392,7 @@ describe('Store', () => {
 		await new Promise((resolve) => requestAnimationFrame(resolve))
 		expect(listener).toHaveBeenCalledTimes(2)
 
-		expect(listener.mock.lastCall[0]).toMatchInlineSnapshot(`
+		expect(listener.mock.lastCall?.[0]).toMatchInlineSnapshot(`
 			{
 			  "changes": {
 			    "added": {},
@@ -444,7 +445,7 @@ describe('Store', () => {
 		await new Promise((resolve) => requestAnimationFrame(resolve))
 		expect(listener).toHaveBeenCalledTimes(3)
 
-		expect(listener.mock.lastCall[0]).toMatchInlineSnapshot(`
+		expect(listener.mock.lastCall?.[0]).toMatchInlineSnapshot(`
 			{
 			  "changes": {
 			    "added": {},
@@ -480,7 +481,7 @@ describe('Store', () => {
 	})
 
 	it('supports filtering history by scope', () => {
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener, {
 			scope: 'session',
 		})
@@ -521,7 +522,7 @@ describe('Store', () => {
 	})
 
 	it('supports filtering history by scope (2)', () => {
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener, {
 			scope: 'document',
 		})
@@ -550,7 +551,7 @@ describe('Store', () => {
 	})
 
 	it('supports filtering history by source', () => {
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener, {
 			source: 'remote',
 		})
@@ -600,7 +601,7 @@ describe('Store', () => {
 	})
 
 	it('supports filtering history by source (user)', () => {
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener, {
 			source: 'user',
 		})
@@ -658,7 +659,7 @@ describe('Store', () => {
 		// @ts-expect-error
 		globalThis.__FORCE_RAF_IN_TESTS__ = true
 		store.put([Author.create({ name: 'J.R.R Tolkein', id: Author.createId('tolkein') })])
-		const firstListener =
+		const firstListener = vi.fn()
 		store.listen(firstListener)
 		expect(firstListener).toHaveBeenCalledTimes(0)
 
@@ -666,7 +667,7 @@ describe('Store', () => {
 
 		expect(firstListener).toHaveBeenCalledTimes(0)
 
-		const secondListener =
+		const secondListener = vi.fn()
 
 		store.listen(secondListener)
 
@@ -707,7 +708,7 @@ describe('Store', () => {
 		const id = Author.createId('tolkein')
 		store.put([Author.create({ name: 'J.R.R Tolkein', id })])
 
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener)
 
 		// Return the exact same value that came in
@@ -717,7 +718,7 @@ describe('Store', () => {
 	})
 
 	it('tells listeners the source of the changes so they can decide if they want to run or not', async () => {
-		const listener =
+		const listener = vi.fn()
 		store.listen(listener)
 
 		store.put([Author.create({ name: 'Jimmy Beans', id: Author.createId('jimmy') })])
@@ -824,7 +825,7 @@ describe('snapshots', () => {
 		expect(() => {
 			// @ts-expect-error
 			store2.loadStoreSnapshot(snapshot1)
-		}).toThrowErrorMatchingInlineSnapshot(`
+		}).toThrowErrorMatchingInlineSnapshot(`[Error: Missing definition for record type author]`)
 	})
 
 	it('throws errors when loading a snapshot with a different schema', () => {
@@ -839,12 +840,12 @@ describe('snapshots', () => {
 
 		expect(() => {
 			store2.loadStoreSnapshot(snapshot1 as any)
-		}).toThrowErrorMatchingInlineSnapshot(`
+		}).toThrowErrorMatchingInlineSnapshot(`[Error: Missing definition for record type author]`)
 	})
 
 	it('migrates the snapshot', () => {
 		const snapshot1 = store.getStoreSnapshot()
-		const up =
+		const up = vi.fn((s: any) => {
 			s['book:lotr'].numPages = 42
 		})
 
@@ -969,7 +970,7 @@ describe('diffs', () => {
 	})
 	it('produces diffs from `addHistoryInterceptor`', () => {
 		const diffs: any[] = []
-		const interceptor =
+		const interceptor = vi.fn((diff) => diffs.push(diff))
 		store.addHistoryInterceptor(interceptor)
 
 		store.put([
@@ -1095,15 +1096,15 @@ describe('callbacks', () => {
 		numPages: 1,
 	})
 
-	let onAfterCreate:
-	let onAfterChange:
-	let onAfterDelete:
+	let onAfterCreate: ReturnType<typeof vi.fn>
+	let onAfterChange: ReturnType<typeof vi.fn>
+	let onAfterDelete: ReturnType<typeof vi.fn>
 
-	let onBeforeCreate:
-	let onBeforeChange:
-	let onBeforeDelete:
+	let onBeforeCreate: ReturnType<typeof vi.fn>
+	let onBeforeChange: ReturnType<typeof vi.fn>
+	let onBeforeDelete: ReturnType<typeof vi.fn>
 
-	let onOperationComplete:
+	let onOperationComplete: ReturnType<typeof vi.fn>
 
 	beforeEach(() => {
 		store = new Store({
@@ -1113,15 +1114,15 @@ describe('callbacks', () => {
 			}),
 		})
 
-		onAfterCreate =
-		onAfterChange =
-		onAfterDelete =
+		onAfterCreate = vi.fn((record) => callbacks.push({ type: 'create', record }))
+		onAfterChange = vi.fn((from, to) => callbacks.push({ type: 'change', from, to }))
+		onAfterDelete = vi.fn((record) => callbacks.push({ type: 'delete', record }))
 
-		onBeforeCreate =
-		onBeforeChange =
-		onBeforeDelete =
+		onBeforeCreate = vi.fn((record) => record)
+		onBeforeChange = vi.fn((_from, to) => to)
+		onBeforeDelete = vi.fn((_record) => {})
 
-		onOperationComplete =
+		onOperationComplete = vi.fn(() => callbacks.push({ type: 'complete' }))
 		callbacks = []
 
 		store.sideEffects.registerAfterCreateHandler('book', onAfterCreate)
@@ -1161,12 +1162,12 @@ describe('callbacks', () => {
 
 	it('bails out if too many callbacks are fired', () => {
 		let limit = 10
-		onAfterCreate.mockImplementation((record) => {
+		onAfterCreate.mockImplementation((record: any) => {
 			if (record.numPages < limit) {
 				store.put([{ ...record, numPages: record.numPages + 1 }])
 			}
 		})
-		onAfterChange.mockImplementation((from, to) => {
+		onAfterChange.mockImplementation((from: any, to: any) => {
 			if (to.numPages < limit) {
 				store.put([{ ...to, numPages: to.numPages + 1 }])
 			}
@@ -1181,7 +1182,9 @@ describe('callbacks', () => {
 		store.clear()
 		expect(() => {
 			store.put([book2])
-		}).toThrowErrorMatchingInlineSnapshot(
+		}).toThrowErrorMatchingInlineSnapshot(
+			`[Error: Maximum store update depth exceeded, bailing out]`
+		)
 	})
 
 	it('keeps firing operation complete callbacks until all are cleared', () => {
@@ -1197,7 +1200,7 @@ describe('callbacks', () => {
 
 		store.put([book1])
 
-		onAfterChange.mockImplementation((prev, next) => {
+		onAfterChange.mockImplementation((prev: any, next: any) => {
 			if ([0, 1, 2, 5, 6].includes(step)) {
 				step++
 				store.put([{ ...next, numPages: next.numPages + 1 }])
package/src/lib/test/sortMigrations.test.ts
CHANGED
@@ -31,21 +31,53 @@ describe(sortMigrations, () => {
 		).toEqual(['bar/1', 'bar/2', 'foo/1', 'foo/2'])
 	})
 
+	it('should minimize distance between dependencies and dependents', () => {
+		// bar/3 depends on foo/1 - should process bar sequence immediately after foo/1
+		expect(
+			sort([m('foo/2'), m('bar/3', { dependsOn: ['foo/1'] }), m('foo/1'), m('bar/1'), m('bar/2')])
+		).toEqual(['foo/1', 'bar/1', 'bar/2', 'bar/3', 'foo/2'])
+	})
+
+	it('should minimize total distance for multiple explicit dependencies', () => {
+		// Both bar/2 and baz/1 depend on foo/1 - minimize total distance
+		expect(
+			sort([
+				m('foo/2'),
+				m('bar/2', { dependsOn: ['foo/1'] }),
+				m('foo/1'),
+				m('bar/1'),
+				m('baz/1', { dependsOn: ['foo/1'] }),
+			])
+		).toEqual(['foo/1', 'bar/1', 'bar/2', 'baz/1', 'foo/2'])
+	})
+
+	it('should handle chain of explicit dependencies optimally', () => {
+		// foo/1 -> bar/1 -> baz/1 chain should be consecutive
+		expect(
+			sort([
+				m('foo/2'),
+				m('bar/1', { dependsOn: ['foo/1'] }),
+				m('foo/1'),
+				m('baz/1', { dependsOn: ['bar/1'] }),
+			])
+		).toEqual(['foo/1', 'bar/1', 'baz/1', 'foo/2'])
+	})
+
 	it('should fail if a cycle is created', () => {
 		expect(() => {
 			sort([m('foo/1', { dependsOn: ['foo/1'] })])
-		}).toThrowErrorMatchingInlineSnapshot(`
+		}).toThrowErrorMatchingInlineSnapshot(`[Error: Circular dependency in migrations: foo/1]`)
 
 		expect(() => {
 			sort([m('foo/1', { dependsOn: ['foo/2'] }), m('foo/2')])
-		}).toThrowErrorMatchingInlineSnapshot(`
+		}).toThrowErrorMatchingInlineSnapshot(`[Error: Circular dependency in migrations: foo/1]`)
 
 		expect(() => {
 			sort([m('foo/1', { dependsOn: ['bar/1'] }), m('bar/1', { dependsOn: ['foo/1'] })])
-		}).toThrowErrorMatchingInlineSnapshot(`
+		}).toThrowErrorMatchingInlineSnapshot(`[Error: Circular dependency in migrations: foo/1]`)
 
 		expect(() => {
 			sort([m('bar/1', { dependsOn: ['foo/1'] }), m('foo/1', { dependsOn: ['bar/1'] })])
-		}).toThrowErrorMatchingInlineSnapshot(`
+		}).toThrowErrorMatchingInlineSnapshot(`[Error: Circular dependency in migrations: bar/1]`)
 	})
 })