@instantdb/core 0.22.164 → 0.22.165
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/__tests__/src/infiniteQuery.e2e.test.ts +384 -0
- package/__tests__/src/simple.e2e.test.ts +0 -1
- package/__tests__/src/utils/e2e.ts +1 -1
- package/dist/commonjs/Reactor.d.ts +1 -1
- package/dist/commonjs/Reactor.js +3 -3
- package/dist/commonjs/Reactor.js.map +1 -1
- package/dist/commonjs/index.d.ts +23 -2
- package/dist/commonjs/index.d.ts.map +1 -1
- package/dist/commonjs/index.js +25 -1
- package/dist/commonjs/index.js.map +1 -1
- package/dist/commonjs/infiniteQuery.d.ts +26 -0
- package/dist/commonjs/infiniteQuery.d.ts.map +1 -0
- package/dist/commonjs/infiniteQuery.js +422 -0
- package/dist/commonjs/infiniteQuery.js.map +1 -0
- package/dist/commonjs/instaql.d.ts.map +1 -1
- package/dist/commonjs/instaql.js +18 -5
- package/dist/commonjs/instaql.js.map +1 -1
- package/dist/commonjs/queryTypes.d.ts +2 -2
- package/dist/commonjs/queryTypes.d.ts.map +1 -1
- package/dist/commonjs/queryTypes.js.map +1 -1
- package/dist/commonjs/utils/Deferred.d.ts +5 -4
- package/dist/commonjs/utils/Deferred.d.ts.map +1 -1
- package/dist/commonjs/utils/Deferred.js.map +1 -1
- package/dist/commonjs/utils/weakHash.d.ts.map +1 -1
- package/dist/commonjs/utils/weakHash.js +4 -0
- package/dist/commonjs/utils/weakHash.js.map +1 -1
- package/dist/esm/Reactor.d.ts +1 -1
- package/dist/esm/Reactor.js +1 -1
- package/dist/esm/Reactor.js.map +1 -1
- package/dist/esm/index.d.ts +23 -2
- package/dist/esm/index.d.ts.map +1 -1
- package/dist/esm/index.js +25 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/infiniteQuery.d.ts +26 -0
- package/dist/esm/infiniteQuery.d.ts.map +1 -0
- package/dist/esm/infiniteQuery.js +417 -0
- package/dist/esm/infiniteQuery.js.map +1 -0
- package/dist/esm/instaql.d.ts.map +1 -1
- package/dist/esm/instaql.js +18 -5
- package/dist/esm/instaql.js.map +1 -1
- package/dist/esm/queryTypes.d.ts +2 -2
- package/dist/esm/queryTypes.d.ts.map +1 -1
- package/dist/esm/queryTypes.js.map +1 -1
- package/dist/esm/utils/Deferred.d.ts +5 -4
- package/dist/esm/utils/Deferred.d.ts.map +1 -1
- package/dist/esm/utils/Deferred.js.map +1 -1
- package/dist/esm/utils/weakHash.d.ts.map +1 -1
- package/dist/esm/utils/weakHash.js +4 -0
- package/dist/esm/utils/weakHash.js.map +1 -1
- package/dist/standalone/index.js +1731 -1432
- package/dist/standalone/index.umd.cjs +3 -3
- package/package.json +2 -2
- package/src/Reactor.js +1 -1
- package/src/index.ts +49 -0
- package/src/infiniteQuery.ts +573 -0
- package/src/instaql.ts +25 -7
- package/src/queryTypes.ts +1 -2
- package/src/utils/{Deferred.js → Deferred.ts} +4 -4
- package/src/utils/weakHash.ts +4 -0
- package/vitest.config.ts +6 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@instantdb/core",
|
|
3
|
-
"version": "0.22.
|
|
3
|
+
"version": "0.22.165",
|
|
4
4
|
"description": "Instant's core local abstraction",
|
|
5
5
|
"homepage": "https://github.com/instantdb/instant/tree/main/client/packages/core",
|
|
6
6
|
"repository": {
|
|
@@ -56,7 +56,7 @@
|
|
|
56
56
|
"dependencies": {
|
|
57
57
|
"mutative": "^1.0.10",
|
|
58
58
|
"uuid": "^11.1.0",
|
|
59
|
-
"@instantdb/version": "0.22.
|
|
59
|
+
"@instantdb/version": "0.22.165"
|
|
60
60
|
},
|
|
61
61
|
"scripts": {
|
|
62
62
|
"test": "vitest",
|
package/src/Reactor.js
CHANGED
|
@@ -10,7 +10,7 @@ import * as authAPI from './authAPI.ts';
|
|
|
10
10
|
import * as StorageApi from './StorageAPI.ts';
|
|
11
11
|
import * as flags from './utils/flags.ts';
|
|
12
12
|
import { buildPresenceSlice, hasPresenceResponseChanged } from './presence.ts';
|
|
13
|
-
import { Deferred } from './utils/Deferred.js';
|
|
13
|
+
import { Deferred } from './utils/Deferred.ts';
|
|
14
14
|
import { PersistedObject } from './utils/PersistedObject.ts';
|
|
15
15
|
|
|
16
16
|
import { extractTriples } from './model/instaqlResult.js';
|
package/src/index.ts
CHANGED
|
@@ -57,6 +57,9 @@ import type {
|
|
|
57
57
|
InstaQLResult,
|
|
58
58
|
InstaQLFields,
|
|
59
59
|
ValidQuery,
|
|
60
|
+
Cursor,
|
|
61
|
+
Order,
|
|
62
|
+
InstaQLQueryEntityResult,
|
|
60
63
|
} from './queryTypes.ts';
|
|
61
64
|
import type { PresencePeer } from './presenceTypes.ts';
|
|
62
65
|
import type {
|
|
@@ -142,6 +145,12 @@ import {
|
|
|
142
145
|
ReadableStreamCtor,
|
|
143
146
|
WritableStreamCtor,
|
|
144
147
|
} from './Stream.ts';
|
|
148
|
+
import {
|
|
149
|
+
type InfiniteQueryCallbackResponse,
|
|
150
|
+
type InfiniteQuerySubscription,
|
|
151
|
+
subscribeInfiniteQuery,
|
|
152
|
+
getInfiniteQueryInitialSnapshot,
|
|
153
|
+
} from './infiniteQuery.ts';
|
|
145
154
|
|
|
146
155
|
const defaultOpenDevtool = true;
|
|
147
156
|
|
|
@@ -695,6 +704,38 @@ class InstantCoreDatabase<
|
|
|
695
704
|
return this._reactor.subscribeQuery(query, cb, opts);
|
|
696
705
|
}
|
|
697
706
|
|
|
707
|
+
/**
|
|
708
|
+
* Subscribe to a query and incrementally load more items
|
|
709
|
+
*
|
|
710
|
+
* Only one top level namespace in the query is allowed.
|
|
711
|
+
* @example
|
|
712
|
+
* const { unsubscribe, loadNextPage } = db.subscribeInfiniteQuery({
|
|
713
|
+
* posts: {
|
|
714
|
+
* $: {
|
|
715
|
+
* limit: 20, // Load 20 posts at a time
|
|
716
|
+
* order: {
|
|
717
|
+
* createdAt: 'desc',
|
|
718
|
+
* },
|
|
719
|
+
* },
|
|
720
|
+
* },
|
|
721
|
+
* (resp) => {
|
|
722
|
+
* console.log(resp.data.posts);
|
|
723
|
+
* }
|
|
724
|
+
* });
|
|
725
|
+
*/
|
|
726
|
+
subscribeInfiniteQuery<Q extends ValidQuery<Q, Schema>>(
|
|
727
|
+
query: Q,
|
|
728
|
+
cb: (resp: InfiniteQueryCallbackResponse<Schema, Q, UseDates>) => void,
|
|
729
|
+
opts?: InstaQLOptions,
|
|
730
|
+
): InfiniteQuerySubscription {
|
|
731
|
+
return subscribeInfiniteQuery<Schema, Q, UseDates>(
|
|
732
|
+
this as any,
|
|
733
|
+
query,
|
|
734
|
+
cb,
|
|
735
|
+
opts,
|
|
736
|
+
);
|
|
737
|
+
}
|
|
738
|
+
|
|
698
739
|
/**
|
|
699
740
|
* Listen for the logged in state. This is useful
|
|
700
741
|
* for deciding when to show a login screen.
|
|
@@ -1025,6 +1066,9 @@ export {
|
|
|
1025
1066
|
version,
|
|
1026
1067
|
InstantError,
|
|
1027
1068
|
|
|
1069
|
+
// infinite query
|
|
1070
|
+
getInfiniteQueryInitialSnapshot,
|
|
1071
|
+
|
|
1028
1072
|
// sync table enums
|
|
1029
1073
|
SyncTableCallbackEventType,
|
|
1030
1074
|
|
|
@@ -1057,6 +1101,7 @@ export {
|
|
|
1057
1101
|
// new query types
|
|
1058
1102
|
type InstaQLParams,
|
|
1059
1103
|
type ValidQuery,
|
|
1104
|
+
type Cursor,
|
|
1060
1105
|
type InstaQLOptions,
|
|
1061
1106
|
type InstaQLQueryParams,
|
|
1062
1107
|
type InstantQuery,
|
|
@@ -1065,6 +1110,10 @@ export {
|
|
|
1065
1110
|
type InstantEntity,
|
|
1066
1111
|
type InstantSchemaDatabase,
|
|
1067
1112
|
type InstaQLFields,
|
|
1113
|
+
type Order,
|
|
1114
|
+
type InstaQLQueryEntityResult,
|
|
1115
|
+
type InfiniteQueryCallbackResponse,
|
|
1116
|
+
type InfiniteQuerySubscription,
|
|
1068
1117
|
|
|
1069
1118
|
// schema types
|
|
1070
1119
|
type AttrsDefs,
|
|
@@ -0,0 +1,573 @@
|
|
|
1
|
+
import {
|
|
2
|
+
coerceQuery,
|
|
3
|
+
QueryValidationError,
|
|
4
|
+
type InstantCoreDatabase,
|
|
5
|
+
type ValidQuery,
|
|
6
|
+
} from './index.ts';
|
|
7
|
+
import {
|
|
8
|
+
InstaQLResponse,
|
|
9
|
+
InstaQLOptions,
|
|
10
|
+
Cursor,
|
|
11
|
+
Order,
|
|
12
|
+
} from './queryTypes.ts';
|
|
13
|
+
import { InstantSchemaDef } from './schemaTypes.ts';
|
|
14
|
+
import { assert } from './utils/error.ts';
|
|
15
|
+
|
|
16
|
+
// Example for {order: {value: "asc"}}
|
|
17
|
+
//
|
|
18
|
+
// 0
|
|
19
|
+
// <------------------|------------------------------------------------------>
|
|
20
|
+
// <- starter sub ->
|
|
21
|
+
//
|
|
22
|
+
// Bootstrap phase: until the limit (4 in this example) items are reached, the
|
|
23
|
+
// starter subscription is the only subscription and it writes to the forwardChunks map with the key PRE_BOOTSTRAP_CURSOR.
|
|
24
|
+
//
|
|
25
|
+
// When the limit is reached it automatically becomes a real forward chunk and has a definite start and end.
|
|
26
|
+
// A new reverse chunk gets added to watch for any new items at the start of the list.
|
|
27
|
+
//
|
|
28
|
+
// 0 1 2 3
|
|
29
|
+
// <------------------|------------------------------------------------------>
|
|
30
|
+
// <- starter sub ->
|
|
31
|
+
//
|
|
32
|
+
// ↓ BECOMES ↓
|
|
33
|
+
//
|
|
34
|
+
// 0 1 2 3
|
|
35
|
+
// <------------------|------------------------------------------------------>
|
|
36
|
+
// <-reverse chunk][forward chunk ]
|
|
37
|
+
//
|
|
38
|
+
// 0 1 2 3 4
|
|
39
|
+
// <------------------|------------------------------------------------------>
|
|
40
|
+
// <-reverse chunk][forward chunk ]
|
|
41
|
+
// When item 4 is added, the forward chunk subscription gets updated so that
|
|
42
|
+
// hasNextPage is `true`. This tells the user that a new page can be loaded.
|
|
43
|
+
//
|
|
44
|
+
// User clicks: loadNextPage
|
|
45
|
+
// 0 1 2 3 4
|
|
46
|
+
// <------------------|------------------------------------------------------>
|
|
47
|
+
// <-reverse chunk][ frozen forward chunk ][ new forward chunk ]
|
|
48
|
+
//
|
|
49
|
+
// More numbers get added
|
|
50
|
+
// 0 1 2 3 4 5 6 7 8
|
|
51
|
+
// <------------------|------------------------------------------------------>
|
|
52
|
+
// <-reverse chunk][ frozen forward chunk ][ forward chunk ] ^
|
|
53
|
+
// hasNextPage=true^
|
|
54
|
+
//
|
|
55
|
+
//
|
|
56
|
+
// User clicks: loadNextPage
|
|
57
|
+
//
|
|
58
|
+
// 0 1 2 3 4 5 6 7 8
|
|
59
|
+
// <------------------|------------------------------------------------------>
|
|
60
|
+
// <-reverse chunk][ frozen forward chunk ][ frozen forward chunk ][ new chunk
|
|
61
|
+
//
|
|
62
|
+
// The reverse chunks work in the same way as the forward chunks but the order in the query is reversed.
|
|
63
|
+
// When a reverse chunk receives an update it will check to see if more can be loaded and it will
|
|
64
|
+
// automatically freeze the chunk and add a new one. i.e. : works the same as if
|
|
65
|
+
// loadNextPage was automatically clicked when hasNextPage became true.
|
|
66
|
+
//
|
|
67
|
+
// Chunks are indexed by their starting point cursor, for forward chunks this is the "[" point.
|
|
68
|
+
// Their starting point cursor is inclusive in the query and exclusive from the following query
|
|
69
|
+
|
|
70
|
+
const makeCursorKey = (cursor: Cursor) => JSON.stringify(cursor);
|
|
71
|
+
const parseCursorKey = (cursorKey: string) => JSON.parse(cursorKey) as Cursor;
|
|
72
|
+
|
|
73
|
+
export type ChunkStatus = 'pre-bootstrap' | 'bootstrapping' | 'frozen';
|
|
74
|
+
type Chunk = {
|
|
75
|
+
status: ChunkStatus;
|
|
76
|
+
data: any[];
|
|
77
|
+
hasMore?: boolean;
|
|
78
|
+
endCursor?: Cursor;
|
|
79
|
+
afterInclusive?: boolean;
|
|
80
|
+
};
|
|
81
|
+
|
|
82
|
+
type ChunkWithEndCursor = Chunk & { endCursor: Cursor };
|
|
83
|
+
|
|
84
|
+
const chunkHasEndCursor = (chunk: Chunk): chunk is ChunkWithEndCursor => {
|
|
85
|
+
return !!chunk.endCursor;
|
|
86
|
+
};
|
|
87
|
+
|
|
88
|
+
export interface InfiniteQuerySubscription {
|
|
89
|
+
unsubscribe: () => void;
|
|
90
|
+
loadNextPage: () => void;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
const readCanLoadNextPage = (forwardChunks: Map<string, Chunk>) => {
|
|
94
|
+
const chunksInOrder = Array.from(forwardChunks.values());
|
|
95
|
+
if (chunksInOrder.length === 0) return false;
|
|
96
|
+
return chunksInOrder[chunksInOrder.length - 1]?.hasMore || false;
|
|
97
|
+
};
|
|
98
|
+
|
|
99
|
+
// Chunk sub key is used to create keys to keep track of the subscriptions
|
|
100
|
+
// while the chunk maps are keyed by the cursor, here we distinguish between
|
|
101
|
+
// forward and reverse because the first 2 chunks will have the same starting
|
|
102
|
+
// cursor.
|
|
103
|
+
const chunkSubKey = (direction: 'forward' | 'reverse', cursor: Cursor) =>
|
|
104
|
+
`${direction}:${JSON.stringify(cursor)}`;
|
|
105
|
+
|
|
106
|
+
const reverseOrder = <
|
|
107
|
+
Schema extends InstantSchemaDef<any, any, any>,
|
|
108
|
+
Entity extends keyof Schema['entities'],
|
|
109
|
+
>(
|
|
110
|
+
order?: Order<Schema, Entity>,
|
|
111
|
+
): Order<Schema, Entity> => {
|
|
112
|
+
if (!order) {
|
|
113
|
+
return {
|
|
114
|
+
serverCreatedAt: 'asc',
|
|
115
|
+
} satisfies Order<Schema, Entity>;
|
|
116
|
+
}
|
|
117
|
+
const key = Object.keys(order).at(0);
|
|
118
|
+
if (!key) {
|
|
119
|
+
return {
|
|
120
|
+
serverCreatedAt: 'asc',
|
|
121
|
+
} satisfies Order<Schema, Entity>;
|
|
122
|
+
}
|
|
123
|
+
return {
|
|
124
|
+
[key]: order[key as keyof typeof order] === 'asc' ? 'desc' : 'asc',
|
|
125
|
+
} as Order<Schema, Entity>;
|
|
126
|
+
};
|
|
127
|
+
|
|
128
|
+
const normalizeChunks = (
|
|
129
|
+
forwardChunks: Map<string, Chunk>,
|
|
130
|
+
reverseChunks: Map<string, Chunk>,
|
|
131
|
+
): { chunks: Chunk[]; data: any[] } => {
|
|
132
|
+
const chunks = [
|
|
133
|
+
...Array.from(reverseChunks.values()).slice().reverse(),
|
|
134
|
+
...Array.from(forwardChunks.values()),
|
|
135
|
+
];
|
|
136
|
+
|
|
137
|
+
const data = [
|
|
138
|
+
...Array.from(reverseChunks.values())
|
|
139
|
+
.slice()
|
|
140
|
+
.reverse()
|
|
141
|
+
.flatMap((chunk) => chunk.data.slice().reverse()),
|
|
142
|
+
...Array.from(forwardChunks.values()).flatMap((chunk) => chunk.data),
|
|
143
|
+
];
|
|
144
|
+
return { chunks, data };
|
|
145
|
+
};
|
|
146
|
+
|
|
147
|
+
const PRE_BOOTSTRAP_CURSOR: Cursor = ['bootstrap', 'bootstrap', 'bootstrap', 1];
|
|
148
|
+
|
|
149
|
+
export type InfiniteQueryCallbackResponse<
|
|
150
|
+
Schema extends InstantSchemaDef<any, any, any>,
|
|
151
|
+
Query extends Record<string, any>,
|
|
152
|
+
UseDatesLocal extends boolean,
|
|
153
|
+
> =
|
|
154
|
+
| {
|
|
155
|
+
error: { message: string };
|
|
156
|
+
data: undefined;
|
|
157
|
+
canLoadNextPage: boolean;
|
|
158
|
+
}
|
|
159
|
+
| {
|
|
160
|
+
error: undefined;
|
|
161
|
+
data: InstaQLResponse<Schema, Query, UseDatesLocal>;
|
|
162
|
+
canLoadNextPage: boolean;
|
|
163
|
+
};
|
|
164
|
+
|
|
165
|
+
export const subscribeInfiniteQuery = <
|
|
166
|
+
Schema extends InstantSchemaDef<any, any, any>,
|
|
167
|
+
Q extends ValidQuery<Q, Schema>,
|
|
168
|
+
UseDates extends boolean,
|
|
169
|
+
>(
|
|
170
|
+
db: InstantCoreDatabase<Schema, UseDates>,
|
|
171
|
+
fullQuery: Q,
|
|
172
|
+
cb: (resp: InfiniteQueryCallbackResponse<Schema, Q, UseDates>) => void,
|
|
173
|
+
opts?: InstaQLOptions,
|
|
174
|
+
): InfiniteQuerySubscription => {
|
|
175
|
+
const { entityName, entityQuery: query } = splitAndValidateQuery(fullQuery);
|
|
176
|
+
|
|
177
|
+
const pageSize = query.$?.limit || 10;
|
|
178
|
+
const entity = entityName;
|
|
179
|
+
|
|
180
|
+
const forwardChunks = new Map<string, Chunk>();
|
|
181
|
+
const reverseChunks = new Map<string, Chunk>();
|
|
182
|
+
// Keeps track of all subscriptions (besides starter sub)
|
|
183
|
+
const allUnsubs = new Map<string, () => void>();
|
|
184
|
+
|
|
185
|
+
let hasKickstarted = false;
|
|
186
|
+
let isActive = true;
|
|
187
|
+
let lastReverseAdvancedChunkKey: string | null = null;
|
|
188
|
+
let starterUnsub: (() => void) | null = null;
|
|
189
|
+
|
|
190
|
+
const sendError = (err: { message: string }) => {
|
|
191
|
+
cb({ error: err, data: undefined, canLoadNextPage: false });
|
|
192
|
+
};
|
|
193
|
+
|
|
194
|
+
const pushUpdate = () => {
|
|
195
|
+
if (!isActive) return;
|
|
196
|
+
|
|
197
|
+
const { chunks, data } = normalizeChunks(forwardChunks, reverseChunks);
|
|
198
|
+
cb({
|
|
199
|
+
data: { [entity]: data } as InstaQLResponse<
|
|
200
|
+
Schema,
|
|
201
|
+
typeof query,
|
|
202
|
+
UseDates
|
|
203
|
+
>,
|
|
204
|
+
// @ts-expect-error hidden debug variable
|
|
205
|
+
chunks,
|
|
206
|
+
canLoadNextPage: readCanLoadNextPage(forwardChunks),
|
|
207
|
+
});
|
|
208
|
+
};
|
|
209
|
+
|
|
210
|
+
const setForwardChunk = (startCursor: Cursor, chunk: Chunk) => {
|
|
211
|
+
forwardChunks.set(makeCursorKey(startCursor), chunk);
|
|
212
|
+
pushUpdate();
|
|
213
|
+
};
|
|
214
|
+
|
|
215
|
+
const setReverseChunk = (startCursor: Cursor, chunk: Chunk) => {
|
|
216
|
+
reverseChunks.set(makeCursorKey(startCursor), chunk);
|
|
217
|
+
maybeAdvanceReverse();
|
|
218
|
+
pushUpdate();
|
|
219
|
+
};
|
|
220
|
+
|
|
221
|
+
const freezeReverse = (chunkKey: string, chunk: ChunkWithEndCursor) => {
|
|
222
|
+
const startCursor = parseCursorKey(chunkKey);
|
|
223
|
+
const currentSub = allUnsubs.get(chunkSubKey('reverse', startCursor));
|
|
224
|
+
currentSub?.();
|
|
225
|
+
|
|
226
|
+
const nextSub = db.subscribeQuery(
|
|
227
|
+
{
|
|
228
|
+
[entity]: {
|
|
229
|
+
...query,
|
|
230
|
+
$: {
|
|
231
|
+
after: startCursor,
|
|
232
|
+
before: chunk.endCursor,
|
|
233
|
+
beforeInclusive: true,
|
|
234
|
+
where: query.$?.where,
|
|
235
|
+
fields: query.$?.fields,
|
|
236
|
+
order: reverseOrder(query.$?.order),
|
|
237
|
+
},
|
|
238
|
+
},
|
|
239
|
+
} as unknown as Q,
|
|
240
|
+
(frozenData) => {
|
|
241
|
+
if (frozenData.error) {
|
|
242
|
+
return sendError(frozenData.error);
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
const rows = frozenData.data[entity];
|
|
246
|
+
const pageInfo = frozenData.pageInfo[entity];
|
|
247
|
+
assert(
|
|
248
|
+
rows && pageInfo,
|
|
249
|
+
'Expected query subscription to contain rows and pageInfo',
|
|
250
|
+
);
|
|
251
|
+
|
|
252
|
+
setReverseChunk(startCursor, {
|
|
253
|
+
data: rows,
|
|
254
|
+
status: 'frozen',
|
|
255
|
+
hasMore: pageInfo.hasNextPage,
|
|
256
|
+
endCursor: pageInfo.endCursor,
|
|
257
|
+
});
|
|
258
|
+
},
|
|
259
|
+
opts,
|
|
260
|
+
);
|
|
261
|
+
|
|
262
|
+
allUnsubs.set(chunkSubKey('reverse', startCursor), nextSub);
|
|
263
|
+
};
|
|
264
|
+
|
|
265
|
+
const pushNewReverse = (startCursor: Cursor) => {
|
|
266
|
+
const querySub = db.subscribeQuery(
|
|
267
|
+
{
|
|
268
|
+
[entity]: {
|
|
269
|
+
...query,
|
|
270
|
+
$: {
|
|
271
|
+
limit: pageSize,
|
|
272
|
+
after: startCursor,
|
|
273
|
+
where: query.$?.where,
|
|
274
|
+
fields: query.$?.fields,
|
|
275
|
+
order: reverseOrder(query.$?.order),
|
|
276
|
+
},
|
|
277
|
+
},
|
|
278
|
+
} as unknown as Q,
|
|
279
|
+
(windowData) => {
|
|
280
|
+
if (windowData.error) {
|
|
281
|
+
return sendError(windowData.error);
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
const rows = windowData.data[entity];
|
|
285
|
+
const pageInfo = windowData.pageInfo[entity];
|
|
286
|
+
assert(rows && pageInfo, 'Expected rows and pageInfo');
|
|
287
|
+
|
|
288
|
+
setReverseChunk(startCursor, {
|
|
289
|
+
data: rows,
|
|
290
|
+
status: 'bootstrapping',
|
|
291
|
+
hasMore: pageInfo.hasNextPage,
|
|
292
|
+
endCursor: pageInfo.endCursor,
|
|
293
|
+
});
|
|
294
|
+
},
|
|
295
|
+
opts,
|
|
296
|
+
);
|
|
297
|
+
|
|
298
|
+
allUnsubs.set(chunkSubKey('reverse', startCursor), querySub);
|
|
299
|
+
};
|
|
300
|
+
|
|
301
|
+
const pushNewForward = (startCursor: Cursor, afterInclusive = false) => {
|
|
302
|
+
const querySub = db.subscribeQuery(
|
|
303
|
+
{
|
|
304
|
+
[entity]: {
|
|
305
|
+
...query,
|
|
306
|
+
$: {
|
|
307
|
+
limit: pageSize,
|
|
308
|
+
after: startCursor,
|
|
309
|
+
afterInclusive,
|
|
310
|
+
where: query.$?.where,
|
|
311
|
+
fields: query.$?.fields,
|
|
312
|
+
order: query.$?.order,
|
|
313
|
+
},
|
|
314
|
+
},
|
|
315
|
+
} as unknown as Q,
|
|
316
|
+
(windowData) => {
|
|
317
|
+
if (windowData.error) {
|
|
318
|
+
return sendError(windowData.error);
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
const rows = windowData.data[entity];
|
|
322
|
+
const pageInfo = windowData.pageInfo[entity];
|
|
323
|
+
assert(rows && pageInfo, 'Page info and rows');
|
|
324
|
+
|
|
325
|
+
setForwardChunk(startCursor, {
|
|
326
|
+
data: rows,
|
|
327
|
+
status: 'bootstrapping',
|
|
328
|
+
hasMore: pageInfo.hasNextPage,
|
|
329
|
+
endCursor: pageInfo.endCursor,
|
|
330
|
+
afterInclusive,
|
|
331
|
+
});
|
|
332
|
+
},
|
|
333
|
+
opts,
|
|
334
|
+
);
|
|
335
|
+
|
|
336
|
+
allUnsubs.set(chunkSubKey('forward', startCursor), querySub);
|
|
337
|
+
};
|
|
338
|
+
|
|
339
|
+
const freezeForward = (startCursor: Cursor) => {
|
|
340
|
+
const key = makeCursorKey(startCursor);
|
|
341
|
+
const currentSub = allUnsubs.get(chunkSubKey('forward', startCursor));
|
|
342
|
+
currentSub?.();
|
|
343
|
+
|
|
344
|
+
const chunk = forwardChunks.get(key);
|
|
345
|
+
if (!chunk?.endCursor) return;
|
|
346
|
+
|
|
347
|
+
const nextSub = db.subscribeQuery(
|
|
348
|
+
{
|
|
349
|
+
[entity]: {
|
|
350
|
+
...query,
|
|
351
|
+
$: {
|
|
352
|
+
after: startCursor,
|
|
353
|
+
afterInclusive: chunk.afterInclusive,
|
|
354
|
+
before: chunk.endCursor,
|
|
355
|
+
beforeInclusive: true,
|
|
356
|
+
where: query.$?.where,
|
|
357
|
+
fields: query.$?.fields,
|
|
358
|
+
order: query.$?.order,
|
|
359
|
+
},
|
|
360
|
+
},
|
|
361
|
+
} as unknown as Q,
|
|
362
|
+
(frozenData) => {
|
|
363
|
+
if (frozenData.error) {
|
|
364
|
+
return sendError(frozenData.error);
|
|
365
|
+
}
|
|
366
|
+
|
|
367
|
+
const rows = frozenData.data[entity];
|
|
368
|
+
const pageInfo = frozenData.pageInfo[entity];
|
|
369
|
+
assert(rows && pageInfo, 'Expected rows and pageInfo');
|
|
370
|
+
|
|
371
|
+
setForwardChunk(startCursor, {
|
|
372
|
+
data: rows,
|
|
373
|
+
status: 'frozen',
|
|
374
|
+
hasMore: pageInfo.hasNextPage,
|
|
375
|
+
endCursor: pageInfo.endCursor,
|
|
376
|
+
afterInclusive: chunk.afterInclusive,
|
|
377
|
+
});
|
|
378
|
+
},
|
|
379
|
+
opts,
|
|
380
|
+
);
|
|
381
|
+
|
|
382
|
+
allUnsubs.set(chunkSubKey('forward', startCursor), nextSub);
|
|
383
|
+
};
|
|
384
|
+
|
|
385
|
+
// Consider order: {val: "asc"} with pageItems = 4
|
|
386
|
+
// A reverse chunk captures all the new items coming in before us.
|
|
387
|
+
// If we hit 4 then we freeze the current chunk and create a new reverse chunk
|
|
388
|
+
const maybeAdvanceReverse = () => {
|
|
389
|
+
const tailEntry = Array.from(reverseChunks.entries()).at(-1);
|
|
390
|
+
if (!tailEntry) return;
|
|
391
|
+
|
|
392
|
+
const [chunkKey, chunk] = tailEntry;
|
|
393
|
+
|
|
394
|
+
// If a chunk has more, then it must have an endCursor
|
|
395
|
+
if (!chunk?.hasMore) return;
|
|
396
|
+
if (!chunkHasEndCursor(chunk)) return;
|
|
397
|
+
|
|
398
|
+
// maybeAdvanceReverse can run multiple times if multiple changes are made
|
|
399
|
+
// to the reverse chunk
|
|
400
|
+
// This prevents adding the same new reverse frame twice
|
|
401
|
+
const advanceKey = `${chunkKey}:${makeCursorKey(chunk.endCursor)}`;
|
|
402
|
+
if (advanceKey == lastReverseAdvancedChunkKey) return;
|
|
403
|
+
lastReverseAdvancedChunkKey = advanceKey;
|
|
404
|
+
|
|
405
|
+
freezeReverse(chunkKey, chunk);
|
|
406
|
+
pushNewReverse(chunk.endCursor);
|
|
407
|
+
};
|
|
408
|
+
|
|
409
|
+
const loadNextPage = () => {
|
|
410
|
+
const tailEntry = Array.from(forwardChunks.entries()).at(-1);
|
|
411
|
+
if (!tailEntry) return;
|
|
412
|
+
|
|
413
|
+
const [chunkKey, chunk] = tailEntry;
|
|
414
|
+
|
|
415
|
+
// If the chunk has more items after it, it must have an end cursor, and we can
|
|
416
|
+
// load more items
|
|
417
|
+
// if (!chunk?.hasMore) return;
|
|
418
|
+
if (!chunk.endCursor) return;
|
|
419
|
+
|
|
420
|
+
freezeForward(parseCursorKey(chunkKey));
|
|
421
|
+
pushNewForward(chunk.endCursor);
|
|
422
|
+
};
|
|
423
|
+
|
|
424
|
+
starterUnsub = db.subscribeQuery(
|
|
425
|
+
{
|
|
426
|
+
[entity]: {
|
|
427
|
+
...query,
|
|
428
|
+
$: {
|
|
429
|
+
limit: pageSize,
|
|
430
|
+
where: query.$?.where,
|
|
431
|
+
fields: query.$?.fields,
|
|
432
|
+
order: query.$?.order,
|
|
433
|
+
},
|
|
434
|
+
},
|
|
435
|
+
} as unknown as Q,
|
|
436
|
+
async (starterData) => {
|
|
437
|
+
if (hasKickstarted) return;
|
|
438
|
+
if (starterData.error) {
|
|
439
|
+
return sendError(starterData.error);
|
|
440
|
+
}
|
|
441
|
+
const pageInfo = starterData.pageInfo[entity];
|
|
442
|
+
|
|
443
|
+
const rows = starterData?.data?.[entity];
|
|
444
|
+
assert(rows && pageInfo, 'Expected rows and pageInfo');
|
|
445
|
+
|
|
446
|
+
if (rows.length < pageSize) {
|
|
447
|
+
// If the rows are less than the page size, then we don't need to
|
|
448
|
+
// create forward and reverse chunks.
|
|
449
|
+
// We just treat the starter query as a forward chunk
|
|
450
|
+
setForwardChunk(PRE_BOOTSTRAP_CURSOR, {
|
|
451
|
+
data: rows,
|
|
452
|
+
status: 'pre-bootstrap',
|
|
453
|
+
});
|
|
454
|
+
return;
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
// Consider a query with no items; the server will return a result with
|
|
458
|
+
// no start cursor. If we add {pageSize} optimistic updates we can
|
|
459
|
+
// get here and still have no startCursor. By returning we are skipping
|
|
460
|
+
// the optimistic update and just waiting for the result from the
|
|
461
|
+
// server.
|
|
462
|
+
const initialForwardCursor = pageInfo.startCursor;
|
|
463
|
+
if (!initialForwardCursor) {
|
|
464
|
+
return;
|
|
465
|
+
}
|
|
466
|
+
forwardChunks.delete(makeCursorKey(PRE_BOOTSTRAP_CURSOR));
|
|
467
|
+
|
|
468
|
+
pushNewForward(initialForwardCursor, true);
|
|
469
|
+
pushNewReverse(pageInfo.startCursor);
|
|
470
|
+
hasKickstarted = true;
|
|
471
|
+
|
|
472
|
+
// Flush the initial bootstrap querysub data
|
|
473
|
+
// because immediately unsubscribing will never save it for offline in idb
|
|
474
|
+
await db._reactor.querySubs.flush();
|
|
475
|
+
|
|
476
|
+
// Unsubscribe the starter subscription
|
|
477
|
+
starterUnsub?.();
|
|
478
|
+
starterUnsub = null;
|
|
479
|
+
},
|
|
480
|
+
opts,
|
|
481
|
+
);
|
|
482
|
+
|
|
483
|
+
const unsubscribe = () => {
|
|
484
|
+
if (!isActive) return;
|
|
485
|
+
isActive = false;
|
|
486
|
+
starterUnsub?.();
|
|
487
|
+
starterUnsub = null;
|
|
488
|
+
for (const unsub of allUnsubs.values()) {
|
|
489
|
+
unsub?.();
|
|
490
|
+
}
|
|
491
|
+
allUnsubs.clear();
|
|
492
|
+
};
|
|
493
|
+
|
|
494
|
+
return {
|
|
495
|
+
unsubscribe,
|
|
496
|
+
loadNextPage,
|
|
497
|
+
};
|
|
498
|
+
};
|
|
499
|
+
|
|
500
|
+
export const getInfiniteQueryInitialSnapshot = <
|
|
501
|
+
Schema extends InstantSchemaDef<any, any, any>,
|
|
502
|
+
Q extends ValidQuery<Q, Schema>,
|
|
503
|
+
UseDates extends boolean,
|
|
504
|
+
>(
|
|
505
|
+
db: InstantCoreDatabase<Schema, UseDates>,
|
|
506
|
+
fullQuery: Q | null,
|
|
507
|
+
opts?: InstaQLOptions,
|
|
508
|
+
):
|
|
509
|
+
| InfiniteQueryCallbackResponse<Schema, Q, UseDates>
|
|
510
|
+
| {
|
|
511
|
+
canLoadNextPage: false;
|
|
512
|
+
data: undefined;
|
|
513
|
+
error: undefined;
|
|
514
|
+
} => {
|
|
515
|
+
if (!fullQuery) {
|
|
516
|
+
return {
|
|
517
|
+
canLoadNextPage: false,
|
|
518
|
+
data: undefined,
|
|
519
|
+
error: undefined,
|
|
520
|
+
};
|
|
521
|
+
}
|
|
522
|
+
const { entityName, entityQuery } = splitAndValidateQuery(fullQuery);
|
|
523
|
+
|
|
524
|
+
const pageSize = entityQuery.$?.limit || 10;
|
|
525
|
+
|
|
526
|
+
let coercedQuery = fullQuery
|
|
527
|
+
? coerceQuery({
|
|
528
|
+
[entityName]: {
|
|
529
|
+
...entityQuery,
|
|
530
|
+
$: {
|
|
531
|
+
limit: pageSize,
|
|
532
|
+
where: entityQuery.$?.where,
|
|
533
|
+
fields: entityQuery.$?.fields,
|
|
534
|
+
order: entityQuery.$?.order,
|
|
535
|
+
},
|
|
536
|
+
},
|
|
537
|
+
})
|
|
538
|
+
: null;
|
|
539
|
+
|
|
540
|
+
if (opts && 'ruleParams' in opts) {
|
|
541
|
+
coercedQuery = {
|
|
542
|
+
$$ruleParams: opts.ruleParams,
|
|
543
|
+
...fullQuery,
|
|
544
|
+
};
|
|
545
|
+
}
|
|
546
|
+
const queryResult = db._reactor.getPreviousResult(coercedQuery);
|
|
547
|
+
|
|
548
|
+
return {
|
|
549
|
+
canLoadNextPage: false,
|
|
550
|
+
data: queryResult?.data || undefined,
|
|
551
|
+
error: undefined,
|
|
552
|
+
};
|
|
553
|
+
};
|
|
554
|
+
|
|
555
|
+
/**
|
|
556
|
+
* @throws QueryValidationError
|
|
557
|
+
* @param fullQuery a ValidQuery with one key (entity)
|
|
558
|
+
*/
|
|
559
|
+
const splitAndValidateQuery = (fullQuery: Record<string, any>) => {
|
|
560
|
+
const entityNames = Object.keys(fullQuery);
|
|
561
|
+
if (entityNames.length !== 1) {
|
|
562
|
+
throw new QueryValidationError(
|
|
563
|
+
'subscribeInfiniteQuery expects exactly one entity',
|
|
564
|
+
);
|
|
565
|
+
}
|
|
566
|
+
|
|
567
|
+
const [entityName, entityQuery] = Object.entries(fullQuery)[0];
|
|
568
|
+
|
|
569
|
+
if (!entityName || !entityQuery) {
|
|
570
|
+
throw new QueryValidationError('No query provided for infinite query');
|
|
571
|
+
}
|
|
572
|
+
return { entityName, entityQuery };
|
|
573
|
+
};
|