@tanstack/trailbase-db-collection 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/errors.cjs +39 -0
- package/dist/cjs/errors.cjs.map +1 -0
- package/dist/cjs/errors.d.cts +16 -0
- package/dist/cjs/index.cjs +6 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +1 -0
- package/dist/cjs/trailbase.cjs +26 -30
- package/dist/cjs/trailbase.cjs.map +1 -1
- package/dist/esm/errors.d.ts +16 -0
- package/dist/esm/errors.js +39 -0
- package/dist/esm/errors.js.map +1 -0
- package/dist/esm/index.d.ts +1 -0
- package/dist/esm/index.js +6 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/trailbase.js +26 -30
- package/dist/esm/trailbase.js.map +1 -1
- package/package.json +2 -2
- package/src/errors.ts +37 -0
- package/src/index.ts +2 -0
- package/src/trailbase.ts +40 -35
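With 0.0.5 both builds gain an errors module whose classes are re-exported from the package entry point, so callers can switch from matching on error-message strings to `instanceof` checks. A minimal sketch of consuming them, assuming a hypothetical `persistTodo()` helper that awaits persistence of an optimistic insert; only the imported names come from this diff:

```ts
import {
  TimeoutWaitingForIdsError,
  TrailBaseDBCollectionError,
} from "@tanstack/trailbase-db-collection"

// Hypothetical helper: resolves once an optimistic insert has been confirmed
// by the TrailBase subscription. How the collection is wired up is out of scope.
declare function persistTodo(title: string): Promise<void>

export async function addTodo(title: string): Promise<void> {
  try {
    await persistTodo(title)
  } catch (err) {
    if (err instanceof TimeoutWaitingForIdsError) {
      // Thrown when the subscription does not report the new ids within the timeout.
      console.warn(`Sync confirmation timed out: ${err.message}`)
    } else if (err instanceof TrailBaseDBCollectionError) {
      // Base class shared by every error raised by this collection adapter.
      console.error(`TrailBase collection error: ${err.message}`)
    } else {
      throw err
    }
  }
}
```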
package/dist/cjs/errors.cjs
ADDED
@@ -0,0 +1,39 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const db = require("@tanstack/db");
+class TrailBaseDBCollectionError extends db.TanStackDBError {
+  constructor(message) {
+    super(message);
+    this.name = `TrailBaseDBCollectionError`;
+  }
+}
+class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {
+  constructor(ids) {
+    super(`Timeout waiting for ids: ${ids}`);
+    this.name = `TimeoutWaitingForIdsError`;
+  }
+}
+class ExpectedInsertTypeError extends TrailBaseDBCollectionError {
+  constructor(actualType) {
+    super(`Expected 'insert', got: ${actualType}`);
+    this.name = `ExpectedInsertTypeError`;
+  }
+}
+class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {
+  constructor(actualType) {
+    super(`Expected 'update', got: ${actualType}`);
+    this.name = `ExpectedUpdateTypeError`;
+  }
+}
+class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {
+  constructor(actualType) {
+    super(`Expected 'delete', got: ${actualType}`);
+    this.name = `ExpectedDeleteTypeError`;
+  }
+}
+exports.ExpectedDeleteTypeError = ExpectedDeleteTypeError;
+exports.ExpectedInsertTypeError = ExpectedInsertTypeError;
+exports.ExpectedUpdateTypeError = ExpectedUpdateTypeError;
+exports.TimeoutWaitingForIdsError = TimeoutWaitingForIdsError;
+exports.TrailBaseDBCollectionError = TrailBaseDBCollectionError;
+//# sourceMappingURL=errors.cjs.map
package/dist/cjs/errors.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"errors.cjs","sources":["../../src/errors.ts"],"sourcesContent":["import { TanStackDBError } from \"@tanstack/db\"\n\n// TrailBase DB Collection Errors\nexport class TrailBaseDBCollectionError extends TanStackDBError {\n constructor(message: string) {\n super(message)\n this.name = `TrailBaseDBCollectionError`\n }\n}\n\nexport class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {\n constructor(ids: string) {\n super(`Timeout waiting for ids: ${ids}`)\n this.name = `TimeoutWaitingForIdsError`\n }\n}\n\nexport class ExpectedInsertTypeError extends TrailBaseDBCollectionError {\n constructor(actualType: string) {\n super(`Expected 'insert', got: ${actualType}`)\n this.name = `ExpectedInsertTypeError`\n }\n}\n\nexport class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {\n constructor(actualType: string) {\n super(`Expected 'update', got: ${actualType}`)\n this.name = `ExpectedUpdateTypeError`\n }\n}\n\nexport class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {\n constructor(actualType: string) {\n super(`Expected 'delete', got: ${actualType}`)\n this.name = `ExpectedDeleteTypeError`\n }\n}\n"],"names":["TanStackDBError"],"mappings":";;;AAGO,MAAM,mCAAmCA,GAAAA,gBAAgB;AAAA,EAC9D,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,kCAAkC,2BAA2B;AAAA,EACxE,YAAY,KAAa;AACvB,UAAM,4BAA4B,GAAG,EAAE;AACvC,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,gCAAgC,2BAA2B;AAAA,EACtE,YAAY,YAAoB;AAC9B,UAAM,2BAA2B,UAAU,EAAE;AAC7C,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,gCAAgC,2BAA2B;AAAA,EACtE,YAAY,YAAoB;AAC9B,UAAM,2BAA2B,UAAU,EAAE;AAC7C,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,gCAAgC,2BAA2B;AAAA,EACtE,YAAY,YAAoB;AAC9B,UAAM,2BAA2B,UAAU,EAAE;AAC7C,SAAK,OAAO;AAAA,EACd;AACF;;;;;;"}
package/dist/cjs/errors.d.cts
ADDED
@@ -0,0 +1,16 @@
+import { TanStackDBError } from '@tanstack/db';
+export declare class TrailBaseDBCollectionError extends TanStackDBError {
+    constructor(message: string);
+}
+export declare class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {
+    constructor(ids: string);
+}
+export declare class ExpectedInsertTypeError extends TrailBaseDBCollectionError {
+    constructor(actualType: string);
+}
+export declare class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {
+    constructor(actualType: string);
+}
+export declare class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {
+    constructor(actualType: string);
+}
package/dist/cjs/index.cjs
CHANGED
@@ -1,5 +1,11 @@
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
 const trailbase = require("./trailbase.cjs");
+const errors = require("./errors.cjs");
 exports.trailBaseCollectionOptions = trailbase.trailBaseCollectionOptions;
+exports.ExpectedDeleteTypeError = errors.ExpectedDeleteTypeError;
+exports.ExpectedInsertTypeError = errors.ExpectedInsertTypeError;
+exports.ExpectedUpdateTypeError = errors.ExpectedUpdateTypeError;
+exports.TimeoutWaitingForIdsError = errors.TimeoutWaitingForIdsError;
+exports.TrailBaseDBCollectionError = errors.TrailBaseDBCollectionError;
 //# sourceMappingURL=index.cjs.map
package/dist/cjs/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":"
+{"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;"}
package/dist/cjs/index.d.cts
CHANGED
package/dist/cjs/trailbase.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
 const store = require("@tanstack/store");
+const errors = require("./errors.cjs");
 function convert(conversions, input) {
   const c = conversions;
   return Object.fromEntries(
@@ -35,7 +36,7 @@ function trailBaseCollectionOptions(config) {
     return new Promise((resolve, reject) => {
       const timeoutId = setTimeout(() => {
         unsubscribe();
-        reject(new Error(`Timeout waiting for ids: ${ids}`));
+        reject(new errors.TimeoutWaitingForIdsError(ids.toString()));
       }, timeout);
       const unsubscribe = seenIds.subscribe((value) => {
         if (completed(value.currentVal)) {
@@ -46,29 +47,8 @@ function trailBaseCollectionOptions(config) {
       });
     });
   };
-  const weakSeenIds = new WeakRef(seenIds);
-  const cleanupTimer = setInterval(() => {
-    const seen = weakSeenIds.deref();
-    if (seen) {
-      seen.setState((curr) => {
-        const now = Date.now();
-        let anyExpired = false;
-        const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
-          const expired = now - v > 300 * 1e3;
-          anyExpired = anyExpired || expired;
-          return !expired;
-        });
-        if (anyExpired) {
-          return new Map(notExpired);
-        }
-        return curr;
-      });
-    } else {
-      clearInterval(cleanupTimer);
-    }
-  }, 120 * 1e3);
   let eventReader;
-  const cancel = () => {
+  const cancelEventReader = () => {
     if (eventReader) {
       eventReader.cancel();
       eventReader.releaseLock();
@@ -109,7 +89,6 @@ function trailBaseCollectionOptions(config) {
           cursor = response.cursor;
         }
         commit();
-        markReady();
       }
       async function listen(reader) {
         while (true) {
@@ -150,10 +129,27 @@ function trailBaseCollectionOptions(config) {
         try {
           await initialFetch();
         } catch (e) {
-          cancel();
-          markReady();
+          cancelEventReader();
           throw e;
+        } finally {
+          markReady();
         }
+        const periodicCleanupTask = setInterval(() => {
+          seenIds.setState((curr) => {
+            const now = Date.now();
+            let anyExpired = false;
+            const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
+              const expired = now - v > 300 * 1e3;
+              anyExpired = anyExpired || expired;
+              return !expired;
+            });
+            if (anyExpired) {
+              return new Map(notExpired);
+            }
+            return curr;
+          });
+        }, 120 * 1e3);
+        reader.closed.finally(() => clearInterval(periodicCleanupTask));
       }
       start();
     },
@@ -169,7 +165,7 @@ function trailBaseCollectionOptions(config) {
         params.transaction.mutations.map((tx) => {
           const { type, modified } = tx;
           if (type !== `insert`) {
-            throw new Error(`Expected 'insert', got: ${type}`);
+            throw new errors.ExpectedInsertTypeError(type);
           }
           return serialIns(modified);
         })
@@ -182,7 +178,7 @@ function trailBaseCollectionOptions(config) {
        params.transaction.mutations.map(async (tx) => {
          const { type, changes, key } = tx;
          if (type !== `update`) {
-            throw new Error(`Expected 'update', got: ${type}`);
+            throw new errors.ExpectedUpdateTypeError(type);
          }
          await config.recordApi.update(key, serialUpd(changes));
          return String(key);
@@ -195,7 +191,7 @@ function trailBaseCollectionOptions(config) {
        params.transaction.mutations.map(async (tx) => {
          const { type, key } = tx;
          if (type !== `delete`) {
-            throw new Error(`Expected 'delete', got: ${type}`);
+            throw new errors.ExpectedDeleteTypeError(type);
          }
          await config.recordApi.delete(key);
          return String(key);
@@ -204,7 +200,7 @@ function trailBaseCollectionOptions(config) {
       await awaitIds(ids);
     },
     utils: {
-      cancel
+      cancel: cancelEventReader
     }
   };
 }
package/dist/cjs/trailbase.cjs.map
CHANGED
@@ -1 +1 @@
{"version":3,"file":"trailbase.cjs","sources":["../../src/trailbase.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unnecessary-condition */\nimport { Store } from \"@tanstack/store\"\nimport type { Event, RecordApi } from \"trailbase\"\n\nimport type {\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\n\ntype ShapeOf<T> = Record<keyof T, unknown>\ntype Conversion<I, O> = (value: I) => O\n\ntype OptionalConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? K\n : never]?: Conversion<InputType[K], OutputType[K]>\n}\n\ntype RequiredConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that do not strictly require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? never\n : K]: Conversion<InputType[K], OutputType[K]>\n}\n\ntype Conversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = OptionalConversions<InputType, OutputType> &\n RequiredConversions<InputType, OutputType>\n\nfunction convert<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: InputType\n): OutputType {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? value]\n })\n ) as OutputType\n}\n\nfunction convertPartial<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: Partial<InputType>\n): Partial<OutputType> {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? 
value]\n })\n ) as OutputType\n}\n\n/**\n * Configuration interface for Trailbase Collection\n */\nexport interface TrailBaseCollectionConfig<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n> extends Omit<\n CollectionConfig<TItem, TKey>,\n `sync` | `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Record API name\n */\n recordApi: RecordApi<TRecord>\n\n parse: Conversions<TRecord, TItem>\n serialize: Conversions<TItem, TRecord>\n}\n\nexport type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>\n\nexport interface TrailBaseCollectionUtils extends UtilsRecord {\n cancel: () => void\n}\n\nexport function trailBaseCollectionOptions<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n>(\n config: TrailBaseCollectionConfig<TItem, TRecord, TKey>\n): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {\n const getKey = config.getKey\n\n const parse = (record: TRecord) =>\n convert<TRecord, TItem>(config.parse, record)\n const serialUpd = (item: Partial<TItem>) =>\n convertPartial<TItem, TRecord>(config.serialize, item)\n const serialIns = (item: TItem) =>\n convert<TItem, TRecord>(config.serialize, item)\n\n const seenIds = new Store(new Map<string, number>())\n\n const awaitIds = (\n ids: Array<string>,\n timeout: number = 120 * 1000\n ): Promise<void> => {\n const completed = (value: Map<string, number>) =>\n ids.every((id) => value.has(id))\n if (completed(seenIds.state)) {\n return Promise.resolve()\n }\n\n return new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribe()\n reject(new Error(`Timeout waiting for ids: ${ids}`))\n }, timeout)\n\n const unsubscribe = seenIds.subscribe((value) => {\n if (completed(value.currentVal)) {\n clearTimeout(timeoutId)\n unsubscribe()\n resolve()\n }\n })\n })\n }\n\n const weakSeenIds = new WeakRef(seenIds)\n const cleanupTimer = setInterval(() => {\n const seen = weakSeenIds.deref()\n if (seen) {\n seen.setState((curr) => {\n const now = Date.now()\n let anyExpired = false\n\n const notExpired = Array.from(curr.entries()).filter(([_, v]) => {\n const expired = now - v > 300 * 1000\n anyExpired = anyExpired || expired\n return !expired\n })\n\n if (anyExpired) {\n return new Map(notExpired)\n }\n return curr\n })\n } else {\n clearInterval(cleanupTimer)\n }\n }, 120 * 1000)\n\n type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]\n\n let eventReader: ReadableStreamDefaultReader<Event> | undefined\n const cancel = () => {\n if (eventReader) {\n eventReader.cancel()\n eventReader.releaseLock()\n eventReader = undefined\n }\n }\n\n const sync = {\n sync: (params: SyncParams) => {\n const { begin, write, commit, markReady } = params\n\n // Initial fetch.\n async function initialFetch() {\n const limit = 256\n let response = await config.recordApi.list({\n pagination: {\n limit,\n },\n })\n let cursor = response.cursor\n let got = 0\n\n begin()\n\n while (true) {\n const length = response.records.length\n if (length === 0) break\n\n got = got + length\n for (const item of response.records) {\n write({\n type: `insert`,\n value: parse(item),\n })\n }\n\n if (length < limit) break\n\n response = await config.recordApi.list({\n pagination: {\n limit,\n cursor,\n offset: cursor === undefined ? 
got : undefined,\n },\n })\n cursor = response.cursor\n }\n\n commit()\n markReady()\n }\n\n // Afterwards subscribe.\n async function listen(reader: ReadableStreamDefaultReader<Event>) {\n while (true) {\n const { done, value: event } = await reader.read()\n\n if (done || !event) {\n reader.releaseLock()\n eventReader = undefined\n return\n }\n\n begin()\n let value: TItem | undefined\n if (`Insert` in event) {\n value = parse(event.Insert as TRecord)\n write({ type: `insert`, value })\n } else if (`Delete` in event) {\n value = parse(event.Delete as TRecord)\n write({ type: `delete`, value })\n } else if (`Update` in event) {\n value = parse(event.Update as TRecord)\n write({ type: `update`, value })\n } else {\n console.error(`Error: ${event.Error}`)\n }\n commit()\n\n if (value) {\n seenIds.setState((curr: Map<string, number>) => {\n const newIds = new Map(curr)\n newIds.set(String(getKey(value)), Date.now())\n return newIds\n })\n }\n }\n }\n\n async function start() {\n const eventStream = await config.recordApi.subscribe(`*`)\n const reader = (eventReader = eventStream.getReader())\n\n // Start listening for subscriptions first. Otherwise, we'd risk a gap\n // between the initial fetch and starting to listen.\n listen(reader)\n\n try {\n await initialFetch()\n } catch (e) {\n cancel()\n markReady()\n throw e\n }\n }\n\n start()\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n return {\n ...config,\n sync,\n getKey,\n onInsert: async (\n params: InsertMutationFnParams<TItem, TKey>\n ): Promise<Array<number | string>> => {\n const ids = await config.recordApi.createBulk(\n params.transaction.mutations.map((tx) => {\n const { type, modified } = tx\n if (type !== `insert`) {\n throw new Error(`Expected 'insert', got: ${type}`)\n }\n return serialIns(modified)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly added to the local\n // DB by the subscription.\n await awaitIds(ids.map((id) => String(id)))\n\n return ids\n },\n onUpdate: async (params: UpdateMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, changes, key } = tx\n if (type !== `update`) {\n throw new Error(`Expected 'update', got: ${type}`)\n }\n\n await config.recordApi.update(key, serialUpd(changes))\n\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n onDelete: async (params: DeleteMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, key } = tx\n if (type !== `delete`) {\n throw new Error(`Expected 'delete', got: ${type}`)\n }\n\n await config.recordApi.delete(key)\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n utils: {\n cancel,\n },\n 
}\n}\n"],"names":["Store"],"mappings":";;;AA0CA,SAAS,QAIP,aACA,OACY;AACZ,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AAEA,SAAS,eAIP,aACA,OACqB;AACrB,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AA4BO,SAAS,2BAKd,QACqE;AACrE,QAAM,SAAS,OAAO;AAEtB,QAAM,QAAQ,CAAC,WACb,QAAwB,OAAO,OAAO,MAAM;AAC9C,QAAM,YAAY,CAAC,SACjB,eAA+B,OAAO,WAAW,IAAI;AACvD,QAAM,YAAY,CAAC,SACjB,QAAwB,OAAO,WAAW,IAAI;AAEhD,QAAM,UAAU,IAAIA,YAAM,oBAAI,KAAqB;AAEnD,QAAM,WAAW,CACf,KACA,UAAkB,MAAM,QACN;AAClB,UAAM,YAAY,CAAC,UACjB,IAAI,MAAM,CAAC,OAAO,MAAM,IAAI,EAAE,CAAC;AACjC,QAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,aAAO,QAAQ,QAAA;AAAA,IACjB;AAEA,WAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,YAAM,YAAY,WAAW,MAAM;AACjC,oBAAA;AACA,eAAO,IAAI,MAAM,4BAA4B,GAAG,EAAE,CAAC;AAAA,MACrD,GAAG,OAAO;AAEV,YAAM,cAAc,QAAQ,UAAU,CAAC,UAAU;AAC/C,YAAI,UAAU,MAAM,UAAU,GAAG;AAC/B,uBAAa,SAAS;AACtB,sBAAA;AACA,kBAAA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,cAAc,IAAI,QAAQ,OAAO;AACvC,QAAM,eAAe,YAAY,MAAM;AACrC,UAAM,OAAO,YAAY,MAAA;AACzB,QAAI,MAAM;AACR,WAAK,SAAS,CAAC,SAAS;AACtB,cAAM,MAAM,KAAK,IAAA;AACjB,YAAI,aAAa;AAEjB,cAAM,aAAa,MAAM,KAAK,KAAK,SAAS,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM;AAC/D,gBAAM,UAAU,MAAM,IAAI,MAAM;AAChC,uBAAa,cAAc;AAC3B,iBAAO,CAAC;AAAA,QACV,CAAC;AAED,YAAI,YAAY;AACd,iBAAO,IAAI,IAAI,UAAU;AAAA,QAC3B;AACA,eAAO;AAAA,MACT,CAAC;AAAA,IACH,OAAO;AACL,oBAAc,YAAY;AAAA,IAC5B;AAAA,EACF,GAAG,MAAM,GAAI;AAIb,MAAI;AACJ,QAAM,SAAS,MAAM;AACnB,QAAI,aAAa;AACf,kBAAY,OAAA;AACZ,kBAAY,YAAA;AACZ,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,QAAM,OAAO;AAAA,IACX,MAAM,CAAC,WAAuB;AAC5B,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,qBAAe,eAAe;AAC5B,cAAM,QAAQ;AACd,YAAI,WAAW,MAAM,OAAO,UAAU,KAAK;AAAA,UACzC,YAAY;AAAA,YACV;AAAA,UAAA;AAAA,QACF,CACD;AACD,YAAI,SAAS,SAAS;AACtB,YAAI,MAAM;AAEV,cAAA;AAEA,eAAO,MAAM;AACX,gBAAM,SAAS,SAAS,QAAQ;AAChC,cAAI,WAAW,EAAG;AAElB,gBAAM,MAAM;AACZ,qBAAW,QAAQ,SAAS,SAAS;AACnC,kBAAM;AAAA,cACJ,MAAM;AAAA,cACN,OAAO,MAAM,IAAI;AAAA,YAAA,CAClB;AAAA,UACH;AAEA,cAAI,SAAS,MAAO;AAEpB,qBAAW,MAAM,OAAO,UAAU,KAAK;AAAA,YACrC,YAAY;AAAA,cACV;AAAA,cACA;AAAA,cACA,QAAQ,WAAW,SAAY,MAAM;AAAA,YAAA;AAAA,UACvC,CACD;AACD,mBAAS,SAAS;AAAA,QACpB;AAEA,eAAA;AACA,kBAAA;AAAA,MACF;AAGA,qBAAe,OAAO,QAA4C;AAChE,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,OAAO,UAAU,MAAM,OAAO,KAAA;AAE5C,cAAI,QAAQ,CAAC,OAAO;AAClB,mBAAO,YAAA;AACP,0BAAc;AACd;AAAA,UACF;AAEA,gBAAA;AACA,cAAI;AACJ,cAAI,YAAY,OAAO;AACrB,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,OAAO;AACL,oBAAQ,MAAM,UAAU,MAAM,KAAK,EAAE;AAAA,UACvC;AACA,iBAAA;AAEA,cAAI,OAAO;AACT,oBAAQ,SAAS,CAAC,SAA8B;AAC9C,oBAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,qBAAO,IAAI,OAAO,OAAO,KAAK,CAAC,GAAG,KAAK,KAAK;AAC5C,qBAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,qBAAe,QAAQ;AACrB,cAAM,cAAc,MAAM,OAAO,UAAU,UAAU,GAAG;AACxD,cAAM,SAAU,cAAc,YAAY,UAAA;AAI1C,eAAO,MAAM;AAEb,YAAI;AACF,gBAAM,aAAA;AAAA,QACR,SAAS,GAAG;AACV,iBAAA;AACA,oBAAA;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAA;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,OACR,WACoC;AACpC,YAAM,MAAM,MAAM,OAAO,UAAU;AAAA,QACjC,OAAO,YAAY,UAAU,IAAI,CAAC,OAAO;AACvC,gBAAM,EAAE,MAAM,SAAA,IAAa;AAC3B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AACA,iBAAO
,UAAU,QAAQ;AAAA,QAC3B,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,IAAI,IAAI,CAAC,OAAO,OAAO,EAAE,CAAC,CAAC;AAE1C,aAAO;AAAA,IACT;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,SAAS,IAAA,IAAQ;AAC/B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,KAAK,UAAU,OAAO,CAAC;AAErD,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,IAAA,IAAQ;AACtB,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,GAAG;AACjC,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,EACF;AAEJ;;"}
{"version":3,"file":"trailbase.cjs","sources":["../../src/trailbase.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unnecessary-condition */\nimport { Store } from \"@tanstack/store\"\nimport {\n ExpectedDeleteTypeError,\n ExpectedInsertTypeError,\n ExpectedUpdateTypeError,\n TimeoutWaitingForIdsError,\n} from \"./errors\"\nimport type { Event, RecordApi } from \"trailbase\"\n\nimport type {\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\n\ntype ShapeOf<T> = Record<keyof T, unknown>\ntype Conversion<I, O> = (value: I) => O\n\ntype OptionalConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? K\n : never]?: Conversion<InputType[K], OutputType[K]>\n}\n\ntype RequiredConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that do not strictly require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? never\n : K]: Conversion<InputType[K], OutputType[K]>\n}\n\ntype Conversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = OptionalConversions<InputType, OutputType> &\n RequiredConversions<InputType, OutputType>\n\nfunction convert<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: InputType\n): OutputType {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? value]\n })\n ) as OutputType\n}\n\nfunction convertPartial<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: Partial<InputType>\n): Partial<OutputType> {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? 
value]\n })\n ) as OutputType\n}\n\n/**\n * Configuration interface for Trailbase Collection\n */\nexport interface TrailBaseCollectionConfig<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n> extends Omit<\n CollectionConfig<TItem, TKey>,\n `sync` | `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Record API name\n */\n recordApi: RecordApi<TRecord>\n\n parse: Conversions<TRecord, TItem>\n serialize: Conversions<TItem, TRecord>\n}\n\nexport type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>\n\nexport interface TrailBaseCollectionUtils extends UtilsRecord {\n cancel: () => void\n}\n\nexport function trailBaseCollectionOptions<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n>(\n config: TrailBaseCollectionConfig<TItem, TRecord, TKey>\n): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {\n const getKey = config.getKey\n\n const parse = (record: TRecord) =>\n convert<TRecord, TItem>(config.parse, record)\n const serialUpd = (item: Partial<TItem>) =>\n convertPartial<TItem, TRecord>(config.serialize, item)\n const serialIns = (item: TItem) =>\n convert<TItem, TRecord>(config.serialize, item)\n\n const seenIds = new Store(new Map<string, number>())\n\n const awaitIds = (\n ids: Array<string>,\n timeout: number = 120 * 1000\n ): Promise<void> => {\n const completed = (value: Map<string, number>) =>\n ids.every((id) => value.has(id))\n if (completed(seenIds.state)) {\n return Promise.resolve()\n }\n\n return new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribe()\n reject(new TimeoutWaitingForIdsError(ids.toString()))\n }, timeout)\n\n const unsubscribe = seenIds.subscribe((value) => {\n if (completed(value.currentVal)) {\n clearTimeout(timeoutId)\n unsubscribe()\n resolve()\n }\n })\n })\n }\n\n let eventReader: ReadableStreamDefaultReader<Event> | undefined\n const cancelEventReader = () => {\n if (eventReader) {\n eventReader.cancel()\n eventReader.releaseLock()\n eventReader = undefined\n }\n }\n\n type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]\n const sync = {\n sync: (params: SyncParams) => {\n const { begin, write, commit, markReady } = params\n\n // Initial fetch.\n async function initialFetch() {\n const limit = 256\n let response = await config.recordApi.list({\n pagination: {\n limit,\n },\n })\n let cursor = response.cursor\n let got = 0\n\n begin()\n\n while (true) {\n const length = response.records.length\n if (length === 0) break\n\n got = got + length\n for (const item of response.records) {\n write({\n type: `insert`,\n value: parse(item),\n })\n }\n\n if (length < limit) break\n\n response = await config.recordApi.list({\n pagination: {\n limit,\n cursor,\n offset: cursor === undefined ? 
got : undefined,\n },\n })\n cursor = response.cursor\n }\n\n commit()\n }\n\n // Afterwards subscribe.\n async function listen(reader: ReadableStreamDefaultReader<Event>) {\n while (true) {\n const { done, value: event } = await reader.read()\n\n if (done || !event) {\n reader.releaseLock()\n eventReader = undefined\n return\n }\n\n begin()\n let value: TItem | undefined\n if (`Insert` in event) {\n value = parse(event.Insert as TRecord)\n write({ type: `insert`, value })\n } else if (`Delete` in event) {\n value = parse(event.Delete as TRecord)\n write({ type: `delete`, value })\n } else if (`Update` in event) {\n value = parse(event.Update as TRecord)\n write({ type: `update`, value })\n } else {\n console.error(`Error: ${event.Error}`)\n }\n commit()\n\n if (value) {\n seenIds.setState((curr: Map<string, number>) => {\n const newIds = new Map(curr)\n newIds.set(String(getKey(value)), Date.now())\n return newIds\n })\n }\n }\n }\n\n async function start() {\n const eventStream = await config.recordApi.subscribe(`*`)\n const reader = (eventReader = eventStream.getReader())\n\n // Start listening for subscriptions first. Otherwise, we'd risk a gap\n // between the initial fetch and starting to listen.\n listen(reader)\n\n try {\n await initialFetch()\n } catch (e) {\n cancelEventReader()\n throw e\n } finally {\n // Mark ready both if everything went well or if there's an error to\n // avoid blocking apps waiting for `.preload()` to finish.\n markReady()\n }\n\n // Lastly, start a periodic cleanup task that will be removed when the\n // reader closes.\n const periodicCleanupTask = setInterval(() => {\n seenIds.setState((curr) => {\n const now = Date.now()\n let anyExpired = false\n\n const notExpired = Array.from(curr.entries()).filter(([_, v]) => {\n const expired = now - v > 300 * 1000\n anyExpired = anyExpired || expired\n return !expired\n })\n\n if (anyExpired) {\n return new Map(notExpired)\n }\n return curr\n })\n }, 120 * 1000)\n\n reader.closed.finally(() => clearInterval(periodicCleanupTask))\n }\n\n start()\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n return {\n ...config,\n sync,\n getKey,\n onInsert: async (\n params: InsertMutationFnParams<TItem, TKey>\n ): Promise<Array<number | string>> => {\n const ids = await config.recordApi.createBulk(\n params.transaction.mutations.map((tx) => {\n const { type, modified } = tx\n if (type !== `insert`) {\n throw new ExpectedInsertTypeError(type)\n }\n return serialIns(modified)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly added to the local\n // DB by the subscription.\n await awaitIds(ids.map((id) => String(id)))\n\n return ids\n },\n onUpdate: async (params: UpdateMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, changes, key } = tx\n if (type !== `update`) {\n throw new ExpectedUpdateTypeError(type)\n }\n\n await config.recordApi.update(key, serialUpd(changes))\n\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n onDelete: async (params: DeleteMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, key } = tx\n if 
(type !== `delete`) {\n throw new ExpectedDeleteTypeError(type)\n }\n\n await config.recordApi.delete(key)\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n utils: {\n cancel: cancelEventReader,\n },\n }\n}\n"],"names":["Store","TimeoutWaitingForIdsError","ExpectedInsertTypeError","ExpectedUpdateTypeError","ExpectedDeleteTypeError"],"mappings":";;;;AAgDA,SAAS,QAIP,aACA,OACY;AACZ,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AAEA,SAAS,eAIP,aACA,OACqB;AACrB,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AA4BO,SAAS,2BAKd,QACqE;AACrE,QAAM,SAAS,OAAO;AAEtB,QAAM,QAAQ,CAAC,WACb,QAAwB,OAAO,OAAO,MAAM;AAC9C,QAAM,YAAY,CAAC,SACjB,eAA+B,OAAO,WAAW,IAAI;AACvD,QAAM,YAAY,CAAC,SACjB,QAAwB,OAAO,WAAW,IAAI;AAEhD,QAAM,UAAU,IAAIA,YAAM,oBAAI,KAAqB;AAEnD,QAAM,WAAW,CACf,KACA,UAAkB,MAAM,QACN;AAClB,UAAM,YAAY,CAAC,UACjB,IAAI,MAAM,CAAC,OAAO,MAAM,IAAI,EAAE,CAAC;AACjC,QAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,aAAO,QAAQ,QAAA;AAAA,IACjB;AAEA,WAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,YAAM,YAAY,WAAW,MAAM;AACjC,oBAAA;AACA,eAAO,IAAIC,OAAAA,0BAA0B,IAAI,SAAA,CAAU,CAAC;AAAA,MACtD,GAAG,OAAO;AAEV,YAAM,cAAc,QAAQ,UAAU,CAAC,UAAU;AAC/C,YAAI,UAAU,MAAM,UAAU,GAAG;AAC/B,uBAAa,SAAS;AACtB,sBAAA;AACA,kBAAA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,MAAI;AACJ,QAAM,oBAAoB,MAAM;AAC9B,QAAI,aAAa;AACf,kBAAY,OAAA;AACZ,kBAAY,YAAA;AACZ,oBAAc;AAAA,IAChB;AAAA,EACF;AAGA,QAAM,OAAO;AAAA,IACX,MAAM,CAAC,WAAuB;AAC5B,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,qBAAe,eAAe;AAC5B,cAAM,QAAQ;AACd,YAAI,WAAW,MAAM,OAAO,UAAU,KAAK;AAAA,UACzC,YAAY;AAAA,YACV;AAAA,UAAA;AAAA,QACF,CACD;AACD,YAAI,SAAS,SAAS;AACtB,YAAI,MAAM;AAEV,cAAA;AAEA,eAAO,MAAM;AACX,gBAAM,SAAS,SAAS,QAAQ;AAChC,cAAI,WAAW,EAAG;AAElB,gBAAM,MAAM;AACZ,qBAAW,QAAQ,SAAS,SAAS;AACnC,kBAAM;AAAA,cACJ,MAAM;AAAA,cACN,OAAO,MAAM,IAAI;AAAA,YAAA,CAClB;AAAA,UACH;AAEA,cAAI,SAAS,MAAO;AAEpB,qBAAW,MAAM,OAAO,UAAU,KAAK;AAAA,YACrC,YAAY;AAAA,cACV;AAAA,cACA;AAAA,cACA,QAAQ,WAAW,SAAY,MAAM;AAAA,YAAA;AAAA,UACvC,CACD;AACD,mBAAS,SAAS;AAAA,QACpB;AAEA,eAAA;AAAA,MACF;AAGA,qBAAe,OAAO,QAA4C;AAChE,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,OAAO,UAAU,MAAM,OAAO,KAAA;AAE5C,cAAI,QAAQ,CAAC,OAAO;AAClB,mBAAO,YAAA;AACP,0BAAc;AACd;AAAA,UACF;AAEA,gBAAA;AACA,cAAI;AACJ,cAAI,YAAY,OAAO;AACrB,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,OAAO;AACL,oBAAQ,MAAM,UAAU,MAAM,KAAK,EAAE;AAAA,UACvC;AACA,iBAAA;AAEA,cAAI,OAAO;AACT,oBAAQ,SAAS,CAAC,SAA8B;AAC9C,oBAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,qBAAO,IAAI,OAAO,OAAO,KAAK,CAAC,GAAG,KAAK,KAAK;AAC5C,qBAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,qBAAe,QAAQ;AACrB,cAAM,cAAc,MAAM,OAAO,UAAU,UAAU,GAAG;AACxD,cAAM,SAAU,cAAc,YAAY,UAAA;AAI1C,eAAO,MAAM;AAEb,YAAI;AACF,gBAAM,aAAA;AAAA,QACR,SAAS,GAAG;AACV,4BAAA;AACA,gBAAM;AAAA,QACR,UAAA;AAGE,oBAAA;AAAA,QACF;AAIA,cAAM,sBAAsB,YAAY,MAAM;AAC5C,kBAAQ,SAAS,CAAC,SAAS;AACzB,kBAAM,MAAM,KAAK,IAAA;AACjB,gBAAI,aAAa;AAEjB,kBAAM,aAAa,MAAM,KAAK,KAAK,SAAS,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM;AAC/D,oBAAM,UAAU,MAAM,IAAI,MAAM;AAChC,2BAAa,cAAc;AAC3B,qBAAO,CAAC;AAAA,YACV,CAAC;AAED,gBAAI,YAA
Y;AACd,qBAAO,IAAI,IAAI,UAAU;AAAA,YAC3B;AACA,mBAAO;AAAA,UACT,CAAC;AAAA,QACH,GAAG,MAAM,GAAI;AAEb,eAAO,OAAO,QAAQ,MAAM,cAAc,mBAAmB,CAAC;AAAA,MAChE;AAEA,YAAA;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,OACR,WACoC;AACpC,YAAM,MAAM,MAAM,OAAO,UAAU;AAAA,QACjC,OAAO,YAAY,UAAU,IAAI,CAAC,OAAO;AACvC,gBAAM,EAAE,MAAM,SAAA,IAAa;AAC3B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAIC,OAAAA,wBAAwB,IAAI;AAAA,UACxC;AACA,iBAAO,UAAU,QAAQ;AAAA,QAC3B,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,IAAI,IAAI,CAAC,OAAO,OAAO,EAAE,CAAC,CAAC;AAE1C,aAAO;AAAA,IACT;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,SAAS,IAAA,IAAQ;AAC/B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAIC,OAAAA,wBAAwB,IAAI;AAAA,UACxC;AAEA,gBAAM,OAAO,UAAU,OAAO,KAAK,UAAU,OAAO,CAAC;AAErD,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,IAAA,IAAQ;AACtB,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAIC,OAAAA,wBAAwB,IAAI;AAAA,UACxC;AAEA,gBAAM,OAAO,UAAU,OAAO,GAAG;AACjC,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,OAAO;AAAA,MACL,QAAQ;AAAA,IAAA;AAAA,EACV;AAEJ;;"}
package/dist/esm/errors.d.ts
ADDED
@@ -0,0 +1,16 @@
+import { TanStackDBError } from '@tanstack/db';
+export declare class TrailBaseDBCollectionError extends TanStackDBError {
+    constructor(message: string);
+}
+export declare class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {
+    constructor(ids: string);
+}
+export declare class ExpectedInsertTypeError extends TrailBaseDBCollectionError {
+    constructor(actualType: string);
+}
+export declare class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {
+    constructor(actualType: string);
+}
+export declare class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {
+    constructor(actualType: string);
+}
package/dist/esm/errors.js
ADDED
@@ -0,0 +1,39 @@
+import { TanStackDBError } from "@tanstack/db";
+class TrailBaseDBCollectionError extends TanStackDBError {
+  constructor(message) {
+    super(message);
+    this.name = `TrailBaseDBCollectionError`;
+  }
+}
+class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {
+  constructor(ids) {
+    super(`Timeout waiting for ids: ${ids}`);
+    this.name = `TimeoutWaitingForIdsError`;
+  }
+}
+class ExpectedInsertTypeError extends TrailBaseDBCollectionError {
+  constructor(actualType) {
+    super(`Expected 'insert', got: ${actualType}`);
+    this.name = `ExpectedInsertTypeError`;
+  }
+}
+class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {
+  constructor(actualType) {
+    super(`Expected 'update', got: ${actualType}`);
+    this.name = `ExpectedUpdateTypeError`;
+  }
+}
+class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {
+  constructor(actualType) {
+    super(`Expected 'delete', got: ${actualType}`);
+    this.name = `ExpectedDeleteTypeError`;
+  }
+}
+export {
+  ExpectedDeleteTypeError,
+  ExpectedInsertTypeError,
+  ExpectedUpdateTypeError,
+  TimeoutWaitingForIdsError,
+  TrailBaseDBCollectionError
+};
+//# sourceMappingURL=errors.js.map
package/dist/esm/errors.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"errors.js","sources":["../../src/errors.ts"],"sourcesContent":["import { TanStackDBError } from \"@tanstack/db\"\n\n// TrailBase DB Collection Errors\nexport class TrailBaseDBCollectionError extends TanStackDBError {\n constructor(message: string) {\n super(message)\n this.name = `TrailBaseDBCollectionError`\n }\n}\n\nexport class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {\n constructor(ids: string) {\n super(`Timeout waiting for ids: ${ids}`)\n this.name = `TimeoutWaitingForIdsError`\n }\n}\n\nexport class ExpectedInsertTypeError extends TrailBaseDBCollectionError {\n constructor(actualType: string) {\n super(`Expected 'insert', got: ${actualType}`)\n this.name = `ExpectedInsertTypeError`\n }\n}\n\nexport class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {\n constructor(actualType: string) {\n super(`Expected 'update', got: ${actualType}`)\n this.name = `ExpectedUpdateTypeError`\n }\n}\n\nexport class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {\n constructor(actualType: string) {\n super(`Expected 'delete', got: ${actualType}`)\n this.name = `ExpectedDeleteTypeError`\n }\n}\n"],"names":[],"mappings":";AAGO,MAAM,mCAAmC,gBAAgB;AAAA,EAC9D,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,kCAAkC,2BAA2B;AAAA,EACxE,YAAY,KAAa;AACvB,UAAM,4BAA4B,GAAG,EAAE;AACvC,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,gCAAgC,2BAA2B;AAAA,EACtE,YAAY,YAAoB;AAC9B,UAAM,2BAA2B,UAAU,EAAE;AAC7C,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,gCAAgC,2BAA2B;AAAA,EACtE,YAAY,YAAoB;AAC9B,UAAM,2BAA2B,UAAU,EAAE;AAC7C,SAAK,OAAO;AAAA,EACd;AACF;AAEO,MAAM,gCAAgC,2BAA2B;AAAA,EACtE,YAAY,YAAoB;AAC9B,UAAM,2BAA2B,UAAU,EAAE;AAC7C,SAAK,OAAO;AAAA,EACd;AACF;"}
package/dist/esm/index.d.ts
CHANGED
package/dist/esm/index.js
CHANGED
@@ -1,5 +1,11 @@
 import { trailBaseCollectionOptions } from "./trailbase.js";
+import { ExpectedDeleteTypeError, ExpectedInsertTypeError, ExpectedUpdateTypeError, TimeoutWaitingForIdsError, TrailBaseDBCollectionError } from "./errors.js";
 export {
+  ExpectedDeleteTypeError,
+  ExpectedInsertTypeError,
+  ExpectedUpdateTypeError,
+  TimeoutWaitingForIdsError,
+  TrailBaseDBCollectionError,
   trailBaseCollectionOptions
 };
 //# sourceMappingURL=index.js.map
package/dist/esm/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":"
+{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;"}
package/dist/esm/trailbase.js
CHANGED
@@ -1,4 +1,5 @@
 import { Store } from "@tanstack/store";
+import { ExpectedDeleteTypeError, ExpectedUpdateTypeError, ExpectedInsertTypeError, TimeoutWaitingForIdsError } from "./errors.js";
 function convert(conversions, input) {
   const c = conversions;
   return Object.fromEntries(
@@ -33,7 +34,7 @@ function trailBaseCollectionOptions(config) {
     return new Promise((resolve, reject) => {
       const timeoutId = setTimeout(() => {
         unsubscribe();
-        reject(new Error(`Timeout waiting for ids: ${ids}`));
+        reject(new TimeoutWaitingForIdsError(ids.toString()));
       }, timeout);
       const unsubscribe = seenIds.subscribe((value) => {
         if (completed(value.currentVal)) {
@@ -44,29 +45,8 @@ function trailBaseCollectionOptions(config) {
       });
     });
   };
-  const weakSeenIds = new WeakRef(seenIds);
-  const cleanupTimer = setInterval(() => {
-    const seen = weakSeenIds.deref();
-    if (seen) {
-      seen.setState((curr) => {
-        const now = Date.now();
-        let anyExpired = false;
-        const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
-          const expired = now - v > 300 * 1e3;
-          anyExpired = anyExpired || expired;
-          return !expired;
-        });
-        if (anyExpired) {
-          return new Map(notExpired);
-        }
-        return curr;
-      });
-    } else {
-      clearInterval(cleanupTimer);
-    }
-  }, 120 * 1e3);
   let eventReader;
-  const cancel = () => {
+  const cancelEventReader = () => {
     if (eventReader) {
       eventReader.cancel();
       eventReader.releaseLock();
@@ -107,7 +87,6 @@ function trailBaseCollectionOptions(config) {
           cursor = response.cursor;
         }
         commit();
-        markReady();
       }
       async function listen(reader) {
         while (true) {
@@ -148,10 +127,27 @@ function trailBaseCollectionOptions(config) {
         try {
           await initialFetch();
         } catch (e) {
-          cancel();
-          markReady();
+          cancelEventReader();
           throw e;
+        } finally {
+          markReady();
         }
+        const periodicCleanupTask = setInterval(() => {
+          seenIds.setState((curr) => {
+            const now = Date.now();
+            let anyExpired = false;
+            const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
+              const expired = now - v > 300 * 1e3;
+              anyExpired = anyExpired || expired;
+              return !expired;
+            });
+            if (anyExpired) {
+              return new Map(notExpired);
+            }
+            return curr;
+          });
+        }, 120 * 1e3);
+        reader.closed.finally(() => clearInterval(periodicCleanupTask));
       }
       start();
     },
@@ -167,7 +163,7 @@ function trailBaseCollectionOptions(config) {
         params.transaction.mutations.map((tx) => {
           const { type, modified } = tx;
           if (type !== `insert`) {
-            throw new Error(`Expected 'insert', got: ${type}`);
+            throw new ExpectedInsertTypeError(type);
           }
           return serialIns(modified);
         })
@@ -180,7 +176,7 @@ function trailBaseCollectionOptions(config) {
        params.transaction.mutations.map(async (tx) => {
          const { type, changes, key } = tx;
          if (type !== `update`) {
-            throw new Error(`Expected 'update', got: ${type}`);
+            throw new ExpectedUpdateTypeError(type);
          }
          await config.recordApi.update(key, serialUpd(changes));
          return String(key);
@@ -193,7 +189,7 @@ function trailBaseCollectionOptions(config) {
        params.transaction.mutations.map(async (tx) => {
          const { type, key } = tx;
          if (type !== `delete`) {
-            throw new Error(`Expected 'delete', got: ${type}`);
+            throw new ExpectedDeleteTypeError(type);
          }
          await config.recordApi.delete(key);
          return String(key);
@@ -202,7 +198,7 @@ function trailBaseCollectionOptions(config) {
       await awaitIds(ids);
     },
     utils: {
-      cancel
+      cancel: cancelEventReader
     }
   };
 }
package/dist/esm/trailbase.js.map
CHANGED
@@ -1 +1 @@
{"version":3,"file":"trailbase.js","sources":["../../src/trailbase.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unnecessary-condition */\nimport { Store } from \"@tanstack/store\"\nimport type { Event, RecordApi } from \"trailbase\"\n\nimport type {\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\n\ntype ShapeOf<T> = Record<keyof T, unknown>\ntype Conversion<I, O> = (value: I) => O\n\ntype OptionalConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? K\n : never]?: Conversion<InputType[K], OutputType[K]>\n}\n\ntype RequiredConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that do not strictly require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? never\n : K]: Conversion<InputType[K], OutputType[K]>\n}\n\ntype Conversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = OptionalConversions<InputType, OutputType> &\n RequiredConversions<InputType, OutputType>\n\nfunction convert<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: InputType\n): OutputType {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? value]\n })\n ) as OutputType\n}\n\nfunction convertPartial<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: Partial<InputType>\n): Partial<OutputType> {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? 
value]\n })\n ) as OutputType\n}\n\n/**\n * Configuration interface for Trailbase Collection\n */\nexport interface TrailBaseCollectionConfig<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n> extends Omit<\n CollectionConfig<TItem, TKey>,\n `sync` | `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Record API name\n */\n recordApi: RecordApi<TRecord>\n\n parse: Conversions<TRecord, TItem>\n serialize: Conversions<TItem, TRecord>\n}\n\nexport type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>\n\nexport interface TrailBaseCollectionUtils extends UtilsRecord {\n cancel: () => void\n}\n\nexport function trailBaseCollectionOptions<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n>(\n config: TrailBaseCollectionConfig<TItem, TRecord, TKey>\n): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {\n const getKey = config.getKey\n\n const parse = (record: TRecord) =>\n convert<TRecord, TItem>(config.parse, record)\n const serialUpd = (item: Partial<TItem>) =>\n convertPartial<TItem, TRecord>(config.serialize, item)\n const serialIns = (item: TItem) =>\n convert<TItem, TRecord>(config.serialize, item)\n\n const seenIds = new Store(new Map<string, number>())\n\n const awaitIds = (\n ids: Array<string>,\n timeout: number = 120 * 1000\n ): Promise<void> => {\n const completed = (value: Map<string, number>) =>\n ids.every((id) => value.has(id))\n if (completed(seenIds.state)) {\n return Promise.resolve()\n }\n\n return new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribe()\n reject(new Error(`Timeout waiting for ids: ${ids}`))\n }, timeout)\n\n const unsubscribe = seenIds.subscribe((value) => {\n if (completed(value.currentVal)) {\n clearTimeout(timeoutId)\n unsubscribe()\n resolve()\n }\n })\n })\n }\n\n const weakSeenIds = new WeakRef(seenIds)\n const cleanupTimer = setInterval(() => {\n const seen = weakSeenIds.deref()\n if (seen) {\n seen.setState((curr) => {\n const now = Date.now()\n let anyExpired = false\n\n const notExpired = Array.from(curr.entries()).filter(([_, v]) => {\n const expired = now - v > 300 * 1000\n anyExpired = anyExpired || expired\n return !expired\n })\n\n if (anyExpired) {\n return new Map(notExpired)\n }\n return curr\n })\n } else {\n clearInterval(cleanupTimer)\n }\n }, 120 * 1000)\n\n type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]\n\n let eventReader: ReadableStreamDefaultReader<Event> | undefined\n const cancel = () => {\n if (eventReader) {\n eventReader.cancel()\n eventReader.releaseLock()\n eventReader = undefined\n }\n }\n\n const sync = {\n sync: (params: SyncParams) => {\n const { begin, write, commit, markReady } = params\n\n // Initial fetch.\n async function initialFetch() {\n const limit = 256\n let response = await config.recordApi.list({\n pagination: {\n limit,\n },\n })\n let cursor = response.cursor\n let got = 0\n\n begin()\n\n while (true) {\n const length = response.records.length\n if (length === 0) break\n\n got = got + length\n for (const item of response.records) {\n write({\n type: `insert`,\n value: parse(item),\n })\n }\n\n if (length < limit) break\n\n response = await config.recordApi.list({\n pagination: {\n limit,\n cursor,\n offset: cursor === undefined ? 
got : undefined,\n },\n })\n cursor = response.cursor\n }\n\n commit()\n markReady()\n }\n\n // Afterwards subscribe.\n async function listen(reader: ReadableStreamDefaultReader<Event>) {\n while (true) {\n const { done, value: event } = await reader.read()\n\n if (done || !event) {\n reader.releaseLock()\n eventReader = undefined\n return\n }\n\n begin()\n let value: TItem | undefined\n if (`Insert` in event) {\n value = parse(event.Insert as TRecord)\n write({ type: `insert`, value })\n } else if (`Delete` in event) {\n value = parse(event.Delete as TRecord)\n write({ type: `delete`, value })\n } else if (`Update` in event) {\n value = parse(event.Update as TRecord)\n write({ type: `update`, value })\n } else {\n console.error(`Error: ${event.Error}`)\n }\n commit()\n\n if (value) {\n seenIds.setState((curr: Map<string, number>) => {\n const newIds = new Map(curr)\n newIds.set(String(getKey(value)), Date.now())\n return newIds\n })\n }\n }\n }\n\n async function start() {\n const eventStream = await config.recordApi.subscribe(`*`)\n const reader = (eventReader = eventStream.getReader())\n\n // Start listening for subscriptions first. Otherwise, we'd risk a gap\n // between the initial fetch and starting to listen.\n listen(reader)\n\n try {\n await initialFetch()\n } catch (e) {\n cancel()\n markReady()\n throw e\n }\n }\n\n start()\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n return {\n ...config,\n sync,\n getKey,\n onInsert: async (\n params: InsertMutationFnParams<TItem, TKey>\n ): Promise<Array<number | string>> => {\n const ids = await config.recordApi.createBulk(\n params.transaction.mutations.map((tx) => {\n const { type, modified } = tx\n if (type !== `insert`) {\n throw new Error(`Expected 'insert', got: ${type}`)\n }\n return serialIns(modified)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly added to the local\n // DB by the subscription.\n await awaitIds(ids.map((id) => String(id)))\n\n return ids\n },\n onUpdate: async (params: UpdateMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, changes, key } = tx\n if (type !== `update`) {\n throw new Error(`Expected 'update', got: ${type}`)\n }\n\n await config.recordApi.update(key, serialUpd(changes))\n\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n onDelete: async (params: DeleteMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, key } = tx\n if (type !== `delete`) {\n throw new Error(`Expected 'delete', got: ${type}`)\n }\n\n await config.recordApi.delete(key)\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n utils: {\n cancel,\n },\n 
}\n}\n"],"names":[],"mappings":";AA0CA,SAAS,QAIP,aACA,OACY;AACZ,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AAEA,SAAS,eAIP,aACA,OACqB;AACrB,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AA4BO,SAAS,2BAKd,QACqE;AACrE,QAAM,SAAS,OAAO;AAEtB,QAAM,QAAQ,CAAC,WACb,QAAwB,OAAO,OAAO,MAAM;AAC9C,QAAM,YAAY,CAAC,SACjB,eAA+B,OAAO,WAAW,IAAI;AACvD,QAAM,YAAY,CAAC,SACjB,QAAwB,OAAO,WAAW,IAAI;AAEhD,QAAM,UAAU,IAAI,MAAM,oBAAI,KAAqB;AAEnD,QAAM,WAAW,CACf,KACA,UAAkB,MAAM,QACN;AAClB,UAAM,YAAY,CAAC,UACjB,IAAI,MAAM,CAAC,OAAO,MAAM,IAAI,EAAE,CAAC;AACjC,QAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,aAAO,QAAQ,QAAA;AAAA,IACjB;AAEA,WAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,YAAM,YAAY,WAAW,MAAM;AACjC,oBAAA;AACA,eAAO,IAAI,MAAM,4BAA4B,GAAG,EAAE,CAAC;AAAA,MACrD,GAAG,OAAO;AAEV,YAAM,cAAc,QAAQ,UAAU,CAAC,UAAU;AAC/C,YAAI,UAAU,MAAM,UAAU,GAAG;AAC/B,uBAAa,SAAS;AACtB,sBAAA;AACA,kBAAA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,cAAc,IAAI,QAAQ,OAAO;AACvC,QAAM,eAAe,YAAY,MAAM;AACrC,UAAM,OAAO,YAAY,MAAA;AACzB,QAAI,MAAM;AACR,WAAK,SAAS,CAAC,SAAS;AACtB,cAAM,MAAM,KAAK,IAAA;AACjB,YAAI,aAAa;AAEjB,cAAM,aAAa,MAAM,KAAK,KAAK,SAAS,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM;AAC/D,gBAAM,UAAU,MAAM,IAAI,MAAM;AAChC,uBAAa,cAAc;AAC3B,iBAAO,CAAC;AAAA,QACV,CAAC;AAED,YAAI,YAAY;AACd,iBAAO,IAAI,IAAI,UAAU;AAAA,QAC3B;AACA,eAAO;AAAA,MACT,CAAC;AAAA,IACH,OAAO;AACL,oBAAc,YAAY;AAAA,IAC5B;AAAA,EACF,GAAG,MAAM,GAAI;AAIb,MAAI;AACJ,QAAM,SAAS,MAAM;AACnB,QAAI,aAAa;AACf,kBAAY,OAAA;AACZ,kBAAY,YAAA;AACZ,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,QAAM,OAAO;AAAA,IACX,MAAM,CAAC,WAAuB;AAC5B,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,qBAAe,eAAe;AAC5B,cAAM,QAAQ;AACd,YAAI,WAAW,MAAM,OAAO,UAAU,KAAK;AAAA,UACzC,YAAY;AAAA,YACV;AAAA,UAAA;AAAA,QACF,CACD;AACD,YAAI,SAAS,SAAS;AACtB,YAAI,MAAM;AAEV,cAAA;AAEA,eAAO,MAAM;AACX,gBAAM,SAAS,SAAS,QAAQ;AAChC,cAAI,WAAW,EAAG;AAElB,gBAAM,MAAM;AACZ,qBAAW,QAAQ,SAAS,SAAS;AACnC,kBAAM;AAAA,cACJ,MAAM;AAAA,cACN,OAAO,MAAM,IAAI;AAAA,YAAA,CAClB;AAAA,UACH;AAEA,cAAI,SAAS,MAAO;AAEpB,qBAAW,MAAM,OAAO,UAAU,KAAK;AAAA,YACrC,YAAY;AAAA,cACV;AAAA,cACA;AAAA,cACA,QAAQ,WAAW,SAAY,MAAM;AAAA,YAAA;AAAA,UACvC,CACD;AACD,mBAAS,SAAS;AAAA,QACpB;AAEA,eAAA;AACA,kBAAA;AAAA,MACF;AAGA,qBAAe,OAAO,QAA4C;AAChE,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,OAAO,UAAU,MAAM,OAAO,KAAA;AAE5C,cAAI,QAAQ,CAAC,OAAO;AAClB,mBAAO,YAAA;AACP,0BAAc;AACd;AAAA,UACF;AAEA,gBAAA;AACA,cAAI;AACJ,cAAI,YAAY,OAAO;AACrB,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,OAAO;AACL,oBAAQ,MAAM,UAAU,MAAM,KAAK,EAAE;AAAA,UACvC;AACA,iBAAA;AAEA,cAAI,OAAO;AACT,oBAAQ,SAAS,CAAC,SAA8B;AAC9C,oBAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,qBAAO,IAAI,OAAO,OAAO,KAAK,CAAC,GAAG,KAAK,KAAK;AAC5C,qBAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,qBAAe,QAAQ;AACrB,cAAM,cAAc,MAAM,OAAO,UAAU,UAAU,GAAG;AACxD,cAAM,SAAU,cAAc,YAAY,UAAA;AAI1C,eAAO,MAAM;AAEb,YAAI;AACF,gBAAM,aAAA;AAAA,QACR,SAAS,GAAG;AACV,iBAAA;AACA,oBAAA;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAA;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,OACR,WACoC;AACpC,YAAM,MAAM,MAAM,OAAO,UAAU;AAAA,QACjC,OAAO,YAAY,UAAU,IAAI,CAAC,OAAO;AACvC,gBAAM,EAAE,MAAM,SAAA,IAAa;AAC3B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AACA,iBAAO,UAAU,QAAQ
;AAAA,QAC3B,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,IAAI,IAAI,CAAC,OAAO,OAAO,EAAE,CAAC,CAAC;AAE1C,aAAO;AAAA,IACT;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,SAAS,IAAA,IAAQ;AAC/B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,KAAK,UAAU,OAAO,CAAC;AAErD,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,IAAA,IAAQ;AACtB,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,GAAG;AACjC,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,EACF;AAEJ;"}
|
|
1
|
+
{"version":3,"file":"trailbase.js","sources":["../../src/trailbase.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unnecessary-condition */\nimport { Store } from \"@tanstack/store\"\nimport {\n ExpectedDeleteTypeError,\n ExpectedInsertTypeError,\n ExpectedUpdateTypeError,\n TimeoutWaitingForIdsError,\n} from \"./errors\"\nimport type { Event, RecordApi } from \"trailbase\"\n\nimport type {\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\n\ntype ShapeOf<T> = Record<keyof T, unknown>\ntype Conversion<I, O> = (value: I) => O\n\ntype OptionalConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? K\n : never]?: Conversion<InputType[K], OutputType[K]>\n}\n\ntype RequiredConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that do not strictly require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? never\n : K]: Conversion<InputType[K], OutputType[K]>\n}\n\ntype Conversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = OptionalConversions<InputType, OutputType> &\n RequiredConversions<InputType, OutputType>\n\nfunction convert<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: InputType\n): OutputType {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? value]\n })\n ) as OutputType\n}\n\nfunction convertPartial<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: Partial<InputType>\n): Partial<OutputType> {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? 
value]\n })\n ) as OutputType\n}\n\n/**\n * Configuration interface for Trailbase Collection\n */\nexport interface TrailBaseCollectionConfig<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n> extends Omit<\n CollectionConfig<TItem, TKey>,\n `sync` | `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Record API name\n */\n recordApi: RecordApi<TRecord>\n\n parse: Conversions<TRecord, TItem>\n serialize: Conversions<TItem, TRecord>\n}\n\nexport type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>\n\nexport interface TrailBaseCollectionUtils extends UtilsRecord {\n cancel: () => void\n}\n\nexport function trailBaseCollectionOptions<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n>(\n config: TrailBaseCollectionConfig<TItem, TRecord, TKey>\n): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {\n const getKey = config.getKey\n\n const parse = (record: TRecord) =>\n convert<TRecord, TItem>(config.parse, record)\n const serialUpd = (item: Partial<TItem>) =>\n convertPartial<TItem, TRecord>(config.serialize, item)\n const serialIns = (item: TItem) =>\n convert<TItem, TRecord>(config.serialize, item)\n\n const seenIds = new Store(new Map<string, number>())\n\n const awaitIds = (\n ids: Array<string>,\n timeout: number = 120 * 1000\n ): Promise<void> => {\n const completed = (value: Map<string, number>) =>\n ids.every((id) => value.has(id))\n if (completed(seenIds.state)) {\n return Promise.resolve()\n }\n\n return new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribe()\n reject(new TimeoutWaitingForIdsError(ids.toString()))\n }, timeout)\n\n const unsubscribe = seenIds.subscribe((value) => {\n if (completed(value.currentVal)) {\n clearTimeout(timeoutId)\n unsubscribe()\n resolve()\n }\n })\n })\n }\n\n let eventReader: ReadableStreamDefaultReader<Event> | undefined\n const cancelEventReader = () => {\n if (eventReader) {\n eventReader.cancel()\n eventReader.releaseLock()\n eventReader = undefined\n }\n }\n\n type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]\n const sync = {\n sync: (params: SyncParams) => {\n const { begin, write, commit, markReady } = params\n\n // Initial fetch.\n async function initialFetch() {\n const limit = 256\n let response = await config.recordApi.list({\n pagination: {\n limit,\n },\n })\n let cursor = response.cursor\n let got = 0\n\n begin()\n\n while (true) {\n const length = response.records.length\n if (length === 0) break\n\n got = got + length\n for (const item of response.records) {\n write({\n type: `insert`,\n value: parse(item),\n })\n }\n\n if (length < limit) break\n\n response = await config.recordApi.list({\n pagination: {\n limit,\n cursor,\n offset: cursor === undefined ? 
got : undefined,\n },\n })\n cursor = response.cursor\n }\n\n commit()\n }\n\n // Afterwards subscribe.\n async function listen(reader: ReadableStreamDefaultReader<Event>) {\n while (true) {\n const { done, value: event } = await reader.read()\n\n if (done || !event) {\n reader.releaseLock()\n eventReader = undefined\n return\n }\n\n begin()\n let value: TItem | undefined\n if (`Insert` in event) {\n value = parse(event.Insert as TRecord)\n write({ type: `insert`, value })\n } else if (`Delete` in event) {\n value = parse(event.Delete as TRecord)\n write({ type: `delete`, value })\n } else if (`Update` in event) {\n value = parse(event.Update as TRecord)\n write({ type: `update`, value })\n } else {\n console.error(`Error: ${event.Error}`)\n }\n commit()\n\n if (value) {\n seenIds.setState((curr: Map<string, number>) => {\n const newIds = new Map(curr)\n newIds.set(String(getKey(value)), Date.now())\n return newIds\n })\n }\n }\n }\n\n async function start() {\n const eventStream = await config.recordApi.subscribe(`*`)\n const reader = (eventReader = eventStream.getReader())\n\n // Start listening for subscriptions first. Otherwise, we'd risk a gap\n // between the initial fetch and starting to listen.\n listen(reader)\n\n try {\n await initialFetch()\n } catch (e) {\n cancelEventReader()\n throw e\n } finally {\n // Mark ready both if everything went well or if there's an error to\n // avoid blocking apps waiting for `.preload()` to finish.\n markReady()\n }\n\n // Lastly, start a periodic cleanup task that will be removed when the\n // reader closes.\n const periodicCleanupTask = setInterval(() => {\n seenIds.setState((curr) => {\n const now = Date.now()\n let anyExpired = false\n\n const notExpired = Array.from(curr.entries()).filter(([_, v]) => {\n const expired = now - v > 300 * 1000\n anyExpired = anyExpired || expired\n return !expired\n })\n\n if (anyExpired) {\n return new Map(notExpired)\n }\n return curr\n })\n }, 120 * 1000)\n\n reader.closed.finally(() => clearInterval(periodicCleanupTask))\n }\n\n start()\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n return {\n ...config,\n sync,\n getKey,\n onInsert: async (\n params: InsertMutationFnParams<TItem, TKey>\n ): Promise<Array<number | string>> => {\n const ids = await config.recordApi.createBulk(\n params.transaction.mutations.map((tx) => {\n const { type, modified } = tx\n if (type !== `insert`) {\n throw new ExpectedInsertTypeError(type)\n }\n return serialIns(modified)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly added to the local\n // DB by the subscription.\n await awaitIds(ids.map((id) => String(id)))\n\n return ids\n },\n onUpdate: async (params: UpdateMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, changes, key } = tx\n if (type !== `update`) {\n throw new ExpectedUpdateTypeError(type)\n }\n\n await config.recordApi.update(key, serialUpd(changes))\n\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n onDelete: async (params: DeleteMutationFnParams<TItem, TKey>) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, key } = tx\n if 
(type !== `delete`) {\n throw new ExpectedDeleteTypeError(type)\n }\n\n await config.recordApi.delete(key)\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n utils: {\n cancel: cancelEventReader,\n },\n }\n}\n"],"names":[],"mappings":";;AAgDA,SAAS,QAIP,aACA,OACY;AACZ,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AAEA,SAAS,eAIP,aACA,OACqB;AACrB,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AA4BO,SAAS,2BAKd,QACqE;AACrE,QAAM,SAAS,OAAO;AAEtB,QAAM,QAAQ,CAAC,WACb,QAAwB,OAAO,OAAO,MAAM;AAC9C,QAAM,YAAY,CAAC,SACjB,eAA+B,OAAO,WAAW,IAAI;AACvD,QAAM,YAAY,CAAC,SACjB,QAAwB,OAAO,WAAW,IAAI;AAEhD,QAAM,UAAU,IAAI,MAAM,oBAAI,KAAqB;AAEnD,QAAM,WAAW,CACf,KACA,UAAkB,MAAM,QACN;AAClB,UAAM,YAAY,CAAC,UACjB,IAAI,MAAM,CAAC,OAAO,MAAM,IAAI,EAAE,CAAC;AACjC,QAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,aAAO,QAAQ,QAAA;AAAA,IACjB;AAEA,WAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,YAAM,YAAY,WAAW,MAAM;AACjC,oBAAA;AACA,eAAO,IAAI,0BAA0B,IAAI,SAAA,CAAU,CAAC;AAAA,MACtD,GAAG,OAAO;AAEV,YAAM,cAAc,QAAQ,UAAU,CAAC,UAAU;AAC/C,YAAI,UAAU,MAAM,UAAU,GAAG;AAC/B,uBAAa,SAAS;AACtB,sBAAA;AACA,kBAAA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,MAAI;AACJ,QAAM,oBAAoB,MAAM;AAC9B,QAAI,aAAa;AACf,kBAAY,OAAA;AACZ,kBAAY,YAAA;AACZ,oBAAc;AAAA,IAChB;AAAA,EACF;AAGA,QAAM,OAAO;AAAA,IACX,MAAM,CAAC,WAAuB;AAC5B,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,qBAAe,eAAe;AAC5B,cAAM,QAAQ;AACd,YAAI,WAAW,MAAM,OAAO,UAAU,KAAK;AAAA,UACzC,YAAY;AAAA,YACV;AAAA,UAAA;AAAA,QACF,CACD;AACD,YAAI,SAAS,SAAS;AACtB,YAAI,MAAM;AAEV,cAAA;AAEA,eAAO,MAAM;AACX,gBAAM,SAAS,SAAS,QAAQ;AAChC,cAAI,WAAW,EAAG;AAElB,gBAAM,MAAM;AACZ,qBAAW,QAAQ,SAAS,SAAS;AACnC,kBAAM;AAAA,cACJ,MAAM;AAAA,cACN,OAAO,MAAM,IAAI;AAAA,YAAA,CAClB;AAAA,UACH;AAEA,cAAI,SAAS,MAAO;AAEpB,qBAAW,MAAM,OAAO,UAAU,KAAK;AAAA,YACrC,YAAY;AAAA,cACV;AAAA,cACA;AAAA,cACA,QAAQ,WAAW,SAAY,MAAM;AAAA,YAAA;AAAA,UACvC,CACD;AACD,mBAAS,SAAS;AAAA,QACpB;AAEA,eAAA;AAAA,MACF;AAGA,qBAAe,OAAO,QAA4C;AAChE,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,OAAO,UAAU,MAAM,OAAO,KAAA;AAE5C,cAAI,QAAQ,CAAC,OAAO;AAClB,mBAAO,YAAA;AACP,0BAAc;AACd;AAAA,UACF;AAEA,gBAAA;AACA,cAAI;AACJ,cAAI,YAAY,OAAO;AACrB,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,OAAO;AACL,oBAAQ,MAAM,UAAU,MAAM,KAAK,EAAE;AAAA,UACvC;AACA,iBAAA;AAEA,cAAI,OAAO;AACT,oBAAQ,SAAS,CAAC,SAA8B;AAC9C,oBAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,qBAAO,IAAI,OAAO,OAAO,KAAK,CAAC,GAAG,KAAK,KAAK;AAC5C,qBAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,qBAAe,QAAQ;AACrB,cAAM,cAAc,MAAM,OAAO,UAAU,UAAU,GAAG;AACxD,cAAM,SAAU,cAAc,YAAY,UAAA;AAI1C,eAAO,MAAM;AAEb,YAAI;AACF,gBAAM,aAAA;AAAA,QACR,SAAS,GAAG;AACV,4BAAA;AACA,gBAAM;AAAA,QACR,UAAA;AAGE,oBAAA;AAAA,QACF;AAIA,cAAM,sBAAsB,YAAY,MAAM;AAC5C,kBAAQ,SAAS,CAAC,SAAS;AACzB,kBAAM,MAAM,KAAK,IAAA;AACjB,gBAAI,aAAa;AAEjB,kBAAM,aAAa,MAAM,KAAK,KAAK,SAAS,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM;AAC/D,oBAAM,UAAU,MAAM,IAAI,MAAM;AAChC,2BAAa,cAAc;AAC3B,qBAAO,CAAC;AAAA,YACV,CAAC;AAED,gBAAI,YAAY;AACd,qBAAO,IAAI,IAAI,UAAU;AAAA,YAC3B;AACA,mBAAO;AAAA,UACT,CAAC;AAAA,QACH,GAAG,MAAM,GAAI;AAEb,eAAO,OAAO,QAAQ,MAAM,cAAc,mBA
AmB,CAAC;AAAA,MAChE;AAEA,YAAA;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,OACR,WACoC;AACpC,YAAM,MAAM,MAAM,OAAO,UAAU;AAAA,QACjC,OAAO,YAAY,UAAU,IAAI,CAAC,OAAO;AACvC,gBAAM,EAAE,MAAM,SAAA,IAAa;AAC3B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,wBAAwB,IAAI;AAAA,UACxC;AACA,iBAAO,UAAU,QAAQ;AAAA,QAC3B,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,IAAI,IAAI,CAAC,OAAO,OAAO,EAAE,CAAC,CAAC;AAE1C,aAAO;AAAA,IACT;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,SAAS,IAAA,IAAQ;AAC/B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,wBAAwB,IAAI;AAAA,UACxC;AAEA,gBAAM,OAAO,UAAU,OAAO,KAAK,UAAU,OAAO,CAAC;AAErD,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,UAAU,OAAO,WAAgD;AAC/D,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,IAAA,IAAQ;AACtB,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,wBAAwB,IAAI;AAAA,UACxC;AAEA,gBAAM,OAAO,UAAU,OAAO,GAAG;AACjC,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,OAAO;AAAA,MACL,QAAQ;AAAA,IAAA;AAAA,EACV;AAEJ;"}
|
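The new dist/esm/trailbase.js.map above embeds the rewritten trailbase.ts source, including the awaitIds helper that blocks a mutation until the change-event subscription has echoed the affected ids back into the seenIds store. A minimal, self-contained sketch of that wait-for-ids pattern, using @tanstack/store the same way the embedded source does; the names, the Set-based store, and the 5-second timeout are illustrative assumptions, not the package's actual values:

import { Store } from "@tanstack/store"

// Ids the change-event subscription has already replayed into the local collection.
// Elsewhere, the event handler would call seen.setState() to add each id it replays.
const seen = new Store(new Set<string>())

// Resolve once every id has been observed, or reject after timeoutMs.
function waitForIds(ids: Array<string>, timeoutMs = 5_000): Promise<void> {
  const done = (s: Set<string>) => ids.every((id) => s.has(id))
  if (done(seen.state)) return Promise.resolve()

  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => {
      unsubscribe()
      reject(new Error(`timed out waiting for ids: ${ids}`))
    }, timeoutMs)

    const unsubscribe = seen.subscribe(({ currentVal }) => {
      if (done(currentVal)) {
        clearTimeout(timer)
        unsubscribe()
        resolve()
      }
    })
  })
}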
package/package.json
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tanstack/trailbase-db-collection",
|
|
3
3
|
"description": "TrailBase collection for TanStack DB",
|
|
4
|
-
"version": "0.0.
|
|
4
|
+
"version": "0.0.5",
|
|
5
5
|
"dependencies": {
|
|
6
6
|
"@standard-schema/spec": "^1.0.0",
|
|
7
7
|
"@tanstack/store": "^0.7.0",
|
|
8
8
|
"debug": "^4.4.1",
|
|
9
9
|
"trailbase": "^0.7.1",
|
|
10
|
-
"@tanstack/db": "0.0.
|
|
10
|
+
"@tanstack/db": "0.0.29"
|
|
11
11
|
},
|
|
12
12
|
"devDependencies": {
|
|
13
13
|
"@types/debug": "^4.1.12",
|
package/src/errors.ts
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { TanStackDBError } from "@tanstack/db"
|
|
2
|
+
|
|
3
|
+
// TrailBase DB Collection Errors
|
|
4
|
+
export class TrailBaseDBCollectionError extends TanStackDBError {
|
|
5
|
+
constructor(message: string) {
|
|
6
|
+
super(message)
|
|
7
|
+
this.name = `TrailBaseDBCollectionError`
|
|
8
|
+
}
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export class TimeoutWaitingForIdsError extends TrailBaseDBCollectionError {
|
|
12
|
+
constructor(ids: string) {
|
|
13
|
+
super(`Timeout waiting for ids: ${ids}`)
|
|
14
|
+
this.name = `TimeoutWaitingForIdsError`
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export class ExpectedInsertTypeError extends TrailBaseDBCollectionError {
|
|
19
|
+
constructor(actualType: string) {
|
|
20
|
+
super(`Expected 'insert', got: ${actualType}`)
|
|
21
|
+
this.name = `ExpectedInsertTypeError`
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export class ExpectedUpdateTypeError extends TrailBaseDBCollectionError {
|
|
26
|
+
constructor(actualType: string) {
|
|
27
|
+
super(`Expected 'update', got: ${actualType}`)
|
|
28
|
+
this.name = `ExpectedUpdateTypeError`
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export class ExpectedDeleteTypeError extends TrailBaseDBCollectionError {
|
|
33
|
+
constructor(actualType: string) {
|
|
34
|
+
super(`Expected 'delete', got: ${actualType}`)
|
|
35
|
+
this.name = `ExpectedDeleteTypeError`
|
|
36
|
+
}
|
|
37
|
+
}
|
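src/errors.ts is new in 0.0.5 and replaces the bare Error throws in trailbase.ts with a small hierarchy rooted at TrailBaseDBCollectionError. A rough sketch of how a consumer might match on these classes, assuming they are re-exported from the package entry point (the src/index.ts change that follows is consistent with that) and assuming a hypothetical saveTodo helper whose failure path surfaces the collection's errors:

import {
  TimeoutWaitingForIdsError,
  TrailBaseDBCollectionError,
} from "@tanstack/trailbase-db-collection"

// Hypothetical application helper; any code path that awaits the collection's
// onInsert/onUpdate/onDelete handlers can see these errors.
declare function saveTodo(todo: { id: string; title: string }): Promise<void>

async function createTodo(todo: { id: string; title: string }) {
  try {
    await saveTodo(todo)
  } catch (err) {
    if (err instanceof TimeoutWaitingForIdsError) {
      // The server accepted the write, but the subscription never echoed the
      // new id back before awaitIds gave up.
      console.warn(`write accepted, local sync timed out: ${err.message}`)
    } else if (err instanceof TrailBaseDBCollectionError) {
      // Other collection-specific failures, e.g. an unexpected mutation type.
      throw err
    } else {
      throw err
    }
  }
}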
package/src/index.ts
CHANGED
package/src/trailbase.ts
CHANGED
|
@@ -1,5 +1,11 @@
|
|
|
1
1
|
/* eslint-disable @typescript-eslint/no-unnecessary-condition */
|
|
2
2
|
import { Store } from "@tanstack/store"
|
|
3
|
+
import {
|
|
4
|
+
ExpectedDeleteTypeError,
|
|
5
|
+
ExpectedInsertTypeError,
|
|
6
|
+
ExpectedUpdateTypeError,
|
|
7
|
+
TimeoutWaitingForIdsError,
|
|
8
|
+
} from "./errors"
|
|
3
9
|
import type { Event, RecordApi } from "trailbase"
|
|
4
10
|
|
|
5
11
|
import type {
|
|
@@ -131,7 +137,7 @@ export function trailBaseCollectionOptions<
|
|
|
131
137
|
return new Promise<void>((resolve, reject) => {
|
|
132
138
|
const timeoutId = setTimeout(() => {
|
|
133
139
|
unsubscribe()
|
|
134
|
-
reject(new
|
|
140
|
+
reject(new TimeoutWaitingForIdsError(ids.toString()))
|
|
135
141
|
}, timeout)
|
|
136
142
|
|
|
137
143
|
const unsubscribe = seenIds.subscribe((value) => {
|
|
@@ -144,34 +150,8 @@ export function trailBaseCollectionOptions<
|
|
|
144
150
|
})
|
|
145
151
|
}
|
|
146
152
|
|
|
147
|
-
const weakSeenIds = new WeakRef(seenIds)
|
|
148
|
-
const cleanupTimer = setInterval(() => {
|
|
149
|
-
const seen = weakSeenIds.deref()
|
|
150
|
-
if (seen) {
|
|
151
|
-
seen.setState((curr) => {
|
|
152
|
-
const now = Date.now()
|
|
153
|
-
let anyExpired = false
|
|
154
|
-
|
|
155
|
-
const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
|
|
156
|
-
const expired = now - v > 300 * 1000
|
|
157
|
-
anyExpired = anyExpired || expired
|
|
158
|
-
return !expired
|
|
159
|
-
})
|
|
160
|
-
|
|
161
|
-
if (anyExpired) {
|
|
162
|
-
return new Map(notExpired)
|
|
163
|
-
}
|
|
164
|
-
return curr
|
|
165
|
-
})
|
|
166
|
-
} else {
|
|
167
|
-
clearInterval(cleanupTimer)
|
|
168
|
-
}
|
|
169
|
-
}, 120 * 1000)
|
|
170
|
-
|
|
171
|
-
type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]
|
|
172
|
-
|
|
173
153
|
let eventReader: ReadableStreamDefaultReader<Event> | undefined
|
|
174
|
-
const
|
|
154
|
+
const cancelEventReader = () => {
|
|
175
155
|
if (eventReader) {
|
|
176
156
|
eventReader.cancel()
|
|
177
157
|
eventReader.releaseLock()
|
|
@@ -179,6 +159,7 @@ export function trailBaseCollectionOptions<
|
|
|
179
159
|
}
|
|
180
160
|
}
|
|
181
161
|
|
|
162
|
+
type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]
|
|
182
163
|
const sync = {
|
|
183
164
|
sync: (params: SyncParams) => {
|
|
184
165
|
const { begin, write, commit, markReady } = params
|
|
@@ -221,7 +202,6 @@ export function trailBaseCollectionOptions<
|
|
|
221
202
|
}
|
|
222
203
|
|
|
223
204
|
commit()
|
|
224
|
-
markReady()
|
|
225
205
|
}
|
|
226
206
|
|
|
227
207
|
// Afterwards subscribe.
|
|
@@ -272,10 +252,35 @@ export function trailBaseCollectionOptions<
|
|
|
272
252
|
try {
|
|
273
253
|
await initialFetch()
|
|
274
254
|
} catch (e) {
|
|
275
|
-
|
|
276
|
-
markReady()
|
|
255
|
+
cancelEventReader()
|
|
277
256
|
throw e
|
|
257
|
+
} finally {
|
|
258
|
+
// Mark ready both if everything went well or if there's an error to
|
|
259
|
+
// avoid blocking apps waiting for `.preload()` to finish.
|
|
260
|
+
markReady()
|
|
278
261
|
}
|
|
262
|
+
|
|
263
|
+
// Lastly, start a periodic cleanup task that will be removed when the
|
|
264
|
+
// reader closes.
|
|
265
|
+
const periodicCleanupTask = setInterval(() => {
|
|
266
|
+
seenIds.setState((curr) => {
|
|
267
|
+
const now = Date.now()
|
|
268
|
+
let anyExpired = false
|
|
269
|
+
|
|
270
|
+
const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
|
|
271
|
+
const expired = now - v > 300 * 1000
|
|
272
|
+
anyExpired = anyExpired || expired
|
|
273
|
+
return !expired
|
|
274
|
+
})
|
|
275
|
+
|
|
276
|
+
if (anyExpired) {
|
|
277
|
+
return new Map(notExpired)
|
|
278
|
+
}
|
|
279
|
+
return curr
|
|
280
|
+
})
|
|
281
|
+
}, 120 * 1000)
|
|
282
|
+
|
|
283
|
+
reader.closed.finally(() => clearInterval(periodicCleanupTask))
|
|
279
284
|
}
|
|
280
285
|
|
|
281
286
|
start()
|
|
@@ -295,7 +300,7 @@ export function trailBaseCollectionOptions<
|
|
|
295
300
|
params.transaction.mutations.map((tx) => {
|
|
296
301
|
const { type, modified } = tx
|
|
297
302
|
if (type !== `insert`) {
|
|
298
|
-
throw new
|
|
303
|
+
throw new ExpectedInsertTypeError(type)
|
|
299
304
|
}
|
|
300
305
|
return serialIns(modified)
|
|
301
306
|
})
|
|
@@ -313,7 +318,7 @@ export function trailBaseCollectionOptions<
|
|
|
313
318
|
params.transaction.mutations.map(async (tx) => {
|
|
314
319
|
const { type, changes, key } = tx
|
|
315
320
|
if (type !== `update`) {
|
|
316
|
-
throw new
|
|
321
|
+
throw new ExpectedUpdateTypeError(type)
|
|
317
322
|
}
|
|
318
323
|
|
|
319
324
|
await config.recordApi.update(key, serialUpd(changes))
|
|
@@ -332,7 +337,7 @@ export function trailBaseCollectionOptions<
|
|
|
332
337
|
params.transaction.mutations.map(async (tx) => {
|
|
333
338
|
const { type, key } = tx
|
|
334
339
|
if (type !== `delete`) {
|
|
335
|
-
throw new
|
|
340
|
+
throw new ExpectedDeleteTypeError(type)
|
|
336
341
|
}
|
|
337
342
|
|
|
338
343
|
await config.recordApi.delete(key)
|
|
@@ -346,7 +351,7 @@ export function trailBaseCollectionOptions<
|
|
|
346
351
|
await awaitIds(ids)
|
|
347
352
|
},
|
|
348
353
|
utils: {
|
|
349
|
-
cancel,
|
|
354
|
+
cancel: cancelEventReader,
|
|
350
355
|
},
|
|
351
356
|
}
|
|
352
357
|
}
|