@tanstack/trailbase-db-collection 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.cjs +5 -0
- package/dist/cjs/index.cjs.map +1 -0
- package/dist/cjs/index.d.cts +1 -0
- package/dist/cjs/trailbase.cjs +212 -0
- package/dist/cjs/trailbase.cjs.map +1 -0
- package/dist/cjs/trailbase.d.cts +30 -0
- package/dist/esm/index.d.ts +1 -0
- package/dist/esm/index.js +5 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/trailbase.d.ts +30 -0
- package/dist/esm/trailbase.js +212 -0
- package/dist/esm/trailbase.js.map +1 -0
- package/package.json +62 -0
- package/src/index.ts +5 -0
- package/src/trailbase.ts +343 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { trailBaseCollectionOptions, type TrailBaseCollectionConfig, type TrailBaseCollectionUtils, } from './trailbase.cjs';
|
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
|
3
|
+
const store = require("@tanstack/store");
|
|
4
|
+
function convert(conversions, input) {
|
|
5
|
+
const c = conversions;
|
|
6
|
+
return Object.fromEntries(
|
|
7
|
+
Object.keys(input).map((k) => {
|
|
8
|
+
var _a;
|
|
9
|
+
const value = input[k];
|
|
10
|
+
return [k, ((_a = c[k]) == null ? void 0 : _a.call(c, value)) ?? value];
|
|
11
|
+
})
|
|
12
|
+
);
|
|
13
|
+
}
|
|
14
|
+
function convertPartial(conversions, input) {
|
|
15
|
+
const c = conversions;
|
|
16
|
+
return Object.fromEntries(
|
|
17
|
+
Object.keys(input).map((k) => {
|
|
18
|
+
var _a;
|
|
19
|
+
const value = input[k];
|
|
20
|
+
return [k, ((_a = c[k]) == null ? void 0 : _a.call(c, value)) ?? value];
|
|
21
|
+
})
|
|
22
|
+
);
|
|
23
|
+
}
|
|
24
|
+
// Build a TanStack DB collection configuration backed by a TrailBase record
// API. Wires up: an initial paginated fetch, a live event subscription that
// mirrors server changes into the collection, and mutation handlers
// (insert/update/delete) that block until the subscription has echoed the
// mutated ids back — so the optimistic overlay is only dropped once the
// local DB actually holds the server state.
function trailBaseCollectionOptions(config) {
  const getKey = config.getKey;
  const parse = (record) => convert(config.parse, record);
  const serialUpd = (item) => convertPartial(config.serialize, item);
  const serialIns = (item) => convert(config.serialize, item);
  // Map of stringified record key -> timestamp when the subscription last
  // reported it. Used to confirm that server-side mutations arrived locally.
  const seenIds = new store.Store(new Map());
  // Resolve once every id in `ids` has been observed by the subscription,
  // or reject after `timeout` milliseconds.
  const awaitIds = (ids, timeout = 120 * 1e3) => {
    const allSeen = (map) => ids.every((id) => map.has(id));
    if (allSeen(seenIds.state)) {
      return Promise.resolve();
    }
    return new Promise((resolve, reject) => {
      const timer = setTimeout(() => {
        unsubscribe();
        reject(new Error(`Timeout waiting for ids: ${ids}`));
      }, timeout);
      const unsubscribe = seenIds.subscribe((update) => {
        if (allSeen(update.currentVal)) {
          clearTimeout(timer);
          unsubscribe();
          resolve();
        }
      });
    });
  };
  // Periodically evict seen-id entries older than 5 minutes. The WeakRef
  // lets the store be collected; once it is gone the interval stops itself.
  const weakSeenIds = new WeakRef(seenIds);
  const cleanupTimer = setInterval(() => {
    const target = weakSeenIds.deref();
    if (!target) {
      clearInterval(cleanupTimer);
      return;
    }
    target.setState((current) => {
      const now = Date.now();
      let dropped = false;
      const kept = [];
      for (const entry of current.entries()) {
        if (now - entry[1] > 300 * 1e3) {
          dropped = true;
        } else {
          kept.push(entry);
        }
      }
      // Only allocate a new Map when something actually expired.
      return dropped ? new Map(kept) : current;
    });
  }, 120 * 1e3);
  let eventReader;
  // Tear down the active event-stream reader, if any.
  const cancel = () => {
    if (eventReader) {
      eventReader.cancel();
      eventReader.releaseLock();
      eventReader = void 0;
    }
  };
  const sync = {
    sync: (params) => {
      const { begin, write, commit, markReady } = params;
      // Page through the record API and write every record as an insert
      // inside a single begin/commit transaction.
      async function initialFetch() {
        const limit = 256;
        let response = await config.recordApi.list({
          pagination: {
            limit
          }
        });
        let cursor = response.cursor;
        let fetched = 0;
        begin();
        while (true) {
          const count = response.records.length;
          if (count === 0) break;
          fetched = fetched + count;
          for (const record of response.records) {
            write({
              type: `insert`,
              value: parse(record)
            });
          }
          if (count < limit) break;
          response = await config.recordApi.list({
            pagination: {
              limit,
              cursor,
              // Fall back to offset-based paging when no cursor is provided.
              offset: cursor === void 0 ? fetched : void 0
            }
          });
          cursor = response.cursor;
        }
        commit();
        markReady();
      }
      // Consume subscription events and mirror them into the collection,
      // recording each affected key in `seenIds`.
      async function listen(reader) {
        while (true) {
          const { done, value: event } = await reader.read();
          if (done || !event) {
            reader.releaseLock();
            eventReader = void 0;
            return;
          }
          begin();
          let value;
          if (`Insert` in event) {
            value = parse(event.Insert);
            write({ type: `insert`, value });
          } else if (`Delete` in event) {
            value = parse(event.Delete);
            write({ type: `delete`, value });
          } else if (`Update` in event) {
            value = parse(event.Update);
            write({ type: `update`, value });
          } else {
            console.error(`Error: ${event.Error}`);
          }
          commit();
          if (value) {
            seenIds.setState((current) => {
              const next = new Map(current);
              next.set(String(getKey(value)), Date.now());
              return next;
            });
          }
        }
      }
      // Subscribe before the initial fetch so no events fall into the gap
      // between fetching and listening.
      async function start() {
        const eventStream = await config.recordApi.subscribe(`*`);
        const reader = eventReader = eventStream.getReader();
        listen(reader);
        try {
          await initialFetch();
        } catch (e) {
          cancel();
          markReady();
          throw e;
        }
      }
      start();
    },
    // Expose the getSyncMetadata function
    getSyncMetadata: void 0
  };
  return {
    ...config,
    sync,
    getKey,
    onInsert: async (params) => {
      const ids = await config.recordApi.createBulk(
        params.transaction.mutations.map((mutation) => {
          const { type, modified } = mutation;
          if (type !== `insert`) {
            throw new Error(`Expected 'insert', got: ${type}`);
          }
          return serialIns(modified);
        })
      );
      // Wait for the subscription to echo the new ids back before returning,
      // since the optimistic overlay is removed on return.
      await awaitIds(ids.map((id) => String(id)));
      return ids;
    },
    onUpdate: async (params) => {
      const ids = await Promise.all(
        params.transaction.mutations.map(async (mutation) => {
          const { type, changes, key } = mutation;
          if (type !== `update`) {
            throw new Error(`Expected 'update', got: ${type}`);
          }
          await config.recordApi.update(key, serialUpd(changes));
          return String(key);
        })
      );
      // Block until the updates have round-tripped through the subscription.
      await awaitIds(ids);
    },
    onDelete: async (params) => {
      const ids = await Promise.all(
        params.transaction.mutations.map(async (mutation) => {
          const { type, key } = mutation;
          if (type !== `delete`) {
            throw new Error(`Expected 'delete', got: ${type}`);
          }
          await config.recordApi.delete(key);
          return String(key);
        })
      );
      // Block until the deletions have round-tripped through the subscription.
      await awaitIds(ids);
    },
    utils: {
      cancel
    }
  };
}
exports.trailBaseCollectionOptions = trailBaseCollectionOptions;
//# sourceMappingURL=trailbase.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"trailbase.cjs","sources":["../../src/trailbase.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unnecessary-condition */\nimport { Store } from \"@tanstack/store\"\nimport type { Event, RecordApi } from \"trailbase\"\n\nimport type { CollectionConfig, SyncConfig, UtilsRecord } from \"@tanstack/db\"\n\ntype ShapeOf<T> = Record<keyof T, unknown>\ntype Conversion<I, O> = (value: I) => O\n\ntype OptionalConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? K\n : never]?: Conversion<InputType[K], OutputType[K]>\n}\n\ntype RequiredConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that do not strictly require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? never\n : K]: Conversion<InputType[K], OutputType[K]>\n}\n\ntype Conversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = OptionalConversions<InputType, OutputType> &\n RequiredConversions<InputType, OutputType>\n\nfunction convert<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: InputType\n): OutputType {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? 
value]\n })\n ) as OutputType\n}\n\nfunction convertPartial<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: Partial<InputType>\n): Partial<OutputType> {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? value]\n })\n ) as OutputType\n}\n\n/**\n * Configuration interface for Trailbase Collection\n */\nexport interface TrailBaseCollectionConfig<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n> extends Omit<\n CollectionConfig<TItem, TKey>,\n `sync` | `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Record API name\n */\n recordApi: RecordApi<TRecord>\n\n parse: Conversions<TRecord, TItem>\n serialize: Conversions<TItem, TRecord>\n}\n\nexport type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>\n\nexport interface TrailBaseCollectionUtils extends UtilsRecord {\n cancel: () => void\n}\n\nexport function trailBaseCollectionOptions<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n>(\n config: TrailBaseCollectionConfig<TItem, TRecord, TKey>\n): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {\n const getKey = config.getKey\n\n const parse = (record: TRecord) =>\n convert<TRecord, TItem>(config.parse, record)\n const serialUpd = (item: Partial<TItem>) =>\n convertPartial<TItem, TRecord>(config.serialize, item)\n const serialIns = (item: TItem) =>\n convert<TItem, TRecord>(config.serialize, item)\n\n const seenIds = new Store(new Map<string, number>())\n\n const awaitIds = (\n ids: Array<string>,\n timeout: number = 120 * 1000\n ): Promise<void> => {\n const completed = (value: Map<string, 
number>) =>\n ids.every((id) => value.has(id))\n if (completed(seenIds.state)) {\n return Promise.resolve()\n }\n\n return new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribe()\n reject(new Error(`Timeout waiting for ids: ${ids}`))\n }, timeout)\n\n const unsubscribe = seenIds.subscribe((value) => {\n if (completed(value.currentVal)) {\n clearTimeout(timeoutId)\n unsubscribe()\n resolve()\n }\n })\n })\n }\n\n const weakSeenIds = new WeakRef(seenIds)\n const cleanupTimer = setInterval(() => {\n const seen = weakSeenIds.deref()\n if (seen) {\n seen.setState((curr) => {\n const now = Date.now()\n let anyExpired = false\n\n const notExpired = Array.from(curr.entries()).filter(([_, v]) => {\n const expired = now - v > 300 * 1000\n anyExpired = anyExpired || expired\n return !expired\n })\n\n if (anyExpired) {\n return new Map(notExpired)\n }\n return curr\n })\n } else {\n clearInterval(cleanupTimer)\n }\n }, 120 * 1000)\n\n type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]\n\n let eventReader: ReadableStreamDefaultReader<Event> | undefined\n const cancel = () => {\n if (eventReader) {\n eventReader.cancel()\n eventReader.releaseLock()\n eventReader = undefined\n }\n }\n\n const sync = {\n sync: (params: SyncParams) => {\n const { begin, write, commit, markReady } = params\n\n // Initial fetch.\n async function initialFetch() {\n const limit = 256\n let response = await config.recordApi.list({\n pagination: {\n limit,\n },\n })\n let cursor = response.cursor\n let got = 0\n\n begin()\n\n while (true) {\n const length = response.records.length\n if (length === 0) break\n\n got = got + length\n for (const item of response.records) {\n write({\n type: `insert`,\n value: parse(item),\n })\n }\n\n if (length < limit) break\n\n response = await config.recordApi.list({\n pagination: {\n limit,\n cursor,\n offset: cursor === undefined ? 
got : undefined,\n },\n })\n cursor = response.cursor\n }\n\n commit()\n markReady()\n }\n\n // Afterwards subscribe.\n async function listen(reader: ReadableStreamDefaultReader<Event>) {\n while (true) {\n const { done, value: event } = await reader.read()\n\n if (done || !event) {\n reader.releaseLock()\n eventReader = undefined\n return\n }\n\n begin()\n let value: TItem | undefined\n if (`Insert` in event) {\n value = parse(event.Insert as TRecord)\n write({ type: `insert`, value })\n } else if (`Delete` in event) {\n value = parse(event.Delete as TRecord)\n write({ type: `delete`, value })\n } else if (`Update` in event) {\n value = parse(event.Update as TRecord)\n write({ type: `update`, value })\n } else {\n console.error(`Error: ${event.Error}`)\n }\n commit()\n\n if (value) {\n seenIds.setState((curr) => {\n const newIds = new Map(curr)\n newIds.set(String(getKey(value)), Date.now())\n return newIds\n })\n }\n }\n }\n\n async function start() {\n const eventStream = await config.recordApi.subscribe(`*`)\n const reader = (eventReader = eventStream.getReader())\n\n // Start listening for subscriptions first. 
Otherwise, we'd risk a gap\n // between the initial fetch and starting to listen.\n listen(reader)\n\n try {\n await initialFetch()\n } catch (e) {\n cancel()\n markReady()\n throw e\n }\n }\n\n start()\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n return {\n ...config,\n sync,\n getKey,\n onInsert: async (params): Promise<Array<number | string>> => {\n const ids = await config.recordApi.createBulk(\n params.transaction.mutations.map((tx) => {\n const { type, modified } = tx\n if (type !== `insert`) {\n throw new Error(`Expected 'insert', got: ${type}`)\n }\n return serialIns(modified)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly added to the local\n // DB by the subscription.\n await awaitIds(ids.map((id) => String(id)))\n\n return ids\n },\n onUpdate: async (params) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, changes, key } = tx\n if (type !== `update`) {\n throw new Error(`Expected 'update', got: ${type}`)\n }\n\n await config.recordApi.update(key, serialUpd(changes))\n\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n onDelete: async (params) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, key } = tx\n if (type !== `delete`) {\n throw new Error(`Expected 'delete', got: ${type}`)\n }\n\n await config.recordApi.delete(key)\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n utils: {\n cancel,\n },\n 
}\n}\n"],"names":["Store"],"mappings":";;;AAmCA,SAAS,QAIP,aACA,OACY;AACZ,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AAEA,SAAS,eAIP,aACA,OACqB;AACrB,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AA4BO,SAAS,2BAKd,QACqE;AACrE,QAAM,SAAS,OAAO;AAEtB,QAAM,QAAQ,CAAC,WACb,QAAwB,OAAO,OAAO,MAAM;AAC9C,QAAM,YAAY,CAAC,SACjB,eAA+B,OAAO,WAAW,IAAI;AACvD,QAAM,YAAY,CAAC,SACjB,QAAwB,OAAO,WAAW,IAAI;AAEhD,QAAM,UAAU,IAAIA,YAAM,oBAAI,KAAqB;AAEnD,QAAM,WAAW,CACf,KACA,UAAkB,MAAM,QACN;AAClB,UAAM,YAAY,CAAC,UACjB,IAAI,MAAM,CAAC,OAAO,MAAM,IAAI,EAAE,CAAC;AACjC,QAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,aAAO,QAAQ,QAAA;AAAA,IACjB;AAEA,WAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,YAAM,YAAY,WAAW,MAAM;AACjC,oBAAA;AACA,eAAO,IAAI,MAAM,4BAA4B,GAAG,EAAE,CAAC;AAAA,MACrD,GAAG,OAAO;AAEV,YAAM,cAAc,QAAQ,UAAU,CAAC,UAAU;AAC/C,YAAI,UAAU,MAAM,UAAU,GAAG;AAC/B,uBAAa,SAAS;AACtB,sBAAA;AACA,kBAAA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,cAAc,IAAI,QAAQ,OAAO;AACvC,QAAM,eAAe,YAAY,MAAM;AACrC,UAAM,OAAO,YAAY,MAAA;AACzB,QAAI,MAAM;AACR,WAAK,SAAS,CAAC,SAAS;AACtB,cAAM,MAAM,KAAK,IAAA;AACjB,YAAI,aAAa;AAEjB,cAAM,aAAa,MAAM,KAAK,KAAK,SAAS,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM;AAC/D,gBAAM,UAAU,MAAM,IAAI,MAAM;AAChC,uBAAa,cAAc;AAC3B,iBAAO,CAAC;AAAA,QACV,CAAC;AAED,YAAI,YAAY;AACd,iBAAO,IAAI,IAAI,UAAU;AAAA,QAC3B;AACA,eAAO;AAAA,MACT,CAAC;AAAA,IACH,OAAO;AACL,oBAAc,YAAY;AAAA,IAC5B;AAAA,EACF,GAAG,MAAM,GAAI;AAIb,MAAI;AACJ,QAAM,SAAS,MAAM;AACnB,QAAI,aAAa;AACf,kBAAY,OAAA;AACZ,kBAAY,YAAA;AACZ,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,QAAM,OAAO;AAAA,IACX,MAAM,CAAC,WAAuB;AAC5B,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,qBAAe,eAAe;AAC5B,cAAM,QAAQ;AACd,YAAI,WAAW,MAAM,OAAO,UAAU,KAAK;AAAA,UACzC,YAAY;AAAA,YACV;AAAA,UAAA;AAAA,QACF,CACD;AACD,YAAI,SAAS,SAAS;AACtB,YAAI,MAAM;AAEV,cAAA;AAEA,eAAO,MAAM;AACX,gBAAM,SAAS,SAAS,QAAQ
;AAChC,cAAI,WAAW,EAAG;AAElB,gBAAM,MAAM;AACZ,qBAAW,QAAQ,SAAS,SAAS;AACnC,kBAAM;AAAA,cACJ,MAAM;AAAA,cACN,OAAO,MAAM,IAAI;AAAA,YAAA,CAClB;AAAA,UACH;AAEA,cAAI,SAAS,MAAO;AAEpB,qBAAW,MAAM,OAAO,UAAU,KAAK;AAAA,YACrC,YAAY;AAAA,cACV;AAAA,cACA;AAAA,cACA,QAAQ,WAAW,SAAY,MAAM;AAAA,YAAA;AAAA,UACvC,CACD;AACD,mBAAS,SAAS;AAAA,QACpB;AAEA,eAAA;AACA,kBAAA;AAAA,MACF;AAGA,qBAAe,OAAO,QAA4C;AAChE,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,OAAO,UAAU,MAAM,OAAO,KAAA;AAE5C,cAAI,QAAQ,CAAC,OAAO;AAClB,mBAAO,YAAA;AACP,0BAAc;AACd;AAAA,UACF;AAEA,gBAAA;AACA,cAAI;AACJ,cAAI,YAAY,OAAO;AACrB,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,OAAO;AACL,oBAAQ,MAAM,UAAU,MAAM,KAAK,EAAE;AAAA,UACvC;AACA,iBAAA;AAEA,cAAI,OAAO;AACT,oBAAQ,SAAS,CAAC,SAAS;AACzB,oBAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,qBAAO,IAAI,OAAO,OAAO,KAAK,CAAC,GAAG,KAAK,KAAK;AAC5C,qBAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,qBAAe,QAAQ;AACrB,cAAM,cAAc,MAAM,OAAO,UAAU,UAAU,GAAG;AACxD,cAAM,SAAU,cAAc,YAAY,UAAA;AAI1C,eAAO,MAAM;AAEb,YAAI;AACF,gBAAM,aAAA;AAAA,QACR,SAAS,GAAG;AACV,iBAAA;AACA,oBAAA;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAA;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAA4C;AAC3D,YAAM,MAAM,MAAM,OAAO,UAAU;AAAA,QACjC,OAAO,YAAY,UAAU,IAAI,CAAC,OAAO;AACvC,gBAAM,EAAE,MAAM,SAAA,IAAa;AAC3B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AACA,iBAAO,UAAU,QAAQ;AAAA,QAC3B,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,IAAI,IAAI,CAAC,OAAO,OAAO,EAAE,CAAC,CAAC;AAE1C,aAAO;AAAA,IACT;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,SAAS,IAAA,IAAQ;AAC/B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,KAAK,UAAU,OAAO,CAAC;AAErD,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IA
CA,UAAU,OAAO,WAAW;AAC1B,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,IAAA,IAAQ;AACtB,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,GAAG;AACjC,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,EACF;AAEJ;;"}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { RecordApi } from 'trailbase';
|
|
2
|
+
import { CollectionConfig, UtilsRecord } from '@tanstack/db';
|
|
3
|
+
type ShapeOf<T> = Record<keyof T, unknown>;
|
|
4
|
+
type Conversion<I, O> = (value: I) => O;
|
|
5
|
+
type OptionalConversions<InputType extends ShapeOf<OutputType>, OutputType extends ShapeOf<InputType>> = {
|
|
6
|
+
[K in keyof InputType as InputType[K] extends OutputType[K] ? K : never]?: Conversion<InputType[K], OutputType[K]>;
|
|
7
|
+
};
|
|
8
|
+
type RequiredConversions<InputType extends ShapeOf<OutputType>, OutputType extends ShapeOf<InputType>> = {
|
|
9
|
+
[K in keyof InputType as InputType[K] extends OutputType[K] ? never : K]: Conversion<InputType[K], OutputType[K]>;
|
|
10
|
+
};
|
|
11
|
+
type Conversions<InputType extends ShapeOf<OutputType>, OutputType extends ShapeOf<InputType>> = OptionalConversions<InputType, OutputType> & RequiredConversions<InputType, OutputType>;
|
|
12
|
+
/**
|
|
13
|
+
* Configuration interface for Trailbase Collection
|
|
14
|
+
*/
|
|
15
|
+
export interface TrailBaseCollectionConfig<TItem extends ShapeOf<TRecord>, TRecord extends ShapeOf<TItem> = TItem, TKey extends string | number = string | number> extends Omit<CollectionConfig<TItem, TKey>, `sync` | `onInsert` | `onUpdate` | `onDelete`> {
|
|
16
|
+
/**
|
|
17
|
+
* Record API name
|
|
18
|
+
*/
|
|
19
|
+
recordApi: RecordApi<TRecord>;
|
|
20
|
+
parse: Conversions<TRecord, TItem>;
|
|
21
|
+
serialize: Conversions<TItem, TRecord>;
|
|
22
|
+
}
|
|
23
|
+
export type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>;
|
|
24
|
+
export interface TrailBaseCollectionUtils extends UtilsRecord {
|
|
25
|
+
cancel: () => void;
|
|
26
|
+
}
|
|
27
|
+
export declare function trailBaseCollectionOptions<TItem extends ShapeOf<TRecord>, TRecord extends ShapeOf<TItem> = TItem, TKey extends string | number = string | number>(config: TrailBaseCollectionConfig<TItem, TRecord, TKey>): CollectionConfig<TItem, TKey> & {
|
|
28
|
+
utils: TrailBaseCollectionUtils;
|
|
29
|
+
};
|
|
30
|
+
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { trailBaseCollectionOptions, type TrailBaseCollectionConfig, type TrailBaseCollectionUtils, } from './trailbase.js';
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";"}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { RecordApi } from 'trailbase';
|
|
2
|
+
import { CollectionConfig, UtilsRecord } from '@tanstack/db';
|
|
3
|
+
type ShapeOf<T> = Record<keyof T, unknown>;
|
|
4
|
+
type Conversion<I, O> = (value: I) => O;
|
|
5
|
+
type OptionalConversions<InputType extends ShapeOf<OutputType>, OutputType extends ShapeOf<InputType>> = {
|
|
6
|
+
[K in keyof InputType as InputType[K] extends OutputType[K] ? K : never]?: Conversion<InputType[K], OutputType[K]>;
|
|
7
|
+
};
|
|
8
|
+
type RequiredConversions<InputType extends ShapeOf<OutputType>, OutputType extends ShapeOf<InputType>> = {
|
|
9
|
+
[K in keyof InputType as InputType[K] extends OutputType[K] ? never : K]: Conversion<InputType[K], OutputType[K]>;
|
|
10
|
+
};
|
|
11
|
+
type Conversions<InputType extends ShapeOf<OutputType>, OutputType extends ShapeOf<InputType>> = OptionalConversions<InputType, OutputType> & RequiredConversions<InputType, OutputType>;
|
|
12
|
+
/**
|
|
13
|
+
* Configuration interface for Trailbase Collection
|
|
14
|
+
*/
|
|
15
|
+
export interface TrailBaseCollectionConfig<TItem extends ShapeOf<TRecord>, TRecord extends ShapeOf<TItem> = TItem, TKey extends string | number = string | number> extends Omit<CollectionConfig<TItem, TKey>, `sync` | `onInsert` | `onUpdate` | `onDelete`> {
|
|
16
|
+
/**
|
|
17
|
+
* Record API name
|
|
18
|
+
*/
|
|
19
|
+
recordApi: RecordApi<TRecord>;
|
|
20
|
+
parse: Conversions<TRecord, TItem>;
|
|
21
|
+
serialize: Conversions<TItem, TRecord>;
|
|
22
|
+
}
|
|
23
|
+
export type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>;
|
|
24
|
+
export interface TrailBaseCollectionUtils extends UtilsRecord {
|
|
25
|
+
cancel: () => void;
|
|
26
|
+
}
|
|
27
|
+
export declare function trailBaseCollectionOptions<TItem extends ShapeOf<TRecord>, TRecord extends ShapeOf<TItem> = TItem, TKey extends string | number = string | number>(config: TrailBaseCollectionConfig<TItem, TRecord, TKey>): CollectionConfig<TItem, TKey> & {
|
|
28
|
+
utils: TrailBaseCollectionUtils;
|
|
29
|
+
};
|
|
30
|
+
export {};
|
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
import { Store } from "@tanstack/store";
|
|
2
|
+
function convert(conversions, input) {
|
|
3
|
+
const c = conversions;
|
|
4
|
+
return Object.fromEntries(
|
|
5
|
+
Object.keys(input).map((k) => {
|
|
6
|
+
var _a;
|
|
7
|
+
const value = input[k];
|
|
8
|
+
return [k, ((_a = c[k]) == null ? void 0 : _a.call(c, value)) ?? value];
|
|
9
|
+
})
|
|
10
|
+
);
|
|
11
|
+
}
|
|
12
|
+
function convertPartial(conversions, input) {
|
|
13
|
+
const c = conversions;
|
|
14
|
+
return Object.fromEntries(
|
|
15
|
+
Object.keys(input).map((k) => {
|
|
16
|
+
var _a;
|
|
17
|
+
const value = input[k];
|
|
18
|
+
return [k, ((_a = c[k]) == null ? void 0 : _a.call(c, value)) ?? value];
|
|
19
|
+
})
|
|
20
|
+
);
|
|
21
|
+
}
|
|
22
|
+
// Build a TanStack DB collection configuration backed by a TrailBase record
// API. Wires up: an initial paginated fetch, a live event subscription that
// mirrors server changes into the collection, and mutation handlers
// (insert/update/delete) that block until the subscription has echoed the
// mutated ids back — so the optimistic overlay is only dropped once the
// local DB actually holds the server state.
function trailBaseCollectionOptions(config) {
  const getKey = config.getKey;
  const parse = (record) => convert(config.parse, record);
  const serialUpd = (item) => convertPartial(config.serialize, item);
  const serialIns = (item) => convert(config.serialize, item);
  // Map of stringified record key -> timestamp when the subscription last
  // reported it. Used to confirm that server-side mutations arrived locally.
  const seenIds = new Store(new Map());
  // Resolve once every id in `ids` has been observed by the subscription,
  // or reject after `timeout` milliseconds.
  const awaitIds = (ids, timeout = 120 * 1e3) => {
    const allSeen = (map) => ids.every((id) => map.has(id));
    if (allSeen(seenIds.state)) {
      return Promise.resolve();
    }
    return new Promise((resolve, reject) => {
      const timer = setTimeout(() => {
        unsubscribe();
        reject(new Error(`Timeout waiting for ids: ${ids}`));
      }, timeout);
      const unsubscribe = seenIds.subscribe((update) => {
        if (allSeen(update.currentVal)) {
          clearTimeout(timer);
          unsubscribe();
          resolve();
        }
      });
    });
  };
  // Periodically evict seen-id entries older than 5 minutes. The WeakRef
  // lets the store be collected; once it is gone the interval stops itself.
  const weakSeenIds = new WeakRef(seenIds);
  const cleanupTimer = setInterval(() => {
    const target = weakSeenIds.deref();
    if (!target) {
      clearInterval(cleanupTimer);
      return;
    }
    target.setState((current) => {
      const now = Date.now();
      let dropped = false;
      const kept = [];
      for (const entry of current.entries()) {
        if (now - entry[1] > 300 * 1e3) {
          dropped = true;
        } else {
          kept.push(entry);
        }
      }
      // Only allocate a new Map when something actually expired.
      return dropped ? new Map(kept) : current;
    });
  }, 120 * 1e3);
  let eventReader;
  // Tear down the active event-stream reader, if any.
  const cancel = () => {
    if (eventReader) {
      eventReader.cancel();
      eventReader.releaseLock();
      eventReader = void 0;
    }
  };
  const sync = {
    sync: (params) => {
      const { begin, write, commit, markReady } = params;
      // Page through the record API and write every record as an insert
      // inside a single begin/commit transaction.
      async function initialFetch() {
        const limit = 256;
        let response = await config.recordApi.list({
          pagination: {
            limit
          }
        });
        let cursor = response.cursor;
        let fetched = 0;
        begin();
        while (true) {
          const count = response.records.length;
          if (count === 0) break;
          fetched = fetched + count;
          for (const record of response.records) {
            write({
              type: `insert`,
              value: parse(record)
            });
          }
          if (count < limit) break;
          response = await config.recordApi.list({
            pagination: {
              limit,
              cursor,
              // Fall back to offset-based paging when no cursor is provided.
              offset: cursor === void 0 ? fetched : void 0
            }
          });
          cursor = response.cursor;
        }
        commit();
        markReady();
      }
      // Consume subscription events and mirror them into the collection,
      // recording each affected key in `seenIds`.
      async function listen(reader) {
        while (true) {
          const { done, value: event } = await reader.read();
          if (done || !event) {
            reader.releaseLock();
            eventReader = void 0;
            return;
          }
          begin();
          let value;
          if (`Insert` in event) {
            value = parse(event.Insert);
            write({ type: `insert`, value });
          } else if (`Delete` in event) {
            value = parse(event.Delete);
            write({ type: `delete`, value });
          } else if (`Update` in event) {
            value = parse(event.Update);
            write({ type: `update`, value });
          } else {
            console.error(`Error: ${event.Error}`);
          }
          commit();
          if (value) {
            seenIds.setState((current) => {
              const next = new Map(current);
              next.set(String(getKey(value)), Date.now());
              return next;
            });
          }
        }
      }
      // Subscribe before the initial fetch so no events fall into the gap
      // between fetching and listening.
      async function start() {
        const eventStream = await config.recordApi.subscribe(`*`);
        const reader = eventReader = eventStream.getReader();
        listen(reader);
        try {
          await initialFetch();
        } catch (e) {
          cancel();
          markReady();
          throw e;
        }
      }
      start();
    },
    // Expose the getSyncMetadata function
    getSyncMetadata: void 0
  };
  return {
    ...config,
    sync,
    getKey,
    onInsert: async (params) => {
      const ids = await config.recordApi.createBulk(
        params.transaction.mutations.map((mutation) => {
          const { type, modified } = mutation;
          if (type !== `insert`) {
            throw new Error(`Expected 'insert', got: ${type}`);
          }
          return serialIns(modified);
        })
      );
      // Wait for the subscription to echo the new ids back before returning,
      // since the optimistic overlay is removed on return.
      await awaitIds(ids.map((id) => String(id)));
      return ids;
    },
    onUpdate: async (params) => {
      const ids = await Promise.all(
        params.transaction.mutations.map(async (mutation) => {
          const { type, changes, key } = mutation;
          if (type !== `update`) {
            throw new Error(`Expected 'update', got: ${type}`);
          }
          await config.recordApi.update(key, serialUpd(changes));
          return String(key);
        })
      );
      // Block until the updates have round-tripped through the subscription.
      await awaitIds(ids);
    },
    onDelete: async (params) => {
      const ids = await Promise.all(
        params.transaction.mutations.map(async (mutation) => {
          const { type, key } = mutation;
          if (type !== `delete`) {
            throw new Error(`Expected 'delete', got: ${type}`);
          }
          await config.recordApi.delete(key);
          return String(key);
        })
      );
      // Block until the deletions have round-tripped through the subscription.
      await awaitIds(ids);
    },
    utils: {
      cancel
    }
  };
}
export {
  trailBaseCollectionOptions
};
//# sourceMappingURL=trailbase.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"trailbase.js","sources":["../../src/trailbase.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unnecessary-condition */\nimport { Store } from \"@tanstack/store\"\nimport type { Event, RecordApi } from \"trailbase\"\n\nimport type { CollectionConfig, SyncConfig, UtilsRecord } from \"@tanstack/db\"\n\ntype ShapeOf<T> = Record<keyof T, unknown>\ntype Conversion<I, O> = (value: I) => O\n\ntype OptionalConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? K\n : never]?: Conversion<InputType[K], OutputType[K]>\n}\n\ntype RequiredConversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = {\n // Excludes all keys that do not strictly require a conversation.\n [K in keyof InputType as InputType[K] extends OutputType[K]\n ? never\n : K]: Conversion<InputType[K], OutputType[K]>\n}\n\ntype Conversions<\n InputType extends ShapeOf<OutputType>,\n OutputType extends ShapeOf<InputType>,\n> = OptionalConversions<InputType, OutputType> &\n RequiredConversions<InputType, OutputType>\n\nfunction convert<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: InputType\n): OutputType {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? 
value]\n })\n ) as OutputType\n}\n\nfunction convertPartial<\n InputType extends ShapeOf<OutputType> & Record<string, unknown>,\n OutputType extends ShapeOf<InputType>,\n>(\n conversions: Conversions<InputType, OutputType>,\n input: Partial<InputType>\n): Partial<OutputType> {\n const c = conversions as Record<string, Conversion<InputType, OutputType>>\n\n return Object.fromEntries(\n Object.keys(input).map((k: string) => {\n const value = input[k]\n return [k, c[k]?.(value as any) ?? value]\n })\n ) as OutputType\n}\n\n/**\n * Configuration interface for Trailbase Collection\n */\nexport interface TrailBaseCollectionConfig<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n> extends Omit<\n CollectionConfig<TItem, TKey>,\n `sync` | `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Record API name\n */\n recordApi: RecordApi<TRecord>\n\n parse: Conversions<TRecord, TItem>\n serialize: Conversions<TItem, TRecord>\n}\n\nexport type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>\n\nexport interface TrailBaseCollectionUtils extends UtilsRecord {\n cancel: () => void\n}\n\nexport function trailBaseCollectionOptions<\n TItem extends ShapeOf<TRecord>,\n TRecord extends ShapeOf<TItem> = TItem,\n TKey extends string | number = string | number,\n>(\n config: TrailBaseCollectionConfig<TItem, TRecord, TKey>\n): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {\n const getKey = config.getKey\n\n const parse = (record: TRecord) =>\n convert<TRecord, TItem>(config.parse, record)\n const serialUpd = (item: Partial<TItem>) =>\n convertPartial<TItem, TRecord>(config.serialize, item)\n const serialIns = (item: TItem) =>\n convert<TItem, TRecord>(config.serialize, item)\n\n const seenIds = new Store(new Map<string, number>())\n\n const awaitIds = (\n ids: Array<string>,\n timeout: number = 120 * 1000\n ): Promise<void> => {\n const completed = (value: Map<string, 
number>) =>\n ids.every((id) => value.has(id))\n if (completed(seenIds.state)) {\n return Promise.resolve()\n }\n\n return new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribe()\n reject(new Error(`Timeout waiting for ids: ${ids}`))\n }, timeout)\n\n const unsubscribe = seenIds.subscribe((value) => {\n if (completed(value.currentVal)) {\n clearTimeout(timeoutId)\n unsubscribe()\n resolve()\n }\n })\n })\n }\n\n const weakSeenIds = new WeakRef(seenIds)\n const cleanupTimer = setInterval(() => {\n const seen = weakSeenIds.deref()\n if (seen) {\n seen.setState((curr) => {\n const now = Date.now()\n let anyExpired = false\n\n const notExpired = Array.from(curr.entries()).filter(([_, v]) => {\n const expired = now - v > 300 * 1000\n anyExpired = anyExpired || expired\n return !expired\n })\n\n if (anyExpired) {\n return new Map(notExpired)\n }\n return curr\n })\n } else {\n clearInterval(cleanupTimer)\n }\n }, 120 * 1000)\n\n type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]\n\n let eventReader: ReadableStreamDefaultReader<Event> | undefined\n const cancel = () => {\n if (eventReader) {\n eventReader.cancel()\n eventReader.releaseLock()\n eventReader = undefined\n }\n }\n\n const sync = {\n sync: (params: SyncParams) => {\n const { begin, write, commit, markReady } = params\n\n // Initial fetch.\n async function initialFetch() {\n const limit = 256\n let response = await config.recordApi.list({\n pagination: {\n limit,\n },\n })\n let cursor = response.cursor\n let got = 0\n\n begin()\n\n while (true) {\n const length = response.records.length\n if (length === 0) break\n\n got = got + length\n for (const item of response.records) {\n write({\n type: `insert`,\n value: parse(item),\n })\n }\n\n if (length < limit) break\n\n response = await config.recordApi.list({\n pagination: {\n limit,\n cursor,\n offset: cursor === undefined ? 
got : undefined,\n },\n })\n cursor = response.cursor\n }\n\n commit()\n markReady()\n }\n\n // Afterwards subscribe.\n async function listen(reader: ReadableStreamDefaultReader<Event>) {\n while (true) {\n const { done, value: event } = await reader.read()\n\n if (done || !event) {\n reader.releaseLock()\n eventReader = undefined\n return\n }\n\n begin()\n let value: TItem | undefined\n if (`Insert` in event) {\n value = parse(event.Insert as TRecord)\n write({ type: `insert`, value })\n } else if (`Delete` in event) {\n value = parse(event.Delete as TRecord)\n write({ type: `delete`, value })\n } else if (`Update` in event) {\n value = parse(event.Update as TRecord)\n write({ type: `update`, value })\n } else {\n console.error(`Error: ${event.Error}`)\n }\n commit()\n\n if (value) {\n seenIds.setState((curr) => {\n const newIds = new Map(curr)\n newIds.set(String(getKey(value)), Date.now())\n return newIds\n })\n }\n }\n }\n\n async function start() {\n const eventStream = await config.recordApi.subscribe(`*`)\n const reader = (eventReader = eventStream.getReader())\n\n // Start listening for subscriptions first. 
Otherwise, we'd risk a gap\n // between the initial fetch and starting to listen.\n listen(reader)\n\n try {\n await initialFetch()\n } catch (e) {\n cancel()\n markReady()\n throw e\n }\n }\n\n start()\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n return {\n ...config,\n sync,\n getKey,\n onInsert: async (params): Promise<Array<number | string>> => {\n const ids = await config.recordApi.createBulk(\n params.transaction.mutations.map((tx) => {\n const { type, modified } = tx\n if (type !== `insert`) {\n throw new Error(`Expected 'insert', got: ${type}`)\n }\n return serialIns(modified)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly added to the local\n // DB by the subscription.\n await awaitIds(ids.map((id) => String(id)))\n\n return ids\n },\n onUpdate: async (params) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, changes, key } = tx\n if (type !== `update`) {\n throw new Error(`Expected 'update', got: ${type}`)\n }\n\n await config.recordApi.update(key, serialUpd(changes))\n\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n onDelete: async (params) => {\n const ids: Array<string> = await Promise.all(\n params.transaction.mutations.map(async (tx) => {\n const { type, key } = tx\n if (type !== `delete`) {\n throw new Error(`Expected 'delete', got: ${type}`)\n }\n\n await config.recordApi.delete(key)\n return String(key)\n })\n )\n\n // The optimistic mutation overlay is removed on return, so at this point\n // we have to ensure that the new record was properly updated in the local\n // DB by the subscription.\n await awaitIds(ids)\n },\n utils: {\n cancel,\n },\n 
}\n}\n"],"names":[],"mappings":";AAmCA,SAAS,QAIP,aACA,OACY;AACZ,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AAEA,SAAS,eAIP,aACA,OACqB;AACrB,QAAM,IAAI;AAEV,SAAO,OAAO;AAAA,IACZ,OAAO,KAAK,KAAK,EAAE,IAAI,CAAC,MAAc;;AACpC,YAAM,QAAQ,MAAM,CAAC;AACrB,aAAO,CAAC,KAAG,OAAE,OAAF,2BAAO,WAAiB,KAAK;AAAA,IAC1C,CAAC;AAAA,EAAA;AAEL;AA4BO,SAAS,2BAKd,QACqE;AACrE,QAAM,SAAS,OAAO;AAEtB,QAAM,QAAQ,CAAC,WACb,QAAwB,OAAO,OAAO,MAAM;AAC9C,QAAM,YAAY,CAAC,SACjB,eAA+B,OAAO,WAAW,IAAI;AACvD,QAAM,YAAY,CAAC,SACjB,QAAwB,OAAO,WAAW,IAAI;AAEhD,QAAM,UAAU,IAAI,MAAM,oBAAI,KAAqB;AAEnD,QAAM,WAAW,CACf,KACA,UAAkB,MAAM,QACN;AAClB,UAAM,YAAY,CAAC,UACjB,IAAI,MAAM,CAAC,OAAO,MAAM,IAAI,EAAE,CAAC;AACjC,QAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,aAAO,QAAQ,QAAA;AAAA,IACjB;AAEA,WAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,YAAM,YAAY,WAAW,MAAM;AACjC,oBAAA;AACA,eAAO,IAAI,MAAM,4BAA4B,GAAG,EAAE,CAAC;AAAA,MACrD,GAAG,OAAO;AAEV,YAAM,cAAc,QAAQ,UAAU,CAAC,UAAU;AAC/C,YAAI,UAAU,MAAM,UAAU,GAAG;AAC/B,uBAAa,SAAS;AACtB,sBAAA;AACA,kBAAA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,cAAc,IAAI,QAAQ,OAAO;AACvC,QAAM,eAAe,YAAY,MAAM;AACrC,UAAM,OAAO,YAAY,MAAA;AACzB,QAAI,MAAM;AACR,WAAK,SAAS,CAAC,SAAS;AACtB,cAAM,MAAM,KAAK,IAAA;AACjB,YAAI,aAAa;AAEjB,cAAM,aAAa,MAAM,KAAK,KAAK,SAAS,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM;AAC/D,gBAAM,UAAU,MAAM,IAAI,MAAM;AAChC,uBAAa,cAAc;AAC3B,iBAAO,CAAC;AAAA,QACV,CAAC;AAED,YAAI,YAAY;AACd,iBAAO,IAAI,IAAI,UAAU;AAAA,QAC3B;AACA,eAAO;AAAA,MACT,CAAC;AAAA,IACH,OAAO;AACL,oBAAc,YAAY;AAAA,IAC5B;AAAA,EACF,GAAG,MAAM,GAAI;AAIb,MAAI;AACJ,QAAM,SAAS,MAAM;AACnB,QAAI,aAAa;AACf,kBAAY,OAAA;AACZ,kBAAY,YAAA;AACZ,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,QAAM,OAAO;AAAA,IACX,MAAM,CAAC,WAAuB;AAC5B,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,qBAAe,eAAe;AAC5B,cAAM,QAAQ;AACd,YAAI,WAAW,MAAM,OAAO,UAAU,KAAK;AAAA,UACzC,YAAY;AAAA,YACV;AAAA,UAAA;AAAA,QACF,CACD;AACD,YAAI,SAAS,SAAS;AACtB,YAAI,MAAM;AAEV,cAAA;AAEA,eAAO,MAAM;AACX,gBAAM,SAAS,SAAS,QAAQ;AAChC,cAA
I,WAAW,EAAG;AAElB,gBAAM,MAAM;AACZ,qBAAW,QAAQ,SAAS,SAAS;AACnC,kBAAM;AAAA,cACJ,MAAM;AAAA,cACN,OAAO,MAAM,IAAI;AAAA,YAAA,CAClB;AAAA,UACH;AAEA,cAAI,SAAS,MAAO;AAEpB,qBAAW,MAAM,OAAO,UAAU,KAAK;AAAA,YACrC,YAAY;AAAA,cACV;AAAA,cACA;AAAA,cACA,QAAQ,WAAW,SAAY,MAAM;AAAA,YAAA;AAAA,UACvC,CACD;AACD,mBAAS,SAAS;AAAA,QACpB;AAEA,eAAA;AACA,kBAAA;AAAA,MACF;AAGA,qBAAe,OAAO,QAA4C;AAChE,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,OAAO,UAAU,MAAM,OAAO,KAAA;AAE5C,cAAI,QAAQ,CAAC,OAAO;AAClB,mBAAO,YAAA;AACP,0BAAc;AACd;AAAA,UACF;AAEA,gBAAA;AACA,cAAI;AACJ,cAAI,YAAY,OAAO;AACrB,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,WAAW,YAAY,OAAO;AAC5B,oBAAQ,MAAM,MAAM,MAAiB;AACrC,kBAAM,EAAE,MAAM,UAAU,MAAA,CAAO;AAAA,UACjC,OAAO;AACL,oBAAQ,MAAM,UAAU,MAAM,KAAK,EAAE;AAAA,UACvC;AACA,iBAAA;AAEA,cAAI,OAAO;AACT,oBAAQ,SAAS,CAAC,SAAS;AACzB,oBAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,qBAAO,IAAI,OAAO,OAAO,KAAK,CAAC,GAAG,KAAK,KAAK;AAC5C,qBAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,qBAAe,QAAQ;AACrB,cAAM,cAAc,MAAM,OAAO,UAAU,UAAU,GAAG;AACxD,cAAM,SAAU,cAAc,YAAY,UAAA;AAI1C,eAAO,MAAM;AAEb,YAAI;AACF,gBAAM,aAAA;AAAA,QACR,SAAS,GAAG;AACV,iBAAA;AACA,oBAAA;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAA;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAA4C;AAC3D,YAAM,MAAM,MAAM,OAAO,UAAU;AAAA,QACjC,OAAO,YAAY,UAAU,IAAI,CAAC,OAAO;AACvC,gBAAM,EAAE,MAAM,SAAA,IAAa;AAC3B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AACA,iBAAO,UAAU,QAAQ;AAAA,QAC3B,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,IAAI,IAAI,CAAC,OAAO,OAAO,EAAE,CAAC,CAAC;AAE1C,aAAO;AAAA,IACT;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,SAAS,IAAA,IAAQ;AAC/B,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,KAAK,UAAU,OAAO,CAAC;AAErD,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,UAAU,OA
AO,WAAW;AAC1B,YAAM,MAAqB,MAAM,QAAQ;AAAA,QACvC,OAAO,YAAY,UAAU,IAAI,OAAO,OAAO;AAC7C,gBAAM,EAAE,MAAM,IAAA,IAAQ;AACtB,cAAI,SAAS,UAAU;AACrB,kBAAM,IAAI,MAAM,2BAA2B,IAAI,EAAE;AAAA,UACnD;AAEA,gBAAM,OAAO,UAAU,OAAO,GAAG;AACjC,iBAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MAAA;AAMH,YAAM,SAAS,GAAG;AAAA,IACpB;AAAA,IACA,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,EACF;AAEJ;"}
|
package/package.json
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@tanstack/trailbase-db-collection",
|
|
3
|
+
"description": "TrailBase collection for TanStack DB",
|
|
4
|
+
"version": "0.0.1",
|
|
5
|
+
"dependencies": {
|
|
6
|
+
"@standard-schema/spec": "^1.0.0",
|
|
7
|
+
"@tanstack/db": "workspace:*",
|
|
8
|
+
"@tanstack/store": "^0.7.0",
|
|
9
|
+
"debug": "^4.4.1",
|
|
10
|
+
"trailbase": "^0.7.1"
|
|
11
|
+
},
|
|
12
|
+
"devDependencies": {
|
|
13
|
+
"@types/debug": "^4.1.12",
|
|
14
|
+
"@vitest/coverage-istanbul": "^3.0.9"
|
|
15
|
+
},
|
|
16
|
+
"exports": {
|
|
17
|
+
".": {
|
|
18
|
+
"import": {
|
|
19
|
+
"types": "./dist/esm/index.d.ts",
|
|
20
|
+
"default": "./dist/esm/index.js"
|
|
21
|
+
},
|
|
22
|
+
"require": {
|
|
23
|
+
"types": "./dist/cjs/index.d.cts",
|
|
24
|
+
"default": "./dist/cjs/index.cjs"
|
|
25
|
+
}
|
|
26
|
+
},
|
|
27
|
+
"./package.json": "./package.json"
|
|
28
|
+
},
|
|
29
|
+
"files": [
|
|
30
|
+
"dist",
|
|
31
|
+
"src"
|
|
32
|
+
],
|
|
33
|
+
"main": "dist/cjs/index.cjs",
|
|
34
|
+
"module": "dist/esm/index.js",
|
|
35
|
+
"packageManager": "pnpm@10.6.3",
|
|
36
|
+
"peerDependencies": {
|
|
37
|
+
"typescript": ">=4.7"
|
|
38
|
+
},
|
|
39
|
+
"author": "Sebastian Jeltsch",
|
|
40
|
+
"license": "MIT",
|
|
41
|
+
"repository": {
|
|
42
|
+
"type": "git",
|
|
43
|
+
"url": "https://github.com/TanStack/db.git",
|
|
44
|
+
"directory": "packages/trailbase-db-collection"
|
|
45
|
+
},
|
|
46
|
+
"homepage": "https://tanstack.com/db",
|
|
47
|
+
"keywords": [
|
|
48
|
+
"trailbase",
|
|
49
|
+
"sql",
|
|
50
|
+
"optimistic",
|
|
51
|
+
"typescript"
|
|
52
|
+
],
|
|
53
|
+
"scripts": {
|
|
54
|
+
"build": "vite build",
|
|
55
|
+
"dev": "vite build --watch",
|
|
56
|
+
"lint": "eslint . --fix",
|
|
57
|
+
"test": "npx vitest --run"
|
|
58
|
+
},
|
|
59
|
+
"sideEffects": false,
|
|
60
|
+
"type": "module",
|
|
61
|
+
"types": "dist/esm/index.d.ts"
|
|
62
|
+
}
|
package/src/index.ts
ADDED
package/src/trailbase.ts
ADDED
|
@@ -0,0 +1,343 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-unnecessary-condition */
|
|
2
|
+
import { Store } from "@tanstack/store"
|
|
3
|
+
import type { Event, RecordApi } from "trailbase"
|
|
4
|
+
|
|
5
|
+
import type { CollectionConfig, SyncConfig, UtilsRecord } from "@tanstack/db"
|
|
6
|
+
|
|
7
|
+
// A structural "shape" of T: same keys, arbitrary value types. Used to relate
// two record types that share keys but may differ per-key in value type.
type ShapeOf<T> = Record<keyof T, unknown>
// A function converting a single field value from I to O.
type Conversion<I, O> = (value: I) => O

type OptionalConversions<
  InputType extends ShapeOf<OutputType>,
  OutputType extends ShapeOf<InputType>,
> = {
  // Keeps only the keys whose input type is already assignable to the output
  // type, i.e. keys that do NOT require a conversion. For those keys a
  // converter may still be supplied, hence the `?` modifier.
  [K in keyof InputType as InputType[K] extends OutputType[K]
    ? K
    : never]?: Conversion<InputType[K], OutputType[K]>
}

type RequiredConversions<
  InputType extends ShapeOf<OutputType>,
  OutputType extends ShapeOf<InputType>,
> = {
  // Keeps only the keys whose input type is NOT assignable to the output
  // type, i.e. keys that strictly require a conversion. For those keys a
  // converter is mandatory.
  [K in keyof InputType as InputType[K] extends OutputType[K]
    ? never
    : K]: Conversion<InputType[K], OutputType[K]>
}

// Per-key converters from InputType to OutputType: mandatory where the value
// types differ, optional where they already match.
type Conversions<
  InputType extends ShapeOf<OutputType>,
  OutputType extends ShapeOf<InputType>,
> = OptionalConversions<InputType, OutputType> &
  RequiredConversions<InputType, OutputType>
|
|
35
|
+
|
|
36
|
+
function convert<
|
|
37
|
+
InputType extends ShapeOf<OutputType> & Record<string, unknown>,
|
|
38
|
+
OutputType extends ShapeOf<InputType>,
|
|
39
|
+
>(
|
|
40
|
+
conversions: Conversions<InputType, OutputType>,
|
|
41
|
+
input: InputType
|
|
42
|
+
): OutputType {
|
|
43
|
+
const c = conversions as Record<string, Conversion<InputType, OutputType>>
|
|
44
|
+
|
|
45
|
+
return Object.fromEntries(
|
|
46
|
+
Object.keys(input).map((k: string) => {
|
|
47
|
+
const value = input[k]
|
|
48
|
+
return [k, c[k]?.(value as any) ?? value]
|
|
49
|
+
})
|
|
50
|
+
) as OutputType
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
function convertPartial<
|
|
54
|
+
InputType extends ShapeOf<OutputType> & Record<string, unknown>,
|
|
55
|
+
OutputType extends ShapeOf<InputType>,
|
|
56
|
+
>(
|
|
57
|
+
conversions: Conversions<InputType, OutputType>,
|
|
58
|
+
input: Partial<InputType>
|
|
59
|
+
): Partial<OutputType> {
|
|
60
|
+
const c = conversions as Record<string, Conversion<InputType, OutputType>>
|
|
61
|
+
|
|
62
|
+
return Object.fromEntries(
|
|
63
|
+
Object.keys(input).map((k: string) => {
|
|
64
|
+
const value = input[k]
|
|
65
|
+
return [k, c[k]?.(value as any) ?? value]
|
|
66
|
+
})
|
|
67
|
+
) as OutputType
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
 * Configuration interface for a TrailBase-backed collection.
 *
 * Extends the base `CollectionConfig` minus the members this integration
 * supplies itself (`sync`, `onInsert`, `onUpdate`, `onDelete`).
 */
export interface TrailBaseCollectionConfig<
  TItem extends ShapeOf<TRecord>,
  TRecord extends ShapeOf<TItem> = TItem,
  TKey extends string | number = string | number,
> extends Omit<
    CollectionConfig<TItem, TKey>,
    `sync` | `onInsert` | `onUpdate` | `onDelete`
  > {
  /**
   * The TrailBase record API client used to list, mutate, and subscribe to
   * the records backing this collection.
   */
  recordApi: RecordApi<TRecord>

  // Per-field converters applied to records received from TrailBase
  // (TRecord) to produce local collection items (TItem).
  parse: Conversions<TRecord, TItem>
  // Per-field converters applied to local items (TItem) before sending them
  // to TrailBase (TRecord).
  serialize: Conversions<TItem, TRecord>
}
|
|
89
|
+
|
|
90
|
+
// Signature of a function that waits for a transaction id, resolving with
// whether it was observed before `timeout` (milliseconds) elapsed.
// NOTE(review): exported but unused within this module — presumably part of
// the public API surface; confirm downstream usage before removing.
export type AwaitTxIdFn = (txId: string, timeout?: number) => Promise<boolean>

export interface TrailBaseCollectionUtils extends UtilsRecord {
  // Cancels the active change-subscription stream, if any.
  cancel: () => void
}
|
|
95
|
+
|
|
96
|
+
/**
 * Builds TanStack DB collection options backed by a TrailBase record API.
 *
 * The returned config provides a `sync` implementation (initial paged fetch
 * plus a realtime subscription) and `onInsert`/`onUpdate`/`onDelete`
 * handlers that write through to TrailBase and then wait until the resulting
 * change has been observed locally via the subscription.
 *
 * @param config - Collection configuration plus the TrailBase `recordApi`
 *   and the per-field `parse`/`serialize` converters.
 * @returns A `CollectionConfig` augmented with `utils.cancel()` to stop the
 *   subscription stream.
 */
export function trailBaseCollectionOptions<
  TItem extends ShapeOf<TRecord>,
  TRecord extends ShapeOf<TItem> = TItem,
  TKey extends string | number = string | number,
>(
  config: TrailBaseCollectionConfig<TItem, TRecord, TKey>
): CollectionConfig<TItem, TKey> & { utils: TrailBaseCollectionUtils } {
  const getKey = config.getKey

  // Field-wise converters between the wire format (TRecord) and the local
  // item type (TItem).
  const parse = (record: TRecord) =>
    convert<TRecord, TItem>(config.parse, record)
  const serialUpd = (item: Partial<TItem>) =>
    convertPartial<TItem, TRecord>(config.serialize, item)
  const serialIns = (item: TItem) =>
    convert<TItem, TRecord>(config.serialize, item)

  // Keys recently observed via the subscription, mapped to the time they
  // were seen. `awaitIds` uses this to confirm that a mutation round-tripped.
  const seenIds = new Store(new Map<string, number>())

  // Resolves once every id in `ids` has been observed by the subscription,
  // or rejects after `timeout` milliseconds.
  const awaitIds = (
    ids: Array<string>,
    timeout: number = 120 * 1000
  ): Promise<void> => {
    const completed = (value: Map<string, number>) =>
      ids.every((id) => value.has(id))
    // Fast path: everything was already seen.
    if (completed(seenIds.state)) {
      return Promise.resolve()
    }

    return new Promise<void>((resolve, reject) => {
      const timeoutId = setTimeout(() => {
        unsubscribe()
        reject(new Error(`Timeout waiting for ids: ${ids}`))
      }, timeout)

      const unsubscribe = seenIds.subscribe((value) => {
        if (completed(value.currentVal)) {
          clearTimeout(timeoutId)
          unsubscribe()
          resolve()
        }
      })
    })
  }

  // Periodically (every 2 min) drop `seenIds` entries older than 5 minutes.
  // The WeakRef lets the interval detect when the store is no longer
  // referenced and stop itself.
  // NOTE(review): `cancel()` below does not clear this interval; it keeps
  // firing until the store is garbage-collected — confirm this is intended.
  const weakSeenIds = new WeakRef(seenIds)
  const cleanupTimer = setInterval(() => {
    const seen = weakSeenIds.deref()
    if (seen) {
      seen.setState((curr) => {
        const now = Date.now()
        let anyExpired = false

        const notExpired = Array.from(curr.entries()).filter(([_, v]) => {
          const expired = now - v > 300 * 1000
          anyExpired = anyExpired || expired
          return !expired
        })

        // Only allocate a new Map (and thus notify subscribers) when
        // something actually expired.
        if (anyExpired) {
          return new Map(notExpired)
        }
        return curr
      })
    } else {
      clearInterval(cleanupTimer)
    }
  }, 120 * 1000)

  type SyncParams = Parameters<SyncConfig<TItem, TKey>[`sync`]>[0]

  // Reader of the active subscription stream; `undefined` when not listening.
  let eventReader: ReadableStreamDefaultReader<Event> | undefined
  // Stops the subscription stream.
  // NOTE(review): `eventReader.cancel()` returns a promise that is not
  // awaited before `releaseLock()` is called — verify this ordering is safe
  // for the stream implementation in use.
  const cancel = () => {
    if (eventReader) {
      eventReader.cancel()
      eventReader.releaseLock()
      eventReader = undefined
    }
  }

  const sync = {
    sync: (params: SyncParams) => {
      const { begin, write, commit, markReady } = params

      // Initial fetch: page through the full record list (256 at a time) and
      // write every record as an insert inside a single begin/commit batch.
      async function initialFetch() {
        const limit = 256
        let response = await config.recordApi.list({
          pagination: {
            limit,
          },
        })
        let cursor = response.cursor
        let got = 0

        begin()

        while (true) {
          const length = response.records.length
          if (length === 0) break

          got = got + length
          for (const item of response.records) {
            write({
              type: `insert`,
              value: parse(item),
            })
          }

          // A short page means we reached the end.
          if (length < limit) break

          response = await config.recordApi.list({
            pagination: {
              limit,
              cursor,
              // Fall back to offset-based paging when the API returned no
              // cursor.
              offset: cursor === undefined ? got : undefined,
            },
          })
          cursor = response.cursor
        }

        commit()
        markReady()
      }

      // Afterwards subscribe: pump the realtime event stream, applying each
      // change as its own begin/write/commit and recording the affected key
      // in `seenIds`.
      async function listen(reader: ReadableStreamDefaultReader<Event>) {
        while (true) {
          const { done, value: event } = await reader.read()

          if (done || !event) {
            reader.releaseLock()
            eventReader = undefined
            return
          }

          begin()
          let value: TItem | undefined
          if (`Insert` in event) {
            value = parse(event.Insert as TRecord)
            write({ type: `insert`, value })
          } else if (`Delete` in event) {
            value = parse(event.Delete as TRecord)
            write({ type: `delete`, value })
          } else if (`Update` in event) {
            value = parse(event.Update as TRecord)
            write({ type: `update`, value })
          } else {
            // Remaining variant of the event union carries an error payload;
            // note that begin()/commit() still bracket it with no write.
            console.error(`Error: ${event.Error}`)
          }
          commit()

          if (value) {
            seenIds.setState((curr) => {
              const newIds = new Map(curr)
              newIds.set(String(getKey(value)), Date.now())
              return newIds
            })
          }
        }
      }

      async function start() {
        const eventStream = await config.recordApi.subscribe(`*`)
        const reader = (eventReader = eventStream.getReader())

        // Start listening for subscriptions first. Otherwise, we'd risk a gap
        // between the initial fetch and starting to listen.
        listen(reader)

        try {
          await initialFetch()
        } catch (e) {
          cancel()
          markReady()
          throw e
        }
      }

      // NOTE(review): `start()` is neither awaited nor given a `.catch()`;
      // the rethrow above therefore surfaces as an unhandled promise
      // rejection — confirm this is the intended failure mode.
      start()
    },
    // No sync metadata is provided by this integration.
    getSyncMetadata: undefined,
  }

  return {
    ...config,
    sync,
    getKey,
    onInsert: async (params): Promise<Array<number | string>> => {
      const ids = await config.recordApi.createBulk(
        params.transaction.mutations.map((tx) => {
          const { type, modified } = tx
          if (type !== `insert`) {
            throw new Error(`Expected 'insert', got: ${type}`)
          }
          return serialIns(modified)
        })
      )

      // The optimistic mutation overlay is removed on return, so at this point
      // we have to ensure that the new record was properly added to the local
      // DB by the subscription.
      await awaitIds(ids.map((id) => String(id)))

      return ids
    },
    onUpdate: async (params) => {
      const ids: Array<string> = await Promise.all(
        params.transaction.mutations.map(async (tx) => {
          const { type, changes, key } = tx
          if (type !== `update`) {
            throw new Error(`Expected 'update', got: ${type}`)
          }

          await config.recordApi.update(key, serialUpd(changes))

          return String(key)
        })
      )

      // The optimistic mutation overlay is removed on return, so at this point
      // we have to ensure that the new record was properly updated in the local
      // DB by the subscription.
      await awaitIds(ids)
    },
    onDelete: async (params) => {
      const ids: Array<string> = await Promise.all(
        params.transaction.mutations.map(async (tx) => {
          const { type, key } = tx
          if (type !== `delete`) {
            throw new Error(`Expected 'delete', got: ${type}`)
          }

          await config.recordApi.delete(key)
          return String(key)
        })
      )

      // The optimistic mutation overlay is removed on return, so at this point
      // we have to ensure that the new record was properly updated in the local
      // DB by the subscription.
      await awaitIds(ids)
    },
    utils: {
      cancel,
    },
  }
}
|