@based/db 0.0.45 → 0.0.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/lib/darwin_aarch64/libdeflate.dylib +0 -0
  2. package/dist/lib/darwin_aarch64/libjemalloc_selva.2.dylib +0 -0
  3. package/dist/lib/darwin_aarch64/libnode-v20.node +0 -0
  4. package/dist/lib/darwin_aarch64/libnode-v21.node +0 -0
  5. package/dist/lib/darwin_aarch64/libnode-v22.node +0 -0
  6. package/dist/lib/darwin_aarch64/libnode-v23.node +0 -0
  7. package/dist/lib/darwin_aarch64/libnode-v24.node +0 -0
  8. package/dist/lib/darwin_aarch64/libselva.dylib +0 -0
  9. package/dist/lib/linux_aarch64/libnode-v20.node +0 -0
  10. package/dist/lib/linux_aarch64/libnode-v21.node +0 -0
  11. package/dist/lib/linux_aarch64/libnode-v22.node +0 -0
  12. package/dist/lib/linux_aarch64/libnode-v23.node +0 -0
  13. package/dist/lib/linux_aarch64/libnode-v24.node +0 -0
  14. package/dist/lib/linux_aarch64/libselva.so +0 -0
  15. package/dist/lib/linux_x86_64/libnode-v20.node +0 -0
  16. package/dist/lib/linux_x86_64/libnode-v21.node +0 -0
  17. package/dist/lib/linux_x86_64/libnode-v22.node +0 -0
  18. package/dist/lib/linux_x86_64/libnode-v23.node +0 -0
  19. package/dist/lib/linux_x86_64/libnode-v24.node +0 -0
  20. package/dist/lib/linux_x86_64/libselva.so +0 -0
  21. package/dist/src/client/index.d.ts +3 -1
  22. package/dist/src/client/index.js +11 -1
  23. package/dist/src/client/query/BasedDbQuery.d.ts +2 -2
  24. package/dist/src/client/query/debug.js +3 -0
  25. package/dist/src/client/query/display.js +1 -1
  26. package/dist/src/client/query/include/toBuffer.js +0 -1
  27. package/dist/src/client/query/read/read.js +51 -40
  28. package/dist/src/client/query/subscription/index.d.ts +10 -0
  29. package/dist/src/client/query/subscription/index.js +59 -4
  30. package/dist/src/client/query/toBuffer.js +54 -26
  31. package/dist/src/client/query/types.d.ts +7 -0
  32. package/dist/src/client/query/types.js +1 -0
  33. package/dist/src/index.js +2 -63
  34. package/dist/src/utils.d.ts +15 -0
  35. package/dist/src/utils.js +46 -0
  36. package/package.json +1 -1
Binary file
Binary file
@@ -8,6 +8,7 @@ import { DbServer } from '../server/index.js';
8
8
  import { TransformFns } from '../server/migrate/index.js';
9
9
  import { ModifyOpts } from './modify/types.js';
10
10
  import { OnClose, OnData, OnError } from './query/subscription/types.js';
11
+ import { SubStore } from './query/subscription/index.js';
11
12
  export type DbClientHooks = {
12
13
  setSchema(schema: StrictSchema, fromStart?: boolean, transformFns?: TransformFns): Promise<DbServer['schema']>;
13
14
  flushModify(buf: Uint8Array): Promise<{
@@ -15,7 +16,7 @@ export type DbClientHooks = {
15
16
  dbWriteTime?: number;
16
17
  }>;
17
18
  getQueryBuf(buf: Uint8Array): Promise<Uint8Array>;
18
- subscribe(q: BasedDbQuery, onData: OnData, onError?: OnError): OnClose;
19
+ subscribe(q: BasedDbQuery, onData: (buf: Uint8Array) => ReturnType<OnData>, onError?: OnError): OnClose;
19
20
  };
20
21
  type DbClientOpts = {
21
22
  hooks: DbClientHooks;
@@ -26,6 +27,7 @@ type DbClientOpts = {
26
27
  type DbClientSchema = DbServer['schema'];
27
28
  export declare class DbClient {
28
29
  constructor({ hooks, maxModifySize, flushTime, debug, }: DbClientOpts);
30
+ subs: Map<BasedDbQuery, SubStore>;
29
31
  flushTime: number;
30
32
  flushReady: () => void;
31
33
  flushIsReady: Promise<void>;
@@ -30,6 +30,7 @@ export class DbClient {
30
30
  debugMode(this);
31
31
  }
32
32
  }
33
+ subs = new Map();
33
34
  flushTime;
34
35
  flushReady;
35
36
  flushIsReady;
@@ -75,9 +76,14 @@ export class DbClient {
75
76
  if (this.modifyCtx.len > 8) {
76
77
  console.info('Modify cancelled - schema updated');
77
78
  }
79
+ // cancel modify queue
78
80
  const resCtx = this.modifyCtx.ctx;
79
81
  this.modifyCtx.reset();
80
82
  execCtxQueue(resCtx, true);
83
+ // resubscribe
84
+ for (const [q, store] of this.subs) {
85
+ store.resubscribe(q);
86
+ }
81
87
  if (this.listeners?.schema) {
82
88
  for (const cb of this.listeners.schema) {
83
89
  cb(this.schema);
@@ -180,10 +186,14 @@ export class DbClient {
180
186
  return expire(this, type, typeof id === 'number' ? id : id.tmpId, seconds);
181
187
  }
182
188
  destroy() {
183
- this.modifyCtx.len = 0;
189
+ this.stop();
184
190
  this.modifyCtx.db = null; // Make sure we don't have a circular ref and leak mem
185
191
  }
186
192
  stop() {
193
+ for (const [, { onClose }] of this.subs) {
194
+ onClose();
195
+ }
196
+ this.subs.clear();
187
197
  this.modifyCtx.len = 0;
188
198
  }
189
199
  // For more advanced / internal usage - use isModified instead for most cases
@@ -1,7 +1,7 @@
1
1
  import { QueryDef, QueryTarget, Operator, QueryByAliasObj } from './query.js';
2
2
  import { BasedQueryResponse } from './BasedIterable.js';
3
3
  import { Search } from './search/index.js';
4
- import { OnData, OnError } from './subscription/index.js';
4
+ import { OnData, OnError, OnClose } from './subscription/index.js';
5
5
  import { DbClient } from '../index.js';
6
6
  import { LangName } from '@based/schema';
7
7
  import { FilterAst, FilterBranchFn, FilterOpts } from './filter/types.js';
@@ -48,7 +48,7 @@ export declare class BasedDbQuery extends QueryBranch<BasedDbQuery> {
48
48
  buffer: Uint8Array;
49
49
  register(): void;
50
50
  locale(locale: LangName): this;
51
- subscribe(onData: OnData, onError?: OnError): import("./subscription/types.js").OnClose;
51
+ subscribe(onData: OnData, onError?: OnError): OnClose;
52
52
  _getSync(dbCtxExternal: any): BasedQueryResponse;
53
53
  toBuffer(): Uint8Array;
54
54
  }
@@ -101,6 +101,9 @@ export const debug = (x, start = 0, end = 0, label) => {
101
101
  if (a[j + i * w] === 252) {
102
102
  return picocolors.red(v);
103
103
  }
104
+ if (a[j + i * w] === 250) {
105
+ return picocolors.redBright(v);
106
+ }
104
107
  return v;
105
108
  })
106
109
  .join(' '));
@@ -281,7 +281,7 @@ export const inspectData = (q, def, level, top, depth, hasId = false) => {
281
281
  str = prefix + '[';
282
282
  }
283
283
  if (def.aggregate) {
284
- str += inspectObject(q.toObject(), def, '', level + 1, i === max - 1, i === 0, false, depth);
284
+ str += inspectObject('toObject' in q ? q.toObject() : q, def, '', level + 1, i === max - 1, i === 0, false, depth);
285
285
  return str;
286
286
  }
287
287
  for (const x of q) {
@@ -18,7 +18,6 @@ export const includeToBuffer = (db, def) => {
18
18
  walkDefs(db, def, f);
19
19
  }
20
20
  }
21
- // main
22
21
  if (def.include.main.len > 0) {
23
22
  // if (def.target.)
24
23
  const len = def.type === QueryDefType.Edge
@@ -3,7 +3,43 @@ import { QueryDefType } from '../types.js';
3
3
  import { read, readUtf8 } from '../../string.js';
4
4
  import { DECODER, readDoubleLE, readFloatLE, readInt16, readInt32, readUint16, readUint32, setByPath, } from '@saulx/utils';
5
5
  import { inverseLangMap } from '@based/schema';
6
- import { READ_EDGE, READ_ID, READ_REFERENCE, READ_REFERENCES, } from '../types.js';
6
+ import { READ_EDGE, READ_ID, READ_REFERENCE, READ_REFERENCES, READ_AGGREGATION, } from '../types.js';
7
+ const readAggregate = (q, result, offset, len) => {
8
+ const results = {};
9
+ if (q.aggregate.groupBy) {
10
+ let i = offset;
11
+ while (i < len) {
12
+ let key = '';
13
+ if (result[i] == 0) {
14
+ if (q.aggregate.groupBy.default) {
15
+ key = q.aggregate.groupBy.default;
16
+ }
17
+ else {
18
+ key = `$undefined`;
19
+ }
20
+ }
21
+ else {
22
+ key = DECODER.decode(result.subarray(i, i + 2));
23
+ }
24
+ i += 2;
25
+ const resultKey = (results[key] = {});
26
+ for (const aggregatesArray of q.aggregate.aggregates.values()) {
27
+ for (const agg of aggregatesArray) {
28
+ setByPath(resultKey, agg.propDef.path, readUint32(result, agg.resultPos + i));
29
+ }
30
+ }
31
+ i += q.aggregate.totalResultsPos;
32
+ }
33
+ }
34
+ else {
35
+ for (const aggregatesArray of q.aggregate.aggregates.values()) {
36
+ for (const agg of aggregatesArray) {
37
+ setByPath(results, agg.propDef.path, readUint32(result, agg.resultPos + offset));
38
+ }
39
+ }
40
+ }
41
+ return results;
42
+ };
7
43
  const addField = (p, value, item, defaultOnly = false, lang = 0) => {
8
44
  let i = p.__isEdge === true ? 1 : 0;
9
45
  // TODO OPTMIZE
@@ -175,7 +211,19 @@ export const readAllFields = (q, result, offset, end, item, id) => {
175
211
  handleUndefinedProps(id, q, item);
176
212
  return i - offset;
177
213
  }
178
- if (index === READ_EDGE) {
214
+ if (index === READ_AGGREGATION) {
215
+ // also for edges at some point!
216
+ let field = result[i];
217
+ i++;
218
+ const size = readUint32(result, i);
219
+ i += 4;
220
+ const ref = q.references.get(field);
221
+ addField(
222
+ // @ts-ignore
223
+ ref.target.propDef, readAggregate(ref, result, i, i + size), item);
224
+ i += size;
225
+ }
226
+ else if (index === READ_EDGE) {
179
227
  let prop = result[i];
180
228
  if (prop === READ_REFERENCE) {
181
229
  i++;
@@ -369,44 +417,7 @@ export const readAllFields = (q, result, offset, end, item, id) => {
369
417
  let cnt = 0;
370
418
  export const resultToObject = (q, result, end, offset = 0) => {
371
419
  if (q.aggregate) {
372
- const results = {};
373
- // range for numbers
374
- if (q.aggregate.groupBy) {
375
- // key size = 2 for now... not perfect...
376
- let i = 0;
377
- while (i < result.byteLength - 4) {
378
- // if group = 0
379
- // add extra thing for the keys maybe?
380
- let key = '';
381
- if (result[i] == 0) {
382
- if (q.aggregate.groupBy.default) {
383
- key = q.aggregate.groupBy.default;
384
- }
385
- else {
386
- key = `$undefined`;
387
- }
388
- }
389
- else {
390
- key = DECODER.decode(result.subarray(i, i + 2));
391
- }
392
- i += 2;
393
- const resultKey = (results[key] = {});
394
- for (const aggregatesArray of q.aggregate.aggregates.values()) {
395
- for (const agg of aggregatesArray) {
396
- setByPath(resultKey, agg.propDef.path, readUint32(result, agg.resultPos + i));
397
- }
398
- }
399
- i += q.aggregate.totalResultsPos;
400
- }
401
- }
402
- else {
403
- for (const aggregatesArray of q.aggregate.aggregates.values()) {
404
- for (const agg of aggregatesArray) {
405
- setByPath(results, agg.propDef.path, readUint32(result, agg.resultPos));
406
- }
407
- }
408
- }
409
- return results;
420
+ return readAggregate(q, result, 0, result.byteLength - 4);
410
421
  }
411
422
  const len = readUint32(result, offset);
412
423
  if (len === 0) {
@@ -1,4 +1,14 @@
1
1
  import { BasedDbQuery } from '../BasedDbQuery.js';
2
+ import { BasedQueryResponse } from '../BasedIterable.js';
2
3
  import { OnData, OnError, OnClose } from './types.js';
4
+ export declare class SubStore {
5
+ listeners: Map<OnData, OnError>;
6
+ onClose: OnClose;
7
+ response?: BasedQueryResponse;
8
+ checksum?: number;
9
+ len?: number;
10
+ subscribe(q: BasedDbQuery): void;
11
+ resubscribe(q: BasedDbQuery): void;
12
+ }
3
13
  export declare const subscribe: (q: BasedDbQuery, onData: OnData, onError?: OnError) => OnClose;
4
14
  export * from './types.js';
@@ -1,11 +1,66 @@
1
+ import { BasedQueryResponse } from '../BasedIterable.js';
1
2
  import { includeField } from '../query.js';
2
3
  import { registerQuery } from '../registerQuery.js';
4
+ export class SubStore {
5
+ listeners;
6
+ onClose;
7
+ response;
8
+ checksum;
9
+ len;
10
+ subscribe(q) {
11
+ if (!q.def.include.stringFields.size && !q.def.references.size) {
12
+ includeField(q.def, '*');
13
+ }
14
+ registerQuery(q);
15
+ this.onClose = q.db.hooks.subscribe(q, (res) => {
16
+ if (!this.response) {
17
+ this.response = new BasedQueryResponse(q.id, q.def, res, 0);
18
+ }
19
+ else {
20
+ this.response.result = res;
21
+ this.response.end = res.byteLength;
22
+ }
23
+ const checksum = this.response.checksum;
24
+ const len = res.byteLength;
25
+ if (this.len !== len || this.checksum !== checksum) {
26
+ for (const [onData] of this.listeners) {
27
+ onData(this.response);
28
+ }
29
+ this.len = len;
30
+ this.checksum = checksum;
31
+ }
32
+ }, (err) => {
33
+ for (const [, onError] of this.listeners) {
34
+ onError(err);
35
+ }
36
+ });
37
+ }
38
+ resubscribe(q) {
39
+ this.onClose();
40
+ q.reBuildQuery();
41
+ this.response = null;
42
+ this.subscribe(q);
43
+ }
44
+ }
3
45
  export const subscribe = (q, onData, onError) => {
4
- if (!q.def.include.stringFields.size && !q.def.references.size) {
5
- includeField(q.def, '*');
46
+ if (!q.db.subs.has(q)) {
47
+ const store = new SubStore();
48
+ store.listeners = new Map([[onData, onError]]);
49
+ store.subscribe(q);
50
+ q.db.subs.set(q, store);
51
+ }
52
+ else {
53
+ const store = q.db.subs.get(q);
54
+ store.listeners.set(onData, onError);
6
55
  }
7
- registerQuery(q);
8
- return q.db.hooks.subscribe(q, onData, onError);
56
+ return () => {
57
+ const store = q.db.subs.get(q);
58
+ store.listeners.delete(onData);
59
+ if (!store.listeners.size) {
60
+ q.db.subs.delete(q);
61
+ store.onClose();
62
+ }
63
+ };
9
64
  };
10
65
  export * from './types.js';
11
66
  //# sourceMappingURL=index.js.map
@@ -39,30 +39,58 @@ export function defToBuffer(db, def) {
39
39
  throw new Error('Wrong aggregate size (0)');
40
40
  }
41
41
  const filterSize = def.filter.size || 0;
42
- const buf = new Uint8Array(16 + filterSize + aggregateSize);
43
- buf[0] = isRootCountOnly(def, filterSize)
44
- ? QueryType.aggregatesCountType
45
- : QueryType.aggregates;
46
- buf[1] = def.schema.idUint8[0];
47
- buf[2] = def.schema.idUint8[1];
48
- buf[3] = def.range.offset;
49
- buf[4] = def.range.offset >>> 8;
50
- buf[5] = def.range.offset >>> 16;
51
- buf[6] = def.range.offset >>> 24;
52
- buf[7] = def.range.limit;
53
- buf[8] = def.range.limit >>> 8;
54
- buf[9] = def.range.limit >>> 16;
55
- buf[10] = def.range.limit >>> 24;
56
- buf[11] = filterSize;
57
- buf[12] = filterSize >>> 8;
58
- if (filterSize) {
59
- buf.set(filterToBuffer(def.filter), 13);
42
+ if (def.type === QueryDefType.References) {
43
+ const buf = new Uint8Array(13 + filterSize + aggregateSize);
44
+ const sz = 10 + filterSize + aggregateSize;
45
+ buf[0] = 251 /* includeOp.REFERENCES_AGGREGATION */;
46
+ buf[1] = sz;
47
+ buf[2] = sz >>> 8;
48
+ // ---
49
+ buf[3] = filterSize;
50
+ buf[4] = filterSize >>> 8;
51
+ buf[5] = def.range.offset;
52
+ buf[6] = def.range.offset >>> 8;
53
+ buf[7] = def.range.offset >>> 16;
54
+ buf[8] = def.range.offset >>> 24;
55
+ if (filterSize) {
56
+ buf.set(filterToBuffer(def.filter), 9);
57
+ }
58
+ // required to get typeEntry and fieldSchema
59
+ buf[9 + filterSize] = def.schema.idUint8[0]; // typeId
60
+ buf[9 + 1 + filterSize] = def.schema.idUint8[1]; // typeId
61
+ buf[9 + 2 + filterSize] = def.target.propDef.prop; // refField
62
+ const aggregateBuffer = aggregateToBuffer(def.aggregate);
63
+ buf.set(aggregateBuffer, 9 + 3 + filterSize);
64
+ // buf[12 + filterSize] = aggregateSize
65
+ // buf[12 + 1 + filterSize] = aggregateSize >>> 8
66
+ result.push(buf);
67
+ }
68
+ else {
69
+ const buf = new Uint8Array(16 + filterSize + aggregateSize);
70
+ buf[0] = isRootCountOnly(def, filterSize)
71
+ ? QueryType.aggregatesCountType
72
+ : QueryType.aggregates;
73
+ buf[1] = def.schema.idUint8[0];
74
+ buf[2] = def.schema.idUint8[1];
75
+ buf[3] = def.range.offset;
76
+ buf[4] = def.range.offset >>> 8;
77
+ buf[5] = def.range.offset >>> 16;
78
+ buf[6] = def.range.offset >>> 24;
79
+ buf[7] = def.range.limit;
80
+ buf[8] = def.range.limit >>> 8;
81
+ buf[9] = def.range.limit >>> 16;
82
+ buf[10] = def.range.limit >>> 24;
83
+ buf[11] = filterSize;
84
+ buf[12] = filterSize >>> 8;
85
+ if (filterSize) {
86
+ buf.set(filterToBuffer(def.filter), 13);
87
+ }
88
+ const aggregateBuffer = aggregateToBuffer(def.aggregate);
89
+ buf[14 + filterSize] = aggregateSize;
90
+ buf[15 + filterSize] = aggregateSize >>> 8;
91
+ buf.set(aggregateBuffer, 16 + filterSize);
92
+ result.push(buf);
60
93
  }
61
- const aggregateBuffer = aggregateToBuffer(def.aggregate);
62
- buf[14 + filterSize] = aggregateSize;
63
- buf[15 + filterSize] = aggregateSize >>> 8;
64
- buf.set(aggregateBuffer, 16 + filterSize);
65
- result.push(buf);
66
94
  // ignore this for now...
67
95
  // result.push(...include)
68
96
  if (def.type === QueryDefType.Root) {
@@ -206,7 +234,7 @@ export function defToBuffer(db, def) {
206
234
  const modsSize = filterSize + sortSize;
207
235
  const meta = new Uint8Array(modsSize + 10 + 8);
208
236
  const sz = size + 7 + modsSize + 8;
209
- meta[0] = 254;
237
+ meta[0] = 254 /* includeOp.REFERENCES */;
210
238
  meta[1] = sz;
211
239
  meta[2] = sz >>> 8;
212
240
  meta[3] = filterSize;
@@ -235,7 +263,7 @@ export function defToBuffer(db, def) {
235
263
  else if (def.type === QueryDefType.Reference) {
236
264
  const meta = new Uint8Array(6);
237
265
  const sz = size + 3;
238
- meta[0] = 255;
266
+ meta[0] = 255 /* includeOp.REFERENCE */;
239
267
  meta[1] = sz;
240
268
  meta[2] = sz >>> 8;
241
269
  meta[3] = def.schema.idUint8[0];
@@ -246,7 +274,7 @@ export function defToBuffer(db, def) {
246
274
  result.push(...include);
247
275
  if (edges) {
248
276
  const metaEdgeBuffer = new Uint8Array(3);
249
- metaEdgeBuffer[0] = 252;
277
+ metaEdgeBuffer[0] = 252 /* includeOp.EDGE */;
250
278
  metaEdgeBuffer[1] = edgesSize;
251
279
  metaEdgeBuffer[2] = edgesSize >>> 8;
252
280
  result.push(metaEdgeBuffer, ...edges);
@@ -140,3 +140,10 @@ export declare const READ_ID = 255;
140
140
  export declare const READ_EDGE = 252;
141
141
  export declare const READ_REFERENCES = 253;
142
142
  export declare const READ_REFERENCE = 254;
143
+ export declare const READ_AGGREGATION = 250;
144
+ export declare const enum includeOp {
145
+ REFERENCES_AGGREGATION = 251,
146
+ EDGE = 252,
147
+ REFERENCES = 254,
148
+ REFERENCE = 255
149
+ }
@@ -31,4 +31,5 @@ export const READ_ID = 255;
31
31
  export const READ_EDGE = 252;
32
32
  export const READ_REFERENCES = 253;
33
33
  export const READ_REFERENCE = 254;
34
+ export const READ_AGGREGATION = 250;
34
35
  //# sourceMappingURL=types.js.map
package/dist/src/index.js CHANGED
@@ -3,9 +3,7 @@ import { ModifyCtx } from './client/flushModify.js';
3
3
  import { DbServer } from './server/index.js';
4
4
  import { DbClient } from './client/index.js';
5
5
  import { wait } from '@saulx/utils';
6
- import { debugMode, debugServer } from './utils.js';
7
- import { BasedQueryResponse } from './client/query/BasedIterable.js';
8
- import { registerQuery } from './client/query/registerQuery.js';
6
+ import { debugMode, debugServer, getDefaultHooks } from './utils.js';
9
7
  export * from './client/modify/modify.js';
10
8
  export { compress, decompress };
11
9
  export { ModifyCtx }; // TODO move this somewhere
@@ -51,66 +49,7 @@ export class BasedDb {
51
49
  });
52
50
  const client = new DbClient({
53
51
  maxModifySize,
54
- hooks: {
55
- subscribe(q, onData, onError) {
56
- let killed = false;
57
- let timer;
58
- let prevChecksum;
59
- let lastLen = 0;
60
- let response;
61
- const get = async () => {
62
- const schemaVersion = q.def.schemaChecksum;
63
- if (schemaVersion && schemaVersion !== server.schema.hash) {
64
- q.reBuildQuery();
65
- registerQuery(q);
66
- response = undefined;
67
- timer = setTimeout(get, 0);
68
- return;
69
- }
70
- if (killed) {
71
- return;
72
- }
73
- const res = await server.getQueryBuf(q.buffer);
74
- if (killed) {
75
- return;
76
- }
77
- if (!response) {
78
- response = new BasedQueryResponse(q.id, q.def, res, 0);
79
- }
80
- else {
81
- response.result = res;
82
- response.end = res.byteLength;
83
- }
84
- const checksum = response.checksum;
85
- if (lastLen != res.byteLength || checksum != prevChecksum) {
86
- onData(response);
87
- lastLen = res.byteLength;
88
- prevChecksum = checksum;
89
- }
90
- timer = setTimeout(get, 200);
91
- };
92
- get();
93
- return () => {
94
- killed = true;
95
- clearTimeout(timer);
96
- };
97
- },
98
- setSchema(schema, fromStart) {
99
- return Promise.resolve(server.setSchema(schema, fromStart));
100
- },
101
- flushModify(buf) {
102
- const d = performance.now();
103
- const offsets = server.modify(buf);
104
- const dbWriteTime = performance.now() - d;
105
- return Promise.resolve({
106
- offsets,
107
- dbWriteTime,
108
- });
109
- },
110
- getQueryBuf(buf) {
111
- return Promise.resolve(server.getQueryBuf(buf));
112
- },
113
- },
52
+ hooks: getDefaultHooks(server),
114
53
  });
115
54
  this.server = server;
116
55
  this.client = client;
@@ -1,6 +1,21 @@
1
1
  import { DbServer } from './server/index.js';
2
+ import { BasedDbQuery } from './client/query/BasedDbQuery.js';
3
+ import { OnError } from './client/query/subscription/types.js';
4
+ import { StrictSchema } from '@based/schema';
2
5
  export declare const DECODER: TextDecoder;
3
6
  export declare const ENCODER: TextEncoder;
4
7
  export declare const debugMode: (target: any, getInfo?: any) => void;
5
8
  export declare const debugServer: (server: DbServer) => void;
6
9
  export declare const schemaLooseEqual: (a: any, b: any, key?: string) => boolean;
10
+ export declare const getDefaultHooks: (server: DbServer, subInterval?: number) => {
11
+ subscribe(q: BasedDbQuery, onData: (res: Uint8Array) => void, onError: OnError): () => void;
12
+ setSchema(schema: StrictSchema, fromStart: boolean): Promise<StrictSchema & {
13
+ lastId: number;
14
+ hash?: number;
15
+ }>;
16
+ flushModify(buf: Uint8Array): Promise<{
17
+ offsets: Record<number, number>;
18
+ dbWriteTime: number;
19
+ }>;
20
+ getQueryBuf(buf: Uint8Array): Promise<Uint8Array>;
21
+ };
package/dist/src/utils.js CHANGED
@@ -90,4 +90,50 @@ export const schemaLooseEqual = (a, b, key) => {
90
90
  }
91
91
  return true;
92
92
  };
93
+ export const getDefaultHooks = (server, subInterval = 200) => {
94
+ return {
95
+ subscribe(q, onData, onError) {
96
+ let timer;
97
+ let killed = false;
98
+ const poll = async () => {
99
+ const res = await server.getQueryBuf(q.buffer);
100
+ if (killed) {
101
+ return;
102
+ }
103
+ if (res.byteLength >= 8) {
104
+ onData(res);
105
+ }
106
+ else if (res.byteLength === 1 && res[0] === 0) {
107
+ console.info('schema mismatch, should resolve after update');
108
+ // ignore update and stop polling
109
+ return;
110
+ }
111
+ else {
112
+ onError(new Error('unexpected error'));
113
+ }
114
+ timer = setTimeout(poll, subInterval);
115
+ };
116
+ poll();
117
+ return () => {
118
+ clearTimeout(timer);
119
+ killed = true;
120
+ };
121
+ },
122
+ setSchema(schema, fromStart) {
123
+ return Promise.resolve(server.setSchema(schema, fromStart));
124
+ },
125
+ flushModify(buf) {
126
+ const d = performance.now();
127
+ const offsets = server.modify(buf);
128
+ const dbWriteTime = performance.now() - d;
129
+ return Promise.resolve({
130
+ offsets,
131
+ dbWriteTime,
132
+ });
133
+ },
134
+ getQueryBuf(buf) {
135
+ return Promise.resolve(server.getQueryBuf(buf));
136
+ },
137
+ };
138
+ };
93
139
  //# sourceMappingURL=utils.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@based/db",
3
- "version": "0.0.45",
3
+ "version": "0.0.47",
4
4
  "license": "MIT",
5
5
  "type": "module",
6
6
  "main": "./dist/src/index.js",