@based/db 0.0.25 → 0.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dist/lib/darwin_aarch64/include/selva/db.h +37 -3
  2. package/dist/lib/darwin_aarch64/include/selva/fields.h +3 -3
  3. package/dist/lib/darwin_aarch64/libdeflate.dylib +0 -0
  4. package/dist/lib/darwin_aarch64/libjemalloc_selva.2.dylib +0 -0
  5. package/dist/lib/darwin_aarch64/libnode-v20.node +0 -0
  6. package/dist/lib/darwin_aarch64/libnode-v21.node +0 -0
  7. package/dist/lib/darwin_aarch64/libnode-v22.node +0 -0
  8. package/dist/lib/darwin_aarch64/libnode-v23.node +0 -0
  9. package/dist/lib/darwin_aarch64/libselva.dylib +0 -0
  10. package/dist/lib/linux_aarch64/include/selva/db.h +37 -3
  11. package/dist/lib/linux_aarch64/include/selva/fields.h +3 -3
  12. package/dist/lib/linux_aarch64/libnode-v20.node +0 -0
  13. package/dist/lib/linux_aarch64/libnode-v21.node +0 -0
  14. package/dist/lib/linux_aarch64/libnode-v22.node +0 -0
  15. package/dist/lib/linux_aarch64/libnode-v23.node +0 -0
  16. package/dist/lib/linux_aarch64/libselva.so +0 -0
  17. package/dist/lib/linux_x86_64/include/selva/db.h +37 -3
  18. package/dist/lib/linux_x86_64/include/selva/fields.h +3 -3
  19. package/dist/lib/linux_x86_64/libnode-v20.node +0 -0
  20. package/dist/lib/linux_x86_64/libnode-v21.node +0 -0
  21. package/dist/lib/linux_x86_64/libnode-v22.node +0 -0
  22. package/dist/lib/linux_x86_64/libnode-v23.node +0 -0
  23. package/dist/lib/linux_x86_64/libselva.so +0 -0
  24. package/dist/src/client/flushModify.js +5 -1
  25. package/dist/src/client/index.d.ts +12 -6
  26. package/dist/src/client/index.js +33 -1
  27. package/dist/src/client/modify/create.js +17 -1
  28. package/dist/src/client/modify/fixed.js +52 -3
  29. package/dist/src/client/modify/modify.js +3 -1
  30. package/dist/src/client/modify/references/edge.js +3 -1
  31. package/dist/src/client/modify/upsert.js +1 -1
  32. package/dist/src/client/operations.d.ts +32 -0
  33. package/dist/src/client/operations.js +137 -0
  34. package/dist/src/client/query/BasedDbQuery.js +3 -0
  35. package/dist/src/client/query/BasedIterable.js +10 -3
  36. package/dist/src/client/query/aggregation.d.ts +3 -0
  37. package/dist/src/client/query/aggregation.js +9 -0
  38. package/dist/src/client/query/display.js +12 -2
  39. package/dist/src/client/query/filter/toBuffer.js +2 -2
  40. package/dist/src/client/query/query.d.ts +1 -1
  41. package/dist/src/client/query/query.js +1 -1
  42. package/dist/src/client/query/queryDef.js +0 -1
  43. package/dist/src/client/query/read/read.js +9 -4
  44. package/dist/src/client/query/toBuffer.js +2 -2
  45. package/dist/src/client/query/types.d.ts +4 -3
  46. package/dist/src/client/query/validation.js +5 -1
  47. package/dist/src/client/string.js +1 -1
  48. package/dist/src/index.d.ts +4 -1
  49. package/dist/src/index.js +11 -2
  50. package/dist/src/native.d.ts +1 -1
  51. package/dist/src/native.js +2 -2
  52. package/dist/src/server/csmt/tree.js +2 -2
  53. package/dist/src/server/index.d.ts +1 -1
  54. package/dist/src/server/index.js +27 -2
  55. package/dist/src/server/save.js +19 -11
  56. package/dist/src/server/start.js +0 -2
  57. package/dist/src/utils.d.ts +6 -0
  58. package/dist/src/utils.js +81 -9
  59. package/package.json +4 -4
@@ -0,0 +1,137 @@
1
+ import { makeCsmtKeyFromNodeId } from './tree.js';
2
+ export class ModifyCtx {
3
+ constructor(db) {
4
+ this.max = db.maxModifySize;
5
+ this.db = db;
6
+ this.buf = new Uint8Array(db.maxModifySize);
7
+ }
8
+ // default values
9
+ len = 0;
10
+ id = -1;
11
+ hasSortField = -1;
12
+ hasSortText = -1;
13
+ queue = new Map();
14
+ ctx = {}; // maybe make this different?
15
+ payload;
16
+ max;
17
+ buf;
18
+ field;
19
+ prefix0 = -1;
20
+ prefix1 = -1;
21
+ lastMain = -1;
22
+ mergeMain;
23
+ mergeMainSize;
24
+ db;
25
+ dirtyRanges = new Set();
26
+ dirtyTypes = new Map();
27
+ markNodeDirty(schema, nodeId) {
28
+ const key = makeCsmtKeyFromNodeId(schema.id, schema.blockCapacity, nodeId);
29
+ if (this.dirtyRanges.has(key)) {
30
+ return;
31
+ }
32
+ this.dirtyRanges.add(key);
33
+ this.updateMax();
34
+ }
35
+ markTypeDirty(schema) {
36
+ if (this.dirtyTypes.has(schema.id)) {
37
+ return;
38
+ }
39
+ this.dirtyTypes.set(schema.id, schema.lastId);
40
+ this.updateMax();
41
+ }
42
+ updateMax() {
43
+ // reserve space in the end of the buf [...data, type (16), lastId (32), typesSize (16), ...ranges (64)[], dataLen (32)]
44
+ this.max =
45
+ this.db.maxModifySize -
46
+ 4 -
47
+ 2 -
48
+ this.dirtyTypes.size * 10 -
49
+ this.dirtyRanges.size * 8;
50
+ }
51
+ getData(lastIds) {
52
+ const rangesSize = this.dirtyRanges.size;
53
+ const typesSize = this.dirtyTypes.size;
54
+ const data = this.buf.subarray(0, this.len + 4 + 2 + typesSize * 10 + rangesSize * 8);
55
+ let i = this.len;
56
+ data[i] = typesSize;
57
+ data[i + 1] = typesSize >>> 8;
58
+ i += 2;
59
+ for (const [id, startId] of this.dirtyTypes) {
60
+ const lastId = this.db.schemaTypesParsedById[id].lastId;
61
+ lastIds[id] = lastId;
62
+ data[i] = id;
63
+ data[i + 1] = id >>> 8;
64
+ i += 2;
65
+ data[i++] = startId;
66
+ data[i++] = startId >>> 8;
67
+ data[i++] = startId >>> 16;
68
+ data[i++] = startId >>> 24;
69
+ data[i++] = lastId;
70
+ data[i++] = lastId >>> 8;
71
+ data[i++] = lastId >>> 16;
72
+ data[i++] = lastId >>> 24;
73
+ }
74
+ const view = new DataView(data.buffer, data.byteOffset);
75
+ for (let key of this.dirtyRanges) {
76
+ view.setFloat64(i, key, true);
77
+ i += 8;
78
+ }
79
+ data[i++] = this.len;
80
+ data[i++] = this.len >>> 8;
81
+ data[i++] = this.len >>> 16;
82
+ data[i++] = this.len >>> 24;
83
+ return data;
84
+ }
85
+ }
86
+ export const flushBuffer = (db) => {
87
+ const ctx = db.modifyCtx;
88
+ let flushPromise;
89
+ if (ctx.len) {
90
+ const d = Date.now();
91
+ const lastIds = {};
92
+ const data = ctx.getData(lastIds);
93
+ const resCtx = ctx.ctx;
94
+ const queue = ctx.queue;
95
+ flushPromise = db.hooks.flushModify(data).then(({ offsets }) => {
96
+ resCtx.offsets = offsets;
97
+ for (const typeId in lastIds) {
98
+ if (typeId in offsets) {
99
+ const lastId = lastIds[typeId] + offsets[typeId];
100
+ const def = db.schemaTypesParsedById[typeId];
101
+ const delta = lastId - def.lastId;
102
+ if (delta > 0) {
103
+ def.lastId += delta;
104
+ def.total += delta;
105
+ }
106
+ }
107
+ else {
108
+ console.warn('no offset returned, very wrong');
109
+ }
110
+ }
111
+ db.writeTime += Date.now() - d;
112
+ if (queue.size) {
113
+ flushPromise.then(() => {
114
+ for (const [resolve, res] of queue) {
115
+ resolve(res.getId(offsets));
116
+ }
117
+ });
118
+ }
119
+ });
120
+ ctx.dirtyTypes.clear();
121
+ ctx.dirtyRanges.clear();
122
+ ctx.len = 0;
123
+ ctx.prefix0 = -1;
124
+ ctx.prefix1 = -1;
125
+ ctx.max = db.maxModifySize;
126
+ ctx.ctx = {};
127
+ }
128
+ db.isDraining = false;
129
+ return flushPromise;
130
+ };
131
+ export const startDrain = (db) => {
132
+ db.isDraining = true;
133
+ process.nextTick(() => {
134
+ flushBuffer(db);
135
+ });
136
+ };
137
+ //# sourceMappingURL=operations.js.map
@@ -208,6 +208,9 @@ export class BasedDbQuery extends QueryBranch {
208
208
  }
209
209
  }
210
210
  }
211
+ if (!db.schemaIsSetValue) {
212
+ throw new Error('Query: No schema yet - use await db.schemaIsSet()');
213
+ }
211
214
  const def = createQueryDef(db, QueryDefType.Root, target, skipValidation);
212
215
  super(db, def);
213
216
  }
@@ -2,7 +2,7 @@ import { inspect } from 'node:util';
2
2
  import picocolors from 'picocolors';
3
3
  import { debug, resultToObject, readAllFields } from './query.js';
4
4
  import { size, time, inspectData, defHasId, displayTarget } from './display.js';
5
- import { readFloatLE, readUint32 } from '../bitWise.js';
5
+ import { readFloatLE, readUint32 } from '../../utils.js';
6
6
  export { time, size, inspectData };
7
7
  export class BasedQueryResponse {
8
8
  result;
@@ -60,8 +60,9 @@ export class BasedQueryResponse {
60
60
  let id = readUint32(result, i);
61
61
  i += 4;
62
62
  let item;
63
- if (this.def.aggregation == -999 /* AggFn.NONE */) {
63
+ if (this.def.aggregation == 255 /* AggFlag.TEMP */) {
64
64
  item = {};
65
+ this.def.aggregation = 4 /* AggFlag.COUNT */;
65
66
  }
66
67
  else {
67
68
  item = {
@@ -101,7 +102,13 @@ export class BasedQueryResponse {
101
102
  return readUint32(result, offset);
102
103
  }
103
104
  get length() {
104
- return readUint32(this.result, 0);
105
+ const l = readUint32(this.result, 0);
106
+ if (this.def.aggregation != 0 /* AggFlag.NONE */ && this.def.aggregation != null) {
107
+ return l + 1;
108
+ }
109
+ else {
110
+ return l;
111
+ }
105
112
  }
106
113
  toObject() {
107
114
  return resultToObject(this.def, this.result, this.end - 4, 0);
@@ -0,0 +1,3 @@
1
+ import { QueryDef, AggFlag } from './types.js';
2
+ export declare const createAggFlagBuffer: (aggregation: AggFlag) => Uint8Array;
3
+ export declare const count: (def: QueryDef) => void;
@@ -0,0 +1,9 @@
1
+ export const createAggFlagBuffer = (aggregation) => {
2
+ const buf = new Uint8Array(1);
3
+ buf[0] = aggregation;
4
+ return buf;
5
+ };
6
+ export const count = (def) => {
7
+ def.aggregation = 4 /* AggFlag.COUNT */;
8
+ };
9
+ //# sourceMappingURL=aggregation.js.map
@@ -119,7 +119,8 @@ const inspectObject = (object, q, path, level, isLast, isFirst, isObject, depth)
119
119
  let edges = [];
120
120
  for (const k in object) {
121
121
  const key = path ? path + '.' + k : k;
122
- let def = q.props[key];
122
+ let def;
123
+ def = q.props[key];
123
124
  let v = object[k];
124
125
  const isEdge = k[0] === '$';
125
126
  if (k === '$searchScore') {
@@ -144,7 +145,16 @@ const inspectObject = (object, q, path, level, isLast, isFirst, isObject, depth)
144
145
  str += ',\n';
145
146
  }
146
147
  else if (!def) {
147
- str += inspectObject(v, q, key, level + 2, false, false, true, depth) + '';
148
+ if (Object.keys(object)[0] == 'count') {
149
+ // TODO: to flag the agg someway. This is ugly as hell!!!
150
+ str += picocolors.blue(v);
151
+ str += picocolors.italic(picocolors.dim(' count'));
152
+ str += ',\n';
153
+ }
154
+ else {
155
+ str +=
156
+ inspectObject(v, q, key, level + 2, false, false, true, depth) + '';
157
+ }
148
158
  }
149
159
  else if ('__isPropDef' in def) {
150
160
  if (def.typeIndex === REFERENCES) {
@@ -33,7 +33,7 @@ const writeConditions = (result, k, offset, conditions) => {
33
33
  export const fillConditionsBuffer = (result, conditions, offset) => {
34
34
  let lastWritten = offset;
35
35
  let orJumpIndex = 0;
36
- if (conditions.or) {
36
+ if (conditions.or && conditions.or.size != 0) {
37
37
  result[lastWritten] = META_OR_BRANCH;
38
38
  lastWritten++;
39
39
  orJumpIndex = lastWritten;
@@ -72,7 +72,7 @@ export const fillConditionsBuffer = (result, conditions, offset) => {
72
72
  result[sizeIndex + 1] = size >>> 8;
73
73
  });
74
74
  }
75
- if (conditions.or) {
75
+ if (conditions.or && conditions.or.size != 0) {
76
76
  const size = fillConditionsBuffer(result, conditions.or, lastWritten);
77
77
  result[orJumpIndex] = size;
78
78
  result[orJumpIndex + 1] = size >>> 8;
@@ -8,4 +8,4 @@ export * from './filter/toBuffer.js';
8
8
  export * from './sort.js';
9
9
  export * from './debug.js';
10
10
  export * from './read/read.js';
11
- export * from './aggregationFn.js';
11
+ export * from './aggregation.js';
@@ -8,5 +8,5 @@ export * from './filter/toBuffer.js';
8
8
  export * from './sort.js';
9
9
  export * from './debug.js';
10
10
  export * from './read/read.js';
11
- export * from './aggregationFn.js';
11
+ export * from './aggregation.js';
12
12
  //# sourceMappingURL=query.js.map
@@ -31,7 +31,6 @@ export const createQueryDef = (db, type, target, skipValidation) => {
31
31
  const t = target;
32
32
  const q = queryDef;
33
33
  q.props = t.ref.edges;
34
- // q.reverseProps = t.ref.reverseEdges
35
34
  q.type = type;
36
35
  q.target = t;
37
36
  return q;
@@ -1,7 +1,7 @@
1
1
  import { ALIAS, ALIASES, BINARY, BOOLEAN, ENUM, INT16, INT32, INT8, NUMBER, STRING, TEXT, TIMESTAMP, UINT16, UINT32, UINT8, VECTOR, JSON, CARDINALITY, } from '@based/schema/def';
2
2
  import { QueryDefType } from '../types.js';
3
3
  import { read, readUtf8 } from '../../string.js';
4
- import { readDoubleLE, readFloatLE, readInt16, readInt32, readUint16, readUint32, } from '../../bitWise.js';
4
+ import { readDoubleLE, readFloatLE, readInt16, readInt32, readUint16, readUint32, } from '../../../utils.js';
5
5
  import { inverseLangMap } from '@based/schema';
6
6
  import { READ_EDGE, READ_ID, READ_REFERENCE, READ_REFERENCES, READ_AGGREGATION, CREATE_AGGREGATION, } from '../types.js';
7
7
  const addField = (p, value, item, defaultOnly = false, lang = 0) => {
@@ -132,7 +132,7 @@ const readMain = (q, result, offset, item) => {
132
132
  return i - offset;
133
133
  };
134
134
  const handleUndefinedProps = (id, q, item) => {
135
- if (q.aggregation != -999 /* AggFn.NONE */) {
135
+ if (q.aggregation == 0 /* AggFlag.NONE */ || q.aggregation == null) {
136
136
  for (const k in q.include.propsRead) {
137
137
  if (q.include.propsRead[k] !== id) {
138
138
  // Only relvant for seperate props
@@ -291,7 +291,8 @@ export const readAllFields = (q, result, offset, end, item, id) => {
291
291
  else if (index === CREATE_AGGREGATION) {
292
292
  i--;
293
293
  result[i] = READ_AGGREGATION;
294
- q.aggregation = -999 /* AggFn.NONE */;
294
+ result[0] = result[0] + 1;
295
+ q.aggregation = 255 /* AggFlag.TEMP */;
295
296
  return i - offset - 4 - (q.search ? 4 : 0);
296
297
  }
297
298
  else if (index === READ_AGGREGATION) {
@@ -303,6 +304,9 @@ export const readAllFields = (q, result, offset, end, item, id) => {
303
304
  };
304
305
  const size = readUint32(result, i);
305
306
  addField(propAgg, readUint32(result, i + 4), item);
307
+ result[0] = result[0] - 1;
308
+ i--;
309
+ result[i] = CREATE_AGGREGATION;
306
310
  i += 4 + size + 4;
307
311
  }
308
312
  else if (index === 0) {
@@ -397,8 +401,9 @@ export const resultToObject = (q, result, end, offset = 0) => {
397
401
  const id = readUint32(result, i);
398
402
  i += 4;
399
403
  let item;
400
- if (q.aggregation == -999 /* AggFn.NONE */) {
404
+ if (q.aggregation == 255 /* AggFlag.TEMP */) {
401
405
  item = {};
406
+ q.aggregation = 4 /* AggFlag.COUNT */;
402
407
  }
403
408
  else {
404
409
  item = {
@@ -3,7 +3,7 @@ import { QueryDefType } from './types.js';
3
3
  import { includeToBuffer } from './include/toBuffer.js';
4
4
  import { filterToBuffer } from './query.js';
5
5
  import { searchToBuffer } from './search/index.js';
6
- import { createAggFnBuffer } from './aggregationFn.js';
6
+ import { createAggFlagBuffer } from './aggregation.js';
7
7
  import { ENCODER } from '../../utils.js';
8
8
  const byteSize = (arr) => {
9
9
  return arr.reduce((a, b) => {
@@ -95,7 +95,7 @@ export function defToBuffer(db, def) {
95
95
  sortSize = sort.byteLength;
96
96
  }
97
97
  let aggregation;
98
- aggregation = createAggFnBuffer(def.aggregation);
98
+ aggregation = createAggFlagBuffer(def.aggregation);
99
99
  if (def.target.ids) {
100
100
  // type 1
101
101
  // 1: 4 + ids * 4 [ids len] [id,id,id]
@@ -92,7 +92,7 @@ export type QueryDefShared = {
92
92
  };
93
93
  references: Map<number, QueryDef>;
94
94
  edges?: QueryDef;
95
- aggregation: AggFn;
95
+ aggregation: AggFlag;
96
96
  };
97
97
  export type QueryDefEdges = {
98
98
  type: QueryDefType.Edge;
@@ -120,7 +120,8 @@ export declare const READ_REFERENCES = 253;
120
120
  export declare const READ_REFERENCE = 254;
121
121
  export declare const CREATE_AGGREGATION = 250;
122
122
  export declare const READ_AGGREGATION = 251;
123
- export declare const enum AggFn {
123
+ export declare const enum AggFlag {
124
+ NONE = 0,
124
125
  AVG = 1,
125
126
  CARDINALITY = 2,
126
127
  CONCAT = 3,// string aggregation, delimiter should be an argument
@@ -133,5 +134,5 @@ export declare const enum AggFn {
133
134
  STDDEV = 10,// population or sample should be optional parameters, default = sample
134
135
  SUM = 11,
135
136
  VARIANCE = 12,
136
- NONE = -999
137
+ TEMP = 255
137
138
  }
@@ -1,5 +1,5 @@
1
1
  import picocolors from 'picocolors';
2
- import { ALIAS, BINARY, BOOLEAN, REFERENCE, REFERENCES, REVERSE_TYPE_INDEX_MAP, STRING, TEXT, TIMESTAMP, VECTOR, propIsNumerical, createEmptyDef, } from '@based/schema/def';
2
+ import { ALIAS, BINARY, BOOLEAN, REFERENCE, REFERENCES, REVERSE_TYPE_INDEX_MAP, STRING, TEXT, TIMESTAMP, VECTOR, propIsNumerical, createEmptyDef, DEFAULT_MAP, } from '@based/schema/def';
3
3
  import { EQUAL, EXISTS, HAS, isNumerical, LIKE, operatorReverseMap, VECTOR_FNS, } from './filter/types.js';
4
4
  import { MAX_ID, MAX_ID_VALUE, MAX_IDS_PER_QUERY, MIN_ID_VALUE, } from './thresholds.js';
5
5
  import { displayTarget, safeStringify } from './display.js';
@@ -420,6 +420,7 @@ export const EMPTY_ALIAS_PROP_DEF = {
420
420
  separate: true,
421
421
  len: 0,
422
422
  start: 0,
423
+ default: DEFAULT_MAP[ALIAS],
423
424
  path: ['ERROR_ALIAS'],
424
425
  };
425
426
  export const ERROR_STRING = {
@@ -429,6 +430,7 @@ export const ERROR_STRING = {
429
430
  separate: true,
430
431
  len: 0,
431
432
  start: 0,
433
+ default: DEFAULT_MAP[STRING],
432
434
  path: ['ERROR_STRING'],
433
435
  };
434
436
  export const ERROR_VECTOR = {
@@ -438,6 +440,7 @@ export const ERROR_VECTOR = {
438
440
  separate: true,
439
441
  len: 0,
440
442
  start: 0,
443
+ default: DEFAULT_MAP[VECTOR],
441
444
  path: ['ERROR_VECTOR'],
442
445
  };
443
446
  export const EMPTY_SCHEMA_DEF = {
@@ -446,5 +449,6 @@ export const EMPTY_SCHEMA_DEF = {
446
449
  propNames: new Uint8Array([]),
447
450
  packed: new Uint8Array([]),
448
451
  idUint8: new Uint8Array([0, 0]),
452
+ mainEmptyAllZeroes: true,
449
453
  };
450
454
  //# sourceMappingURL=validation.js.map
@@ -1,5 +1,5 @@
1
1
  import native from '../native.js';
2
- import { readUint32 } from './bitWise.js';
2
+ import { readUint32 } from './../utils.js';
3
3
  import makeTmpBuffer from './tmpBuffer.js';
4
4
  import { DECODER, ENCODER } from '../utils.js';
5
5
  const { getUint8Array: getTmpBuffer } = makeTmpBuffer(4096); // the usual page size?
@@ -35,7 +35,10 @@ export declare class BasedDb {
35
35
  stop: DbServer['stop'];
36
36
  save: DbServer['save'];
37
37
  migrateSchema: DbServer['migrateSchema'];
38
- isReady: DbClient['isModified'];
38
+ isModified: DbClient['isModified'];
39
+ schemaIsSet: DbClient['schemaIsSet'];
39
40
  destroy(): Promise<void>;
40
41
  wipe(): Promise<void>;
42
+ on: DbClient['on'];
43
+ off: DbClient['off'];
41
44
  }
package/dist/src/index.js CHANGED
@@ -112,8 +112,11 @@ export class BasedDb {
112
112
  migrateSchema = function () {
113
113
  return this.server.migrateSchema.apply(this.server, arguments);
114
114
  };
115
- isReady = function () {
116
- return this.client.isReady.apply(this.client, arguments);
115
+ isModified = function () {
116
+ return this.client.isModified.apply(this.client, arguments);
117
+ };
118
+ schemaIsSet = function () {
119
+ return this.client.schemaIsSet.apply(this.client, arguments);
117
120
  };
118
121
  async destroy() {
119
122
  // Tmp fix: Gives node time to GC existing buffers else it can incorrectly re-asign to mem
@@ -131,5 +134,11 @@ export class BasedDb {
131
134
  this.#init(opts);
132
135
  await this.start({ clean: true });
133
136
  }
137
+ on = function () {
138
+ return this.client.on.apply(this.client, arguments);
139
+ };
140
+ off = function () {
141
+ return this.client.on.apply(this.client, arguments);
142
+ };
134
143
  }
135
144
  //# sourceMappingURL=index.js.map
@@ -4,7 +4,7 @@ declare const _default: {
4
4
  workerCtxInit: () => void;
5
5
  externalFromInt(address: BigInt): any;
6
6
  intFromExternal(external: any): BigInt;
7
- modify: (data: Uint8Array, types: Uint8Array, dbCtx: any) => any;
7
+ modify: (data: Uint8Array, types: Uint8Array, dbCtx: any, dirtyBlocksOut: Float64Array) => any;
8
8
  getQueryBuf: (q: Uint8Array, dbCtx: any) => ArrayBuffer | null;
9
9
  start: (id: number) => any;
10
10
  stop: (dbCtx: any) => any;
@@ -28,8 +28,8 @@ export default {
28
28
  intFromExternal(external) {
29
29
  return db.intFromExternal(external);
30
30
  },
31
- modify: (data, types, dbCtx) => {
32
- db.modify(data, types, dbCtx);
31
+ modify: (data, types, dbCtx, dirtyBlocksOut) => {
32
+ db.modify(data, types, dbCtx, dirtyBlocksOut);
33
33
  },
34
34
  getQueryBuf: (q, dbCtx) => {
35
35
  const x = db.getQueryBuf(dbCtx, q);
@@ -199,8 +199,8 @@ export function createTree(createHash) {
199
199
  return {
200
200
  getRoot: () => root,
201
201
  insert: (k, h, data = null) => {
202
- if (!(h instanceof Uint8Array)) { // TODO can we extract the name somehow from Hash?
203
- throw new TypeError('`h` must be a Uint8Array'); // TODO can we extract the name somehow from Hash?
202
+ if (!(h instanceof Uint8Array)) {
203
+ throw new TypeError('`h` must be a Uint8Array');
204
204
  }
205
205
  const newLeaf = createLeaf(k, h, data);
206
206
  root = root ? insert(root, newLeaf) : newLeaf;
@@ -25,7 +25,7 @@ export declare class DbWorker {
25
25
  type OnSchemaChange = (schema: StrictSchema) => void;
26
26
  export declare class DbServer {
27
27
  #private;
28
- modifyBuf: SharedArrayBuffer;
28
+ modifyDirtyRanges: Float64Array;
29
29
  dbCtxExternal: any;
30
30
  schema: StrictSchema & {
31
31
  lastId: number;
@@ -46,6 +46,9 @@ export class DbWorker {
46
46
  });
47
47
  port1.on('message', (buf) => {
48
48
  // TODO FIX TYPES CHECK IF THIS MAKES A COPY
49
+ // It's a copy, if you don't want a copy you'd need to make it an explicit view
50
+ // to the underlying buffer:
51
+ // new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength)
49
52
  this.resolvers.shift()(new Uint8Array(buf));
50
53
  this.db.onQueryEnd();
51
54
  });
@@ -68,7 +71,7 @@ export class DbWorker {
68
71
  }
69
72
  }
70
73
  export class DbServer {
71
- modifyBuf;
74
+ modifyDirtyRanges;
72
75
  dbCtxExternal; // pointer to zig dbCtx
73
76
  schema = {
74
77
  lastId: 1, // we reserve one for root props
@@ -96,6 +99,21 @@ export class DbServer {
96
99
  this.sortIndexes = {};
97
100
  this.onSchemaChange = onSchemaChange;
98
101
  }
102
+ #resizeModifyDirtyRanges() {
103
+ let maxNrChanges = 0;
104
+ for (const typeId in this.schemaTypesParsedById) {
105
+ const def = this.schemaTypesParsedById[typeId];
106
+ const lastId = def.lastId;
107
+ const blockCapacity = def.blockCapacity;
108
+ const tmp = lastId - +!(lastId % def.blockCapacity);
109
+ const lastBlock = Math.ceil((((tmp / blockCapacity) | 0) * blockCapacity + 1) / blockCapacity);
110
+ maxNrChanges += lastBlock;
111
+ }
112
+ if (!this.modifyDirtyRanges || this.modifyDirtyRanges.length < maxNrChanges) {
113
+ const min = Math.max(maxNrChanges * 1.2, 1024) | 0;
114
+ this.modifyDirtyRanges = new Float64Array(min);
115
+ }
116
+ }
99
117
  start(opts) {
100
118
  return start(this, opts);
101
119
  }
@@ -421,7 +439,14 @@ export class DbServer {
421
439
  this.dirtyRanges.add(key);
422
440
  i += 8;
423
441
  }
424
- native.modify(data, types, this.dbCtxExternal);
442
+ this.#resizeModifyDirtyRanges();
443
+ native.modify(data, types, this.dbCtxExternal, this.modifyDirtyRanges);
444
+ for (let key of this.modifyDirtyRanges) {
445
+ if (key === 0) {
446
+ break;
447
+ }
448
+ this.dirtyRanges.add(key);
449
+ }
425
450
  }
426
451
  getQueryBuf(buf) {
427
452
  if (this.modifyQueue.length) {
@@ -9,6 +9,17 @@ import { bufToHex } from '../utils.js';
9
9
  import { createTree } from './csmt/tree.js';
10
10
  const COMMON_SDB_FILE = 'common.sdb';
11
11
  const block_sdb_file = (typeId, start, end) => `${typeId}_${start}_${end}.sdb`;
12
+ function saveRange(db, typeId, start, end, hashOut) {
13
+ const file = block_sdb_file(typeId, start, end);
14
+ const path = join(db.fileSystemPath, file);
15
+ const err = native.saveRange(path, typeId, start, end, db.dbCtxExternal, hashOut);
16
+ if (err) {
17
+ // TODO print the error string
18
+ console.error(`Save ${typeId}:${start}-${end} failed: ${err}`);
19
+ return null;
20
+ }
21
+ return file;
22
+ }
12
23
  export function save(db, sync = false, forceFullDump = false) {
13
24
  if (!(isMainThread && (db.dirtyRanges.size || forceFullDump))) {
14
25
  return;
@@ -26,12 +37,12 @@ export function save(db, sync = false, forceFullDump = false) {
26
37
  const def = db.schemaTypesParsed[key];
27
38
  foreachBlock(db, def, (start, end, _hash) => {
28
39
  const typeId = def.id;
29
- const file = block_sdb_file(typeId, start, end);
30
- const hash = new Uint8Array(16); // TODO One is unnecessary, probably the arg of this cb
31
- err = native.saveRange(join(db.fileSystemPath, file), typeId, start, end, db.dbCtxExternal, hash);
32
- if (err) {
33
- console.error(`Save ${typeId}:${start}-${end} failed: ${err}`);
34
- return; // TODO What to do with the merkle tree in db situation?
40
+ const hash = new Uint8Array(16);
41
+ const file = saveRange(db, typeId, start, end, hash);
42
+ if (!file) {
43
+ // The previous state should remain in the merkle tree for
44
+ // load and sync purposes.
45
+ return;
35
46
  }
36
47
  const mtKey = makeCsmtKey(typeId, start);
37
48
  const data = {
@@ -46,12 +57,9 @@ export function save(db, sync = false, forceFullDump = false) {
46
57
  }
47
58
  else {
48
59
  foreachDirtyBlock(db, (mtKey, typeId, start, end) => {
49
- const file = block_sdb_file(typeId, start, end);
50
- const path = join(db.fileSystemPath, file);
51
60
  const hash = new Uint8Array(16);
52
- err = native.saveRange(path, typeId, start, end, db.dbCtxExternal, hash);
53
- if (err) {
54
- console.error(`Save ${typeId}:${start}-${end} failed: ${err}`);
61
+ const file = saveRange(db, typeId, start, end, hash);
62
+ if (!file) {
55
63
  // The previous state should remain in the merkle tree for
56
64
  // load and sync purposes.
57
65
  return;
@@ -19,8 +19,6 @@ export async function start(db, opts) {
19
19
  await rm(path, { recursive: true, force: true }).catch(noop);
20
20
  }
21
21
  await mkdir(path, { recursive: true }).catch(noop);
22
- // not doing db yet
23
- // db.modifyBuf = new SharedArrayBuffer(db.maxModifySize)
24
22
  db.dbCtxExternal = native.start(id);
25
23
  let writelog = null;
26
24
  try {
@@ -5,3 +5,9 @@ export declare function concatUint8Arr(bufs: Uint8Array[], totalByteLength?: num
5
5
  export declare const bufToHex: (a: Uint8Array) => string;
6
6
  export declare const hexToBuf: (s: string) => Uint8Array;
7
7
  export declare const base64encode: (a: Uint8Array, lineMax?: number) => string;
8
+ export declare const readDoubleLE: (val: Uint8Array, offset: number) => number;
9
+ export declare const readFloatLE: (val: Uint8Array, offset: number) => number;
10
+ export declare const readUint32: (val: Uint8Array, offset: number) => number;
11
+ export declare const readInt32: (val: Uint8Array, offset: number) => number;
12
+ export declare const readInt16: (val: Uint8Array, offset: number) => number;
13
+ export declare const readUint16: (val: Uint8Array, offset: number) => number;