@based/db 0.0.24 → 0.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dist/lib/darwin_aarch64/include/selva/db.h +37 -3
  2. package/dist/lib/darwin_aarch64/include/selva/fields.h +3 -3
  3. package/dist/lib/darwin_aarch64/include/selva/selva_string.h +2 -2
  4. package/dist/lib/darwin_aarch64/libdeflate.dylib +0 -0
  5. package/dist/lib/darwin_aarch64/libjemalloc_selva.2.dylib +0 -0
  6. package/dist/lib/darwin_aarch64/libnode-v20.node +0 -0
  7. package/dist/lib/darwin_aarch64/libnode-v21.node +0 -0
  8. package/dist/lib/darwin_aarch64/libnode-v22.node +0 -0
  9. package/dist/lib/darwin_aarch64/libnode-v23.node +0 -0
  10. package/dist/lib/darwin_aarch64/libselva.dylib +0 -0
  11. package/dist/lib/linux_aarch64/include/selva/db.h +37 -3
  12. package/dist/lib/linux_aarch64/include/selva/fields.h +3 -3
  13. package/dist/lib/linux_aarch64/include/selva/selva_string.h +2 -2
  14. package/dist/lib/linux_aarch64/libnode-v20.node +0 -0
  15. package/dist/lib/linux_aarch64/libnode-v21.node +0 -0
  16. package/dist/lib/linux_aarch64/libnode-v22.node +0 -0
  17. package/dist/lib/linux_aarch64/libnode-v23.node +0 -0
  18. package/dist/lib/linux_aarch64/libselva.so +0 -0
  19. package/dist/lib/linux_x86_64/include/selva/db.h +37 -3
  20. package/dist/lib/linux_x86_64/include/selva/fields.h +3 -3
  21. package/dist/lib/linux_x86_64/include/selva/selva_string.h +2 -2
  22. package/dist/lib/linux_x86_64/libnode-v20.node +0 -0
  23. package/dist/lib/linux_x86_64/libnode-v21.node +0 -0
  24. package/dist/lib/linux_x86_64/libnode-v22.node +0 -0
  25. package/dist/lib/linux_x86_64/libnode-v23.node +0 -0
  26. package/dist/lib/linux_x86_64/libselva.so +0 -0
  27. package/dist/src/client/flushModify.js +5 -1
  28. package/dist/src/client/index.d.ts +12 -6
  29. package/dist/src/client/index.js +33 -1
  30. package/dist/src/client/modify/create.js +17 -1
  31. package/dist/src/client/modify/fixed.js +53 -4
  32. package/dist/src/client/modify/modify.js +9 -4
  33. package/dist/src/client/modify/references/edge.js +3 -1
  34. package/dist/src/client/modify/upsert.js +1 -1
  35. package/dist/src/client/operations.d.ts +32 -0
  36. package/dist/src/client/operations.js +137 -0
  37. package/dist/src/client/query/BasedDbQuery.js +3 -0
  38. package/dist/src/client/query/BasedIterable.d.ts +1 -1
  39. package/dist/src/client/query/BasedIterable.js +18 -5
  40. package/dist/src/client/query/aggregation.d.ts +3 -0
  41. package/dist/src/client/query/aggregation.js +9 -0
  42. package/dist/src/client/query/display.js +12 -2
  43. package/dist/src/client/query/filter/toBuffer.js +2 -2
  44. package/dist/src/client/query/query.d.ts +1 -1
  45. package/dist/src/client/query/query.js +1 -1
  46. package/dist/src/client/query/queryDef.js +0 -1
  47. package/dist/src/client/query/read/read.js +10 -5
  48. package/dist/src/client/query/toBuffer.js +2 -2
  49. package/dist/src/client/query/types.d.ts +4 -3
  50. package/dist/src/client/query/validation.js +5 -1
  51. package/dist/src/client/string.js +1 -1
  52. package/dist/src/index.d.ts +4 -1
  53. package/dist/src/index.js +11 -2
  54. package/dist/src/native.d.ts +1 -1
  55. package/dist/src/native.js +2 -2
  56. package/dist/src/server/csmt/tree.js +2 -2
  57. package/dist/src/server/index.d.ts +6 -2
  58. package/dist/src/server/index.js +31 -5
  59. package/dist/src/server/save.d.ts +20 -1
  60. package/dist/src/server/save.js +66 -30
  61. package/dist/src/server/start.js +4 -9
  62. package/dist/src/server/tree.d.ts +1 -1
  63. package/dist/src/server/tree.js +1 -1
  64. package/dist/src/utils.d.ts +6 -0
  65. package/dist/src/utils.js +81 -9
  66. package/package.json +4 -4
@@ -75,7 +75,8 @@ export function writeEdges(t, ref, ctx) {
75
75
  if (typeof value !== 'string') {
76
76
  return new ModifyError(edge, value);
77
77
  }
78
- if (ctx.len + EDGE_HEADER_SIZE + ENCODER.encode(value).byteLength > ctx.max) {
78
+ if (ctx.len + EDGE_HEADER_SIZE + ENCODER.encode(value).byteLength >
79
+ ctx.max) {
79
80
  return RANGE_ERR;
80
81
  }
81
82
  ctx.buf[ctx.len++] = STRING;
@@ -271,6 +272,7 @@ export function writeEdges(t, ref, ctx) {
271
272
  // Index of start of fields
272
273
  const sIndex = ctx.len;
273
274
  ctx.len += mainFieldsStartSize;
275
+ // this has to be replaced
274
276
  // Add zeroes
275
277
  ctx.buf.fill(0, ctx.len, ctx.len + t.edgeMainLen);
276
278
  // Keep track of written bytes from append fixed
@@ -2,7 +2,7 @@ import { ALIAS } from '@based/schema/def';
2
2
  export async function upsert(db, type, obj, opts) {
3
3
  const tree = db.schemaTypesParsed[type].tree;
4
4
  let q;
5
- let id = '';
5
+ let id = type;
6
6
  for (const key in obj) {
7
7
  if (tree[key].typeIndex === ALIAS) {
8
8
  id += `${key}:${obj[key]};`;
@@ -0,0 +1,32 @@
1
+ import { PropDef, SchemaTypeDef } from '@based/schema/def';
2
+ import { DbClient } from './index.js';
3
+ import { ModifyState } from './modify/ModifyRes.js';
4
+ export declare class ModifyCtx {
5
+ constructor(db: DbClient);
6
+ len: number;
7
+ id: number;
8
+ hasSortField: number;
9
+ hasSortText: number;
10
+ queue: Map<(payload: any) => void, ModifyState>;
11
+ ctx: {
12
+ offsets?: Record<number, number>;
13
+ };
14
+ payload: Uint8Array;
15
+ max: number;
16
+ buf: Uint8Array;
17
+ field: number;
18
+ prefix0: number;
19
+ prefix1: number;
20
+ lastMain: number;
21
+ mergeMain: (PropDef | any)[] | null;
22
+ mergeMainSize: number;
23
+ db: DbClient;
24
+ dirtyRanges: Set<number>;
25
+ dirtyTypes: Map<number, number>;
26
+ markNodeDirty(schema: SchemaTypeDef, nodeId: number): void;
27
+ markTypeDirty(schema: SchemaTypeDef): void;
28
+ updateMax(): void;
29
+ getData(lastIds: Record<number, number>): Uint8Array;
30
+ }
31
+ export declare const flushBuffer: (db: DbClient) => Promise<void>;
32
+ export declare const startDrain: (db: DbClient) => void;
@@ -0,0 +1,137 @@
1
+ import { makeCsmtKeyFromNodeId } from './tree.js';
2
+ export class ModifyCtx {
3
+ constructor(db) {
4
+ this.max = db.maxModifySize;
5
+ this.db = db;
6
+ this.buf = new Uint8Array(db.maxModifySize);
7
+ }
8
+ // default values
9
+ len = 0;
10
+ id = -1;
11
+ hasSortField = -1;
12
+ hasSortText = -1;
13
+ queue = new Map();
14
+ ctx = {}; // maybe make this different?
15
+ payload;
16
+ max;
17
+ buf;
18
+ field;
19
+ prefix0 = -1;
20
+ prefix1 = -1;
21
+ lastMain = -1;
22
+ mergeMain;
23
+ mergeMainSize;
24
+ db;
25
+ dirtyRanges = new Set();
26
+ dirtyTypes = new Map();
27
+ markNodeDirty(schema, nodeId) {
28
+ const key = makeCsmtKeyFromNodeId(schema.id, schema.blockCapacity, nodeId);
29
+ if (this.dirtyRanges.has(key)) {
30
+ return;
31
+ }
32
+ this.dirtyRanges.add(key);
33
+ this.updateMax();
34
+ }
35
+ markTypeDirty(schema) {
36
+ if (this.dirtyTypes.has(schema.id)) {
37
+ return;
38
+ }
39
+ this.dirtyTypes.set(schema.id, schema.lastId);
40
+ this.updateMax();
41
+ }
42
+ updateMax() {
43
+ // reserve space in the end of the buf [...data, type (16), lastId (32), typesSize (16), ...ranges (64)[], dataLen (32)]
44
+ this.max =
45
+ this.db.maxModifySize -
46
+ 4 -
47
+ 2 -
48
+ this.dirtyTypes.size * 10 -
49
+ this.dirtyRanges.size * 8;
50
+ }
51
+ getData(lastIds) {
52
+ const rangesSize = this.dirtyRanges.size;
53
+ const typesSize = this.dirtyTypes.size;
54
+ const data = this.buf.subarray(0, this.len + 4 + 2 + typesSize * 10 + rangesSize * 8);
55
+ let i = this.len;
56
+ data[i] = typesSize;
57
+ data[i + 1] = typesSize >>> 8;
58
+ i += 2;
59
+ for (const [id, startId] of this.dirtyTypes) {
60
+ const lastId = this.db.schemaTypesParsedById[id].lastId;
61
+ lastIds[id] = lastId;
62
+ data[i] = id;
63
+ data[i + 1] = id >>> 8;
64
+ i += 2;
65
+ data[i++] = startId;
66
+ data[i++] = startId >>> 8;
67
+ data[i++] = startId >>> 16;
68
+ data[i++] = startId >>> 24;
69
+ data[i++] = lastId;
70
+ data[i++] = lastId >>> 8;
71
+ data[i++] = lastId >>> 16;
72
+ data[i++] = lastId >>> 24;
73
+ }
74
+ const view = new DataView(data.buffer, data.byteOffset);
75
+ for (let key of this.dirtyRanges) {
76
+ view.setFloat64(i, key, true);
77
+ i += 8;
78
+ }
79
+ data[i++] = this.len;
80
+ data[i++] = this.len >>> 8;
81
+ data[i++] = this.len >>> 16;
82
+ data[i++] = this.len >>> 24;
83
+ return data;
84
+ }
85
+ }
86
+ export const flushBuffer = (db) => {
87
+ const ctx = db.modifyCtx;
88
+ let flushPromise;
89
+ if (ctx.len) {
90
+ const d = Date.now();
91
+ const lastIds = {};
92
+ const data = ctx.getData(lastIds);
93
+ const resCtx = ctx.ctx;
94
+ const queue = ctx.queue;
95
+ flushPromise = db.hooks.flushModify(data).then(({ offsets }) => {
96
+ resCtx.offsets = offsets;
97
+ for (const typeId in lastIds) {
98
+ if (typeId in offsets) {
99
+ const lastId = lastIds[typeId] + offsets[typeId];
100
+ const def = db.schemaTypesParsedById[typeId];
101
+ const delta = lastId - def.lastId;
102
+ if (delta > 0) {
103
+ def.lastId += delta;
104
+ def.total += delta;
105
+ }
106
+ }
107
+ else {
108
+ console.warn('no offset returned, very wrong');
109
+ }
110
+ }
111
+ db.writeTime += Date.now() - d;
112
+ if (queue.size) {
113
+ flushPromise.then(() => {
114
+ for (const [resolve, res] of queue) {
115
+ resolve(res.getId(offsets));
116
+ }
117
+ });
118
+ }
119
+ });
120
+ ctx.dirtyTypes.clear();
121
+ ctx.dirtyRanges.clear();
122
+ ctx.len = 0;
123
+ ctx.prefix0 = -1;
124
+ ctx.prefix1 = -1;
125
+ ctx.max = db.maxModifySize;
126
+ ctx.ctx = {};
127
+ }
128
+ db.isDraining = false;
129
+ return flushPromise;
130
+ };
131
+ export const startDrain = (db) => {
132
+ db.isDraining = true;
133
+ process.nextTick(() => {
134
+ flushBuffer(db);
135
+ });
136
+ };
137
+ //# sourceMappingURL=operations.js.map
@@ -208,6 +208,9 @@ export class BasedDbQuery extends QueryBranch {
208
208
  }
209
209
  }
210
210
  }
211
+ if (!db.schemaIsSetValue) {
212
+ throw new Error('Query: No schema yet - use await db.schemaIsSet()');
213
+ }
211
214
  const def = createQueryDef(db, QueryDefType.Root, target, skipValidation);
212
215
  super(db, def);
213
216
  }
@@ -14,7 +14,7 @@ export declare class BasedQueryResponse {
14
14
  [inspect.custom](depth: number): string;
15
15
  debug(): this;
16
16
  node(index?: number): any;
17
- [Symbol.iterator](): Generator<Item, void, unknown>;
17
+ [Symbol.iterator](): Generator<Partial<Item>, void, unknown>;
18
18
  inspect(depth?: number): this;
19
19
  forEach(fn: (item: any, key: number) => void): void;
20
20
  map(fn: (item: any, key: number) => any): any[];
@@ -2,7 +2,7 @@ import { inspect } from 'node:util';
2
2
  import picocolors from 'picocolors';
3
3
  import { debug, resultToObject, readAllFields } from './query.js';
4
4
  import { size, time, inspectData, defHasId, displayTarget } from './display.js';
5
- import { readFloatLE, readUint32 } from '../bitWise.js';
5
+ import { readFloatLE, readUint32 } from '../../utils.js';
6
6
  export { time, size, inspectData };
7
7
  export class BasedQueryResponse {
8
8
  result;
@@ -59,9 +59,16 @@ export class BasedQueryResponse {
59
59
  while (i < result.byteLength - 4) {
60
60
  let id = readUint32(result, i);
61
61
  i += 4;
62
- const item = {
63
- id,
64
- };
62
+ let item;
63
+ if (this.def.aggregation == 255 /* AggFlag.TEMP */) {
64
+ item = {};
65
+ this.def.aggregation = 4 /* AggFlag.COUNT */;
66
+ }
67
+ else {
68
+ item = {
69
+ id,
70
+ };
71
+ }
65
72
  if (this.def.search) {
66
73
  item.$searchScore = readFloatLE(result, i);
67
74
  i += 4;
@@ -95,7 +102,13 @@ export class BasedQueryResponse {
95
102
  return readUint32(result, offset);
96
103
  }
97
104
  get length() {
98
- return readUint32(this.result, 0);
105
+ const l = readUint32(this.result, 0);
106
+ if (this.def.aggregation != 0 /* AggFlag.NONE */ && this.def.aggregation != null) {
107
+ return l + 1;
108
+ }
109
+ else {
110
+ return l;
111
+ }
99
112
  }
100
113
  toObject() {
101
114
  return resultToObject(this.def, this.result, this.end - 4, 0);
@@ -0,0 +1,3 @@
1
+ import { QueryDef, AggFlag } from './types.js';
2
+ export declare const createAggFlagBuffer: (aggregation: AggFlag) => Uint8Array;
3
+ export declare const count: (def: QueryDef) => void;
@@ -0,0 +1,9 @@
1
+ export const createAggFlagBuffer = (aggregation) => {
2
+ const buf = new Uint8Array(1);
3
+ buf[0] = aggregation;
4
+ return buf;
5
+ };
6
+ export const count = (def) => {
7
+ def.aggregation = 4 /* AggFlag.COUNT */;
8
+ };
9
+ //# sourceMappingURL=aggregation.js.map
@@ -119,7 +119,8 @@ const inspectObject = (object, q, path, level, isLast, isFirst, isObject, depth)
119
119
  let edges = [];
120
120
  for (const k in object) {
121
121
  const key = path ? path + '.' + k : k;
122
- let def = q.props[key];
122
+ let def;
123
+ def = q.props[key];
123
124
  let v = object[k];
124
125
  const isEdge = k[0] === '$';
125
126
  if (k === '$searchScore') {
@@ -144,7 +145,16 @@ const inspectObject = (object, q, path, level, isLast, isFirst, isObject, depth)
144
145
  str += ',\n';
145
146
  }
146
147
  else if (!def) {
147
- str += inspectObject(v, q, key, level + 2, false, false, true, depth) + '';
148
+ if (Object.keys(object)[0] == 'count') {
149
+ // TODO: to flag the agg someway. This is ugly as hell!!!
150
+ str += picocolors.blue(v);
151
+ str += picocolors.italic(picocolors.dim(' count'));
152
+ str += ',\n';
153
+ }
154
+ else {
155
+ str +=
156
+ inspectObject(v, q, key, level + 2, false, false, true, depth) + '';
157
+ }
148
158
  }
149
159
  else if ('__isPropDef' in def) {
150
160
  if (def.typeIndex === REFERENCES) {
@@ -33,7 +33,7 @@ const writeConditions = (result, k, offset, conditions) => {
33
33
  export const fillConditionsBuffer = (result, conditions, offset) => {
34
34
  let lastWritten = offset;
35
35
  let orJumpIndex = 0;
36
- if (conditions.or) {
36
+ if (conditions.or && conditions.or.size != 0) {
37
37
  result[lastWritten] = META_OR_BRANCH;
38
38
  lastWritten++;
39
39
  orJumpIndex = lastWritten;
@@ -72,7 +72,7 @@ export const fillConditionsBuffer = (result, conditions, offset) => {
72
72
  result[sizeIndex + 1] = size >>> 8;
73
73
  });
74
74
  }
75
- if (conditions.or) {
75
+ if (conditions.or && conditions.or.size != 0) {
76
76
  const size = fillConditionsBuffer(result, conditions.or, lastWritten);
77
77
  result[orJumpIndex] = size;
78
78
  result[orJumpIndex + 1] = size >>> 8;
@@ -8,4 +8,4 @@ export * from './filter/toBuffer.js';
8
8
  export * from './sort.js';
9
9
  export * from './debug.js';
10
10
  export * from './read/read.js';
11
- export * from './aggregationFn.js';
11
+ export * from './aggregation.js';
@@ -8,5 +8,5 @@ export * from './filter/toBuffer.js';
8
8
  export * from './sort.js';
9
9
  export * from './debug.js';
10
10
  export * from './read/read.js';
11
- export * from './aggregationFn.js';
11
+ export * from './aggregation.js';
12
12
  //# sourceMappingURL=query.js.map
@@ -31,7 +31,6 @@ export const createQueryDef = (db, type, target, skipValidation) => {
31
31
  const t = target;
32
32
  const q = queryDef;
33
33
  q.props = t.ref.edges;
34
- // q.reverseProps = t.ref.reverseEdges
35
34
  q.type = type;
36
35
  q.target = t;
37
36
  return q;
@@ -1,7 +1,7 @@
1
1
  import { ALIAS, ALIASES, BINARY, BOOLEAN, ENUM, INT16, INT32, INT8, NUMBER, STRING, TEXT, TIMESTAMP, UINT16, UINT32, UINT8, VECTOR, JSON, CARDINALITY, } from '@based/schema/def';
2
2
  import { QueryDefType } from '../types.js';
3
3
  import { read, readUtf8 } from '../../string.js';
4
- import { readDoubleLE, readFloatLE, readInt16, readInt32, readUint16, readUint32, } from '../../bitWise.js';
4
+ import { readDoubleLE, readFloatLE, readInt16, readInt32, readUint16, readUint32, } from '../../../utils.js';
5
5
  import { inverseLangMap } from '@based/schema';
6
6
  import { READ_EDGE, READ_ID, READ_REFERENCE, READ_REFERENCES, READ_AGGREGATION, CREATE_AGGREGATION, } from '../types.js';
7
7
  const addField = (p, value, item, defaultOnly = false, lang = 0) => {
@@ -132,7 +132,7 @@ const readMain = (q, result, offset, item) => {
132
132
  return i - offset;
133
133
  };
134
134
  const handleUndefinedProps = (id, q, item) => {
135
- if (q.aggregation != -999 /* AggFn.NONE */) {
135
+ if (q.aggregation == 0 /* AggFlag.NONE */ || q.aggregation == null) {
136
136
  for (const k in q.include.propsRead) {
137
137
  if (q.include.propsRead[k] !== id) {
138
138
  // Only relvant for seperate props
@@ -291,7 +291,8 @@ export const readAllFields = (q, result, offset, end, item, id) => {
291
291
  else if (index === CREATE_AGGREGATION) {
292
292
  i--;
293
293
  result[i] = READ_AGGREGATION;
294
- q.aggregation = -999 /* AggFn.NONE */;
294
+ result[0] = result[0] + 1;
295
+ q.aggregation = 255 /* AggFlag.TEMP */;
295
296
  return i - offset - 4 - (q.search ? 4 : 0);
296
297
  }
297
298
  else if (index === READ_AGGREGATION) {
@@ -303,6 +304,9 @@ export const readAllFields = (q, result, offset, end, item, id) => {
303
304
  };
304
305
  const size = readUint32(result, i);
305
306
  addField(propAgg, readUint32(result, i + 4), item);
307
+ result[0] = result[0] - 1;
308
+ i--;
309
+ result[i] = CREATE_AGGREGATION;
306
310
  i += 4 + size + 4;
307
311
  }
308
312
  else if (index === 0) {
@@ -396,9 +400,10 @@ export const resultToObject = (q, result, end, offset = 0) => {
396
400
  while (i < end) {
397
401
  const id = readUint32(result, i);
398
402
  i += 4;
399
- var item;
400
- if (q.aggregation == -999 /* AggFn.NONE */) {
403
+ let item;
404
+ if (q.aggregation == 255 /* AggFlag.TEMP */) {
401
405
  item = {};
406
+ q.aggregation = 4 /* AggFlag.COUNT */;
402
407
  }
403
408
  else {
404
409
  item = {
@@ -3,7 +3,7 @@ import { QueryDefType } from './types.js';
3
3
  import { includeToBuffer } from './include/toBuffer.js';
4
4
  import { filterToBuffer } from './query.js';
5
5
  import { searchToBuffer } from './search/index.js';
6
- import { createAggFnBuffer } from './aggregationFn.js';
6
+ import { createAggFlagBuffer } from './aggregation.js';
7
7
  import { ENCODER } from '../../utils.js';
8
8
  const byteSize = (arr) => {
9
9
  return arr.reduce((a, b) => {
@@ -95,7 +95,7 @@ export function defToBuffer(db, def) {
95
95
  sortSize = sort.byteLength;
96
96
  }
97
97
  let aggregation;
98
- aggregation = createAggFnBuffer(def.aggregation);
98
+ aggregation = createAggFlagBuffer(def.aggregation);
99
99
  if (def.target.ids) {
100
100
  // type 1
101
101
  // 1: 4 + ids * 4 [ids len] [id,id,id]
@@ -92,7 +92,7 @@ export type QueryDefShared = {
92
92
  };
93
93
  references: Map<number, QueryDef>;
94
94
  edges?: QueryDef;
95
- aggregation: AggFn;
95
+ aggregation: AggFlag;
96
96
  };
97
97
  export type QueryDefEdges = {
98
98
  type: QueryDefType.Edge;
@@ -120,7 +120,8 @@ export declare const READ_REFERENCES = 253;
120
120
  export declare const READ_REFERENCE = 254;
121
121
  export declare const CREATE_AGGREGATION = 250;
122
122
  export declare const READ_AGGREGATION = 251;
123
- export declare const enum AggFn {
123
+ export declare const enum AggFlag {
124
+ NONE = 0,
124
125
  AVG = 1,
125
126
  CARDINALITY = 2,
126
127
  CONCAT = 3,// string aggregation, delimiter should be an argument
@@ -133,5 +134,5 @@ export declare const enum AggFn {
133
134
  STDDEV = 10,// population or sample should be optional parameters, default = sample
134
135
  SUM = 11,
135
136
  VARIANCE = 12,
136
- NONE = -999
137
+ TEMP = 255
137
138
  }
@@ -1,5 +1,5 @@
1
1
  import picocolors from 'picocolors';
2
- import { ALIAS, BINARY, BOOLEAN, REFERENCE, REFERENCES, REVERSE_TYPE_INDEX_MAP, STRING, TEXT, TIMESTAMP, VECTOR, propIsNumerical, createEmptyDef, } from '@based/schema/def';
2
+ import { ALIAS, BINARY, BOOLEAN, REFERENCE, REFERENCES, REVERSE_TYPE_INDEX_MAP, STRING, TEXT, TIMESTAMP, VECTOR, propIsNumerical, createEmptyDef, DEFAULT_MAP, } from '@based/schema/def';
3
3
  import { EQUAL, EXISTS, HAS, isNumerical, LIKE, operatorReverseMap, VECTOR_FNS, } from './filter/types.js';
4
4
  import { MAX_ID, MAX_ID_VALUE, MAX_IDS_PER_QUERY, MIN_ID_VALUE, } from './thresholds.js';
5
5
  import { displayTarget, safeStringify } from './display.js';
@@ -420,6 +420,7 @@ export const EMPTY_ALIAS_PROP_DEF = {
420
420
  separate: true,
421
421
  len: 0,
422
422
  start: 0,
423
+ default: DEFAULT_MAP[ALIAS],
423
424
  path: ['ERROR_ALIAS'],
424
425
  };
425
426
  export const ERROR_STRING = {
@@ -429,6 +430,7 @@ export const ERROR_STRING = {
429
430
  separate: true,
430
431
  len: 0,
431
432
  start: 0,
433
+ default: DEFAULT_MAP[STRING],
432
434
  path: ['ERROR_STRING'],
433
435
  };
434
436
  export const ERROR_VECTOR = {
@@ -438,6 +440,7 @@ export const ERROR_VECTOR = {
438
440
  separate: true,
439
441
  len: 0,
440
442
  start: 0,
443
+ default: DEFAULT_MAP[VECTOR],
441
444
  path: ['ERROR_VECTOR'],
442
445
  };
443
446
  export const EMPTY_SCHEMA_DEF = {
@@ -446,5 +449,6 @@ export const EMPTY_SCHEMA_DEF = {
446
449
  propNames: new Uint8Array([]),
447
450
  packed: new Uint8Array([]),
448
451
  idUint8: new Uint8Array([0, 0]),
452
+ mainEmptyAllZeroes: true,
449
453
  };
450
454
  //# sourceMappingURL=validation.js.map
@@ -1,5 +1,5 @@
1
1
  import native from '../native.js';
2
- import { readUint32 } from './bitWise.js';
2
+ import { readUint32 } from './../utils.js';
3
3
  import makeTmpBuffer from './tmpBuffer.js';
4
4
  import { DECODER, ENCODER } from '../utils.js';
5
5
  const { getUint8Array: getTmpBuffer } = makeTmpBuffer(4096); // the usual page size?
@@ -35,7 +35,10 @@ export declare class BasedDb {
35
35
  stop: DbServer['stop'];
36
36
  save: DbServer['save'];
37
37
  migrateSchema: DbServer['migrateSchema'];
38
- isReady: DbClient['isModified'];
38
+ isModified: DbClient['isModified'];
39
+ schemaIsSet: DbClient['schemaIsSet'];
39
40
  destroy(): Promise<void>;
40
41
  wipe(): Promise<void>;
42
+ on: DbClient['on'];
43
+ off: DbClient['off'];
41
44
  }
package/dist/src/index.js CHANGED
@@ -112,8 +112,11 @@ export class BasedDb {
112
112
  migrateSchema = function () {
113
113
  return this.server.migrateSchema.apply(this.server, arguments);
114
114
  };
115
- isReady = function () {
116
- return this.client.isReady.apply(this.client, arguments);
115
+ isModified = function () {
116
+ return this.client.isModified.apply(this.client, arguments);
117
+ };
118
+ schemaIsSet = function () {
119
+ return this.client.schemaIsSet.apply(this.client, arguments);
117
120
  };
118
121
  async destroy() {
119
122
  // Tmp fix: Gives node time to GC existing buffers else it can incorrectly re-asign to mem
@@ -131,5 +134,11 @@ export class BasedDb {
131
134
  this.#init(opts);
132
135
  await this.start({ clean: true });
133
136
  }
137
+ on = function () {
138
+ return this.client.on.apply(this.client, arguments);
139
+ };
140
+ off = function () {
141
+ return this.client.on.apply(this.client, arguments);
142
+ };
134
143
  }
135
144
  //# sourceMappingURL=index.js.map
@@ -4,7 +4,7 @@ declare const _default: {
4
4
  workerCtxInit: () => void;
5
5
  externalFromInt(address: BigInt): any;
6
6
  intFromExternal(external: any): BigInt;
7
- modify: (data: Uint8Array, types: Uint8Array, dbCtx: any) => any;
7
+ modify: (data: Uint8Array, types: Uint8Array, dbCtx: any, dirtyBlocksOut: Float64Array) => any;
8
8
  getQueryBuf: (q: Uint8Array, dbCtx: any) => ArrayBuffer | null;
9
9
  start: (id: number) => any;
10
10
  stop: (dbCtx: any) => any;
@@ -28,8 +28,8 @@ export default {
28
28
  intFromExternal(external) {
29
29
  return db.intFromExternal(external);
30
30
  },
31
- modify: (data, types, dbCtx) => {
32
- db.modify(data, types, dbCtx);
31
+ modify: (data, types, dbCtx, dirtyBlocksOut) => {
32
+ db.modify(data, types, dbCtx, dirtyBlocksOut);
33
33
  },
34
34
  getQueryBuf: (q, dbCtx) => {
35
35
  const x = db.getQueryBuf(dbCtx, q);
@@ -199,8 +199,8 @@ export function createTree(createHash) {
199
199
  return {
200
200
  getRoot: () => root,
201
201
  insert: (k, h, data = null) => {
202
- if (!(h instanceof Uint8Array)) { // TODO can we extract the name somehow from Hash?
203
- throw new TypeError('`h` must be a Uint8Array'); // TODO can we extract the name somehow from Hash?
202
+ if (!(h instanceof Uint8Array)) {
203
+ throw new TypeError('`h` must be a Uint8Array');
204
204
  }
205
205
  const newLeaf = createLeaf(k, h, data);
206
206
  root = root ? insert(root, newLeaf) : newLeaf;
@@ -4,6 +4,8 @@ import { createTree } from './csmt/index.js';
4
4
  import { Worker, MessagePort } from 'node:worker_threads';
5
5
  import { TransformFns } from './migrate/index.js';
6
6
  import exitHook from 'exit-hook';
7
+ export declare const SCHEMA_FILE = "schema.json";
8
+ export declare const WRITELOG_FILE = "writelog.json";
7
9
  declare class SortIndex {
8
10
  constructor(buf: Uint8Array, dbCtxExternal: any);
9
11
  buf: Uint8Array;
@@ -23,7 +25,7 @@ export declare class DbWorker {
23
25
  type OnSchemaChange = (schema: StrictSchema) => void;
24
26
  export declare class DbServer {
25
27
  #private;
26
- modifyBuf: SharedArrayBuffer;
28
+ modifyDirtyRanges: Float64Array;
27
29
  dbCtxExternal: any;
28
30
  schema: StrictSchema & {
29
31
  lastId: number;
@@ -57,7 +59,9 @@ export declare class DbServer {
57
59
  clean?: boolean;
58
60
  hosted?: boolean;
59
61
  }): Promise<void>;
60
- save(): void | Promise<void>;
62
+ save(opts?: {
63
+ forceFullDump?: boolean;
64
+ }): Promise<void>;
61
65
  createCsmtHashFun: () => {
62
66
  update: (buf: Uint8Array) => any;
63
67
  digest: (encoding?: "hex") => Uint8Array | string;