@based/db 0.0.26 → 0.0.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/dist/lib/darwin_aarch64/include/cdefs.h +1 -1
  2. package/dist/lib/darwin_aarch64/include/selva/db.h +11 -11
  3. package/dist/lib/darwin_aarch64/include/selva/fields.h +47 -12
  4. package/dist/lib/darwin_aarch64/include/selva/hll.h +59 -0
  5. package/dist/lib/darwin_aarch64/include/selva/types.h +2 -0
  6. package/dist/lib/darwin_aarch64/include/tree.h +69 -69
  7. package/dist/lib/darwin_aarch64/libdeflate.dylib +0 -0
  8. package/dist/lib/darwin_aarch64/libjemalloc_selva.2.dylib +0 -0
  9. package/dist/lib/darwin_aarch64/libnode-v20.node +0 -0
  10. package/dist/lib/darwin_aarch64/libnode-v21.node +0 -0
  11. package/dist/lib/darwin_aarch64/libnode-v22.node +0 -0
  12. package/dist/lib/darwin_aarch64/libnode-v23.node +0 -0
  13. package/dist/lib/darwin_aarch64/libselva.dylib +0 -0
  14. package/dist/lib/linux_aarch64/include/cdefs.h +1 -1
  15. package/dist/lib/linux_aarch64/include/selva/db.h +11 -11
  16. package/dist/lib/linux_aarch64/include/selva/fields.h +47 -12
  17. package/dist/lib/linux_aarch64/include/selva/hll.h +59 -0
  18. package/dist/lib/linux_aarch64/include/selva/types.h +2 -0
  19. package/dist/lib/linux_aarch64/include/tree.h +69 -69
  20. package/dist/lib/linux_aarch64/libdeflate.so +0 -0
  21. package/dist/lib/linux_aarch64/libnode-v20.node +0 -0
  22. package/dist/lib/linux_aarch64/libnode-v21.node +0 -0
  23. package/dist/lib/linux_aarch64/libnode-v22.node +0 -0
  24. package/dist/lib/linux_aarch64/libnode-v23.node +0 -0
  25. package/dist/lib/linux_aarch64/libselva.so +0 -0
  26. package/dist/lib/linux_x86_64/include/cdefs.h +1 -1
  27. package/dist/lib/linux_x86_64/include/selva/db.h +11 -11
  28. package/dist/lib/linux_x86_64/include/selva/fields.h +47 -12
  29. package/dist/lib/linux_x86_64/include/selva/hll.h +59 -0
  30. package/dist/lib/linux_x86_64/include/selva/types.h +2 -0
  31. package/dist/lib/linux_x86_64/include/tree.h +69 -69
  32. package/dist/lib/linux_x86_64/libdeflate.so +0 -0
  33. package/dist/lib/linux_x86_64/libnode-v20.node +0 -0
  34. package/dist/lib/linux_x86_64/libnode-v21.node +0 -0
  35. package/dist/lib/linux_x86_64/libnode-v22.node +0 -0
  36. package/dist/lib/linux_x86_64/libnode-v23.node +0 -0
  37. package/dist/lib/linux_x86_64/libselva.so +0 -0
  38. package/dist/src/client/modify/alias.js +3 -0
  39. package/dist/src/client/modify/binary.js +1 -1
  40. package/dist/src/client/modify/cardinality.d.ts +2 -2
  41. package/dist/src/client/modify/cardinality.js +17 -6
  42. package/dist/src/client/modify/fixed.js +6 -51
  43. package/dist/src/client/modify/json.js +15 -1
  44. package/dist/src/client/modify/references/edge.js +1 -1
  45. package/dist/src/client/modify/references/references.js +21 -6
  46. package/dist/src/client/modify/string.js +5 -6
  47. package/dist/src/client/modify/text.js +0 -11
  48. package/dist/src/client/modify/vector.js +3 -3
  49. package/dist/src/client/query/BasedDbQuery.js +6 -1
  50. package/dist/src/client/query/filter/parseFilterValue.js +2 -4
  51. package/dist/src/client/query/include/walk.js +1 -0
  52. package/dist/src/client/query/read/read.js +8 -1
  53. package/dist/src/client/query/validation.js +3 -0
  54. package/dist/src/index.d.ts +3 -0
  55. package/dist/src/index.js +8 -2
  56. package/dist/src/server/index.js +10 -3
  57. package/dist/src/server/migrate/index.js +9 -5
  58. package/dist/src/server/migrate/worker.js +26 -1
  59. package/dist/src/server/save.js +1 -1
  60. package/package.json +4 -3
  61. package/dist/src/client/bitWise.d.ts +0 -6
  62. package/dist/src/client/bitWise.js +0 -72
  63. package/dist/src/client/operations.d.ts +0 -32
  64. package/dist/src/client/operations.js +0 -137
  65. package/dist/src/client/query/aggregationFn.d.ts +0 -3
  66. package/dist/src/client/query/aggregationFn.js +0 -9
  67. package/dist/src/client/tree.d.ts +0 -1
  68. package/dist/src/client/tree.js +0 -5
@@ -1,5 +1,19 @@
1
1
  import { writeBinary } from './binary.js';
2
+ import { ModifyError } from './ModifyRes.js';
2
3
  export function writeJson(value, ctx, schema, t, parentId, modifyOp) {
3
- return writeBinary(value === null ? null : JSON.stringify(value), ctx, schema, t, parentId, modifyOp);
4
+ try {
5
+ if (value === null) {
6
+ return writeBinary(null, ctx, schema, t, parentId, modifyOp);
7
+ }
8
+ else {
9
+ if (!t.validation(value, t)) {
10
+ return new ModifyError(t, value);
11
+ }
12
+ return writeBinary(JSON.stringify(value), ctx, schema, t, parentId, modifyOp);
13
+ }
14
+ }
15
+ catch (err) {
16
+ return new ModifyError(t, value);
17
+ }
4
18
  }
5
19
  //# sourceMappingURL=json.js.map
@@ -133,7 +133,7 @@ export function writeEdges(t, ref, ctx) {
133
133
  return RANGE_ERR;
134
134
  }
135
135
  ctx.buf[ctx.len++] = CARDINALITY;
136
- writeHllBuf(value, ctx, t, size);
136
+ writeHllBuf(value, ctx, edge, size);
137
137
  }
138
138
  }
139
139
  else {
@@ -27,7 +27,14 @@ export function writeReferences(value, ctx, schema, def, res, mod) {
27
27
  else if (key === 'delete') {
28
28
  err = deleteRefs(def, ctx, schema, mod, val, res.tmpId);
29
29
  }
30
- else if (key === 'set') {
30
+ else if (key === 'update') {
31
+ // and add add: []
32
+ // replace this with update
33
+ err = updateRefs(def, ctx, schema, mod, val, res.tmpId, 1);
34
+ }
35
+ else if (key === 'add') {
36
+ // and add add: []
37
+ // replace this with update
31
38
  err = updateRefs(def, ctx, schema, mod, val, res.tmpId, 1);
32
39
  }
33
40
  else if (key === 'upsert') {
@@ -173,6 +180,9 @@ function appendRefs(def, ctx, modifyOp, refs, op, remaining) {
173
180
  else {
174
181
  return new ModifyError(def, refs);
175
182
  }
183
+ if (!def.validation(id, def)) {
184
+ return new ModifyError(def, refs);
185
+ }
176
186
  if (hasEdges) {
177
187
  if (index === undefined) {
178
188
  if (ctx.len + 9 > ctx.max) {
@@ -261,17 +271,22 @@ function putRefs(def, ctx, modifyOp, refs, op) {
261
271
  for (; i < refs.length; i++) {
262
272
  let ref = refs[i];
263
273
  if (typeof ref === 'number') {
264
- ctx.buf[ctx.len++] = ref;
265
- ctx.buf[ctx.len++] = ref >>>= 8;
266
- ctx.buf[ctx.len++] = ref >>>= 8;
267
- ctx.buf[ctx.len++] = ref >>>= 8;
274
+ if (!def.validation(ref, def)) {
275
+ break;
276
+ }
277
+ else {
278
+ ctx.buf[ctx.len++] = ref;
279
+ ctx.buf[ctx.len++] = ref >>>= 8;
280
+ ctx.buf[ctx.len++] = ref >>>= 8;
281
+ ctx.buf[ctx.len++] = ref >>>= 8;
282
+ }
268
283
  }
269
284
  else if (ref instanceof ModifyState) {
270
285
  if (ref.error) {
271
286
  return ref.error;
272
287
  }
273
288
  ref = ref.getId();
274
- if (!ref) {
289
+ if (!def.validation(ref, def)) {
275
290
  break;
276
291
  }
277
292
  ctx.buf[ctx.len++] = ref;
@@ -8,11 +8,7 @@ import { write } from '../string.js';
8
8
  // add compression handling for edge fields
9
9
  export function writeString(lang, value, ctx, def, t, parentId, modifyOp) {
10
10
  const isBuffer = value instanceof Uint8Array;
11
- if (typeof value !== 'string' && value !== null && !isBuffer) {
12
- return new ModifyError(t, value);
13
- }
14
- const len = value?.length;
15
- if (!len) {
11
+ if (value === '' || value === null) {
16
12
  if (modifyOp === UPDATE) {
17
13
  if (ctx.len + 11 /* SIZE.DEFAULT_CURSOR */ + 1 > ctx.max) {
18
14
  return RANGE_ERR;
@@ -22,11 +18,14 @@ export function writeString(lang, value, ctx, def, t, parentId, modifyOp) {
22
18
  }
23
19
  }
24
20
  else {
21
+ if (!t.validation(value, t)) {
22
+ return new ModifyError(t, value);
23
+ }
25
24
  let size = isBuffer
26
25
  ? value.byteLength
27
26
  : ENCODER.encode(value).byteLength + 6;
28
27
  if (ctx.len + 11 /* SIZE.DEFAULT_CURSOR */ + 11 + size > ctx.max) {
29
- // +10 OR +11, teh original check was +20 but
28
+ // +10 OR +11, the original check was +20 but
30
29
  // there are 10 addtional bytes in this scope
31
30
  // 5 compression size
32
31
  return RANGE_ERR;
@@ -4,16 +4,6 @@ import { writeString } from './string.js';
4
4
  import { ModifyError } from './ModifyRes.js';
5
5
  import { setCursor } from './setCursor.js';
6
6
  export function writeText(value, ctx, def, t, res, parentId, modifyOp) {
7
- const isBuffer = value instanceof Uint8Array;
8
- if (typeof value !== 'string' &&
9
- value !== null &&
10
- !isBuffer &&
11
- value &&
12
- typeof value !== 'object') {
13
- return new ModifyError(t, value);
14
- }
15
- // const len = value?.length
16
- // think about this
17
7
  if (value === null && !res.locale) {
18
8
  if (modifyOp === UPDATE) {
19
9
  if (ctx.len + 11 /* SIZE.DEFAULT_CURSOR */ + 1 > ctx.max) {
@@ -31,7 +21,6 @@ export function writeText(value, ctx, def, t, res, parentId, modifyOp) {
31
21
  // @ts-ignore
32
22
  value = '';
33
23
  }
34
- // @ts-ignore
35
24
  const err = writeString(res.locale, value, ctx, def, t, res.tmpId, modifyOp);
36
25
  if (modifyOp === CREATE) {
37
26
  const index = t.prop * (def.localeSize + 1);
@@ -18,11 +18,11 @@ export function writeVector(value, ctx, schema, t, parentId, modifyOp) {
18
18
  size = 0;
19
19
  }
20
20
  else {
21
- if (!value) {
22
- return new ModifyError(t, value);
23
- }
24
21
  size = value.byteLength + 4;
25
22
  }
23
+ if (!t.validation(value, t)) {
24
+ return new ModifyError(t, value);
25
+ }
26
26
  if (size === 0) {
27
27
  if (modifyOp === UPDATE) {
28
28
  if (ctx.len + 11 /* SIZE.DEFAULT_CURSOR */ + 1 > ctx.max) {
@@ -162,7 +162,12 @@ export class QueryBranch {
162
162
  });
163
163
  }
164
164
  else if (Array.isArray(f)) {
165
- includeFields(this.def, f);
165
+ if (f.length === 0) {
166
+ includeFields(this.def, ['id']);
167
+ }
168
+ else {
169
+ includeFields(this.def, f);
170
+ }
166
171
  }
167
172
  else if (f !== undefined) {
168
173
  throw new Error('Invalid include statement: expected props, refs and edges (string or array) or function');
@@ -1,4 +1,4 @@
1
- import { TIMESTAMP, CREATED, UPDATED, ENUM, BOOLEAN, STRING, BINARY, TEXT, } from '@based/schema/def';
1
+ import { TIMESTAMP, ENUM, BOOLEAN, STRING, BINARY, TEXT, } from '@based/schema/def';
2
2
  import { crc32 } from '../../crc32.js';
3
3
  import { convertToTimestamp } from '../../timestamp.js';
4
4
  import { ENCODER } from '../../../utils.js';
@@ -39,9 +39,7 @@ export const parseFilterValue = (prop, value) => {
39
39
  else if (prop.typeIndex === ENUM) {
40
40
  return prop.reverseEnum[value] + 1;
41
41
  }
42
- else if (prop.typeIndex === TIMESTAMP ||
43
- prop.typeIndex === CREATED ||
44
- prop.typeIndex === UPDATED) {
42
+ else if (prop.typeIndex === TIMESTAMP) {
45
43
  const v = convertToTimestamp(value);
46
44
  if (typeof v !== 'number') {
47
45
  throw new Error(`Incorrect value for timestamp ${prop.path.join('.')}`);
@@ -20,6 +20,7 @@ export const walkDefs = (db, def, f) => {
20
20
  def.edges.lang = def.lang;
21
21
  }
22
22
  const edgeProp = def.edges.props[p];
23
+ // console.log(p, !!edgeProp, Object.keys(def.edges.props), def.target)
23
24
  if (edgeProp.typeIndex === REFERENCE ||
24
25
  edgeProp.typeIndex === REFERENCES) {
25
26
  const refDef = createOrGetRefQueryDef(db, def.edges, edgeProp);
@@ -296,7 +296,7 @@ export const readAllFields = (q, result, offset, end, item, id) => {
296
296
  return i - offset - 4 - (q.search ? 4 : 0);
297
297
  }
298
298
  else if (index === READ_AGGREGATION) {
299
- // TODO: To change to a map and also to get the aggregate field name from a query function parameter
299
+ // TODO: Change to a map and also to get the aggregate field name from a query function parameter
300
300
  const propAgg = {
301
301
  name: 'count',
302
302
  path: ['count'],
@@ -314,6 +314,9 @@ export const readAllFields = (q, result, offset, end, item, id) => {
314
314
  }
315
315
  else {
316
316
  const prop = q.schema.reverseProps[index];
317
+ if (!prop) {
318
+ console.log({ prop: !!prop }, index);
319
+ }
317
320
  if (prop.typeIndex === CARDINALITY) {
318
321
  q.include.propsRead[index] = id;
319
322
  const size = readUint32(result, i);
@@ -419,6 +422,10 @@ export const resultToObject = (q, result, end, offset = 0) => {
419
422
  items.push(item);
420
423
  }
421
424
  if ('id' in q.target || 'alias' in q.target) {
425
+ if (q.type === QueryDefType.Root && q.target.type === '_root') {
426
+ // Todo can be optimized
427
+ delete items[0].id;
428
+ }
422
429
  return items[0];
423
430
  }
424
431
  return items;
@@ -418,6 +418,7 @@ export const EMPTY_ALIAS_PROP_DEF = {
418
418
  typeIndex: ALIAS,
419
419
  __isPropDef: true,
420
420
  separate: true,
421
+ validation: () => true,
421
422
  len: 0,
422
423
  start: 0,
423
424
  default: DEFAULT_MAP[ALIAS],
@@ -428,6 +429,7 @@ export const ERROR_STRING = {
428
429
  typeIndex: STRING,
429
430
  __isPropDef: true,
430
431
  separate: true,
432
+ validation: () => true,
431
433
  len: 0,
432
434
  start: 0,
433
435
  default: DEFAULT_MAP[STRING],
@@ -438,6 +440,7 @@ export const ERROR_VECTOR = {
438
440
  typeIndex: VECTOR,
439
441
  __isPropDef: true,
440
442
  separate: true,
443
+ validation: () => true,
441
444
  len: 0,
442
445
  start: 0,
443
446
  default: DEFAULT_MAP[VECTOR],
@@ -10,6 +10,9 @@ export { xxHash64 } from './client/xxHash64.js';
10
10
  export { crc32 } from './client/crc32.js';
11
11
  export * from './client/query/serialize.js';
12
12
  export * from './utils.js';
13
+ export * from './client/query/query.js';
14
+ export * from './client/query/BasedDbQuery.js';
15
+ export * from './client/query/BasedIterable.js';
13
16
  export declare class BasedDb {
14
17
  #private;
15
18
  client: DbClient;
package/dist/src/index.js CHANGED
@@ -12,6 +12,9 @@ export { xxHash64 } from './client/xxHash64.js';
12
12
  export { crc32 } from './client/crc32.js';
13
13
  export * from './client/query/serialize.js';
14
14
  export * from './utils.js';
15
+ export * from './client/query/query.js';
16
+ export * from './client/query/BasedDbQuery.js';
17
+ export * from './client/query/BasedIterable.js';
15
18
  export class BasedDb {
16
19
  client;
17
20
  server;
@@ -102,11 +105,13 @@ export class BasedDb {
102
105
  start = function () {
103
106
  return this.server.start.apply(this.server, arguments);
104
107
  };
105
- stop = function () {
108
+ stop = async function () {
109
+ await this.isModified();
106
110
  this.client.stop();
107
111
  return this.server.stop.apply(this.server, arguments);
108
112
  };
109
- save = function () {
113
+ save = async function () {
114
+ await this.isModified();
110
115
  return this.server.save.apply(this.server, arguments);
111
116
  };
112
117
  migrateSchema = function () {
@@ -119,6 +124,7 @@ export class BasedDb {
119
124
  return this.client.schemaIsSet.apply(this.client, arguments);
120
125
  };
121
126
  async destroy() {
127
+ await this.isModified();
122
128
  // Tmp fix: Gives node time to GC existing buffers else it can incorrectly re-asign to mem
123
129
  // Todo: clear all active queries, queues ETC
124
130
  await wait(Math.max(this.client.hooks.flushTime + 10, 10));
@@ -109,7 +109,8 @@ export class DbServer {
109
109
  const lastBlock = Math.ceil((((tmp / blockCapacity) | 0) * blockCapacity + 1) / blockCapacity);
110
110
  maxNrChanges += lastBlock;
111
111
  }
112
- if (!this.modifyDirtyRanges || this.modifyDirtyRanges.length < maxNrChanges) {
112
+ if (!this.modifyDirtyRanges ||
113
+ this.modifyDirtyRanges.length < maxNrChanges) {
113
114
  const min = Math.max(maxNrChanges * 1.2, 1024) | 0;
114
115
  this.modifyDirtyRanges = new Float64Array(min);
115
116
  }
@@ -356,7 +357,7 @@ export class DbServer {
356
357
  }
357
358
  updateTypeDefs(this.schema, this.schemaTypesParsed, this.schemaTypesParsedById);
358
359
  if (!fromStart) {
359
- writeFile(join(this.fileSystemPath, SCHEMA_FILE), JSON.stringify(this.schema)).catch((err) => console.error(SCHEMA_FILE, err));
360
+ writeFile(join(this.fileSystemPath, SCHEMA_FILE), JSON.stringify(this.schema)).catch((err) => console.error('!!!', SCHEMA_FILE, err));
360
361
  let types = Object.keys(this.schemaTypesParsed);
361
362
  const s = schemaToSelvaBuffer(this.schemaTypesParsed);
362
363
  for (let i = 0; i < s.length; i++) {
@@ -526,7 +527,13 @@ export class DbServer {
526
527
  }
527
528
  async destroy() {
528
529
  await this.stop(true);
529
- await rm(this.fileSystemPath, { recursive: true }).catch((err) => console.warn('Error removing dump folder', this.fileSystemPath, err.message));
530
+ await rm(this.fileSystemPath, { recursive: true }).catch((err) => {
531
+ // console.warn(
532
+ // 'Error removing dump folder',
533
+ // this.fileSystemPath,
534
+ // err.message,
535
+ // ),
536
+ });
530
537
  }
531
538
  }
532
539
  //# sourceMappingURL=index.js.map
@@ -5,7 +5,10 @@ import { Worker, MessageChannel, receiveMessageOnPort, } from 'node:worker_threa
5
5
  import native from '../../native.js';
6
6
  import './worker.js';
7
7
  import { foreachDirtyBlock } from '../tree.js';
8
+ import { SCHEMA_FILE } from '../index.js';
8
9
  import { fileURLToPath } from 'url';
10
+ import { deepMerge } from '@saulx/utils';
11
+ import { writeFile } from 'fs/promises';
9
12
  const __filename = fileURLToPath(import.meta.url);
10
13
  const __dirname = dirname(__filename);
11
14
  const workerPath = join(__dirname, 'worker.js');
@@ -64,8 +67,9 @@ export const migrate = async (fromDbServer, toSchema, transform) => {
64
67
  worker.on('error', console.error);
65
68
  let i = 0;
66
69
  let ranges = [];
67
- fromDbServer.updateMerkleTree();
68
- fromDbServer.dirtyRanges.clear();
70
+ // fromDbServer.updateMerkleTree()
71
+ // fromDbServer.dirtyRanges.clear()
72
+ await fromDbServer.save();
69
73
  fromDbServer.merkleTree.visitLeafNodes((leaf) => {
70
74
  ranges.push(leaf.data);
71
75
  });
@@ -106,13 +110,13 @@ export const migrate = async (fromDbServer, toSchema, transform) => {
106
110
  ;
107
111
  [schema, schemaTypesParsed] = msg.message;
108
112
  }
109
- fromDbServer.schema = schema;
110
- fromDbServer.schemaTypesParsed = schemaTypesParsed;
113
+ fromDbServer.schema = deepMerge(toDb.server.schema, schema);
114
+ fromDbServer.schemaTypesParsed = deepMerge(toDb.server.schemaTypesParsed, schemaTypesParsed);
111
115
  fromDbServer.dbCtxExternal = toCtx;
112
116
  toDb.server.dbCtxExternal = fromCtx;
113
117
  }
114
118
  const promises = fromDbServer.workers.map((worker) => worker.updateCtx(toAddress));
115
- promises.push(toDb.destroy(), worker.terminate());
119
+ promises.push(toDb.destroy(), worker.terminate(), fromDbServer.save({ forceFullDump: true }), writeFile(join(fromDbServer.fileSystemPath, SCHEMA_FILE), JSON.stringify(fromDbServer.schema)));
116
120
  await Promise.all(promises);
117
121
  fromDbServer.onSchemaChange?.(fromDbServer.schema);
118
122
  return fromDbServer.schema;
@@ -2,6 +2,7 @@ import { isMainThread, receiveMessageOnPort, workerData, } from 'node:worker_thr
2
2
  import native from '../../native.js';
3
3
  import { BasedDb } from '../../index.js';
4
4
  import { REFERENCE, REFERENCES } from '@based/schema/def';
5
+ import { isTypedArray } from 'node:util/types';
5
6
  if (isMainThread) {
6
7
  console.warn('running worker.ts in mainthread');
7
8
  }
@@ -12,6 +13,27 @@ else {
12
13
  const path = null;
13
14
  const fromDb = new BasedDb({ path });
14
15
  const toDb = new BasedDb({ path });
16
+ const cp = (obj) => {
17
+ let copy;
18
+ for (const key in obj) {
19
+ const val = obj[key];
20
+ if (typeof val === 'number') {
21
+ // only copy numbers
22
+ copy ??= Array.isArray(obj) ? [] : {};
23
+ copy[key] = val;
24
+ }
25
+ else if (typeof val === 'object' &&
26
+ val !== null &&
27
+ !isTypedArray(val)) {
28
+ const res = cp(val);
29
+ if (res) {
30
+ copy ??= Array.isArray(obj) ? [] : {};
31
+ copy[key] = cp(val);
32
+ }
33
+ }
34
+ }
35
+ return copy;
36
+ };
15
37
  fromDb.server.dbCtxExternal = fromCtx;
16
38
  toDb.server.dbCtxExternal = toCtx;
17
39
  await fromDb.setSchema(fromSchema, true);
@@ -71,7 +93,10 @@ else {
71
93
  }
72
94
  }
73
95
  await toDb.drain();
74
- channel.postMessage([toDb.server.schema, toDb.server.schemaTypesParsed]);
96
+ channel.postMessage([
97
+ cp(toDb.server.schema),
98
+ cp(toDb.server.schemaTypesParsed),
99
+ ]);
75
100
  // put it to sleep
76
101
  atomics[0] = 0;
77
102
  Atomics.notify(atomics, 0);
@@ -2,7 +2,7 @@ import native from '../native.js';
2
2
  import { isMainThread } from 'node:worker_threads';
3
3
  import { writeFile } from 'node:fs/promises';
4
4
  import { join } from 'node:path';
5
- import { destructureCsmtKey, foreachBlock, foreachDirtyBlock, makeCsmtKey } from './tree.js';
5
+ import { destructureCsmtKey, foreachBlock, foreachDirtyBlock, makeCsmtKey, } from './tree.js';
6
6
  import { WRITELOG_FILE } from './index.js';
7
7
  import { writeFileSync } from 'node:fs';
8
8
  import { bufToHex } from '../utils.js';
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@based/db",
3
- "version": "0.0.26",
3
+ "version": "0.0.28",
4
4
  "license": "MIT",
5
5
  "type": "module",
6
6
  "main": "./dist/src/index.js",
@@ -32,7 +32,7 @@
32
32
  "basedDbNative.cjs"
33
33
  ],
34
34
  "dependencies": {
35
- "@based/schema": "5.0.0-alpha.8",
35
+ "@based/schema": "5.0.0-alpha.9",
36
36
  "@saulx/hash": "^3.0.0",
37
37
  "@saulx/utils": "^6.1.1",
38
38
  "exit-hook": "^4.0.0",
@@ -43,6 +43,7 @@
43
43
  "@based/locale-x86-64-gnu": "*"
44
44
  },
45
45
  "devDependencies": {
46
+ "jsondiffpatch": "^0.7.3",
46
47
  "@based/crc32c": "^1.0.0",
47
48
  "@types/node": "^22.5.3",
48
49
  "axios": "^1.7.9",
@@ -50,4 +51,4 @@
50
51
  "tar": "^7.4.3",
51
52
  "typescript": "^5.6.3"
52
53
  }
53
- }
54
+ }
@@ -1,6 +0,0 @@
1
- export declare const readDoubleLE: (val: Uint8Array, offset: number) => number;
2
- export declare const readFloatLE: (val: Uint8Array, offset: number) => number;
3
- export declare const readUint32: (val: Uint8Array, offset: number) => number;
4
- export declare const readInt32: (val: Uint8Array, offset: number) => number;
5
- export declare const readInt16: (val: Uint8Array, offset: number) => number;
6
- export declare const readUint16: (val: Uint8Array, offset: number) => number;
@@ -1,72 +0,0 @@
1
- export const readDoubleLE = (val, offset) => {
2
- const low = (val[offset] |
3
- (val[offset + 1] << 8) |
4
- (val[offset + 2] << 16) |
5
- (val[offset + 3] << 24)) >>>
6
- 0;
7
- const high = (val[offset + 4] |
8
- (val[offset + 5] << 8) |
9
- (val[offset + 6] << 16) |
10
- (val[offset + 7] << 24)) >>>
11
- 0;
12
- const sign = high >>> 31 ? -1 : 1;
13
- let exponent = (high >>> 20) & 0x7ff;
14
- let fraction = (high & 0xfffff) * 2 ** 32 + low;
15
- if (exponent === 0x7ff) {
16
- if (fraction === 0)
17
- return sign * Infinity;
18
- return NaN;
19
- }
20
- if (exponent === 0) {
21
- if (fraction === 0)
22
- return sign * 0;
23
- exponent = 1;
24
- }
25
- else {
26
- fraction += 2 ** 52;
27
- }
28
- return sign * fraction * 2 ** (exponent - 1075);
29
- };
30
- export const readFloatLE = (val, offset) => {
31
- const bits = val[offset] |
32
- (val[offset + 1] << 8) |
33
- (val[offset + 2] << 16) |
34
- (val[offset + 3] << 24);
35
- const sign = bits >>> 31 ? -1 : 1;
36
- let exponent = (bits >>> 23) & 0xff;
37
- let fraction = bits & 0x7fffff;
38
- if (exponent === 0xff) {
39
- if (fraction === 0)
40
- return sign * Infinity;
41
- return NaN;
42
- }
43
- if (exponent === 0) {
44
- if (fraction === 0)
45
- return sign * 0;
46
- exponent = 1;
47
- }
48
- else {
49
- fraction |= 0x800000;
50
- }
51
- return sign * fraction * 2 ** (exponent - 150);
52
- };
53
- export const readUint32 = (val, offset) => {
54
- return ((val[offset] |
55
- (val[offset + 1] << 8) |
56
- (val[offset + 2] << 16) |
57
- (val[offset + 3] << 24)) >>>
58
- 0);
59
- };
60
- export const readInt32 = (val, offset) => {
61
- return (val[offset] |
62
- (val[offset + 1] << 8) |
63
- (val[offset + 2] << 16) |
64
- (val[offset + 3] << 24));
65
- };
66
- export const readInt16 = (val, offset) => {
67
- return ((val[offset] | (val[offset + 1] << 8)) << 16) >> 16;
68
- };
69
- export const readUint16 = (val, offset) => {
70
- return (val[offset] | (val[offset + 1] << 8)) >>> 0;
71
- };
72
- //# sourceMappingURL=bitWise.js.map
@@ -1,32 +0,0 @@
1
- import { PropDef, SchemaTypeDef } from '@based/schema/def';
2
- import { DbClient } from './index.js';
3
- import { ModifyState } from './modify/ModifyRes.js';
4
- export declare class ModifyCtx {
5
- constructor(db: DbClient);
6
- len: number;
7
- id: number;
8
- hasSortField: number;
9
- hasSortText: number;
10
- queue: Map<(payload: any) => void, ModifyState>;
11
- ctx: {
12
- offsets?: Record<number, number>;
13
- };
14
- payload: Uint8Array;
15
- max: number;
16
- buf: Uint8Array;
17
- field: number;
18
- prefix0: number;
19
- prefix1: number;
20
- lastMain: number;
21
- mergeMain: (PropDef | any)[] | null;
22
- mergeMainSize: number;
23
- db: DbClient;
24
- dirtyRanges: Set<number>;
25
- dirtyTypes: Map<number, number>;
26
- markNodeDirty(schema: SchemaTypeDef, nodeId: number): void;
27
- markTypeDirty(schema: SchemaTypeDef): void;
28
- updateMax(): void;
29
- getData(lastIds: Record<number, number>): Uint8Array;
30
- }
31
- export declare const flushBuffer: (db: DbClient) => Promise<void>;
32
- export declare const startDrain: (db: DbClient) => void;