@based/db 0.0.24 → 0.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dist/lib/darwin_aarch64/include/selva/db.h +37 -3
  2. package/dist/lib/darwin_aarch64/include/selva/fields.h +3 -3
  3. package/dist/lib/darwin_aarch64/include/selva/selva_string.h +2 -2
  4. package/dist/lib/darwin_aarch64/libdeflate.dylib +0 -0
  5. package/dist/lib/darwin_aarch64/libjemalloc_selva.2.dylib +0 -0
  6. package/dist/lib/darwin_aarch64/libnode-v20.node +0 -0
  7. package/dist/lib/darwin_aarch64/libnode-v21.node +0 -0
  8. package/dist/lib/darwin_aarch64/libnode-v22.node +0 -0
  9. package/dist/lib/darwin_aarch64/libnode-v23.node +0 -0
  10. package/dist/lib/darwin_aarch64/libselva.dylib +0 -0
  11. package/dist/lib/linux_aarch64/include/selva/db.h +37 -3
  12. package/dist/lib/linux_aarch64/include/selva/fields.h +3 -3
  13. package/dist/lib/linux_aarch64/include/selva/selva_string.h +2 -2
  14. package/dist/lib/linux_aarch64/libnode-v20.node +0 -0
  15. package/dist/lib/linux_aarch64/libnode-v21.node +0 -0
  16. package/dist/lib/linux_aarch64/libnode-v22.node +0 -0
  17. package/dist/lib/linux_aarch64/libnode-v23.node +0 -0
  18. package/dist/lib/linux_aarch64/libselva.so +0 -0
  19. package/dist/lib/linux_x86_64/include/selva/db.h +37 -3
  20. package/dist/lib/linux_x86_64/include/selva/fields.h +3 -3
  21. package/dist/lib/linux_x86_64/include/selva/selva_string.h +2 -2
  22. package/dist/lib/linux_x86_64/libnode-v20.node +0 -0
  23. package/dist/lib/linux_x86_64/libnode-v21.node +0 -0
  24. package/dist/lib/linux_x86_64/libnode-v22.node +0 -0
  25. package/dist/lib/linux_x86_64/libnode-v23.node +0 -0
  26. package/dist/lib/linux_x86_64/libselva.so +0 -0
  27. package/dist/src/client/flushModify.js +5 -1
  28. package/dist/src/client/index.d.ts +12 -6
  29. package/dist/src/client/index.js +33 -1
  30. package/dist/src/client/modify/create.js +17 -1
  31. package/dist/src/client/modify/fixed.js +53 -4
  32. package/dist/src/client/modify/modify.js +9 -4
  33. package/dist/src/client/modify/references/edge.js +3 -1
  34. package/dist/src/client/modify/upsert.js +1 -1
  35. package/dist/src/client/operations.d.ts +32 -0
  36. package/dist/src/client/operations.js +137 -0
  37. package/dist/src/client/query/BasedDbQuery.js +3 -0
  38. package/dist/src/client/query/BasedIterable.d.ts +1 -1
  39. package/dist/src/client/query/BasedIterable.js +18 -5
  40. package/dist/src/client/query/aggregation.d.ts +3 -0
  41. package/dist/src/client/query/aggregation.js +9 -0
  42. package/dist/src/client/query/display.js +12 -2
  43. package/dist/src/client/query/filter/toBuffer.js +2 -2
  44. package/dist/src/client/query/query.d.ts +1 -1
  45. package/dist/src/client/query/query.js +1 -1
  46. package/dist/src/client/query/queryDef.js +0 -1
  47. package/dist/src/client/query/read/read.js +10 -5
  48. package/dist/src/client/query/toBuffer.js +2 -2
  49. package/dist/src/client/query/types.d.ts +4 -3
  50. package/dist/src/client/query/validation.js +5 -1
  51. package/dist/src/client/string.js +1 -1
  52. package/dist/src/index.d.ts +4 -1
  53. package/dist/src/index.js +11 -2
  54. package/dist/src/native.d.ts +1 -1
  55. package/dist/src/native.js +2 -2
  56. package/dist/src/server/csmt/tree.js +2 -2
  57. package/dist/src/server/index.d.ts +6 -2
  58. package/dist/src/server/index.js +31 -5
  59. package/dist/src/server/save.d.ts +20 -1
  60. package/dist/src/server/save.js +66 -30
  61. package/dist/src/server/start.js +4 -9
  62. package/dist/src/server/tree.d.ts +1 -1
  63. package/dist/src/server/tree.js +1 -1
  64. package/dist/src/utils.d.ts +6 -0
  65. package/dist/src/utils.js +81 -9
  66. package/package.json +4 -4
@@ -12,7 +12,8 @@ import { Worker, MessageChannel } from 'node:worker_threads';
12
12
  import { fileURLToPath } from 'node:url';
13
13
  import { setTimeout } from 'node:timers/promises';
14
14
  import { migrate } from './migrate/index.js';
15
- const SCHEMA_FILE = 'schema.json';
15
+ export const SCHEMA_FILE = 'schema.json';
16
+ export const WRITELOG_FILE = 'writelog.json';
16
17
  const __filename = fileURLToPath(import.meta.url);
17
18
  const __dirname = dirname(__filename);
18
19
  const workerPath = join(__dirname, 'worker.js');
@@ -45,6 +46,9 @@ export class DbWorker {
45
46
  });
46
47
  port1.on('message', (buf) => {
47
48
  // TODO FIX TYPES CHECK IF THIS MAKES A COPY
49
+ // It's a copy, if you don't want a copy you'd need to make it an explicit view
50
+ // to the underlying buffer:
51
+ // new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength)
48
52
  this.resolvers.shift()(new Uint8Array(buf));
49
53
  this.db.onQueryEnd();
50
54
  });
@@ -67,7 +71,7 @@ export class DbWorker {
67
71
  }
68
72
  }
69
73
  export class DbServer {
70
- modifyBuf;
74
+ modifyDirtyRanges;
71
75
  dbCtxExternal; // pointer to zig dbCtx
72
76
  schema = {
73
77
  lastId: 1, // we reserve one for root props
@@ -95,11 +99,26 @@ export class DbServer {
95
99
  this.sortIndexes = {};
96
100
  this.onSchemaChange = onSchemaChange;
97
101
  }
102
+ #resizeModifyDirtyRanges() {
103
+ let maxNrChanges = 0;
104
+ for (const typeId in this.schemaTypesParsedById) {
105
+ const def = this.schemaTypesParsedById[typeId];
106
+ const lastId = def.lastId;
107
+ const blockCapacity = def.blockCapacity;
108
+ const tmp = lastId - +!(lastId % def.blockCapacity);
109
+ const lastBlock = Math.ceil((((tmp / blockCapacity) | 0) * blockCapacity + 1) / blockCapacity);
110
+ maxNrChanges += lastBlock;
111
+ }
112
+ if (!this.modifyDirtyRanges || this.modifyDirtyRanges.length < maxNrChanges) {
113
+ const min = Math.max(maxNrChanges * 1.2, 1024) | 0;
114
+ this.modifyDirtyRanges = new Float64Array(min);
115
+ }
116
+ }
98
117
  start(opts) {
99
118
  return start(this, opts);
100
119
  }
101
- save() {
102
- return save(this);
120
+ save(opts) {
121
+ return save(this, false, opts?.forceFullDump ?? false);
103
122
  }
104
123
  createCsmtHashFun = () => {
105
124
  // We can just reuse it as long as we only have one tree.
@@ -420,7 +439,14 @@ export class DbServer {
420
439
  this.dirtyRanges.add(key);
421
440
  i += 8;
422
441
  }
423
- native.modify(data, types, this.dbCtxExternal);
442
+ this.#resizeModifyDirtyRanges();
443
+ native.modify(data, types, this.dbCtxExternal, this.modifyDirtyRanges);
444
+ for (let key of this.modifyDirtyRanges) {
445
+ if (key === 0) {
446
+ break;
447
+ }
448
+ this.dirtyRanges.add(key);
449
+ }
424
450
  }
425
451
  getQueryBuf(buf) {
426
452
  if (this.modifyQueue.length) {
@@ -1,2 +1,21 @@
1
1
  import { DbServer } from './index.js';
2
- export declare function save<T extends boolean>(db: DbServer, sync?: T): T extends true ? void : Promise<void>;
2
+ export type Writelog = {
3
+ ts: number;
4
+ types: {
5
+ [t: number]: {
6
+ lastId: number;
7
+ blockCapacity: number;
8
+ };
9
+ };
10
+ hash: string;
11
+ commonDump: string;
12
+ rangeDumps: {
13
+ [t: number]: {
14
+ file: string;
15
+ hash: string;
16
+ start: number;
17
+ end: number;
18
+ }[];
19
+ };
20
+ };
21
+ export declare function save<T extends boolean>(db: DbServer, sync?: T, forceFullDump?: boolean): T extends true ? void : Promise<void>;
@@ -1,14 +1,27 @@
1
1
  import native from '../native.js';
2
+ import { isMainThread } from 'node:worker_threads';
2
3
  import { writeFile } from 'node:fs/promises';
3
4
  import { join } from 'node:path';
4
- import { destructureCsmtKey, foreachDirtyBlock } from './tree.js';
5
+ import { destructureCsmtKey, foreachBlock, foreachDirtyBlock, makeCsmtKey } from './tree.js';
6
+ import { WRITELOG_FILE } from './index.js';
5
7
  import { writeFileSync } from 'node:fs';
6
8
  import { bufToHex } from '../utils.js';
7
- const WRITELOG_FILE = 'writelog.json';
9
+ import { createTree } from './csmt/tree.js';
8
10
  const COMMON_SDB_FILE = 'common.sdb';
9
11
  const block_sdb_file = (typeId, start, end) => `${typeId}_${start}_${end}.sdb`;
10
- export function save(db, sync = false) {
11
- if (!db.dirtyRanges.size) {
12
+ function saveRange(db, typeId, start, end, hashOut) {
13
+ const file = block_sdb_file(typeId, start, end);
14
+ const path = join(db.fileSystemPath, file);
15
+ const err = native.saveRange(path, typeId, start, end, db.dbCtxExternal, hashOut);
16
+ if (err) {
17
+ // TODO print the error string
18
+ console.error(`Save ${typeId}:${start}-${end} failed: ${err}`);
19
+ return null;
20
+ }
21
+ return file;
22
+ }
23
+ export function save(db, sync = false, forceFullDump = false) {
24
+ if (!(isMainThread && (db.dirtyRanges.size || forceFullDump))) {
12
25
  return;
13
26
  }
14
27
  let err;
@@ -17,29 +30,55 @@ export function save(db, sync = false) {
17
30
  if (err) {
18
31
  console.error(`Save common failed: ${err}`);
19
32
  }
20
- foreachDirtyBlock(db, (mtKey, typeId, start, end) => {
21
- const file = block_sdb_file(typeId, start, end);
22
- const path = join(db.fileSystemPath, file);
23
- const hash = new Uint8Array(16);
24
- err = native.saveRange(path, typeId, start, end, db.dbCtxExternal, hash);
25
- if (err) {
26
- console.error(`Save ${typeId}:${start}-${end} failed: ${err}`);
27
- return; // TODO What to do with the merkle tree in db situation?
28
- }
29
- const data = {
30
- file,
31
- typeId,
32
- start,
33
- end,
34
- };
35
- try {
36
- db.merkleTree.delete(mtKey);
37
- }
38
- catch (err) {
39
- // console.error({ err })
33
+ if (forceFullDump) {
34
+ // We just rebuild the whole tree
35
+ db.merkleTree = createTree(db.createCsmtHashFun); // TODO This could be somewhere else.
36
+ for (const key in db.schemaTypesParsed) {
37
+ const def = db.schemaTypesParsed[key];
38
+ foreachBlock(db, def, (start, end, _hash) => {
39
+ const typeId = def.id;
40
+ const hash = new Uint8Array(16);
41
+ const file = saveRange(db, typeId, start, end, hash);
42
+ if (!file) {
43
+ // The previous state should remain in the merkle tree for
44
+ // load and sync purposes.
45
+ return;
46
+ }
47
+ const mtKey = makeCsmtKey(typeId, start);
48
+ const data = {
49
+ file,
50
+ typeId,
51
+ start,
52
+ end,
53
+ };
54
+ db.merkleTree.insert(mtKey, hash, data);
55
+ });
40
56
  }
41
- db.merkleTree.insert(mtKey, hash, data);
42
- });
57
+ }
58
+ else {
59
+ foreachDirtyBlock(db, (mtKey, typeId, start, end) => {
60
+ const hash = new Uint8Array(16);
61
+ const file = saveRange(db, typeId, start, end, hash);
62
+ if (!file) {
63
+ // The previous state should remain in the merkle tree for
64
+ // load and sync purposes.
65
+ return;
66
+ }
67
+ const data = {
68
+ file,
69
+ typeId,
70
+ start,
71
+ end,
72
+ };
73
+ try {
74
+ db.merkleTree.delete(mtKey);
75
+ }
76
+ catch (err) {
77
+ // console.error({ err })
78
+ }
79
+ db.merkleTree.insert(mtKey, hash, data);
80
+ });
81
+ }
43
82
  db.dirtyRanges.clear();
44
83
  const types = {};
45
84
  const rangeDumps = {};
@@ -61,11 +100,8 @@ export function save(db, sync = false) {
61
100
  types,
62
101
  commonDump: COMMON_SDB_FILE,
63
102
  rangeDumps,
103
+ hash: bufToHex(db.merkleTree.getRoot()?.hash ?? new Uint8Array(0)),
64
104
  };
65
- const mtRoot = db.merkleTree.getRoot();
66
- if (mtRoot) {
67
- data.hash = bufToHex(mtRoot.hash);
68
- }
69
105
  const filePath = join(db.fileSystemPath, WRITELOG_FILE);
70
106
  const content = JSON.stringify(data);
71
107
  return sync ? writeFileSync(filePath, content) : writeFile(filePath, content);
@@ -1,19 +1,16 @@
1
1
  import { stringHash } from '@saulx/hash';
2
- import { DbWorker } from './index.js';
2
+ import { DbWorker, SCHEMA_FILE, WRITELOG_FILE } from './index.js';
3
3
  import native from '../native.js';
4
4
  import { rm, mkdir, readFile } from 'node:fs/promises';
5
5
  import { join } from 'node:path';
6
6
  import { createTree, hashEq } from './csmt/index.js';
7
- import { foreachBlock } from './tree.js';
7
+ import { foreachBlock, makeCsmtKey } from './tree.js';
8
8
  import { availableParallelism } from 'node:os';
9
9
  import exitHook from 'exit-hook';
10
10
  import './worker.js';
11
11
  import { save } from './save.js';
12
12
  import { DEFAULT_BLOCK_CAPACITY } from '@based/schema/def';
13
13
  import { bufToHex, hexToBuf } from '../utils.js';
14
- const SCHEMA_FILE = 'schema.json';
15
- const WRITELOG_FILE = 'writelog.json';
16
- const makeCsmtKey = (typeId, start) => typeId * 4294967296 + start;
17
14
  export async function start(db, opts) {
18
15
  const path = db.fileSystemPath;
19
16
  const id = stringHash(path) >>> 0;
@@ -22,8 +19,6 @@ export async function start(db, opts) {
22
19
  await rm(path, { recursive: true, force: true }).catch(noop);
23
20
  }
24
21
  await mkdir(path, { recursive: true }).catch(noop);
25
- // not doing db yet
26
- // db.modifyBuf = new SharedArrayBuffer(db.maxModifySize)
27
22
  db.dbCtxExternal = native.start(id);
28
23
  let writelog = null;
29
24
  try {
@@ -94,7 +89,7 @@ export async function start(db, opts) {
94
89
  const oldHash = hexToBuf(writelog.hash);
95
90
  const newHash = db.merkleTree.getRoot()?.hash;
96
91
  if (!hashEq(oldHash, newHash)) {
97
- console.error(`WARN: CSMT hash mismatch: ${writelog.hash} != ${bufToHex(newHash)}`);
92
+ console.error(`WARN: CSMT hash mismatch. expected: ${writelog.hash} actual: ${bufToHex(newHash)}`);
98
93
  }
99
94
  }
100
95
  // start workers
@@ -105,7 +100,7 @@ export async function start(db, opts) {
105
100
  db.workers[i] = new DbWorker(address, db);
106
101
  }
107
102
  if (!opts?.hosted) {
108
- db.unlistenExit = exitHook(async (signal) => {
103
+ db.unlistenExit = exitHook((signal) => {
109
104
  const blockSig = () => { };
110
105
  const signals = ['SIGINT', 'SIGTERM', 'SIGHUP'];
111
106
  // A really dumb way to block signals temporarily while saving.
@@ -9,5 +9,5 @@ export type CsmtNodeRange = {
9
9
  export declare const destructureCsmtKey: (key: number) => number[];
10
10
  export declare const makeCsmtKey: (typeId: number, start: number) => number;
11
11
  export declare const makeCsmtKeyFromNodeId: (typeId: number, blockCapacity: number, nodeId: number) => number;
12
- export declare function foreachBlock(db: DbServer, def: SchemaTypeDef, cb: (start: number, end: number, hash: Uint8Array) => void): Promise<void>;
12
+ export declare function foreachBlock(db: DbServer, def: SchemaTypeDef, cb: (start: number, end: number, hash: Uint8Array) => void): void;
13
13
  export declare function foreachDirtyBlock(db: DbServer, cb: (mtKey: number, typeId: number, start: number, end: number) => void): Promise<void>;
@@ -8,7 +8,7 @@ export const makeCsmtKeyFromNodeId = (typeId, blockCapacity, nodeId) => {
8
8
  const tmp = nodeId - +!(nodeId % blockCapacity);
9
9
  return typeId * 4294967296 + ((tmp / blockCapacity) | 0) * blockCapacity + 1;
10
10
  };
11
- export async function foreachBlock(db, def, cb) {
11
+ export function foreachBlock(db, def, cb) {
12
12
  const step = def.blockCapacity;
13
13
  for (let start = 1; start <= def.lastId; start += step) {
14
14
  const end = start + step - 1;
@@ -5,3 +5,9 @@ export declare function concatUint8Arr(bufs: Uint8Array[], totalByteLength?: num
5
5
  export declare const bufToHex: (a: Uint8Array) => string;
6
6
  export declare const hexToBuf: (s: string) => Uint8Array;
7
7
  export declare const base64encode: (a: Uint8Array, lineMax?: number) => string;
8
+ export declare const readDoubleLE: (val: Uint8Array, offset: number) => number;
9
+ export declare const readFloatLE: (val: Uint8Array, offset: number) => number;
10
+ export declare const readUint32: (val: Uint8Array, offset: number) => number;
11
+ export declare const readInt32: (val: Uint8Array, offset: number) => number;
12
+ export declare const readInt16: (val: Uint8Array, offset: number) => number;
13
+ export declare const readUint16: (val: Uint8Array, offset: number) => number;
package/dist/src/utils.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { encodeBase64 } from '@saulx/utils';
2
- const native = (typeof window === 'undefined') ? (await import('./native.js')).default : null;
2
+ const native = typeof window === 'undefined' ? (await import('./native.js')).default : null;
3
3
  export const DECODER = new TextDecoder('utf-8');
4
4
  export const ENCODER = new TextEncoder();
5
5
  export const equals = (aB, bB) => {
@@ -28,7 +28,8 @@ export const equals = (aB, bB) => {
28
28
  }
29
29
  };
30
30
  export function concatUint8Arr(bufs, totalByteLength) {
31
- totalByteLength = totalByteLength ?? bufs.reduce((acc, cur) => acc + cur.byteLength, 0);
31
+ totalByteLength =
32
+ totalByteLength ?? bufs.reduce((acc, cur) => acc + cur.byteLength, 0);
32
33
  const res = new Uint8Array(totalByteLength);
33
34
  let off = 0;
34
35
  for (let i = 0; i < bufs.length; i++) {
@@ -62,12 +63,12 @@ const intMap = {
62
63
  '7': 0x7,
63
64
  '8': 0x8,
64
65
  '9': 0x9,
65
- 'a': 0xa,
66
- 'b': 0xb,
67
- 'c': 0xc,
68
- 'd': 0xd,
69
- 'e': 0xe,
70
- 'f': 0xf,
66
+ a: 0xa,
67
+ b: 0xb,
68
+ c: 0xc,
69
+ d: 0xd,
70
+ e: 0xe,
71
+ f: 0xf,
71
72
  };
72
73
  // Uint8Array.fromHex() and Uint8Array.toHex() are not available in V8
73
74
  // https://issues.chromium.org/issues/42204568
@@ -85,7 +86,7 @@ function base64OutLen(n, lineMax) {
85
86
  let olen;
86
87
  /* This version would be with padding but we don't pad */
87
88
  //olen = n * 4 / 3 + 4; /* 3-byte blocks to 4-byte */
88
- olen = ((4 * n / 3) + 3) & ~3;
89
+ olen = ((4 * n) / 3 + 3) & ~3;
89
90
  olen += lineMax > 0 ? olen / lineMax : 0; // line feeds
90
91
  return olen;
91
92
  }
@@ -99,4 +100,75 @@ export const base64encode = (a, lineMax = 72) => {
99
100
  return DECODER.decode(native.base64encode(tmp, a, lineMax));
100
101
  }
101
102
  };
103
+ export const readDoubleLE = (val, offset) => {
104
+ const low = (val[offset] |
105
+ (val[offset + 1] << 8) |
106
+ (val[offset + 2] << 16) |
107
+ (val[offset + 3] << 24)) >>>
108
+ 0;
109
+ const high = (val[offset + 4] |
110
+ (val[offset + 5] << 8) |
111
+ (val[offset + 6] << 16) |
112
+ (val[offset + 7] << 24)) >>>
113
+ 0;
114
+ const sign = high >>> 31 ? -1 : 1;
115
+ let exponent = (high >>> 20) & 0x7ff;
116
+ let fraction = (high & 0xfffff) * 2 ** 32 + low;
117
+ if (exponent === 0x7ff) {
118
+ if (fraction === 0)
119
+ return sign * Infinity;
120
+ return NaN;
121
+ }
122
+ if (exponent === 0) {
123
+ if (fraction === 0)
124
+ return sign * 0;
125
+ exponent = 1;
126
+ }
127
+ else {
128
+ fraction += 2 ** 52;
129
+ }
130
+ return sign * fraction * 2 ** (exponent - 1075);
131
+ };
132
+ export const readFloatLE = (val, offset) => {
133
+ const bits = val[offset] |
134
+ (val[offset + 1] << 8) |
135
+ (val[offset + 2] << 16) |
136
+ (val[offset + 3] << 24);
137
+ const sign = bits >>> 31 ? -1 : 1;
138
+ let exponent = (bits >>> 23) & 0xff;
139
+ let fraction = bits & 0x7fffff;
140
+ if (exponent === 0xff) {
141
+ if (fraction === 0)
142
+ return sign * Infinity;
143
+ return NaN;
144
+ }
145
+ if (exponent === 0) {
146
+ if (fraction === 0)
147
+ return sign * 0;
148
+ exponent = 1;
149
+ }
150
+ else {
151
+ fraction |= 0x800000;
152
+ }
153
+ return sign * fraction * 2 ** (exponent - 150);
154
+ };
155
+ export const readUint32 = (val, offset) => {
156
+ return ((val[offset] |
157
+ (val[offset + 1] << 8) |
158
+ (val[offset + 2] << 16) |
159
+ (val[offset + 3] << 24)) >>>
160
+ 0);
161
+ };
162
+ export const readInt32 = (val, offset) => {
163
+ return (val[offset] |
164
+ (val[offset + 1] << 8) |
165
+ (val[offset + 2] << 16) |
166
+ (val[offset + 3] << 24));
167
+ };
168
+ export const readInt16 = (val, offset) => {
169
+ return ((val[offset] | (val[offset + 1] << 8)) << 16) >> 16;
170
+ };
171
+ export const readUint16 = (val, offset) => {
172
+ return (val[offset] | (val[offset + 1] << 8)) >>> 0;
173
+ };
102
174
  //# sourceMappingURL=utils.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@based/db",
3
- "version": "0.0.24",
3
+ "version": "0.0.26",
4
4
  "license": "MIT",
5
5
  "type": "module",
6
6
  "main": "./dist/src/index.js",
@@ -32,9 +32,9 @@
32
32
  "basedDbNative.cjs"
33
33
  ],
34
34
  "dependencies": {
35
- "@based/schema": "5.0.0-alpha.7",
35
+ "@based/schema": "5.0.0-alpha.8",
36
36
  "@saulx/hash": "^3.0.0",
37
- "@saulx/utils": "^4.3.2",
37
+ "@saulx/utils": "^6.1.1",
38
38
  "exit-hook": "^4.0.0",
39
39
  "picocolors": "^1.1.0",
40
40
  "@based/crc32c": "^1.0.0"
@@ -50,4 +50,4 @@
50
50
  "tar": "^7.4.3",
51
51
  "typescript": "^5.6.3"
52
52
  }
53
- }
53
+ }