@wxn0brp/db-storage-bin 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,183 @@
1
+ import { BinManager, CollectionMeta } from ".";
2
+ import { getFileCrc } from "../crc32";
3
+ import { _log } from "../log";
4
+ import { findFreeSlot } from "./data";
5
+ import { HEADER_SIZE, VERSION } from "./static";
6
+ import { detectCollisions, pushToFreeList, roundUpCapacity, writeData } from "./utils";
7
+
8
/** A contiguous byte region of the file, used for free-space bookkeeping. */
export interface Block {
    offset: number;   // byte offset of the region
    capacity: number; // size of the region in bytes
};

/** In-memory view of the file header and its encoded payload. */
export interface FileMeta {
    collections: CollectionMeta[]; // name/offset/capacity of every stored collection
    freeList: Block[];             // reusable gaps left by moved or removed data
    fileSize: number;              // total file size in bytes (tracked, not re-stat'ed)
    payloadLength: number;         // encoded header-payload length in bytes
    payloadOffset: number;         // payload position, relative to HEADER_SIZE
    blockSize: number;             // allocation granularity used by roundUpCapacity
}
21
+
22
/**
 * Opens (or initializes) the database file backing `cmp` and loads its header.
 *
 * Header layout (little-endian u32s): version @0, payload length @4,
 * payload offset @8, block size @12 (a CRC field @16 is handled by getFileCrc).
 *
 * @param cmp - manager whose `fd` is already open; `cmp.meta` is (re)created here.
 * @returns the populated FileMeta.
 * @throws on unsupported version, invalid payload bounds, or (crc mode 2) CRC mismatch.
 */
export async function openFile(cmp: BinManager) {
    const { fd, options } = cmp;
    const stats = await fd.stat();
    const fileSize = stats.size;
    await _log(2, "File size:", fileSize);

    // Start from a fresh in-memory meta; fields are overwritten from disk below.
    const meta: FileMeta = {
        collections: [],
        freeList: [],
        fileSize,
        payloadLength: 0,
        payloadOffset: 0,
        blockSize: options.preferredSize ?? 256,
    }
    cmp.meta = meta;

    // Brand-new (or truncated) file: write an empty header and stop.
    if (fileSize < HEADER_SIZE) {
        await _log(2, "Initializing new file header");
        await saveHeaderAndPayload(cmp);
        await _log(6, "Header initialized with size:", HEADER_SIZE);
        return meta;
    }

    const headerBuf = Buffer.alloc(HEADER_SIZE);
    await fd.read(headerBuf, 0, HEADER_SIZE, 0);
    await _log(6, "Header read from file");

    const version = headerBuf.readUInt32LE(0);
    if (version !== VERSION) {
        await _log(6, "err", `Unsupported file version: ${version}`);
        throw new Error(`Unsupported file version ${version}`);
    }
    await _log(2, "File version:", version);

    const payloadLength = headerBuf.readUInt32LE(4);
    meta.payloadLength = payloadLength;
    await _log(6, "Payload length:", payloadLength);

    const payloadOffset = headerBuf.readUInt32LE(8);
    meta.payloadOffset = payloadOffset;
    await _log(6, "Payload offset:", payloadOffset);

    // For existing files the on-disk block size wins over options.preferredSize.
    const blockSize = headerBuf.readUInt32LE(12);
    meta.blockSize = blockSize;
    await _log(2, "Block size:", blockSize);

    // CRC mode: 0 = off, 1 = warn on mismatch, 2 = throw on mismatch.
    if (options.crc) {
        const { computedCrc, storedCrc } = await getFileCrc(fd);
        // A stored CRC of 0 means "no checksum recorded" and always passes.
        const validCrc = computedCrc === storedCrc || storedCrc === 0;
        await _log(2, "CRC:", computedCrc, "Needed CRC:", storedCrc, "Valid:", validCrc);
        if (storedCrc === 0) {
            await _log(1, "Warning: CRC is zero, CRC will not be checked");
        }
        if (!validCrc) {
            await _log(0, "err", "Invalid CRC");
            if (options.crc === 2)
                throw new Error("Invalid CRC");
        }
    }

    // The payload must fit inside the file past the fixed header.
    if (payloadOffset + payloadLength > fileSize - HEADER_SIZE) {
        await _log(6, "err", "Invalid payload length");
        throw new Error("Invalid payload length");
    }

    // Empty payload: header is valid but there are no collections yet.
    if (payloadLength === 0) {
        await _log(6, "Empty payload, initializing collections and freeList");
        return meta;
    }

    await readHeaderPayload(cmp);
    return meta;
}
95
+
96
+ export async function readHeaderPayload(cmp: BinManager) {
97
+ const { fd, meta } = cmp;
98
+ const { payloadLength, payloadOffset } = meta;
99
+
100
+ const payloadBuf = Buffer.alloc(payloadLength);
101
+ const { bytesRead } = await fd.read(payloadBuf, 0, payloadLength, HEADER_SIZE + payloadOffset);
102
+ await _log(6, `Payload header read, bytesRead: ${bytesRead}`);
103
+
104
+ if (bytesRead < payloadLength) {
105
+ await _log(6, "err", `Incomplete payload header read: expected ${payloadLength} bytes, got ${bytesRead}`);
106
+ throw new Error(`Incomplete payload header read: expected ${payloadLength} bytes, got ${bytesRead}`);
107
+ }
108
+
109
+ const obj = await cmp.options.format.decode(payloadBuf) as {
110
+ c: [string, number, number][];
111
+ f: [number, number][];
112
+ };
113
+
114
+ meta.collections = (obj.c || []).map(([name, offset, capacity]) => ({ name, offset, capacity }));
115
+ meta.freeList = (obj.f || []).map(([offset, capacity]) => ({ offset, capacity }));
116
+
117
+ await _log(6, "Collections and freeList loaded", meta);
118
+ }
119
+
120
+ export function getHeaderPayload(meta: FileMeta) {
121
+ return {
122
+ c: meta.collections.map(({ name, offset, capacity }) => ([name, offset, capacity])),
123
+ f: meta.freeList.map(({ offset, capacity }) => [offset, capacity]),
124
+ };
125
+ }
126
+
127
/**
 * Serializes meta (collections + free list) and writes the header and its
 * payload to disk, relocating the payload when it would overlap a collection.
 *
 * @param cmp - manager with an open fd and a populated meta.
 * @param recursion - internal flag: true on the single retry after relocation;
 *   it skips findFreeSlot so the payload is appended at the end of the file.
 * @throws if the file is not open or the encoded payload exceeds 64 KiB.
 */
export async function saveHeaderAndPayload(cmp: BinManager, recursion = false) {
    const { fd, meta, options } = cmp;
    if (!fd) throw new Error("File not open");

    const { collections, freeList, fileSize } = meta;
    await _log(6, "Saving header payload:", collections, freeList);

    const payloadObj = getHeaderPayload(meta);

    const payloadBuf = Buffer.from(await cmp.options.format.encode(payloadObj));
    // Hard cap on the encoded header payload size.
    if (payloadBuf.length > 64 * 1024) {
        console.error("Header payload too large");
        throw new Error("Header payload too large");
    }

    await _log(6, "Header payload length:", payloadBuf.length);

    // Header layout: version @0, payload length @4, payload offset @8, block size @12.
    const headerBuf = Buffer.alloc(HEADER_SIZE);
    headerBuf.writeUInt32LE(VERSION, 0);
    headerBuf.writeUInt32LE(payloadBuf.length, 4);
    headerBuf.writeUInt32LE(meta.payloadOffset, 8);
    headerBuf.writeUInt32LE(meta.blockSize, 12);
    meta.payloadLength = payloadBuf.length;

    // CRC of the file as it is on disk *before* this save is stamped @16.
    // NOTE(review): the payload written below is not covered by this CRC —
    // confirm the ordering is intended (close() re-stamps the CRC anyway).
    if (options.crc) {
        const { computedCrc: crc } = await getFileCrc(fd);
        headerBuf.writeUInt32LE(crc, 16);
    }

    await _log(6, "Writing header:", headerBuf.toString("hex"));

    // Write header
    await fd.write(headerBuf, 0, HEADER_SIZE, 0);
    // Write payload
    const roundPayload = roundUpCapacity(meta, payloadBuf.length);

    // If the payload region would overwrite a collection, relocate the payload
    // and retry exactly once (recursion=true disables findFreeSlot to bound it).
    if (detectCollisions(meta, HEADER_SIZE + meta.payloadOffset, roundPayload)) {
        await _log(2, "Collision detected");
        const slot = !recursion && await findFreeSlot(cmp, roundPayload);
        if (slot) {
            // Free-list slots are absolute offsets; payloadOffset is header-relative.
            meta.payloadOffset = slot.offset - HEADER_SIZE;
        } else {
            // No free slot: append at the current end of the file.
            meta.payloadOffset = meta.fileSize - HEADER_SIZE;
            meta.fileSize += roundPayload;
        }
        // NOTE(review): this records the NEW payload region in the free list —
        // presumably so the space is reclaimable after the payload moves again;
        // confirm it should not record the OLD region instead.
        pushToFreeList(meta, meta.payloadOffset, roundPayload);

        return await saveHeaderAndPayload(cmp, true);
    }

    await writeData(fd, HEADER_SIZE + meta.payloadOffset, payloadBuf, roundPayload);

    await _log(6, "Payload written");

    // Update file size if header + payload bigger
    // NOTE(review): this ignores meta.payloadOffset; for a relocated payload the
    // true end is HEADER_SIZE + payloadOffset + roundPayload — confirm.
    meta.fileSize = Math.max(fileSize, HEADER_SIZE + roundPayload);
}
@@ -0,0 +1,109 @@
1
+ import * as msgpack from "@msgpack/msgpack";
2
+ import { access, constants, FileHandle, open } from "fs/promises";
3
+ import { getFileCrc } from "../crc32";
4
+ import { _log } from "../log";
5
+ import { readLogic, writeLogic } from "./data";
6
+ import { FileMeta, openFile } from "./head";
7
+ import { optimize } from "./optimize";
8
+ import { removeCollection } from "./rm";
9
+
10
/**
 * Opens `path` for read+write. The catch covers both a missing file and a
 * failed "r+" open, falling back to "w+" (create/truncate) in either case.
 * The _log call is deliberately not awaited (fire-and-forget).
 */
async function safeOpen(path: string) {
    try {
        await access(path, constants.F_OK);
        return await open(path, "r+");
    } catch {
        _log(1, "Creating new file");
        return await open(path, "w+");
    }
}
19
+
20
/** Location and reserved size of one named collection inside the file. */
export interface CollectionMeta {
    name: string;     // collection name (unique key in the header payload)
    offset: number;   // absolute byte offset of the collection's data
    capacity: number; // reserved size in bytes for this collection
}

/** Tunable behavior for a BinManager instance. */
export interface Options {
    preferredSize: number; // block-size granularity for new files; must be > 0
    /**
     * 0 - crc off
     * 1 - warn if error
     * 2 - throw if error
     */
    crc: number;
    overwriteRemovedCollection: boolean; // zero-fill a collection's bytes when it is removed
    format: {
        // Codec for the header payload (msgpack by default).
        encode(data: any): Promise<Parameters<typeof Buffer.from>[0]>;
        decode(data: Buffer): Promise<any>;
    }
}
40
+
41
+ export class BinManager {
42
+ public fd: null | FileHandle = null;
43
+ public meta: FileMeta;
44
+ public options: Options;
45
+
46
+ /**
47
+ * Constructs a new BinManager instance.
48
+ * @param path - File path.
49
+ * @param [preferredSize=512] - The preferred block size for the database. Must be a positive number (preferredSize > 0)
50
+ * @throws If the path is not provided, or the preferred size is
51
+ * not a positive number.
52
+ */
53
+ constructor(public path: string, options?: Partial<Options>) {
54
+ if (!path) throw new Error("Path not provided");
55
+
56
+ this.options = {
57
+ preferredSize: 512,
58
+ crc: 2,
59
+ overwriteRemovedCollection: false,
60
+ format: {
61
+ encode: async (data: any) => msgpack.encode(data),
62
+ decode: async (data: Buffer) => msgpack.decode(data)
63
+ },
64
+ ...options
65
+ }
66
+
67
+ if (!this.options.preferredSize || this.options.preferredSize <= 0) throw new Error("Preferred size not provided");
68
+ }
69
+
70
+ async open() {
71
+ this.fd = await safeOpen(this.path);
72
+ await openFile(this);
73
+ }
74
+
75
+ async close() {
76
+ if (this.fd) {
77
+ const buff = Buffer.alloc(8);
78
+ if (this.options.crc) {
79
+ const { computedCrc: crc } = await getFileCrc(this.fd);
80
+ buff.writeUInt32LE(crc, 0);
81
+ } else {
82
+ buff.fill(0, 0, 8);
83
+ }
84
+ await this.fd.write(buff, 0, 8, 16);
85
+ await this.fd.close();
86
+ this.fd = null;
87
+ }
88
+ }
89
+
90
+ async write(collection: string, data: object[]) {
91
+ if (!this.fd) throw new Error("File not open");
92
+ await writeLogic(this, collection, data);
93
+ }
94
+
95
+ async read(collection: string) {
96
+ if (!this.fd) throw new Error("File not open");
97
+ return await readLogic(this, collection);
98
+ }
99
+
100
+ async optimize() {
101
+ if (!this.fd) throw new Error("File not open");
102
+ await optimize(this);
103
+ }
104
+
105
+ async removeCollection(collection: string) {
106
+ if (!this.fd) throw new Error("File not open");
107
+ await removeCollection(this, collection);
108
+ }
109
+ }
@@ -0,0 +1,46 @@
1
+ import { unlink } from "fs/promises";
2
+ import { BinManager } from ".";
3
+ import { saveHeaderAndPayload } from "./head";
4
+ import { HEADER_SIZE } from "./static";
5
+ import { readData, roundUpCapacity, writeData } from "./utils";
6
+ import { _log } from "../log";
7
+
8
/**
 * Compacts the database: reads every collection into memory, deletes and
 * recreates the file, then writes the collections back contiguously.
 * WARNING: all collection data is buffered in memory at once, and the old
 * file is unlinked before the new one is written — a crash mid-optimize
 * loses the database.
 */
export async function optimize(cmp: BinManager) {
    await _log(3, "Starting database optimization");
    const collections = cmp.meta.collections;

    // Snapshot every collection's raw bytes (4-byte LE length prefix + body).
    const allData = new Map<string, Buffer>();
    for (const { name, offset } of collections) {
        await _log(6, "Reading collection for optimization:", name);
        const len = await readData(cmp.fd, offset, 4);
        const data = await readData(cmp.fd, offset + 4, len.readInt32LE(0));
        allData.set(name, data);
    }

    await _log(5, "Closing file for optimization");
    await cmp.close();
    await _log(6, "Deleting old database file for optimization");
    await unlink(cmp.path);
    // NOTE(review): fixed 100 ms pause — presumably to let the unlink settle
    // on some filesystems before re-creating the file; confirm it is needed.
    await new Promise(resolve => setTimeout(resolve, 100));
    await _log(5, "Re-opening database file for optimization");
    await cmp.open();

    // Place data after the (rounded) header payload area, plus one spare block.
    let offset = roundUpCapacity(cmp.meta, cmp.meta.payloadLength + HEADER_SIZE) + cmp.meta.blockSize;
    for (const [collection, data] of allData) {
        await _log(6, "Writing optimized collection:", collection);
        // Capacity covers prefix + body, rounded up to the block size.
        const len = roundUpCapacity(cmp.meta, data.length + 4);
        const buf = Buffer.alloc(4);
        buf.writeInt32LE(data.length, 0);
        await writeData(cmp.fd, offset, buf, 4);
        // NOTE(review): padding to `len` starting at offset+4 reaches 4 bytes
        // past the recorded capacity; the next iteration overwrites it, so it
        // appears benign — confirm.
        await writeData(cmp.fd, offset + 4, data, len);
        cmp.meta.collections.push({
            name: collection,
            offset,
            capacity: len
        });
        offset += len;
    }

    // Persist the rebuilt collection table.
    // NOTE(review): cmp.meta.fileSize is not advanced to the last offset here;
    // confirm saveHeaderAndPayload's size handling covers it.
    await saveHeaderAndPayload(cmp);
    await _log(3, "Database optimization complete");
}
package/src/bin/rm.ts ADDED
@@ -0,0 +1,30 @@
1
+ import { BinManager } from ".";
2
+ import { findCollection } from "./data";
3
+ import { saveHeaderAndPayload } from "./head";
4
+ import { writeData } from "./utils";
5
+
6
+ export async function removeCollection(cmp: BinManager, collection: string) {
7
+ const { meta, fd, options } = cmp;
8
+ const collectionMeta = findCollection(cmp, collection);
9
+ if (!collectionMeta) throw new Error("Collection not found");
10
+
11
+ if (meta.collections.length === 1) {
12
+ meta.collections = [];
13
+ meta.freeList = [];
14
+ await fd.truncate(0);
15
+ await saveHeaderAndPayload(cmp);
16
+ return;
17
+ }
18
+
19
+ meta.collections.splice(meta.collections.findIndex(c => c.name === collection), 1);
20
+ meta.freeList.push({
21
+ offset: collectionMeta.offset,
22
+ capacity: collectionMeta.capacity
23
+ });
24
+
25
+ if (options.overwriteRemovedCollection) {
26
+ await writeData(fd, collectionMeta.offset, Buffer.alloc(collectionMeta.capacity), collectionMeta.capacity);
27
+ }
28
+
29
+ await saveHeaderAndPayload(cmp);
30
+ }
@@ -0,0 +1,2 @@
1
// Fixed number of bytes reserved at the start of the file for the header.
export const HEADER_SIZE = 64;
// On-disk format version; openFile rejects files with a different value.
export const VERSION = 1;
@@ -0,0 +1,90 @@
1
+ import { FileHandle } from "fs/promises";
2
+ import { _log } from "../log";
3
+ import { Block, FileMeta } from "./head";
4
+
5
+ export function roundUpCapacity(result: FileMeta, size: number) {
6
+ return Math.ceil(size / result.blockSize) * result.blockSize;
7
+ }
8
+
9
+ export async function writeData(fd: FileHandle, offset: number, data: Buffer, capacity: number) {
10
+ if (!fd) throw new Error("File not open");
11
+ if (data.length > capacity) throw new Error("Data size exceeds capacity");
12
+
13
+ await _log(6, "Writing data at offset:", offset, "length:", data.length, "capacity:", capacity);
14
+
15
+ const { bytesWritten } = await fd.write(data, 0, data.length, offset);
16
+ await _log(5, "Bytes written:", bytesWritten);
17
+
18
+ if (data.length < capacity) {
19
+ const pad = Buffer.alloc(capacity - data.length, 0);
20
+ const padStart = offset + data.length;
21
+ await _log(6, "Padding with zeros:", pad.length, "at offset:", padStart);
22
+ const { bytesWritten: padBytesWritten } = await fd.write(pad, 0, pad.length, padStart);
23
+ await _log(6, "Bytes written:", padBytesWritten);
24
+ }
25
+
26
+ await _log(6, "Data written");
27
+ }
28
+
29
+ export async function readData(fd: FileHandle, offset: number, length: number): Promise<Buffer> {
30
+ if (!fd) throw new Error("File not open");
31
+
32
+ await _log(6, "Reading data from offset:", offset, "length:", length);
33
+
34
+ const buf = Buffer.alloc(length);
35
+ const { bytesRead } = await fd.read(buf, 0, length, offset);
36
+
37
+ await _log(5, "Bytes read:", bytesRead);
38
+
39
+ return buf;
40
+ }
41
+
42
+ export function optimizeFreeList(blocks: Block[]): Block[] {
43
+ if (blocks.length <= 1) return blocks;
44
+
45
+ const sorted = [...blocks].sort((a, b) => a.offset - b.offset);
46
+
47
+ const merged: Block[] = [];
48
+ let current = sorted[0];
49
+
50
+ for (let i = 1; i < sorted.length; i++) {
51
+ const next = sorted[i];
52
+
53
+ if (current.offset + current.capacity === next.offset) {
54
+ current = {
55
+ offset: current.offset,
56
+ capacity: current.capacity + next.capacity
57
+ };
58
+ } else {
59
+ merged.push(current);
60
+ current = next;
61
+ }
62
+ }
63
+
64
+ merged.push(current);
65
+
66
+ return merged;
67
+ }
68
+
69
// True when the half-open ranges [start1, end1) and [start2, end2) overlap.
// NOTE(review): despite the name this is a collision/overlap test; the _log
// promise is deliberately not awaited (this helper is synchronous).
function checkCollection(start1: number, end1: number, start2: number, end2: number) {
    _log(6, "Checking collection:", start1, end1, start2, end2);
    return start1 < end2 && start2 < end1;
}
73
+
74
+ export function detectCollisions(result: FileMeta, start: number, size: number, skip: string[] = []) {
75
+ for (const { name, offset, capacity } of result.collections) {
76
+ if (skip.includes(name)) continue;
77
+ if (checkCollection(offset, offset + capacity, start, start + size))
78
+ return true;
79
+ }
80
+
81
+ return false;
82
+ }
83
+
84
+ export function pushToFreeList(result: FileMeta, offset: number, len: number) {
85
+ result.freeList.push({
86
+ offset,
87
+ capacity: roundUpCapacity(result, len),
88
+ });
89
+ result.freeList = optimizeFreeList(result.freeList);
90
+ }
package/src/crc32.ts ADDED
@@ -0,0 +1,44 @@
1
+ import { FileHandle } from "fs/promises";
2
+ import { HEADER_SIZE } from "./bin/static";
3
+
4
+ const CRC32_TABLE = new Uint32Array(256);
5
+
6
+ for (let i = 0; i < 256; i++) {
7
+ let crc = i;
8
+ for (let j = 0; j < 8; j++) {
9
+ crc = (crc & 1) ? (0xEDB88320 ^ (crc >>> 1)) : (crc >>> 1);
10
+ }
11
+ CRC32_TABLE[i] = crc >>> 0;
12
+ }
13
+
14
+ export function crc32(buf: Uint8Array | string, seed = 0xFFFFFFFF): number {
15
+ if (typeof buf === "string") {
16
+ buf = new TextEncoder().encode(buf);
17
+ }
18
+
19
+ let crc = seed ^ 0xFFFFFFFF;
20
+ for (let i = 0; i < buf.length; i++) {
21
+ const byte = buf[i];
22
+ crc = (crc >>> 8) ^ CRC32_TABLE[(crc ^ byte) & 0xFF];
23
+ }
24
+ return (crc ^ 0xFFFFFFFF) >>> 0;
25
+ }
26
+
27
/**
 * Computes the CRC of the whole file and reads the CRC stored in the header
 * at offset 16. The 4 stored-CRC bytes (16..20) are zeroed before hashing so
 * the checksum covers everything except itself.
 *
 * @param short - when true, bail out early (both values 0) if no CRC is stored.
 * @returns { storedCrc, computedCrc }; both 0 for files smaller than the header.
 */
export async function getFileCrc(fd: FileHandle, short = false) {
    const { size } = await fd.stat();
    if (size < HEADER_SIZE) return { storedCrc: 0, computedCrc: 0 };

    // NOTE(review): the whole file is buffered in memory and bytesRead is not
    // checked — very large files and short reads are unhandled; confirm acceptable.
    const buffer = Buffer.alloc(size);
    await fd.read(buffer, 0, size, 0);

    const storedCrc = buffer.readUInt32LE(16);
    if (short && storedCrc === 0) return { storedCrc: 0, computedCrc: 0 };

    // Zero the stored-CRC field so the hash excludes itself.
    buffer.fill(0, 16, 20);
    const computedCrc = crc32(buffer);

    return {
        storedCrc,
        computedCrc
    }
}
package/src/index.ts ADDED
@@ -0,0 +1,20 @@
1
+ import { ValtheraClass } from "@wxn0brp/db-core";
2
+ import { BinFileAction } from "./actions";
3
+ import { BinManager, Options } from "./bin";
4
+
5
+ export * from "./actions";
6
+ export * from "./bin";
7
+
8
/**
 * Convenience factory: wires a BinManager and BinFileAction into a Valthera
 * database instance.
 *
 * @param path - database file path, forwarded to BinManager.
 * @param opts - partial BinManager options.
 * @param init - when true (default), opens/initializes the file before returning.
 * @returns the db facade plus the underlying actions and manager.
 */
export async function createBinValthera(path: string, opts: Partial<Options> = {}, init = true) {
    const mgr = new BinManager(path, opts);
    const actions = new BinFileAction(mgr);
    const db = new ValtheraClass({ dbAction: actions });

    if (init) await actions.init();

    return {
        db,
        actions,
        mgr,
    }
}
package/src/log.ts ADDED
@@ -0,0 +1,13 @@
1
+ const dir = process.cwd() + "/";
2
+ export async function _log(level: number, ...data: any[]) {
3
+ const logLevel = parseInt(process.env.VDB_BIN_LOG_LEVEL || '0', 10);
4
+ if (logLevel < level) return;
5
+
6
+ let line = new Error().stack.split('\n')[3].trim();
7
+ let path = line.slice(line.indexOf("(")).replace(dir, "").replace("(", "").replace(")", "");
8
+ const at = line.slice(3, line.indexOf("(") - 1);
9
+
10
+ if (path.length < 2) path = line.replace(dir, "").replace("at ", ""); // if path is 2 (callback):
11
+
12
+ console.log(`[${level}] ` + "\x1b[36m" + path + ":", "\x1b[33m" + at + "\x1b[0m", ...data);
13
+ }
package/suglite.json ADDED
@@ -0,0 +1,10 @@
1
+ {
2
+ "cmd": "NODE_ENV=development tsc --noEmit && yarn bun",
3
+ "watch": [
4
+ "src"
5
+ ],
6
+ "restart_cmd": "clear",
7
+ "events": {
8
+ "rs": "clear"
9
+ }
10
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,22 @@
1
+ {
2
+ "compilerOptions": {
3
+ "module": "ES2022",
4
+ "target": "ES2022",
5
+ "moduleResolution": "bundler",
6
+ "paths": {},
7
+ "esModuleInterop": true,
8
+ "skipLibCheck": true,
9
+ "outDir": "./dist",
10
+ "declaration": true
11
+ },
12
+ "include": [
13
+ "./src"
14
+ ],
15
+ "exclude": [
16
+ "node_modules"
17
+ ],
18
+ "tsc-alias": {
19
+ "resolveFullPaths": true,
20
+ "verbose": false
21
+ }
22
+ }