jexidb 1.0.6 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.gitattributes +2 -2
- package/README.md +2 -1
- package/dist/Database.cjs +419 -322
- package/package.json +2 -1
- package/src/Database.mjs +115 -106
- package/src/FileHandler.mjs +68 -49
- package/src/Serializer.mjs +4 -1
- package/test/test-v8-compressed.jdb +0 -0
- package/test/test-v8.jdb +0 -0
- package/test/test.mjs +8 -3
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "jexidb",
-  "version": "1.0.6",
+  "version": "1.1.0",
   "description": "JexiDB is a pure JS NPM library for managing data on disk using JSONL efficiently, without the need for a server.",
   "main": "./dist/Database.cjs",
   "module": "./src/Database.mjs",
@@ -23,6 +23,7 @@
     "@babel/preset-env": "^7.25.4"
   },
   "dependencies": {
+    "async-mutex": "^0.5.0",
     "p-limit": "^6.1.0"
   },
   "directories": {
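The only dependency change is async-mutex, which Database.mjs now uses to serialize flush operations. A minimal sketch of the acquire/release pattern the new _flush() relies on (doWrite is a hypothetical stand-in for the buffered write work):

    import { Mutex } from 'async-mutex'

    const mutex = new Mutex()

    async function exclusiveWrite(doWrite) {
      const release = await mutex.acquire() // waits until any in-flight writer releases
      try {
        await doWrite()
      } finally {
        release() // always release, even if the write throws
      }
    }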
package/src/Database.mjs
CHANGED
@@ -2,15 +2,18 @@ import { EventEmitter } from 'events'
 import FileHandler from './FileHandler.mjs'
 import IndexManager from './IndexManager.mjs'
 import Serializer from './Serializer.mjs'
+import { Mutex } from 'async-mutex'
 import fs from 'fs'
 
 export class Database extends EventEmitter {
-  constructor(file, opts={}) {
+  constructor(file, opts = {}) {
     super()
     this.opts = Object.assign({
       v8: false,
-
+      create: true,
       indexes: {},
+      index: { data: {} },
+      includeLinePosition: true,
       compress: false,
       compressIndex: false,
       maxMemoryUsage: 64 * 1024 // 64KB
@@ -22,30 +25,38 @@
     this.indexManager = new IndexManager(this.opts)
     this.indexOffset = 0
     this.writeBuffer = []
+    this.mutex = new Mutex()
   }
 
   use(plugin) {
-    if(this.destroyed) throw new Error('Database is destroyed')
+    if (this.destroyed) throw new Error('Database is destroyed')
     plugin(this)
   }
 
   async init() {
-    if(this.destroyed) throw new Error('Database is destroyed')
-    if(this.initialized) return
-    if(this.initlializing) return await new Promise(resolve => this.once('init', resolve))
+    if (this.destroyed) throw new Error('Database is destroyed')
+    if (this.initialized) return
+    if (this.initlializing) return await new Promise(resolve => this.once('init', resolve))
     this.initializing = true
     try {
-      if(this.opts.clear) {
+      if (this.opts.clear) {
         await this.fileHandler.truncate(0).catch(console.error)
         throw new Error('Cleared, empty file')
       }
-      const lastLine = await this.fileHandler.readLastLine()
-      if(!lastLine || !lastLine.length) {
-
+      const lastLine = await this.fileHandler.readLastLine().catch(() => 0)
+      if (!lastLine || !lastLine.length) {
+        if (this.opts.create) {
+          throw new Error('File does not exists or is a empty file')
+        } else {
+          throw new Error('File is not a valid database file, expected offsets at the end of the file')
+        }
       }
-      const offsets = await this.serializer.deserialize(lastLine, {compress: this.opts.compressIndex})
-      if(!Array.isArray(offsets)) {
-
+      const offsets = await this.serializer.deserialize(lastLine, { compress: this.opts.compressIndex })
+      if (!Array.isArray(offsets)) {
+        if(this.opts.create) {
+          throw new Error('File does not exists or is a empty file')
+        }
+        throw new Error('File is not a valid database file, expected offsets at the end of the file to be an array')
       }
       this.indexOffset = offsets[offsets.length - 2]
       this.offsets = offsets
@@ -53,14 +64,16 @@
       this.offsets = this.offsets.slice(0, -2)
       this.shouldTruncate = true
       let indexLine = await this.fileHandler.readRange(...ptr)
-      const index = await this.serializer.deserialize(indexLine, {compress: this.opts.compressIndex})
+      const index = await this.serializer.deserialize(indexLine, { compress: this.opts.compressIndex })
       index && this.indexManager.load(index)
     } catch (e) {
-      if(Array.isArray(this.offsets)) {
+      if (Array.isArray(this.offsets)) {
        this.offsets = []
       }
       this.indexOffset = 0
-      if(!String(e).includes('empty file')) {
+      if(!this.opts.create && !this.opts.clear) {
+        throw e
+      } else if (!String(e).includes('empty file')) {
        console.error('Error loading database:', e)
       }
     } finally {
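With the reworked init() above, opening a file that is missing or lacks the trailing offsets line now throws unless create (default true) permits starting fresh. A hedged usage sketch (the file name is hypothetical):

    import { Database } from 'jexidb'

    const db = new Database('./data.jdb', { create: false })
    try {
      await db.init() // rejects instead of silently creating an empty database
    } catch (e) {
      console.error('Not a valid database file:', e)
    }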
@@ -71,30 +84,30 @@
   }
 
   async save() {
-    if(this.destroyed) throw new Error('Database is destroyed')
-    if(!this.initialized) throw new Error('Database not initialized')
-    if(this.saving) return new Promise(resolve => this.once('save', resolve))
+    if (this.destroyed) throw new Error('Database is destroyed')
+    if (!this.initialized) throw new Error('Database not initialized')
+    if (this.saving) return new Promise(resolve => this.once('save', resolve))
     this.saving = true
     await this.flush()
     if (!this.shouldSave) return
     this.emit('before-save')
-    const index = Object.assign({data: {}}, this.indexManager.index)
-    for(const field in this.indexManager.index.data) {
-      for(const term in this.indexManager.index.data[field]) {
+    const index = Object.assign({ data: {} }, this.indexManager.index)
+    for (const field in this.indexManager.index.data) {
+      for (const term in this.indexManager.index.data[field]) {
        index.data[field][term] = [...this.indexManager.index.data[field][term]] // set to array
       }
     }
     const offsets = this.offsets.slice(0)
-    const indexString = await this.serializer.serialize(index, {compress: this.opts.compressIndex, linebreak: true}) // force linebreak here to allow 'init' to read last line as offsets correctly
-    for(const field in this.indexManager.index.data) {
-      for(const term in this.indexManager.index.data[field]) {
+    const indexString = await this.serializer.serialize(index, { compress: this.opts.compressIndex, linebreak: true }) // force linebreak here to allow 'init' to read last line as offsets correctly
+    for (const field in this.indexManager.index.data) {
+      for (const term in this.indexManager.index.data[field]) {
        this.indexManager.index.data[field][term] = new Set(index.data[field][term]) // set back to set because of serialization
       }
     }
     offsets.push(this.indexOffset)
     offsets.push(this.indexOffset + indexString.length)
     // save offsets as JSON always to prevent linebreaks on last line, which breaks 'init()'
-    const offsetsString = await this.serializer.serialize(offsets, {json: true, compress: false, linebreak: false})
+    const offsetsString = await this.serializer.serialize(offsets, { json: true, compress: false, linebreak: false })
     this.writeBuffer.push(indexString)
     this.writeBuffer.push(offsetsString)
     await this.flush() // write the index and offsets
@@ -112,7 +125,7 @@
 
   locate(n) {
     if (this.offsets[n] === undefined) {
-      if(this.offsets[n - 1]) {
+      if (this.offsets[n - 1]) {
        return [this.indexOffset, Number.MAX_SAFE_INTEGER]
       }
       return
@@ -120,58 +133,46 @@
     let end = (this.offsets[n + 1] || this.indexOffset || Number.MAX_SAFE_INTEGER)
     return [this.offsets[n], end]
   }
-
+
   getRanges(map) {
     return (map || Array.from(this.offsets.keys())).map(n => {
-
-
+      const ret = this.locate(n)
+      if (ret !== undefined) return { start: ret[0], end: ret[1], index: n }
     }).filter(n => n !== undefined)
   }
 
-  async readLines(map, ranges) {
-    if(!ranges) ranges = this.getRanges(map)
-    const results = await this.fileHandler.readRanges(ranges, this.serializer.deserialize.bind(this.serializer))
-    let i = 0
-    for(const start in results) {
-      if(!results[start] || results[start]._ !== undefined) continue
-      while(this.offsets[i] != start && i < map.length) i++ // weak comparison as 'start' is a string
-      results[start]._ = map[i++]
-    }
-    return Object.values(results).filter(r => r !== undefined)
-  }
-
   async insert(data) {
-    if(this.destroyed) throw new Error('Database is destroyed')
-    if(!this.initialized) await this.init()
+    if (this.destroyed) throw new Error('Database is destroyed')
+    if (!this.initialized) await this.init()
     if (this.shouldTruncate) {
-
-
+      this.writeBuffer.push(this.indexOffset)
+      this.shouldTruncate = false
     }
-    const line = await this.serializer.serialize(data, {compress: this.opts.compress}) // using Buffer for offsets accuracy
+    const line = await this.serializer.serialize(data, { compress: this.opts.compress, v8: this.opts.v8 }) // using Buffer for offsets accuracy
     const position = this.offsets.length
     this.offsets.push(this.indexOffset)
     this.indexOffset += line.length
-    this.indexManager.add(data, position)
     this.emit('insert', data, position)
     this.writeBuffer.push(line)
-    if(!this.flushing && this.currentWriteBufferSize() > this.opts.maxMemoryUsage) {
+    if (!this.flushing && this.currentWriteBufferSize() > this.opts.maxMemoryUsage) {
      await this.flush()
     }
+    this.indexManager.add(data, position)
     this.shouldSave = true
   }
 
-  currentWriteBufferSize(){
+  currentWriteBufferSize() {
     const lengths = this.writeBuffer.filter(b => Buffer.isBuffer(b)).map(b => b.length)
     return lengths.reduce((a, b) => a + b, 0)
   }
 
   flush() {
-    if(this.flushing) {
+    if (this.flushing) {
      return this.flushing
     }
     return this.flushing = new Promise((resolve, reject) => {
-      if(this.destroyed) return reject(new Error('Database is destroyed'))
-      if(!this.writeBuffer.length) return resolve()
+      if (this.destroyed) return reject(new Error('Database is destroyed'))
+      if (!this.writeBuffer.length) return resolve()
      let err
      this._flush().catch(e => err = e).finally(() => {
        err ? reject(err) : resolve()
@@ -181,17 +182,18 @@
   }
 
   async _flush() {
+    const release = await this.mutex.acquire()
     let fd = await fs.promises.open(this.fileHandler.file, 'a')
     try {
-      while(this.writeBuffer.length) {
+      while (this.writeBuffer.length) {
        let data
        const pos = this.writeBuffer.findIndex(b => typeof b === 'number')
-        if(pos === 0) {
+        if (pos === 0) {
          await fd.close()
          await this.fileHandler.truncate(this.writeBuffer.shift())
          fd = await fs.promises.open(this.fileHandler.file, 'a')
          continue
-        } else if(pos === -1) {
+        } else if (pos === -1) {
          data = Buffer.concat(this.writeBuffer)
          this.writeBuffer.length = 0
        } else {
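_flush() now runs under the mutex, and the write buffer it drains mixes Buffers (payload) with plain numbers that mark "truncate the file to this offset before appending what follows". A simplified, standalone sketch of that drain logic (the buffer contents are illustrative, and the file operations are left as comments):

    const writeBuffer = [Buffer.from('a\n'), 120, Buffer.from('b\n')]
    while (writeBuffer.length) {
      const pos = writeBuffer.findIndex(b => typeof b === 'number')
      if (pos === 0) {
        const offset = writeBuffer.shift() // truncate the file to `offset` here, then keep appending
      } else {
        // append everything up to the next truncate marker (or the whole buffer)
        const chunk = Buffer.concat(pos === -1 ? writeBuffer.splice(0) : writeBuffer.splice(0, pos))
      }
    }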
@@ -201,69 +203,76 @@
        await fd.write(data)
      }
      this.shouldSave = true
-    } catch(err) {
+    } catch (err) {
      console.error('Error flushing:', err)
     } finally {
-
+      let err
+      await fd.close().catch(e => err = e)
+      release()
+      err && console.error('Error closing file:', err)
     }
   }
 
-  async *walk(map, options={}) {
-    if(this.destroyed) throw new Error('Database is destroyed')
-    if(!this.initialized) await this.init()
+  async *walk(map, options = {}) {
+    if (this.destroyed) throw new Error('Database is destroyed')
+    if (!this.initialized) await this.init()
     this.shouldSave && await this.save().catch(console.error)
-    if(this.indexOffset === 0) return
-    if(!Array.isArray(map)) {
+    if (this.indexOffset === 0) return
+    if (!Array.isArray(map)) {
      if (map instanceof Set) {
        map = [...map]
-      } else if(map && typeof map === 'object') {
+      } else if (map && typeof map === 'object') {
        map = [...this.indexManager.query(map, options)]
      } else {
        map = [...Array(this.offsets.length).keys()]
      }
     }
     const ranges = this.getRanges(map)
-    const readSize = 512 * 1024 // 512KB
     const groupedRanges = await this.fileHandler.groupedRanges(ranges)
     const fd = await fs.promises.open(this.fileHandler.file, 'r')
-
-
-
-    if(options.
-
-    }
-
+    try {
+      let count = 0
+      for (const groupedRange of groupedRanges) {
+        if (options.limit && count >= options.limit) {
+          break
+        }
+        for await (const row of this.fileHandler.readGroupedRange(groupedRange, fd)) {
+          if (options.limit && count >= options.limit) {
+            break
+          }
+          const entry = await this.serializer.deserialize(row.line, { compress: this.opts.compress, v8: this.opts.v8 })
+          if (entry === null) continue
+          count++
+          if (options.includeOffsets) {
+            yield { entry, start: row.start, _: row._ || this.offsets.findIndex(n => n === row.start) }
+          } else {
+            if (this.opts.includeLinePosition) {
+              entry._ = row._ || this.offsets.findIndex(n => n === row.start)
+            }
+            yield entry
+          }
        }
      }
+    } finally {
+      await fd.close()
     }
-    await fd.close()
   }
 
-  async query(criteria, options={}) {
-    if(this.destroyed) throw new Error('Database is destroyed')
-    if(!this.initialized) await this.init()
+  async query(criteria, options = {}) {
+    if (this.destroyed) throw new Error('Database is destroyed')
+    if (!this.initialized) await this.init()
     this.shouldSave && await this.save().catch(console.error)
-
-
-
-
-
-
-
-
-
-    }
-    if (options.limit) {
-      results = results.slice(0, options.limit);
-    }
-    return results
-    } else {
-      const matchingLines = await this.indexManager.query(criteria, options)
-      if (!matchingLines || !matchingLines.size) {
-        return []
-      }
-      return await this.query([...matchingLines], options)
+    let results = []
+    for await (const entry of this.walk(criteria, options)) results.push(entry)
+    if (options.orderBy) {
+      const [field, direction = 'asc'] = options.orderBy.split(' ')
+      results.sort((a, b) => {
+        if (a[field] > b[field]) return direction === 'asc' ? 1 : -1
+        if (a[field] < b[field]) return direction === 'asc' ? -1 : 1
+        return 0;
+      })
     }
+    return results
   }
 
   async update(criteria, data, options={}) {
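query() now streams matches through walk() and supports orderBy ('<field> <direction>') plus limit, which walk() applies while reading — so limit caps rows before the sort runs. A hedged usage sketch (field names are hypothetical):

    // top ten rows by name, descending
    const top = await db.query({ group: 'users' }, { orderBy: 'name desc', limit: 10 })

    // walk() can also be consumed directly as an async iterator
    for await (const row of db.walk({ group: 'users' }, { includeOffsets: true })) {
      console.log(row.start, row.entry)
    }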
@@ -283,13 +292,13 @@
     if (!validMatchingLines.size) {
      return []
     }
-
+    let entries = []
+    for await (const entry of this.walk(criteria, options)) entries.push(entry)
     const lines = []
     for(const entry of entries) {
-      let err
      const updated = Object.assign(entry, data)
-      const ret = await this.serializer.serialize(updated)
-
+      const ret = await this.serializer.serialize(updated)
+      lines.push(ret)
     }
     const offsets = []
     let byteOffset = 0, k = 0
@@ -313,17 +322,17 @@
     return entries
   }
 
-  async delete(criteria, options={}) {
+  async delete(criteria, options = {}) {
     if (this.shouldTruncate) {
-
-
+      this.writeBuffer.push(this.indexOffset)
+      this.shouldTruncate = false
     }
-    if(this.destroyed) throw new Error('Database is destroyed')
-    if(!this.initialized) await this.init()
+    if (this.destroyed) throw new Error('Database is destroyed')
+    if (!this.initialized) await this.init()
     this.shouldSave && await this.save().catch(console.error)
     const matchingLines = await this.indexManager.query(criteria, options)
     if (!matchingLines || !matchingLines.size) {
-
+      return 0
     }
     const ranges = this.getRanges([...matchingLines])
     const validMatchingLines = new Set(ranges.map(r => r.index))
package/src/FileHandler.mjs
CHANGED
@@ -24,17 +24,19 @@ export default class FileHandler {
     if(buffer.length > bytesRead) return buffer.subarray(0, bytesRead)
     return buffer
   }
-
+
   async readRanges(ranges, mapper) {
     const lines = {}, limit = pLimit(4)
     const fd = await fs.promises.open(this.file, 'r')
     const groupedRanges = await this.groupedRanges(ranges)
     try {
-
-
-
-
-
+      await Promise.allSettled(groupedRanges.map(async (groupedRange) => {
+        await limit(async () => {
+          for await (const row of this.readGroupedRange(groupedRange, fd)) {
+            lines[row.start] = mapper ? (await mapper(row.line, groupedRange)) : row.line
+          }
+        })
+      }))
     } catch (e) {
      console.error('Error reading ranges:', e)
     } finally {
@@ -43,7 +45,7 @@
     return lines
   }
 
-  async groupedRanges(ranges) {
+  async groupedRanges(ranges) { // expects ordered ranges from Database.getRanges()
     const readSize = 512 * 1024 // 512KB
     const groupedRanges = []
     let currentGroup = []
@@ -79,62 +81,78 @@
     let i = 0, buffer = Buffer.alloc(options.end - options.start)
     const results = {}, { bytesRead } = await fd.read(buffer, 0, options.end - options.start, options.start)
     if(buffer.length > bytesRead) buffer = buffer.subarray(0, bytesRead)
-
-
-
+
+    for (const range of groupedRange) {
+      const startOffset = range.start - options.start;
+      let endOffset = range.end - options.start;
+      if (endOffset > buffer.length) {
+        endOffset = buffer.length;
+      }
+      if (startOffset >= buffer.length) {
+        continue;
+      }
+      const line = buffer.subarray(startOffset, endOffset);
+      if (line.length === 0) continue;
+      yield { line, start: range.start };
     }
 
+
     return results
   }
 
-  async *walk(ranges
+  async *walk(ranges) {
     const fd = await fs.promises.open(this.file, 'r')
-
-
-    for
-
+    try {
+      const groupedRanges = await this.groupedRanges(ranges)
+      for(const groupedRange of groupedRanges) {
+        for await (const row of this.readGroupedRange(groupedRange, fd)) {
+          yield row
+        }
      }
+    } finally {
+      await fd.close()
     }
-    await fd.close()
   }
 
   async replaceLines(ranges, lines) {
-
-    const
-    const
-    const reader = await fs.promises.open(this.file, 'r')
+    const tmpFile = this.file + '.tmp';
+    const writer = await fs.promises.open(tmpFile, 'w+');
+    const reader = await fs.promises.open(this.file, 'r');
     try {
-      let
-
-
-
-
-
-
-      if (lines[i]) {
-        await writer.write(lines[i])
+      let position = 0;
+      let lineIndex = 0;
+
+      for (const range of ranges) {
+        if (position < range.start) {
+          const buffer = await this.readRange(position, range.start);
+          await writer.write(buffer);
        }
-
+        if (lineIndex < lines.length && lines[lineIndex]) {
+          await writer.write(lines[lineIndex]);
+        }
+        position = range.end;
+        lineIndex++;
+      }
+
+      const { size } = await reader.stat();
+      if (position < size) {
+        const buffer = await this.readRange(position, size);
+        await writer.write(buffer);
      }
-
-
-
-      await
-      await writer.write(buffer)
-      await reader.close()
-      await writer.close()
-      closed = true
-      await fs.promises.copyFile(tmpFile, this.file)
+
+      await reader.close();
+      await writer.close();
+      await fs.promises.rename(tmpFile, this.file);
     } catch (e) {
-      console.error('
+      console.error('Error replacing lines:', e);
+      throw e;
     } finally {
-
-
-
-      }
-      await fs.promises.unlink(tmpFile).catch(() => {})
+      await reader.close().catch(() => { });
+      await writer.close().catch(() => { });
+      await fs.promises.unlink(tmpFile).catch(() => { });
     }
   }
+
   async writeData(data, immediate, fd) {
     await fd.write(data)
   }
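replaceLines() now streams the untouched byte ranges and the replacement lines into a temp file and swaps it in with fs.promises.rename, which is atomic for a same-filesystem move, instead of copying over the live file. A minimal sketch of the pattern under that assumption (transform is a hypothetical placeholder):

    import fs from 'fs'

    async function atomicRewrite(file, transform) {
      const tmp = file + '.tmp'
      const data = await fs.promises.readFile(file)
      await fs.promises.writeFile(tmp, transform(data))
      await fs.promises.rename(tmp, file) // readers never observe a half-written file
    }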
@@ -150,18 +168,19 @@
     if (size < 1) throw 'empty file'
     this.size = size
     const bufferSize = 16384
-    let buffer, lastReadSize, readPosition = Math.max(size - bufferSize, 0)
+    let buffer, isFirstRead = true, lastReadSize, readPosition = Math.max(size - bufferSize, 0)
     while (readPosition >= 0) {
      const readSize = Math.min(bufferSize, size - readPosition)
      if (readSize !== lastReadSize) {
        lastReadSize = readSize
        buffer = Buffer.alloc(readSize)
      }
-      const { bytesRead } = await reader.read(buffer, 0, readSize, readPosition)
+      const { bytesRead } = await reader.read(buffer, 0, isFirstRead ? (readSize - 1) : readSize, readPosition)
+      if (isFirstRead) isFirstRead = false
      if (bytesRead === 0) break
-      const newlineIndex = buffer.lastIndexOf(10
+      const newlineIndex = buffer.lastIndexOf(10)
+      const start = readPosition + newlineIndex + 1
      if (newlineIndex !== -1) {
-        const start = readPosition + newlineIndex + 1
        const lastLine = Buffer.alloc(size - start)
        await reader.read(lastLine, 0, size - start, start)
        if (!lastLine || !lastLine.length) {
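The isFirstRead change makes the first backward read stop one byte short of the end of the file, so when the file terminates with a newline, lastIndexOf(10) finds the separator before the last line rather than the trailing terminator itself. A small illustration:

    const buf = Buffer.from('a\nb\n')
    buf.lastIndexOf(10)                              // 3: the trailing terminator
    buf.subarray(0, buf.length - 1).lastIndexOf(10)  // 1: the newline before the last line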
|
package/src/Serializer.mjs
CHANGED
|
@@ -46,6 +46,9 @@ export default class Serializer extends EventEmitter {
|
|
|
46
46
|
}
|
|
47
47
|
|
|
48
48
|
async deserialize(data) {
|
|
49
|
+
if(data.length === 0) {
|
|
50
|
+
return null
|
|
51
|
+
}
|
|
49
52
|
let line, isCompressed, isV8
|
|
50
53
|
const header = data.readUInt8(0)
|
|
51
54
|
const valid = header === 0x00 || header === 0x01 || header === 0x02 || header === 0x03
|
|
@@ -58,7 +61,7 @@ export default class Serializer extends EventEmitter {
|
|
|
58
61
|
try {
|
|
59
62
|
return JSON.parse(data.toString('utf-8').trim())
|
|
60
63
|
} catch (e) {
|
|
61
|
-
throw new Error('Failed to deserialize
|
|
64
|
+
throw new Error('Failed to deserialize JSON data')
|
|
62
65
|
}
|
|
63
66
|
}
|
|
64
67
|
if (isCompressed) {
|
|
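deserialize() now short-circuits on a zero-length buffer and returns null, which walk() above checks for with `if (entry === null) continue`. Previously the very next line would throw, since reading past the end of a Buffer is a range error:

    Buffer.alloc(0).readUInt8(0) // throws RangeError [ERR_OUT_OF_RANGE]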
package/test/test-v8-compressed.jdb
CHANGED
Binary file
package/test/test-v8.jdb
CHANGED
Binary file
package/test/test.mjs
CHANGED
@@ -83,14 +83,19 @@ const runTests = async (id, name, format, opts) => {
   console.assert(pass2, `Round 2 - READ: Test failed: ${character.name} seems to have been teleported out of the database!`);
   if(pass1 && pass2) console.log(`Round 2 - READ: Flawless Victory! All characters inserted successfully, led by ${character.name}.`);
 
-  // 4. Test data update
+  // 4. Test indexes
+  const pass3 = await db.indexManager.readColumnIndex('name').has(character.name)
+  console.assert(pass3, `Round 3 - INDEX: Test failed: ${character.name} is not in the index.`);
+  if(pass3) console.log(`Round 3 - INDEX: Flawless Victory! ${character.name} is in the index.`);
+
+  // 5. Test data update
   await db.update({ id: 1 }, { name: character.name + ' Updated' });
   results = await db.query({ id: 1 });
   const pass4 = results.length === 1 && results[0].name === character.name + ' Updated';
   console.assert(pass4, `Round 3 - UPDATE: Test failed: ${character.updateMessage}`);
   if(pass4) console.log(`Round 3 - UPDATE: Flawless Victory! ${character.name} has been updated successfully.`);
 
-  // 5. Test data deletion
+  // 6. Test data deletion
   await db.delete({ name: character.name + ' Updated' });
   results = await db.query({ id: { '<=': 2 } });
   const pass5 = results.length === 1;
@@ -119,7 +124,7 @@ const runTests = async (id, name, format, opts) => {
 }
 
 async function runAllTests() {
-  const depth =
+  const depth = 10
   let err, i = 1
   let tests = [
     ['json', 'JSON', { indexes: { id: 'number', name: 'string' }, v8: false, compress: false, compressIndex: false }],
|