jexidb 1.1.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +2 -2
- package/README.md +556 -128
- package/dist/FileHandler.js +688 -0
- package/dist/IndexManager.js +353 -0
- package/dist/IntegrityChecker.js +364 -0
- package/dist/JSONLDatabase.js +1132 -0
- package/dist/index.js +598 -0
- package/package.json +65 -59
- package/src/FileHandler.js +674 -0
- package/src/IndexManager.js +363 -0
- package/src/IntegrityChecker.js +379 -0
- package/src/JSONLDatabase.js +1189 -0
- package/src/index.js +594 -0
- package/.gitattributes +0 -2
- package/babel.config.json +0 -5
- package/dist/Database.cjs +0 -1161
- package/src/Database.mjs +0 -376
- package/src/FileHandler.mjs +0 -202
- package/src/IndexManager.mjs +0 -230
- package/src/Serializer.mjs +0 -120
- package/test/README.md +0 -13
- package/test/test-json-compressed.jdb +0 -0
- package/test/test-json.jdb +0 -0
- package/test/test-v8-compressed.jdb +0 -0
- package/test/test-v8.jdb +0 -0
- package/test/test.mjs +0 -173
package/src/Database.mjs
DELETED
|
@@ -1,376 +0,0 @@
|
|
|
1
|
-
import { EventEmitter } from 'events'
|
|
2
|
-
import FileHandler from './FileHandler.mjs'
|
|
3
|
-
import IndexManager from './IndexManager.mjs'
|
|
4
|
-
import Serializer from './Serializer.mjs'
|
|
5
|
-
import { Mutex } from 'async-mutex'
|
|
6
|
-
import fs from 'fs'
|
|
7
|
-
|
|
8
|
-
/**
 * Append-only line-oriented database. Records are serialized one per line;
 * byte offsets of every line are kept in memory (`this.offsets`) and an
 * index (via IndexManager) plus the offsets array are persisted as the last
 * two lines of the file on save().
 *
 * Events: 'init', 'before-save', 'save', 'insert'.
 */
export class Database extends EventEmitter {
  /**
   * @param {string} file - path of the database file
   * @param {object} [opts] - options merged over the defaults below
   */
  constructor(file, opts = {}) {
    super()
    this.opts = Object.assign({
      v8: false,
      create: true,
      indexes: {},
      index: { data: {} },
      includeLinePosition: true,
      compress: false,
      compressIndex: false,
      maxMemoryUsage: 64 * 1024 // 64KB of buffered writes before auto-flush
    }, opts)
    this.offsets = []            // byte offset of each record line
    this.shouldSave = false      // dirty flag: index/offsets need persisting
    this.serializer = new Serializer(this.opts)
    this.fileHandler = new FileHandler(file)
    this.indexManager = new IndexManager(this.opts)
    this.indexOffset = 0         // byte offset where the persisted index starts
    this.writeBuffer = []        // Buffers to append, or numbers meaning "truncate here"
    this.mutex = new Mutex()
  }

  /** Apply a plugin function to this database instance. */
  use(plugin) {
    if (this.destroyed) throw new Error('Database is destroyed')
    plugin(this)
  }

  /**
   * Load offsets and index from the end of the file. Safe to call multiple
   * times; concurrent callers wait for the in-flight init. Emits 'init'.
   */
  async init() {
    if (this.destroyed) throw new Error('Database is destroyed')
    if (this.initialized) return
    // FIX: original tested the misspelled `this.initlializing`, which is never
    // set, so concurrent init() calls each ran the full routine instead of
    // waiting for the first one.
    if (this.initializing) return await new Promise(resolve => this.once('init', resolve))
    this.initializing = true
    try {
      if (this.opts.clear) {
        await this.fileHandler.truncate(0).catch(console.error)
        throw new Error('Cleared, empty file')
      }
      const lastLine = await this.fileHandler.readLastLine().catch(() => 0)
      if (!lastLine || !lastLine.length) {
        if (this.opts.create) {
          throw new Error('File does not exists or is a empty file')
        } else {
          throw new Error('File is not a valid database file, expected offsets at the end of the file')
        }
      }
      // last line of the file is always the JSON offsets array
      const offsets = await this.serializer.deserialize(lastLine, { compress: this.opts.compressIndex })
      if (!Array.isArray(offsets)) {
        if (this.opts.create) {
          throw new Error('File does not exists or is a empty file')
        }
        throw new Error('File is not a valid database file, expected offsets at the end of the file to be an array')
      }
      // the final two entries locate the persisted index line
      this.indexOffset = offsets[offsets.length - 2]
      this.offsets = offsets
      const ptr = this.locate(offsets.length - 2)
      this.offsets = this.offsets.slice(0, -2)
      this.shouldTruncate = true
      const indexLine = await this.fileHandler.readRange(...ptr)
      const index = await this.serializer.deserialize(indexLine, { compress: this.opts.compressIndex })
      index && this.indexManager.load(index)
    } catch (e) {
      if (Array.isArray(this.offsets)) {
        this.offsets = []
      }
      this.indexOffset = 0
      if (!this.opts.create && !this.opts.clear) {
        throw e
      } else if (!String(e).includes('empty file')) {
        console.error('Error loading database:', e)
      }
    } finally {
      this.initializing = false
      this.initialized = true
      this.emit('init')
    }
  }

  /**
   * Persist the index and the offsets array as the last two lines of the
   * file. Concurrent callers wait on the 'save' event.
   */
  async save() {
    if (this.destroyed) throw new Error('Database is destroyed')
    if (!this.initialized) throw new Error('Database not initialized')
    if (this.saving) return new Promise(resolve => this.once('save', resolve))
    this.saving = true
    // FIX: the original early-returned on `!this.shouldSave` with `saving`
    // still true and without emitting 'save', permanently deadlocking every
    // later save() that waited on the event. try/finally guarantees release.
    try {
      await this.flush()
      if (!this.shouldSave) return
      this.emit('before-save')
      const index = Object.assign({ data: {} }, this.indexManager.index)
      for (const field in this.indexManager.index.data) {
        for (const term in this.indexManager.index.data[field]) {
          index.data[field][term] = [...this.indexManager.index.data[field][term]] // set to array
        }
      }
      const offsets = this.offsets.slice(0)
      const indexString = await this.serializer.serialize(index, { compress: this.opts.compressIndex, linebreak: true }) // force linebreak here to allow 'init' to read last line as offsets correctly
      for (const field in this.indexManager.index.data) {
        for (const term in this.indexManager.index.data[field]) {
          this.indexManager.index.data[field][term] = new Set(index.data[field][term]) // set back to set because of serialization
        }
      }
      offsets.push(this.indexOffset)
      offsets.push(this.indexOffset + indexString.length)
      // save offsets as JSON always to prevent linebreaks on last line, which breaks 'init()'
      const offsetsString = await this.serializer.serialize(offsets, { json: true, compress: false, linebreak: false })
      this.writeBuffer.push(indexString)
      this.writeBuffer.push(offsetsString)
      await this.flush() // write the index and offsets
      this.shouldTruncate = true
      this.shouldSave = false
    } finally {
      this.saving = false
      this.emit('save')
    }
  }

  /** Resolve once init() has completed. */
  async ready() {
    if (!this.initialized) {
      await new Promise(resolve => this.once('init', resolve))
    }
  }

  /**
   * Byte range [start, end) of record n, or undefined when n is out of range.
   * For the line right past the last record, the range runs to the index.
   */
  locate(n) {
    if (this.offsets[n] === undefined) {
      if (this.offsets[n - 1]) {
        return [this.indexOffset, Number.MAX_SAFE_INTEGER]
      }
      return
    }
    const end = (this.offsets[n + 1] || this.indexOffset || Number.MAX_SAFE_INTEGER)
    return [this.offsets[n], end]
  }

  /** Map record numbers (default: all) to {start, end, index} ranges. */
  getRanges(map) {
    return (map || Array.from(this.offsets.keys())).map(n => {
      const ret = this.locate(n)
      if (ret !== undefined) return { start: ret[0], end: ret[1], index: n }
    }).filter(n => n !== undefined)
  }

  /** Append one record; buffered writes are flushed past maxMemoryUsage. */
  async insert(data) {
    if (this.destroyed) throw new Error('Database is destroyed')
    if (!this.initialized) await this.init()
    if (this.shouldTruncate) {
      // a number in the write buffer means "truncate the file here first",
      // discarding the previously persisted index/offsets tail
      this.writeBuffer.push(this.indexOffset)
      this.shouldTruncate = false
    }
    const line = await this.serializer.serialize(data, { compress: this.opts.compress, v8: this.opts.v8 }) // using Buffer for offsets accuracy
    const position = this.offsets.length
    this.offsets.push(this.indexOffset)
    this.indexOffset += line.length
    this.emit('insert', data, position)
    this.writeBuffer.push(line)
    if (!this.flushing && this.currentWriteBufferSize() > this.opts.maxMemoryUsage) {
      await this.flush()
    }
    this.indexManager.add(data, position)
    this.shouldSave = true
  }

  /** Total bytes of Buffers currently queued in the write buffer. */
  currentWriteBufferSize() {
    return this.writeBuffer
      .filter(b => Buffer.isBuffer(b))
      .reduce((total, b) => total + b.length, 0)
  }

  /** Drain the write buffer to disk; concurrent callers share one promise. */
  flush() {
    if (this.flushing) {
      return this.flushing
    }
    if (this.destroyed) return Promise.reject(new Error('Database is destroyed'))
    // FIX: the original cached a promise in `this.flushing` and, when the
    // buffer was empty, resolved it without ever clearing the cache, so every
    // later flush() returned the stale resolved promise and nothing was
    // written again.
    if (!this.writeBuffer.length) return Promise.resolve()
    return this.flushing = this._flush().finally(() => {
      this.flushing = false
    })
  }

  /** Serialized (mutex-guarded) writer: appends Buffers, honors truncate marks. */
  async _flush() {
    const release = await this.mutex.acquire()
    let fd = await fs.promises.open(this.fileHandler.file, 'a')
    try {
      while (this.writeBuffer.length) {
        let data
        const pos = this.writeBuffer.findIndex(b => typeof b === 'number')
        if (pos === 0) {
          // truncate marker at the head: reopen after shrinking the file
          await fd.close()
          await this.fileHandler.truncate(this.writeBuffer.shift())
          fd = await fs.promises.open(this.fileHandler.file, 'a')
          continue
        } else if (pos === -1) {
          data = Buffer.concat(this.writeBuffer)
          this.writeBuffer.length = 0
        } else {
          data = Buffer.concat(this.writeBuffer.slice(0, pos))
          this.writeBuffer.splice(0, pos)
        }
        await fd.write(data)
      }
      this.shouldSave = true
    } catch (err) {
      // best-effort: log and keep going so the fd is still closed below
      console.error('Error flushing:', err)
    } finally {
      let err
      await fd.close().catch(e => err = e)
      release()
      err && console.error('Error closing file:', err)
    }
  }

  /**
   * Async-iterate matching records. `map` may be an array/Set of record
   * numbers, a criteria object (routed through the index), or omitted for all.
   */
  async *walk(map, options = {}) {
    if (this.destroyed) throw new Error('Database is destroyed')
    if (!this.initialized) await this.init()
    this.shouldSave && await this.save().catch(console.error)
    if (this.indexOffset === 0) return
    if (!Array.isArray(map)) {
      if (map instanceof Set) {
        map = [...map]
      } else if (map && typeof map === 'object') {
        map = [...this.indexManager.query(map, options)]
      } else {
        map = [...Array(this.offsets.length).keys()]
      }
    }
    const ranges = this.getRanges(map)
    const groupedRanges = await this.fileHandler.groupedRanges(ranges)
    const fd = await fs.promises.open(this.fileHandler.file, 'r')
    try {
      let count = 0
      for (const groupedRange of groupedRanges) {
        if (options.limit && count >= options.limit) {
          break
        }
        for await (const row of this.fileHandler.readGroupedRange(groupedRange, fd)) {
          if (options.limit && count >= options.limit) {
            break
          }
          const entry = await this.serializer.deserialize(row.line, { compress: this.opts.compress, v8: this.opts.v8 })
          if (entry === null) continue
          count++
          if (options.includeOffsets) {
            yield { entry, start: row.start, _: row._ || this.offsets.findIndex(n => n === row.start) }
          } else {
            if (this.opts.includeLinePosition) {
              entry._ = row._ || this.offsets.findIndex(n => n === row.start)
            }
            yield entry
          }
        }
      }
    } finally {
      await fd.close()
    }
  }

  /** Collect walk() results into an array, optionally sorted by 'field dir'. */
  async query(criteria, options = {}) {
    if (this.destroyed) throw new Error('Database is destroyed')
    if (!this.initialized) await this.init()
    this.shouldSave && await this.save().catch(console.error)
    const results = []
    for await (const entry of this.walk(criteria, options)) results.push(entry)
    if (options.orderBy) {
      const [field, direction = 'asc'] = options.orderBy.split(' ')
      results.sort((a, b) => {
        if (a[field] > b[field]) return direction === 'asc' ? 1 : -1
        if (a[field] < b[field]) return direction === 'asc' ? -1 : 1
        return 0
      })
    }
    return results
  }

  /** Merge `data` into every record matching `criteria`; returns the updated entries. */
  async update(criteria, data, options = {}) {
    // FIX: guard checks moved before any writeBuffer mutation, consistent
    // with insert(); the original pushed a truncate marker even when the
    // database was destroyed or uninitialized.
    if (this.destroyed) throw new Error('Database is destroyed')
    if (!this.initialized) await this.init()
    if (this.shouldTruncate) {
      this.writeBuffer.push(this.indexOffset)
      this.shouldTruncate = false
    }
    this.shouldSave && await this.save().catch(console.error)
    const matchingLines = await this.indexManager.query(criteria, options)
    if (!matchingLines || !matchingLines.size) {
      return []
    }
    const ranges = this.getRanges([...matchingLines])
    const validMatchingLines = new Set(ranges.map(r => r.index))
    if (!validMatchingLines.size) {
      return []
    }
    const entries = []
    for await (const entry of this.walk(criteria, options)) entries.push(entry)
    const lines = []
    for (const entry of entries) {
      const updated = Object.assign(entry, data)
      // NOTE(review): serialize() is called here without the compress/v8
      // options that insert() passes — confirm this is intentional, as it
      // could rewrite records in a different on-disk format.
      const ret = await this.serializer.serialize(updated)
      lines.push(ret)
    }
    // shift every stored offset by the cumulative size delta of rewritten lines
    const offsets = []
    let byteOffset = 0, k = 0
    this.offsets.forEach((n, i) => {
      const prevByteOffset = byteOffset
      if (validMatchingLines.has(i) && ranges[k]) {
        const r = ranges[k]
        byteOffset += lines[k].length - (r.end - r.start)
        k++
      }
      offsets.push(n + prevByteOffset)
    })
    this.offsets = offsets
    this.indexOffset += byteOffset
    await this.fileHandler.replaceLines(ranges, lines);
    [...validMatchingLines].forEach((lineNumber, i) => {
      this.indexManager.dryRemove(lineNumber)
      this.indexManager.add(entries[i], lineNumber)
    })
    this.shouldSave = true
    return entries
  }

  /** Remove every record matching `criteria`; returns the number removed. */
  async delete(criteria, options = {}) {
    // FIX: guard checks moved before any writeBuffer mutation (see update()).
    if (this.destroyed) throw new Error('Database is destroyed')
    if (!this.initialized) await this.init()
    if (this.shouldTruncate) {
      this.writeBuffer.push(this.indexOffset)
      this.shouldTruncate = false
    }
    this.shouldSave && await this.save().catch(console.error)
    const matchingLines = await this.indexManager.query(criteria, options)
    if (!matchingLines || !matchingLines.size) {
      return 0
    }
    const ranges = this.getRanges([...matchingLines])
    const validMatchingLines = new Set(ranges.map(r => r.index))
    await this.fileHandler.replaceLines(ranges, [])
    // drop deleted offsets and shift the survivors left by the removed bytes
    const offsets = []
    let byteOffset = 0, k = 0
    this.offsets.forEach((n, i) => {
      if (validMatchingLines.has(i)) {
        const r = ranges[k]
        byteOffset -= (r.end - r.start)
        k++
      } else {
        offsets.push(n + byteOffset)
      }
    })
    this.offsets = offsets
    this.indexOffset += byteOffset
    this.indexManager.remove([...validMatchingLines])
    this.shouldSave = true
    return ranges.length
  }

  /** Persist pending state, then release resources and mark unusable. */
  async destroy() {
    this.shouldSave && await this.save().catch(console.error)
    this.destroyed = true
    this.indexOffset = 0
    this.indexManager.index = {}
    this.writeBuffer.length = 0
    this.initialized = false
    this.fileHandler.destroy()
  }

  /** Number of records currently tracked. */
  get length() {
    return this?.offsets?.length || 0
  }

  /** The live index structure managed by IndexManager. */
  get index() {
    return this.indexManager.index
  }

}
|
package/src/FileHandler.mjs
DELETED
|
@@ -1,202 +0,0 @@
|
|
|
1
|
-
import fs from 'fs'
|
|
2
|
-
import pLimit from 'p-limit'
|
|
3
|
-
|
|
4
|
-
/**
 * Low-level byte-range file access for the database file: truncation,
 * range reads (grouped to minimize syscalls), line replacement via a
 * temp-file rewrite, and locating the last line of the file.
 */
export default class FileHandler {
  /** @param {string} file - path of the backing file */
  constructor(file) {
    this.file = file
  }

  /**
   * Truncate the file to `offset` bytes. If the file is missing — or the
   * truncate fails — (re)create it empty instead.
   * NOTE(review): a failed truncate on an existing file replaces it with an
   * empty one; confirm this best-effort fallback is intended.
   */
  async truncate(offset) {
    try {
      await fs.promises.access(this.file, fs.constants.F_OK)
      await fs.promises.truncate(this.file, offset)
    } catch (err) {
      await fs.promises.writeFile(this.file, '')
    }
  }

  /**
   * Read the byte range [start, end) into a Buffer, trimmed to the bytes
   * actually read when the file is shorter than `end`.
   */
  async readRange(start, end) {
    const fd = await fs.promises.open(this.file, 'r')
    try {
      const length = end - start
      const buffer = Buffer.alloc(length)
      // FIX: the original swallowed read errors with .catch(console.error)
      // and then crashed destructuring `bytesRead` from undefined, leaking
      // the fd; now the real error propagates and the fd always closes.
      const { bytesRead } = await fd.read(buffer, 0, length, start)
      return bytesRead < buffer.length ? buffer.subarray(0, bytesRead) : buffer
    } finally {
      await fd.close()
    }
  }

  /**
   * Read many ranges concurrently (4 groups at a time), returning a map of
   * start-offset -> line Buffer (or mapper(line, group) when given).
   */
  async readRanges(ranges, mapper) {
    const lines = {}, limit = pLimit(4)
    const fd = await fs.promises.open(this.file, 'r')
    const groupedRanges = await this.groupedRanges(ranges)
    try {
      await Promise.allSettled(groupedRanges.map(async (groupedRange) => {
        await limit(async () => {
          for await (const row of this.readGroupedRange(groupedRange, fd)) {
            lines[row.start] = mapper ? (await mapper(row.line, groupedRange)) : row.line
          }
        })
      }))
    } catch (e) {
      console.error('Error reading ranges:', e)
    } finally {
      await fd.close()
    }
    return lines
  }

  /**
   * Coalesce ordered, contiguous ranges into groups of at most 512KB so each
   * group can be fetched with a single read. Expects ordered ranges from
   * Database.getRanges().
   */
  async groupedRanges(ranges) {
    const readSize = 512 * 1024 // 512KB per grouped read
    const groupedRanges = []
    let currentGroup = []
    let currentSize = 0

    // each range is a {start: number, end: number} object
    for (const range of ranges) {
      const rangeSize = range.end - range.start
      if (currentGroup.length > 0) {
        const lastRange = currentGroup[currentGroup.length - 1]
        // break the group on a gap or when it would exceed the read budget
        if (lastRange.end !== range.start || currentSize + rangeSize > readSize) {
          groupedRanges.push(currentGroup)
          currentGroup = []
          currentSize = 0
        }
      }
      currentGroup.push(range)
      currentSize += rangeSize
    }

    if (currentGroup.length > 0) {
      groupedRanges.push(currentGroup)
    }

    return groupedRanges
  }

  /**
   * Read one grouped range with a single fd.read, then yield
   * { line, start } for each member range (clamped to the bytes read).
   */
  async *readGroupedRange(groupedRange, fd) {
    const options = { start: groupedRange[0].start, end: groupedRange[groupedRange.length - 1].end }

    let buffer = Buffer.alloc(options.end - options.start)
    const { bytesRead } = await fd.read(buffer, 0, options.end - options.start, options.start)
    if (buffer.length > bytesRead) buffer = buffer.subarray(0, bytesRead)

    for (const range of groupedRange) {
      const startOffset = range.start - options.start
      let endOffset = range.end - options.start
      if (endOffset > buffer.length) {
        endOffset = buffer.length
      }
      if (startOffset >= buffer.length) {
        continue
      }
      const line = buffer.subarray(startOffset, endOffset)
      if (line.length === 0) continue
      yield { line, start: range.start }
    }
  }

  /** Async-iterate { line, start } rows for the given ranges. */
  async *walk(ranges) {
    const fd = await fs.promises.open(this.file, 'r')
    try {
      const groupedRanges = await this.groupedRanges(ranges)
      for (const groupedRange of groupedRanges) {
        for await (const row of this.readGroupedRange(groupedRange, fd)) {
          yield row
        }
      }
    } finally {
      await fd.close()
    }
  }

  /**
   * Rewrite the file replacing each range with the corresponding entry of
   * `lines` (missing/empty entries delete the range), via an atomic
   * temp-file + rename.
   */
  async replaceLines(ranges, lines) {
    const tmpFile = this.file + '.tmp'
    const writer = await fs.promises.open(tmpFile, 'w+')
    const reader = await fs.promises.open(this.file, 'r')
    try {
      let position = 0
      let lineIndex = 0

      for (const range of ranges) {
        // copy the untouched bytes preceding this range
        if (position < range.start) {
          const buffer = await this.readRange(position, range.start)
          await writer.write(buffer)
        }
        if (lineIndex < lines.length && lines[lineIndex]) {
          await writer.write(lines[lineIndex])
        }
        position = range.end
        lineIndex++
      }

      // copy the remainder of the file past the last replaced range
      const { size } = await reader.stat()
      if (position < size) {
        const buffer = await this.readRange(position, size)
        await writer.write(buffer)
      }

      await reader.close()
      await writer.close()
      await fs.promises.rename(tmpFile, this.file)
    } catch (e) {
      // FIX: message translated from Portuguese for consistency with the
      // rest of the file's English error messages.
      console.error('Error replacing lines:', e)
      throw e
    } finally {
      await reader.close().catch(() => { })
      await writer.close().catch(() => { })
      await fs.promises.unlink(tmpFile).catch(() => { })
    }
  }

  /** Append `data` through an already-open file handle. */
  async writeData(data, immediate, fd) {
    await fd.write(data)
  }

  /** Synchronously append `data` to the file. */
  writeDataSync(data) {
    fs.writeFileSync(this.file, data, { flag: 'a' })
  }

  /**
   * Return the last line of the file as a Buffer, or undefined on failure
   * (errors are logged, except "empty file" cases which callers treat as
   * benign via String(e).includes('empty file')).
   */
  async readLastLine() {
    const reader = await fs.promises.open(this.file, 'r')
    try {
      const { size } = await reader.stat()
      // FIX: throw Error objects instead of bare strings; String(e) yields
      // "Error: empty file", so the existing includes('empty file') checks
      // here and in callers still match.
      if (size < 1) throw new Error('empty file')
      this.size = size
      const bufferSize = 16384
      let buffer, isFirstRead = true, lastReadSize, readPosition = Math.max(size - bufferSize, 0)
      while (readPosition >= 0) {
        const readSize = Math.min(bufferSize, size - readPosition)
        if (readSize !== lastReadSize) {
          lastReadSize = readSize
          buffer = Buffer.alloc(readSize)
        }
        // the first pass reads one byte fewer so a trailing "\n" terminator
        // is not mistaken for the last line separator
        const { bytesRead } = await reader.read(buffer, 0, isFirstRead ? (readSize - 1) : readSize, readPosition)
        if (isFirstRead) isFirstRead = false
        if (bytesRead === 0) break
        const newlineIndex = buffer.lastIndexOf(10)
        const start = readPosition + newlineIndex + 1
        if (newlineIndex !== -1) {
          const lastLine = Buffer.alloc(size - start)
          await reader.read(lastLine, 0, size - start, start)
          if (!lastLine || !lastLine.length) {
            throw new Error('no metadata or empty file')
          }
          return lastLine
        } else {
          readPosition -= bufferSize
        }
      }
    } catch (e) {
      String(e).includes('empty file') || console.error('Error reading last line:', e)
    } finally {
      // FIX: the original fired reader.close() without awaiting it
      await reader.close()
    }
  }

  /** No resources are held between calls; nothing to release. */
  async destroy() { }
}
|