jexidb 1.0.5 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "jexidb",
- "version": "1.0.5",
+ "version": "1.0.8",
  "description": "JexiDB is a pure JS NPM library for managing data on disk using JSONL efficiently, without the need for a server.",
  "main": "./dist/Database.cjs",
  "module": "./src/Database.mjs",
@@ -23,6 +23,7 @@
  "@babel/preset-env": "^7.25.4"
  },
  "dependencies": {
+ "async-mutex": "^0.5.0",
  "p-limit": "^6.1.0"
  },
  "directories": {
package/src/Database.mjs CHANGED
@@ -2,14 +2,15 @@ import { EventEmitter } from 'events'
  import FileHandler from './FileHandler.mjs'
  import IndexManager from './IndexManager.mjs'
  import Serializer from './Serializer.mjs'
+ import { Mutex } from 'async-mutex'
  import fs from 'fs'

  export class Database extends EventEmitter {
- constructor(file, opts={}) {
+ constructor(file, opts = {}) {
  super()
  this.opts = Object.assign({
  v8: false,
- index: {data: {}},
+ index: { data: {} },
  indexes: {},
  compress: false,
  compressIndex: false,
@@ -22,29 +23,30 @@ export class Database extends EventEmitter {
  this.indexManager = new IndexManager(this.opts)
  this.indexOffset = 0
  this.writeBuffer = []
+ this.mutex = new Mutex()
  }

  use(plugin) {
- if(this.destroyed) throw new Error('Database is destroyed')
+ if (this.destroyed) throw new Error('Database is destroyed')
  plugin(this)
  }

  async init() {
- if(this.destroyed) throw new Error('Database is destroyed')
- if(this.initialized) return
- if(this.initlializing) return await new Promise(resolve => this.once('init', resolve))
+ if (this.destroyed) throw new Error('Database is destroyed')
+ if (this.initialized) return
+ if (this.initlializing) return await new Promise(resolve => this.once('init', resolve))
  this.initializing = true
  try {
- if(this.opts.clear) {
+ if (this.opts.clear) {
  await this.fileHandler.truncate(0).catch(console.error)
  throw new Error('Cleared, empty file')
  }
  const lastLine = await this.fileHandler.readLastLine()
- if(!lastLine || !lastLine.length) {
+ if (!lastLine || !lastLine.length) {
  throw new Error('File does not exists or is a empty file')
  }
- const offsets = await this.serializer.deserialize(lastLine, {compress: this.opts.compressIndex})
- if(!Array.isArray(offsets)) {
+ const offsets = await this.serializer.deserialize(lastLine, { compress: this.opts.compressIndex })
+ if (!Array.isArray(offsets)) {
  throw new Error('File to parse offsets, expected an array')
  }
  this.indexOffset = offsets[offsets.length - 2]
@@ -53,14 +55,14 @@ export class Database extends EventEmitter {
  this.offsets = this.offsets.slice(0, -2)
  this.shouldTruncate = true
  let indexLine = await this.fileHandler.readRange(...ptr)
- const index = await this.serializer.deserialize(indexLine, {compress: this.opts.compressIndex})
+ const index = await this.serializer.deserialize(indexLine, { compress: this.opts.compressIndex })
  index && this.indexManager.load(index)
  } catch (e) {
- if(Array.isArray(this.offsets)) {
+ if (Array.isArray(this.offsets)) {
  this.offsets = []
  }
  this.indexOffset = 0
- if(!String(e).includes('empty file')) {
+ if (!String(e).includes('empty file')) {
  console.error('Error loading database:', e)
  }
  } finally {
@@ -71,30 +73,30 @@ export class Database extends EventEmitter {
  }

  async save() {
- if(this.destroyed) throw new Error('Database is destroyed')
- if(!this.initialized) throw new Error('Database not initialized')
- if(this.saving) return new Promise(resolve => this.once('save', resolve))
+ if (this.destroyed) throw new Error('Database is destroyed')
+ if (!this.initialized) throw new Error('Database not initialized')
+ if (this.saving) return new Promise(resolve => this.once('save', resolve))
  this.saving = true
  await this.flush()
  if (!this.shouldSave) return
  this.emit('before-save')
- const index = Object.assign({data: {}}, this.indexManager.index)
- for(const field in this.indexManager.index.data) {
- for(const term in this.indexManager.index.data[field]) {
+ const index = Object.assign({ data: {} }, this.indexManager.index)
+ for (const field in this.indexManager.index.data) {
+ for (const term in this.indexManager.index.data[field]) {
  index.data[field][term] = [...this.indexManager.index.data[field][term]] // set to array
  }
  }
  const offsets = this.offsets.slice(0)
- const indexString = await this.serializer.serialize(index, {compress: this.opts.compressIndex, linebreak: true}) // force linebreak here to allow 'init' to read last line as offsets correctly
- for(const field in this.indexManager.index.data) {
- for(const term in this.indexManager.index.data[field]) {
+ const indexString = await this.serializer.serialize(index, { compress: this.opts.compressIndex, linebreak: true }) // force linebreak here to allow 'init' to read last line as offsets correctly
+ for (const field in this.indexManager.index.data) {
+ for (const term in this.indexManager.index.data[field]) {
  this.indexManager.index.data[field][term] = new Set(index.data[field][term]) // set back to set because of serialization
  }
  }
  offsets.push(this.indexOffset)
  offsets.push(this.indexOffset + indexString.length)
  // save offsets as JSON always to prevent linebreaks on last line, which breaks 'init()'
- const offsetsString = await this.serializer.serialize(offsets, {json: true, compress: false, linebreak: false})
+ const offsetsString = await this.serializer.serialize(offsets, { json: true, compress: false, linebreak: false })
  this.writeBuffer.push(indexString)
  this.writeBuffer.push(offsetsString)
  await this.flush() // write the index and offsets
@@ -112,7 +114,7 @@ export class Database extends EventEmitter {

  locate(n) {
  if (this.offsets[n] === undefined) {
- if(this.offsets[n - 1]) {
+ if (this.offsets[n - 1]) {
  return [this.indexOffset, Number.MAX_SAFE_INTEGER]
  }
  return
@@ -120,58 +122,58 @@ export class Database extends EventEmitter {
  let end = (this.offsets[n + 1] || this.indexOffset || Number.MAX_SAFE_INTEGER)
  return [this.offsets[n], end]
  }
-
+
  getRanges(map) {
  return (map || Array.from(this.offsets.keys())).map(n => {
- const ret = this.locate(n)
- if(ret !== undefined) return {start: ret[0], end: ret[1], index: n}
+ const ret = this.locate(n)
+ if (ret !== undefined) return { start: ret[0], end: ret[1], index: n }
  }).filter(n => n !== undefined)
  }

  async readLines(map, ranges) {
- if(!ranges) ranges = this.getRanges(map)
+ if (!ranges) ranges = this.getRanges(map)
  const results = await this.fileHandler.readRanges(ranges, this.serializer.deserialize.bind(this.serializer))
  let i = 0
- for(const start in results) {
- if(!results[start] || results[start]._ !== undefined) continue
- while(this.offsets[i] != start && i < map.length) i++ // weak comparison as 'start' is a string
+ for (const start in results) {
+ if (!results[start] || results[start]._ !== undefined) continue
+ while (this.offsets[i] != start && i < map.length) i++ // weak comparison as 'start' is a string
  results[start]._ = map[i++]
  }
  return Object.values(results).filter(r => r !== undefined)
  }

  async insert(data) {
- if(this.destroyed) throw new Error('Database is destroyed')
- if(!this.initialized) await this.init()
+ if (this.destroyed) throw new Error('Database is destroyed')
+ if (!this.initialized) await this.init()
  if (this.shouldTruncate) {
- this.writeBuffer.push(this.indexOffset)
- this.shouldTruncate = false
+ this.writeBuffer.push(this.indexOffset)
+ this.shouldTruncate = false
  }
- const line = await this.serializer.serialize(data, {compress: this.opts.compress}) // using Buffer for offsets accuracy
+ const line = await this.serializer.serialize(data, { compress: this.opts.compress, v8: this.opts.v8 }) // using Buffer for offsets accuracy
  const position = this.offsets.length
  this.offsets.push(this.indexOffset)
  this.indexOffset += line.length
- this.indexManager.add(data, position)
  this.emit('insert', data, position)
  this.writeBuffer.push(line)
- if(!this.flushing && this.currentWriteBufferSize() > this.opts.maxMemoryUsage) {
+ if (!this.flushing && this.currentWriteBufferSize() > this.opts.maxMemoryUsage) {
  await this.flush()
  }
+ this.indexManager.add(data, position)
  this.shouldSave = true
  }

- currentWriteBufferSize(){
+ currentWriteBufferSize() {
  const lengths = this.writeBuffer.filter(b => Buffer.isBuffer(b)).map(b => b.length)
  return lengths.reduce((a, b) => a + b, 0)
  }

  flush() {
- if(this.flushing) {
+ if (this.flushing) {
  return this.flushing
  }
  return this.flushing = new Promise((resolve, reject) => {
- if(this.destroyed) return reject(new Error('Database is destroyed'))
- if(!this.writeBuffer.length) return resolve()
+ if (this.destroyed) return reject(new Error('Database is destroyed'))
+ if (!this.writeBuffer.length) return resolve()
  let err
  this._flush().catch(e => err = e).finally(() => {
  err ? reject(err) : resolve()
@@ -181,17 +183,18 @@ export class Database extends EventEmitter {
  }

  async _flush() {
+ const release = await this.mutex.acquire()
  let fd = await fs.promises.open(this.fileHandler.file, 'a')
  try {
- while(this.writeBuffer.length) {
+ while (this.writeBuffer.length) {
  let data
  const pos = this.writeBuffer.findIndex(b => typeof b === 'number')
- if(pos === 0) {
+ if (pos === 0) {
  await fd.close()
  await this.fileHandler.truncate(this.writeBuffer.shift())
  fd = await fs.promises.open(this.fileHandler.file, 'a')
  continue
- } else if(pos === -1) {
+ } else if (pos === -1) {
  data = Buffer.concat(this.writeBuffer)
  this.writeBuffer.length = 0
  } else {
@@ -201,76 +204,72 @@ export class Database extends EventEmitter {
  await fd.write(data)
  }
  this.shouldSave = true
- } catch(err) {
+ } catch (err) {
  console.error('Error flushing:', err)
  } finally {
- await fd.close()
+ let err
+ await fd.close().catch(e => err = e)
+ release()
+ err && console.error('Error closing file:', err)
  }
  }

- async *walk(map, options={}) {
- if(this.destroyed) throw new Error('Database is destroyed')
- if(!this.initialized) await this.init()
+ async *walk(map, options = {}) {
+ if (this.destroyed) throw new Error('Database is destroyed')
+ if (!this.initialized) await this.init()
  this.shouldSave && await this.save().catch(console.error)
- if(this.indexOffset === 0) return
- if(!Array.isArray(map)) {
+ if (this.indexOffset === 0) return
+ if (!Array.isArray(map)) {
  if (map instanceof Set) {
  map = [...map]
- } else if(map && typeof map === 'object') {
+ } else if (map && typeof map === 'object') {
  map = [...this.indexManager.query(map, options)]
  } else {
  map = [...Array(this.offsets.length).keys()]
  }
  }
  const ranges = this.getRanges(map)
- const partitionedRanges = [], currentPartition = 0
- for (const line in ranges) {
- if (partitionedRanges[currentPartition] === undefined) {
- partitionedRanges[currentPartition] = []
- }
- partitionedRanges[currentPartition].push(ranges[line])
- if (partitionedRanges[currentPartition].length >= this.opts.maxMemoryUsage) {
- currentPartition++
- }
- }
- let m = 0
- for (const ranges of partitionedRanges) {
- const lines = await this.fileHandler.readRanges(ranges)
- for (const line in lines) {
- let err
- const entry = await this.serializer.deserialize(lines[line]).catch(e => console.error(err = e))
- if (err) continue
- if (entry._ === undefined) {
- while(this.offsets[m] != line && m < map.length) m++ // weak comparison as 'start' is a string
- entry._ = m++
+ const groupedRanges = await this.fileHandler.groupedRanges(ranges)
+ const fd = await fs.promises.open(this.fileHandler.file, 'r')
+ try {
+ for (const groupedRange of groupedRanges) {
+ for await (const row of this.fileHandler.readGroupedRange(groupedRange, fd)) {
+ const entry = await this.serializer.deserialize(row.line, { compress: this.opts.compress, v8: this.opts.v8 })
+ if (entry === null) continue
+ if (options.includeOffsets) {
+ yield { entry, start: row.start }
+ } else {
+ yield entry
+ }
  }
- yield entry
  }
+ } finally {
+ await fd.close()
  }
  }

- async query(criteria, options={}) {
- if(this.destroyed) throw new Error('Database is destroyed')
- if(!this.initialized) await this.init()
+ async query(criteria, options = {}) {
+ if (this.destroyed) throw new Error('Database is destroyed')
+ if (!this.initialized) await this.init()
  this.shouldSave && await this.save().catch(console.error)
- if(Array.isArray(criteria)) {
+ if (Array.isArray(criteria)) {
  let results = await this.readLines(criteria)
  if (options.orderBy) {
- const [field, direction = 'asc'] = options.orderBy.split(' ')
- results.sort((a, b) => {
- if (a[field] > b[field]) return direction === 'asc' ? 1 : -1
- if (a[field] < b[field]) return direction === 'asc' ? -1 : 1
- return 0;
- })
+ const [field, direction = 'asc'] = options.orderBy.split(' ')
+ results.sort((a, b) => {
+ if (a[field] > b[field]) return direction === 'asc' ? 1 : -1
+ if (a[field] < b[field]) return direction === 'asc' ? -1 : 1
+ return 0;
+ })
  }
  if (options.limit) {
- results = results.slice(0, options.limit);
+ results = results.slice(0, options.limit);
  }
  return results
  } else {
  const matchingLines = await this.indexManager.query(criteria, options)
  if (!matchingLines || !matchingLines.size) {
- return []
+ return []
  }
  return await this.query([...matchingLines], options)
  }
@@ -323,17 +322,17 @@ export class Database extends EventEmitter {
  return entries
  }

- async delete(criteria, options={}) {
+ async delete(criteria, options = {}) {
  if (this.shouldTruncate) {
- this.writeBuffer.push(this.indexOffset)
- this.shouldTruncate = false
+ this.writeBuffer.push(this.indexOffset)
+ this.shouldTruncate = false
  }
- if(this.destroyed) throw new Error('Database is destroyed')
- if(!this.initialized) await this.init()
+ if (this.destroyed) throw new Error('Database is destroyed')
+ if (!this.initialized) await this.init()
  this.shouldSave && await this.save().catch(console.error)
  const matchingLines = await this.indexManager.query(criteria, options)
  if (!matchingLines || !matchingLines.size) {
- return 0
+ return 0
  }
  const ranges = this.getRanges([...matchingLines])
  const validMatchingLines = new Set(ranges.map(r => r.index))
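
Note on the Database.mjs changes above: besides the formatting pass, insert() now registers the record with indexManager.add() only after a possible flush, _flush() acquires the new mutex so concurrent flushes cannot interleave their writes, and walk() streams rows through FileHandler's groupedRanges()/readGroupedRange() instead of buffering partitions in memory, optionally yielding byte offsets. A hedged usage sketch (the file name and record fields are made up; the import assumes the named Database export shown in src/Database.mjs):

    import { Database } from 'jexidb'

    const db = new Database('./users.jsonl')
    await db.init()
    await db.insert({ name: 'alice', age: 30 })

    // Stream every stored record without materializing the whole result set;
    // includeOffsets is the new option visible in walk() above.
    for await (const row of db.walk(null, { includeOffsets: true })) {
      console.log(row.entry, row.start) // the record plus its byte offset in the file
    }
    await db.save()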
package/src/FileHandler.mjs CHANGED
@@ -24,22 +24,19 @@ export default class FileHandler
  if(buffer.length > bytesRead) return buffer.subarray(0, bytesRead)
  return buffer
  }
-
+
  async readRanges(ranges, mapper) {
  const lines = {}, limit = pLimit(4)
  const fd = await fs.promises.open(this.file, 'r')
+ const groupedRanges = await this.groupedRanges(ranges)
  try {
- const tasks = ranges.map(r => {
- return async () => {
- let err
- const length = r.end - r.start
- let buffer = Buffer.alloc(length)
- const { bytesRead } = await fd.read(buffer, 0, length, r.start).catch(e => err = e)
- if (buffer.length > bytesRead) buffer = buffer.subarray(0, bytesRead)
- lines[r.start] = mapper ? (await mapper(buffer, r)) : buffer
- }
- })
- await Promise.allSettled(tasks.map(limit))
+ await Promise.allSettled(groupedRanges.map(async (groupedRange) => {
+ await limit(async () => {
+ for await (const row of this.readGroupedRange(groupedRange, fd)) {
+ lines[row.start] = mapper ? (await mapper(row.line, groupedRange)) : row.line
+ }
+ })
+ }))
  } catch (e) {
  console.error('Error reading ranges:', e)
  } finally {
@@ -48,43 +45,114 @@ export default class FileHandler
  return lines
  }

+ async groupedRanges(ranges) { // expects ordered ranges from Database.getRanges()
+ const readSize = 512 * 1024 // 512KB
+ const groupedRanges = []
+ let currentGroup = []
+ let currentSize = 0
+
+ // each range is a {start: number, end: number} object
+ for(const range of ranges) {
+ const rangeSize = range.end - range.start
+
+ if(currentGroup.length > 0) {
+ const lastRange = currentGroup[currentGroup.length - 1]
+ if(lastRange.end !== range.start || currentSize + rangeSize > readSize) {
+ groupedRanges.push(currentGroup)
+ currentGroup = []
+ currentSize = 0
+ }
+ }
+
+ currentGroup.push(range)
+ currentSize += rangeSize
+ }
+
+ if(currentGroup.length > 0) {
+ groupedRanges.push(currentGroup)
+ }
+
+ return groupedRanges
+ }
+
+ async *readGroupedRange(groupedRange, fd) {
+ const options = {start: groupedRange[0].start, end: groupedRange[groupedRange.length - 1].end}
+
+ let i = 0, buffer = Buffer.alloc(options.end - options.start)
+ const results = {}, { bytesRead } = await fd.read(buffer, 0, options.end - options.start, options.start)
+ if(buffer.length > bytesRead) buffer = buffer.subarray(0, bytesRead)
+
+ for (const range of groupedRange) {
+ const startOffset = range.start - options.start;
+ let endOffset = range.end - options.start;
+ if (endOffset > buffer.length) {
+ endOffset = buffer.length;
+ }
+ if (startOffset >= buffer.length) {
+ continue;
+ }
+ const line = buffer.subarray(startOffset, endOffset);
+ if (line.length === 0) continue;
+ yield { line, start: range.start };
+ }
+
+
+ return results
+ }
+
+ async *walk(ranges) {
+ const fd = await fs.promises.open(this.file, 'r')
+ try {
+ const groupedRanges = await this.groupedRanges(ranges)
+ for(const groupedRange of groupedRanges) {
+ for await (const row of this.readGroupedRange(groupedRange, fd)) {
+ yield row
+ }
+ }
+ } finally {
+ await fd.close()
+ }
+ }
+
  async replaceLines(ranges, lines) {
- let closed
- const tmpFile = this.file + '.tmp'
- const writer = await fs.promises.open(tmpFile, 'w+')
- const reader = await fs.promises.open(this.file, 'r')
+ const tmpFile = this.file + '.tmp';
+ const writer = await fs.promises.open(tmpFile, 'w+');
+ const reader = await fs.promises.open(this.file, 'r');
  try {
- let i = 0, start = 0
- for (const r of ranges) {
- const length = r.start - start
- const buffer = Buffer.alloc(length)
- await reader.read(buffer, 0, length, start)
- start = r.end
- buffer.length && await writer.write(buffer)
- if (lines[i]) {
- await writer.write(lines[i])
+ let position = 0;
+ let lineIndex = 0;
+
+ for (const range of ranges) {
+ if (position < range.start) {
+ const buffer = await this.readRange(position, range.start);
+ await writer.write(buffer);
  }
- i++
+ if (lineIndex < lines.length && lines[lineIndex]) {
+ await writer.write(lines[lineIndex]);
+ }
+ position = range.end;
+ lineIndex++;
  }
- const size = (await reader.stat()).size
- const length = size - start
- const buffer = Buffer.alloc(length)
- await reader.read(buffer, 0, length, start)
- await writer.write(buffer)
- await reader.close()
- await writer.close()
- closed = true
- await fs.promises.copyFile(tmpFile, this.file)
+
+ const { size } = await reader.stat();
+ if (position < size) {
+ const buffer = await this.readRange(position, size);
+ await writer.write(buffer);
+ }
+
+ await reader.close();
+ await writer.close();
+ await fs.promises.rename(tmpFile, this.file);
  } catch (e) {
- console.error('Error replacing lines:', e)
+ console.error('Erro ao substituir linhas:', e);
+ throw e;
  } finally {
- if(!closed) {
- await reader.close()
- await writer.close()
- }
- await fs.promises.unlink(tmpFile).catch(() => {})
+ await reader.close().catch(() => { });
+ await writer.close().catch(() => { });
+ await fs.promises.unlink(tmpFile).catch(() => { });
  }
  }
+
  async writeData(data, immediate, fd) {
  await fd.write(data)
@@ -100,18 +168,19 @@ export default class FileHandler
  if (size < 1) throw 'empty file'
  this.size = size
  const bufferSize = 16384
- let buffer, lastReadSize, readPosition = Math.max(size - bufferSize, 0)
+ let buffer, isFirstRead = true, lastReadSize, readPosition = Math.max(size - bufferSize, 0)
  while (readPosition >= 0) {
  const readSize = Math.min(bufferSize, size - readPosition)
  if (readSize !== lastReadSize) {
  lastReadSize = readSize
  buffer = Buffer.alloc(readSize)
  }
- const { bytesRead } = await reader.read(buffer, 0, readSize, readPosition)
+ const { bytesRead } = await reader.read(buffer, 0, isFirstRead ? (readSize - 1) : readSize, readPosition)
+ if (isFirstRead) isFirstRead = false
  if (bytesRead === 0) break
- const newlineIndex = buffer.lastIndexOf(10, size - 4) // 0x0A is the ASCII code for '\n'
+ const newlineIndex = buffer.lastIndexOf(10)
+ const start = readPosition + newlineIndex + 1
  if (newlineIndex !== -1) {
- const start = readPosition + newlineIndex + 1
  const lastLine = Buffer.alloc(size - start)
  await reader.read(lastLine, 0, size - start, start)
  if (!lastLine || !lastLine.length) {
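
Note on the FileHandler.mjs changes above: readRanges() and the new walk() no longer issue one fd.read() per record; groupedRanges() coalesces adjacent ranges into contiguous groups of at most 512 KB, and readGroupedRange() reads each group with a single fd.read() and slices the individual records out of the buffer. A small illustration of the grouping rule with made-up offsets:

    // Input ranges arrive ordered from Database.getRanges().
    const ranges = [
      { start: 0, end: 100 },    // adjacent to the next range -> same group
      { start: 100, end: 250 },
      { start: 900, end: 1000 }  // gap before this range -> starts a new group
    ]
    // groupedRanges(ranges) -> [[{0..100}, {100..250}], [{900..1000}]]
    // readGroupedRange() then performs two reads: bytes 0-250 and bytes 900-1000.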
package/src/Serializer.mjs CHANGED
@@ -23,7 +23,7 @@ export default class Serializer extends EventEmitter {
  header |= 0x02 // set V8
  line = v8.serialize(data)
  } else {
- let json = JSON.stringify(data)
+ const json = JSON.stringify(data)
  line = Buffer.from(json, 'utf-8')
  }
  if (compress) {
@@ -46,6 +46,9 @@ export default class Serializer extends EventEmitter {
  }

  async deserialize(data) {
+ if(data.length === 0) {
+ return null
+ }
  let line, isCompressed, isV8
  const header = data.readUInt8(0)
  const valid = header === 0x00 || header === 0x01 || header === 0x02 || header === 0x03
@@ -58,7 +61,7 @@ export default class Serializer extends EventEmitter {
  try {
  return JSON.parse(data.toString('utf-8').trim())
  } catch (e) {
- throw new Error('Failed to deserialize legacy JSON data')
+ throw new Error('Failed to deserialize JSON data')
  }
  }
  if (isCompressed) {
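
Note on the Serializer.mjs changes above: deserialize() now short-circuits on zero-length input, which is what lets Database.walk() skip empty slices via its "entry === null" check instead of hitting a readUInt8 error. A one-line illustration (the serializer variable is assumed to be a Serializer instance):

    const entry = await serializer.deserialize(Buffer.alloc(0)) // resolves to null instead of throwing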
Binary file
Binary file
Binary file