readeof 1.0.0 → 1.1.0

package/README.md CHANGED
@@ -7,6 +7,8 @@ A high-performance, memory-efficient library to read large files backward (line-
  - Fast file reading from the end without loading entire file into memory
  - Memory efficient using configurable buffer sizes
  - Supports various encodings
+ - Real-time file tailing (like `tail -f`) with streaming support
+ - Automatic handling of file truncation/rotation

  ## Installation

@@ -42,7 +44,7 @@ console.log(recentLogs);
  import { readeof } from 'readeof';

  // Read file with latin1 encoding
- const content = await readeof('./data.txt', 20, 'latin1');
+ const content = await readeof('./data.txt', 20, { encoding: 'latin1' });
  ```

  ### Custom Buffer Size
@@ -51,24 +53,62 @@ const content = await readeof('./data.txt', 20, 'latin1');
  import { readeof } from 'readeof';

  // Use 64KB buffer for very large files
- const content = await readeof('./huge-log.log', 100, 'utf8', 64 * 1024);
+ const content = await readeof('./huge-log.log', 100, {
+   encoding: 'utf8',
+   bufferSize: 64 * 1024,
+ });
  ```

- ### Real-time Log Monitoring
+ ### Real-time Log Streaming (tail -f)
+
+ Stream new lines as they are appended to the file. First reads the last N lines, then continues watching for new content.

  ```typescript
  import { readeof } from 'readeof';

- async function monitorLog(filePath: string) {
-   setInterval(async () => {
-     const latestLines = await readeof(filePath, 5);
-     console.clear();
-     console.log('=== Latest Logs ===');
-     console.log(latestLines);
-   }, 2000);
+ // Stream with AbortController for manual control
+ const controller = new AbortController();
+
+ for await (const line of readeof('/var/log/app.log', 10, {
+   enabled: true,
+   signal: controller.signal,
+ })) {
+   console.log(line);
+
+   // Stop streaming when needed
+   if (line.includes('SHUTDOWN')) {
+     controller.abort();
+   }
  }
+ ```
+
+ ### Streaming with Timeout
+
+ ```typescript
+ import { readeof } from 'readeof';

- monitorLog('/var/log/application.log');
+ // Stream for 30 seconds then stop automatically
+ for await (const line of readeof('/var/log/app.log', 10, {
+   enabled: true,
+   signal: AbortSignal.timeout(30_000),
+ })) {
+   console.log(line);
+ }
+ ```
+
+ ### Streaming with Custom Poll Interval
+
+ ```typescript
+ import { readeof } from 'readeof';
+
+ // Check for new content every 500ms instead of default 1000ms
+ for await (const line of readeof('/var/log/app.log', 10, {
+   enabled: true,
+   pollInterval: 500,
+   signal: AbortSignal.timeout(60_000),
+ })) {
+   console.log(line);
+ }
  ```

  ### Error Analysis
@@ -86,6 +126,40 @@ async function findRecentErrors(logPath: string) {
  }
  ```

+ ## API Reference
+
+ ### `readeof(filePath, maxLines, options?)`
+
+ Reads the last N lines from a file efficiently.
+
+ #### Parameters
+
+ | Parameter | Type | Description |
+ |-----------|------|-------------|
+ | `filePath` | `string` | Path to the file to read from |
+ | `maxLines` | `number` | Total lines to read from the end of the file |
+ | `options` | `ReadOptions \| StreamOptions` | Optional configuration options |
+
+ #### ReadOptions
+
+ | Option | Type | Default | Description |
+ |--------|------|---------|-------------|
+ | `encoding` | `BufferEncoding` | `'utf8'` | File encoding |
+ | `bufferSize` | `number` | `16384` | Buffer size for reading (16KB) |
+
+ #### StreamOptions (extends ReadOptions)
+
+ | Option | Type | Default | Description |
+ |--------|------|---------|-------------|
+ | `enabled` | `boolean` | - | Enable streaming mode |
+ | `pollInterval` | `number` | `1000` | Polling interval in milliseconds |
+ | `signal` | `AbortSignal` | - | Signal to stop streaming |
+
+ #### Returns
+
+ - **Without streaming**: `Promise<string>` - The last N lines as a single string
+ - **With streaming**: `AsyncGenerator<string>` - Yields each line as it appears
+
  ## License

  See [LICENSE](LICENSE) file for details.
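
The README examples above show `ReadOptions` and `StreamOptions` separately. Since `StreamOptions` extends `ReadOptions` per the API Reference, the two groups of fields can be combined in a single call. The sketch below is illustrative only; the log path, encoding, and timeout are placeholder values, not taken from the package docs.

```typescript
import { readeof } from 'readeof';

// Combine ReadOptions (encoding, bufferSize) with StreamOptions fields
// (enabled, pollInterval, signal) in one call. Path and timings are
// placeholders for illustration.
for await (const line of readeof('/var/log/app.log', 20, {
  encoding: 'latin1',
  bufferSize: 64 * 1024,
  enabled: true,
  pollInterval: 2_000,
  signal: AbortSignal.timeout(10 * 60_000),
})) {
  console.log(line);
}
```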
package/dist/index.d.ts CHANGED
@@ -1,32 +1,76 @@
  /**
- * Reads the last N lines from a file efficiently.
- *
- * @param filePath - Path to the file to read from
- * @param maxLines - Total lines to read from the end of the file
- * @param encoding - File encoding (default: 'utf8')
- * Available encodings: https://nodejs.org/api/buffer.html#buffers-and-character-encodings
- * @param bufferSize - Buffer size to use when reading the file (default: 16KB)
- * A larger buffer size may improve performance for large files.
- * @returns A promise that resolves to a string containing the last N lines of the file
+ * Options for reading the file
  */
- export declare function readeof(
+ export interface ReadOptions {
+   /**
+    * File encoding (default: 'utf8')
+    *
+    * Available encodings: https://nodejs.org/api/buffer.html#buffers-and-character-encodings
+    */
+   encoding?: BufferEncoding;
+   /**
+    * Buffer size to use when reading the file (default: 16KB)
+    *
+    * A larger buffer size may improve performance for large files.
+    */
+   bufferSize?: number;
+ }
  /**
- * Path to the file to read from
+ * Options for streaming/tailing the file
  */
- filePath: string,
+ export interface StreamOptions extends ReadOptions {
+   /**
+    * Enable streaming mode to tail the file after reading last N lines
+    */
+   enabled: boolean;
+   /**
+    * Polling interval in milliseconds (default: 1000)
+    */
+   pollInterval?: number;
+   /**
+    * AbortSignal to stop tailing
+    */
+   signal?: AbortSignal;
+ }
  /**
- * Total lines to read from the end of the file
- */
- maxLines: number,
- /**
- * File encoding (default: 'utf8')
+ * Reads the last N lines from a file efficiently.
+ * Can optionally continue streaming new lines as they are appended.
  *
- * Available encodings: https://nodejs.org/api/buffer.html#buffers-and-character-encodings
- */
- encoding?: BufferEncoding,
- /**
- * Buffer size to use when reading the file (default: 16KB)
+ * @param filePath - Path to the file to read from
+ * @param maxLines - Total lines to read from the end of the file
+ * @param options - Optional options for reading/streaming
+ * @returns A promise that resolves to a string, or an async generator if streaming is enabled
+ *
+ * @example
+ * ```typescript
+ * // Read last 10 lines
+ * const lines = await readeof('/var/log/app.log', 10);
+ *
+ * // Read last 10 lines with custom encoding
+ * const lines = await readeof('/var/log/app.log', 10, { encoding: 'utf8' });
+ *
+ * // Read last 10 lines and continue streaming
+ * const controller = new AbortController();
+ * for await (const line of readeof('/var/log/app.log', 10, {
+ *   enabled: true,
+ *   signal: controller.signal
+ * })) {
+ *   console.log(line);
+ * }
  *
- * A larger buffer size may improve performance for large files.
+ * // Read last 10 lines and continue streaming with 30s timeout
+ * for await (const line of readeof('/var/log/app.log', 10, {
+ *   enabled: true,
+ *   signal: AbortSignal.timeout(30_000),
+ * })) {
+ *   console.log(line);
+ * }
+ * ```
  */
- bufferSize?: number): Promise<string>;
+ export declare function readeof(filePath: string, maxLines: number, options: StreamOptions & {
+   enabled: true;
+ }): AsyncGenerator<string, void, unknown>;
+ export declare function readeof(filePath: string, maxLines: number, options?: StreamOptions & {
+   enabled: false;
+ }): Promise<string>;
+ export declare function readeof(filePath: string, maxLines: number, options?: ReadOptions): Promise<string>;
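
For reference, this is how the three overloads declared above are expected to resolve at a call site: a literal `enabled: true` selects the `AsyncGenerator` signature, while plain `ReadOptions` (or no options at all) falls through to the `Promise<string>` signature. The file path and timeout below are placeholder values.

```typescript
import { readeof } from 'readeof';

// No `enabled` flag: matches the ReadOptions overload, so the result is a Promise<string>.
const tail: string = await readeof('./app.log', 5, { encoding: 'utf8' });

// Literal `enabled: true`: matches the first overload, so the result is an async generator.
const stream: AsyncGenerator<string, void, unknown> = readeof('./app.log', 5, {
  enabled: true,
  signal: AbortSignal.timeout(5_000),
});
for await (const line of stream) {
  console.log(line);
}
```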
package/dist/index.js CHANGED
@@ -33,7 +33,15 @@ __export(exports_src, {
  });
  module.exports = __toCommonJS(exports_src);
  var import_promises = require("node:fs/promises");
- async function readeof(filePath, maxLines, encoding = "utf8", bufferSize = 16 * 1024) {
+ function readeof(filePath, maxLines, options) {
+   const encoding = options?.encoding ?? "utf8";
+   const bufferSize = options?.bufferSize ?? 16 * 1024;
+   if (options?.enabled) {
+     return readeofStream(filePath, maxLines, encoding, bufferSize, options);
+   }
+   return readeofOnce(filePath, maxLines, encoding, bufferSize);
+ }
+ async function readeofOnce(filePath, maxLines, encoding, bufferSize) {
    if (maxLines <= 0)
      return "";
    let fileHandle = null;
@@ -73,7 +81,106 @@ async function readeof(filePath, maxLines, encoding = "utf8", bufferSize = 16 *
    } catch (error) {
      throw error;
    } finally {
-     console.log("Closing file handle");
+     if (fileHandle) {
+       await fileHandle.close();
+     }
+   }
+ }
+ async function* readeofStream(filePath, maxLines, encoding, bufferSize, options) {
+   const { pollInterval = 1000, signal } = options;
+   let fileHandle = null;
+   let position = 0;
+   let remainder = "";
+   try {
+     fileHandle = await import_promises.open(filePath, "r");
+     const stat = await fileHandle.stat();
+     if (stat.size > 0 && maxLines > 0) {
+       const buffer = Buffer.alloc(bufferSize);
+       let linesFound = 0;
+       let searchPosition = stat.size;
+       let startReadPos = stat.size;
+       while (searchPosition > 0 && linesFound < maxLines) {
+         const readLength = Math.min(bufferSize, searchPosition);
+         searchPosition -= readLength;
+         const result = await fileHandle.read(buffer, 0, readLength, searchPosition);
+         const bytesRead = result.bytesRead;
+         for (let i = bytesRead - 1;i >= 0; i--) {
+           if (buffer[i] === 10) {
+             if (searchPosition + i !== stat.size - 1 || linesFound > 0) {
+               linesFound++;
+             }
+             if (linesFound >= maxLines) {
+               startReadPos = searchPosition + i + 1;
+               break;
+             }
+           }
+         }
+       }
+       if (linesFound < maxLines) {
+         startReadPos = 0;
+       }
+       const lengthToRead = stat.size - startReadPos;
+       const resultBuffer = Buffer.alloc(lengthToRead);
+       await fileHandle.read(resultBuffer, 0, lengthToRead, startReadPos);
+       const content = resultBuffer.toString(encoding);
+       const lines = content.split(`
+ `);
+       for (const line of lines) {
+         if (line.length > 0) {
+           yield line;
+         }
+       }
+     }
+     position = stat.size;
+     const streamBuffer = Buffer.alloc(bufferSize);
+     while (!signal?.aborted) {
+       try {
+         const currentStat = await fileHandle.stat();
+         if (currentStat.size < position) {
+           position = 0;
+           remainder = "";
+         }
+         if (currentStat.size > position) {
+           const bytesToRead = Math.min(bufferSize, currentStat.size - position);
+           const { bytesRead } = await fileHandle.read(streamBuffer, 0, bytesToRead, position);
+           if (bytesRead > 0) {
+             position += bytesRead;
+             const chunk = streamBuffer.subarray(0, bytesRead).toString(encoding);
+             const content = remainder + chunk;
+             const lines = content.split(`
+ `);
+             remainder = lines.pop() ?? "";
+             for (const line of lines) {
+               if (line.length > 0) {
+                 yield line;
+               }
+             }
+           }
+         }
+         await new Promise((resolve, reject) => {
+           const timeout = setTimeout(resolve, pollInterval);
+           signal?.addEventListener("abort", () => {
+             clearTimeout(timeout);
+             reject(new Error("Aborted"));
+           }, { once: true });
+         });
+       } catch (error) {
+         if (signal?.aborted)
+           break;
+         try {
+           await fileHandle.close();
+           fileHandle = await import_promises.open(filePath, "r");
+           position = 0;
+           remainder = "";
+         } catch {
+           throw error;
+         }
+       }
+     }
+     if (remainder.length > 0) {
+       yield remainder;
+     }
+   } finally {
      if (fileHandle) {
        await fileHandle.close();
      }
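
A note on cleanup in the `readeofStream` implementation above: the file handle is closed in the generator's `finally` block, so a consumer that stops early (for example by breaking out of the `for await` loop) still releases the handle, because breaking invokes the generator's `return()` and runs that `finally`. A minimal sketch, with a placeholder path and marker string:

```typescript
import { readeof } from 'readeof';

// Breaking out of the loop ends the async generator early; its finally block
// still runs and closes the underlying file handle. Path/marker are examples.
for await (const line of readeof('/var/log/app.log', 10, {
  enabled: true,
  signal: AbortSignal.timeout(60_000),
})) {
  if (line.includes('FATAL')) break;
  console.log(line);
}
```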
package/dist/index.mjs CHANGED
@@ -1,6 +1,14 @@
  // src/index.ts
  import { open } from "node:fs/promises";
- async function readeof(filePath, maxLines, encoding = "utf8", bufferSize = 16 * 1024) {
+ function readeof(filePath, maxLines, options) {
+   const encoding = options?.encoding ?? "utf8";
+   const bufferSize = options?.bufferSize ?? 16 * 1024;
+   if (options?.enabled) {
+     return readeofStream(filePath, maxLines, encoding, bufferSize, options);
+   }
+   return readeofOnce(filePath, maxLines, encoding, bufferSize);
+ }
+ async function readeofOnce(filePath, maxLines, encoding, bufferSize) {
    if (maxLines <= 0)
      return "";
    let fileHandle = null;
@@ -40,7 +48,106 @@ async function readeof(filePath, maxLines, encoding = "utf8", bufferSize = 16 *
    } catch (error) {
      throw error;
    } finally {
-     console.log("Closing file handle");
+     if (fileHandle) {
+       await fileHandle.close();
+     }
+   }
+ }
+ async function* readeofStream(filePath, maxLines, encoding, bufferSize, options) {
+   const { pollInterval = 1000, signal } = options;
+   let fileHandle = null;
+   let position = 0;
+   let remainder = "";
+   try {
+     fileHandle = await open(filePath, "r");
+     const stat = await fileHandle.stat();
+     if (stat.size > 0 && maxLines > 0) {
+       const buffer = Buffer.alloc(bufferSize);
+       let linesFound = 0;
+       let searchPosition = stat.size;
+       let startReadPos = stat.size;
+       while (searchPosition > 0 && linesFound < maxLines) {
+         const readLength = Math.min(bufferSize, searchPosition);
+         searchPosition -= readLength;
+         const result = await fileHandle.read(buffer, 0, readLength, searchPosition);
+         const bytesRead = result.bytesRead;
+         for (let i = bytesRead - 1;i >= 0; i--) {
+           if (buffer[i] === 10) {
+             if (searchPosition + i !== stat.size - 1 || linesFound > 0) {
+               linesFound++;
+             }
+             if (linesFound >= maxLines) {
+               startReadPos = searchPosition + i + 1;
+               break;
+             }
+           }
+         }
+       }
+       if (linesFound < maxLines) {
+         startReadPos = 0;
+       }
+       const lengthToRead = stat.size - startReadPos;
+       const resultBuffer = Buffer.alloc(lengthToRead);
+       await fileHandle.read(resultBuffer, 0, lengthToRead, startReadPos);
+       const content = resultBuffer.toString(encoding);
+       const lines = content.split(`
+ `);
+       for (const line of lines) {
+         if (line.length > 0) {
+           yield line;
+         }
+       }
+     }
+     position = stat.size;
+     const streamBuffer = Buffer.alloc(bufferSize);
+     while (!signal?.aborted) {
+       try {
+         const currentStat = await fileHandle.stat();
+         if (currentStat.size < position) {
+           position = 0;
+           remainder = "";
+         }
+         if (currentStat.size > position) {
+           const bytesToRead = Math.min(bufferSize, currentStat.size - position);
+           const { bytesRead } = await fileHandle.read(streamBuffer, 0, bytesToRead, position);
+           if (bytesRead > 0) {
+             position += bytesRead;
+             const chunk = streamBuffer.subarray(0, bytesRead).toString(encoding);
+             const content = remainder + chunk;
+             const lines = content.split(`
+ `);
+             remainder = lines.pop() ?? "";
+             for (const line of lines) {
+               if (line.length > 0) {
+                 yield line;
+               }
+             }
+           }
+         }
+         await new Promise((resolve, reject) => {
+           const timeout = setTimeout(resolve, pollInterval);
+           signal?.addEventListener("abort", () => {
+             clearTimeout(timeout);
+             reject(new Error("Aborted"));
+           }, { once: true });
+         });
+       } catch (error) {
+         if (signal?.aborted)
+           break;
+         try {
+           await fileHandle.close();
+           fileHandle = await open(filePath, "r");
+           position = 0;
+           remainder = "";
+         } catch {
+           throw error;
+         }
+       }
+     }
+     if (remainder.length > 0) {
+       yield remainder;
+     }
+   } finally {
      if (fileHandle) {
        await fileHandle.close();
      }
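
The truncation handling added in `readeofStream` (when `currentStat.size < position`, the stream resets `position` and `remainder` and re-reads from the start of the rewritten file) can be exercised with a small script like the one below. The file name, timings, and timeout are made up for the demo, and the exact output depends on poll timing.

```typescript
import { appendFile, truncate, writeFile } from 'node:fs/promises';
import { setTimeout as sleep } from 'node:timers/promises';
import { readeof } from 'readeof';

const file = './rotate-demo.log';
await writeFile(file, 'old-1\nold-2\n');

// Start tailing: yields the last line first, then new lines as they appear.
const reader = (async () => {
  for await (const line of readeof(file, 1, {
    enabled: true,
    pollInterval: 100,
    signal: AbortSignal.timeout(2_000),
  })) {
    console.log('got:', line);
  }
})();

await sleep(300);
await truncate(file, 0);           // simulate log rotation/truncation
await appendFile(file, 'new-1\n'); // content written after rotation
await reader;                      // typically prints "got: old-2", then "got: new-1"
```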
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "readeof",
    "description": "A high-performance, memory-efficient library to read large files backward (line-by-line). Optimized for Node.js and Bun log parsing.",
-   "version": "1.0.0",
+   "version": "1.1.0",
    "type": "module",
    "main": "./dist/index.js",
    "module": "./dist/index.mjs",