bm2 1.0.24 → 1.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bm2",
3
- "version": "1.0.24",
3
+ "version": "1.0.26",
4
4
  "description": "A blazing-fast, full-featured process manager built entirely on Bun native APIs. The modern PM2 replacement — zero Node.js dependencies, pure Bun performance.",
5
5
  "main": "src/api.ts",
6
6
  "module": "src/api.ts",
@@ -13,11 +13,12 @@
13
13
  * License: GPL-3.0-only
14
14
  * Author: Zak <zak@maxxpainn.com>
15
15
  */
16
- import type { Subprocess } from "bun";
17
- import type { ProcessDescription } from "./types";
18
- import { getCpuCount } from "./utils";
16
+ import type { Subprocess } from "bun";
17
+ import type { ProcessDescription } from "./types";
18
+ import { getCpuCount } from "./utils";
19
+ import path from "path"
19
20
 
20
- export class ClusterManager {
21
+ export class ClusterManager {
21
22
  private workers: Map<number, Map<number, Subprocess>> = new Map();
22
23
 
23
24
  resolveInstances(instances: number | string | undefined): number {
@@ -67,7 +68,7 @@
67
68
  cmd.push(...config.nodeArgs);
68
69
  }
69
70
 
70
- cmd.push(config.script);
71
+ cmd.push(path.resolve(config.script));
71
72
  if (config.args?.length) cmd.push(...config.args);
72
73
 
73
74
  return cmd;
package/src/index.ts CHANGED
@@ -141,7 +141,9 @@ async function sendToDaemon(msg: DaemonMessage): Promise<DaemonResponse> {
141
141
  // ---------------------------------------------------------------------------
142
142
 
143
143
  async function loadEcosystemConfig(filePath: string): Promise<EcosystemConfig> {
144
+
144
145
  const abs = resolve(filePath);
146
+
145
147
  if (!existsSync(abs)) {
146
148
  throw new Error(`Ecosystem file not found: ${abs}`);
147
149
  }
@@ -13,9 +13,10 @@
13
13
  * License: GPL-3.0-only
14
14
  * Author: Zak <zak@maxxpainn.com>
15
15
  */
16
-
16
+
17
17
  import { join, dirname } from "path";
18
- import { existsSync, readdirSync, unlinkSync, renameSync, statSync } from "fs";
18
+ import { openSync, readSync, closeSync } from "fs";
19
+ import { appendFile, stat, rename, unlink, readdir, access } from "fs/promises";
19
20
  import { LOG_DIR, DEFAULT_LOG_MAX_SIZE, DEFAULT_LOG_RETAIN } from "./constants";
20
21
  import type { LogRotateOptions } from "./types";
21
22
 
@@ -56,11 +57,12 @@ export class LogManager {
56
57
  this.flushTimers.delete(filePath);
57
58
 
58
59
  try {
59
- const file = Bun.file(filePath);
60
- const existing = (await file.exists()) ? await file.text() : "";
61
- await Bun.write(filePath, existing + content);
60
+ // Use appendFile (O_APPEND) instead of read-entire-file-then-rewrite.
61
+ // The old Bun.write approach pulled the whole log into a JS string on
62
+ // every flush — O(file size) memory per flush, quadratic overall.
63
+ // appendFile seeks to EOF at the kernel level and writes only new bytes.
64
+ await appendFile(filePath, content, { encoding: "utf8" });
62
65
  } catch (err) {
63
- // If file too large, log the error
64
66
  console.error(`[bm2] Failed to write log: ${filePath}`, err);
65
67
  }
66
68
  }
@@ -120,14 +122,29 @@ export class LogManager {
120
122
  try {
121
123
  const f = Bun.file(filePath);
122
124
  if (!(await f.exists())) return;
125
+
123
126
  const currentSize = f.size;
124
- if (currentSize > lastSize) {
125
- const text = await f.text();
126
- const newContent = text.substring(lastSize);
127
- lastSize = currentSize;
128
- for (const line of newContent.split("\n").filter(Boolean)) {
129
- callback(line);
130
- }
127
+ if (currentSize <= lastSize) return;
128
+
129
+ const byteLength = currentSize - lastSize;
130
+
131
+ // Read only the new bytes via fs.readSync to avoid:
132
+ // 1. Loading the entire file into memory on every poll.
133
+ // 2. Slicing by character offset (lastSize) on a UTF-8 string,
134
+ // which silently corrupts multi-byte sequences.
135
+ const buf = Buffer.allocUnsafe(byteLength);
136
+ const fd = openSync(filePath, "r");
137
+ try {
138
+ readSync(fd, buf, 0, byteLength, lastSize);
139
+ } finally {
140
+ closeSync(fd);
141
+ }
142
+
143
+ lastSize = currentSize;
144
+
145
+ const newContent = new TextDecoder().decode(buf);
146
+ for (const line of newContent.split("\n").filter(Boolean)) {
147
+ callback(line);
131
148
  }
132
149
  } catch {}
133
150
  }, 500);
@@ -138,43 +155,55 @@ export class LogManager {
138
155
  const file = Bun.file(filePath);
139
156
  if (!(await file.exists())) return;
140
157
 
141
- const stat = statSync(filePath);
142
- if (stat.size < options.maxSize) return;
158
+ // Async stat — no thread-blocking syscall on the main event loop
159
+ const fileStat = await stat(filePath);
160
+ if (fileStat.size < options.maxSize) return;
143
161
 
144
- // Rotate files
162
+ // Rotate files: shift .N → .N+1, filePath → .1
145
163
  for (let i = options.retain - 1; i >= 1; i--) {
146
164
  const src = i === 1 ? filePath : `${filePath}.${i - 1}`;
147
165
  const dst = `${filePath}.${i}`;
148
- if (existsSync(src)) {
149
- renameSync(src, dst);
150
-
151
- if (options.compress && i > 0) {
152
- // Compress rotated file using Bun's gzip
153
- try {
154
- const content = await Bun.file(dst).arrayBuffer();
155
- const compressed = Bun.gzipSync(new Uint8Array(content));
156
- await Bun.write(`${dst}.gz`, compressed);
157
- unlinkSync(dst);
158
- } catch {}
166
+
167
+ const srcExists = await access(src).then(() => true).catch(() => false);
168
+ if (!srcExists) continue;
169
+
170
+ await rename(src, dst);
171
+
172
+ if (options.compress) {
173
+ // Spawn the system `gzip` binary as a background subprocess so
174
+ // compression never blocks the JS event loop. gzip -f replaces
175
+ // `dst` with `dst.gz` in-place, matching the old .gz naming.
176
+ try {
177
+ const proc = Bun.spawn(["gzip", "-f", dst], {
178
+ stdout: "ignore",
179
+ stderr: "pipe",
180
+ });
181
+ const exitCode = await proc.exited;
182
+ if (exitCode !== 0) {
183
+ const errText = await new Response(proc.stderr).text();
184
+ console.error(`[bm2] gzip failed for ${dst}: ${errText.trim()}`);
185
+ }
186
+ } catch (compressErr) {
187
+ console.error(`[bm2] Failed to compress rotated log ${dst}:`, compressErr);
159
188
  }
160
189
  }
161
190
  }
162
191
 
163
- // Clean excess rotated files
192
+ // Clean excess rotated files asynchronously
164
193
  const dir = dirname(filePath);
165
194
  const baseName = filePath.split("/").pop()!;
166
195
  try {
167
- const files = readdirSync(dir);
196
+ const files = await readdir(dir);
168
197
  const rotated = files
169
198
  .filter((f) => f.startsWith(baseName + "."))
170
199
  .sort()
171
200
  .reverse();
172
- for (let i = options.retain; i < rotated.length; i++) {
173
- unlinkSync(join(dir, rotated[i]!));
174
- }
201
+ await Promise.all(
202
+ rotated.slice(options.retain).map((f) => unlink(join(dir, f)).catch(() => {}))
203
+ );
175
204
  } catch {}
176
205
 
177
- // Truncate original
206
+ // Truncate original to reclaim its data blocks while keeping the same inode open for writers
178
207
  await Bun.write(filePath, "");
179
208
  } catch (err) {
180
209
  console.error(`[bm2] Log rotation failed for ${filePath}:`, err);
@@ -227,18 +227,57 @@ export class ProcessContainer {
227
227
 
228
228
  private async pipeStream(stream: ReadableStream<Uint8Array>, filePath: string) {
229
229
  const reader = stream.getReader();
230
+ const decoder = new TextDecoder();
231
+
232
+ // Holds the tail of the last chunk if it did not end on a newline.
233
+ // Without this, a chunk boundary mid-word (e.g. "hel" / "lo\n") would be
234
+ // written as two separate log lines, corrupting the output.
235
+ let remainder = "";
236
+
230
237
  try {
231
238
  while (true) {
232
239
  const { done, value } = await reader.read();
233
- if (done) break;
234
- const text = new TextDecoder().decode(value);
235
- const timestamp = new Date().toISOString();
236
- const lines = text.split("\n").filter(Boolean);
237
- for (const line of lines) {
238
- await this.logManager.appendLog(filePath, `[${timestamp}] ${line}\n`);
240
+
241
+ if (done) {
242
+ // Flush any buffered content that was never terminated with \n
243
+ if (remainder.length > 0) {
244
+ const timestamp = new Date().toISOString();
245
+ await this.logManager.appendLog(filePath, `[${timestamp}] ${remainder}\n`);
246
+ remainder = "";
247
+ }
248
+ break;
239
249
  }
250
+
251
+ // stream=true tells the decoder to hold multi-byte UTF-8 sequences
252
+ // that straddle chunk boundaries rather than emitting replacement chars.
253
+ const chunk = decoder.decode(value, { stream: true });
254
+
255
+ // Prepend any leftover from the previous chunk before splitting.
256
+ // This is a single string allocation per chunk (not per line), so
257
+ // allocation pressure stays O(chunk size) rather than O(line count).
258
+ const text = remainder + chunk;
259
+ const lines = text.split("\n");
260
+
261
+ // The last element is either "" (chunk ended on \n) or an incomplete
262
+ // line. Either way, hold it back for the next iteration.
263
+ remainder = lines.pop()!;
264
+
265
+ if (lines.length === 0) continue;
266
+
267
+ const timestamp = new Date().toISOString();
268
+ // Build a single string for all complete lines in this chunk so
269
+ // appendLog (and the underlying O_APPEND write) is called once per
270
+ // chunk, not once per line.
271
+ const output = lines.map((line) => `[${timestamp}] ${line}\n`).join("");
272
+ await this.logManager.appendLog(filePath, output);
240
273
  }
241
- } catch {}
274
+ } catch {
275
+ // Flush remainder on unexpected stream error
276
+ if (remainder.length > 0) {
277
+ const timestamp = new Date().toISOString();
278
+ await this.logManager.appendLog(filePath, `[${timestamp}] ${remainder}\n`).catch(() => {});
279
+ }
280
+ }
242
281
  }
243
282
 
244
283
 
@@ -35,7 +35,9 @@
35
35
  DEFAULT_RESTART_DELAY,
36
36
  DEFAULT_LOG_MAX_SIZE,
37
37
  DEFAULT_LOG_RETAIN,
38
- } from "./constants";
38
+ } from "./constants";
39
+ import path from "path";
40
+
39
41
 
40
42
  export class ProcessManager {
41
43
  private processes: Map<number, ProcessContainer> = new Map();
@@ -73,8 +75,12 @@
73
75
  const config = this.buildConfig(id, name, options, resolvedInstances, i);
74
76
 
75
77
  const container = new ProcessContainer(
76
- id, config, this.logManager, this.clusterManager,
77
- this.healthChecker, this.cronManager
78
+ id,
79
+ config,
80
+ this.logManager,
81
+ this.clusterManager,
82
+ this.healthChecker,
83
+ this.cronManager
78
84
  );
79
85
 
80
86
  this.processes.set(id, container);
@@ -109,10 +115,15 @@
109
115
  instances: number,
110
116
  workerIndex: number
111
117
  ): ProcessDescription {
118
+
119
+ const script = path.isAbsolute(options.script)
120
+ ? options.script
121
+ : path.resolve(process.cwd(), options.script);
122
+
112
123
  return {
113
124
  id,
114
125
  name,
115
- script: options.script,
126
+ script,
116
127
  args: options.args || [],
117
128
  cwd: options.cwd || process.cwd(),
118
129
  env: {