@orkify/cli 1.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +191 -0
- package/README.md +1701 -0
- package/bin/orkify +3 -0
- package/boot/systemd/orkify@.service +30 -0
- package/dist/agent-name.d.ts +4 -0
- package/dist/agent-name.js +42 -0
- package/dist/alerts/AlertEvaluator.d.ts +14 -0
- package/dist/alerts/AlertEvaluator.js +135 -0
- package/dist/cli/commands/autostart.d.ts +3 -0
- package/dist/cli/commands/autostart.js +11 -0
- package/dist/cli/commands/crash-test.d.ts +3 -0
- package/dist/cli/commands/crash-test.js +17 -0
- package/dist/cli/commands/daemon-reload.d.ts +3 -0
- package/dist/cli/commands/daemon-reload.js +72 -0
- package/dist/cli/commands/delete.d.ts +3 -0
- package/dist/cli/commands/delete.js +37 -0
- package/dist/cli/commands/deploy.d.ts +6 -0
- package/dist/cli/commands/deploy.js +266 -0
- package/dist/cli/commands/down.d.ts +3 -0
- package/dist/cli/commands/down.js +36 -0
- package/dist/cli/commands/flush.d.ts +3 -0
- package/dist/cli/commands/flush.js +28 -0
- package/dist/cli/commands/kill.d.ts +3 -0
- package/dist/cli/commands/kill.js +35 -0
- package/dist/cli/commands/list.d.ts +14 -0
- package/dist/cli/commands/list.js +361 -0
- package/dist/cli/commands/logs.d.ts +3 -0
- package/dist/cli/commands/logs.js +107 -0
- package/dist/cli/commands/mcp.d.ts +3 -0
- package/dist/cli/commands/mcp.js +151 -0
- package/dist/cli/commands/reload.d.ts +3 -0
- package/dist/cli/commands/reload.js +54 -0
- package/dist/cli/commands/restart.d.ts +3 -0
- package/dist/cli/commands/restart.js +43 -0
- package/dist/cli/commands/restore.d.ts +3 -0
- package/dist/cli/commands/restore.js +88 -0
- package/dist/cli/commands/run.d.ts +8 -0
- package/dist/cli/commands/run.js +212 -0
- package/dist/cli/commands/snap.d.ts +3 -0
- package/dist/cli/commands/snap.js +30 -0
- package/dist/cli/commands/up.d.ts +3 -0
- package/dist/cli/commands/up.js +125 -0
- package/dist/cli/crash-recovery.d.ts +2 -0
- package/dist/cli/crash-recovery.js +67 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.js +46 -0
- package/dist/cli/parse.d.ts +28 -0
- package/dist/cli/parse.js +97 -0
- package/dist/cluster/ClusterWrapper.d.ts +18 -0
- package/dist/cluster/ClusterWrapper.js +602 -0
- package/dist/config/ConfigStore.d.ts +11 -0
- package/dist/config/ConfigStore.js +21 -0
- package/dist/config/schema.d.ts +103 -0
- package/dist/config/schema.js +49 -0
- package/dist/constants.d.ts +83 -0
- package/dist/constants.js +289 -0
- package/dist/cron/CronScheduler.d.ts +25 -0
- package/dist/cron/CronScheduler.js +149 -0
- package/dist/daemon/GracefulManager.d.ts +8 -0
- package/dist/daemon/GracefulManager.js +29 -0
- package/dist/daemon/ManagedProcess.d.ts +71 -0
- package/dist/daemon/ManagedProcess.js +1020 -0
- package/dist/daemon/Orchestrator.d.ts +51 -0
- package/dist/daemon/Orchestrator.js +416 -0
- package/dist/daemon/RotatingWriter.d.ts +27 -0
- package/dist/daemon/RotatingWriter.js +264 -0
- package/dist/daemon/index.d.ts +2 -0
- package/dist/daemon/index.js +106 -0
- package/dist/daemon/startDaemon.d.ts +30 -0
- package/dist/daemon/startDaemon.js +693 -0
- package/dist/deploy/CommandPoller.d.ts +13 -0
- package/dist/deploy/CommandPoller.js +53 -0
- package/dist/deploy/DeployExecutor.d.ts +33 -0
- package/dist/deploy/DeployExecutor.js +340 -0
- package/dist/deploy/config.d.ts +20 -0
- package/dist/deploy/config.js +161 -0
- package/dist/deploy/env.d.ts +2 -0
- package/dist/deploy/env.js +17 -0
- package/dist/deploy/tarball.d.ts +32 -0
- package/dist/deploy/tarball.js +243 -0
- package/dist/detect/framework.d.ts +2 -0
- package/dist/detect/framework.js +24 -0
- package/dist/ipc/DaemonClient.d.ts +31 -0
- package/dist/ipc/DaemonClient.js +248 -0
- package/dist/ipc/DaemonServer.d.ts +28 -0
- package/dist/ipc/DaemonServer.js +166 -0
- package/dist/ipc/MultiUserClient.d.ts +27 -0
- package/dist/ipc/MultiUserClient.js +203 -0
- package/dist/ipc/protocol.d.ts +7 -0
- package/dist/ipc/protocol.js +53 -0
- package/dist/ipc/restoreDaemon.d.ts +8 -0
- package/dist/ipc/restoreDaemon.js +19 -0
- package/dist/machine-id.d.ts +11 -0
- package/dist/machine-id.js +51 -0
- package/dist/mcp/auth.d.ts +118 -0
- package/dist/mcp/auth.js +245 -0
- package/dist/mcp/http.d.ts +20 -0
- package/dist/mcp/http.js +229 -0
- package/dist/mcp/index.d.ts +3 -0
- package/dist/mcp/index.js +8 -0
- package/dist/mcp/server.d.ts +37 -0
- package/dist/mcp/server.js +413 -0
- package/dist/probe/compute-fingerprint.d.ts +27 -0
- package/dist/probe/compute-fingerprint.js +65 -0
- package/dist/probe/parse-frames.d.ts +21 -0
- package/dist/probe/parse-frames.js +57 -0
- package/dist/probe/resolve-sourcemaps.d.ts +25 -0
- package/dist/probe/resolve-sourcemaps.js +281 -0
- package/dist/state/StateStore.d.ts +11 -0
- package/dist/state/StateStore.js +78 -0
- package/dist/telemetry/TelemetryReporter.d.ts +49 -0
- package/dist/telemetry/TelemetryReporter.js +451 -0
- package/dist/types/index.d.ts +373 -0
- package/dist/types/index.js +2 -0
- package/package.json +148 -0
- package/packages/cache/README.md +114 -0
- package/packages/cache/dist/CacheClient.d.ts +26 -0
- package/packages/cache/dist/CacheClient.d.ts.map +1 -0
- package/packages/cache/dist/CacheClient.js +174 -0
- package/packages/cache/dist/CacheClient.js.map +1 -0
- package/packages/cache/dist/CacheFileStore.d.ts +45 -0
- package/packages/cache/dist/CacheFileStore.d.ts.map +1 -0
- package/packages/cache/dist/CacheFileStore.js +446 -0
- package/packages/cache/dist/CacheFileStore.js.map +1 -0
- package/packages/cache/dist/CachePersistence.d.ts +9 -0
- package/packages/cache/dist/CachePersistence.d.ts.map +1 -0
- package/packages/cache/dist/CachePersistence.js +67 -0
- package/packages/cache/dist/CachePersistence.js.map +1 -0
- package/packages/cache/dist/CachePrimary.d.ts +25 -0
- package/packages/cache/dist/CachePrimary.d.ts.map +1 -0
- package/packages/cache/dist/CachePrimary.js +155 -0
- package/packages/cache/dist/CachePrimary.js.map +1 -0
- package/packages/cache/dist/CacheStore.d.ts +50 -0
- package/packages/cache/dist/CacheStore.d.ts.map +1 -0
- package/packages/cache/dist/CacheStore.js +271 -0
- package/packages/cache/dist/CacheStore.js.map +1 -0
- package/packages/cache/dist/constants.d.ts +6 -0
- package/packages/cache/dist/constants.d.ts.map +1 -0
- package/packages/cache/dist/constants.js +9 -0
- package/packages/cache/dist/constants.js.map +1 -0
- package/packages/cache/dist/index.d.ts +16 -0
- package/packages/cache/dist/index.d.ts.map +1 -0
- package/packages/cache/dist/index.js +86 -0
- package/packages/cache/dist/index.js.map +1 -0
- package/packages/cache/dist/serialize.d.ts +9 -0
- package/packages/cache/dist/serialize.d.ts.map +1 -0
- package/packages/cache/dist/serialize.js +40 -0
- package/packages/cache/dist/serialize.js.map +1 -0
- package/packages/cache/dist/types.d.ts +123 -0
- package/packages/cache/dist/types.d.ts.map +1 -0
- package/packages/cache/dist/types.js +2 -0
- package/packages/cache/dist/types.js.map +1 -0
- package/packages/cache/package.json +27 -0
- package/packages/cache/src/CacheClient.ts +227 -0
- package/packages/cache/src/CacheFileStore.ts +528 -0
- package/packages/cache/src/CachePersistence.ts +89 -0
- package/packages/cache/src/CachePrimary.ts +172 -0
- package/packages/cache/src/CacheStore.ts +308 -0
- package/packages/cache/src/constants.ts +10 -0
- package/packages/cache/src/index.ts +100 -0
- package/packages/cache/src/serialize.ts +49 -0
- package/packages/cache/src/types.ts +156 -0
- package/packages/cache/tsconfig.json +18 -0
- package/packages/cache/tsconfig.tsbuildinfo +1 -0
- package/packages/next/README.md +166 -0
- package/packages/next/dist/error-capture.d.ts +34 -0
- package/packages/next/dist/error-capture.d.ts.map +1 -0
- package/packages/next/dist/error-capture.js +130 -0
- package/packages/next/dist/error-capture.js.map +1 -0
- package/packages/next/dist/error-handler.d.ts +10 -0
- package/packages/next/dist/error-handler.d.ts.map +1 -0
- package/packages/next/dist/error-handler.js +186 -0
- package/packages/next/dist/error-handler.js.map +1 -0
- package/packages/next/dist/isr-cache.d.ts +9 -0
- package/packages/next/dist/isr-cache.d.ts.map +1 -0
- package/packages/next/dist/isr-cache.js +86 -0
- package/packages/next/dist/isr-cache.js.map +1 -0
- package/packages/next/dist/stream.d.ts +5 -0
- package/packages/next/dist/stream.d.ts.map +1 -0
- package/packages/next/dist/stream.js +22 -0
- package/packages/next/dist/stream.js.map +1 -0
- package/packages/next/dist/types.d.ts +33 -0
- package/packages/next/dist/types.d.ts.map +1 -0
- package/packages/next/dist/types.js +6 -0
- package/packages/next/dist/types.js.map +1 -0
- package/packages/next/dist/use-cache.d.ts +4 -0
- package/packages/next/dist/use-cache.d.ts.map +1 -0
- package/packages/next/dist/use-cache.js +86 -0
- package/packages/next/dist/use-cache.js.map +1 -0
- package/packages/next/dist/utils.d.ts +32 -0
- package/packages/next/dist/utils.d.ts.map +1 -0
- package/packages/next/dist/utils.js +88 -0
- package/packages/next/dist/utils.js.map +1 -0
- package/packages/next/package.json +52 -0
- package/packages/next/src/error-capture.ts +177 -0
- package/packages/next/src/error-handler.ts +221 -0
- package/packages/next/src/isr-cache.ts +100 -0
- package/packages/next/src/stream.ts +23 -0
- package/packages/next/src/types.ts +33 -0
- package/packages/next/src/use-cache.ts +99 -0
- package/packages/next/src/utils.ts +102 -0
- package/packages/next/tsconfig.json +19 -0
- package/packages/next/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
import { closeSync, createReadStream, createWriteStream, existsSync, mkdirSync, openSync, readdirSync, renameSync, statSync, truncateSync, unlinkSync, writeSync, } from 'node:fs';
|
|
2
|
+
import { readdir, unlink } from 'node:fs/promises';
|
|
3
|
+
import { basename, dirname, join } from 'node:path';
|
|
4
|
+
import { pipeline } from 'node:stream/promises';
|
|
5
|
+
import { createGzip } from 'node:zlib';
|
|
6
|
+
/**
 * Synchronous append-mode log writer with rotation, background gzip
 * compression, and archive pruning.
 *
 * The active file is rotated when it reaches `maxSize` bytes or when the
 * local calendar date changes. Rotated files are renamed to
 * `<filePath>-<timestamp>`, gzipped in the background, and pruned so that at
 * most `maxFiles` archives remain and (when `maxAge` > 0) none is older than
 * `maxAge` milliseconds.
 */
export class RotatingWriter {
    // Append-mode file descriptor for the active log file.
    fd;
    // Bytes written to the active file since open/last rotation; seeded from
    // the existing file's size in the constructor.
    bytesWritten;
    // Local date string (YYYY-MM-DD) of the last rotation; a change of date
    // triggers a daily rotation in write().
    lastRotationDate;
    // Re-entrancy guard: true while rotate() is in progress.
    rotating = false;
    // Sequential chain of background compression/prune jobs; drain() and
    // flush() await it so cleanup never races an in-flight compression.
    compressionChain = Promise.resolve();
    filePath;
    maxSize;
    maxFiles;
    maxAge;
    // Cache todayString() — only recompute once per second
    cachedDateString;
    cachedDateAt = 0;
    /**
     * @param filePath Path of the active log file; its parent directory is
     *                 created if missing.
     * @param maxSize  Rotate once the active file reaches this many bytes.
     * @param maxFiles Maximum number of `.gz` archives to keep; a value <= 0
     *                 disables rotation entirely (see write()).
     * @param maxAge   Delete archives older than this many milliseconds;
     *                 0 disables age-based pruning.
     */
    constructor(filePath, maxSize, maxFiles, maxAge = 0) {
        this.filePath = filePath;
        this.maxSize = maxSize;
        this.maxFiles = maxFiles;
        this.maxAge = maxAge;
        // Ensure parent directory exists (handles restarts after dir deletion)
        mkdirSync(dirname(filePath), { recursive: true });
        // Seed bytesWritten from existing file
        try {
            this.bytesWritten = statSync(filePath).size;
        }
        catch {
            this.bytesWritten = 0;
        }
        this.cachedDateString = this.computeDateString();
        this.cachedDateAt = Date.now();
        this.lastRotationDate = this.cachedDateString;
        // Open file descriptor in append mode (creates file if it doesn't exist)
        this.fd = openSync(filePath, 'a');
    }
    /**
     * Synchronously append `data` to the active file, then rotate if the size
     * threshold was reached or the local date changed. Never throws: write
     * errors are either self-healed (EBADF) or logged and dropped.
     */
    write(data) {
        try {
            writeSync(this.fd, data);
        }
        catch (err) {
            // Self-heal a closed fd (e.g., from a failed rotation on a full disk)
            if (err.code === 'EBADF') {
                try {
                    mkdirSync(dirname(this.filePath), { recursive: true });
                    this.fd = openSync(this.filePath, 'a');
                    writeSync(this.fd, data);
                }
                catch {
                    return;
                }
            }
            else {
                console.error(`Log write error (${this.filePath}):`, err.message);
                return;
            }
        }
        this.bytesWritten += Buffer.byteLength(data);
        if (this.maxFiles > 0 && !this.rotating) {
            const today = this.todayString();
            if (this.bytesWritten >= this.maxSize || today !== this.lastRotationDate) {
                this.rotate();
            }
        }
    }
    /**
     * Rotate now: close the fd so buffered data reaches the inode, rename the
     * active file to a timestamped sibling, reopen a fresh fd at the original
     * path, and queue background compression of the rotated file. On failure
     * it attempts to re-open the fd so write() can continue.
     */
    rotate() {
        try {
            this.rotating = true;
            const now = new Date();
            const timestamp = this.formatTimestamp(now);
            const rotatedPath = `${this.filePath}-${timestamp}`;
            // Close the current fd so all data is flushed to the inode
            closeSync(this.fd);
            // Rename the file — the data stays with the inode
            renameSync(this.filePath, rotatedPath);
            // Open a new fd at the original path
            this.fd = openSync(this.filePath, 'a');
            this.bytesWritten = 0;
            this.lastRotationDate = this.todayString();
            this.rotating = false;
            // Chain compressions so they run sequentially and drain() awaits all of them
            this.compressionChain = this.compressionChain
                .then(() => this.compressAndPrune(rotatedPath))
                .catch((err) => {
                console.error(`Log compression error (${rotatedPath}):`, err.message);
            });
        }
        catch (err) {
            this.rotating = false;
            console.error(`Log rotation error (${this.filePath}):`, err.message);
            // Try to re-open fd if it was closed
            try {
                this.fd = openSync(this.filePath, 'a');
            }
            catch {
                // Nothing more we can do
            }
        }
    }
    /**
     * Background job queued by rotate(): gzip `uncompressedPath` (if it still
     * exists), then sweep the log directory — compress bare rotated files left
     * by earlier failed/interrupted compressions and prune `.gz` archives by
     * age (`maxAge`) and count (`maxFiles`). Errors are logged or swallowed.
     */
    async compressAndPrune(uncompressedPath) {
        // Skip if already compressed by a previous call's prune step.
        // Without this guard, createWriteStream truncates the valid .gz to 0 bytes
        // before createReadStream fails with ENOENT, corrupting the archive.
        if (existsSync(uncompressedPath)) {
            const gzPath = `${uncompressedPath}.gz`;
            try {
                await pipeline(createReadStream(uncompressedPath), createGzip(), createWriteStream(gzPath));
                unlinkSync(uncompressedPath);
            }
            catch (err) {
                console.error(`Log compression error (${uncompressedPath}):`, err.message);
                // Leave uncompressed file — bare files are cleaned up on next prune cycle
            }
        }
        // Prune old archives and bare files left by failed compressions
        try {
            const dir = dirname(this.filePath);
            const base = basename(this.filePath);
            const files = await readdir(dir);
            // Compress bare timestamped files left by failed/interrupted compressions.
            // If a .gz already exists for the same timestamp (corrupt from crash), replace it.
            const bareFiles = files.filter((f) => f.startsWith(`${base}-`) && !f.endsWith('.gz'));
            for (const bare of bareFiles) {
                const barePath = join(dir, bare);
                if (!existsSync(barePath))
                    continue;
                const bareGzPath = `${barePath}.gz`;
                try {
                    await pipeline(createReadStream(barePath), createGzip(), createWriteStream(bareGzPath));
                    await unlink(barePath);
                }
                catch {
                    // If compression fails, delete the bare file to prevent infinite retries
                    try {
                        await unlink(barePath);
                    }
                    catch {
                        // Ignore
                    }
                }
            }
            // Prune .gz archives by age and count
            const archives = files.filter((f) => f.startsWith(`${base}-`) && f.endsWith('.gz')).sort();
            // Delete archives older than maxAge
            if (this.maxAge > 0) {
                const cutoff = Date.now() - this.maxAge;
                // Iterate backwards so splice() doesn't shift unvisited indices
                for (let i = archives.length - 1; i >= 0; i--) {
                    const ts = this.parseArchiveTimestamp(archives[i], base);
                    if (ts > 0 && ts < cutoff) {
                        try {
                            await unlink(join(dir, archives[i]));
                            archives.splice(i, 1);
                        }
                        catch {
                            // Ignore
                        }
                    }
                }
            }
            // Delete oldest archives if count exceeds maxFiles
            // (archives sorts lexicographically, which for this timestamp
            // format is also chronological, so shift() removes the oldest)
            while (archives.length > this.maxFiles) {
                const oldest = archives.shift();
                try {
                    await unlink(join(dir, oldest));
                }
                catch {
                    // Ignore individual delete errors
                }
            }
        }
        catch {
            // Ignore prune errors
        }
    }
    /**
     * Discard all log data for this writer: wait for pending compressions,
     * truncate the active file, delete every rotated sibling (compressed and
     * bare), and reopen a fresh fd at the original path.
     */
    async flush() {
        // Wait for any in-flight compression to finish before cleaning up
        await this.compressionChain;
        try {
            closeSync(this.fd);
        }
        catch {
            // fd may already be closed
        }
        try {
            truncateSync(this.filePath, 0);
        }
        catch {
            // File may not exist
        }
        // Remove all rotated files (compressed and bare)
        const dir = dirname(this.filePath);
        const base = basename(this.filePath);
        try {
            const files = readdirSync(dir);
            for (const f of files) {
                if (f.startsWith(`${base}-`)) {
                    try {
                        unlinkSync(join(dir, f));
                    }
                    catch {
                        // Ignore
                    }
                }
            }
        }
        catch {
            // Ignore
        }
        this.fd = openSync(this.filePath, 'a');
        this.bytesWritten = 0;
    }
    /** Close the active file descriptor. Safe to call more than once. */
    end() {
        try {
            closeSync(this.fd);
        }
        catch {
            // fd may already be closed
        }
    }
    /** Wait for all pending compressions to complete. */
    async drain() {
        await this.compressionChain;
    }
    /** Current local date as YYYY-MM-DD, served from a 1-second cache. */
    todayString() {
        const now = Date.now();
        // Recompute at most once per second
        if (now - this.cachedDateAt >= 1000) {
            this.cachedDateString = this.computeDateString();
            this.cachedDateAt = now;
        }
        return this.cachedDateString;
    }
    /** Format the current local date as zero-padded YYYY-MM-DD. */
    computeDateString() {
        const now = new Date();
        return `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}-${String(now.getDate()).padStart(2, '0')}`;
    }
    /** Parse epoch ms from archive filename like `app.stdout.log-20260217T143052.123.gz`.
     * Returns 0 when the filename doesn't match the expected pattern. */
    parseArchiveTimestamp(filename, base) {
        // Extract timestamp portion: after `{base}-` and before `.gz`
        const prefix = `${base}-`;
        if (!filename.startsWith(prefix) || !filename.endsWith('.gz'))
            return 0;
        const ts = filename.slice(prefix.length, -3); // remove prefix and .gz
        // Format: YYYYMMDDTHHMMSS.mmm
        const match = ts.match(/^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})(?:\.(\d{3}))?$/);
        if (!match)
            return 0;
        const [, y, mo, d, h, mi, s, ms] = match;
        // Interpreted in local time, matching formatTimestamp() below
        return new Date(+y, +mo - 1, +d, +h, +mi, +s, +(ms || 0)).getTime();
    }
    /** Local-time timestamp `YYYYMMDDTHHMMSS.mmm` used to name rotated files. */
    formatTimestamp(date) {
        const y = date.getFullYear();
        const m = String(date.getMonth() + 1).padStart(2, '0');
        const d = String(date.getDate()).padStart(2, '0');
        const h = String(date.getHours()).padStart(2, '0');
        const min = String(date.getMinutes()).padStart(2, '0');
        const sec = String(date.getSeconds()).padStart(2, '0');
        const milli = String(date.getMilliseconds()).padStart(3, '0');
        return `${y}${m}${d}T${h}${min}${sec}.${milli}`;
    }
}
|
|
264
|
+
//# sourceMappingURL=RotatingWriter.js.map
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { spawn as spawnChild } from 'node:child_process';
import { openSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { DAEMON_LOG_FILE } from '../constants.js';
import { startDaemon } from './startDaemon.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Boot the daemon core first, then wire process-level handlers around it.
const ctx = await startDaemon({ foreground: false });
const { orchestrator, gracefulShutdown, cleanup } = ctx;
/**
 * Launch a detached helper process that starts a replacement daemon and
 * restores every running process config. Runs at most once per generation:
 * a daemon that was itself spawned by crash recovery (ORKIFY_CRASH_RECOVERY
 * set) refuses to recurse, which prevents infinite crash loops.
 */
function crashRecovery() {
    if (process.env.ORKIFY_CRASH_RECOVERY) {
        console.error('Skipping crash recovery — this daemon was started by crash recovery');
        return;
    }
    try {
        const configs = orchestrator.getRunningConfigs();
        const mcpOptions = ctx.getMcpOptions();
        const nothingToRestore = configs.length === 0 && !mcpOptions;
        if (nothingToRestore) {
            console.error('Crash recovery: no running processes to restore');
            return;
        }
        // Forward only the API credentials into the recovery payload.
        const env = {};
        if (process.env.ORKIFY_API_KEY)
            env.ORKIFY_API_KEY = process.env.ORKIFY_API_KEY;
        if (process.env.ORKIFY_API_HOST)
            env.ORKIFY_API_HOST = process.env.ORKIFY_API_HOST;
        const payload = JSON.stringify({
            env,
            configs,
            mcpOptions: mcpOptions ?? undefined,
            daemonPid: process.pid,
        });
        const recoveryScript = join(__dirname, '..', 'cli', 'crash-recovery.js');
        // Remove PID file and socket now so the recovery script doesn't have to
        // wait for gracefulShutdown() — which may hang in a crashing process.
        cleanup();
        ctx.markSkipServerStop();
        // The detached child logs to the daemon log file and survives our exit.
        const logFd = openSync(DAEMON_LOG_FILE, 'a');
        const child = spawnChild(process.execPath, [recoveryScript], {
            detached: true,
            stdio: ['ignore', logFd, logFd],
            env: { ...process.env, ORKIFY_CRASH_RECOVERY: payload },
        });
        child.unref();
        console.error(`Crash recovery: spawned recovery process (PID: ${child.pid})`);
    }
    catch (err) {
        console.error('Crash recovery: failed to spawn recovery process:', err.message);
    }
}
// SIGUSR2 handler for crash testing (Unix only) — triggers an uncaught exception
// which exercises the crashRecovery → gracefulShutdown → exit path.
if (process.platform !== 'win32') {
    process.on('SIGUSR2', () => {
        throw new Error('SIGUSR2 crash trigger');
    });
}
// Orderly shutdown on the standard termination signals.
const stopGracefully = async () => {
    await gracefulShutdown();
    process.exit(0);
};
process.on('SIGTERM', stopGracefully);
process.on('SIGINT', stopGracefully);
// Fatal-error handlers: attempt crash recovery, then shut down with code 1.
const exitAfterCrash = (label) => async (problem) => {
    console.error(label, problem);
    crashRecovery();
    await gracefulShutdown();
    process.exit(1);
};
process.on('uncaughtException', exitAfterCrash('Uncaught exception:'));
process.on('unhandledRejection', exitAfterCrash('Unhandled rejection:'));
// Start the server
try {
    await ctx.startServer();
    console.log(`ORKIFY daemon started (PID: ${process.pid})`);
    // If this daemon was started by crash recovery, re-enable crash recovery
    // after a stability window. If it crashes again within 60s it's likely
    // the same root cause — the guard in crashRecovery() prevents a loop.
    if (process.env.ORKIFY_CRASH_RECOVERY) {
        const stabilityTimer = setTimeout(() => {
            delete process.env.ORKIFY_CRASH_RECOVERY;
            console.log('Crash recovery re-enabled after stability window');
        }, 60_000);
        stabilityTimer.unref();
    }
}
catch (err) {
    console.error('Failed to start daemon:', err);
    cleanup();
    process.exit(1);
}
|
|
106
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import type { McpStartPayload } from '../types/index.js';
|
|
2
|
+
import { CronScheduler } from '../cron/CronScheduler.js';
|
|
3
|
+
import { DaemonServer } from '../ipc/DaemonServer.js';
|
|
4
|
+
import { TelemetryReporter } from '../telemetry/TelemetryReporter.js';
|
|
5
|
+
import { Orchestrator } from './Orchestrator.js';
|
|
6
|
+
/** Options accepted by {@link startDaemon}. */
export interface DaemonOptions {
    /** Skip crash recovery, adjust KILL_DAEMON behavior (no process.exit) */
    foreground?: boolean;
    /** Don't monkey-patch console.log with timestamp prefixes */
    skipTimestampPrefix?: boolean;
}
/** Live handles returned by {@link startDaemon} for driving the daemon's lifecycle. */
export interface DaemonContext {
    /** Orchestrator managing the daemon's processes. */
    orchestrator: Orchestrator;
    /** Scheduler for cron-style jobs. */
    cronScheduler: CronScheduler;
    /** IPC server instance (see ipc/DaemonServer). */
    server: DaemonServer;
    /** Telemetry reporter if one was created, otherwise null. */
    telemetry: null | TelemetryReporter;
    /** Start the MCP HTTP endpoint with the given payload. */
    startMcpHttp: (opts: McpStartPayload) => Promise<void>;
    /** MCP start payload currently in effect, or null. */
    getMcpOptions: () => McpStartPayload | null;
    /** Shut the daemon down in an orderly way; optionally persist the cache first. */
    gracefulShutdown: (opts?: {
        persistCache?: boolean;
    }) => Promise<void>;
    /** Bind IPC socket and start listening */
    startServer: () => Promise<void>;
    /** Synchronously remove daemon runtime artifacts (PID file, socket). */
    cleanup: () => void;
    /** Mark that server.stop()/cleanup() should be skipped during shutdown
     * (e.g. because crash recovery already cleaned up the socket). */
    markSkipServerStop: () => void;
}
/** Create the daemon's core services and return the context that controls them. */
export declare function startDaemon(options?: DaemonOptions): Promise<DaemonContext>;
|
|
30
|
+
//# sourceMappingURL=startDaemon.d.ts.map
|