@orkify/cli 1.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +191 -0
- package/README.md +1701 -0
- package/bin/orkify +3 -0
- package/boot/systemd/orkify@.service +30 -0
- package/dist/agent-name.d.ts +4 -0
- package/dist/agent-name.js +42 -0
- package/dist/alerts/AlertEvaluator.d.ts +14 -0
- package/dist/alerts/AlertEvaluator.js +135 -0
- package/dist/cli/commands/autostart.d.ts +3 -0
- package/dist/cli/commands/autostart.js +11 -0
- package/dist/cli/commands/crash-test.d.ts +3 -0
- package/dist/cli/commands/crash-test.js +17 -0
- package/dist/cli/commands/daemon-reload.d.ts +3 -0
- package/dist/cli/commands/daemon-reload.js +72 -0
- package/dist/cli/commands/delete.d.ts +3 -0
- package/dist/cli/commands/delete.js +37 -0
- package/dist/cli/commands/deploy.d.ts +6 -0
- package/dist/cli/commands/deploy.js +266 -0
- package/dist/cli/commands/down.d.ts +3 -0
- package/dist/cli/commands/down.js +36 -0
- package/dist/cli/commands/flush.d.ts +3 -0
- package/dist/cli/commands/flush.js +28 -0
- package/dist/cli/commands/kill.d.ts +3 -0
- package/dist/cli/commands/kill.js +35 -0
- package/dist/cli/commands/list.d.ts +14 -0
- package/dist/cli/commands/list.js +361 -0
- package/dist/cli/commands/logs.d.ts +3 -0
- package/dist/cli/commands/logs.js +107 -0
- package/dist/cli/commands/mcp.d.ts +3 -0
- package/dist/cli/commands/mcp.js +151 -0
- package/dist/cli/commands/reload.d.ts +3 -0
- package/dist/cli/commands/reload.js +54 -0
- package/dist/cli/commands/restart.d.ts +3 -0
- package/dist/cli/commands/restart.js +43 -0
- package/dist/cli/commands/restore.d.ts +3 -0
- package/dist/cli/commands/restore.js +88 -0
- package/dist/cli/commands/run.d.ts +8 -0
- package/dist/cli/commands/run.js +212 -0
- package/dist/cli/commands/snap.d.ts +3 -0
- package/dist/cli/commands/snap.js +30 -0
- package/dist/cli/commands/up.d.ts +3 -0
- package/dist/cli/commands/up.js +125 -0
- package/dist/cli/crash-recovery.d.ts +2 -0
- package/dist/cli/crash-recovery.js +67 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.js +46 -0
- package/dist/cli/parse.d.ts +28 -0
- package/dist/cli/parse.js +97 -0
- package/dist/cluster/ClusterWrapper.d.ts +18 -0
- package/dist/cluster/ClusterWrapper.js +602 -0
- package/dist/config/ConfigStore.d.ts +11 -0
- package/dist/config/ConfigStore.js +21 -0
- package/dist/config/schema.d.ts +103 -0
- package/dist/config/schema.js +49 -0
- package/dist/constants.d.ts +83 -0
- package/dist/constants.js +289 -0
- package/dist/cron/CronScheduler.d.ts +25 -0
- package/dist/cron/CronScheduler.js +149 -0
- package/dist/daemon/GracefulManager.d.ts +8 -0
- package/dist/daemon/GracefulManager.js +29 -0
- package/dist/daemon/ManagedProcess.d.ts +71 -0
- package/dist/daemon/ManagedProcess.js +1020 -0
- package/dist/daemon/Orchestrator.d.ts +51 -0
- package/dist/daemon/Orchestrator.js +416 -0
- package/dist/daemon/RotatingWriter.d.ts +27 -0
- package/dist/daemon/RotatingWriter.js +264 -0
- package/dist/daemon/index.d.ts +2 -0
- package/dist/daemon/index.js +106 -0
- package/dist/daemon/startDaemon.d.ts +30 -0
- package/dist/daemon/startDaemon.js +693 -0
- package/dist/deploy/CommandPoller.d.ts +13 -0
- package/dist/deploy/CommandPoller.js +53 -0
- package/dist/deploy/DeployExecutor.d.ts +33 -0
- package/dist/deploy/DeployExecutor.js +340 -0
- package/dist/deploy/config.d.ts +20 -0
- package/dist/deploy/config.js +161 -0
- package/dist/deploy/env.d.ts +2 -0
- package/dist/deploy/env.js +17 -0
- package/dist/deploy/tarball.d.ts +32 -0
- package/dist/deploy/tarball.js +243 -0
- package/dist/detect/framework.d.ts +2 -0
- package/dist/detect/framework.js +24 -0
- package/dist/ipc/DaemonClient.d.ts +31 -0
- package/dist/ipc/DaemonClient.js +248 -0
- package/dist/ipc/DaemonServer.d.ts +28 -0
- package/dist/ipc/DaemonServer.js +166 -0
- package/dist/ipc/MultiUserClient.d.ts +27 -0
- package/dist/ipc/MultiUserClient.js +203 -0
- package/dist/ipc/protocol.d.ts +7 -0
- package/dist/ipc/protocol.js +53 -0
- package/dist/ipc/restoreDaemon.d.ts +8 -0
- package/dist/ipc/restoreDaemon.js +19 -0
- package/dist/machine-id.d.ts +11 -0
- package/dist/machine-id.js +51 -0
- package/dist/mcp/auth.d.ts +118 -0
- package/dist/mcp/auth.js +245 -0
- package/dist/mcp/http.d.ts +20 -0
- package/dist/mcp/http.js +229 -0
- package/dist/mcp/index.d.ts +3 -0
- package/dist/mcp/index.js +8 -0
- package/dist/mcp/server.d.ts +37 -0
- package/dist/mcp/server.js +413 -0
- package/dist/probe/compute-fingerprint.d.ts +27 -0
- package/dist/probe/compute-fingerprint.js +65 -0
- package/dist/probe/parse-frames.d.ts +21 -0
- package/dist/probe/parse-frames.js +57 -0
- package/dist/probe/resolve-sourcemaps.d.ts +25 -0
- package/dist/probe/resolve-sourcemaps.js +281 -0
- package/dist/state/StateStore.d.ts +11 -0
- package/dist/state/StateStore.js +78 -0
- package/dist/telemetry/TelemetryReporter.d.ts +49 -0
- package/dist/telemetry/TelemetryReporter.js +451 -0
- package/dist/types/index.d.ts +373 -0
- package/dist/types/index.js +2 -0
- package/package.json +148 -0
- package/packages/cache/README.md +114 -0
- package/packages/cache/dist/CacheClient.d.ts +26 -0
- package/packages/cache/dist/CacheClient.d.ts.map +1 -0
- package/packages/cache/dist/CacheClient.js +174 -0
- package/packages/cache/dist/CacheClient.js.map +1 -0
- package/packages/cache/dist/CacheFileStore.d.ts +45 -0
- package/packages/cache/dist/CacheFileStore.d.ts.map +1 -0
- package/packages/cache/dist/CacheFileStore.js +446 -0
- package/packages/cache/dist/CacheFileStore.js.map +1 -0
- package/packages/cache/dist/CachePersistence.d.ts +9 -0
- package/packages/cache/dist/CachePersistence.d.ts.map +1 -0
- package/packages/cache/dist/CachePersistence.js +67 -0
- package/packages/cache/dist/CachePersistence.js.map +1 -0
- package/packages/cache/dist/CachePrimary.d.ts +25 -0
- package/packages/cache/dist/CachePrimary.d.ts.map +1 -0
- package/packages/cache/dist/CachePrimary.js +155 -0
- package/packages/cache/dist/CachePrimary.js.map +1 -0
- package/packages/cache/dist/CacheStore.d.ts +50 -0
- package/packages/cache/dist/CacheStore.d.ts.map +1 -0
- package/packages/cache/dist/CacheStore.js +271 -0
- package/packages/cache/dist/CacheStore.js.map +1 -0
- package/packages/cache/dist/constants.d.ts +6 -0
- package/packages/cache/dist/constants.d.ts.map +1 -0
- package/packages/cache/dist/constants.js +9 -0
- package/packages/cache/dist/constants.js.map +1 -0
- package/packages/cache/dist/index.d.ts +16 -0
- package/packages/cache/dist/index.d.ts.map +1 -0
- package/packages/cache/dist/index.js +86 -0
- package/packages/cache/dist/index.js.map +1 -0
- package/packages/cache/dist/serialize.d.ts +9 -0
- package/packages/cache/dist/serialize.d.ts.map +1 -0
- package/packages/cache/dist/serialize.js +40 -0
- package/packages/cache/dist/serialize.js.map +1 -0
- package/packages/cache/dist/types.d.ts +123 -0
- package/packages/cache/dist/types.d.ts.map +1 -0
- package/packages/cache/dist/types.js +2 -0
- package/packages/cache/dist/types.js.map +1 -0
- package/packages/cache/package.json +27 -0
- package/packages/cache/src/CacheClient.ts +227 -0
- package/packages/cache/src/CacheFileStore.ts +528 -0
- package/packages/cache/src/CachePersistence.ts +89 -0
- package/packages/cache/src/CachePrimary.ts +172 -0
- package/packages/cache/src/CacheStore.ts +308 -0
- package/packages/cache/src/constants.ts +10 -0
- package/packages/cache/src/index.ts +100 -0
- package/packages/cache/src/serialize.ts +49 -0
- package/packages/cache/src/types.ts +156 -0
- package/packages/cache/tsconfig.json +18 -0
- package/packages/cache/tsconfig.tsbuildinfo +1 -0
- package/packages/next/README.md +166 -0
- package/packages/next/dist/error-capture.d.ts +34 -0
- package/packages/next/dist/error-capture.d.ts.map +1 -0
- package/packages/next/dist/error-capture.js +130 -0
- package/packages/next/dist/error-capture.js.map +1 -0
- package/packages/next/dist/error-handler.d.ts +10 -0
- package/packages/next/dist/error-handler.d.ts.map +1 -0
- package/packages/next/dist/error-handler.js +186 -0
- package/packages/next/dist/error-handler.js.map +1 -0
- package/packages/next/dist/isr-cache.d.ts +9 -0
- package/packages/next/dist/isr-cache.d.ts.map +1 -0
- package/packages/next/dist/isr-cache.js +86 -0
- package/packages/next/dist/isr-cache.js.map +1 -0
- package/packages/next/dist/stream.d.ts +5 -0
- package/packages/next/dist/stream.d.ts.map +1 -0
- package/packages/next/dist/stream.js +22 -0
- package/packages/next/dist/stream.js.map +1 -0
- package/packages/next/dist/types.d.ts +33 -0
- package/packages/next/dist/types.d.ts.map +1 -0
- package/packages/next/dist/types.js +6 -0
- package/packages/next/dist/types.js.map +1 -0
- package/packages/next/dist/use-cache.d.ts +4 -0
- package/packages/next/dist/use-cache.d.ts.map +1 -0
- package/packages/next/dist/use-cache.js +86 -0
- package/packages/next/dist/use-cache.js.map +1 -0
- package/packages/next/dist/utils.d.ts +32 -0
- package/packages/next/dist/utils.d.ts.map +1 -0
- package/packages/next/dist/utils.js +88 -0
- package/packages/next/dist/utils.js.map +1 -0
- package/packages/next/package.json +52 -0
- package/packages/next/src/error-capture.ts +177 -0
- package/packages/next/src/error-handler.ts +221 -0
- package/packages/next/src/isr-cache.ts +100 -0
- package/packages/next/src/stream.ts +23 -0
- package/packages/next/src/types.ts +33 -0
- package/packages/next/src/use-cache.ts +99 -0
- package/packages/next/src/utils.ts +102 -0
- package/packages/next/tsconfig.json +19 -0
- package/packages/next/tsconfig.tsbuildinfo +1 -0
package/bin/orkify
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
# orkify process manager — systemd template unit
#
# Install:
# sudo cp orkify@.service /etc/systemd/system/
# sudo systemctl daemon-reload
# sudo systemctl enable orkify@<username>
#
# The service restores the process snapshot on boot and kills the daemon on stop.
# Make sure to run `orkify snap` at least once so there is a snapshot to restore.
#
# Update ExecStart/ExecStop paths to match your installation:
# which orkify

[Unit]
Description=orkify process manager (%i)
Documentation=https://github.com/orkify/orkify
# Wait for (and request) network readiness before restoring processes.
After=network-online.target
Wants=network-online.target

[Service]
# oneshot + RemainAfterExit: `orkify restore` exits after spawning the daemon,
# but the unit should stay "active" so ExecStop runs on shutdown.
Type=oneshot
RemainAfterExit=yes
# %i is the instance name — the username after "orkify@".
User=%i
# Leading "-" means a missing env file is not an error.
EnvironmentFile=-/etc/orkify/env
ExecStart=/usr/local/bin/orkify restore
ExecReload=/usr/local/bin/orkify daemon-reload
ExecStop=/usr/local/bin/orkify kill

[Install]
WantedBy=multi-user.target
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { randomBytes } from 'node:crypto';
import { mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import { hostname } from 'node:os';
import { AGENT_NAME_FILE, ORKIFY_HOME } from './constants.js';

/** Memoized agent name for the lifetime of this process. */
let cached;

/**
 * Resolve a stable agent name, in priority order:
 *   1. the ORKIFY_AGENT_NAME environment variable (container override),
 *   2. the persisted name file under ORKIFY_HOME,
 *   3. a freshly generated `<hostname>-<6 hex chars>` name, persisted best-effort.
 * The result is cached after the first call.
 */
export function getAgentName() {
    if (cached)
        return cached;
    // 1. Env var override (containers)
    const fromEnv = process.env.ORKIFY_AGENT_NAME;
    if (fromEnv) {
        cached = fromEnv;
        return cached;
    }
    // 2. Read persisted file
    try {
        const persisted = readFileSync(AGENT_NAME_FILE, 'utf8').trim();
        if (persisted) {
            cached = persisted;
            return cached;
        }
    }
    catch {
        // No persisted file yet — fall through to generation.
    }
    // 3. Generate: hostname-6hex
    const generated = `${hostname()}-${randomBytes(3).toString('hex')}`;
    try {
        mkdirSync(ORKIFY_HOME, { recursive: true });
        writeFileSync(AGENT_NAME_FILE, generated + '\n', 'utf8');
    }
    catch {
        // Persisting is best-effort; an in-memory name is still usable.
    }
    cached = generated;
    return cached;
}
/** Reset the memoized name — intended for tests only. */
export function _resetAgentNameCache() {
    cached = undefined;
}
//# sourceMappingURL=agent-name.js.map
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { ConfigStore } from '../config/ConfigStore.js';
|
|
2
|
+
import type { TelemetryAlertEvent, TelemetryMetricsSnapshot } from '../types/index.js';
|
|
3
|
+
export declare class AlertEvaluator {
|
|
4
|
+
private configStore;
|
|
5
|
+
private states;
|
|
6
|
+
private buffer;
|
|
7
|
+
private knownRuleIds;
|
|
8
|
+
private host;
|
|
9
|
+
constructor(configStore: ConfigStore);
|
|
10
|
+
evaluate(snapshots: TelemetryMetricsSnapshot[]): void;
|
|
11
|
+
drainAlerts(): TelemetryAlertEvent[];
|
|
12
|
+
restoreAlerts(alerts: TelemetryAlertEvent[]): void;
|
|
13
|
+
}
|
|
14
|
+
//# sourceMappingURL=AlertEvaluator.d.ts.map
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
import { hostname } from 'node:os';
// Re-trigger suppression window per rule/worker pair.
const COOLDOWN_MS = 5 * 60 * 1000; // 5 minutes
/**
 * Client-side alert rule evaluator.
 *
 * Compares per-worker metric snapshots against the configured alert rules and
 * buffers `alert:triggered` / `alert:resolved` events until drained (via
 * drainAlerts()). State is keyed by `ruleId:processName:workerId` and tracks
 * the consecutive-violation streak, whether the alert is currently firing,
 * and a cooldown deadline that suppresses rapid re-triggering.
 */
export class AlertEvaluator {
    // Source of alert rule definitions; re-read on every evaluate() call.
    configStore;
    // Map<"ruleId:processName:workerId", { consecutiveViolations, triggered, cooldownUntil }>
    states = new Map();
    // Pending events awaiting drainAlerts(), oldest first.
    buffer = [];
    // Rule IDs seen on the previous pass — used to detect removed rules cheaply.
    knownRuleIds = new Set();
    // Hostname stamped onto every emitted event.
    host = hostname();
    constructor(configStore) {
        this.configStore = configStore;
    }
    /**
     * Evaluate one batch of metric snapshots against all enabled rules.
     *
     * Side effects: updates per-key violation state, prunes state for removed
     * rules and vanished workers (emitting `alert:resolved` for alerts that
     * were still firing), and appends any new events to the internal buffer.
     */
    evaluate(snapshots) {
        const rules = this.configStore.getAlertRules().filter((r) => r.is_enabled);
        const now = Date.now();
        // Track current rule IDs to prune stale states
        const currentRuleIds = new Set(rules.map((r) => r.id));
        // Prune states for removed rules
        if (!setsEqual(currentRuleIds, this.knownRuleIds)) {
            for (const key of this.states.keys()) {
                // NOTE(review): assumes rule IDs never contain ':' — the key's
                // first segment is taken to be the rule ID. Confirm upstream.
                const ruleId = key.split(':')[0];
                if (!currentRuleIds.has(ruleId)) {
                    this.states.delete(key);
                }
            }
            this.knownRuleIds = currentRuleIds;
        }
        // Build lookup of current process:worker combos for pruning
        const activeKeys = new Set();
        for (const rule of rules) {
            const { metric, threshold, duration } = rule.condition;
            // Heartbeat rules are server-evaluated — skip on CLI
            if (metric === 'heartbeat')
                continue;
            for (const snapshot of snapshots) {
                for (const worker of snapshot.workers) {
                    const key = `${rule.id}:${snapshot.processName}:${worker.id}`;
                    activeKeys.add(key);
                    // Only 'cpu' and 'memory' are evaluated locally; any
                    // non-'cpu' metric falls through to memory here.
                    const value = metric === 'cpu' ? worker.cpu : worker.memory;
                    let state = this.states.get(key);
                    if (!state) {
                        state = { consecutiveViolations: 0, triggered: false, cooldownUntil: 0 };
                        this.states.set(key, state);
                    }
                    if (value > threshold) {
                        state.consecutiveViolations++;
                        // Fire only after `duration` consecutive violations,
                        // and not while a previous trigger's cooldown is live.
                        if (!state.triggered &&
                            state.consecutiveViolations >= duration &&
                            now >= state.cooldownUntil) {
                            state.triggered = true;
                            state.cooldownUntil = now + COOLDOWN_MS;
                            this.buffer.push({
                                type: 'alert:triggered',
                                rule_id: rule.id,
                                rule_name: rule.name,
                                metric,
                                value,
                                threshold,
                                process_name: snapshot.processName,
                                worker_id: worker.id,
                                hostname: this.host,
                                timestamp: now,
                            });
                        }
                    }
                    else {
                        // Below threshold: resolve a firing alert and reset
                        // the violation streak.
                        if (state.triggered) {
                            this.buffer.push({
                                type: 'alert:resolved',
                                rule_id: rule.id,
                                rule_name: rule.name,
                                metric,
                                value,
                                threshold,
                                process_name: snapshot.processName,
                                worker_id: worker.id,
                                hostname: this.host,
                                timestamp: now,
                            });
                        }
                        state.consecutiveViolations = 0;
                        state.triggered = false;
                    }
                }
            }
        }
        // Prune states for workers that are no longer active
        for (const key of this.states.keys()) {
            if (!activeKeys.has(key)) {
                const state = this.states.get(key);
                if (!state)
                    continue;
                // A worker vanished while its alert was firing — emit a
                // synthetic resolve (value 0) so the alert doesn't stay stuck.
                if (state.triggered) {
                    // Key format is ruleId:processName:workerId; the process
                    // name may itself contain ':' so rejoin middle segments.
                    const parts = key.split(':');
                    const ruleId = parts[0];
                    const processName = parts.slice(1, -1).join(':');
                    const workerId = Number(parts[parts.length - 1]);
                    const rule = rules.find((r) => r.id === ruleId);
                    if (rule) {
                        this.buffer.push({
                            type: 'alert:resolved',
                            rule_id: ruleId,
                            rule_name: rule.name,
                            metric: rule.condition.metric,
                            value: 0,
                            threshold: rule.condition.threshold,
                            process_name: processName,
                            worker_id: workerId,
                            hostname: this.host,
                            timestamp: now,
                        });
                    }
                }
                this.states.delete(key);
            }
        }
    }
    /** Remove and return all buffered alert events (oldest first). */
    drainAlerts() {
        const alerts = this.buffer;
        this.buffer = [];
        return alerts;
    }
    /** Re-queue events at the front of the buffer (e.g. after a failed send). */
    restoreAlerts(alerts) {
        this.buffer.unshift(...alerts);
    }
}
|
|
126
|
+
/** True when two Sets contain exactly the same members. */
function setsEqual(a, b) {
    if (a.size !== b.size)
        return false;
    return [...a].every((member) => b.has(member));
}
//# sourceMappingURL=AlertEvaluator.js.map
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import chalk from 'chalk';
import { Command } from 'commander';

/**
 * `orkify autostart` (alias: `boot`) — boot persistence is not configured
 * automatically; this command only prints pointers to the one-time
 * systemd/launchd setup guide.
 */
export const autostartCommand = new Command('autostart')
    .alias('boot')
    .description('Configure orkify to start on system boot')
    .action(() => {
    const guide = chalk.cyan('https://github.com/orkify/orkify#boot-persistence');
    const message = [
        `${chalk.bold('Boot persistence')} requires a one-time systemd (Linux) or launchd (macOS) setup.`,
        ``,
        `See the guide: ${guide}`,
    ].join('\n');
    console.log(message);
});
//# sourceMappingURL=autostart.js.map
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { Command } from 'commander';
import { IPCMessageType } from '../../constants.js';
import { daemonClient } from '../../ipc/DaemonClient.js';

/**
 * `orkify _crash-test` — internal command that asks the daemon to crash
 * itself so crash-recovery paths can be exercised end to end.
 */
export const crashTestCommand = new Command('_crash-test')
    .description('Trigger a daemon crash for testing (internal)')
    .action(async () => {
    try {
        await daemonClient.request(IPCMessageType.CRASH_TEST);
    }
    catch {
        // Expected: the daemon exits right after acknowledging, so the
        // connection drops and the request rejects.
    }
    finally {
        // Always release the IPC socket, whether or not the request settled.
        daemonClient.disconnect();
    }
});
//# sourceMappingURL=crash-test.js.map
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import chalk from 'chalk';
import { Command } from 'commander';
import { IPCMessageType } from '../../constants.js';
import { daemonClient } from '../../ipc/DaemonClient.js';
import { restoreDaemon } from '../../ipc/restoreDaemon.js';
/**
 * `orkify daemon-reload` — restart the daemon in place so it picks up newly
 * installed code, then restore its previous runtime state.
 *
 * Sequence (order matters):
 *   1. KILL_DAEMON over IPC; the dying daemon replies with its env vars,
 *      process configs, and MCP server options before exiting.
 *   2. restoreDaemon() starts a fresh daemon and re-creates the processes.
 *   3. If an MCP HTTP server was running, MCP_START re-launches it.
 */
export const daemonReloadCommand = new Command('daemon-reload')
    .description('Reload the daemon to pick up new code')
    .action(async () => {
    try {
        // 1. Kill the daemon and capture its state (env vars + process configs + MCP)
        console.log(chalk.blue('⟳ Stopping daemon...'));
        let daemonEnv = {};
        let processes = [];
        let savedMcpOptions;
        try {
            const killResponse = await daemonClient.request(IPCMessageType.KILL_DAEMON);
            const data = killResponse.data;
            if (data?.env)
                daemonEnv = data.env;
            if (data?.processes)
                processes = data.processes;
            if (data?.mcpOptions)
                savedMcpOptions = data.mcpOptions;
        }
        catch {
            // Connection close is expected when daemon shuts down
        }
        // Drop the dead socket before reconnecting to the new daemon.
        daemonClient.disconnect();
        // 2. Start new daemon and restore processes from in-memory configs
        console.log(chalk.blue('⟳ Starting new daemon and restoring processes...'));
        const resResponse = await restoreDaemon(daemonClient, processes, daemonEnv);
        if (resResponse.success) {
            const results = resResponse.data;
            if (results.length === 0) {
                console.log(chalk.green('✓ Daemon reloaded (no processes to restore)'));
            }
            else {
                console.log(chalk.green(`✓ Daemon reloaded, restored ${results.length} process(es):`));
                for (const info of results) {
                    console.log(` - ${info.name} (${info.workers.length} worker(s))`);
                }
            }
        }
        else {
            console.error(chalk.red(`✗ Daemon restarted but failed to restore: ${resResponse.error}`));
            process.exit(1);
        }
        // 3. Restore MCP HTTP server if it was running
        if (savedMcpOptions) {
            try {
                const mcpResponse = await daemonClient.request(IPCMessageType.MCP_START, savedMcpOptions);
                if (mcpResponse.success) {
                    console.log(chalk.green(`✓ MCP HTTP server restored on http://${savedMcpOptions.bind}:${savedMcpOptions.port}/mcp`));
                }
                else {
                    console.error(chalk.red(`✗ Failed to restore MCP server: ${mcpResponse.error}`));
                }
            }
            catch (err) {
                // MCP restore failure is reported but not fatal — the daemon
                // and its processes are already back up at this point.
                console.error(chalk.red(`✗ MCP restore error: ${err.message}`));
            }
        }
    }
    catch (err) {
        console.error(chalk.red(`✗ Error: ${err.message}`));
        process.exit(1);
    }
    finally {
        daemonClient.disconnect();
    }
});
//# sourceMappingURL=daemon-reload.js.map
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import chalk from 'chalk';
import { Command } from 'commander';
import { IPCMessageType } from '../../constants.js';
import { daemonClient } from '../../ipc/DaemonClient.js';

/**
 * Interpret the CLI <target> argument.
 * "all" is passed through literally; a purely numeric string becomes a
 * process id; anything else is treated as a process name.
 */
function parseDeleteTarget(target) {
    if (target === 'all')
        return 'all';
    const numeric = Number(target);
    // Number.isNaN avoids the coercing global isNaN (idiomatic, same result
    // here since `numeric` is already a number).
    return Number.isNaN(numeric) ? target : numeric;
}

/**
 * `orkify delete` (alias: `rm`) — stop and remove one process, a process by
 * id, or all processes. Exits non-zero if the daemon reports a failure.
 */
export const deleteCommand = new Command('delete')
    .alias('rm')
    .description('Stop and remove process(es) from list')
    .argument('<target>', 'Process name, id, or "all"')
    .action(async (target) => {
    try {
        const payload = {
            target: parseDeleteTarget(target),
        };
        const response = await daemonClient.request(IPCMessageType.DELETE, payload);
        if (response.success) {
            const results = response.data;
            for (const info of results) {
                console.log(chalk.red(`✗ Process "${info.name}" deleted`));
            }
            if (results.length === 0) {
                console.log(chalk.gray('No processes to delete'));
            }
        }
        else {
            console.error(chalk.red(`✗ Failed to delete: ${response.error}`));
            process.exit(1);
        }
    }
    catch (err) {
        console.error(chalk.red(`✗ Error: ${err.message}`));
        process.exit(1);
    }
    finally {
        // Always release the IPC socket so the CLI can exit cleanly.
        daemonClient.disconnect();
    }
});
//# sourceMappingURL=delete.js.map
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
export declare const deployCommand: Command;
|
|
3
|
+
export declare function parseErrorBody(resp: Response): Promise<string>;
|
|
4
|
+
export declare function formatSize(bytes: number): string;
|
|
5
|
+
export declare function computeSha256(filePath: string): Promise<string>;
|
|
6
|
+
//# sourceMappingURL=deploy.d.ts.map
|
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
import chalk from 'chalk';
import { Command } from 'commander';
import { execSync } from 'node:child_process';
import { createHash } from 'node:crypto';
import { createReadStream, existsSync, mkdtempSync, readFileSync, renameSync, rmSync, statSync, unlinkSync, } from 'node:fs';
import { tmpdir } from 'node:os';
import { join, resolve } from 'node:path';
import { IPC_DEPLOY_TIMEOUT, IPCMessageType, ORKIFY_CONFIG_FILE, TELEMETRY_DEFAULT_API_HOST, } from '../../constants.js';
import { collectGitMetadata, getOrkifyConfig, interactiveConfig, readPackageJson, saveOrkifyConfig, } from '../../deploy/config.js';
import { parseEnvFile } from '../../deploy/env.js';
import { createTarball } from '../../deploy/tarball.js';
import { daemonClient } from '../../ipc/DaemonClient.js';
// Parent `deploy` command; subcommands (upload, pack, local, ...) attach below.
export const deployCommand = new Command('deploy').description('Deployment commands');
/**
 * `orkify deploy upload [dir]` — package the project directory and upload it
 * as a deploy artifact:
 *   1) ensure an orkify.yml deploy config exists (interactive prompt if not),
 *   2) optionally bump the package patch version,
 *   3) create the tarball and hash it,
 *   4) request a pre-signed URL, PUT the tarball, then confirm the upload.
 * Exits non-zero on any failure.
 */
deployCommand
    .command('upload [dir]')
    .description('Upload a build artifact for deployment (default: current directory)')
    .option('--interactive', 'Force interactive config prompts')
    .option('--api-key <key>', 'API key (or ORKIFY_API_KEY env)')
    .option('--api-host <url>', 'API host (or ORKIFY_API_HOST env)')
    .option('--npm-version-patch', 'Bump package.json patch version before upload (e.g. 1.0.0 → 1.0.1)')
    .option('--no-sourcemaps', 'Exclude .map files from the artifact')
    .action(async (dir, options) => {
    try {
        const projectDir = resolve(dir ?? process.cwd());
        // CLI flags win over env vars; the host falls back to the default.
        const apiKey = options.apiKey || process.env.ORKIFY_API_KEY;
        const apiHost = options.apiHost || process.env.ORKIFY_API_HOST || TELEMETRY_DEFAULT_API_HOST;
        if (!apiKey) {
            console.error(chalk.red('✗ API key required. Set ORKIFY_API_KEY or use --api-key'));
            process.exit(1);
        }
        if (!existsSync(resolve(projectDir, 'package.json'))) {
            console.error(chalk.red('✗ No package.json found in ' + projectDir));
            process.exit(1);
        }
        // 1. Get or create orkify.yml config
        let config = getOrkifyConfig(projectDir);
        if (!config || !config.processes?.length || options.interactive) {
            config = await interactiveConfig(projectDir);
            saveOrkifyConfig(projectDir, config);
            console.log(chalk.green(`✓ Deploy config saved to ${ORKIFY_CONFIG_FILE}`));
        }
        if (!config.deploy) {
            console.error(chalk.red(`✗ No deploy section found in ${ORKIFY_CONFIG_FILE}. Run with --interactive`));
            process.exit(1);
        }
        if (!config.processes?.length) {
            console.error(chalk.red(`✗ No processes defined in ${ORKIFY_CONFIG_FILE}. Run with --interactive`));
            process.exit(1);
        }
        // 2. Bump patch version if requested
        if (options.npmVersionPatch) {
            const current = readPackageJson(projectDir).version || '0.0.0';
            execSync('npm version patch --no-git-tag-version', { cwd: projectDir, stdio: 'pipe' });
            const bumped = readPackageJson(projectDir).version || current;
            console.log(chalk.dim(` version: ${current} → ${bumped}`));
        }
        // 3. Collect git metadata
        const gitMeta = collectGitMetadata(projectDir);
        if (gitMeta.gitSha) {
            console.log(chalk.dim(` git: ${gitMeta.gitBranch ?? 'detached'} ${gitMeta.gitSha.slice(0, 7)}`));
        }
        // 4. Create tarball
        const excludeSourceMaps = options.sourcemaps === false || config.deploy?.sourcemaps === false;
        console.log('Creating artifact...');
        const tarPath = await createTarball(projectDir, { excludeSourceMaps });
        const tarStat = statSync(tarPath);
        const sizeStr = formatSize(tarStat.size);
        console.log(chalk.dim(` ${sizeStr}`));
        // 5. Compute SHA-256
        const sha256 = await computeSha256(tarPath);
        // 6. Request upload URL
        console.log('Uploading...');
        const pkg = readPackageJson(projectDir);
        const uploadResp = await fetch(`${apiHost}/api/v1/deploy/upload`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${apiKey}`,
            },
            body: JSON.stringify({
                sha256,
                sizeBytes: tarStat.size,
                filename: `${pkg.name || 'artifact'}.tar.gz`,
                gitSha: gitMeta.gitSha,
                gitBranch: gitMeta.gitBranch,
                gitAuthor: gitMeta.gitAuthor,
                gitMessage: gitMeta.gitMessage,
                deployConfig: config.deploy,
            }),
        });
        if (!uploadResp.ok) {
            const msg = await parseErrorBody(uploadResp);
            console.error(chalk.red(`✗ Artifact upload failed: ${msg}`));
            process.exit(1);
        }
        const { artifactId, uploadUrl, version } = (await uploadResp.json());
        // 7. Upload tarball to pre-signed URL
        // NOTE(review): the whole tarball is read into memory; fine for
        // typical artifacts, consider streaming for very large builds.
        const tarBuffer = readFileSync(tarPath);
        const putResp = await fetch(uploadUrl, {
            method: 'PUT',
            headers: {
                'Content-Type': 'application/gzip',
                'Content-Length': String(tarStat.size),
            },
            body: tarBuffer,
        });
        if (!putResp.ok) {
            const msg = await parseErrorBody(putResp);
            console.error(chalk.red(`✗ Artifact storage failed: ${msg}`));
            process.exit(1);
        }
        // 8. Confirm upload
        const confirmResp = await fetch(`${apiHost}/api/v1/deploy/upload/${artifactId}/confirm`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${apiKey}`,
            },
        });
        if (!confirmResp.ok) {
            const msg = await parseErrorBody(confirmResp);
            console.error(chalk.red(`✗ Artifact confirmation failed: ${msg}`));
            process.exit(1);
        }
        // 9. Clean up tarball
        // NOTE(review): the tarball is only deleted on success — the failure
        // exits above leave the temp file behind.
        unlinkSync(tarPath);
        console.log(chalk.green(`✓ Artifact v${version} uploaded (${sizeStr}, ${sha256.slice(0, 12)}...)`));
    }
    catch (err) {
        console.error(chalk.red(`✗ Error: ${err.message}`));
        process.exit(1);
    }
});
|
|
134
|
+
/**
 * `orkify deploy pack [dir]` — build the deploy tarball locally without
 * uploading it. Ensures a deploy config exists (prompting if necessary),
 * optionally moves the tarball to --output, and prints its size and digest.
 */
deployCommand
    .command('pack [dir]')
    .description('Create a deploy tarball without uploading')
    .option('--output <path>', 'Output tarball path')
    .option('--interactive', 'Force interactive config prompts')
    .option('--no-sourcemaps', 'Exclude .map files from the artifact')
    .action(async (dir, options) => {
    try {
        const projectDir = resolve(dir ?? process.cwd());
        let config = getOrkifyConfig(projectDir);
        const needsConfig = !config || !config.processes?.length || options.interactive;
        if (needsConfig) {
            config = await interactiveConfig(projectDir);
            saveOrkifyConfig(projectDir, config);
            console.log(chalk.green(`✓ Deploy config saved to ${ORKIFY_CONFIG_FILE}`));
        }
        if (!existsSync(join(projectDir, ORKIFY_CONFIG_FILE))) {
            console.error(chalk.red(`✗ ${ORKIFY_CONFIG_FILE} not found. Run with --interactive to create one.`));
            process.exit(1);
        }
        // Sourcemaps are excluded when disabled via flag or config.
        const excludeSourceMaps = options.sourcemaps === false || config?.deploy?.sourcemaps === false;
        console.log('Creating artifact...');
        let artifactPath = await createTarball(projectDir, { excludeSourceMaps: excludeSourceMaps });
        if (options.output) {
            const requested = resolve(options.output);
            renameSync(artifactPath, requested);
            artifactPath = requested;
        }
        const { size } = statSync(artifactPath);
        const digest = await computeSha256(artifactPath);
        console.log(chalk.green(`✓ Created: ${artifactPath} (${formatSize(size)}, sha256: ${digest.slice(0, 12)}...)`));
    }
    catch (err) {
        console.error(chalk.red(`✗ Error: ${err.message}`));
        process.exit(1);
    }
});
|
|
170
|
+
deployCommand
    .command('local <tarball>')
    .description('Deploy from a local tarball')
    .option('--env-file <path>', 'Load env vars from file')
    .action(async (tarball, options) => {
    try {
        const tarballPath = resolve(tarball);
        if (!existsSync(tarballPath)) {
            console.error(chalk.red(`✗ Tarball not found: ${tarballPath}`));
            process.exit(1);
        }
        // Pull only the config file out of the tarball into a scratch dir so
        // the deploy section can be read without unpacking everything.
        // NOTE(review): tarballPath is interpolated into a shell command; a
        // path containing `"` or `$` would break the quoting — confirm inputs.
        const scratchDir = mkdtempSync(join(tmpdir(), 'orkify-local-'));
        try {
            execSync(`tar xzf "${tarballPath}" -C "${scratchDir}" ${ORKIFY_CONFIG_FILE}`, {
                stdio: 'pipe',
            });
        }
        catch {
            rmSync(scratchDir, { recursive: true, force: true });
            console.error(chalk.red(`✗ Tarball is not a valid orkify package: ${ORKIFY_CONFIG_FILE} not found`));
            process.exit(1);
        }
        const config = getOrkifyConfig(scratchDir);
        rmSync(scratchDir, { recursive: true, force: true });
        if (!config?.deploy) {
            console.error(chalk.red(`✗ No deploy section found in ${ORKIFY_CONFIG_FILE}`));
            process.exit(1);
        }
        // Optional env overrides loaded from --env-file.
        let env;
        if (options.envFile) {
            const envPath = resolve(options.envFile);
            if (!existsSync(envPath)) {
                console.error(chalk.red(`✗ Env file not found: ${envPath}`));
                process.exit(1);
            }
            env = parseEnvFile(readFileSync(envPath, 'utf-8'));
        }
        console.log(`Deploying ${tarballPath}...`);
        // Stream progress lines from the daemon while the deploy runs.
        const onProgress = ({ phase, output }) => {
            if (phase) {
                console.log(chalk.dim(` → ${phase}`));
            }
            else if (output) {
                console.log(chalk.dim(`    ${output}`));
            }
        };
        const response = await daemonClient.requestWithProgress(IPCMessageType.DEPLOY_LOCAL, {
            tarballPath,
            deployConfig: config.deploy,
            env,
        }, onProgress, IPC_DEPLOY_TIMEOUT);
        if (!response.success) {
            console.error(chalk.red(`✗ Deploy failed: ${response.error}`));
            process.exit(1);
        }
        console.log(chalk.green('✓ Deploy complete'));
    }
    catch (err) {
        console.error(chalk.red(`✗ Error: ${err.message}`));
        process.exit(1);
    }
    finally {
        // Always release the daemon socket, success or failure.
        daemonClient.disconnect();
    }
});
|
|
238
|
+
/**
 * Extract a human-readable error message from a failed HTTP response.
 * Prefers the `error` field of a JSON body; otherwise falls back to the
 * status line ("<status> <statusText>").
 * @param {Response} resp - A fetch Response (callers pass non-ok responses).
 * @returns {Promise<string>} The best available error message.
 */
export async function parseErrorBody(resp) {
    let body;
    try {
        body = await resp.json();
    }
    catch {
        body = undefined; // Body was not JSON — use the status line below.
    }
    if (body?.error) {
        return body.error;
    }
    return `${resp.status} ${resp.statusText}`;
}
|
|
249
|
+
/**
 * Format a byte count as a human-readable size string, e.g. "1.5 KB".
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} Size rounded to at most two decimals plus a unit suffix.
 */
export function formatSize(bytes) {
    if (bytes === 0)
        return '0 B';
    const k = 1024;
    const sizes = ['B', 'KB', 'MB', 'GB'];
    // Clamp the unit index so sizes of 1 TB and above render in GB instead of
    // indexing past the end of `sizes` (which produced "... undefined").
    const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
}
|
|
257
|
+
/**
 * Compute the SHA-256 digest of a file by streaming its contents.
 * @param {string} filePath - Path to the file to hash.
 * @returns {Promise<string>} Lowercase hex digest; rejects on read errors.
 */
export async function computeSha256(filePath) {
    const hash = createHash('sha256');
    // Async iteration over the read stream feeds each chunk to the hash and
    // surfaces stream errors as a rejected promise.
    for await (const chunk of createReadStream(filePath)) {
        hash.update(chunk);
    }
    return hash.digest('hex');
}
|
|
266
|
+
//# sourceMappingURL=deploy.js.map
|