triflux 10.15.0 → 10.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -0
- package/bin/triflux.mjs +48 -25
- package/hooks/keyword-rules.json +12 -0
- package/hooks/safety-guard.mjs +3 -0
- package/hub/cli-adapter-base.mjs +10 -5
- package/hub/lib/hosts-compat.mjs +19 -4
- package/hub/lib/ssh-command.mjs +4 -10
- package/hub/lib/state-snapshot.mjs +293 -0
- package/hub/router.mjs +20 -3
- package/hub/server.mjs +13 -18
- package/hub/team/agent-map.json +2 -0
- package/hub/team/codex-review.mjs +14 -9
- package/hub/team/conductor.mjs +8 -4
- package/hub/team/psmux.mjs +1 -1
- package/hub/team/swarm-cli.mjs +26 -16
- package/hub/team/worker-signal.mjs +263 -0
- package/hub/team/worker-signal.types.d.ts +52 -0
- package/hub/team/worktree-lifecycle.mjs +14 -1
- package/hub/workers/codex-app-server-worker.mjs +4 -6
- package/hub/workers/codex-mcp.mjs +90 -13
- package/hub/workers/lib/jsonrpc-stdio.mjs +0 -1
- package/hub/workers/worker-utils.mjs +1 -2
- package/package.json +4 -1
- package/scripts/check-codex-config-stable.mjs +122 -0
- package/scripts/codex-mcp-gateway-sync.mjs +22 -0
- package/scripts/doctor-diagnose.mjs +24 -11
- package/scripts/hub-ensure.mjs +24 -0
- package/scripts/lib/mcp-guard-engine.mjs +20 -0
- package/scripts/lib/mcp-health.mjs +4 -1
- package/scripts/release/bump-version.mjs +20 -12
- package/scripts/release/lib.mjs +7 -0
- package/scripts/release/prepare.mjs +8 -0
- package/scripts/setup.mjs +48 -3
- package/scripts/snapshot-codex-state.mjs +37 -0
- package/scripts/snapshot-gemini-state.mjs +37 -0
- package/scripts/sync-hub-mcp-settings.mjs +68 -9
- package/scripts/test-lock.mjs +134 -29
- package/scripts/tfx-route.sh +13 -9
- package/skills/tfx-ship/SKILL.md +6 -2
- package/skills/tfx-workspace/async-tests/run-tests.sh +0 -203
- package/skills/tfx-workspace/evals/evals.json +0 -79
- package/skills/tfx-workspace/iteration-1/benchmark.json +0 -524
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/eval_metadata.json +0 -11
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/outputs/analysis.md +0 -154
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/outputs/analysis.md +0 -126
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/eval_metadata.json +0 -11
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/outputs/analysis.md +0 -119
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/outputs/analysis.md +0 -115
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/eval_metadata.json +0 -10
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/grading.json +0 -20
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/outputs/analysis.md +0 -86
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/grading.json +0 -20
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/outputs/analysis.md +0 -81
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/multi-team-creation/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/outputs/analysis.md +0 -316
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/outputs/analysis.md +0 -352
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/review.html +0 -1325
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/outputs/analysis.md +0 -97
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/outputs/analysis.md +0 -94
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/outputs/analysis.md +0 -209
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/outputs/analysis.md +0 -193
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/benchmark.json +0 -144
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/eval_metadata.json +0 -13
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/grading.json +0 -35
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/outputs/analysis.md +0 -382
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/grading.json +0 -35
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/outputs/analysis.md +0 -333
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/review.html +0 -1325
- package/skills/tfx-workspace/skill-snapshot/tfx-auto/SKILL.md +0 -217
- package/skills/tfx-workspace/skill-snapshot/tfx-auto-codex/SKILL.md +0 -77
- package/skills/tfx-workspace/skill-snapshot/tfx-codex/SKILL.md +0 -65
- package/skills/tfx-workspace/skill-snapshot/tfx-doctor/SKILL.md +0 -94
- package/skills/tfx-workspace/skill-snapshot/tfx-gemini/SKILL.md +0 -82
- package/skills/tfx-workspace/skill-snapshot/tfx-hub/SKILL.md +0 -133
- package/skills/tfx-workspace/skill-snapshot/tfx-multi/SKILL.md +0 -426
- package/skills/tfx-workspace/skill-snapshot/tfx-setup/SKILL.md +0 -101
package/README.md
CHANGED
|
@@ -101,6 +101,21 @@ Then run `tfx setup` to configure your environment.
|
|
|
101
101
|
|
|
102
102
|
> **Note**: Deep skills require **psmux** (or tmux), **triflux Hub**, **Codex CLI**, and **Gemini CLI** for full Tri-CLI consensus. Without these, skills automatically degrade to Claude-only mode. Run `tfx doctor` to check your environment.
|
|
103
103
|
|
|
104
|
+
### State Snapshots
|
|
105
|
+
|
|
106
|
+
Hub startup also takes a best-effort daily snapshot of selected `~/.codex/` and
|
|
107
|
+
`~/.gemini/` state into `references/codex-snapshots/` and
|
|
108
|
+
`references/gemini-snapshots/`. Snapshot archives are rolling backups capped at
|
|
109
|
+
10 files per tool and are ignored by git.
|
|
110
|
+
|
|
111
|
+
Manual commands:
|
|
112
|
+
|
|
113
|
+
```bash
|
|
114
|
+
npm run snapshot:codex
|
|
115
|
+
npm run snapshot:gemini
|
|
116
|
+
npm run snapshot:all
|
|
117
|
+
```
|
|
118
|
+
|
|
104
119
|
---
|
|
105
120
|
|
|
106
121
|
## Core Engine
|
package/bin/triflux.mjs
CHANGED
|
@@ -5030,6 +5030,16 @@ function stopHubForUpdate() {
|
|
|
5030
5030
|
return info;
|
|
5031
5031
|
}
|
|
5032
5032
|
|
|
5033
|
+
// Open the persistent hub log file in append mode, creating its cache
// directory on demand. Returns a numeric fd, or undefined when the log
// cannot be opened — logging is best-effort and must never block startup.
function openHubLogFd() {
  try {
    const dir = join(homedir(), ".claude", "cache", "tfx-hub");
    mkdirSync(dir, { recursive: true });
    return openSync(join(dir, "hub.log"), "a");
  } catch {
    return undefined;
  }
}
|
|
5042
|
+
|
|
5033
5043
|
function startHubAfterUpdate(info) {
|
|
5034
5044
|
if (!info) return false;
|
|
5035
5045
|
const serverPath = join(PKG_ROOT, "hub", "server.mjs");
|
|
@@ -5040,13 +5050,19 @@ function startHubAfterUpdate(info) {
|
|
|
5040
5050
|
: String(process.env.TFX_HUB_PORT || "27888");
|
|
5041
5051
|
|
|
5042
5052
|
try {
|
|
5053
|
+
const logFd = openHubLogFd();
|
|
5043
5054
|
const child = spawn(process.execPath, [serverPath], {
|
|
5044
5055
|
env: { ...process.env, TFX_HUB_PORT: port },
|
|
5045
|
-
stdio: "ignore",
|
|
5056
|
+
stdio: ["ignore", logFd ?? "ignore", logFd ?? "ignore"],
|
|
5046
5057
|
detached: true,
|
|
5047
5058
|
windowsHide: true,
|
|
5048
5059
|
});
|
|
5049
5060
|
child.unref();
|
|
5061
|
+
if (logFd !== undefined) {
|
|
5062
|
+
try {
|
|
5063
|
+
closeSync(logFd);
|
|
5064
|
+
} catch {}
|
|
5065
|
+
}
|
|
5050
5066
|
return true;
|
|
5051
5067
|
} catch {
|
|
5052
5068
|
return false;
|
|
@@ -5057,29 +5073,28 @@ function startHubAfterUpdate(info) {
|
|
|
5057
5073
|
function autoRegisterMcp(mcpUrl, { codexEnabled = false } = {}) {
|
|
5058
5074
|
section("MCP 자동 등록");
|
|
5059
5075
|
|
|
5060
|
-
// Codex — config.json에 기본 disabled 엔트리로
|
|
5061
|
-
|
|
5062
|
-
|
|
5063
|
-
|
|
5064
|
-
|
|
5065
|
-
|
|
5066
|
-
|
|
5067
|
-
|
|
5068
|
-
|
|
5069
|
-
|
|
5070
|
-
|
|
5071
|
-
|
|
5072
|
-
|
|
5073
|
-
|
|
5074
|
-
|
|
5075
|
-
|
|
5076
|
-
|
|
5077
|
-
|
|
5078
|
-
|
|
5079
|
-
warn(`Codex 등록 실패: ${e.message}`);
|
|
5076
|
+
// Codex — config.json에 기본 disabled 엔트리로 등록.
|
|
5077
|
+
// Hub startup must keep the MCP config fresh even on CI/dev machines where
|
|
5078
|
+
// the Codex CLI binary itself is not installed.
|
|
5079
|
+
try {
|
|
5080
|
+
const result = ensureCodexHubServerConfig({
|
|
5081
|
+
mcpUrl,
|
|
5082
|
+
createIfMissing: true,
|
|
5083
|
+
enabled: codexEnabled,
|
|
5084
|
+
});
|
|
5085
|
+
if (!result.ok) throw new Error(result.reason || "unknown");
|
|
5086
|
+
const suffix = which("codex") ? "" : " (CLI 미설치)";
|
|
5087
|
+
if (result.changed) {
|
|
5088
|
+
ok(
|
|
5089
|
+
`Codex: config.json에 등록 완료 (${codexEnabled ? "enabled" : "기본 disabled"})${suffix}`,
|
|
5090
|
+
);
|
|
5091
|
+
} else {
|
|
5092
|
+
ok(
|
|
5093
|
+
`Codex: 이미 등록됨 (${codexEnabled ? "enabled" : "기본 disabled"})${suffix}`,
|
|
5094
|
+
);
|
|
5080
5095
|
}
|
|
5081
|
-
}
|
|
5082
|
-
|
|
5096
|
+
} catch (e) {
|
|
5097
|
+
warn(`Codex 등록 실패: ${e.message}`);
|
|
5083
5098
|
}
|
|
5084
5099
|
|
|
5085
5100
|
// Gemini — settings.json 직접 수정
|
|
@@ -5211,7 +5226,9 @@ async function cmdHub(args = [], options = {}) {
|
|
|
5211
5226
|
});
|
|
5212
5227
|
}
|
|
5213
5228
|
|
|
5214
|
-
// Issue #102: spawn stderr 를
|
|
5229
|
+
// Issue #102 + hub-detach fix: spawn stdout/stderr 를 두 채널로 redirect.
|
|
5230
|
+
// - startupErrPath (tmp): 3초 안의 startup 실패 진단 (성공 시 cleanup)
|
|
5231
|
+
// - hub.log (cache): runtime stdout/stderr 영구 보존 (crash 추적)
|
|
5215
5232
|
// detached spawn 은 pipe 유지가 까다로우니 fd 리다이렉트로 접근.
|
|
5216
5233
|
const { openSync: _openSync, closeSync: _closeSync } = await import(
|
|
5217
5234
|
"node:fs"
|
|
@@ -5227,10 +5244,11 @@ async function cmdHub(args = [], options = {}) {
|
|
|
5227
5244
|
} catch {
|
|
5228
5245
|
errFd = undefined;
|
|
5229
5246
|
}
|
|
5247
|
+
const logFd = openHubLogFd();
|
|
5230
5248
|
|
|
5231
5249
|
const child = spawn(process.execPath, [serverPath], {
|
|
5232
5250
|
env: { ...process.env, TFX_HUB_PORT: port },
|
|
5233
|
-
stdio: ["ignore", "ignore", errFd ?? "ignore"],
|
|
5251
|
+
stdio: ["ignore", logFd ?? "ignore", errFd ?? logFd ?? "ignore"],
|
|
5234
5252
|
detached: true,
|
|
5235
5253
|
windowsHide: true,
|
|
5236
5254
|
});
|
|
@@ -5240,6 +5258,11 @@ async function cmdHub(args = [], options = {}) {
|
|
|
5240
5258
|
_closeSync(errFd);
|
|
5241
5259
|
} catch {}
|
|
5242
5260
|
}
|
|
5261
|
+
if (logFd !== undefined) {
|
|
5262
|
+
try {
|
|
5263
|
+
_closeSync(logFd);
|
|
5264
|
+
} catch {}
|
|
5265
|
+
}
|
|
5243
5266
|
|
|
5244
5267
|
// PID 파일 확인 (최대 3초 대기, 100ms 폴링)
|
|
5245
5268
|
let started = false;
|
package/hooks/keyword-rules.json
CHANGED
|
@@ -97,6 +97,18 @@
|
|
|
97
97
|
"source": "(?:정리해|슬롭|클린업)",
|
|
98
98
|
"flags": "i"
|
|
99
99
|
},
|
|
100
|
+
{
|
|
101
|
+
"source": "(?:병렬|동시에|parallel|concurrent)",
|
|
102
|
+
"flags": "i"
|
|
103
|
+
},
|
|
104
|
+
{
|
|
105
|
+
"source": "(?:점검|진단|확인해)",
|
|
106
|
+
"flags": "i"
|
|
107
|
+
},
|
|
108
|
+
{
|
|
109
|
+
"source": "(?:계속해|이어서|진행해)",
|
|
110
|
+
"flags": "i"
|
|
111
|
+
},
|
|
100
112
|
{
|
|
101
113
|
"source": "\\b(?:implement|build|fix|review|test|plan|analyze)\\b",
|
|
102
114
|
"flags": "i"
|
package/hooks/safety-guard.mjs
CHANGED
|
@@ -123,9 +123,12 @@ function getWindowsHostIds() {
|
|
|
123
123
|
ids.add(name);
|
|
124
124
|
if (cfg.tailscale?.ip) ids.add(cfg.tailscale.ip);
|
|
125
125
|
if (cfg.tailscale?.dns) ids.add(cfg.tailscale.dns);
|
|
126
|
+
if (cfg.ssh?.host) ids.add(cfg.ssh.host);
|
|
126
127
|
if (cfg.ssh?.user) {
|
|
127
128
|
ids.add(`${cfg.ssh.user}@${name}`);
|
|
128
129
|
if (cfg.tailscale?.ip) ids.add(`${cfg.ssh.user}@${cfg.tailscale.ip}`);
|
|
130
|
+
if (cfg.tailscale?.dns) ids.add(`${cfg.ssh.user}@${cfg.tailscale.dns}`);
|
|
131
|
+
if (cfg.ssh?.host) ids.add(`${cfg.ssh.user}@${cfg.ssh.host}`);
|
|
129
132
|
}
|
|
130
133
|
}
|
|
131
134
|
} catch {
|
package/hub/cli-adapter-base.mjs
CHANGED
|
@@ -72,6 +72,11 @@ let _cachedVersion = null;
|
|
|
72
72
|
*/
|
|
73
73
|
export function getCodexVersion() {
|
|
74
74
|
if (_cachedVersion !== null) return _cachedVersion;
|
|
75
|
+
const override = Number(process.env.TFX_CODEX_VERSION_MINOR);
|
|
76
|
+
if (Number.isFinite(override) && override > 0) {
|
|
77
|
+
_cachedVersion = override;
|
|
78
|
+
return _cachedVersion;
|
|
79
|
+
}
|
|
75
80
|
try {
|
|
76
81
|
const out = execSync("codex --version", {
|
|
77
82
|
encoding: "utf8",
|
|
@@ -80,7 +85,10 @@ export function getCodexVersion() {
|
|
|
80
85
|
const match = out.match(/(\d+)\.(\d+)\.(\d+)/);
|
|
81
86
|
_cachedVersion = match ? Number.parseInt(match[2], 10) : 0;
|
|
82
87
|
} catch {
|
|
83
|
-
|
|
88
|
+
// Command builders should remain stable in CI even when the real Codex
|
|
89
|
+
// CLI is absent. Runtime preflight still reports/install-gates Codex
|
|
90
|
+
// separately; this fallback only selects the modern argv shape.
|
|
91
|
+
_cachedVersion = 117;
|
|
84
92
|
}
|
|
85
93
|
return _cachedVersion;
|
|
86
94
|
}
|
|
@@ -182,10 +190,7 @@ export function buildExecCommand(prompt, resultFile = null, opts = {}) {
|
|
|
182
190
|
// ── Sleep ───────────────────────────────────────────────────────
|
|
183
191
|
|
|
184
192
|
// Promise-based delay helper.
// @param {number} ms - delay in milliseconds
// @returns {Promise<void>} resolves after roughly `ms` milliseconds
export function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
190
195
|
|
|
191
196
|
// ── Result factory ──────────────────────────────────────────────
|
package/hub/lib/hosts-compat.mjs
CHANGED
|
@@ -133,8 +133,20 @@ function normalizeLastProbe(rawProbe) {
|
|
|
133
133
|
return Object.keys(probe).length > 0 ? probe : null;
|
|
134
134
|
}
|
|
135
135
|
|
|
136
|
+
// Merge the legacy `specs` object over `resources` into one plain object.
// Missing or non-object inputs are treated as empty; `specs` keys win on
// conflict (spread order).
function normalizeResources(rawHost) {
  const asObject = (value) =>
    value && typeof value === "object" ? value : {};
  return { ...asObject(rawHost.resources), ...asObject(rawHost.specs) };
}
|
|
145
|
+
|
|
136
146
|
export function normalizeHost(rawHost = {}, name = "") {
|
|
137
147
|
const sshUser = rawHost.ssh_user || rawHost.ssh?.user || rawHost.user || null;
|
|
148
|
+
const sshHost = rawHost.ssh?.host || rawHost.host || null;
|
|
149
|
+
const resources = normalizeResources(rawHost);
|
|
138
150
|
const tailscale = {
|
|
139
151
|
ip: rawHost.tailscale?.ip || null,
|
|
140
152
|
dns: rawHost.tailscale?.dns || null,
|
|
@@ -167,15 +179,14 @@ export function normalizeHost(rawHost = {}, name = "") {
|
|
|
167
179
|
ssh: {
|
|
168
180
|
...(rawHost.ssh && typeof rawHost.ssh === "object" ? rawHost.ssh : {}),
|
|
169
181
|
user: sshUser,
|
|
182
|
+
host: sshHost,
|
|
170
183
|
},
|
|
171
184
|
tailscale,
|
|
172
185
|
capabilities,
|
|
173
186
|
capabilities_v2,
|
|
174
187
|
last_probe: normalizeLastProbe(rawHost.last_probe),
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
? { ...rawHost.specs }
|
|
178
|
-
: {},
|
|
188
|
+
resources,
|
|
189
|
+
specs: { ...resources },
|
|
179
190
|
raw: { ...rawHost },
|
|
180
191
|
};
|
|
181
192
|
}
|
|
@@ -234,6 +245,7 @@ export function resolveHost(nameOrAlias, repoRoot) {
|
|
|
234
245
|
...host.aliases,
|
|
235
246
|
host.tailscale.ip,
|
|
236
247
|
host.tailscale.dns,
|
|
248
|
+
host.ssh.host,
|
|
237
249
|
host.ssh_user ? `${host.ssh_user}@${name}` : null,
|
|
238
250
|
host.ssh_user && host.tailscale.ip
|
|
239
251
|
? `${host.ssh_user}@${host.tailscale.ip}`
|
|
@@ -241,6 +253,9 @@ export function resolveHost(nameOrAlias, repoRoot) {
|
|
|
241
253
|
host.ssh_user && host.tailscale.dns
|
|
242
254
|
? `${host.ssh_user}@${host.tailscale.dns}`
|
|
243
255
|
: null,
|
|
256
|
+
host.ssh_user && host.ssh.host
|
|
257
|
+
? `${host.ssh_user}@${host.ssh.host}`
|
|
258
|
+
: null,
|
|
244
259
|
]);
|
|
245
260
|
for (const alias of aliases) {
|
|
246
261
|
if (alias && String(alias).toLowerCase() === lowered) {
|
package/hub/lib/ssh-command.mjs
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
// PowerShell 호스트에 bash 문법(2>/dev/null, &&, $())을 보내는 사고를 방지한다.
|
|
3
3
|
// 모든 SSH 명령 생성 코드에서 이 유틸리티를 사용할 것.
|
|
4
4
|
|
|
5
|
-
import { readHosts } from "./hosts-compat.mjs";
|
|
5
|
+
import { readHost, readHosts, resolveHost } from "./hosts-compat.mjs";
|
|
6
6
|
|
|
7
7
|
/** hosts.json 캐시 (프로세스 수명 동안 유지) */
|
|
8
8
|
let hostsCache = null;
|
|
@@ -28,7 +28,7 @@ function loadHostsCache(repoRoot) {
|
|
|
28
28
|
export function detectHostOs(hostAlias, repoRoot) {
|
|
29
29
|
loadHostsCache(repoRoot);
|
|
30
30
|
|
|
31
|
-
const hostCfg =
|
|
31
|
+
const hostCfg = resolveHost(hostAlias, repoRoot)?.host;
|
|
32
32
|
if (hostCfg?.os === "windows") return "windows";
|
|
33
33
|
if (hostCfg?.os) return "posix";
|
|
34
34
|
|
|
@@ -163,7 +163,7 @@ export function validateCommandForOs(command, os) {
|
|
|
163
163
|
*/
|
|
164
164
|
export function getHostConfig(hostAlias, repoRoot) {
|
|
165
165
|
loadHostsCache(repoRoot);
|
|
166
|
-
return
|
|
166
|
+
return readHost(hostAlias, repoRoot);
|
|
167
167
|
}
|
|
168
168
|
|
|
169
169
|
/**
|
|
@@ -175,13 +175,7 @@ export function getHostConfig(hostAlias, repoRoot) {
|
|
|
175
175
|
export function resolveHostAlias(alias, repoRoot) {
|
|
176
176
|
loadHostsCache(repoRoot);
|
|
177
177
|
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
for (const [name, cfg] of Object.entries(hostsCache.hosts || {})) {
|
|
181
|
-
if (cfg.aliases.includes(alias)) return name;
|
|
182
|
-
}
|
|
183
|
-
|
|
184
|
-
return null;
|
|
178
|
+
return resolveHost(alias, repoRoot)?.name ?? null;
|
|
185
179
|
}
|
|
186
180
|
|
|
187
181
|
/**
|
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import {
|
|
3
|
+
copyFile,
|
|
4
|
+
mkdir,
|
|
5
|
+
readdir,
|
|
6
|
+
rename,
|
|
7
|
+
rm,
|
|
8
|
+
stat,
|
|
9
|
+
writeFile,
|
|
10
|
+
} from "node:fs/promises";
|
|
11
|
+
import { tmpdir } from "node:os";
|
|
12
|
+
import { dirname, isAbsolute, join, relative, resolve } from "node:path";
|
|
13
|
+
import { promisify } from "node:util";
|
|
14
|
+
|
|
15
|
+
const execFileAsync = promisify(execFile);
|
|
16
|
+
const DEFAULT_MAX_SNAPSHOTS = 10;
|
|
17
|
+
const DAY_MS = 24 * 60 * 60 * 1000;
|
|
18
|
+
|
|
19
|
+
export const CODEX_STATE_INCLUDES = Object.freeze([
|
|
20
|
+
"config.toml",
|
|
21
|
+
"AGENTS.md",
|
|
22
|
+
"skills",
|
|
23
|
+
"agents",
|
|
24
|
+
"prompts",
|
|
25
|
+
"plugins",
|
|
26
|
+
]);
|
|
27
|
+
|
|
28
|
+
export const CODEX_STATE_EXCLUDES = Object.freeze([
|
|
29
|
+
"*.sqlite*",
|
|
30
|
+
".sandbox*",
|
|
31
|
+
".tmp",
|
|
32
|
+
"_archived_skills",
|
|
33
|
+
"memories",
|
|
34
|
+
"cache",
|
|
35
|
+
"log",
|
|
36
|
+
"logs",
|
|
37
|
+
"sessions",
|
|
38
|
+
"auth.json",
|
|
39
|
+
".credentials.json",
|
|
40
|
+
"*.bak*",
|
|
41
|
+
"*.tmp-*",
|
|
42
|
+
"cap_sid",
|
|
43
|
+
"installation_id",
|
|
44
|
+
"history.jsonl",
|
|
45
|
+
"models_cache.json",
|
|
46
|
+
]);
|
|
47
|
+
|
|
48
|
+
export const GEMINI_STATE_INCLUDES = Object.freeze([
|
|
49
|
+
"settings.json",
|
|
50
|
+
"settings.local.json",
|
|
51
|
+
"GEMINI.md",
|
|
52
|
+
"commands",
|
|
53
|
+
"extensions",
|
|
54
|
+
"plugins",
|
|
55
|
+
"skills",
|
|
56
|
+
"agents",
|
|
57
|
+
]);
|
|
58
|
+
|
|
59
|
+
export const GEMINI_STATE_EXCLUDES = Object.freeze([
|
|
60
|
+
"*.sqlite*",
|
|
61
|
+
"cache",
|
|
62
|
+
"log",
|
|
63
|
+
"logs",
|
|
64
|
+
"sessions",
|
|
65
|
+
"auth.json",
|
|
66
|
+
".credentials.json",
|
|
67
|
+
"*.bak*",
|
|
68
|
+
]);
|
|
69
|
+
|
|
70
|
+
export const STATE_SNAPSHOT_THRESHOLD_MS = DAY_MS;
|
|
71
|
+
export const STATE_SNAPSHOT_MAX_SNAPSHOTS = DEFAULT_MAX_SNAPSHOTS;
|
|
72
|
+
|
|
73
|
+
// Normalise a path-ish value: backslashes become slashes, leading slashes
// are stripped; null/undefined become "".
function normalizePath(path) {
  const text = String(path || "");
  return text.replace(/\\/gu, "/").replace(/^\/+/u, "");
}

// Translate a simple glob (only `*` is special) into an anchored RegExp.
function globToRegExp(pattern) {
  const escaped = String(pattern).replace(/[.+^${}()|[\]\\]/gu, "\\$&");
  return new RegExp(`^${escaped.replace(/\*/gu, ".*")}$`, "u");
}

// Compile one exclude pattern into a predicate over relative paths.
// A pattern matches when ANY single path segment matches it: literal
// patterns via exact segment equality, glob patterns (containing `*`)
// per-segment via regex. Trailing slashes on the pattern are ignored.
function compileExclude(pattern) {
  const text = normalizePath(pattern).replace(/\/+$/u, "");

  if (!text.includes("*")) {
    return (relativePath) =>
      normalizePath(relativePath).split("/").includes(text);
  }

  const regex = globToRegExp(text);
  return (relativePath) =>
    normalizePath(relativePath)
      .split("/")
      .some((segment) => regex.test(segment));
}
|
|
99
|
+
|
|
100
|
+
// True when `child` resolves to `parent` itself or somewhere beneath it.
function isSubpath(parent, child) {
  const step = relative(parent, child);
  if (step === "") return true;
  return !step.startsWith("..") && !isAbsolute(step);
}
|
|
104
|
+
|
|
105
|
+
// stat() that reports a missing path as `null` instead of throwing.
// Any error other than ENOENT still propagates to the caller.
async function pathStats(path) {
  try {
    return await stat(path);
  } catch (error) {
    if (error?.code !== "ENOENT") throw error;
    return null;
  }
}
|
|
113
|
+
|
|
114
|
+
// Walk each include root under `sourceDir` and gather the regular files
// that survive every exclude matcher. Includes that escape the source root
// (absolute or `../`) are silently dropped as a path-traversal guard.
// Returns [{ absPath, relativePath, size }], sorted by relativePath for a
// deterministic archive layout.
async function collectFiles({ sourceDir, includes, excludeMatchers }) {
  const root = resolve(sourceDir);
  const found = [];

  const walk = async (absPath, relPath) => {
    if (excludeMatchers.some((matches) => matches(relPath))) return;

    const info = await pathStats(absPath);
    if (!info) return;

    if (info.isDirectory()) {
      for (const entry of await readdir(absPath, { withFileTypes: true })) {
        await walk(join(absPath, entry.name), join(relPath, entry.name));
      }
    } else if (info.isFile()) {
      found.push({
        absPath,
        relativePath: normalizePath(relPath),
        size: info.size,
      });
    }
  };

  for (const include of includes || []) {
    const rel = normalizePath(include);
    if (!rel || rel.startsWith("../")) continue;
    const abs = resolve(root, rel);
    if (!isSubpath(root, abs)) continue;
    await walk(abs, rel);
  }

  found.sort((a, b) => a.relativePath.localeCompare(b.relativePath));
  return found;
}
|
|
150
|
+
|
|
151
|
+
// List existing *.tar.gz snapshot archives in destDir, newest first
// (by mtime, name descending as tie-break). A missing destDir yields [].
async function listSnapshots(destDir) {
  let names;
  try {
    names = await readdir(destDir);
  } catch (error) {
    if (error?.code !== "ENOENT") throw error;
    names = [];
  }

  const snapshots = [];
  for (const name of names) {
    if (!name.endsWith(".tar.gz")) continue;
    const path = join(destDir, name);
    const info = await pathStats(path);
    if (info?.isFile()) snapshots.push({ name, path, mtimeMs: info.mtimeMs });
  }

  return snapshots.sort(
    (a, b) => b.mtimeMs - a.mtimeMs || b.name.localeCompare(a.name),
  );
}
|
|
168
|
+
|
|
169
|
+
// Mirror collected files into the staging directory, preserving their
// relative paths and creating intermediate directories as needed.
async function copyToStaging(files, stagingDir) {
  for (const { absPath, relativePath } of files) {
    const target = join(stagingDir, ...relativePath.split("/"));
    await mkdir(dirname(target), { recursive: true });
    await copyFile(absPath, target);
  }
}
|
|
176
|
+
|
|
177
|
+
// Write the file manifest to `.snapshot-files` inside the staging dir, then
// invoke the system `tar` there to build `archivePath` from exactly those
// entries. The manifest is removed afterwards so it never leaks into later
// snapshots.
async function createArchive({ stagingDir, archivePath, files }) {
  const manifestPath = join(stagingDir, ".snapshot-files");
  const archiveName = normalizePath(relative(stagingDir, archivePath));
  const manifest = `${files.map((file) => file.relativePath).join("\n")}\n`;

  await writeFile(manifestPath, manifest, "utf8");
  await execFileAsync(
    "tar",
    ["-czf", archiveName, "-C", ".", "-T", ".snapshot-files"],
    { cwd: stagingDir, windowsHide: true },
  );
  await rm(manifestPath, { force: true });
}
|
|
195
|
+
|
|
196
|
+
// Delete the oldest archives so at most `maxSnapshots` remain (minimum 1;
// a non-numeric cap falls back to DEFAULT_MAX_SNAPSHOTS).
async function pruneSnapshots(destDir, maxSnapshots) {
  const keep = Math.max(1, Number(maxSnapshots) || DEFAULT_MAX_SNAPSHOTS);
  const stale = (await listSnapshots(destDir)).slice(keep);
  for (const snapshot of stale) {
    await rm(snapshot.path, { force: true });
  }
}
|
|
203
|
+
|
|
204
|
+
// Compact UTC timestamp for archive names: "2024-01-02T03:04:05.678Z"
// becomes "20240102T030405Z" (dashes/colons stripped, millis dropped).
function formatStamp(date) {
  const iso = date.toISOString();
  return iso.replace(/[-:]/gu, "").replace(/\.\d{3}Z$/u, "Z");
}
|
|
210
|
+
|
|
211
|
+
// Collision-resistant suffix: pid + wall-clock millis + random hex tail.
function uniqueSuffix() {
  const randomHex = Math.random().toString(16).slice(2);
  return `${process.pid}-${Date.now()}-${randomHex}`;
}
|
|
214
|
+
|
|
215
|
+
/**
 * Snapshot selected user state into a rolling tar.gz archive.
 *
 * Files are copied into a temp staging directory first (so the live tree is
 * never read mid-archive), tarred there, then renamed into destDir. When the
 * newest existing archive is younger than `thresholdMs`, no new snapshot is
 * taken.
 *
 * @param {object} options
 * @param {string} options.sourceDir - tree to snapshot (skipped if missing)
 * @param {string} options.destDir - directory where archives accumulate
 * @param {string[]} options.includes - relative paths to capture
 * @param {string[]} [options.excludes] - segment/glob patterns to drop
 * @param {number} [options.thresholdMs] - minimum age of the newest archive
 *   before another is taken; 0 disables the freshness check
 * @param {number} [options.maxSnapshots=10] - rolling retention cap
 * @returns {Promise<{skipped: boolean, reason?: string, path?: string, sizeBytes?: number, fileCount?: number}>}
 */
export async function snapshotState({
  sourceDir,
  destDir,
  includes,
  excludes = [],
  thresholdMs = 0,
  maxSnapshots = DEFAULT_MAX_SNAPSHOTS,
}) {
  const sourceRoot = resolve(sourceDir || "");
  const destRoot = resolve(destDir || "");

  const sourceInfo = await pathStats(sourceRoot);
  if (!sourceInfo?.isDirectory()) {
    return { skipped: true, reason: "source-missing" };
  }

  await mkdir(destRoot, { recursive: true });

  // Freshness gate: skip when the newest archive is still within threshold.
  const [newest] = await listSnapshots(destRoot);
  const threshold = Number(thresholdMs);
  if (newest && threshold > 0 && Date.now() - newest.mtimeMs < threshold) {
    return { skipped: true, reason: "threshold", path: newest.path };
  }

  const matchers = excludes.map((pattern) => compileExclude(pattern));
  const files = await collectFiles({
    sourceDir: sourceRoot,
    includes,
    excludeMatchers: matchers,
  });
  if (files.length === 0) {
    return { skipped: true, reason: "empty" };
  }

  const suffix = uniqueSuffix();
  const stagingDir = join(tmpdir(), `tfx-state-snapshot-${suffix}`);
  const tempArchivePath = join(stagingDir, `.state-${suffix}.tar.gz.tmp`);
  const finalArchivePath = join(
    destRoot,
    `state-${formatStamp(new Date())}-${suffix}.tar.gz`,
  );

  try {
    await mkdir(stagingDir, { recursive: true });
    await copyToStaging(files, stagingDir);
    await createArchive({
      stagingDir,
      archivePath: tempArchivePath,
      files,
    });
    // Rename into place only once the archive is fully written.
    await rename(tempArchivePath, finalArchivePath);
    await pruneSnapshots(destRoot, maxSnapshots);

    const archiveInfo = await stat(finalArchivePath);
    const rawBytes = files.reduce((sum, file) => sum + file.size, 0);
    return {
      skipped: false,
      path: finalArchivePath,
      sizeBytes: archiveInfo.size || rawBytes,
      fileCount: files.length,
    };
  } finally {
    await rm(stagingDir, { recursive: true, force: true });
    await rm(tempArchivePath, { force: true });
  }
}
|
package/hub/router.mjs
CHANGED
|
@@ -60,6 +60,21 @@ function normalizeAgentTopics(store, agentId, runtimeTopics) {
|
|
|
60
60
|
return Array.from(topics);
|
|
61
61
|
}
|
|
62
62
|
|
|
63
|
+
// Wait for a single `eventName` emission (resolving with the args array, as
// events.once does), or reject after `timeoutMs` milliseconds. The timeout
// is clamped to [0, 30000]; non-numeric input counts as 0 (immediate).
//
// Fix: the previous version left the once() listener attached forever when
// the timeout won the race, leaking one listener per timed-out request on
// the shared response emitter. An AbortSignal now detaches it either way.
// (Promise.race subscribes to both branches, so the aborted once() promise
// never surfaces as an unhandled rejection.)
function waitForEmitterOnce(emitter, eventName, timeoutMs) {
  const timeout = Math.max(0, Math.min(Number(timeoutMs) || 0, 30000));
  const controller = new AbortController();
  let timer = null;
  return Promise.race([
    once(emitter, eventName, { signal: controller.signal }),
    new Promise((_, reject) => {
      timer = setTimeout(() => {
        reject(new Error(`timed out waiting for ${eventName}`));
      }, timeout);
    }),
  ]).finally(() => {
    if (timer) clearTimeout(timer);
    // No-op after a successful emission; removes the pending listener after
    // a timeout.
    controller.abort();
  });
}
|
|
77
|
+
|
|
63
78
|
/**
|
|
64
79
|
* 라우터 생성
|
|
65
80
|
* @param {object} store
|
|
@@ -525,9 +540,11 @@ export function createRouter(store) {
|
|
|
525
540
|
}
|
|
526
541
|
|
|
527
542
|
try {
|
|
528
|
-
const [response] = await
|
|
529
|
-
|
|
530
|
-
|
|
543
|
+
const [response] = await waitForEmitterOnce(
|
|
544
|
+
responseEmitter,
|
|
545
|
+
cid,
|
|
546
|
+
await_response_ms,
|
|
547
|
+
);
|
|
531
548
|
return {
|
|
532
549
|
ok: true,
|
|
533
550
|
data: {
|