triflux 10.15.0 → 10.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -0
- package/bin/triflux.mjs +27 -3
- package/hub/lib/state-snapshot.mjs +293 -0
- package/hub/server.mjs +5 -17
- package/hub/team/swarm-cli.mjs +26 -16
- package/hub/team/worker-signal.mjs +263 -0
- package/hub/team/worker-signal.types.d.ts +52 -0
- package/hub/workers/codex-mcp.mjs +90 -13
- package/package.json +4 -1
- package/scripts/check-codex-config-stable.mjs +122 -0
- package/scripts/hub-ensure.mjs +24 -0
- package/scripts/lib/mcp-health.mjs +4 -1
- package/scripts/release/bump-version.mjs +20 -12
- package/scripts/release/lib.mjs +7 -0
- package/scripts/release/prepare.mjs +8 -0
- package/scripts/setup.mjs +19 -2
- package/scripts/snapshot-codex-state.mjs +37 -0
- package/scripts/snapshot-gemini-state.mjs +37 -0
- package/scripts/sync-hub-mcp-settings.mjs +30 -8
- package/scripts/test-lock.mjs +8 -1
- package/scripts/tfx-route.sh +13 -9
- package/skills/tfx-ship/SKILL.md +6 -2
- package/skills/tfx-workspace/async-tests/run-tests.sh +0 -203
- package/skills/tfx-workspace/evals/evals.json +0 -79
- package/skills/tfx-workspace/iteration-1/benchmark.json +0 -524
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/eval_metadata.json +0 -11
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/outputs/analysis.md +0 -154
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/outputs/analysis.md +0 -126
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/eval_metadata.json +0 -11
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/outputs/analysis.md +0 -119
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/outputs/analysis.md +0 -115
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/eval_metadata.json +0 -10
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/grading.json +0 -20
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/outputs/analysis.md +0 -86
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/grading.json +0 -20
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/outputs/analysis.md +0 -81
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/multi-team-creation/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/outputs/analysis.md +0 -316
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/outputs/analysis.md +0 -352
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/review.html +0 -1325
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/outputs/analysis.md +0 -97
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/outputs/analysis.md +0 -94
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/outputs/analysis.md +0 -209
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/outputs/analysis.md +0 -193
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/benchmark.json +0 -144
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/eval_metadata.json +0 -13
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/grading.json +0 -35
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/outputs/analysis.md +0 -382
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/grading.json +0 -35
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/outputs/analysis.md +0 -333
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/review.html +0 -1325
- package/skills/tfx-workspace/skill-snapshot/tfx-auto/SKILL.md +0 -217
- package/skills/tfx-workspace/skill-snapshot/tfx-auto-codex/SKILL.md +0 -77
- package/skills/tfx-workspace/skill-snapshot/tfx-codex/SKILL.md +0 -65
- package/skills/tfx-workspace/skill-snapshot/tfx-doctor/SKILL.md +0 -94
- package/skills/tfx-workspace/skill-snapshot/tfx-gemini/SKILL.md +0 -82
- package/skills/tfx-workspace/skill-snapshot/tfx-hub/SKILL.md +0 -133
- package/skills/tfx-workspace/skill-snapshot/tfx-multi/SKILL.md +0 -426
- package/skills/tfx-workspace/skill-snapshot/tfx-setup/SKILL.md +0 -101
package/README.md
CHANGED
|
@@ -101,6 +101,21 @@ Then run `tfx setup` to configure your environment.
|
|
|
101
101
|
|
|
102
102
|
> **Note**: Deep skills require **psmux** (or tmux), **triflux Hub**, **Codex CLI**, and **Gemini CLI** for full Tri-CLI consensus. Without these, skills automatically degrade to Claude-only mode. Run `tfx doctor` to check your environment.
|
|
103
103
|
|
|
104
|
+
### State Snapshots
|
|
105
|
+
|
|
106
|
+
Hub startup also takes a best-effort daily snapshot of selected `~/.codex/` and
|
|
107
|
+
`~/.gemini/` state into `references/codex-snapshots/` and
|
|
108
|
+
`references/gemini-snapshots/`. Snapshot archives are rolling backups capped at
|
|
109
|
+
10 files per tool and are ignored by git.
|
|
110
|
+
|
|
111
|
+
Manual commands:
|
|
112
|
+
|
|
113
|
+
```bash
|
|
114
|
+
npm run snapshot:codex
|
|
115
|
+
npm run snapshot:gemini
|
|
116
|
+
npm run snapshot:all
|
|
117
|
+
```
|
|
118
|
+
|
|
104
119
|
---
|
|
105
120
|
|
|
106
121
|
## Core Engine
|
package/bin/triflux.mjs
CHANGED
|
@@ -5030,6 +5030,16 @@ function stopHubForUpdate() {
|
|
|
5030
5030
|
return info;
|
|
5031
5031
|
}
|
|
5032
5032
|
|
|
5033
|
+
// Open (append mode) the persistent hub log file under
// ~/.claude/cache/tfx-hub/hub.log, creating the directory if needed.
// Returns the file descriptor, or undefined on any failure — callers
// fall back to "ignore" stdio when no fd is available.
function openHubLogFd() {
  try {
    const logDir = join(homedir(), ".claude", "cache", "tfx-hub");
    const logPath = join(logDir, "hub.log");
    mkdirSync(logDir, { recursive: true });
    return openSync(logPath, "a");
  } catch {
    return undefined;
  }
}
|
|
5042
|
+
|
|
5033
5043
|
function startHubAfterUpdate(info) {
|
|
5034
5044
|
if (!info) return false;
|
|
5035
5045
|
const serverPath = join(PKG_ROOT, "hub", "server.mjs");
|
|
@@ -5040,13 +5050,19 @@ function startHubAfterUpdate(info) {
|
|
|
5040
5050
|
: String(process.env.TFX_HUB_PORT || "27888");
|
|
5041
5051
|
|
|
5042
5052
|
try {
|
|
5053
|
+
const logFd = openHubLogFd();
|
|
5043
5054
|
const child = spawn(process.execPath, [serverPath], {
|
|
5044
5055
|
env: { ...process.env, TFX_HUB_PORT: port },
|
|
5045
|
-
stdio: "ignore",
|
|
5056
|
+
stdio: ["ignore", logFd ?? "ignore", logFd ?? "ignore"],
|
|
5046
5057
|
detached: true,
|
|
5047
5058
|
windowsHide: true,
|
|
5048
5059
|
});
|
|
5049
5060
|
child.unref();
|
|
5061
|
+
if (logFd !== undefined) {
|
|
5062
|
+
try {
|
|
5063
|
+
closeSync(logFd);
|
|
5064
|
+
} catch {}
|
|
5065
|
+
}
|
|
5050
5066
|
return true;
|
|
5051
5067
|
} catch {
|
|
5052
5068
|
return false;
|
|
@@ -5211,7 +5227,9 @@ async function cmdHub(args = [], options = {}) {
|
|
|
5211
5227
|
});
|
|
5212
5228
|
}
|
|
5213
5229
|
|
|
5214
|
-
// Issue #102: spawn stderr 를
|
|
5230
|
+
// Issue #102 + hub-detach fix: spawn stdout/stderr 를 두 채널로 redirect.
|
|
5231
|
+
// - startupErrPath (tmp): 3초 안의 startup 실패 진단 (성공 시 cleanup)
|
|
5232
|
+
// - hub.log (cache): runtime stdout/stderr 영구 보존 (crash 추적)
|
|
5215
5233
|
// detached spawn 은 pipe 유지가 까다로우니 fd 리다이렉트로 접근.
|
|
5216
5234
|
const { openSync: _openSync, closeSync: _closeSync } = await import(
|
|
5217
5235
|
"node:fs"
|
|
@@ -5227,10 +5245,11 @@ async function cmdHub(args = [], options = {}) {
|
|
|
5227
5245
|
} catch {
|
|
5228
5246
|
errFd = undefined;
|
|
5229
5247
|
}
|
|
5248
|
+
const logFd = openHubLogFd();
|
|
5230
5249
|
|
|
5231
5250
|
const child = spawn(process.execPath, [serverPath], {
|
|
5232
5251
|
env: { ...process.env, TFX_HUB_PORT: port },
|
|
5233
|
-
stdio: ["ignore", "ignore", errFd ?? "ignore"],
|
|
5252
|
+
stdio: ["ignore", logFd ?? "ignore", errFd ?? logFd ?? "ignore"],
|
|
5234
5253
|
detached: true,
|
|
5235
5254
|
windowsHide: true,
|
|
5236
5255
|
});
|
|
@@ -5240,6 +5259,11 @@ async function cmdHub(args = [], options = {}) {
|
|
|
5240
5259
|
_closeSync(errFd);
|
|
5241
5260
|
} catch {}
|
|
5242
5261
|
}
|
|
5262
|
+
if (logFd !== undefined) {
|
|
5263
|
+
try {
|
|
5264
|
+
_closeSync(logFd);
|
|
5265
|
+
} catch {}
|
|
5266
|
+
}
|
|
5243
5267
|
|
|
5244
5268
|
// PID 파일 확인 (최대 3초 대기, 100ms 폴링)
|
|
5245
5269
|
let started = false;
|
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import {
|
|
3
|
+
copyFile,
|
|
4
|
+
mkdir,
|
|
5
|
+
readdir,
|
|
6
|
+
rename,
|
|
7
|
+
rm,
|
|
8
|
+
stat,
|
|
9
|
+
writeFile,
|
|
10
|
+
} from "node:fs/promises";
|
|
11
|
+
import { tmpdir } from "node:os";
|
|
12
|
+
import { dirname, isAbsolute, join, relative, resolve } from "node:path";
|
|
13
|
+
import { promisify } from "node:util";
|
|
14
|
+
|
|
15
|
+
// Promisified child_process.execFile — used below to invoke the system `tar` binary.
const execFileAsync = promisify(execFile);
// Rolling-backup cap: at most this many .tar.gz snapshots are kept per destination dir.
const DEFAULT_MAX_SNAPSHOTS = 10;
// One day in milliseconds — exported as the default snapshot freshness threshold.
const DAY_MS = 24 * 60 * 60 * 1000;
|
|
18
|
+
|
|
19
|
+
// Paths under ~/.codex/ (relative to the source dir) that are worth backing up.
// Entries may be files or whole directories; missing entries are silently skipped.
export const CODEX_STATE_INCLUDES = Object.freeze([
  "config.toml",
  "AGENTS.md",
  "skills",
  "agents",
  "prompts",
  "plugins",
]);
|
|
27
|
+
|
|
28
|
+
// Patterns pruned from the codex snapshot. A pattern matches any single path
// segment (see compileExclude); `*` entries are treated as globs. Covers
// caches, logs, session data, and credentials (auth.json, .credentials.json)
// so secrets never land in a snapshot archive.
export const CODEX_STATE_EXCLUDES = Object.freeze([
  "*.sqlite*",
  ".sandbox*",
  ".tmp",
  "_archived_skills",
  "memories",
  "cache",
  "log",
  "logs",
  "sessions",
  "auth.json",
  ".credentials.json",
  "*.bak*",
  "*.tmp-*",
  "cap_sid",
  "installation_id",
  "history.jsonl",
  "models_cache.json",
]);
|
|
47
|
+
|
|
48
|
+
// Paths under ~/.gemini/ (relative to the source dir) included in snapshots.
export const GEMINI_STATE_INCLUDES = Object.freeze([
  "settings.json",
  "settings.local.json",
  "GEMINI.md",
  "commands",
  "extensions",
  "plugins",
  "skills",
  "agents",
]);
|
|
58
|
+
|
|
59
|
+
// Patterns pruned from the gemini snapshot — caches, logs, sessions, and
// credential files are excluded for the same reasons as CODEX_STATE_EXCLUDES.
export const GEMINI_STATE_EXCLUDES = Object.freeze([
  "*.sqlite*",
  "cache",
  "log",
  "logs",
  "sessions",
  "auth.json",
  ".credentials.json",
  "*.bak*",
]);
|
|
69
|
+
|
|
70
|
+
// Public defaults for callers (hub startup / manual snapshot scripts):
// take at most one snapshot per day, keep at most 10 archives per tool.
export const STATE_SNAPSHOT_THRESHOLD_MS = DAY_MS;
export const STATE_SNAPSHOT_MAX_SNAPSHOTS = DEFAULT_MAX_SNAPSHOTS;
|
|
72
|
+
|
|
73
|
+
// Canonicalize a path fragment: coerce to string (null/undefined -> ""),
// convert Windows backslashes to forward slashes, and drop leading slashes
// so the result is always a relative, slash-separated path.
function normalizePath(path) {
  const text = String(path || "");
  const forwardSlashed = text.replace(/\\/gu, "/");
  return forwardSlashed.replace(/^\/+/u, "");
}
|
|
78
|
+
|
|
79
|
+
// Compile a simple glob (only `*` is a wildcard) into an anchored RegExp.
// Fix: `?` is now escaped as a literal character. Previously it was left
// unescaped, so a pattern like "a?b" compiled to /^a?b$/ where `?` acted as
// a regex quantifier ("ab" matched, the literal "a?b" did not).
// No shipped exclude pattern contains `?`, so this is backward compatible.
function globToRegExp(pattern) {
  const escaped = String(pattern).replace(/[.+?^${}()|[\]\\]/gu, "\\$&");
  return new RegExp(`^${escaped.replace(/\*/gu, ".*")}$`, "u");
}
|
|
83
|
+
|
|
84
|
+
// Build a matcher for one exclude pattern. The pattern is compared against
// each path segment of the candidate relative path: glob patterns (`*`)
// match if any segment matches the compiled regex; plain patterns match if
// any segment equals the pattern exactly. Trailing slashes are ignored.
function compileExclude(pattern) {
  const needle = normalizePath(pattern).replace(/\/+$/u, "");
  const hasWildcard = needle.includes("*");

  if (!hasWildcard) {
    return (relativePath) =>
      normalizePath(relativePath).split("/").includes(needle);
  }

  const regex = globToRegExp(needle);
  return (relativePath) =>
    normalizePath(relativePath)
      .split("/")
      .some((segment) => regex.test(segment));
}
|
|
99
|
+
|
|
100
|
+
// True when `child` is `parent` itself or lives underneath it.
// Implemented via path.relative: an empty result means identical paths; a
// result that escapes upward (`..`) or resolves absolute means "outside".
function isSubpath(parent, child) {
  const step = relative(parent, child);
  if (step === "") return true;
  if (step.startsWith("..")) return false;
  return !isAbsolute(step);
}
|
|
104
|
+
|
|
105
|
+
// stat() that treats a missing path as a normal outcome: returns the Stats
// object, or null on ENOENT. Any other fs error is rethrown.
async function pathStats(path) {
  let info;
  try {
    info = await stat(path);
  } catch (error) {
    if (error?.code !== "ENOENT") throw error;
    return null;
  }
  return info;
}
|
|
113
|
+
|
|
114
|
+
// Walk each include root under sourceDir and gather regular files, skipping
// any path for which an exclude matcher fires. Includes that normalize to
// empty, escape upward ("../"), or resolve outside sourceDir are ignored.
// Returns entries sorted by relativePath for a deterministic archive order.
async function collectFiles({ sourceDir, includes, excludeMatchers }) {
  const sourceRoot = resolve(sourceDir);
  const collected = [];

  // Depth-first visit; directories recurse, regular files are recorded.
  const visit = async (absPath, relativePath) => {
    const excluded = excludeMatchers.some((matcher) => matcher(relativePath));
    if (excluded) return;

    const info = await pathStats(absPath);
    if (info === null) return;

    if (info.isDirectory()) {
      const children = await readdir(absPath, { withFileTypes: true });
      for (const child of children) {
        await visit(join(absPath, child.name), join(relativePath, child.name));
      }
    } else if (info.isFile()) {
      // Symlinks/sockets/etc. fall through: neither directory nor file.
      collected.push({
        absPath,
        relativePath: normalizePath(relativePath),
        size: info.size,
      });
    }
  };

  for (const include of includes || []) {
    const relativeInclude = normalizePath(include);
    if (!relativeInclude || relativeInclude.startsWith("../")) continue;
    const absPath = resolve(sourceRoot, relativeInclude);
    if (!isSubpath(sourceRoot, absPath)) continue;
    await visit(absPath, relativeInclude);
  }

  collected.sort((a, b) => a.relativePath.localeCompare(b.relativePath));
  return collected;
}
|
|
150
|
+
|
|
151
|
+
// List existing .tar.gz snapshot archives in destDir, newest first
// (mtime descending, name descending as tiebreak). A missing destDir
// yields an empty list; other fs errors propagate.
async function listSnapshots(destDir) {
  let names;
  try {
    names = await readdir(destDir);
  } catch (error) {
    if (error?.code === "ENOENT") return [];
    throw error;
  }

  const found = [];
  for (const name of names) {
    if (!name.endsWith(".tar.gz")) continue;
    const path = join(destDir, name);
    const info = await pathStats(path);
    if (info?.isFile()) {
      found.push({ name, path, mtimeMs: info.mtimeMs });
    }
  }

  found.sort((a, b) => b.mtimeMs - a.mtimeMs || b.name.localeCompare(a.name));
  return found;
}
|
|
168
|
+
|
|
169
|
+
// Copy each collected file into the staging directory, recreating its
// relative directory structure. relativePath is slash-separated, so it is
// split and re-joined to stay correct on every platform.
async function copyToStaging(files, stagingDir) {
  for (const { absPath, relativePath } of files) {
    const segments = relativePath.split("/");
    const targetPath = join(stagingDir, ...segments);
    await mkdir(dirname(targetPath), { recursive: true });
    await copyFile(absPath, targetPath);
  }
}
|
|
176
|
+
|
|
177
|
+
// Create a gzip tar archive of the staged files. The file list is written
// to a manifest inside stagingDir and passed to `tar -T` so only collected
// paths are archived (the manifest and the archive itself are not listed).
// archivePath is expected to live inside stagingDir, hence the relative name.
// NOTE(review): requires a `tar` binary on PATH — confirm for target platforms.
async function createArchive({ stagingDir, archivePath, files }) {
  const listPath = join(stagingDir, ".snapshot-files");
  const archiveName = normalizePath(relative(stagingDir, archivePath));
  const manifest = files.map((file) => file.relativePath).join("\n");
  await writeFile(listPath, `${manifest}\n`, "utf8");
  const tarArgs = ["-czf", archiveName, "-C", ".", "-T", ".snapshot-files"];
  await execFileAsync("tar", tarArgs, { cwd: stagingDir, windowsHide: true });
  await rm(listPath, { force: true });
}
|
|
195
|
+
|
|
196
|
+
// Enforce the rolling-backup cap: keep the newest `maxSnapshots` archives
// (minimum 1; non-numeric input falls back to the default) and delete the rest.
async function pruneSnapshots(destDir, maxSnapshots) {
  const keep = Math.max(1, Number(maxSnapshots) || DEFAULT_MAX_SNAPSHOTS);
  const snapshots = await listSnapshots(destDir);
  const stale = snapshots.slice(keep);
  for (const snapshot of stale) {
    await rm(snapshot.path, { force: true });
  }
}
|
|
203
|
+
|
|
204
|
+
// Compact UTC timestamp for archive names: ISO-8601 with separators and
// milliseconds removed, e.g. "20240102T030405Z".
function formatStamp(date) {
  const iso = date.toISOString();
  const compact = iso.replace(/[-:]/gu, "");
  return compact.replace(/\.\d{3}Z$/u, "Z");
}
|
|
210
|
+
|
|
211
|
+
// Collision-resistant suffix for temp/archive names: pid, epoch millis, and
// a random hex fragment joined by dashes. Not cryptographically secure.
function uniqueSuffix() {
  const random = Math.random().toString(16).slice(2);
  return [process.pid, Date.now(), random].join("-");
}
|
|
214
|
+
|
|
215
|
+
/**
 * Snapshot selected user state into a rolling tar.gz archive.
 *
 * Flow: skip if the source dir is missing, skip if the newest existing
 * archive is fresher than `thresholdMs`, skip if nothing matches the
 * includes; otherwise copy matched files to a temp staging dir, tar them,
 * atomically rename the archive into `destDir`, then prune old archives.
 *
 * @param {object} options
 * @param {string} options.sourceDir - Directory to snapshot (e.g. ~/.codex).
 * @param {string} options.destDir - Directory receiving .tar.gz archives.
 * @param {string[]} options.includes - Relative files/dirs to collect.
 * @param {string[]} options.excludes - Glob/segment patterns to skip.
 * @param {number} options.thresholdMs - Min age of newest archive before a
 *   new snapshot is taken; 0 disables the freshness check.
 * @param {number} [options.maxSnapshots=10] - Rolling-backup cap per destDir.
 * @returns {Promise<{skipped: boolean, reason?: string, path?: string, sizeBytes?: number, fileCount?: number}>}
 */
export async function snapshotState({
  sourceDir,
  destDir,
  includes,
  excludes = [],
  thresholdMs = 0,
  maxSnapshots = DEFAULT_MAX_SNAPSHOTS,
}) {
  const sourceRoot = resolve(sourceDir || "");
  const destRoot = resolve(destDir || "");
  const sourceInfo = await pathStats(sourceRoot);
  if (!sourceInfo?.isDirectory()) {
    return { skipped: true, reason: "source-missing" };
  }

  await mkdir(destRoot, { recursive: true });
  // Freshness gate: listSnapshots returns newest-first, so [0] is the
  // most recent archive; within the threshold window we do nothing.
  const snapshots = await listSnapshots(destRoot);
  const newest = snapshots[0];
  if (
    newest &&
    Number(thresholdMs) > 0 &&
    Date.now() - newest.mtimeMs < Number(thresholdMs)
  ) {
    return { skipped: true, reason: "threshold", path: newest.path };
  }

  const excludeMatchers = excludes.map((pattern) => compileExclude(pattern));
  const files = await collectFiles({
    sourceDir: sourceRoot,
    includes,
    excludeMatchers,
  });
  if (files.length === 0) {
    return { skipped: true, reason: "empty" };
  }

  // Archive is built inside a per-call temp staging dir under a .tmp name,
  // then renamed into destDir — readers never observe a partial archive.
  const suffix = uniqueSuffix();
  const stagingDir = join(tmpdir(), `tfx-state-snapshot-${suffix}`);
  const tempArchivePath = join(stagingDir, `.state-${suffix}.tar.gz.tmp`);
  const finalArchivePath = join(
    destRoot,
    `state-${formatStamp(new Date())}-${suffix}.tar.gz`,
  );

  try {
    await mkdir(stagingDir, { recursive: true });
    await copyToStaging(files, stagingDir);
    await createArchive({
      stagingDir,
      archivePath: tempArchivePath,
      files,
    });
    await rename(tempArchivePath, finalArchivePath);
    // Prune AFTER the new archive lands so the cap counts it.
    await pruneSnapshots(destRoot, maxSnapshots);
    const archiveInfo = await stat(finalArchivePath);
    // Fallback sum of raw file sizes in case the archive stat reports 0.
    const sizeBytes = files.reduce((sum, file) => sum + file.size, 0);
    return {
      skipped: false,
      path: finalArchivePath,
      sizeBytes: archiveInfo.size || sizeBytes,
      fileCount: files.length,
    };
  } finally {
    // Best-effort cleanup; the second rm is a no-op when stagingDir (which
    // contains tempArchivePath) was already removed.
    await rm(stagingDir, { recursive: true, force: true });
    await rm(tempArchivePath, { force: true });
  }
}
|
package/hub/server.mjs
CHANGED
|
@@ -140,7 +140,7 @@ export async function tryReuseExistingHub({
|
|
|
140
140
|
} = {}) {
|
|
141
141
|
const existing = readCurrentState();
|
|
142
142
|
const existingPort = Number(existing?.port);
|
|
143
|
-
const requestedPort = parseHubPort(port);
|
|
143
|
+
const requestedPort = parseHubPort(port) ?? HUB_DEFAULT_PORT;
|
|
144
144
|
const livePeer = detectPeer();
|
|
145
145
|
const livePidPort = parseHubPort(livePeer?.port);
|
|
146
146
|
if (
|
|
@@ -150,11 +150,7 @@ export async function tryReuseExistingHub({
|
|
|
150
150
|
) {
|
|
151
151
|
return null;
|
|
152
152
|
}
|
|
153
|
-
if (
|
|
154
|
-
if (portSpecified) return null;
|
|
155
|
-
if (!livePeer?.alive || !livePidPort || existingPort !== livePidPort) {
|
|
156
|
-
return null;
|
|
157
|
-
}
|
|
153
|
+
if (existingPort !== requestedPort) {
|
|
158
154
|
log.warn(
|
|
159
155
|
{
|
|
160
156
|
requestedPort,
|
|
@@ -162,8 +158,9 @@ export async function tryReuseExistingHub({
|
|
|
162
158
|
pid: livePeer.pid,
|
|
163
159
|
livePidPort,
|
|
164
160
|
},
|
|
165
|
-
"hub.
|
|
161
|
+
"hub.port_mismatch_not_reusing_live_pid",
|
|
166
162
|
);
|
|
163
|
+
return null;
|
|
167
164
|
}
|
|
168
165
|
if (!(await checkHealth(existingPort))) return null;
|
|
169
166
|
|
|
@@ -237,18 +234,9 @@ function readHubPidFile(
|
|
|
237
234
|
}
|
|
238
235
|
|
|
239
236
|
// Resolve the hub port: an explicit TFX_HUB_PORT env value wins, otherwise
// the default port is used. The opts parameter is accepted but intentionally
// unused, kept only so existing callers keep compiling.
export function resolveHubPort(env = process.env, opts = {}) {
  void opts;
  const fromEnv = parseHubPort(env?.TFX_HUB_PORT);
  return fromEnv ? fromEnv : HUB_DEFAULT_PORT;
}
|
|
254
242
|
|
package/hub/team/swarm-cli.mjs
CHANGED
|
@@ -14,12 +14,18 @@ const YELLOW = "\u001b[93m";
|
|
|
14
14
|
const GRAY = "\u001b[90m";
|
|
15
15
|
|
|
16
16
|
/**
|
|
17
|
-
* #116-C: non-TTY background 환경에서 `tfx swarm`
|
|
18
|
-
* 무한 hang
|
|
17
|
+
* #116-C: non-TTY background 환경에서 `tfx swarm` 실행 시 codex worker spawn 이
|
|
18
|
+
* 무한 hang 가능성이 있다 (stdin TTY 대기 또는 hub MCP lease race).
|
|
19
19
|
*
|
|
20
|
-
*
|
|
21
|
-
* -
|
|
22
|
-
* -
|
|
20
|
+
* Policy (v10.15+):
|
|
21
|
+
* - stdout 또는 stdin 중 하나라도 TTY → silent OK (기존 동작).
|
|
22
|
+
* - 양측 non-TTY → warning + 진행 (기본, 사용자 친화 + CI/background 호환).
|
|
23
|
+
* - `TFX_BLOCK_NON_TTY_SWARM=1` opt-out → fail-fast + 복구 경로 안내 (안전 망).
|
|
24
|
+
* - `TFX_ALLOW_NON_TTY_SWARM=1` 은 silent OK (호환 유지, warning suppress).
|
|
25
|
+
*
|
|
26
|
+
* 기존 fail-fast 정책은 첫 사용자에게 묻기 효과 (실제 user terminal 은 TTY 인데
|
|
27
|
+
* Claude Code run_in_background 같은 spawn 환경에서 child stdio 는 non-TTY).
|
|
28
|
+
* 다른 사용자도 동일 마찰 → 기본 동작을 "proceed with warning" 으로 변경.
|
|
23
29
|
*
|
|
24
30
|
* @param {{
|
|
25
31
|
* stdoutIsTTY?: boolean,
|
|
@@ -44,21 +50,25 @@ export function assertTtyForSwarm(deps = {}) {
|
|
|
44
50
|
return { ok: true, optIn: false, warnings };
|
|
45
51
|
}
|
|
46
52
|
|
|
53
|
+
// 양측 non-TTY 부터 적용되는 정책.
|
|
54
|
+
if (env.TFX_BLOCK_NON_TTY_SWARM === "1") {
|
|
55
|
+
const reason =
|
|
56
|
+
"tfx swarm 이 차단됨 — non-TTY 환경 + TFX_BLOCK_NON_TTY_SWARM=1 (#116-C).\n" +
|
|
57
|
+
" 복구 경로:\n" +
|
|
58
|
+
" 1) 터미널에서 직접 실행: tfx swarm <prd>\n" +
|
|
59
|
+
" 2) TFX_BLOCK_NON_TTY_SWARM=0 (또는 unset) 으로 차단 해제 후 재시도";
|
|
60
|
+
return { ok: false, optIn: false, warnings, reason };
|
|
61
|
+
}
|
|
62
|
+
|
|
47
63
|
if (env.TFX_ALLOW_NON_TTY_SWARM === "1") {
|
|
48
|
-
|
|
49
|
-
"non-TTY 환경 감지 — TFX_ALLOW_NON_TTY_SWARM=1 opt-in 으로 진행합니다. codex worker spawn hang 가능성 존재 (#116-C).",
|
|
50
|
-
);
|
|
64
|
+
// 명시 opt-in — silent OK (기존 호환, warning 미출력).
|
|
51
65
|
return { ok: true, optIn: true, warnings };
|
|
52
66
|
}
|
|
53
67
|
|
|
54
|
-
|
|
55
|
-
"
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
" 2) tmux 경로: tfx multi --teammate-mode tmux --auto-attach --dashboard --assign ...\n" +
|
|
59
|
-
" 3) opt-in (위험): TFX_ALLOW_NON_TTY_SWARM=1 tfx swarm <prd>";
|
|
60
|
-
|
|
61
|
-
return { ok: false, optIn: false, warnings, reason };
|
|
68
|
+
warnings.push(
|
|
69
|
+
"non-TTY 환경 감지 — codex worker spawn hang 가능성 존재 (#116-C). 차단하려면 TFX_BLOCK_NON_TTY_SWARM=1.",
|
|
70
|
+
);
|
|
71
|
+
return { ok: true, optIn: true, warnings };
|
|
62
72
|
}
|
|
63
73
|
|
|
64
74
|
export function parseFlags(args) {
|