triflux 10.9.21 → 10.9.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +34 -0
- package/.claude-plugin/plugin.json +22 -0
- package/config/mcp-registry.json +29 -0
- package/hub/account-broker.mjs +6 -4
- package/hub/cli-adapter-base.mjs +14 -14
- package/hub/lib/env-detect.mjs +47 -20
- package/hub/server.mjs +17 -15
- package/hub/team/headless.mjs +10 -0
- package/hub/team/swarm-hypervisor.mjs +2 -2
- package/hub/workers/delegator-mcp.mjs +129 -1
- package/hud/constants.mjs +24 -13
- package/hud/renderers.mjs +2 -1
- package/package.json +62 -21
- package/scripts/__tests__/keyword-detector.test.mjs +4 -4
- package/scripts/__tests__/release-governance.test.mjs +148 -0
- package/scripts/doctor-diagnose.mjs +6 -7
- package/scripts/lib/cross-review-utils.mjs +2 -2
- package/scripts/lib/mcp-filter.mjs +12 -24
- package/scripts/release/bump-version.mjs +77 -0
- package/scripts/release/check-sync.mjs +51 -0
- package/scripts/release/lib.mjs +303 -0
- package/scripts/release/prepare.mjs +85 -0
- package/scripts/release/publish.mjs +87 -0
- package/scripts/release/verify.mjs +81 -0
- package/scripts/release/version-manifest.json +26 -0
- package/scripts/remote-spawn.mjs +3 -3
- package/scripts/setup.mjs +18 -15
- package/scripts/tfx-route.sh +64 -8
- package/tui/codex-profile.mjs +457 -0
- package/tui/core.mjs +266 -0
- package/tui/doctor.mjs +375 -0
- package/tui/gemini-profile.mjs +299 -0
- package/tui/monitor-data.mjs +152 -0
- package/tui/monitor.mjs +339 -0
- package/tui/setup.mjs +598 -0
- package/CLAUDE.md +0 -212
- package/references/hosts.json +0 -46
- package/skills/tfx-workspace/async-tests/run-tests.sh +0 -203
- package/skills/tfx-workspace/evals/evals.json +0 -79
- package/skills/tfx-workspace/iteration-1/benchmark.json +0 -524
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/eval_metadata.json +0 -11
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/outputs/analysis.md +0 -154
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/outputs/analysis.md +0 -126
- package/skills/tfx-workspace/iteration-1/codex-gemini-remap/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/eval_metadata.json +0 -11
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/outputs/analysis.md +0 -119
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/grading.json +0 -25
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/outputs/analysis.md +0 -115
- package/skills/tfx-workspace/iteration-1/doctor-diagnosis/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/eval_metadata.json +0 -10
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/grading.json +0 -20
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/outputs/analysis.md +0 -86
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/grading.json +0 -20
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/outputs/analysis.md +0 -81
- package/skills/tfx-workspace/iteration-1/hub-start-sequence/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/multi-team-creation/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/outputs/analysis.md +0 -316
- package/skills/tfx-workspace/iteration-1/multi-team-creation/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/outputs/analysis.md +0 -352
- package/skills/tfx-workspace/iteration-1/multi-team-creation/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/review.html +0 -1325
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/outputs/analysis.md +0 -97
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/outputs/analysis.md +0 -94
- package/skills/tfx-workspace/iteration-1/routing-implement-shortcut/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/eval_metadata.json +0 -12
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/outputs/analysis.md +0 -209
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/grading.json +0 -30
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/outputs/analysis.md +0 -193
- package/skills/tfx-workspace/iteration-1/routing-multi-task-triage/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/benchmark.json +0 -144
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/eval_metadata.json +0 -13
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/grading.json +0 -35
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/outputs/analysis.md +0 -382
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/old_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/grading.json +0 -35
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/outputs/analysis.md +0 -333
- package/skills/tfx-workspace/iteration-2/multi-team-creation-refactored/with_skill/timing.json +0 -5
- package/skills/tfx-workspace/iteration-2/review.html +0 -1325
- package/skills/tfx-workspace/skill-snapshot/tfx-auto/SKILL.md +0 -217
- package/skills/tfx-workspace/skill-snapshot/tfx-auto-codex/SKILL.md +0 -77
- package/skills/tfx-workspace/skill-snapshot/tfx-codex/SKILL.md +0 -65
- package/skills/tfx-workspace/skill-snapshot/tfx-doctor/SKILL.md +0 -94
- package/skills/tfx-workspace/skill-snapshot/tfx-gemini/SKILL.md +0 -82
- package/skills/tfx-workspace/skill-snapshot/tfx-hub/SKILL.md +0 -133
- package/skills/tfx-workspace/skill-snapshot/tfx-multi/SKILL.md +0 -426
- package/skills/tfx-workspace/skill-snapshot/tfx-setup/SKILL.md +0 -101
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
import { execFileSync } from "node:child_process";
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { dirname, join, resolve } from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
|
|
6
|
+
// Repository root: two directories up from scripts/release/ (where this file lives).
export const ROOT = resolve(dirname(fileURLToPath(import.meta.url)), "..", "..");

// Default location of the version manifest consumed by the release scripts.
export const DEFAULT_MANIFEST_PATH = join(
  ROOT,
  "scripts",
  "release",
  "version-manifest.json",
);
|
|
17
|
+
|
|
18
|
+
/**
 * Parse `--key value` / `--key` flag pairs from an argv array.
 * Tokens that do not start with `--` are ignored unless they directly follow
 * a `--key` token, in which case they become that key's value. A `--key`
 * followed by nothing, an empty string, or another `--option` is treated as a
 * boolean `true` flag.
 *
 * @param {string[]} argv - argument tokens (typically process.argv.slice(2))
 * @returns {Record<string, string | true>} parsed options
 */
export function parseArgs(argv) {
  const parsed = {};
  let index = 0;
  while (index < argv.length) {
    const token = argv[index];
    index++;
    if (!token.startsWith("--")) continue;
    const key = token.slice(2);
    const candidate = argv[index];
    // Empty/missing/`--`-prefixed followers do not count as values.
    if (candidate && !candidate.startsWith("--")) {
      parsed[key] = candidate;
      index++;
    } else {
      parsed[key] = true;
    }
  }
  return parsed;
}
|
|
34
|
+
|
|
35
|
+
/**
 * Read a file as UTF-8 and parse it as JSON.
 *
 * @param {string} filePath - path to a JSON file
 * @returns {*} the parsed value
 * @throws if the file cannot be read or is not valid JSON
 */
export function readJson(filePath) {
  const raw = readFileSync(filePath, "utf8");
  return JSON.parse(raw);
}
|
|
38
|
+
|
|
39
|
+
/**
 * Serialize a value as pretty-printed JSON (2-space indent, trailing newline)
 * and write it to disk, creating parent directories as needed.
 *
 * @param {string} filePath - destination path
 * @param {*} value - any JSON-serializable value
 */
export function writeJson(filePath, value) {
  const serialized = JSON.stringify(value, null, 2);
  mkdirSync(dirname(filePath), { recursive: true });
  writeFileSync(filePath, `${serialized}\n`, "utf8");
}
|
|
43
|
+
|
|
44
|
+
/**
 * Walk an object/array along a list of keys and indices.
 * Returns `undefined` as soon as an intermediate node is null/undefined.
 *
 * @param {*} obj - root value to traverse
 * @param {Array<string|number>} pathSegments - keys/indices to follow
 * @returns {*} the value at the path, or undefined if unreachable
 */
export function getValueAtPath(obj, pathSegments) {
  let current = obj;
  for (const segment of pathSegments) {
    // == null matches both null and undefined intermediates.
    if (current == null) return undefined;
    current = current[segment];
  }
  return current;
}
|
|
50
|
+
|
|
51
|
+
/**
 * Set a value deep inside an object/array, creating intermediate containers
 * as it goes: an array when the NEXT segment is a number, a plain object
 * otherwise. Mutates `obj` in place.
 *
 * @param {object} obj - root container (mutated)
 * @param {Array<string|number>} pathSegments - keys/indices to follow
 * @param {*} value - value to assign at the final segment
 * @throws {Error} when pathSegments is empty
 */
export function setValueAtPath(obj, pathSegments, value) {
  if (pathSegments.length === 0) {
    throw new Error("pathSegments must not be empty");
  }
  let node = obj;
  for (let index = 0; index < pathSegments.length - 1; index++) {
    const key = pathSegments[index];
    const child = node[key];
    // Replace missing or non-container intermediates with a fresh container.
    if (child === null || typeof child !== "object") {
      node[key] = typeof pathSegments[index + 1] === "number" ? [] : {};
    }
    node = node[key];
  }
  node[pathSegments[pathSegments.length - 1]] = value;
}
|
|
70
|
+
|
|
71
|
+
/**
 * Render a segment path as a human-readable accessor string, e.g.
 * ["plugins", 0, "version"] -> "plugins[0].version". Numeric segments become
 * `[n]`, empty-string keys become `[""]`, and the leading dot is stripped.
 *
 * @param {Array<string|number>} pathSegments
 * @returns {string} display form of the path
 */
export function formatPathSegments(pathSegments) {
  const parts = [];
  for (const segment of pathSegments) {
    if (typeof segment === "number") {
      parts.push(`[${segment}]`);
    } else if (segment === "") {
      parts.push('[""]');
    } else {
      parts.push(`.${segment}`);
    }
  }
  return parts.join("").replace(/^\./, "");
}
|
|
83
|
+
|
|
84
|
+
/**
 * Check whether a value looks like a semver version string
 * (MAJOR.MINOR.PATCH with optional `-prerelease` and optional `+build`
 * metadata, per the semver 2.0 spec). Non-string inputs are coerced via
 * String(); null/undefined are rejected. Surrounding whitespace is ignored.
 *
 * Fix: the previous pattern rejected valid build metadata like
 * `1.2.3+build.5`; the change is backward compatible (everything accepted
 * before is still accepted).
 *
 * @param {*} value - candidate version
 * @returns {boolean} true when the value is a valid semver string
 */
export function isSemver(value) {
  return /^\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?$/.test(
    String(value || "").trim(),
  );
}
|
|
87
|
+
|
|
88
|
+
/**
 * Load and minimally validate the release version manifest.
 * The manifest must declare a `canonicalFile` and a `targets` array.
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT] - repository root
 * @param {string} [options.manifestPath] - explicit manifest path override
 * @returns {object} the parsed manifest
 * @throws {Error} when the manifest is missing required fields
 */
export function loadVersionManifest({
  rootDir = ROOT,
  manifestPath = join(rootDir, "scripts", "release", "version-manifest.json"),
} = {}) {
  const manifest = readJson(manifestPath);
  const shapeOk = Boolean(manifest.canonicalFile) && Array.isArray(manifest.targets);
  if (!shapeOk) {
    throw new Error(`Invalid version manifest: ${manifestPath}`);
  }
  return manifest;
}
|
|
98
|
+
|
|
99
|
+
/**
 * Read the canonical version (the single source of truth) declared by the
 * manifest, defaulting the lookup path to ["version"].
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT]
 * @param {object} [options.manifest] - preloaded manifest (loaded on demand)
 * @returns {string} the canonical semver string
 * @throws {Error} when the value is absent or not valid semver
 */
export function getCanonicalVersion({
  rootDir = ROOT,
  manifest = loadVersionManifest({ rootDir }),
} = {}) {
  const canonicalJson = readJson(join(rootDir, manifest.canonicalFile));
  const segments = manifest.canonicalPath || ["version"];
  const value = getValueAtPath(canonicalJson, segments);
  if (!isSemver(value)) {
    throw new Error(
      `Canonical version is missing or invalid at ${manifest.canonicalFile}`,
    );
  }
  return value;
}
|
|
116
|
+
|
|
117
|
+
/**
 * Inspect every manifest target and report, per declared path, whether the
 * version found on disk matches the expected canonical version. Targets whose
 * file is absent are reported with `missing: true` and `inSync: false` rather
 * than throwing, so callers can surface a complete report.
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT]
 * @param {object} [options.manifest] - preloaded manifest
 * @param {string} [options.expectedVersion] - version to compare against
 * @returns {Array<object>} one entry per (file, path) pair
 */
export function collectVersionTargets({
  rootDir = ROOT,
  manifest = loadVersionManifest({ rootDir }),
  expectedVersion = getCanonicalVersion({ rootDir, manifest }),
} = {}) {
  const results = [];
  for (const target of manifest.targets) {
    const absolutePath = join(rootDir, target.file);
    if (!existsSync(absolutePath)) {
      // File absent: every declared path is reported as missing/out-of-sync.
      for (const pathSegments of target.paths) {
        results.push({
          file: target.file,
          absolutePath,
          path: pathSegments,
          found: undefined,
          expected: expectedVersion,
          inSync: false,
          missing: true,
        });
      }
      continue;
    }
    const json = readJson(absolutePath);
    for (const pathSegments of target.paths) {
      const found = getValueAtPath(json, pathSegments);
      results.push({
        file: target.file,
        absolutePath,
        path: pathSegments,
        found,
        expected: expectedVersion,
        inSync: found === expectedVersion,
        missing: false,
      });
    }
  }
  return results;
}
|
|
150
|
+
|
|
151
|
+
/**
 * Rewrite every manifest target so all declared version paths equal the
 * expected canonical version. Files are only written when at least one path
 * actually changed. Unlike collectVersionTargets, a missing target file is a
 * hard error here, since syncing cannot create it meaningfully.
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT]
 * @param {object} [options.manifest] - preloaded manifest
 * @param {string} [options.expectedVersion] - version to write
 * @returns {string[]} relative paths of files that were modified
 * @throws {Error} when a target file does not exist
 */
export function syncVersionTargets({
  rootDir = ROOT,
  manifest = loadVersionManifest({ rootDir }),
  expectedVersion = getCanonicalVersion({ rootDir, manifest }),
} = {}) {
  // Set dedupes in case the manifest lists the same file twice.
  const touched = new Set();
  for (const target of manifest.targets) {
    const absolutePath = join(rootDir, target.file);
    if (!existsSync(absolutePath)) {
      throw new Error(`Cannot sync missing target: ${target.file}`);
    }
    const json = readJson(absolutePath);
    let dirty = false;
    for (const pathSegments of target.paths) {
      const current = getValueAtPath(json, pathSegments);
      if (current !== expectedVersion) {
        setValueAtPath(json, pathSegments, expectedVersion);
        dirty = true;
      }
    }
    if (dirty) {
      writeJson(absolutePath, json);
      touched.add(target.file);
    }
  }
  return [...touched];
}
|
|
177
|
+
|
|
178
|
+
/**
 * Verify (and optionally repair) that every manifest target carries the
 * canonical version. With `fix: true`, out-of-sync targets are rewritten and
 * the report is recollected afterwards so the returned state reflects disk.
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT]
 * @param {string} [options.manifestPath] - manifest location override
 * @param {string} [options.expectedVersion] - version override; falls back to
 *   the canonical version read from disk
 * @param {boolean} [options.fix=false] - rewrite mismatched targets
 * @returns {{ok: boolean, rootVersion: string, targets: Array<object>,
 *   mismatches: Array<object>, fixedFiles: string[]}}
 */
export function assertVersionSync({
  rootDir = ROOT,
  manifestPath = join(rootDir, "scripts", "release", "version-manifest.json"),
  expectedVersion,
  fix = false,
} = {}) {
  const manifest = loadVersionManifest({ rootDir, manifestPath });
  const rootVersion =
    expectedVersion || getCanonicalVersion({ rootDir, manifest });
  const collect = () =>
    collectVersionTargets({ rootDir, manifest, expectedVersion: rootVersion });

  let targets = collect();
  let fixedFiles = [];
  const hasMismatch = targets.some((target) => !target.inSync);

  if (fix && hasMismatch) {
    fixedFiles = syncVersionTargets({
      rootDir,
      manifest,
      expectedVersion: rootVersion,
    });
    // Re-read so the report reflects the post-fix state on disk.
    targets = collect();
  }

  return {
    ok: targets.every((target) => target.inSync),
    rootVersion,
    targets,
    mismatches: targets.filter((target) => !target.inSync),
    fixedFiles,
  };
}
|
|
216
|
+
|
|
217
|
+
/**
 * Report whether the git working tree is clean via `git status --porcelain`.
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT] - repository to inspect
 * @param {Function} [options.execFileSyncFn=execFileSync] - injectable for tests
 * @returns {{clean: boolean, output: string}} trimmed porcelain output
 */
export function ensureGitClean({
  rootDir = ROOT,
  execFileSyncFn = execFileSync,
} = {}) {
  const status = execFileSyncFn("git", ["status", "--porcelain"], {
    cwd: rootDir,
    encoding: "utf8",
  });
  const output = status.trim();
  return { clean: output === "", output };
}
|
|
227
|
+
|
|
228
|
+
/**
 * Return the most recent git tag reachable from HEAD, or null when no tag
 * exists (git describe exits non-zero in that case).
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT]
 * @param {Function} [options.execFileSyncFn=execFileSync] - injectable for tests
 * @returns {string|null} trimmed tag name, or null
 */
export function getPreviousTag({
  rootDir = ROOT,
  execFileSyncFn = execFileSync,
} = {}) {
  try {
    const tag = execFileSyncFn("git", ["describe", "--tags", "--abbrev=0"], {
      cwd: rootDir,
      encoding: "utf8",
    });
    return tag.trim();
  } catch {
    // No tags (or git unavailable): treat as "no previous release".
    return null;
  }
}
|
|
241
|
+
|
|
242
|
+
/**
 * Collect one-line commit summaries since `previousTag` (or the last 10
 * commits when no tag is given). Returns [] on any git failure.
 *
 * @param {object} [options]
 * @param {string} [options.rootDir=ROOT]
 * @param {string|null} [options.previousTag] - lower bound of the log range
 * @param {Function} [options.execFileSyncFn=execFileSync] - injectable for tests
 * @returns {string[]} non-empty `git log --oneline` lines
 */
export function getCommitSummaries({
  rootDir = ROOT,
  previousTag,
  execFileSyncFn = execFileSync,
} = {}) {
  const range = previousTag ? `${previousTag}..HEAD` : "HEAD~10..HEAD";
  try {
    const log = execFileSyncFn("git", ["log", "--oneline", range], {
      cwd: rootDir,
      encoding: "utf8",
    });
    return log.trim().split(/\r?\n/).filter(Boolean);
  } catch {
    // Shallow clones / empty repos make the range invalid; degrade to empty.
    return [];
  }
}
|
|
260
|
+
|
|
261
|
+
/**
 * Assemble markdown release notes for a version: a heading, the commit list
 * since the previous tag (or a fallback line), and install instructions.
 *
 * @param {object} [options]
 * @param {string} options.version - version being released
 * @param {string} [options.rootDir=ROOT]
 * @param {Function} [options.execFileSyncFn] - injectable for tests
 * @returns {string} markdown document
 */
export function buildReleaseNotes({
  version,
  rootDir = ROOT,
  execFileSyncFn = execFileSync,
} = {}) {
  const previousTag = getPreviousTag({ rootDir, execFileSyncFn });
  const commits = getCommitSummaries({ rootDir, previousTag, execFileSyncFn });

  const heading = previousTag
    ? `Changes since ${previousTag}`
    : "Recent changes (no prior tag found)";
  const body = commits.length
    ? commits.map((commit) => `- ${commit}`)
    : ["- No commit summary available"];

  const document = [
    `# Release v${version}`,
    "",
    `## ${heading}`,
    ...body,
    "",
    "## Install",
    `- npm: \`npm install -g triflux@${version}\``,
    "- Claude Code:",
    " - `/plugin marketplace add tellang/triflux`",
    " - `/plugin install triflux@tellang`",
    "",
  ];
  return document.join("\n");
}
|
|
293
|
+
|
|
294
|
+
/**
 * Run a command synchronously with stdio inherited from the parent process
 * (output streams straight to the terminal). Throws on non-zero exit.
 *
 * @param {string} command - executable name
 * @param {string[]} args - argument vector
 * @param {object} [options]
 * @param {string} [options.cwd=ROOT] - working directory
 * @param {Function} [options.execFileSyncFn=execFileSync] - injectable for tests
 */
export function runCommand(
  command,
  args,
  { cwd = ROOT, execFileSyncFn = execFileSync } = {},
) {
  const spawnOptions = { cwd, stdio: "inherit" };
  execFileSyncFn(command, args, spawnOptions);
}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { mkdirSync, writeFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
|
|
5
|
+
import {
|
|
6
|
+
assertVersionSync,
|
|
7
|
+
buildReleaseNotes,
|
|
8
|
+
ensureGitClean,
|
|
9
|
+
parseArgs,
|
|
10
|
+
ROOT,
|
|
11
|
+
runCommand,
|
|
12
|
+
} from "./lib.mjs";
|
|
13
|
+
|
|
14
|
+
/**
 * Pre-release gate: verify version sync and git cleanliness, optionally run
 * the verification commands (test/lint/pack), and write markdown release
 * notes under `.omx/plans/`.
 *
 * @param {object} [options]
 * @param {string} [options.version] - override; defaults to canonical version
 * @param {string} [options.rootDir=ROOT]
 * @param {boolean} [options.allowDirty=false] - tolerate a dirty working tree
 * @param {boolean} [options.dryRun=true] - when true, commands are listed but
 *   not executed (notes are still written)
 * @param {Function} [options.execFileSyncFn] - injectable for tests
 * @returns {Promise<object>} summary of what was (or would be) done
 * @throws {Error} when versions are out of sync, or tree is dirty without
 *   `allowDirty`
 */
export async function prepareRelease({
  version,
  rootDir = ROOT,
  allowDirty = false,
  dryRun = true,
  execFileSyncFn,
} = {}) {
  const sync = assertVersionSync({ rootDir });
  if (!sync.ok) {
    throw new Error(
      "Version sync failed. Run scripts/release/check-sync.mjs first.",
    );
  }

  const gitState = ensureGitClean({ rootDir, execFileSyncFn });
  if (!gitState.clean && !allowDirty) {
    throw new Error(
      "Working tree is dirty. Re-run with --allow-dirty only for scaffolding.",
    );
  }

  const releaseVersion = version || sync.rootVersion;
  const commands = [
    ["npm", ["test"]],
    ["npm", ["run", "lint"]],
    ["npm", ["pack", "--dry-run"]],
  ];

  if (!dryRun) {
    for (const [command, args] of commands) {
      runCommand(command, args, { cwd: rootDir, execFileSyncFn });
    }
  }

  const notes = buildReleaseNotes({
    version: releaseVersion,
    rootDir,
    execFileSyncFn,
  });
  const notesPath = join(
    rootDir,
    ".omx",
    "plans",
    `release-notes-v${releaseVersion}.md`,
  );
  // Fix: `.omx/plans/` may not exist on a fresh checkout; create it first so
  // writeFileSync does not throw ENOENT.
  mkdirSync(dirname(notesPath), { recursive: true });
  writeFileSync(notesPath, notes, "utf8");

  return {
    ok: true,
    version: releaseVersion,
    clean: gitState.clean,
    allowDirty,
    dryRun,
    commands: commands.map(([command, args]) => [command, ...args].join(" ")),
    releaseNotesPath: notesPath,
  };
}
|
|
71
|
+
|
|
72
|
+
// CLI entry point: only runs when this module is executed directly (not when
// imported). Parses flags and prints the result as JSON.
if (process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1]) {
  const args = parseArgs(process.argv.slice(2));
  const result = await prepareRelease({
    version: args.version,
    rootDir: args.root,
    allowDirty: Boolean(args["allow-dirty"]),
    dryRun: !args.execute,
  });
  // Fix: the previous `if (args.json)` / `else` branches were byte-identical
  // (both printed JSON); collapsed to one unconditional log with the same
  // observable behavior.
  console.log(JSON.stringify(result, null, 2));
}
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { assertVersionSync, parseArgs, ROOT, runCommand } from "./lib.mjs";
|
|
5
|
+
|
|
6
|
+
/**
 * Publish a release: npm publish under the channel's dist-tag, git tag and
 * push, and (optionally) a GitHub release pointing at the notes file written
 * by prepareRelease. With `dryRun: true` the step list is returned without
 * executing anything.
 *
 * @param {object} [options]
 * @param {string} [options.version] - override; defaults to canonical version
 * @param {string} [options.rootDir=ROOT]
 * @param {string} [options.channel="stable"] - "canary" maps to the npm
 *   `canary` tag; anything else maps to `latest`
 * @param {boolean} [options.dryRun=true]
 * @param {boolean} [options.createGithubRelease=true]
 * @param {Function} [options.execFileSyncFn] - injectable for tests
 * @returns {Promise<object>} summary including the planned/executed steps
 * @throws {Error} when version metadata is out of sync
 */
export async function publishRelease({
  version,
  rootDir = ROOT,
  channel = "stable",
  dryRun = true,
  createGithubRelease = true,
  execFileSyncFn,
} = {}) {
  const sync = assertVersionSync({ rootDir });
  if (!sync.ok) {
    throw new Error("Version sync failed. Refusing to publish.");
  }

  const releaseVersion = version || sync.rootVersion;
  const npmTag = channel === "canary" ? "canary" : "latest";
  const notesPath = join(
    rootDir,
    ".omx",
    "plans",
    `release-notes-v${releaseVersion}.md`,
  );

  const steps = [
    { label: "npm publish", command: "npm", args: ["publish", "--tag", npmTag] },
    { label: "git tag", command: "git", args: ["tag", `v${releaseVersion}`] },
    { label: "git push", command: "git", args: ["push", "origin", "HEAD", "--tags"] },
  ];
  if (createGithubRelease) {
    steps.push({
      label: "gh release create",
      command: "gh",
      args: [
        "release",
        "create",
        `v${releaseVersion}`,
        "--title",
        `v${releaseVersion}`,
        "--notes-file",
        notesPath,
      ],
    });
  }

  if (!dryRun) {
    for (const step of steps) {
      runCommand(step.command, step.args, { cwd: rootDir, execFileSyncFn });
    }
  }

  return {
    ok: true,
    version: releaseVersion,
    channel,
    npmTag,
    dryRun,
    notesPath,
    steps: steps.map(({ label, command, args }) => ({
      label,
      command: [command, ...args].join(" "),
    })),
  };
}
|
|
76
|
+
|
|
77
|
+
// CLI entry point: only runs when this module is executed directly. Parses
// flags, runs the publish flow (dry-run unless --execute), prints JSON.
if (process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1]) {
  const cliArgs = parseArgs(process.argv.slice(2));
  const outcome = await publishRelease({
    version: cliArgs.version,
    rootDir: cliArgs.root,
    channel: cliArgs.channel || "stable",
    dryRun: !cliArgs.execute,
    createGithubRelease: !cliArgs["skip-gh-release"],
  });
  console.log(JSON.stringify(outcome, null, 2));
}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { execFileSync } from "node:child_process";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { assertVersionSync, parseArgs, ROOT } from "./lib.mjs";
|
|
5
|
+
|
|
6
|
+
/**
 * Post-publish verification: confirm repo metadata is in sync, then (in live
 * mode) check that the npm registry and GitHub release reflect the version.
 * In dry-run mode the external checks are listed with `ok: null`.
 *
 * @param {object} [options]
 * @param {string} [options.version] - override; defaults to canonical version
 * @param {string} [options.rootDir=ROOT]
 * @param {boolean} [options.dryRun=true]
 * @param {Function} [options.execFileSyncFn=execFileSync] - injectable for tests
 * @returns {Promise<{ok: boolean, version: string, dryRun: boolean,
 *   checks: Array<object>}>}
 * @throws {Error} when version metadata is out of sync
 */
export async function verifyRelease({
  version,
  rootDir = ROOT,
  dryRun = true,
  execFileSyncFn = execFileSync,
} = {}) {
  const sync = assertVersionSync({ rootDir });
  if (!sync.ok) {
    throw new Error("Version sync failed. Fix metadata before verify.");
  }
  const releaseVersion = version || sync.rootVersion;
  const checks = [
    {
      name: "version-sync",
      ok: true,
      detail: `repo metadata matches ${releaseVersion}`,
    },
  ];

  if (!dryRun) {
    // Fix: previously a failing `npm view` or `gh release view` (missing
    // binary, network error, release not yet visible) crashed the whole
    // verification; record it as a failed check so the report completes.
    try {
      const npmVersion = execFileSyncFn("npm", ["view", "triflux", "version"], {
        cwd: rootDir,
        encoding: "utf8",
      }).trim();
      checks.push({
        name: "npm-view",
        ok: npmVersion === releaseVersion,
        detail: npmVersion,
      });
    } catch (err) {
      checks.push({
        name: "npm-view",
        ok: false,
        detail: String(err?.message || err),
      });
    }

    try {
      const ghRelease = execFileSyncFn(
        "gh",
        ["release", "view", `v${releaseVersion}`, "--json", "tagName"],
        {
          cwd: rootDir,
          encoding: "utf8",
        },
      ).trim();
      checks.push({
        name: "github-release",
        ok: ghRelease.length > 0,
        detail: ghRelease,
      });
    } catch (err) {
      checks.push({
        name: "github-release",
        ok: false,
        detail: String(err?.message || err),
      });
    }
  } else {
    checks.push(
      {
        name: "npm-view",
        ok: null,
        detail: `would run: npm view triflux version`,
      },
      {
        name: "github-release",
        ok: null,
        detail: `would run: gh release view v${releaseVersion} --json tagName`,
      },
    );
  }

  return {
    ok: checks.every((check) => check.ok !== false),
    version: releaseVersion,
    dryRun,
    checks,
  };
}
|
|
71
|
+
|
|
72
|
+
// CLI entry point: only runs when this module is executed directly. Prints
// the verification report as JSON and sets the exit code from its `ok` flag.
if (process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1]) {
  const cliArgs = parseArgs(process.argv.slice(2));
  const outcome = await verifyRelease({
    version: cliArgs.version,
    rootDir: cliArgs.root,
    dryRun: !cliArgs.execute,
  });
  console.log(JSON.stringify(outcome, null, 2));
  process.exitCode = outcome.ok ? 0 : 1;
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
{
|
|
2
|
+
"canonicalFile": "package.json",
|
|
3
|
+
"canonicalPath": ["version"],
|
|
4
|
+
"targets": [
|
|
5
|
+
{
|
|
6
|
+
"file": "package.json",
|
|
7
|
+
"paths": [["version"]]
|
|
8
|
+
},
|
|
9
|
+
{
|
|
10
|
+
"file": "packages/triflux/package.json",
|
|
11
|
+
"paths": [["version"]]
|
|
12
|
+
},
|
|
13
|
+
{
|
|
14
|
+
"file": ".claude-plugin/plugin.json",
|
|
15
|
+
"paths": [["version"]]
|
|
16
|
+
},
|
|
17
|
+
{
|
|
18
|
+
"file": ".claude-plugin/marketplace.json",
|
|
19
|
+
"paths": [["version"], ["plugins", 0, "version"]]
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"file": "package-lock.json",
|
|
23
|
+
"paths": [["version"], ["packages", "", "version"]]
|
|
24
|
+
}
|
|
25
|
+
]
|
|
26
|
+
}
|
package/scripts/remote-spawn.mjs
CHANGED
|
@@ -591,7 +591,7 @@ function rewritePromptPaths(prompt, stagedFiles) {
|
|
|
591
591
|
return rewritten;
|
|
592
592
|
}
|
|
593
593
|
|
|
594
|
-
function spawnLocalFallback(args, claudePath, prompt) {
|
|
594
|
+
async function spawnLocalFallback(args, claudePath, prompt) {
|
|
595
595
|
const dir = args.dir ? resolve(args.dir) : process.cwd();
|
|
596
596
|
|
|
597
597
|
if (!IS_WINDOWS_LOCAL) {
|
|
@@ -626,7 +626,7 @@ function spawnLocalFallback(args, claudePath, prompt) {
|
|
|
626
626
|
}
|
|
627
627
|
}
|
|
628
628
|
|
|
629
|
-
function spawnRemoteFallback(args, promptContext) {
|
|
629
|
+
async function spawnRemoteFallback(args, promptContext) {
|
|
630
630
|
const { host } = args;
|
|
631
631
|
if (!host) {
|
|
632
632
|
console.error("--host required for remote spawn");
|
|
@@ -999,7 +999,7 @@ function listSpawnSessions() {
|
|
|
999
999
|
}
|
|
1000
1000
|
}
|
|
1001
1001
|
|
|
1002
|
-
function openAttachTab(sessionName, title = null) {
|
|
1002
|
+
async function openAttachTab(sessionName, title = null) {
|
|
1003
1003
|
if (IS_WINDOWS_LOCAL) {
|
|
1004
1004
|
const wtArgs = title
|
|
1005
1005
|
try {
|
package/scripts/setup.mjs
CHANGED
|
@@ -64,6 +64,23 @@ const REQUIRED_CODEX_PROFILES = [
|
|
|
64
64
|
|
|
65
65
|
const HUD_SYNC_EXCLUDES = new Set(["omc-hud.mjs", "omc-hud.mjs.bak"]);
|
|
66
66
|
|
|
67
|
+
/**
 * Auto-scan scripts/lib/*.mjs.
 * Discovers lib files by listing the directory instead of maintaining a
 * manual list, so newly added lib files are never missed by the sync step.
 * Returns sync entries ({src, dst, label}) sorted by filename; returns []
 * when the lib directory does not exist.
 */
function scanLibFiles(pluginRoot, claudeDir) {
  const libDir = join(pluginRoot, "scripts", "lib");
  if (!existsSync(libDir)) return [];
  const entries = readdirSync(libDir).sort();
  const modules = entries.filter((name) => name.endsWith(".mjs"));
  return modules.map((name) => ({
    src: join(libDir, name),
    dst: join(claudeDir, "scripts", "lib", name),
    label: `lib/${name}`,
  }));
}
|
|
83
|
+
|
|
67
84
|
/**
|
|
68
85
|
* hub/workers/*.mjs + hub/ 루트의 worker 의존성 파일을 자동 스캔.
|
|
69
86
|
* 수동 리스트 대신 glob으로 탐색하여 파일 추가 시 sync 누락 방지.
|
|
@@ -173,21 +190,7 @@ const SYNC_MAP = [
|
|
|
173
190
|
dst: join(CLAUDE_DIR, "scripts", "tfx-batch-stats.mjs"),
|
|
174
191
|
label: "tfx-batch-stats.mjs",
|
|
175
192
|
},
|
|
176
|
-
|
|
177
|
-
src: join(PLUGIN_ROOT, "scripts", "lib", "mcp-filter.mjs"),
|
|
178
|
-
dst: join(CLAUDE_DIR, "scripts", "lib", "mcp-filter.mjs"),
|
|
179
|
-
label: "lib/mcp-filter.mjs",
|
|
180
|
-
},
|
|
181
|
-
{
|
|
182
|
-
src: join(PLUGIN_ROOT, "scripts", "lib", "mcp-server-catalog.mjs"),
|
|
183
|
-
dst: join(CLAUDE_DIR, "scripts", "lib", "mcp-server-catalog.mjs"),
|
|
184
|
-
label: "lib/mcp-server-catalog.mjs",
|
|
185
|
-
},
|
|
186
|
-
{
|
|
187
|
-
src: join(PLUGIN_ROOT, "scripts", "lib", "keyword-rules.mjs"),
|
|
188
|
-
dst: join(CLAUDE_DIR, "scripts", "lib", "keyword-rules.mjs"),
|
|
189
|
-
label: "lib/keyword-rules.mjs",
|
|
190
|
-
},
|
|
193
|
+
...scanLibFiles(PLUGIN_ROOT, CLAUDE_DIR),
|
|
191
194
|
{
|
|
192
195
|
src: join(PLUGIN_ROOT, "hub", "team", "agent-map.json"),
|
|
193
196
|
dst: join(CLAUDE_DIR, "hub", "team", "agent-map.json"),
|