claude-code-workflow 7.2.24 → 7.2.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.ccw/workflows/cli-tools-usage.md +123 -521
- package/.claude/CLAUDE.md +20 -0
- package/.claude/agents/action-planning-agent.md +6 -0
- package/.claude/agents/cli-explore-agent.md +63 -77
- package/.claude/agents/cli-lite-planning-agent.md +10 -11
- package/.claude/agents/issue-plan-agent.md +7 -2
- package/.claude/commands/workflow/spec/setup.md +1 -1
- package/.claude/skills/brainstorm/SKILL.md +408 -408
- package/.claude/skills/review-cycle/SKILL.md +132 -132
- package/.claude/skills/review-cycle/phases/review-module.md +4 -4
- package/.claude/skills/review-cycle/phases/review-session.md +4 -4
- package/.claude/skills/spec-generator/SKILL.md +1 -1
- package/.claude/skills/team-designer/phases/02-scaffold-generation.md +1 -1
- package/.claude/skills/team-lifecycle-v4/SKILL.md +1 -1
- package/.claude/skills/team-review/SKILL.md +1 -1
- package/.claude/skills/team-ultra-analyze/SKILL.md +1 -1
- package/.claude/skills/workflow-multi-cli-plan/SKILL.md +3 -3
- package/.claude/skills/workflow-plan/SKILL.md +1 -1
- package/.claude/skills/workflow-plan/phases/03-conflict-resolution.md +2 -2
- package/.claude/skills/workflow-plan/phases/05-plan-verify.md +2 -2
- package/.claude/skills/workflow-tdd-plan/phases/02-context-gathering.md +3 -3
- package/.claude/skills/workflow-tdd-plan/phases/04-conflict-resolution.md +2 -2
- package/.claude/skills/workflow-test-fix/SKILL.md +1 -1
- package/.claude/skills/workflow-test-fix/phases/02-test-context-gather.md +2 -2
- package/.codex/AGENTS.md +16 -0
- package/.codex/skills/analyze-with-file/SKILL.md +966 -966
- package/.codex/skills/issue-discover/SKILL.md +361 -361
- package/.codex/skills/review-cycle/SKILL.md +1 -1
- package/.codex/skills/roadmap-with-file/SKILL.md +901 -901
- package/.codex/skills/spec-generator/SKILL.md +425 -425
- package/.codex/skills/spec-setup/SKILL.md +669 -669
- package/.codex/skills/team-designer/phases/02-scaffold-generation.md +1 -1
- package/.codex/skills/workflow-test-fix-cycle/SKILL.md +402 -402
- package/ccw/dist/tools/index.d.ts.map +1 -1
- package/ccw/dist/tools/index.js +2 -0
- package/ccw/dist/tools/index.js.map +1 -1
- package/ccw/dist/tools/json-builder.d.ts +17 -0
- package/ccw/dist/tools/json-builder.d.ts.map +1 -0
- package/ccw/dist/tools/json-builder.js +746 -0
- package/ccw/dist/tools/json-builder.js.map +1 -0
- package/ccw/dist/tools/schema-registry.d.ts +71 -0
- package/ccw/dist/tools/schema-registry.d.ts.map +1 -0
- package/ccw/dist/tools/schema-registry.js +136 -0
- package/ccw/dist/tools/schema-registry.js.map +1 -0
- package/package.json +1 -1
- package/.claude/skills/team-iterdev/SKILL.md +0 -127
- package/.claude/skills/team-iterdev/roles/architect/role.md +0 -65
- package/.claude/skills/team-iterdev/roles/coordinator/commands/analyze.md +0 -62
- package/.claude/skills/team-iterdev/roles/coordinator/commands/dispatch.md +0 -234
- package/.claude/skills/team-iterdev/roles/coordinator/commands/monitor.md +0 -182
- package/.claude/skills/team-iterdev/roles/coordinator/role.md +0 -153
- package/.claude/skills/team-iterdev/roles/developer/role.md +0 -74
- package/.claude/skills/team-iterdev/roles/reviewer/role.md +0 -66
- package/.claude/skills/team-iterdev/roles/tester/role.md +0 -88
- package/.claude/skills/team-iterdev/specs/pipelines.md +0 -94
- package/.claude/skills/team-iterdev/specs/team-config.json +0 -172
- package/.codex/prompts/prep-cycle.md +0 -416
- package/.codex/prompts/prep-plan.md +0 -371
- package/.codex/skills/team-iterdev/SKILL.md +0 -219
- package/.codex/skills/team-iterdev/roles/architect/role.md +0 -65
- package/.codex/skills/team-iterdev/roles/coordinator/commands/analyze.md +0 -62
- package/.codex/skills/team-iterdev/roles/coordinator/commands/dispatch.md +0 -187
- package/.codex/skills/team-iterdev/roles/coordinator/commands/monitor.md +0 -227
- package/.codex/skills/team-iterdev/roles/coordinator/role.md +0 -193
- package/.codex/skills/team-iterdev/roles/developer/role.md +0 -74
- package/.codex/skills/team-iterdev/roles/reviewer/role.md +0 -66
- package/.codex/skills/team-iterdev/roles/tester/role.md +0 -88
- package/.codex/skills/team-iterdev/specs/pipelines.md +0 -94
- package/.codex/skills/team-iterdev/specs/team-config.json +0 -172
|
@@ -0,0 +1,746 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* JSON Builder Tool - Schema-aware structured JSON construction/validation.
|
|
3
|
+
*
|
|
4
|
+
* Commands:
|
|
5
|
+
* init — Create empty schema-compliant JSON skeleton
|
|
6
|
+
* set — Set/append fields with instant validation
|
|
7
|
+
* validate — Full schema + semantic validation
|
|
8
|
+
* merge — Merge multiple same-schema JSONs
|
|
9
|
+
* info — Get schema summary (replaces agent reading raw schema)
|
|
10
|
+
*
|
|
11
|
+
* Replaces agent hand-writing JSON + self-validation with tool-assisted
|
|
12
|
+
* incremental build + automatic validation.
|
|
13
|
+
*/
|
|
14
|
+
import { z } from 'zod';
|
|
15
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
|
|
16
|
+
import { dirname } from 'path';
|
|
17
|
+
import { validatePath } from '../utils/path-validator.js';
|
|
18
|
+
import { loadSchema, getSchemaInfo, listSchemas, } from './schema-registry.js';
|
|
19
|
+
// ─── Params ──────────────────────────────────────────────────
|
|
20
|
+
// Zod shape of a single `set` operation: a path expression (see parsePath)
// plus the value to write at that location.
const OpSchema = z.object({
    path: z.string().min(1),
    value: z.unknown(),
});
// Zod shape of the tool's input. Only `cmd` is mandatory here; each cmd*
// implementation re-checks the subset of optional params it actually needs.
const ParamsSchema = z.object({
    cmd: z.enum(['init', 'set', 'validate', 'merge', 'info']),
    schema: z.string().optional(),
    target: z.string().optional(),
    output: z.string().optional(),
    ops: z.array(OpSchema).optional(),
    sources: z.array(z.string()).optional(),
    strategy: z.string().optional(),
});
|
|
33
|
+
// ─── Tool Schema ─────────────────────────────────────────────
|
|
34
|
+
// MCP-style tool descriptor: name, human-readable command reference, and the
// JSON-Schema `inputSchema` advertised to the calling agent. The per-command
// required params listed in `description` are enforced inside each cmd* handler,
// not by `inputSchema` (which only requires `cmd`).
export const schema = {
    name: 'json_builder',
    description: `Schema-aware JSON builder with validation. Commands:
init: Create skeleton from schema. Params: schema (string), output (string)
set: Set/append fields. Params: target (string), ops [{path, value}...]
validate: Full validation. Params: target (string), schema? (string)
merge: Merge JSONs. Params: sources (string[]), output (string), strategy? (string)
info: Schema summary. Params: schema (string)`,
    inputSchema: {
        type: 'object',
        properties: {
            cmd: { type: 'string', description: 'Command: init|set|validate|merge|info' },
            schema: { type: 'string', description: 'Schema ID (e.g. explore, task, diagnosis)' },
            target: { type: 'string', description: 'Target JSON file path' },
            output: { type: 'string', description: 'Output file path' },
            ops: {
                type: 'array',
                description: 'Set operations: [{path: "field.sub" or "arr[+]", value: ...}]',
            },
            sources: { type: 'array', description: 'Source files for merge' },
            strategy: { type: 'string', description: 'Merge strategy: dedup_by_path (default)' },
        },
        required: ['cmd'],
    },
};
|
|
59
|
+
// ─── Handler ─────────────────────────────────────────────────
|
|
60
|
+
/**
 * Tool entry point: validate `params` with zod, then dispatch to the matching
 * cmd* implementation. Any exception thrown by a command (file-system errors,
 * JSON.parse failures, schema-registry lookups) is converted into a
 * `{ success: false, error }` result, so callers never see a rejection.
 */
export async function handler(params) {
    const parsed = ParamsSchema.safeParse(params);
    if (!parsed.success) {
        return { success: false, error: `Invalid params: ${parsed.error.message}` };
    }
    const p = parsed.data;
    // Dispatch table keyed by command name. `p.cmd` is already enum-checked by
    // zod, so the missing-command branch is defensive only.
    const commands = {
        init: cmdInit,
        set: cmdSet,
        validate: cmdValidate,
        merge: cmdMerge,
        info: cmdInfo,
    };
    try {
        const impl = commands[p.cmd];
        if (!impl) {
            return { success: false, error: `Unknown command: ${p.cmd}` };
        }
        return await impl(p);
    }
    catch (err) {
        return { success: false, error: err.message };
    }
}
|
|
81
|
+
// ─── init ────────────────────────────────────────────────────
|
|
82
|
+
/**
 * `init` command — write an empty, schema-compliant JSON skeleton to disk.
 *
 * Requires `p.schema` (registry schema ID) and `p.output` (file path).
 * Loads the JSON Schema from the registry, builds a skeleton containing only
 * required fields (see buildSkeleton), and writes it pretty-printed.
 * Returns the schema's required/array field lists so the agent knows what to
 * fill in next. Throws (caught by handler) if the schema ID is unknown.
 */
async function cmdInit(p) {
    if (!p.schema)
        return { success: false, error: 'schema is required for init' };
    if (!p.output)
        return { success: false, error: 'output is required for init' };
    const jsonSchema = loadSchema(p.schema);
    const skeleton = buildSkeleton(jsonSchema);
    // validatePath sanitizes/authorizes the output location before writing.
    const outputPath = await validatePath(p.output);
    // ensureDir is defined elsewhere in this file; presumably creates the
    // parent directory via mkdirSync/dirname imported above — TODO confirm.
    ensureDir(outputPath);
    const content = JSON.stringify(skeleton, null, 2);
    writeFileSync(outputPath, content, 'utf-8');
    const info = getSchemaInfo(p.schema);
    return {
        success: true,
        result: {
            path: outputPath,
            schema: p.schema,
            requiredFields: info.requiredFields,
            arrayFields: info.arrayFields,
            message: `Initialized ${p.schema} skeleton (${info.requiredFields.length} required fields)`,
        },
    };
}
|
|
105
|
+
/**
 * Build a JSON skeleton from schema — fills required fields with
 * type-appropriate defaults. Annotation/housekeeping properties (comment
 * keys, `$`-prefixed keys, deprecation markers, producer metadata) are
 * excluded even if the schema marks them required; every other optional
 * property is simply omitted.
 */
function buildSkeleton(schema) {
    const EXCLUDED = new Set([
        'deprecated', 'deprecated_message', 'migration_guide',
        '_field_usage_by_producer', '_directory_convention',
    ]);
    const required = new Set(schema.required || []);
    const skeleton = {};
    for (const [name, prop] of Object.entries(schema.properties || {})) {
        const isAnnotation = name.startsWith('_comment')
            || name.startsWith('$')
            || EXCLUDED.has(name);
        // Skeleton carries required, non-annotation fields only.
        if (isAnnotation || !required.has(name))
            continue;
        skeleton[name] = getDefaultValue(prop);
    }
    return skeleton;
}
|
|
126
|
+
/**
 * Pick a type-appropriate placeholder value for one schema property.
 * An explicit schema `default` always wins. For union types only the first
 * member is considered. Objects recurse into their own required properties;
 * unknown/absent types yield null.
 */
function getDefaultValue(prop) {
    if (prop.default !== undefined)
        return prop.default;
    const kind = Array.isArray(prop.type) ? prop.type[0] : prop.type;
    if (kind === 'string')
        return '';
    if (kind === 'number' || kind === 'integer')
        return 0;
    if (kind === 'boolean')
        return false;
    if (kind === 'array')
        return [];
    if (kind === 'object') {
        if (!prop.properties)
            return {};
        const requiredKeys = new Set(prop.required || []);
        const nested = {};
        for (const [childName, childProp] of Object.entries(prop.properties)) {
            if (requiredKeys.has(childName)) {
                nested[childName] = getDefaultValue(childProp);
            }
        }
        return nested;
    }
    return null;
}
|
|
151
|
+
// ─── set ─────────────────────────────────────────────────────
|
|
152
|
+
/**
 * `set` command — apply a batch of path/value operations to an existing JSON
 * file, validating each value against the detected schema.
 *
 * Requires `p.target` (existing file) and a non-empty `p.ops`. Partial success
 * is allowed: the file is written back as long as at least one op applied;
 * only an all-ops-failed batch returns an error. Per-op errors and warnings
 * are reported in the result either way.
 * NOTE: JSON.parse is not guarded here — a malformed target file throws and
 * is surfaced by handler's catch as `{ success: false, error }`.
 */
async function cmdSet(p) {
    if (!p.target)
        return { success: false, error: 'target is required for set' };
    if (!p.ops || p.ops.length === 0)
        return { success: false, error: 'ops is required for set' };
    const targetPath = await validatePath(p.target);
    if (!existsSync(targetPath)) {
        return { success: false, error: `Target file not found: ${targetPath}` };
    }
    const raw = readFileSync(targetPath, 'utf-8');
    const doc = JSON.parse(raw);
    // Detect schema from doc._metadata?.source or from file name
    // (detectSchema is defined elsewhere in this file — behavior per its impl).
    const schemaId = p.schema || detectSchema(doc, targetPath);
    const errors = [];
    const warnings = [];
    let applied = 0;
    for (const op of p.ops) {
        const result = applyOp(doc, op.path, op.value, schemaId);
        if (result.error) {
            errors.push(`${op.path}: ${result.error}`);
        }
        else {
            applied++;
            if (result.warnings)
                warnings.push(...result.warnings);
        }
    }
    if (errors.length > 0 && applied === 0) {
        return { success: false, error: `All ops failed: ${errors.join('; ')}` };
    }
    // Write back
    writeFileSync(targetPath, JSON.stringify(doc, null, 2), 'utf-8');
    return {
        success: true,
        result: { applied, errors, warnings },
    };
}
|
|
189
|
+
/**
 * Apply a single set-operation to `doc` in place.
 *
 * Path grammar (see parsePath): dot-separated keys, with bracket selectors on
 * the final (or intermediate) segment — `arr[+]` append, `arr[0]` index,
 * `arr[?key=val]` replace-first-match.
 * Returns `{ error }` on failure, otherwise `{ warnings? }`.
 */
function applyOp(doc, path, value, schemaId) {
    const warnings = [];
    // Handle "auto" values
    // The literal string "auto" is only expanded for *timestamp paths;
    // any other "auto" is written through verbatim.
    if (value === 'auto') {
        if (path.endsWith('timestamp')) {
            value = new Date().toISOString();
        }
    }
    // Parse path: "field.sub", "arr[+]", "arr[0]", "arr[?key=val]"
    const segments = parsePath(path);
    if (!segments || segments.length === 0) {
        return { error: 'Invalid path syntax' };
    }
    // Validate value against schema if schema is known
    if (schemaId) {
        const validationResult = validateFieldValue(schemaId, path, value);
        if (validationResult.error)
            return { error: validationResult.error };
        if (validationResult.warnings)
            warnings.push(...validationResult.warnings);
    }
    // Navigate to parent and set.
    // NOTE(review): this loop only handles 'key' and 'index' segments in
    // intermediate positions; an intermediate 'append' or 'query' segment
    // (e.g. "arr[+].name") is silently skipped, leaving `current` unchanged —
    // confirm whether such paths are meant to be supported.
    let current = doc;
    for (let i = 0; i < segments.length - 1; i++) {
        const seg = segments[i];
        if (seg.type === 'key') {
            if (typeof current !== 'object' || current === null) {
                return { error: `Cannot navigate into non-object at "${seg.value}"` };
            }
            const obj = current;
            if (obj[seg.value] === undefined) {
                // Auto-create intermediate objects/arrays
                const nextSeg = segments[i + 1];
                obj[seg.value] = nextSeg.type === 'append' || nextSeg.type === 'index' ? [] : {};
            }
            current = obj[seg.value];
        }
        else if (seg.type === 'index') {
            if (!Array.isArray(current))
                return { error: `Not an array at index ${seg.value}` };
            // NOTE(review): an out-of-range index yields undefined here, which
            // is then reported as a non-object/non-array error on a later
            // segment rather than as an explicit index error.
            current = current[Number(seg.value)];
        }
    }
    // Apply final segment
    const last = segments[segments.length - 1];
    if (last.type === 'key') {
        if (typeof current !== 'object' || current === null || Array.isArray(current)) {
            return { error: `Cannot set key "${last.value}" on non-object` };
        }
        current[last.value] = value;
    }
    else if (last.type === 'append') {
        if (!Array.isArray(current)) {
            return { error: `Cannot append to non-array` };
        }
        current.push(value);
    }
    else if (last.type === 'index') {
        if (!Array.isArray(current)) {
            return { error: `Cannot index into non-array` };
        }
        current[Number(last.value)] = value;
    }
    else if (last.type === 'query') {
        if (!Array.isArray(current)) {
            return { error: `Cannot query non-array` };
        }
        const { key, val } = last;
        // Replace the FIRST object whose `key` strictly equals `val`.
        // Query values are always strings (from the path), so numeric fields
        // will never match — presumably intentional; verify against callers.
        const idx = current.findIndex((item) => typeof item === 'object' && item !== null && item[key] === val);
        if (idx === -1)
            return { error: `No item found where ${key}=${val}` };
        current[idx] = value;
    }
    return { warnings: warnings.length > 0 ? warnings : undefined };
}
|
|
264
|
+
/**
 * Parse a path expression into navigation segments for applyOp.
 *
 * Grammar: dot-separated keys; a key may carry one bracket selector —
 *   `arr[+]`       → append            ({type:'append'})
 *   `arr[3]`       → numeric index     ({type:'index', value:'3'})
 *   `arr[?k=v]`    → first-match query ({type:'query', key, val})
 * Dots inside brackets do not split (e.g. "arr[?path=a.b].x").
 *
 * Returns the segment list, or null for an empty/invalid path.
 * Fixes vs. previous version:
 *  - an unrecognized bracket form (e.g. "arr[abc]") used to be silently
 *    dropped, making the op target the array itself; it now returns null,
 *    which applyOp reports as "Invalid path syntax";
 *  - query values containing '=' were truncated by split's limit argument;
 *    we now split on the first '=' only and keep the rest in `val`.
 */
function parsePath(path) {
    const segments = [];
    // Split by '.' but respect brackets
    const parts = path.split(/\.(?![^\[]*\])/);
    for (const part of parts) {
        const bracketMatch = part.match(/^(\w+)\[(.+)\]$/);
        if (bracketMatch) {
            const [, field, bracket] = bracketMatch;
            segments.push({ type: 'key', value: field });
            if (bracket === '+') {
                segments.push({ type: 'append', value: '+' });
            }
            else if (/^\d+$/.test(bracket)) {
                segments.push({ type: 'index', value: bracket });
            }
            else if (bracket.includes('=')) {
                // Split on the FIRST '=' only so values may themselves contain '='.
                const eq = bracket.indexOf('=');
                const key = bracket.slice(0, eq);
                const val = bracket.slice(eq + 1);
                segments.push({ type: 'query', value: bracket, key: key.replace('?', ''), val });
            }
            else {
                // Unknown bracket selector — reject instead of silently ignoring.
                return null;
            }
        }
        else {
            segments.push({ type: 'key', value: part });
        }
    }
    return segments.length > 0 ? segments : null;
}
|
|
290
|
+
// ─── validate ────────────────────────────────────────────────
|
|
291
|
+
/**
 * `validate` command — full two-layer validation of a JSON file.
 *
 * Layer 1 checks structure against the registry JSON Schema (required fields,
 * types, enums, string/number/array constraints). Layer 2 runs semantic
 * quality checks (generic rationales, cyclic task dependencies).
 * Requires `p.target`; `p.schema` overrides auto-detection.
 * Always returns `success: true` when the file is readable JSON — schema
 * violations are reported via `result.valid` / `result.errors`, not as a
 * tool-level failure.
 */
async function cmdValidate(p) {
    if (!p.target)
        return { success: false, error: 'target is required for validate' };
    const targetPath = await validatePath(p.target);
    if (!existsSync(targetPath)) {
        return { success: false, error: `Target file not found: ${targetPath}` };
    }
    const raw = readFileSync(targetPath, 'utf-8');
    let doc;
    try {
        doc = JSON.parse(raw);
    }
    catch {
        return { success: false, error: 'Invalid JSON in target file' };
    }
    const schemaId = p.schema || detectSchema(doc, targetPath);
    if (!schemaId) {
        return { success: false, error: 'Cannot detect schema. Provide schema param.' };
    }
    const jsonSchema = loadSchema(schemaId);
    const errors = [];
    const warnings = [];
    // Layer 1: JSON Schema structural validation
    validateObject(doc, jsonSchema, '', errors, warnings);
    // Layer 2: Semantic quality validation
    validateSemantics(doc, schemaId, errors, warnings);
    // Summary stats: top-level field count (comment keys excluded) plus the
    // length of every top-level array.
    const stats = {
        fields: Object.keys(doc).filter(k => !k.startsWith('_comment')).length,
        schema: schemaId,
        arrayItems: countArrayItems(doc, jsonSchema),
    };
    return {
        success: true,
        result: {
            valid: errors.length === 0,
            errors,
            warnings,
            stats,
        },
    };
}
|
|
332
|
+
/**
 * Structurally validate one object against a (sub)schema.
 *
 * Pass 1 checks every required field for presence, non-empty string
 * ('error_message' is exempt from the empty-string rule), and minItems on
 * empty arrays. Pass 2 validates each present field's value against its
 * property schema; comment/`$` keys and undeclared properties are ignored
 * (additional properties are allowed). Findings are appended to `errors` /
 * `warnings` with `prefix` prepended to field names.
 */
function validateObject(obj, schema, prefix, errors, warnings) {
    const props = schema.properties || {};
    // Pass 1: required-field checks (Set dedupes any repeated entries).
    for (const req of new Set(schema.required || [])) {
        const val = obj[req];
        if (val === undefined || val === null) {
            errors.push(`${prefix}${req}: required field missing`);
            continue;
        }
        if (val === '' && req !== 'error_message') {
            errors.push(`${prefix}${req}: required field is empty string`);
            continue;
        }
        if (Array.isArray(val) && val.length === 0) {
            const propSchema = props[req];
            if (propSchema?.minItems && propSchema.minItems > 0) {
                errors.push(`${prefix}${req}: array requires at least ${propSchema.minItems} items`);
            }
        }
    }
    // Pass 2: per-field value validation for declared properties only.
    for (const [name, value] of Object.entries(obj)) {
        if (name.startsWith('_comment') || name.startsWith('$'))
            continue;
        const propSchema = props[name];
        if (propSchema) {
            validateValue(value, propSchema, `${prefix}${name}`, errors, warnings);
        }
    }
}
|
|
361
|
+
/**
 * Validate one value against one property schema, appending findings to
 * `errors`/`warnings` keyed by `path`.
 *
 * Checks, in order: type (a mismatch short-circuits the rest), enum, const,
 * string length/pattern, numeric bounds, array item count and per-item
 * validation (recursing into validateObject for object items), and finally a
 * recursive descent into nested object properties.
 * null/undefined values are accepted unconditionally — required-ness is
 * enforced by validateObject, not here.
 */
function validateValue(value, propSchema, path, errors, warnings) {
    if (value === null || value === undefined)
        return;
    const expectedType = Array.isArray(propSchema.type) ? propSchema.type : [propSchema.type];
    // Type check
    const actualType = Array.isArray(value) ? 'array' : typeof value;
    if (propSchema.type && !expectedType.includes(actualType) && !expectedType.includes('null')) {
        // integer is typeof 'number'
        if (!(actualType === 'number' && expectedType.includes('integer'))) {
            errors.push(`${path}: expected ${expectedType.join('|')}, got ${actualType}`);
            return;
        }
    }
    // Enum check
    if (propSchema.enum && !propSchema.enum.includes(value)) {
        errors.push(`${path}: value "${value}" not in enum [${propSchema.enum.join(', ')}]`);
    }
    // Const check
    if (propSchema.const !== undefined && value !== propSchema.const) {
        errors.push(`${path}: expected const "${propSchema.const}", got "${value}"`);
    }
    // String constraints (truthy guard: minLength/maxLength of 0 are ignored)
    if (typeof value === 'string') {
        if (propSchema.minLength && value.length < propSchema.minLength) {
            errors.push(`${path}: string length ${value.length} < minLength ${propSchema.minLength}`);
        }
        if (propSchema.maxLength && value.length > propSchema.maxLength) {
            errors.push(`${path}: string length ${value.length} > maxLength ${propSchema.maxLength}`);
        }
        if (propSchema.pattern) {
            try {
                if (!new RegExp(propSchema.pattern).test(value)) {
                    errors.push(`${path}: does not match pattern "${propSchema.pattern}"`);
                }
            }
            catch { /* skip invalid regex in schema */ }
        }
    }
    // Number constraints
    if (typeof value === 'number') {
        if (propSchema.minimum !== undefined && value < propSchema.minimum) {
            errors.push(`${path}: ${value} < minimum ${propSchema.minimum}`);
        }
        if (propSchema.maximum !== undefined && value > propSchema.maximum) {
            errors.push(`${path}: ${value} > maximum ${propSchema.maximum}`);
        }
    }
    // Array constraints — note maxItems overflow is a warning, not an error
    if (Array.isArray(value)) {
        if (propSchema.minItems && value.length < propSchema.minItems) {
            errors.push(`${path}: array has ${value.length} items, needs >= ${propSchema.minItems}`);
        }
        if (propSchema.maxItems && value.length > propSchema.maxItems) {
            warnings.push(`${path}: array has ${value.length} items, max recommended ${propSchema.maxItems}`);
        }
        // Validate each item
        if (propSchema.items && typeof propSchema.items === 'object') {
            for (let i = 0; i < value.length; i++) {
                const item = value[i];
                if (propSchema.items.type === 'object' && typeof item === 'object' && item !== null) {
                    validateObject(item, propSchema.items, `${path}[${i}].`, errors, warnings);
                }
                else {
                    validateValue(item, propSchema.items, `${path}[${i}]`, errors, warnings);
                }
            }
        }
    }
    // Object: recurse
    if (typeof value === 'object' && !Array.isArray(value) && value !== null && propSchema.properties) {
        validateObject(value, propSchema, `${path}.`, errors, warnings);
    }
}
|
|
434
|
+
// ─── Semantic Validation (Layer 2) ───────────────────────────
|
|
435
|
+
/**
 * Layer-2 semantic quality validation, dispatched by schema ID.
 * - explore / diagnosis documents: file-list quality checks on their
 *   respective file array fields.
 * - task-bearing documents (task/solution/plan/plan-legacy): cyclic
 *   depends_on detection.
 * Unknown schema IDs are a no-op.
 */
function validateSemantics(doc, schemaId, errors, warnings) {
    // explore + diagnosis: file list quality
    if (schemaId === 'explore' || schemaId === 'diagnosis') {
        const listField = schemaId === 'explore' ? 'relevant_files' : 'affected_files';
        validateFileList(doc, listField, errors, warnings);
    }
    // task: circular dependency check
    const taskBearing = ['task', 'solution', 'plan', 'plan-legacy'];
    if (taskBearing.includes(schemaId)) {
        validateNoCyclicDeps(doc, errors);
    }
}
|
|
448
|
+
// Stock rationale phrases considered too vague to justify a file's inclusion.
// Matched case-insensitively by validateFileList (exact match, or contained
// within a rationale shorter than 25 characters).
const GENERIC_PHRASES = [
    'related to', 'relevant file', 'relevant to', 'important file',
    'related file', 'useful for', 'needed for',
];
|
|
452
|
+
/**
 * Quality checks for a file-list field (explore.relevant_files /
 * diagnosis.affected_files). Emits warnings only, never errors:
 * - more than 3 entries all discovered via "manual" → suggest tooling;
 * - rationale matching a stock generic phrase → ask for specifics;
 * - relevance >= 0.7 without key_code / topic_relation → ask for evidence.
 * Non-array fields are ignored (structure is Layer 1's job).
 */
function validateFileList(doc, field, errors, warnings) {
    const files = doc[field];
    if (!Array.isArray(files))
        return;
    const everyManual = files.length > 0 && files.every((f) => f.discovery_source === 'manual');
    if (everyManual && files.length > 3) {
        warnings.push(`${field}: all ${files.length} files discovered via "manual" — consider using bash-scan or cli-analysis`);
    }
    files.forEach((entry, i) => {
        const rationale = entry.rationale || '';
        const relevance = entry.relevance || 0;
        const lowered = rationale.toLowerCase();
        // Generic-rationale check: exact phrase, or a short rationale that
        // contains one. Only the first matching phrase triggers a warning.
        const generic = GENERIC_PHRASES.find((phrase) => lowered === phrase || (lowered.length < 25 && lowered.includes(phrase)));
        if (generic !== undefined) {
            warnings.push(`${field}[${i}].rationale: too generic ("${rationale}") — be more specific`);
        }
        // Highly relevant entries should carry supporting evidence.
        if (relevance >= 0.7) {
            if (!entry.key_code || (Array.isArray(entry.key_code) && entry.key_code.length === 0)) {
                warnings.push(`${field}[${i}]: relevance=${relevance} but missing key_code (recommended for >= 0.7)`);
            }
            if (!entry.topic_relation) {
                warnings.push(`${field}[${i}]: relevance=${relevance} but missing topic_relation (recommended for >= 0.7)`);
            }
        }
    });
}
|
|
483
|
+
/**
 * Detect circular `depends_on` chains among `doc.tasks` via DFS.
 * Tasks without an id are ignored; dependencies on unknown ids are treated
 * as leaves. At most ONE error is reported (the first task id from which a
 * cycle is reachable), then the scan stops.
 */
function validateNoCyclicDeps(doc, errors) {
    const tasks = doc.tasks || [];
    if (tasks.length === 0)
        return;
    // Adjacency: task id -> list of ids it depends on.
    const adjacency = new Map();
    for (const task of tasks) {
        if (task.id) {
            adjacency.set(task.id, task.depends_on || []);
        }
    }
    // Standard white/grey/black DFS: `inProgress` is the current recursion
    // path; revisiting a node on it means a cycle.
    const finished = new Set();
    const inProgress = new Set();
    const reachesCycle = (node) => {
        if (inProgress.has(node))
            return true;
        if (finished.has(node))
            return false;
        finished.add(node);
        inProgress.add(node);
        for (const dep of adjacency.get(node) || []) {
            if (reachesCycle(dep))
                return true;
        }
        inProgress.delete(node);
        return false;
    };
    for (const id of adjacency.keys()) {
        if (reachesCycle(id)) {
            errors.push(`tasks: circular dependency detected involving "${id}"`);
            break;
        }
    }
}
|
|
519
|
+
/**
 * Map each top-level array field of `doc` to its length.
 * The `schema` parameter is currently unused; it is kept for interface
 * compatibility with cmdValidate's call site.
 */
function countArrayItems(doc, schema) {
    return Object.fromEntries(Object.entries(doc)
        .filter(([, value]) => Array.isArray(value))
        .map(([name, value]) => [name, value.length]));
}
|
|
528
|
+
/**
 * Instant validation of one value before applyOp writes it.
 *
 * Resolves the property schema for `fieldPath` within the registry schema
 * `schemaId`. Returns `{}` (allow) when the schema cannot be loaded or the
 * field is not declared; `{ error }` on hard violations; `{ warnings }` for
 * soft findings. Array element ops ([+]/[n]) validate the element against
 * the `items` schema; plain field ops validate the value directly.
 */
function validateFieldValue(schemaId, fieldPath, value) {
    const warnings = [];
    let jsonSchema;
    try {
        jsonSchema = loadSchema(schemaId);
    }
    catch {
        return {}; // Skip validation if schema not found
    }
    // Resolve the property schema for this path
    const propSchema = resolvePropertySchema(jsonSchema, fieldPath);
    if (!propSchema)
        return {}; // Unknown field, allow it
    // For array appends, validate the item against items schema
    if (fieldPath.includes('[+]') || fieldPath.match(/\[\d+\]/)) {
        const itemSchema = propSchema.items;
        // Only object items are deep-checked here; primitive array elements
        // (or arrays without an items schema) pass through unvalidated.
        if (itemSchema && typeof value === 'object' && value !== null) {
            const errors = [];
            if (itemSchema.type === 'object') {
                validateObject(value, itemSchema, '', errors, warnings);
            }
            if (errors.length > 0)
                return { error: errors.join('; ') };
        }
        return { warnings: warnings.length > 0 ? warnings : undefined };
    }
    // For direct field set, validate the value
    const errors = [];
    validateValue(value, propSchema, fieldPath, errors, warnings);
    if (errors.length > 0)
        return { error: errors.join('; ') };
    return { warnings: warnings.length > 0 ? warnings : undefined };
}
|
|
561
|
+
/**
 * Walk a JSON Schema down a dotted field path and return the property
 * schema at the end, or null if any segment is undeclared.
 * Bracket selectors ([+], [n], [?k=v]) are stripped first — they select
 * array elements, not properties — and array hops transparently descend
 * through `items.properties`.
 */
function resolvePropertySchema(schema, fieldPath) {
    const cleanPath = fieldPath.replace(/\[\+\]|\[\d+\]|\[\?[^\]]+\]/g, '');
    let node = schema;
    for (const segment of cleanPath.split('.')) {
        if (!segment)
            continue;
        const direct = node?.properties?.[segment];
        const viaItems = node?.items?.properties?.[segment];
        if (direct) {
            node = direct;
        }
        else if (viaItems) {
            node = viaItems;
        }
        else {
            return null;
        }
    }
    return node || null;
}
|
|
580
|
+
// ─── merge ───────────────────────────────────────────────────
|
|
581
|
+
/**
 * `merge` command — combine two or more same-schema JSON files into one.
 *
 * Requires `p.sources` (>= 2 existing files) and `p.output`. The schema is
 * taken from `p.schema` or detected from the FIRST source; merge semantics
 * live in mergeDocuments (first document wins, arrays concatenated or
 * deduped per `strategy`, default "dedup_by_path").
 * NOTE: source JSON.parse is unguarded — a malformed source throws and is
 * surfaced by handler's catch.
 */
async function cmdMerge(p) {
    if (!p.sources || p.sources.length < 2) {
        return { success: false, error: 'merge requires at least 2 sources' };
    }
    if (!p.output)
        return { success: false, error: 'output is required for merge' };
    const docs = [];
    for (const src of p.sources) {
        const srcPath = await validatePath(src);
        if (!existsSync(srcPath)) {
            return { success: false, error: `Source not found: ${srcPath}` };
        }
        docs.push(JSON.parse(readFileSync(srcPath, 'utf-8')));
    }
    const schemaId = p.schema || detectSchema(docs[0], p.sources[0]);
    const jsonSchema = schemaId ? loadSchema(schemaId) : null;
    const strategy = p.strategy || 'dedup_by_path';
    const merged = mergeDocuments(docs, jsonSchema, strategy);
    const outputPath = await validatePath(p.output);
    ensureDir(outputPath);
    writeFileSync(outputPath, JSON.stringify(merged, null, 2), 'utf-8');
    return {
        success: true,
        result: {
            path: outputPath,
            sourceCount: docs.length,
            strategy,
            message: `Merged ${docs.length} documents`,
        },
    };
}
|
|
612
|
+
/**
 * Merge parsed documents into a deep copy of the first one.
 * Per top-level key of each subsequent document (`_`/`$` keys skipped):
 * - array + array  → deduplicated by item.path ("dedup_by_path") or plain
 *   concatenation (any other strategy);
 * - string + string, when the schema declares the field as string → join
 *   distinct non-empty values with a blank line, or fill an empty base;
 * - otherwise → only fill "empty" base values (undefined/null/''/0).
 * Finally stamps `_metadata.timestamp` / `_metadata.merged_from` when the
 * base carries a metadata object.
 */
function mergeDocuments(docs, schema, strategy) {
    const merged = structuredClone(docs[0]);
    const props = schema?.properties || {};
    for (const other of docs.slice(1)) {
        for (const [key, incoming] of Object.entries(other)) {
            if (key.startsWith('_') || key.startsWith('$'))
                continue;
            const current = merged[key];
            const propType = props[key]?.type;
            if (Array.isArray(current) && Array.isArray(incoming)) {
                merged[key] = strategy === 'dedup_by_path'
                    ? deduplicateArrays(current, incoming)
                    : [...current, ...incoming];
            }
            else if (typeof current === 'string' && typeof incoming === 'string' && propType === 'string') {
                if (current && incoming && current !== incoming) {
                    merged[key] = `${current}\n\n${incoming}`;
                }
                else if (!current && incoming) {
                    merged[key] = incoming;
                }
            }
            else if (current === undefined || current === null || current === '' || current === 0) {
                merged[key] = incoming;
            }
        }
    }
    if (merged._metadata && typeof merged._metadata === 'object') {
        merged._metadata.timestamp = new Date().toISOString();
        merged._metadata.merged_from = docs.length;
    }
    return merged;
}
|
|
654
|
+
/**
 * Concatenate two arrays while removing duplicates.
 *
 * Objects are duplicates when they share a truthy `path` property; on a
 * collision the entry with the higher `relevance` (default 0) wins in place.
 * Primitives are deduplicated by value, keeping the first occurrence.
 *
 * @param {Array} a - Base array (copied, never mutated).
 * @param {Array} b - Array whose items are folded into the copy of `a`.
 * @returns {Array} The merged, deduplicated array.
 */
function deduplicateArrays(a, b) {
  const merged = [...a];
  const seenPaths = new Set();
  for (const entry of a) {
    if (typeof entry === 'object' && entry !== null && entry.path) {
      seenPaths.add(entry.path);
    }
  }

  for (const candidate of b) {
    const isObject = typeof candidate === 'object' && candidate !== null;

    if (!isObject) {
      // Primitive: value-based dedup.
      if (!merged.includes(candidate)) merged.push(candidate);
      continue;
    }

    const candidatePath = candidate.path;
    if (candidatePath && seenPaths.has(candidatePath)) {
      // Path collision: the higher-relevance entry survives.
      const idx = merged.findIndex(
        (e) => typeof e === 'object' && e !== null && e.path === candidatePath,
      );
      if (idx !== -1 && (candidate.relevance || 0) > (merged[idx].relevance || 0)) {
        merged[idx] = candidate;
      }
    } else {
      merged.push(candidate);
      if (candidatePath) seenPaths.add(candidatePath);
    }
  }
  return merged;
}
|
|
688
|
+
// ─── info ────────────────────────────────────────────────────

/**
 * Report schema information.
 *
 * Without `p.schema`, returns a one-row summary for every registered schema;
 * with it, returns the full info object for that single schema.
 *
 * @param {object} p - Command parameters; `p.schema` is an optional schema id.
 * @returns {object} `{ success: true, result }`.
 */
function cmdInfo(p) {
  if (p.schema) {
    return { success: true, result: getSchemaInfo(p.schema) };
  }
  const summaries = listSchemas().map((id) => {
    try {
      const { title, requiredFields, format } = getSchemaInfo(id);
      return { id, title, required: requiredFields.length, format };
    } catch {
      // A schema that fails to load still appears in the listing.
      return { id, title: '(load error)', required: 0, format: 'json' };
    }
  });
  return { success: true, result: { schemas: summaries } };
}
|
|
707
|
+
// ─── Utilities ───────────────────────────────────────────────

/**
 * Create the parent directory of `filePath` (recursively) if it is missing.
 *
 * @param {string} filePath - File whose containing directory must exist.
 */
function ensureDir(filePath) {
  const parent = dirname(filePath);
  if (existsSync(parent)) return;
  mkdirSync(parent, { recursive: true });
}
|
|
714
|
+
/**
 * Infer a schema id for a parsed document.
 *
 * Explicit provenance (`_metadata.source === 'cli-explore-agent'`) beats any
 * heuristic; otherwise well-known file-name substrings are tried in priority
 * order (first hit wins).
 *
 * @param {object} doc - Parsed document.
 * @param {string} [filePath] - Path the document was read from.
 * @returns {string|undefined} Schema id, or undefined when nothing matches.
 */
function detectSchema(doc, filePath) {
  if (doc._metadata?.source === 'cli-explore-agent') {
    return doc.symptom || doc.root_cause ? 'diagnosis' : 'explore';
  }

  // Ordering matters: e.g. 'fix-plan' must match before the bare 'plan'.
  const patterns = [
    [['exploration', 'explore'], 'explore'],
    [['diagnosis', 'diagnos'], 'diagnosis'],
    [['finding', 'discovery'], 'finding'],
    [['fix-plan', 'fixplan'], 'fix-legacy'],
    [['plan'], 'plan'],
    [['task', 'impl-'], 'task'],
    [['solution'], 'solution'],
    [['queue'], 'queue'],
    [['review-dim'], 'review-dim'],
    [['review-deep'], 'review-deep'],
  ];
  const name = (filePath || '').toLowerCase();
  for (const [needles, schemaId] of patterns) {
    if (needles.some((needle) => name.includes(needle))) return schemaId;
  }
  return undefined;
}
|
|
746
|
+
//# sourceMappingURL=json-builder.js.map
|