@sparkleideas/claude-flow-patch 3.1.0-alpha.44.patch.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107) hide show
  1. package/AGENTS.md +162 -0
  2. package/CLAUDE.md +458 -0
  3. package/README.md +306 -0
  4. package/bin/claude-flow-patch.mjs +148 -0
  5. package/check-patches.sh +176 -0
  6. package/lib/categories.json +15 -0
  7. package/lib/common.py +92 -0
  8. package/lib/discover.mjs +181 -0
  9. package/package.json +85 -0
  10. package/patch/010-CF-001-doctor-yaml/README.md +11 -0
  11. package/patch/010-CF-001-doctor-yaml/fix.py +20 -0
  12. package/patch/010-CF-001-doctor-yaml/sentinel +1 -0
  13. package/patch/020-CF-002-config-export-yaml/README.md +11 -0
  14. package/patch/020-CF-002-config-export-yaml/fix.py +130 -0
  15. package/patch/020-CF-002-config-export-yaml/sentinel +1 -0
  16. package/patch/030-DM-001-daemon-log-zero/README.md +12 -0
  17. package/patch/030-DM-001-daemon-log-zero/fix.py +37 -0
  18. package/patch/030-DM-001-daemon-log-zero/sentinel +1 -0
  19. package/patch/040-DM-002-cpu-load-threshold/README.md +11 -0
  20. package/patch/040-DM-002-cpu-load-threshold/fix.py +6 -0
  21. package/patch/040-DM-002-cpu-load-threshold/sentinel +1 -0
  22. package/patch/050-DM-003-macos-freemem/README.md +11 -0
  23. package/patch/050-DM-003-macos-freemem/fix.py +7 -0
  24. package/patch/050-DM-003-macos-freemem/sentinel +1 -0
  25. package/patch/060-DM-004-preload-worker-stub/README.md +11 -0
  26. package/patch/060-DM-004-preload-worker-stub/fix.py +34 -0
  27. package/patch/060-DM-004-preload-worker-stub/sentinel +1 -0
  28. package/patch/070-DM-005-consolidation-worker-stub/README.md +11 -0
  29. package/patch/070-DM-005-consolidation-worker-stub/fix.py +46 -0
  30. package/patch/070-DM-005-consolidation-worker-stub/sentinel +1 -0
  31. package/patch/080-EM-001-embedding-ignores-config/README.md +11 -0
  32. package/patch/080-EM-001-embedding-ignores-config/fix.py +111 -0
  33. package/patch/080-EM-001-embedding-ignores-config/sentinel +1 -0
  34. package/patch/090-EM-002-transformers-cache-eacces/README.md +11 -0
  35. package/patch/090-EM-002-transformers-cache-eacces/fix.sh +12 -0
  36. package/patch/090-EM-002-transformers-cache-eacces/sentinel +1 -0
  37. package/patch/100-GV-001-hnsw-ghost-vectors/README.md +11 -0
  38. package/patch/100-GV-001-hnsw-ghost-vectors/fix.py +34 -0
  39. package/patch/100-GV-001-hnsw-ghost-vectors/sentinel +1 -0
  40. package/patch/110-HK-001-post-edit-file-path/README.md +44 -0
  41. package/patch/110-HK-001-post-edit-file-path/fix.py +23 -0
  42. package/patch/110-HK-001-post-edit-file-path/sentinel +1 -0
  43. package/patch/120-HK-002-hooks-tools-stub/README.md +36 -0
  44. package/patch/120-HK-002-hooks-tools-stub/fix.py +155 -0
  45. package/patch/120-HK-002-hooks-tools-stub/sentinel +1 -0
  46. package/patch/130-HK-003-metrics-hardcoded/README.md +30 -0
  47. package/patch/130-HK-003-metrics-hardcoded/fix.py +82 -0
  48. package/patch/130-HK-003-metrics-hardcoded/sentinel +1 -0
  49. package/patch/140-HW-001-stdin-hang/README.md +11 -0
  50. package/patch/140-HW-001-stdin-hang/fix.py +6 -0
  51. package/patch/140-HW-001-stdin-hang/sentinel +1 -0
  52. package/patch/150-HW-002-failures-swallowed/README.md +11 -0
  53. package/patch/150-HW-002-failures-swallowed/fix.py +42 -0
  54. package/patch/150-HW-002-failures-swallowed/sentinel +1 -0
  55. package/patch/160-HW-003-aggressive-intervals/README.md +11 -0
  56. package/patch/160-HW-003-aggressive-intervals/fix.py +16 -0
  57. package/patch/160-HW-003-aggressive-intervals/sentinel +1 -0
  58. package/patch/170-IN-001-intelligence-stub/README.md +64 -0
  59. package/patch/170-IN-001-intelligence-stub/fix.py +70 -0
  60. package/patch/170-IN-001-intelligence-stub/sentinel +1 -0
  61. package/patch/180-MM-001-memory-persist-path/README.md +27 -0
  62. package/patch/180-MM-001-memory-persist-path/fix.py +54 -0
  63. package/patch/180-MM-001-memory-persist-path/sentinel +1 -0
  64. package/patch/190-NS-001-discovery-default-namespace/README.md +16 -0
  65. package/patch/190-NS-001-discovery-default-namespace/fix.py +68 -0
  66. package/patch/190-NS-001-discovery-default-namespace/sentinel +2 -0
  67. package/patch/200-NS-002-targeted-require-namespace/README.md +19 -0
  68. package/patch/200-NS-002-targeted-require-namespace/fix.py +158 -0
  69. package/patch/200-NS-002-targeted-require-namespace/sentinel +2 -0
  70. package/patch/210-NS-003-namespace-typo-pattern/README.md +15 -0
  71. package/patch/210-NS-003-namespace-typo-pattern/fix.py +23 -0
  72. package/patch/210-NS-003-namespace-typo-pattern/sentinel +1 -0
  73. package/patch/220-RS-001-better-sqlite3-node24/README.md +54 -0
  74. package/patch/220-RS-001-better-sqlite3-node24/fix.py +22 -0
  75. package/patch/220-RS-001-better-sqlite3-node24/rebuild.sh +31 -0
  76. package/patch/220-RS-001-better-sqlite3-node24/sentinel +2 -0
  77. package/patch/230-RV-001-force-learn-tick/README.md +31 -0
  78. package/patch/230-RV-001-force-learn-tick/fix.py +14 -0
  79. package/patch/230-RV-001-force-learn-tick/sentinel +2 -0
  80. package/patch/240-RV-002-trajectory-load/README.md +28 -0
  81. package/patch/240-RV-002-trajectory-load/fix.py +14 -0
  82. package/patch/240-RV-002-trajectory-load/sentinel +2 -0
  83. package/patch/250-RV-003-trajectory-stats-sync/README.md +31 -0
  84. package/patch/250-RV-003-trajectory-stats-sync/fix.py +18 -0
  85. package/patch/250-RV-003-trajectory-stats-sync/sentinel +2 -0
  86. package/patch/260-SG-001-init-settings/README.md +29 -0
  87. package/patch/260-SG-001-init-settings/fix.py +143 -0
  88. package/patch/260-SG-001-init-settings/sentinel +4 -0
  89. package/patch/270-SG-003-init-helpers-all-paths/README.md +60 -0
  90. package/patch/270-SG-003-init-helpers-all-paths/fix.py +164 -0
  91. package/patch/270-SG-003-init-helpers-all-paths/sentinel +3 -0
  92. package/patch/280-UI-001-intelligence-stats-crash/README.md +11 -0
  93. package/patch/280-UI-001-intelligence-stats-crash/fix.py +57 -0
  94. package/patch/280-UI-001-intelligence-stats-crash/sentinel +1 -0
  95. package/patch/290-UI-002-neural-status-not-loaded/README.md +11 -0
  96. package/patch/290-UI-002-neural-status-not-loaded/fix.py +19 -0
  97. package/patch/290-UI-002-neural-status-not-loaded/sentinel +1 -0
  98. package/patch/300-DM-006-log-rotation/README.md +11 -0
  99. package/patch/300-DM-006-log-rotation/fix.py +58 -0
  100. package/patch/300-DM-006-log-rotation/sentinel +1 -0
  101. package/patch/310-HW-004-runwithtimeout-orphan/README.md +11 -0
  102. package/patch/310-HW-004-runwithtimeout-orphan/fix.py +10 -0
  103. package/patch/310-HW-004-runwithtimeout-orphan/sentinel +1 -0
  104. package/patch-all.sh +203 -0
  105. package/repair-post-init.sh +245 -0
  106. package/scripts/update-docs.mjs +208 -0
  107. package/scripts/upstream-log.mjs +257 -0
package/lib/common.py ADDED
@@ -0,0 +1,92 @@
1
+ # common.py — shared patch infrastructure
2
+ # Extracted from apply-patches.sh. Provides patch()/patch_all() + path variables.
3
+
4
+ import sys, os, re
5
+
6
+ base = os.environ.get("BASE", "")
7
+ if not base or base == "/dev/null":
8
+ base = "" # No claude-flow/cli, paths will be invalid (patch() will skip gracefully)
9
+ services = base + "/services" if base else ""
10
+ commands = base + "/commands" if base else ""
11
+ memory = base + "/memory" if base else ""
12
+
13
+ applied = 0
14
+ skipped = 0
15
+
16
def patch(label, filepath, old, new):
    """Replace the FIRST occurrence of `old` with `new` in `filepath`.

    Idempotent: if `new` is already present the file is counted as skipped;
    if `old` is absent a warning is printed (upstream code probably changed)
    and nothing is written. Missing files are skipped silently so one patch
    set can run against partial installs.

    Args:
        label: Human-readable patch name used in status output.
        filepath: Target file path; empty string means "package not found".
        old: Exact source snippet to search for.
        new: Replacement snippet.
    """
    global applied, skipped
    if not filepath:
        return  # Skip if path is empty (package not found)
    try:
        # Target files ship as UTF-8 JS; pin the encoding so patching does
        # not depend on the host locale (a C/POSIX locale would otherwise
        # raise a decode error on non-ASCII patterns such as em dashes).
        with open(filepath, 'r', encoding='utf-8') as f:
            code = f.read()
        if new in code:
            skipped += 1  # Already patched — nothing to do
            return
        if old not in code:
            print(f" WARN: {label} — pattern not found (code may have changed)")
            return
        code = code.replace(old, new, 1)
        with open(filepath, 'w', encoding='utf-8') as f:
            f.write(code)
        print(f" Applied: {label}")
        applied += 1
    except FileNotFoundError:
        pass  # Silently skip if file doesn't exist (package not installed)
    except Exception as e:
        print(f" ERROR: {label} — {e}")
39
def patch_all(label, filepath, old, new):
    """Replace ALL occurrences of `old` with `new` in `filepath`.

    Idempotency check differs from `patch()`: the file counts as skipped
    only when `new` is present AND `old` is gone (i.e. every occurrence was
    already rewritten). Missing files are skipped silently.

    Args:
        label: Human-readable patch name used in status output.
        filepath: Target file path; empty string means "package not found".
        old: Exact source snippet to search for.
        new: Replacement snippet.
    """
    global applied, skipped
    if not filepath:
        return  # Skip if path is empty (package not found)
    try:
        # Pin UTF-8 so patching is locale-independent (see patch()).
        with open(filepath, 'r', encoding='utf-8') as f:
            code = f.read()
        if new in code and old not in code:
            skipped += 1  # Fully patched already
            return
        if old not in code:
            print(f" WARN: {label} — pattern not found")
            return
        code = code.replace(old, new)
        with open(filepath, 'w', encoding='utf-8') as f:
            f.write(code)
        print(f" Applied: {label}")
        applied += 1
    except FileNotFoundError:
        pass  # Silently skip if file doesn't exist (package not installed)
    except Exception as e:
        print(f" ERROR: {label} — {e}")
62
+
63
+ # ── Target file paths ──
64
+ # These may be empty strings if base is not set (no claude-flow/cli found)
65
+ HWE = services + "/headless-worker-executor.js" if services else ""
66
+ WD = services + "/worker-daemon.js" if services else ""
67
+ DJ = commands + "/daemon.js" if commands else ""
68
+ DOC = commands + "/doctor.js" if commands else ""
69
+ MI = memory + "/memory-initializer.js" if memory else ""
70
+
71
+ MCP_MEMORY = base + "/mcp-tools/memory-tools.js" if base else ""
72
+ MCP_HOOKS = base + "/mcp-tools/hooks-tools.js" if base else ""
73
+ CLI_MEMORY = commands + "/memory.js" if commands else ""
74
+ CONF = commands + "/config.js" if commands else ""
75
+ HOOKS_CMD = commands + "/hooks.js" if commands else ""
76
+ NEURAL = commands + "/neural.js" if commands else ""
77
+ EMB_TOOLS = base + "/mcp-tools/embeddings-tools.js" if base else ""
78
+
79
+ # Init module
80
+ init = base + "/init" if base else ""
81
+ SETTINGS_GEN = init + "/settings-generator.js" if init else ""
82
+ HELPERS_GEN = init + "/helpers-generator.js" if init else ""
83
+ EXECUTOR = init + "/executor.js" if init else ""
84
+ TYPES = init + "/types.js" if init else ""
85
+ INIT_CMD = commands + "/init.js" if commands else ""
86
+
87
+ # Source helpers (shipped with package, copied by writeHelpers when source dir found)
88
+ _pkg_root = os.path.dirname(os.path.dirname(base)) if base else ""
89
+ SRC_HOOK_HANDLER = os.path.join(_pkg_root, ".claude", "helpers", "hook-handler.cjs") if _pkg_root else ""
90
+
91
+ # RuVector (separate package, path set by patch-all.sh)
92
+ ruvector_cli = os.environ.get("RUVECTOR_CLI", "")
@@ -0,0 +1,181 @@
1
+ #!/usr/bin/env node
2
+ // lib/discover.mjs — Dynamic patch discovery
3
+ // Scans patch/*/ directories, parses README.md + fix.py headers for metadata.
4
+ // Single source of truth for scripts, sentinel checks, and documentation.
5
+
6
+ import { readdirSync, readFileSync, existsSync } from 'node:fs';
7
+ import { resolve, dirname } from 'node:path';
8
+ import { fileURLToPath } from 'node:url';
9
+
10
+ const __dirname = dirname(fileURLToPath(import.meta.url));
11
+ const ROOT = resolve(__dirname, '..');
12
+ const PATCH_DIR = resolve(ROOT, 'patch');
13
+
14
+ // Category labels from lib/categories.json
15
+ const CATEGORY_MAP = JSON.parse(
16
+ readFileSync(resolve(__dirname, 'categories.json'), 'utf-8')
17
+ );
18
+
19
/**
 * Parse a patch README.md for doc metadata.
 *
 * Expected format:
 *   # {ID}: {title}
 *   **Severity**: {severity}
 *   **GitHub**: [{label}]({url})
 *   ## Files Patched
 *   - {file}
 *   ## Ops
 *   {N} op(s) in fix.py
 *
 * Every field degrades gracefully: missing sections yield '' / 'Unknown' /
 * [] / 0 rather than throwing, and the patch ID falls back to the first two
 * dash-separated segments of the directory name (minus its numeric prefix).
 *
 * @param {string} readmePath — absolute path to the patch README.md
 * @param {string} dirName — patch directory name, e.g. "010-CF-001-doctor-yaml"
 * @returns {{id: string, title: string, severity: string, github: string,
 *            githubUrl: string, files: string[], ops: number}}
 */
function parseReadme(readmePath, dirName) {
  const text = readFileSync(readmePath, 'utf-8');
  const lines = text.split('\n');

  // Title line: "# CF-001: Doctor ignores YAML config files" → id + title.
  const titleMatch = lines[0]?.match(/^#\s+(\S+):\s+(.+)/);
  const stripped = dirName.replace(/^\d+-/, '');
  // Fallback id from dir name: "CF-001-doctor-yaml" → "CF-001".
  const id = titleMatch?.[1] ?? stripped.split('-').slice(0, 2).join('-');
  const title = titleMatch?.[2]?.trim() ?? '';

  const sevLine = lines.find(l => l.startsWith('**Severity**'));
  // Only the first word is kept, e.g. "Critical (macOS only)" → "Critical".
  const severity = sevLine?.match(/\*\*Severity\*\*:\s*(\S+)/)?.[1] ?? 'Unknown';

  // GitHub link: "[#1141](https://...)" → label + URL.
  const ghLine = lines.find(l => l.startsWith('**GitHub**'));
  const ghMatch = ghLine?.match(/\[([^\]]+)\]\(([^)]+)\)/);
  const github = ghMatch?.[1] ?? '';
  const githubUrl = ghMatch?.[2] ?? '';

  // Collect "- file" bullets under "## Files Patched" until the next heading
  // or a blank line after at least one entry was seen.
  const filesIdx = lines.findIndex(l => /^##\s+Files Patched/i.test(l));
  const files = [];
  if (filesIdx >= 0) {
    for (let i = filesIdx + 1; i < lines.length; i++) {
      const m = lines[i].match(/^-\s+`?([^`\n]+)`?\s*$/);
      if (m) files.push(m[1].trim());
      else if (lines[i].startsWith('#') || (lines[i].trim() === '' && files.length > 0)) break;
    }
  }

  // Op count: first line shaped like "2 ops in fix.py"; parseInt reads the
  // leading integer and ignores the rest of the line.
  const opsLine = lines.find(l => /^\d+\s+ops?\b/i.test(l));
  const ops = opsLine ? parseInt(opsLine, 10) : 0;

  return { id, title, severity, github, githubUrl, files, ops };
}
62
+
63
+ /**
64
+ * Parse a sentinel file for verification metadata.
65
+ * Each line is one of:
66
+ * package: <name> — target package (default: @claude-flow/cli)
67
+ * grep "<pattern>" <file> — pass if pattern found
68
+ * absent "<pattern>" <file> — pass if pattern NOT found
69
+ * none — skip verification
70
+ */
71
+ function parseSentinels(sentinelPath) {
72
+ const text = readFileSync(sentinelPath, 'utf-8');
73
+ const lines = text.split('\n');
74
+
75
+ let pkg = null;
76
+ const sentinels = [];
77
+
78
+ for (const line of lines) {
79
+ const trimmed = line.trim();
80
+ if (!trimmed) continue;
81
+
82
+ if (trimmed.startsWith('package:')) {
83
+ pkg = trimmed.replace('package:', '').trim();
84
+ } else if (trimmed === 'none') {
85
+ sentinels.push({ type: 'none' });
86
+ } else {
87
+ const absentMatch = trimmed.match(/^absent\s+"(.+)"\s+(.+)$/);
88
+ if (absentMatch) {
89
+ sentinels.push({ type: 'absent', pattern: absentMatch[1], file: absentMatch[2] });
90
+ continue;
91
+ }
92
+ const grepMatch = trimmed.match(/^grep\s+"(.+)"\s+(.+)$/);
93
+ if (grepMatch) {
94
+ sentinels.push({ type: 'grep', pattern: grepMatch[1], file: grepMatch[2] });
95
+ }
96
+ }
97
+ }
98
+
99
+ return { package: pkg, sentinels };
100
+ }
101
+
102
/**
 * Discover all patches. Returns structured JSON with everything needed
 * by scripts, sentinel checks, and documentation.
 *
 * Scans `patch/*` in sorted (i.e. numeric-prefix) order; directories
 * without a README.md are ignored. If the patch directory itself cannot
 * be read, an empty result is returned instead of throwing.
 *
 * @returns {{patches: object[], categories: Object<string,string>,
 *            stats: {total: number, categories: number}}}
 */
export function discover() {
  let dirs;
  try {
    dirs = readdirSync(PATCH_DIR, { withFileTypes: true })
      .filter(d => d.isDirectory())
      .map(d => d.name)
      .sort();
  } catch {
    // No patch/ directory at all — empty but well-formed result.
    return { patches: [], categories: {}, stats: { total: 0, categories: 0 } };
  }

  const patches = [];
  const categorySet = new Set();

  for (const dirName of dirs) {
    const readmePath = resolve(PATCH_DIR, dirName, 'README.md');
    if (!existsSync(readmePath)) continue;  // README.md is the discovery marker

    const meta = parseReadme(readmePath, dirName);
    // Derivations from "010-CF-001-doctor-yaml":
    //   prefix "CF" (category key), slug "doctor-yaml", order 10.
    const prefix = meta.id.split('-')[0];
    const stripped = dirName.replace(/^\d+-/, '');
    const slug = stripped.replace(/^[A-Z]+-\d+-/, '');
    const orderMatch = dirName.match(/^(\d+)-/);
    const order = orderMatch ? parseInt(orderMatch[1], 10) : null;
    // fix.py wins over fix.sh when both exist.
    const hasPy = existsSync(resolve(PATCH_DIR, dirName, 'fix.py'));
    const hasSh = existsSync(resolve(PATCH_DIR, dirName, 'fix.sh'));
    const fixType = hasPy ? 'python' : hasSh ? 'shell' : 'unknown';
    // Human label from lib/categories.json; fall back to the raw prefix.
    const category = CATEGORY_MAP[prefix] ?? prefix;
    categorySet.add(category);

    // Parse sentinel metadata from dedicated sentinel file
    const sentinelPath = resolve(PATCH_DIR, dirName, 'sentinel');
    const sentinel = existsSync(sentinelPath) ? parseSentinels(sentinelPath) : { package: null, sentinels: [] };

    patches.push({
      id: meta.id,
      order,
      slug,
      dir: dirName,
      title: meta.title,
      severity: meta.severity,
      github: meta.github,
      githubUrl: meta.githubUrl,
      category,
      prefix,
      type: fixType,
      files: meta.files,
      ops: meta.ops,
      package: sentinel.package,   // null → caller applies its default package
      sentinels: sentinel.sentinels,
    });
  }

  // Build categories object: { prefix: label } for active categories only
  // (prefixes that actually have at least one discovered patch).
  const categories = {};
  for (const p of patches) {
    if (!categories[p.prefix]) {
      categories[p.prefix] = CATEGORY_MAP[p.prefix] ?? p.prefix;
    }
  }

  return {
    patches,
    categories,
    stats: {
      total: patches.length,
      // Distinct resolved category labels (not prefixes).
      categories: categorySet.size,
    },
  };
}
176
+
177
// CLI entry point: `node lib/discover.mjs` prints the discovery JSON to stdout.
// Both sides are normalized with resolve() so the comparison works whether the
// script was invoked via a relative or absolute path.
const thisFile = resolve(__dirname, 'discover.mjs');
if (process.argv[1] && resolve(process.argv[1]) === thisFile) {
  console.log(JSON.stringify(discover(), null, 2));
}
package/package.json ADDED
@@ -0,0 +1,85 @@
1
+ {
2
+ "name": "@sparkleideas/claude-flow-patch",
3
+ "version": "3.1.0-alpha.44.patch.3",
4
+ "description": "Patch toolkit for @claude-flow/cli init/runtime defects with verify and post-init repair commands",
5
+ "scripts": {
6
+ "test": "node --test tests/*.test.mjs",
7
+ "package": "bash npm/package.sh",
8
+ "package:dry": "bash npm/package.sh --dry-run",
9
+ "publish:npm": "bash npm/publish.sh",
10
+ "publish:dry": "bash npm/publish.sh --dry-run",
11
+ "update-docs": "node scripts/update-docs.mjs",
12
+ "update-docs:check": "node scripts/update-docs.mjs --check",
13
+ "upstream-log": "node scripts/upstream-log.mjs",
14
+ "guidance:analyze": "guidance analyze",
15
+ "guidance:optimize": "guidance autopilot --once --apply --source manual",
16
+ "guidance:autopilot:once": "guidance autopilot --once --source manual",
17
+ "guidance:autopilot:daemon": "guidance autopilot --daemon --apply --source daemon",
18
+ "guidance:ab-benchmark": "guidance benchmark",
19
+ "guidance:scaffold": "guidance scaffold",
20
+ "guidance:all": "guidance run all",
21
+ "guidance:status": "guidance run status",
22
+ "guidance:hooks": "guidance run hooks",
23
+ "guidance:trust": "guidance run trust",
24
+ "guidance:adversarial": "guidance run adversarial",
25
+ "guidance:proof": "guidance run proof",
26
+ "guidance:conformance": "guidance run conformance",
27
+ "guidance:evolution": "guidance run evolution",
28
+ "guidance:runtime": "guidance runtime demo",
29
+ "guidance:codex:status": "guidance codex status",
30
+ "guidance:codex:pre-command": "guidance codex pre-command",
31
+ "guidance:codex:pre-edit": "guidance codex pre-edit",
32
+ "guidance:codex:pre-task": "guidance codex pre-task",
33
+ "guidance:codex:post-edit": "guidance codex post-edit",
34
+ "guidance:codex:post-task": "guidance codex post-task",
35
+ "guidance:codex:session-start": "guidance codex session-start",
36
+ "guidance:codex:session-end": "guidance codex session-end"
37
+ },
38
+ "private": false,
39
+ "license": "MIT",
40
+ "type": "module",
41
+ "author": "Sparkling Ideas <henrik@sparklingideas.co.uk>",
42
+ "repository": {
43
+ "type": "git",
44
+ "url": "git+https://github.com/sparkling/claude-flow-patch.git"
45
+ },
46
+ "homepage": "https://sparklingideas.co.uk/patch/claude-flow",
47
+ "bugs": {
48
+ "url": "https://github.com/sparkling/claude-flow-patch/issues"
49
+ },
50
+ "engines": {
51
+ "node": ">=20"
52
+ },
53
+ "publishConfig": {
54
+ "access": "public"
55
+ },
56
+ "bin": {
57
+ "claude-flow-patch": "bin/claude-flow-patch.mjs"
58
+ },
59
+ "files": [
60
+ "bin",
61
+ "lib",
62
+ "patch",
63
+ "scripts",
64
+ "patch-all.sh",
65
+ "check-patches.sh",
66
+ "repair-post-init.sh",
67
+ "README.md",
68
+ "AGENTS.md",
69
+ "CLAUDE.md"
70
+ ],
71
+ "keywords": [
72
+ "claude-flow",
73
+ "@claude-flow/cli",
74
+ "patch",
75
+ "hotfix",
76
+ "init",
77
+ "hook-handler",
78
+ "npm-cache",
79
+ "hooks",
80
+ "ruvector",
81
+ "ruv-swarm",
82
+ "automation"
83
+ ],
84
+ "packageManager": "npm@10"
85
+ }
@@ -0,0 +1,11 @@
1
+ # CF-001: Doctor ignores YAML config files
2
+ **Severity**: Low
3
+ **GitHub**: [#1141](https://github.com/ruvnet/claude-flow/issues/1141)
4
+ ## Root Cause
5
+ `checkConfigFile()` only checks `.json` paths, but `claude-flow init` generates `config.yaml`. Also, `JSON.parse()` runs on all config content, crashing on YAML.
6
+ ## Fix
7
+ Add `.claude-flow/config.yaml` and `.claude-flow/config.yml` to the config search paths. Skip `JSON.parse()` for non-JSON files.
8
+ ## Files Patched
9
+ - commands/doctor.js
10
+ ## Ops
11
+ 2 ops in fix.py
@@ -0,0 +1,20 @@
1
+ # CF-001: Doctor ignores YAML config files
2
+ patch("7: YAML config",
3
+ DOC,
4
+ """ const configPaths = [
5
+ '.claude-flow/config.json',
6
+ 'claude-flow.config.json',
7
+ '.claude-flow.json'
8
+ ];""",
9
+ """ const configPaths = [
10
+ '.claude-flow/config.json',
11
+ 'claude-flow.config.json',
12
+ '.claude-flow.json',
13
+ '.claude-flow/config.yaml',
14
+ '.claude-flow/config.yml'
15
+ ];""")
16
+
17
+ patch("7: YAML JSON.parse skip",
18
+ DOC,
19
+ " JSON.parse(content);",
20
+ " if (configPath.endsWith('.json')) { JSON.parse(content); }")
@@ -0,0 +1 @@
1
+ grep "config.yaml" commands/doctor.js
@@ -0,0 +1,11 @@
1
+ # CF-002: Config export shows hardcoded defaults
2
+ **Severity**: Medium
3
+ **GitHub**: [#1142](https://github.com/ruvnet/claude-flow/issues/1142)
4
+ ## Root Cause
5
+ `config export` and `config get` use hardcoded defaults (topology: 'hybrid', cacheSize: 256) instead of reading `.claude-flow/config.yaml`. The commands are misleading when the project has custom config.
6
+ ## Fix
7
+ Add `readYamlConfig()` helper function. Merge YAML config values over defaults in both `getCommand` and `exportCommand` actions.
8
+ ## Files Patched
9
+ - commands/config.js
10
+ ## Ops
11
+ 3 ops in fix.py
@@ -0,0 +1,130 @@
1
+ # CF-002: Config export shows hardcoded defaults instead of reading YAML
2
+ # Absorbed from old patch-16. Applied manually to npx cache.
3
+ # NOTE: This patch was originally applied via sed, not via patch().
4
+ # The patch() calls below replicate the same changes.
5
+
6
+ # Add fs/path imports after existing imports
7
+ patch("16a: config.js add readYamlConfig",
8
+ CONF,
9
+ "const getCommand = {",
10
+ """import { readFileSync, existsSync } from 'fs';
11
+ import { join } from 'path';
12
+
13
+ // Helper to read config.yaml if it exists
14
+ function readYamlConfig() {
15
+ const configPath = join(process.cwd(), '.claude-flow', 'config.yaml');
16
+ if (!existsSync(configPath)) { return {}; }
17
+ try {
18
+ const content = readFileSync(configPath, 'utf8');
19
+ const config = {};
20
+ const lines = content.split('\\n');
21
+ let currentSection = null;
22
+ for (const line of lines) {
23
+ const trimmed = line.trim();
24
+ if (!trimmed || trimmed.startsWith('#')) continue;
25
+ if (!trimmed.includes(':')) continue;
26
+ const indent = line.match(/^\\s*/)[0].length;
27
+ if (indent === 0) {
28
+ const [key, ...rest] = trimmed.split(':');
29
+ const value = rest.join(':').trim();
30
+ if (value && value !== '') {
31
+ config[key.trim()] = value.replace(/^["']|["']$/g, '');
32
+ } else {
33
+ currentSection = key.trim();
34
+ config[currentSection] = {};
35
+ }
36
+ } else if (currentSection && indent > 0) {
37
+ const [key, ...rest] = trimmed.split(':');
38
+ const value = rest.join(':').trim();
39
+ if (value && value !== '') {
40
+ config[currentSection][key.trim()] = value.replace(/^["']|["']$/g, '');
41
+ }
42
+ }
43
+ }
44
+ return config;
45
+ } catch (error) { return {}; }
46
+ }
47
+
48
+ const getCommand = {""")
49
+
50
+ # Update getCommand to merge YAML config
51
+ patch("16b: config get merge yaml",
52
+ CONF,
53
+ """const configValues = {
54
+ 'version': '3.0.0',
55
+ 'v3Mode': true,
56
+ 'swarm.topology': 'hybrid',
57
+ 'swarm.maxAgents': 15,
58
+ 'swarm.autoScale': true,
59
+ 'memory.backend': 'hybrid',
60
+ 'memory.cacheSize': 256,
61
+ 'mcp.transport': 'stdio',
62
+ 'agents.defaultType': 'coder',
63
+ 'agents.maxConcurrent': 15
64
+ };""",
65
+ """// Default config values
66
+ const defaults = {
67
+ 'version': '3.0.0',
68
+ 'v3Mode': true,
69
+ 'swarm.topology': 'hybrid',
70
+ 'swarm.maxAgents': 15,
71
+ 'swarm.autoScale': true,
72
+ 'memory.backend': 'hybrid',
73
+ 'memory.cacheSize': 256,
74
+ 'mcp.transport': 'stdio',
75
+ 'agents.defaultType': 'coder',
76
+ 'agents.maxConcurrent': 15
77
+ };
78
+ // Read YAML config and merge with defaults
79
+ const yamlConfig = readYamlConfig();
80
+ const configValues = { ...defaults };
81
+ if (yamlConfig.swarm) {
82
+ if (yamlConfig.swarm.topology) configValues['swarm.topology'] = yamlConfig.swarm.topology;
83
+ if (yamlConfig.swarm.maxAgents) configValues['swarm.maxAgents'] = parseInt(yamlConfig.swarm.maxAgents) || defaults['swarm.maxAgents'];
84
+ if (yamlConfig.swarm.autoScale !== undefined) configValues['swarm.autoScale'] = yamlConfig.swarm.autoScale === 'true' || yamlConfig.swarm.autoScale === true;
85
+ }
86
+ if (yamlConfig.memory) {
87
+ if (yamlConfig.memory.backend) configValues['memory.backend'] = yamlConfig.memory.backend;
88
+ if (yamlConfig.memory.cacheSize) configValues['memory.cacheSize'] = parseInt(yamlConfig.memory.cacheSize) || defaults['memory.cacheSize'];
89
+ }
90
+ if (yamlConfig.mcp && yamlConfig.mcp.transport) {
91
+ configValues['mcp.transport'] = yamlConfig.mcp.transport;
92
+ }
93
+ if (yamlConfig.version) {
94
+ configValues['version'] = yamlConfig.version;
95
+ }""")
96
+
97
+ # Update exportCommand to merge YAML config
98
+ patch("16c: config export merge yaml",
99
+ CONF,
100
+ """const config = {
101
+ version: '3.0.0',
102
+ exportedAt: new Date().toISOString(),
103
+ agents: { defaultType: 'coder', maxConcurrent: 15 },
104
+ swarm: { topology: 'hybrid', maxAgents: 15 },
105
+ memory: { backend: 'hybrid', cacheSize: 256 },
106
+ mcp: { transport: 'stdio', tools: 'all' }
107
+ };""",
108
+ """// Start with defaults
109
+ const config = {
110
+ version: '3.0.0',
111
+ exportedAt: new Date().toISOString(),
112
+ agents: { defaultType: 'coder', maxConcurrent: 15 },
113
+ swarm: { topology: 'hybrid', maxAgents: 15 },
114
+ memory: { backend: 'hybrid', cacheSize: 256 },
115
+ mcp: { transport: 'stdio', tools: 'all' }
116
+ };
117
+ // Read YAML config and merge
118
+ const yamlConfig = readYamlConfig();
119
+ if (yamlConfig.version) { config.version = yamlConfig.version; }
120
+ if (yamlConfig.swarm) {
121
+ if (yamlConfig.swarm.topology) config.swarm.topology = yamlConfig.swarm.topology;
122
+ if (yamlConfig.swarm.maxAgents) config.swarm.maxAgents = parseInt(yamlConfig.swarm.maxAgents) || 15;
123
+ }
124
+ if (yamlConfig.memory) {
125
+ if (yamlConfig.memory.backend) config.memory.backend = yamlConfig.memory.backend;
126
+ if (yamlConfig.memory.cacheSize) config.memory.cacheSize = parseInt(yamlConfig.memory.cacheSize) || 256;
127
+ }
128
+ if (yamlConfig.mcp && yamlConfig.mcp.transport) {
129
+ config.mcp.transport = yamlConfig.mcp.transport;
130
+ }""")
@@ -0,0 +1 @@
1
+ grep "readYamlConfig" commands/config.js
@@ -0,0 +1,12 @@
1
+ # DM-001: daemon.log always 0 bytes
2
+ **Severity**: Medium
3
+ **GitHub**: [#1116](https://github.com/ruvnet/claude-flow/issues/1116)
4
+ ## Root Cause
5
+ Two issues: (1) `log()` method uses `require('fs')` which is undefined in ESM modules — `appendFileSync` silently fails. (2) Path mismatch — daemon spawn writes to `.claude-flow/daemon.log` but `log()` targets `.claude-flow/logs/daemon.log`.
6
+ ## Fix
7
+ (A) Add `appendFileSync` to ESM import. (B) Replace `require('fs')` call with imported function. (C) Align spawn log path to `.claude-flow/logs/daemon.log`.
8
+ ## Files Patched
9
+ - services/worker-daemon.js (Parts A, B)
10
+ - commands/daemon.js (Part C)
11
+ ## Ops
12
+ 3 ops in fix.py
@@ -0,0 +1,37 @@
1
+ # DM-001: daemon.log always 0 bytes (ESM require + path mismatch)
2
+ # GitHub: #1116
3
+ patch("4A: appendFileSync import",
4
+ WD,
5
+ "import { existsSync, mkdirSync, writeFileSync, readFileSync } from 'fs';",
6
+ "import { existsSync, mkdirSync, writeFileSync, readFileSync, appendFileSync } from 'fs';")
7
+
8
+ patch("4B: remove require('fs')",
9
+ WD,
10
+ """ const fs = require('fs');
11
+ fs.appendFileSync(logFile, logMessage + '\\n');""",
12
+ " appendFileSync(logFile, logMessage + '\\n');")
13
+
14
+ patch("4C: daemon log path",
15
+ DJ,
16
+ """ const logFile = join(stateDir, 'daemon.log');
17
+ // Validate all paths
18
+ validatePath(stateDir, 'State directory');
19
+ validatePath(pidFile, 'PID file');
20
+ validatePath(logFile, 'Log file');
21
+ // Ensure state directory exists
22
+ if (!fs.existsSync(stateDir)) {
23
+ fs.mkdirSync(stateDir, { recursive: true });
24
+ }""",
25
+ """ const logsDir = join(stateDir, 'logs');
26
+ if (!fs.existsSync(logsDir)) {
27
+ fs.mkdirSync(logsDir, { recursive: true });
28
+ }
29
+ const logFile = join(logsDir, 'daemon.log');
30
+ // Validate all paths
31
+ validatePath(stateDir, 'State directory');
32
+ validatePath(pidFile, 'PID file');
33
+ validatePath(logFile, 'Log file');
34
+ // Ensure state directory exists
35
+ if (!fs.existsSync(stateDir)) {
36
+ fs.mkdirSync(stateDir, { recursive: true });
37
+ }""")
@@ -0,0 +1 @@
1
+ grep "appendFileSync" services/worker-daemon.js
@@ -0,0 +1,11 @@
1
+ # DM-002: maxCpuLoad=2.0 blocks all workers on multi-core
2
+ **Severity**: Critical
3
+ **GitHub**: [#1138](https://github.com/ruvnet/claude-flow/issues/1138)
4
+ ## Root Cause
5
+ `maxCpuLoad` defaults to 2.0, but load average reflects total system load across all cores. An 8-core Mac idles at 3-5. A 32-core server idles at 1-3 but spikes above 2.0 easily. No worker ever passes the resource gate.
6
+ ## Fix
7
+ Raise threshold to match hardware. Currently set to 28.0 for 32-core server. Adjust per machine (8-core Mac: 6.0).
8
+ ## Files Patched
9
+ - services/worker-daemon.js
10
+ ## Ops
11
+ 1 op in fix.py
@@ -0,0 +1,6 @@
1
+ # DM-002: maxCpuLoad=2.0 blocks all workers on multi-core
2
+ # NOTE: 32-core server uses 28.0. Adjust per machine (8-core Mac: 6.0).
3
+ patch("5: CPU load threshold",
4
+ WD,
5
+ "maxCpuLoad: 2.0",
6
+ "maxCpuLoad: 28.0")
@@ -0,0 +1 @@
1
+ grep "maxCpuLoad" services/worker-daemon.js
@@ -0,0 +1,11 @@
1
+ # DM-003: macOS freemem() always ~0% — workers blocked
2
+ **Severity**: Critical (macOS only)
3
+ **GitHub**: [#1077](https://github.com/ruvnet/claude-flow/issues/1077)
4
+ ## Root Cause
5
+ `os.freemem()` on macOS excludes file cache, always reporting ~0.3% free. macOS reclaims cache on demand, so the number is meaningless. The `minFreeMemoryPercent: 20` gate never passes.
6
+ ## Fix
7
+ Skip the free memory check on macOS (`os.platform() !== 'darwin'`).
8
+ ## Files Patched
9
+ - services/worker-daemon.js
10
+ ## Ops
11
+ 1 op in fix.py (skipped on Linux)
@@ -0,0 +1,7 @@
1
+ # DM-003: macOS freemem() always ~0% — workers blocked
2
+ # GitHub: #1077
3
+ # SKIPPED on Linux (only affects macOS)
4
+ patch("6: macOS memory",
5
+ WD,
6
+ "if (freePercent < this.config.resourceThresholds.minFreeMemoryPercent) {",
7
+ "if (os.platform() !== 'darwin' && freePercent < this.config.resourceThresholds.minFreeMemoryPercent) {")
@@ -0,0 +1 @@
1
+ grep "darwin" services/worker-daemon.js
@@ -0,0 +1,11 @@
1
+ # DM-004: Preload worker stub + missing from defaults
2
+ **Severity**: Enhancement
3
+ **GitHub**: [#1139](https://github.com/ruvnet/claude-flow/issues/1139)
4
+ ## Root Cause
5
+ The `preload` worker type exists in the switch statement but was missing from `DEFAULT_WORKERS` — never scheduled. The `runPreloadWorkerLocal()` was a stub returning `{resourcesPreloaded: 0}`. Also missing: ultralearn, deepdive, refactor, benchmark workers.
6
+ ## Fix
7
+ Add missing workers to DEFAULT_WORKERS. Implement real preload that calls `loadEmbeddingModel()` and `getHNSWIndex()` from memory-initializer.js.
8
+ ## Files Patched
9
+ - services/worker-daemon.js
10
+ ## Ops
11
+ 2 ops in fix.py