chorus-cli 0.4.6 → 0.4.7
package/index.js
CHANGED
@@ -54,9 +54,8 @@ async function getMachineId() {
 
   if (!rawId) {
     rawId = randomUUID();
-
-    await fs.
-    await fs.writeFile(path.join(configDir2, 'machine-id'), rawId + '\n');
+    await fs.mkdir(configDir, { recursive: true });
+    await fs.writeFile(idPath, rawId + '\n');
   }
 }
 
@@ -581,7 +580,7 @@ async function processTicket(issueArg, { useSuper = false, skipQA = false, qaNam
     efs(CONFIG.ai.venvPython, ['-m', 'pip', 'install', '-r', reqFile], { stdio: 'inherit' });
   }
 
-  // 0a. Verify no modified tracked files (untracked files like .
+  // 0a. Verify no modified tracked files (untracked files like .chorus/ are fine)
   const { stdout: gitStatus } = await execPromise('git status --porcelain --untracked-files=no');
   if (gitStatus.trim()) {
    console.error('⚠️ Working directory has uncommitted changes. Commit or stash first:');
@@ -907,7 +906,7 @@ async function setupGitHub() {
   CONFIG.github.repo = repo;
   if (token) CONFIG.github.token = token;
 
-  console.log(`\n✅ GitHub config saved
+  console.log(`\n✅ GitHub config saved (${owner}/${repo})\n`);
 }
 
 async function setupProxyAuth() {
@@ -1003,7 +1002,7 @@ async function setupProxyAuth() {
   CONFIG.ai.chorusApiKey = apiKey;
   process.env.CHORUS_API_KEY = apiKey;
 
-  console.log(`\n✅ Chorus API key saved
+  console.log(`\n✅ Chorus API key saved\n`);
 }
 
 async function setupTeamsAuth() {
@@ -1028,7 +1027,7 @@ async function setupTeamsAuth() {
   await context.storageState({ path: CONFIG.teams.authPath });
   await browser.close();
 
-  console.log(`\n✅
+  console.log(`\n✅ Teams authentication saved`);
 }
 
 async function setupSlack() {
@@ -1076,7 +1075,7 @@ async function setupSlack() {
   CONFIG.messenger = 'slack';
   CONFIG.slack.botToken = token;
 
-  console.log(`\n✅ Slack config saved
+  console.log(`\n✅ Slack config saved\n`);
 }
 
 async function setupAzureDevOps() {
@@ -1118,7 +1117,7 @@ async function setupAzureDevOps() {
   CONFIG.azuredevops.repo = adoRepo;
   if (adoPat) CONFIG.azuredevops.pat = adoPat;
 
-  console.log(`\n✅ Azure DevOps config saved
+  console.log(`\n✅ Azure DevOps config saved (${adoOrg}/${adoProject}/${adoRepo})\n`);
 }
 
 async function setup() {
package/package.json
CHANGED
Binary file (contents not rendered by the diff viewer)
package/tools/coder.py
CHANGED
@@ -785,7 +785,6 @@ def stream_response(client, messages, system):
     tool_results = []
     for tool_call in message.tool_calls:
         function_name = tool_call.function.name
-        import json
         try:
             arguments = json.loads(tool_call.function.arguments)
         except json.JSONDecodeError:
@@ -915,7 +914,6 @@ def run_prompt(client, prompt, plan_system, exec_system):
     tool_results = []
     for tool_call in message.tool_calls:
         function_name = tool_call.function.name
-        import json
         try:
             arguments = json.loads(tool_call.function.arguments)
         except json.JSONDecodeError:
@@ -1064,7 +1062,7 @@ def main():
 
     # Load codebase map if available
     map_suffix = ""
-    map_file = Path.cwd() / ".
+    map_file = Path.cwd() / ".chorus" / "map.md"
     if map_file.exists():
         try:
             map_content = map_file.read_text(encoding="utf-8").strip()
@@ -1115,7 +1113,7 @@ def main():
     print(f"{C.DIM}Commands: /clear /quit /help{C.RESET}")
     print()
 
-    histfile = os.path.expanduser("~/.
+    histfile = os.path.expanduser("~/.chorus_history")
     try:
         readline.read_history_file(histfile)
     except (FileNotFoundError, OSError, PermissionError):
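For context only, a minimal standalone sketch of the prompt-history pattern around the renamed ~/.chorus_history path. The read side mirrors the hunk above; persisting on exit via atexit is an assumption for illustration, not something this diff shows.

# Illustrative sketch, not taken from coder.py: load and persist readline
# history at the new ~/.chorus_history path.
import atexit
import os
import readline

histfile = os.path.expanduser("~/.chorus_history")
try:
    readline.read_history_file(histfile)     # same read pattern as the diff above
except (FileNotFoundError, OSError, PermissionError):
    pass                                     # first run or unreadable file is fine

atexit.register(readline.write_history_file, histfile)  # assumption: persist on exit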
package/tools/mapper.py
CHANGED
@@ -2,7 +2,7 @@
 """
 mapper.py — Generate a codebase map for the coder agent.
 
-Scans a project directory and produces a compact .
+Scans a project directory and produces a compact .chorus/map.md that coder
 loads into its system prompt, so it starts every session already knowing
 the file structure, key modules, exports, dependencies, and test status.
 
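The docstring above describes the flow: scan the project, write .chorus/map.md, let the coder load it. As an illustration only (not part of the package), here is a small sketch of driving that flow programmatically; generate_map and the .chorus/map.md output path are taken from the hunks below, while the sys.path setup assumes you run it from the unpacked package root.

# Illustrative sketch: regenerate the codebase map without invoking
# tools/mapper.py as a script.
import sys
from pathlib import Path

sys.path.insert(0, "tools")          # assumption: run from the unpacked package root
from mapper import generate_map      # defined in tools/mapper.py (see hunks below)

root = Path.cwd()
map_text = generate_map(root)
out_dir = root / ".chorus"           # same output location main() uses below
out_dir.mkdir(exist_ok=True)
(out_dir / "map.md").write_text(map_text, encoding="utf-8")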
@@ -275,6 +275,157 @@ def detect_project(root):
     return info
 
 
+def _parse_jsonc(text):
+    """Parse JSON with trailing commas and // comments (tsconfig, eslint, etc.)."""
+    # Strip single-line comments (but not inside strings)
+    lines = []
+    for line in text.splitlines():
+        stripped = line.lstrip()
+        if stripped.startswith("//"):
+            continue
+        # Remove inline // comments (naive but covers common cases)
+        in_str = False
+        result = []
+        i = 0
+        while i < len(line):
+            ch = line[i]
+            if ch == '"' and (i == 0 or line[i - 1] != '\\'):
+                in_str = not in_str
+            elif ch == '/' and i + 1 < len(line) and line[i + 1] == '/' and not in_str:
+                break
+            result.append(ch)
+            i += 1
+        lines.append(''.join(result))
+    cleaned = '\n'.join(lines)
+    # Strip trailing commas before } or ]
+    cleaned = re.sub(r',\s*([}\]])', r'\1', cleaned)
+    return json.loads(cleaned)
+
+
+def detect_project_rules(root):
+    """Extract project configuration and rules that the coder should follow."""
+    rules = {}
+
+    # TypeScript config — resolve extends chain to find compilerOptions
+    ts_candidates = ["tsconfig.json", "tsconfig.app.json", "tsconfig.build.json"]
+    ts_checked = set()
+    ts_queue = [name for name in ts_candidates if (root / name).exists()]
+    ts_merged = {}
+
+    while ts_queue:
+        name = ts_queue.pop(0)
+        if name in ts_checked:
+            continue
+        ts_checked.add(name)
+        ts_path = root / name
+        if not ts_path.exists():
+            continue
+        try:
+            data = _parse_jsonc(ts_path.read_text())
+            # Follow extends
+            extends = data.get("extends")
+            if extends and isinstance(extends, str):
+                ext_name = extends.lstrip("./")
+                if ext_name not in ts_checked:
+                    ts_queue.append(ext_name)
+            # Merge compilerOptions (later files override earlier)
+            compiler = data.get("compilerOptions", {})
+            for key in ("strict", "target", "module", "moduleResolution",
+                        "jsx", "baseUrl", "paths", "esModuleInterop",
+                        "noImplicitAny", "strictNullChecks", "skipLibCheck"):
+                if key in compiler:
+                    ts_merged[key] = compiler[key]
+            if data.get("include"):
+                ts_merged["include"] = data["include"]
+            if data.get("exclude"):
+                ts_merged["exclude"] = data["exclude"]
+        except (json.JSONDecodeError, OSError, ValueError):
+            pass
+
+    if ts_merged:
+        rules["tsconfig.json"] = ts_merged
+
+    # ESLint config
+    eslint_files = [
+        ".eslintrc.json", ".eslintrc.js", ".eslintrc.cjs", ".eslintrc.yml",
+        ".eslintrc.yaml", ".eslintrc", "eslint.config.js", "eslint.config.mjs",
+        "eslint.config.cjs",
+    ]
+    for name in eslint_files:
+        eslint_path = root / name
+        if eslint_path.exists():
+            try:
+                content = eslint_path.read_text(encoding="utf-8", errors="replace")
+                # For JSON configs, parse and extract key fields
+                if name.endswith(".json") or name == ".eslintrc":
+                    try:
+                        data = _parse_jsonc(content)
+                        summary = {}
+                        if data.get("extends"):
+                            summary["extends"] = data["extends"]
+                        if data.get("parser"):
+                            summary["parser"] = data["parser"]
+                        if data.get("rules"):
+                            summary["rules"] = data["rules"]
+                        rules[name] = summary
+                    except json.JSONDecodeError:
+                        # Treat as raw text
+                        if len(content) <= 3000:
+                            rules[name] = content
+                else:
+                    # JS/YAML configs — include raw (truncated)
+                    if len(content) <= 3000:
+                        rules[name] = content
+                    else:
+                        rules[name] = content[:3000] + "\n... (truncated)"
+            except OSError:
+                pass
+            break  # Only include the first eslint config found
+
+    # Prettier config
+    prettier_files = [
+        ".prettierrc", ".prettierrc.json", ".prettierrc.js", ".prettierrc.cjs",
+        ".prettierrc.yml", ".prettierrc.yaml", "prettier.config.js",
+        "prettier.config.cjs",
+    ]
+    for name in prettier_files:
+        p_path = root / name
+        if p_path.exists():
+            try:
+                content = p_path.read_text(encoding="utf-8", errors="replace")
+                if len(content) <= 1000:
+                    rules[name] = content
+            except OSError:
+                pass
+            break
+
+    # .editorconfig
+    ec_path = root / ".editorconfig"
+    if ec_path.exists():
+        try:
+            content = ec_path.read_text(encoding="utf-8", errors="replace")
+            if len(content) <= 1000:
+                rules[".editorconfig"] = content
+        except OSError:
+            pass
+
+    # AI agent instructions (CLAUDE.md, CONTRIBUTING.md, .cursorrules)
+    for name in ("CLAUDE.md", ".claude", "CONTRIBUTING.md", ".cursorrules",
+                 ".github/CONTRIBUTING.md"):
+        ai_path = root / name
+        if ai_path.exists() and ai_path.is_file():
+            try:
+                content = ai_path.read_text(encoding="utf-8", errors="replace")
+                if len(content) <= 5000:
+                    rules[name] = content
+                else:
+                    rules[name] = content[:5000] + "\n... (truncated)"
+            except OSError:
+                pass
+
+    return rules
+
+
 def detect_tests(root):
     """Check if the project has substantive test coverage."""
     test_dir_names = {"test", "tests", "__tests__", "spec", "specs", "test_suite"}
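As an illustration only (not part of the package), a quick sketch of what the new _parse_jsonc helper accepts: it drops // comments and trailing commas, which is what tsconfig-style files need. The sys.path setup is the same assumption as above (run from the unpacked package root).

# Illustrative sketch: exercise the new JSONC helper on a tsconfig-style snippet.
import sys

sys.path.insert(0, "tools")          # assumption: run from the unpacked package root
from mapper import _parse_jsonc

sample = """{
  // compiler settings
  "compilerOptions": {
    "strict": true,  // enforce strict mode
    "target": "ES2022",
  },
}"""

print(_parse_jsonc(sample))
# expected: {'compilerOptions': {'strict': True, 'target': 'ES2022'}}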
@@ -364,6 +515,22 @@ def generate_map(root):
     lines.append(f" Dev dependencies: {dev_str}")
     lines.append("")
 
+    # Project rules and configs
+    rules = detect_project_rules(root)
+    if rules:
+        lines.append("PROJECT RULES")
+        for name, value in rules.items():
+            lines.append(f" {name}:")
+            if isinstance(value, dict):
+                for k, v in value.items():
+                    lines.append(f" {k}: {json.dumps(v) if not isinstance(v, str) else v}")
+            elif isinstance(value, str):
+                for vline in value.splitlines():
+                    lines.append(f" {vline}")
+            else:
+                lines.append(f" {value}")
+        lines.append("")
+
     # Tests
     test_status, test_files = detect_tests(root)
     lines.append("TESTS")
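As a usage illustration only, a small sketch that prints which rule sources detect_project_rules picks up for the current repository and whether each is a summarized dict or raw text, under the same import assumption as above.

# Illustrative sketch: see which config/rule files feed the PROJECT RULES block.
import sys
from pathlib import Path

sys.path.insert(0, "tools")          # assumption: run from the unpacked package root
from mapper import detect_project_rules

rules = detect_project_rules(Path.cwd())
for name, value in rules.items():
    kind = "summary dict" if isinstance(value, dict) else "raw text"
    print(f"{name}: {kind}")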
@@ -434,22 +601,22 @@ def main():
 
     map_content = generate_map(root)
 
-    out_dir = root / ".
+    out_dir = root / ".chorus"
     out_dir.mkdir(exist_ok=True)
     out_file = out_dir / "map.md"
     out_file.write_text(map_content, encoding="utf-8")
 
-    # Add .
+    # Add .chorus to .gitignore if not already there
     gitignore = root / ".gitignore"
     if gitignore.exists():
         gi_text = gitignore.read_text()
-        if ".
+        if ".chorus" not in gi_text and ".chorus/" not in gi_text:
             with open(gitignore, "a") as f:
-                f.write("\n.
-                print(f"Added .
+                f.write("\n.chorus/\n")
+                print(f"Added .chorus/ to .gitignore", file=sys.stderr)
     elif (root / ".git").is_dir():
-        gitignore.write_text(".
-        print(f"Created .gitignore with .
+        gitignore.write_text(".chorus/\n")
+        print(f"Created .gitignore with .chorus/", file=sys.stderr)
 
     # Stats
     line_count = map_content.count("\n")