@ouro.bot/cli 0.0.1-alpha.0 → 0.1.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AdoptionSpecialist.ouro/agent.json +20 -0
- package/AdoptionSpecialist.ouro/psyche/SOUL.md +22 -0
- package/AdoptionSpecialist.ouro/psyche/identities/basilisk.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/jafar.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/jormungandr.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/kaa.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/medusa.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/monty.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/nagini.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/ouroboros.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/python.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/quetzalcoatl.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/sir-hiss.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/the-serpent.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/the-snake.md +31 -0
- package/README.md +224 -6
- package/dist/heart/agent-entry.js +17 -0
- package/dist/heart/api-error.js +34 -0
- package/dist/heart/config.js +296 -0
- package/dist/heart/core.js +485 -0
- package/dist/heart/daemon/daemon-cli.js +626 -0
- package/dist/heart/daemon/daemon-entry.js +74 -0
- package/dist/heart/daemon/daemon.js +310 -0
- package/dist/heart/daemon/hatch-flow.js +284 -0
- package/dist/heart/daemon/hatch-specialist.js +107 -0
- package/dist/heart/daemon/health-monitor.js +79 -0
- package/dist/heart/daemon/message-router.js +98 -0
- package/dist/heart/daemon/ouro-bot-entry.js +23 -0
- package/dist/heart/daemon/ouro-bot-wrapper.js +90 -0
- package/dist/heart/daemon/ouro-entry.js +23 -0
- package/dist/heart/daemon/ouro-uti.js +212 -0
- package/dist/heart/daemon/process-manager.js +220 -0
- package/dist/heart/daemon/runtime-logging.js +98 -0
- package/dist/heart/daemon/subagent-installer.js +125 -0
- package/dist/heart/daemon/task-scheduler.js +237 -0
- package/dist/heart/harness.js +26 -0
- package/dist/heart/identity.js +270 -0
- package/dist/heart/kicks.js +144 -0
- package/dist/heart/primitives.js +4 -0
- package/dist/heart/providers/anthropic.js +329 -0
- package/dist/heart/providers/azure.js +66 -0
- package/dist/heart/providers/minimax.js +53 -0
- package/dist/heart/providers/openai-codex.js +162 -0
- package/dist/heart/streaming.js +412 -0
- package/dist/heart/turn-coordinator.js +62 -0
- package/dist/inner-worker-entry.js +4 -0
- package/dist/mind/associative-recall.js +176 -0
- package/dist/mind/bundle-manifest.js +118 -0
- package/dist/mind/context.js +218 -0
- package/dist/mind/first-impressions.js +43 -0
- package/dist/mind/format.js +56 -0
- package/dist/mind/friends/channel.js +41 -0
- package/dist/mind/friends/resolver.js +84 -0
- package/dist/mind/friends/store-file.js +171 -0
- package/dist/mind/friends/store.js +4 -0
- package/dist/mind/friends/tokens.js +26 -0
- package/dist/mind/friends/types.js +21 -0
- package/dist/mind/memory.js +326 -0
- package/dist/mind/phrases.js +43 -0
- package/dist/mind/prompt.js +254 -0
- package/dist/mind/token-estimate.js +119 -0
- package/dist/nerves/cli-logging.js +31 -0
- package/dist/nerves/coverage/audit-rules.js +81 -0
- package/dist/nerves/coverage/audit.js +200 -0
- package/dist/nerves/coverage/cli-main.js +5 -0
- package/dist/nerves/coverage/cli.js +51 -0
- package/dist/nerves/coverage/contract.js +23 -0
- package/dist/nerves/coverage/file-completeness.js +46 -0
- package/dist/nerves/coverage/run-artifacts.js +77 -0
- package/dist/nerves/coverage/source-scanner.js +34 -0
- package/dist/nerves/index.js +152 -0
- package/dist/nerves/runtime.js +38 -0
- package/dist/repertoire/ado-client.js +211 -0
- package/dist/repertoire/ado-context.js +73 -0
- package/dist/repertoire/ado-semantic.js +841 -0
- package/dist/repertoire/ado-templates.js +146 -0
- package/dist/repertoire/coding/index.js +36 -0
- package/dist/repertoire/coding/manager.js +489 -0
- package/dist/repertoire/coding/monitor.js +60 -0
- package/dist/repertoire/coding/reporter.js +45 -0
- package/dist/repertoire/coding/spawner.js +102 -0
- package/dist/repertoire/coding/tools.js +167 -0
- package/dist/repertoire/coding/types.js +2 -0
- package/dist/repertoire/data/ado-endpoints.json +122 -0
- package/dist/repertoire/data/graph-endpoints.json +212 -0
- package/dist/repertoire/github-client.js +64 -0
- package/dist/repertoire/graph-client.js +118 -0
- package/dist/repertoire/skills.js +156 -0
- package/dist/repertoire/tasks/board.js +122 -0
- package/dist/repertoire/tasks/index.js +210 -0
- package/dist/repertoire/tasks/lifecycle.js +80 -0
- package/dist/repertoire/tasks/middleware.js +65 -0
- package/dist/repertoire/tasks/parser.js +173 -0
- package/dist/repertoire/tasks/scanner.js +132 -0
- package/dist/repertoire/tasks/transitions.js +145 -0
- package/dist/repertoire/tasks/types.js +2 -0
- package/dist/repertoire/tools-base.js +622 -0
- package/dist/repertoire/tools-github.js +53 -0
- package/dist/repertoire/tools-teams.js +308 -0
- package/dist/repertoire/tools.js +199 -0
- package/dist/senses/cli-entry.js +15 -0
- package/dist/senses/cli.js +523 -0
- package/dist/senses/commands.js +98 -0
- package/dist/senses/inner-dialog-worker.js +61 -0
- package/dist/senses/inner-dialog.js +216 -0
- package/dist/senses/teams-entry.js +15 -0
- package/dist/senses/teams.js +695 -0
- package/dist/senses/trust-gate.js +150 -0
- package/package.json +34 -11
- package/subagents/README.md +71 -0
- package/subagents/work-doer.md +233 -0
- package/subagents/work-merger.md +593 -0
- package/subagents/work-planner.md +373 -0
- package/bin/ouro.js +0 -6
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.runAuditCli = runAuditCli;
|
|
4
|
+
const fs_1 = require("fs");
|
|
5
|
+
const path_1 = require("path");
|
|
6
|
+
const audit_1 = require("./audit");
|
|
7
|
+
const run_artifacts_1 = require("./run-artifacts");
|
|
8
|
+
/**
 * Parse the known audit CLI flags out of argv.
 * Each flag consumes the token that follows it; flags with no following
 * token are ignored. Unknown tokens are skipped.
 *
 * @param argv - raw CLI arguments (no node/script prefix)
 * @returns object with any of runDir/eventsPath/perTestPath/sourceRoot/output set
 */
function parseArgs(argv) {
    // Map from CLI flag to the option key it populates.
    const flagKeys = {
        "--run-dir": "runDir",
        "--events-path": "eventsPath",
        "--per-test-path": "perTestPath",
        "--source-root": "sourceRoot",
        "--output": "output",
    };
    const parsed = {};
    argv.forEach((token, index) => {
        const value = argv[index + 1];
        if (!value)
            return;
        const key = flagKeys[token];
        if (key) {
            parsed[key] = value;
        }
    });
    return parsed;
}
|
|
28
|
+
/**
 * Entry point for the nerves coverage audit CLI.
 *
 * Resolves the run directory from --run-dir or the recorded latest run,
 * derives artifact paths (overridable via flags), runs the audit, and writes
 * the JSON report to --output (default: <runDir>/nerves-coverage.json).
 *
 * @param argv - CLI arguments (flags only, no node/script prefix)
 * @returns 0 on pass, 1 on audit failure, 2 when no run directory is known
 */
function runAuditCli(argv) {
    const options = parseArgs(argv);
    const recorded = (0, run_artifacts_1.readLatestRun)();
    const runDir = options.runDir ?? recorded?.run_dir;
    if (!runDir) {
        // eslint-disable-next-line no-console -- meta-tooling: audit error message
        console.error("nerves audit: no run directory found; provide --run-dir");
        return 2;
    }
    // Flags win; otherwise fall back to conventional artifact locations.
    const auditInputs = {
        eventsPath: options.eventsPath ?? (0, path_1.join)(runDir, "vitest-events.ndjson"),
        perTestPath: options.perTestPath ?? (0, path_1.join)(runDir, "vitest-events-per-test.json"),
        sourceRoot: options.sourceRoot ?? (0, path_1.resolve)("src"),
    };
    const outputPath = options.output ?? (0, path_1.join)(runDir, "nerves-coverage.json");
    const report = (0, audit_1.auditNervesCoverage)(auditInputs);
    (0, fs_1.mkdirSync)((0, path_1.dirname)(outputPath), { recursive: true });
    (0, fs_1.writeFileSync)(outputPath, JSON.stringify(report, null, 2), "utf8");
    // eslint-disable-next-line no-console -- meta-tooling: audit result message
    console.log(`nerves audit: ${report.overall_status} (${outputPath})`);
    return report.overall_status === "pass" ? 0 : 1;
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.SENSITIVE_PATTERNS = exports.REQUIRED_ENVELOPE_FIELDS = void 0;
|
|
4
|
+
exports.eventKey = eventKey;
|
|
5
|
+
// Envelope fields that every nerves log event must carry; the audit checks
// each emitted event object for all of these keys.
exports.REQUIRED_ENVELOPE_FIELDS = [
    "ts",
    "level",
    "event",
    "trace_id",
    "component",
    "message",
    "meta",
];
// Case-insensitive patterns suggesting credentials or secrets leaked into
// log output (tokens, API keys, passwords, auth headers).
exports.SENSITIVE_PATTERNS = [
    /\btoken\s*[:=]/i,
    /\bapi[_-]?key\b/i,
    /\bpassword\b/i,
    /\bsecret\b/i,
    /\bauthorization\b/i,
];
|
|
21
|
+
/** Build the canonical "component:event" key used to index nerves events. */
function eventKey(component, event) {
    return [component, event].join(":");
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* File completeness check (Rule 5).
|
|
4
|
+
*
|
|
5
|
+
* Every production file with executable code must have at least one
|
|
6
|
+
* emitNervesEvent call. Type-only files (containing only type/interface/enum
|
|
7
|
+
* declarations) are exempt.
|
|
8
|
+
*/
|
|
9
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
10
|
+
exports.isTypeOnlyFile = isTypeOnlyFile;
|
|
11
|
+
exports.checkFileCompleteness = checkFileCompleteness;
|
|
12
|
+
/**
 * Report whether a source file is type-only, i.e. carries no executable code.
 * Heuristic: the file never declares a function, class, or const/let/var
 * binding (so it contains only type/interface/enum-style declarations).
 */
function isTypeOnlyFile(source) {
    // Any of these declaration keywords marks the file as executable.
    return /\b(function|class|const|let|var)\s/.test(source) === false;
}
|
|
21
|
+
/**
 * Check that all production files have at least one emitNervesEvent call
 * (Rule 5). Files whose source is type-only are exempt rather than missing.
 *
 * @param filesWithKeys - Map of filePath -> keys found by source scanner
 * @param fileContents - Map of filePath -> source content for ALL production files
 * @returns {status, missing, exempt} with both path lists sorted
 */
function checkFileCompleteness(filesWithKeys, fileContents) {
    const missing = [];
    const exempt = [];
    for (const [filePath, source] of fileContents) {
        const keys = filesWithKeys.has(filePath) ? filesWithKeys.get(filePath) : [];
        if (keys.length > 0) {
            continue; // file already emits at least one nerves event
        }
        // No keys found: decide between "exempt" (type-only) and "missing".
        (isTypeOnlyFile(source) ? exempt : missing).push(filePath);
    }
    return {
        status: missing.length === 0 ? "pass" : "fail",
        missing: missing.sort(),
        exempt: exempt.sort(),
    };
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.REPO_SLUG = void 0;
|
|
4
|
+
exports.getTestRunsRoot = getTestRunsRoot;
|
|
5
|
+
exports.createRunId = createRunId;
|
|
6
|
+
exports.getRunDir = getRunDir;
|
|
7
|
+
exports.writeActiveRun = writeActiveRun;
|
|
8
|
+
exports.readActiveRun = readActiveRun;
|
|
9
|
+
exports.clearActiveRun = clearActiveRun;
|
|
10
|
+
exports.writeLatestRun = writeLatestRun;
|
|
11
|
+
exports.readLatestRun = readLatestRun;
|
|
12
|
+
const fs_1 = require("fs");
|
|
13
|
+
const path_1 = require("path");
|
|
14
|
+
const os_1 = require("os");
|
|
15
|
+
// Repository slug used to namespace test-run artifacts under the shared state dir.
exports.REPO_SLUG = "ouroboros-agent-harness";
/** Root directory holding all recorded test runs for the given repository. */
function getTestRunsRoot(repoSlug = exports.REPO_SLUG) {
    const stateDir = (0, path_1.join)((0, os_1.homedir)(), ".agentstate");
    return (0, path_1.join)(stateDir, "test-runs", repoSlug);
}
|
|
19
|
+
/** Derive a filesystem-safe run id from a timestamp (colons/dots become dashes). */
function createRunId(now = new Date()) {
    const stamp = now.toISOString();
    return stamp.replace(/[:.]/g, "-");
}
|
|
22
|
+
/** Resolve a file or directory name directly inside the test-runs root. */
function inRunsRoot(name, repoSlug) {
    return (0, path_1.join)(getTestRunsRoot(repoSlug), name);
}
/** Directory holding the artifacts for one specific run id. */
function getRunDir(runId, repoSlug = exports.REPO_SLUG) {
    return inRunsRoot(runId, repoSlug);
}
/** Marker file recording the currently active (in-progress) run. */
function getActiveRunPath(repoSlug = exports.REPO_SLUG) {
    return inRunsRoot(".active-run.json", repoSlug);
}
/** Marker file recording the most recently completed run. */
function getLatestRunPath(repoSlug = exports.REPO_SLUG) {
    return inRunsRoot("latest-run.json", repoSlug);
}
|
|
31
|
+
/** Create (if needed) and return the test-runs root directory. */
function ensureRoot(repoSlug = exports.REPO_SLUG) {
    const rootDir = getTestRunsRoot(repoSlug);
    (0, fs_1.mkdirSync)(rootDir, { recursive: true });
    return rootDir;
}
/** Persist the active-run marker as pretty-printed JSON. */
function writeActiveRun(info) {
    ensureRoot(info.repo_slug);
    const serialized = JSON.stringify(info, null, 2);
    (0, fs_1.writeFileSync)(getActiveRunPath(info.repo_slug), serialized, "utf8");
}
|
|
40
|
+
/**
 * Load and validate the active-run marker.
 * Returns null when the marker is absent, unreadable, malformed JSON, or
 * missing the required run_id/run_dir fields.
 */
function readActiveRun(repoSlug = exports.REPO_SLUG) {
    const markerPath = getActiveRunPath(repoSlug);
    if (!(0, fs_1.existsSync)(markerPath))
        return null;
    try {
        const raw = (0, fs_1.readFileSync)(markerPath, "utf8");
        const parsed = JSON.parse(raw);
        // Both identifying fields must be present for the record to be usable.
        return parsed.run_id && parsed.run_dir ? parsed : null;
    }
    catch {
        // Corrupt or unreadable marker files are treated as "no active run".
        return null;
    }
}
|
|
54
|
+
/** Remove the active-run marker if one exists; no-op otherwise. */
function clearActiveRun(repoSlug = exports.REPO_SLUG) {
    const markerPath = getActiveRunPath(repoSlug);
    if (!(0, fs_1.existsSync)(markerPath))
        return;
    (0, fs_1.unlinkSync)(markerPath);
}
/** Persist the latest-run marker as pretty-printed JSON. */
function writeLatestRun(info) {
    ensureRoot(info.repo_slug);
    const serialized = JSON.stringify(info, null, 2);
    (0, fs_1.writeFileSync)(getLatestRunPath(info.repo_slug), serialized, "utf8");
}
|
|
64
|
+
/**
 * Load and validate the latest-run marker.
 * Returns null when the marker is absent, unreadable, malformed JSON, or
 * missing the required run_id/run_dir fields.
 */
function readLatestRun(repoSlug = exports.REPO_SLUG) {
    const markerPath = getLatestRunPath(repoSlug);
    if (!(0, fs_1.existsSync)(markerPath))
        return null;
    try {
        const raw = (0, fs_1.readFileSync)(markerPath, "utf8");
        const parsed = JSON.parse(raw);
        // Both identifying fields must be present for the record to be usable.
        return parsed.run_id && parsed.run_dir ? parsed : null;
    }
    catch {
        // Corrupt or unreadable marker files are treated as missing.
        return null;
    }
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Static source scanner for emitNervesEvent calls.
|
|
4
|
+
*
|
|
5
|
+
* Extracts component:event keys from production source files by
|
|
6
|
+
* regex-matching emitNervesEvent({ component: "...", event: "..." })
|
|
7
|
+
* calls. Only accepts static string literals (single or double quotes).
|
|
8
|
+
* Template literals and variable references are rejected.
|
|
9
|
+
*/
|
|
10
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
11
|
+
exports.scanSourceForNervesKeys = scanSourceForNervesKeys;
|
|
12
|
+
// Matches emitNervesEvent({ ... }) call sites; capture group 1 is the argument body.
const EMIT_CALL_RE = /emitNervesEvent\s*\(\s*\{([\s\S]*?)\}\s*\)/g;
/**
 * Pull a static single- or double-quoted string literal assigned to `field`
 * out of a call-argument body. Returns null when no such literal exists
 * (template literals and variable references do not match).
 */
function extractStringLiteral(block, field) {
    const literalRe = new RegExp(`${field}\\s*:\\s*(['"])((?:(?!\\1).)+)\\1`);
    const found = literalRe.exec(block);
    return found === null ? null : found[2];
}
/**
 * Scan a source file's content for emitNervesEvent calls and extract
 * component:event keys. Only static string literals are accepted.
 * Returns the deduplicated keys in sorted order.
 */
function scanSourceForNervesKeys(source) {
    const keys = new Set();
    for (const hit of source.matchAll(EMIT_CALL_RE)) {
        const argBody = hit[1];
        const component = extractStringLiteral(argBody, "component");
        const event = extractStringLiteral(argBody, "event");
        if (component && event) {
            keys.add(`${component}:${event}`);
        }
    }
    return [...keys].sort();
}
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createTraceId = createTraceId;
|
|
4
|
+
exports.ensureTraceId = ensureTraceId;
|
|
5
|
+
exports.createFanoutSink = createFanoutSink;
|
|
6
|
+
exports.formatTerminalEntry = formatTerminalEntry;
|
|
7
|
+
exports.createTerminalSink = createTerminalSink;
|
|
8
|
+
exports.createStderrSink = createStderrSink;
|
|
9
|
+
exports.createNdjsonFileSink = createNdjsonFileSink;
|
|
10
|
+
exports.registerGlobalLogSink = registerGlobalLogSink;
|
|
11
|
+
exports.createLogger = createLogger;
|
|
12
|
+
const fs_1 = require("fs");
|
|
13
|
+
const path_1 = require("path");
|
|
14
|
+
const crypto_1 = require("crypto");
|
|
15
|
+
// Numeric severity ordering; higher numbers are more severe.
const LEVEL_PRIORITY = {
    debug: 10,
    info: 20,
    warn: 30,
    error: 40,
};
// Shared across module instances (e.g. duplicate installs) via the global symbol registry.
const GLOBAL_SINKS_KEY = Symbol.for("ouroboros.nerves.global-sinks");
/** Fetch (or lazily install) the process-wide set of auxiliary log sinks. */
function resolveGlobalSinks() {
    const scope = globalThis;
    if (scope[GLOBAL_SINKS_KEY] instanceof Set) {
        return scope[GLOBAL_SINKS_KEY];
    }
    const sinks = new Set();
    scope[GLOBAL_SINKS_KEY] = sinks;
    return sinks;
}
const globalSinks = resolveGlobalSinks();
/** True when an event at eventLevel meets or exceeds the configured minimum level. */
function shouldEmit(configuredLevel, eventLevel) {
    return LEVEL_PRIORITY[configuredLevel] <= LEVEL_PRIORITY[eventLevel];
}
|
|
36
|
+
/** Generate a fresh random UUID trace id. */
function createTraceId() {
    return (0, crypto_1.randomUUID)();
}
/** Return the given trace id when it is a non-blank string; otherwise mint a new one. */
function ensureTraceId(traceId) {
    if (traceId && traceId.trim()) {
        return traceId;
    }
    return createTraceId();
}
|
|
42
|
+
/**
 * Combine several sinks into one. Each entry is delivered to every sink in
 * order; a throwing sink never prevents delivery to the remaining sinks.
 */
function createFanoutSink(sinks) {
    return (entry) => {
        sinks.forEach((sink) => {
            try {
                sink(entry);
            }
            catch {
                // Fanout must stay resilient: one sink failure cannot block others.
            }
        });
    };
}
|
|
54
|
+
/** Render an ISO timestamp as HH:MM:SS (UTC); unparseable input is passed through. */
function formatTerminalTime(ts) {
    const parsed = new Date(ts);
    const isValid = !Number.isNaN(parsed.getTime());
    return isValid ? parsed.toISOString().slice(11, 19) : ts;
}
/** Render meta as a leading-space JSON blob, or nothing when meta is empty. */
function formatTerminalMeta(meta) {
    const hasEntries = Object.keys(meta).length > 0;
    return hasEntries ? ` ${JSON.stringify(meta)}` : "";
}
// ANSI color prefixes per level (dim, cyan, yellow, red).
const LEVEL_COLORS = {
    debug: "\x1b[2m",
    info: "\x1b[36m",
    warn: "\x1b[33m",
    error: "\x1b[31m",
};
/** One-line human-readable rendering of a log entry. */
function formatTerminalEntry(entry) {
    const parts = [
        formatTerminalTime(entry.ts),
        entry.level.toUpperCase(),
        `[${entry.component}]`,
        entry.message,
    ];
    return parts.join(" ") + formatTerminalMeta(entry.meta);
}
/**
 * Sink that writes single-line entries, colorized by default.
 * Writes to stderr unless a custom write function is supplied.
 */
function createTerminalSink(write = (chunk) => process.stderr.write(chunk), colorize = true) {
    return (entry) => {
        const line = formatTerminalEntry(entry);
        if (colorize) {
            write(`${LEVEL_COLORS[entry.level]}${line}\x1b[0m\n`);
        }
        else {
            write(`${line}\n`);
        }
    };
}
/** Colorized terminal sink targeting stderr (back-compat name). */
function createStderrSink(write = (chunk) => process.stderr.write(chunk)) {
    return createTerminalSink(write);
}
|
|
90
|
+
/**
 * Sink that appends each entry as one NDJSON line to filePath.
 * Writes are queued and flushed one at a time through the async appendFile
 * callback chain, so overlapping emits cannot interleave partial lines.
 * NOTE(review): the appendFile callback ignores its error argument, so a
 * failed disk write silently drops that line — confirm this is intended.
 */
function createNdjsonFileSink(filePath) {
    // Ensure the parent directory exists before the first append.
    (0, fs_1.mkdirSync)((0, path_1.dirname)(filePath), { recursive: true });
    const queue = []; // pending serialized lines, oldest first
    let flushing = false; // true while an appendFile call is in flight
    function flush() {
        if (flushing || queue.length === 0)
            return;
        flushing = true;
        const line = queue.shift();
        // Completion re-arms flushing and drains any lines queued meanwhile.
        (0, fs_1.appendFile)(filePath, line, "utf8", () => {
            flushing = false;
            flush();
        });
    }
    return (entry) => {
        queue.push(`${JSON.stringify(entry)}\n`);
        flush();
    };
}
|
|
109
|
+
/**
 * Register an auxiliary sink that receives every entry emitted by every
 * logger in the process. Returns an unsubscribe function.
 */
function registerGlobalLogSink(sink) {
    globalSinks.add(sink);
    return () => {
        globalSinks.delete(sink);
    };
}
/** Deliver one entry to every registered global sink, isolating failures. */
function emitToGlobalSinks(entry) {
    globalSinks.forEach((sink) => {
        try {
            sink(entry);
        }
        catch {
            // Never fail runtime logging if an auxiliary sink errors.
        }
    });
}
|
|
125
|
+
/**
 * Build a leveled logger. Entries below the configured level are dropped;
 * everything else is stamped with a timestamp and fanned out to the
 * configured sinks (default: colorized stderr) plus any process-wide sinks
 * registered via registerGlobalLogSink.
 *
 * @param options - { level?, sinks?, now? } where now is an injectable clock
 */
function createLogger(options = {}) {
    const minLevel = options.level ?? "info";
    const fanout = createFanoutSink(options.sinks ?? [createStderrSink()]);
    const clock = options.now ?? (() => new Date());
    const emit = (level, entry) => {
        if (!shouldEmit(minLevel, level)) {
            return;
        }
        const payload = {
            ts: clock().toISOString(),
            level,
            event: entry.event,
            trace_id: entry.trace_id,
            component: entry.component,
            message: entry.message,
            meta: entry.meta,
        };
        fanout(payload);
        emitToGlobalSinks(payload);
    };
    return {
        debug: (entry) => emit("debug", entry),
        info: (entry) => emit("info", entry),
        warn: (entry) => emit("warn", entry),
        error: (entry) => emit("error", entry),
    };
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.setRuntimeLogger = setRuntimeLogger;
|
|
4
|
+
exports.emitNervesEvent = emitNervesEvent;
|
|
5
|
+
const index_1 = require("./index");
|
|
6
|
+
// Process-wide logger instance; created lazily, replaceable via setRuntimeLogger.
let sharedLogger = null;
/** Return the shared runtime logger, creating a default info-level one on first use. */
function getRuntimeLogger() {
    if (!sharedLogger) {
        sharedLogger = (0, index_1.createLogger)({ level: "info" });
    }
    return sharedLogger;
}
/** Override the shared runtime logger (used by tests and embedders). */
function setRuntimeLogger(logger) {
    sharedLogger = logger;
}
|
|
16
|
+
/**
 * Emit a structured nerves event through the shared runtime logger.
 * Fills in a trace id when one is missing or blank, defaults meta to {}
 * and level to "info"; unrecognized levels are logged at info.
 */
function emitNervesEvent(event) {
    const logger = getRuntimeLogger();
    const payload = {
        event: event.event,
        trace_id: (0, index_1.ensureTraceId)(event.trace_id),
        component: event.component,
        message: event.message,
        meta: event.meta ?? {},
    };
    switch (event.level ?? "info") {
        case "debug":
            logger.debug(payload);
            break;
        case "warn":
            logger.warn(payload);
            break;
        case "error":
            logger.error(payload);
            break;
        default:
            logger.info(payload);
    }
}
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Azure DevOps API client.
|
|
3
|
+
// Provides a generic adoRequest() for arbitrary endpoints
|
|
4
|
+
// and a thin queryWorkItems() wrapper for backward compatibility.
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.adoRequest = adoRequest;
|
|
7
|
+
exports.queryWorkItems = queryWorkItems;
|
|
8
|
+
exports.discoverOrganizations = discoverOrganizations;
|
|
9
|
+
exports.discoverProjects = discoverProjects;
|
|
10
|
+
const api_error_1 = require("../heart/api-error");
|
|
11
|
+
const runtime_1 = require("../nerves/runtime");
|
|
12
|
+
// Service endpoints and the API version pinned for all requests.
const ADO_BASE = "https://dev.azure.com";
const VSSPS_BASE = "https://app.vssps.visualstudio.com";
const DEFAULT_API_VERSION = "api-version=7.1";
/** Append the pinned api-version query parameter unless the path already has one. */
function ensureApiVersion(path) {
    if (path.includes("api-version=")) {
        return path;
    }
    const separator = path.includes("?") ? "&" : "?";
    return `${path}${separator}${DEFAULT_API_VERSION}`;
}
|
|
21
|
+
/**
 * Pick the Content-Type for an ADO request: POST/PATCH against the
 * work-items endpoints require the JSON Patch media type; everything
 * else uses plain JSON.
 */
function resolveContentType(method, path) {
    const verb = method.toUpperCase();
    if (verb !== "POST" && verb !== "PATCH") {
        return "application/json";
    }
    const targetsWorkItem = path.toLowerCase().includes("/_apis/wit/workitems/");
    return targetsWorkItem ? "application/json-patch+json" : "application/json";
}
|
|
30
|
+
/**
 * Generic ADO API request. Returns the response body as a pretty-printed
 * JSON string; HTTP and thrown errors are routed through handleApiError
 * (its formatted string is returned rather than throwing).
 *
 * Emits client.request_start / client.request_end / client.error nerves
 * events around the call.
 *
 * @param token - bearer token for dev.azure.com
 * @param method - HTTP method (case preserved for fetch; content-type detection is case-insensitive)
 * @param org - ADO organization name, joined into the URL path
 * @param path - API path beginning with "/"; api-version=7.1 is appended if absent
 * @param body - optional pre-serialized request body
 */
async function adoRequest(token, method, org, path, body) {
    try {
        (0, runtime_1.emitNervesEvent)({
            event: "client.request_start",
            component: "clients",
            message: "starting ADO request",
            meta: { client: "ado", method, org, path },
        });
        const fullPath = ensureApiVersion(path);
        const url = `${ADO_BASE}/${org}${fullPath}`;
        // Work-item mutations need the JSON Patch media type.
        const contentType = resolveContentType(method, path);
        const opts = {
            method,
            headers: {
                Authorization: `Bearer ${token}`,
                "Content-Type": contentType,
            },
        };
        if (body)
            opts.body = body;
        const res = await fetch(url, opts);
        if (!res.ok) {
            (0, runtime_1.emitNervesEvent)({
                level: "error",
                event: "client.error",
                component: "clients",
                message: "ADO request failed",
                meta: { client: "ado", method, org, path, status: res.status },
            });
            return (0, api_error_1.handleApiError)(res, "ADO", "ado");
        }
        const data = await res.json();
        (0, runtime_1.emitNervesEvent)({
            event: "client.request_end",
            component: "clients",
            message: "ADO request completed",
            meta: { client: "ado", method, org, path, success: true },
        });
        return JSON.stringify(data, null, 2);
    }
    catch (err) {
        // Network failures and JSON parse errors land here.
        (0, runtime_1.emitNervesEvent)({
            level: "error",
            event: "client.error",
            component: "clients",
            message: "ADO request threw exception",
            meta: {
                client: "ado",
                method,
                org,
                path,
                reason: err instanceof Error ? err.message : String(err),
            },
        });
        return (0, api_error_1.handleApiError)(err, "ADO", "ado");
    }
}
|
|
88
|
+
/**
 * Backward-compatible thin wrapper: runs a WIQL query and returns the
 * matching work items as one formatted line each
 * ("#id: title [state] (assignee)"), joined with newlines.
 *
 * Two-step flow: (1) POST the WIQL to get work item IDs, (2) batch-fetch
 * details for at most the first 200 IDs. HTTP and thrown errors are routed
 * through handleApiError; an empty result returns a fixed message string.
 *
 * @param token - bearer token for dev.azure.com
 * @param org - ADO organization name
 * @param query - WIQL query text
 */
async function queryWorkItems(token, org, query) {
    try {
        (0, runtime_1.emitNervesEvent)({
            event: "client.request_start",
            component: "clients",
            message: "starting ADO work item query",
            meta: { client: "ado", org, operation: "queryWorkItems" },
        });
        // Step 1: Run WIQL query to get work item IDs
        const wiqlRes = await fetch(`${ADO_BASE}/${org}/_apis/wit/wiql?${DEFAULT_API_VERSION}`, {
            method: "POST",
            headers: {
                Authorization: `Bearer ${token}`,
                "Content-Type": "application/json",
            },
            body: JSON.stringify({ query }),
        });
        if (!wiqlRes.ok) {
            (0, runtime_1.emitNervesEvent)({
                level: "error",
                event: "client.error",
                component: "clients",
                message: "ADO WIQL query failed",
                meta: { client: "ado", org, operation: "queryWorkItems", stage: "wiql", status: wiqlRes.status },
            });
            return (0, api_error_1.handleApiError)(wiqlRes, "ADO", "ado");
        }
        const wiqlData = (await wiqlRes.json());
        if (!wiqlData.workItems || wiqlData.workItems.length === 0) {
            // Empty result is a successful outcome, not an error.
            (0, runtime_1.emitNervesEvent)({
                event: "client.request_end",
                component: "clients",
                message: "ADO work item query returned no results",
                meta: { client: "ado", org, operation: "queryWorkItems", count: 0 },
            });
            return "No work items found matching the query.";
        }
        // Step 2: Fetch work item details (batch, max 200)
        const ids = wiqlData.workItems.slice(0, 200).map((wi) => wi.id);
        const detailRes = await fetch(`${ADO_BASE}/${org}/_apis/wit/workitems?ids=${ids.join(",")}&${DEFAULT_API_VERSION}`, {
            headers: {
                Authorization: `Bearer ${token}`,
            },
        });
        if (!detailRes.ok) {
            (0, runtime_1.emitNervesEvent)({
                level: "error",
                event: "client.error",
                component: "clients",
                message: "ADO work item details fetch failed",
                meta: { client: "ado", org, operation: "queryWorkItems", stage: "details", status: detailRes.status },
            });
            return (0, api_error_1.handleApiError)(detailRes, "ADO", "ado");
        }
        const detailData = (await detailRes.json());
        // Format results: one "#id: title [state] (assignee)" line per item.
        const lines = detailData.value.map((wi) => {
            const assignedTo = wi.fields["System.AssignedTo"]?.displayName || "Unassigned";
            return `#${wi.id}: ${wi.fields["System.Title"]} [${wi.fields["System.State"]}] (${assignedTo})`;
        });
        (0, runtime_1.emitNervesEvent)({
            event: "client.request_end",
            component: "clients",
            message: "ADO work item query completed",
            meta: { client: "ado", org, operation: "queryWorkItems", count: lines.length },
        });
        return lines.join("\n");
    }
    catch (err) {
        // Network failures and JSON parse errors land here.
        (0, runtime_1.emitNervesEvent)({
            level: "error",
            event: "client.error",
            component: "clients",
            message: "ADO work item query threw exception",
            meta: {
                client: "ado",
                org,
                operation: "queryWorkItems",
                reason: err instanceof Error ? err.message : String(err),
            },
        });
        return (0, api_error_1.handleApiError)(err, "ADO", "ado");
    }
}
|
|
173
|
+
/**
 * Discover ADO organizations accessible by the authenticated user:
 * 1. Fetches the user profile to obtain publicAlias (member ID).
 * 2. Uses the Accounts API to list organizations for that member.
 * Throws on API errors (callers handle error presentation).
 *
 * @param token - bearer token for app.vssps.visualstudio.com
 * @returns array of organization (account) names
 */
async function discoverOrganizations(token) {
    const authHeaders = { Authorization: `Bearer ${token}` };
    // Step 1: the Accounts API needs the caller's publicAlias (member id).
    const profileUrl = `${VSSPS_BASE}/_apis/profile/profiles/me?${DEFAULT_API_VERSION}`;
    const profileRes = await fetch(profileUrl, { headers: authHeaders });
    if (!profileRes.ok) {
        throw new Error(`ADO profile request failed: ${profileRes.status} ${profileRes.statusText}`);
    }
    const profile = (await profileRes.json());
    const memberId = profile.publicAlias;
    if (!memberId) {
        throw new Error("ADO profile response missing publicAlias");
    }
    // Step 2: list every organization (account) this member belongs to.
    const accountsUrl = `${VSSPS_BASE}/_apis/accounts?memberId=${memberId}&${DEFAULT_API_VERSION}`;
    const accountsRes = await fetch(accountsUrl, { headers: authHeaders });
    if (!accountsRes.ok) {
        throw new Error(`ADO accounts request failed: ${accountsRes.status} ${accountsRes.statusText}`);
    }
    const data = (await accountsRes.json());
    return (data.value ?? []).map((account) => account.accountName);
}
|
|
200
|
+
/**
 * Discover project names within an ADO organization.
 * Throws on API errors (callers handle error presentation).
 *
 * @param token - bearer token for dev.azure.com
 * @param org - ADO organization name
 * @returns array of project names
 */
async function discoverProjects(token, org) {
    const projectsUrl = `${ADO_BASE}/${org}/_apis/projects?${DEFAULT_API_VERSION}`;
    const res = await fetch(projectsUrl, {
        headers: { Authorization: `Bearer ${token}` },
    });
    if (!res.ok) {
        throw new Error(`ADO projects request failed: ${res.status} ${res.statusText}`);
    }
    const data = (await res.json());
    return (data.value ?? []).map((project) => project.name);
}
|