@danielblomma/cortex-mcp 1.2.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cortex.mjs +11 -2
- package/package.json +1 -1
- package/scaffold/scripts/context.sh +4 -0
- package/scaffold/scripts/doctor.sh +298 -0
package/bin/cortex.mjs
CHANGED
|
@@ -53,6 +53,7 @@ function printHelp() {
|
|
|
53
53
|
console.log(" cortex bootstrap");
|
|
54
54
|
console.log(" cortex update");
|
|
55
55
|
console.log(" cortex status");
|
|
56
|
+
console.log(" cortex doctor");
|
|
56
57
|
console.log(" cortex ingest [--changed] [--verbose]");
|
|
57
58
|
console.log(" cortex embed [--changed]");
|
|
58
59
|
console.log(" cortex graph-load [--no-reset]");
|
|
@@ -147,7 +148,7 @@ function ensureScaffoldExists() {
|
|
|
147
148
|
|
|
148
149
|
// Files that should never be overwritten if they already exist in the target.
|
|
149
150
|
// These contain user-specific configuration that would be lost on re-init.
|
|
150
|
-
const PRESERVE_FILES = new Set(["config.yaml", "enterprise.yml", "enterprise.yaml"]);
|
|
151
|
+
const PRESERVE_FILES = new Set(["config.yaml", "enterprise.yml", "enterprise.yaml", "CLAUDE.md"]);
|
|
151
152
|
|
|
152
153
|
function copyDirectory(sourceDir, targetDir) {
|
|
153
154
|
fs.mkdirSync(targetDir, { recursive: true });
|
|
@@ -206,6 +207,13 @@ function installScaffold(targetDir, force) {
|
|
|
206
207
|
copyDirectory(sourcePath, targetPath);
|
|
207
208
|
}
|
|
208
209
|
|
|
210
|
+
// Copy CLAUDE.md (skip if already exists to preserve user edits)
|
|
211
|
+
const claudeMdSource = path.join(SCAFFOLD_ROOT, "CLAUDE.md");
|
|
212
|
+
const claudeMdTarget = path.join(targetDir, "CLAUDE.md");
|
|
213
|
+
if (fs.existsSync(claudeMdSource) && !fs.existsSync(claudeMdTarget)) {
|
|
214
|
+
fs.copyFileSync(claudeMdSource, claudeMdTarget);
|
|
215
|
+
}
|
|
216
|
+
|
|
209
217
|
const docsDir = path.join(targetDir, "docs");
|
|
210
218
|
fs.mkdirSync(docsDir, { recursive: true });
|
|
211
219
|
const docsSource = path.join(SCAFFOLD_ROOT, "docs", "architecture.md");
|
|
@@ -635,7 +643,8 @@ async function run() {
|
|
|
635
643
|
"watch",
|
|
636
644
|
"refresh",
|
|
637
645
|
"memory-compile",
|
|
638
|
-
"memory-lint"
|
|
646
|
+
"memory-lint",
|
|
647
|
+
"doctor"
|
|
639
648
|
]);
|
|
640
649
|
|
|
641
650
|
if (!passthrough.has(command)) {
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@danielblomma/cortex-mcp",
|
|
3
3
|
"mcpName": "io.github.DanielBlomma/cortex",
|
|
4
|
-
"version": "1.2.1",
|
|
4
|
+
"version": "1.3.0",
|
|
5
5
|
"description": "Local, repo-scoped context platform for coding assistants. Semantic search, graph relationships, and architectural rule context.",
|
|
6
6
|
"type": "module",
|
|
7
7
|
"author": "Daniel Blomma",
|
|
package/scaffold/scripts/context.sh
CHANGED
|
@@ -18,6 +18,7 @@ Commands:
|
|
|
18
18
|
graph-load [--no-reset] Build RyuGraph DB from indexed context
|
|
19
19
|
dashboard [--interval <sec>] Live TUI showing what Cortex adds to your repo
|
|
20
20
|
status Show latest ingest summary
|
|
21
|
+
doctor Health check — verify config, index, MCP, and enterprise
|
|
21
22
|
memory-compile [--dry-run] [--verbose]
|
|
22
23
|
Compile raw memory notes into structured articles
|
|
23
24
|
memory-lint [--verbose] [--json] Lint compiled memory articles for issues
|
|
@@ -58,6 +59,9 @@ case "$COMMAND" in
|
|
|
58
59
|
status)
|
|
59
60
|
"$SCRIPT_DIR/status.sh"
|
|
60
61
|
;;
|
|
62
|
+
doctor)
|
|
63
|
+
"$SCRIPT_DIR/doctor.sh"
|
|
64
|
+
;;
|
|
61
65
|
memory-compile)
|
|
62
66
|
"$SCRIPT_DIR/memory-compile.sh" "$@"
|
|
63
67
|
;;
|
|
package/scaffold/scripts/doctor.sh
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
set -euo pipefail
|
|
3
|
+
|
|
4
|
+
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
|
5
|
+
CONTEXT_DIR="$REPO_ROOT/.context"
|
|
6
|
+
MCP_DIR="$REPO_ROOT/mcp"
|
|
7
|
+
|
|
8
|
+
PASS=0
|
|
9
|
+
FAIL=0
|
|
10
|
+
WARN=0
|
|
11
|
+
|
|
12
|
+
pass() { echo " ✓ $1"; PASS=$((PASS + 1)); }
|
|
13
|
+
fail() { echo " ✗ $1"; FAIL=$((FAIL + 1)); }
|
|
14
|
+
warn() { echo " ! $1"; WARN=$((WARN + 1)); }
|
|
15
|
+
info() { echo " - $1"; }
|
|
16
|
+
|
|
17
|
+
echo ""
|
|
18
|
+
echo "[cortex] Doctor — checking your setup"
|
|
19
|
+
|
|
20
|
+
# ── Config ──────────────────────────────────────────────
|
|
21
|
+
|
|
22
|
+
echo ""
|
|
23
|
+
echo " Config"
|
|
24
|
+
|
|
25
|
+
if [[ -f "$CONTEXT_DIR/config.yaml" ]]; then
|
|
26
|
+
pass ".context/config.yaml found"
|
|
27
|
+
# Show source_paths
|
|
28
|
+
PATHS=$(node -e '
|
|
29
|
+
const fs = require("node:fs");
|
|
30
|
+
const raw = fs.readFileSync(process.argv[1], "utf8");
|
|
31
|
+
const paths = [];
|
|
32
|
+
let inSection = false;
|
|
33
|
+
for (const line of raw.split("\n")) {
|
|
34
|
+
if (/^source_paths:\s*$/.test(line.trim())) { inSection = true; continue; }
|
|
35
|
+
if (!inSection) continue;
|
|
36
|
+
const m = line.match(/^\s*-\s*(.+?)\s*$/);
|
|
37
|
+
if (m) { paths.push(m[1].replace(/^["\x27]|["\x27]$/g, "")); continue; }
|
|
38
|
+
if (line.trim() !== "" && !/^\s/.test(line)) break;
|
|
39
|
+
}
|
|
40
|
+
console.log(paths.join(", ") || "(none)");
|
|
41
|
+
' "$CONTEXT_DIR/config.yaml" 2>/dev/null || echo "(parse error)")
|
|
42
|
+
info "source_paths: $PATHS"
|
|
43
|
+
else
|
|
44
|
+
fail ".context/config.yaml not found — run: cortex init"
|
|
45
|
+
fi
|
|
46
|
+
|
|
47
|
+
# Enterprise config
|
|
48
|
+
ENTERPRISE_CONFIG=""
|
|
49
|
+
if [[ -f "$CONTEXT_DIR/enterprise.yml" ]]; then
|
|
50
|
+
ENTERPRISE_CONFIG="$CONTEXT_DIR/enterprise.yml"
|
|
51
|
+
elif [[ -f "$CONTEXT_DIR/enterprise.yaml" ]]; then
|
|
52
|
+
ENTERPRISE_CONFIG="$CONTEXT_DIR/enterprise.yaml"
|
|
53
|
+
fi
|
|
54
|
+
|
|
55
|
+
if [[ -n "$ENTERPRISE_CONFIG" ]]; then
|
|
56
|
+
pass "enterprise config found: $(basename "$ENTERPRISE_CONFIG")"
|
|
57
|
+
else
|
|
58
|
+
info "no enterprise config (community mode)"
|
|
59
|
+
fi
|
|
60
|
+
|
|
61
|
+
# ── Index ───────────────────────────────────────────────
|
|
62
|
+
|
|
63
|
+
echo ""
|
|
64
|
+
echo " Index"
|
|
65
|
+
|
|
66
|
+
INGEST_MANIFEST="$CONTEXT_DIR/cache/manifest.json"
|
|
67
|
+
if [[ -f "$INGEST_MANIFEST" ]]; then
|
|
68
|
+
INGEST_INFO=$(node -e '
|
|
69
|
+
const fs = require("node:fs");
|
|
70
|
+
const d = JSON.parse(fs.readFileSync(process.argv[1], "utf8"));
|
|
71
|
+
const c = d.counts || {};
|
|
72
|
+
const age = Math.round((Date.now() - new Date(d.generated_at).getTime()) / 60000);
|
|
73
|
+
const ageStr = age < 60 ? age + " min ago" : Math.round(age / 60) + "h ago";
|
|
74
|
+
console.log(`${c.files ?? 0} files, ${c.rules ?? 0} rules (${ageStr})`);
|
|
75
|
+
' "$INGEST_MANIFEST" 2>/dev/null || echo "parse error")
|
|
76
|
+
pass "Ingest: $INGEST_INFO"
|
|
77
|
+
else
|
|
78
|
+
warn "Ingest manifest missing — run: cortex bootstrap"
|
|
79
|
+
fi
|
|
80
|
+
|
|
81
|
+
GRAPH_MANIFEST="$CONTEXT_DIR/cache/graph-manifest.json"
|
|
82
|
+
if [[ -f "$GRAPH_MANIFEST" ]]; then
|
|
83
|
+
GRAPH_INFO=$(node -e '
|
|
84
|
+
const fs = require("node:fs");
|
|
85
|
+
const d = JSON.parse(fs.readFileSync(process.argv[1], "utf8"));
|
|
86
|
+
const c = d.counts || {};
|
|
87
|
+
const age = Math.round((Date.now() - new Date(d.generated_at).getTime()) / 60000);
|
|
88
|
+
const ageStr = age < 60 ? age + " min ago" : Math.round(age / 60) + "h ago";
|
|
89
|
+
console.log(`${c.files ?? 0} files, ${c.constrains ?? 0} constrains, ${c.calls ?? 0} calls (${ageStr})`);
|
|
90
|
+
' "$GRAPH_MANIFEST" 2>/dev/null || echo "parse error")
|
|
91
|
+
pass "Graph: $GRAPH_INFO"
|
|
92
|
+
else
|
|
93
|
+
warn "Graph manifest missing — run: cortex bootstrap"
|
|
94
|
+
fi
|
|
95
|
+
|
|
96
|
+
EMBED_MANIFEST="$CONTEXT_DIR/embeddings/manifest.json"
|
|
97
|
+
if [[ -f "$EMBED_MANIFEST" ]]; then
|
|
98
|
+
EMBED_INFO=$(node -e '
|
|
99
|
+
const fs = require("node:fs");
|
|
100
|
+
const d = JSON.parse(fs.readFileSync(process.argv[1], "utf8"));
|
|
101
|
+
const c = d.counts || {};
|
|
102
|
+
console.log(`${c.entities ?? 0} entities, model ${d.model || "unknown"}`);
|
|
103
|
+
' "$EMBED_MANIFEST" 2>/dev/null || echo "parse error")
|
|
104
|
+
pass "Embeddings: $EMBED_INFO"
|
|
105
|
+
else
|
|
106
|
+
warn "Embeddings missing — run: cortex bootstrap"
|
|
107
|
+
fi
|
|
108
|
+
|
|
109
|
+
# Freshness
|
|
110
|
+
if [[ -f "$INGEST_MANIFEST" ]] && command -v git &>/dev/null && git -C "$REPO_ROOT" rev-parse --git-dir &>/dev/null; then
|
|
111
|
+
FRESHNESS=$(node -e '
|
|
112
|
+
const fs = require("node:fs");
|
|
113
|
+
const { execSync } = require("node:child_process");
|
|
114
|
+
const d = JSON.parse(fs.readFileSync(process.argv[1], "utf8"));
|
|
115
|
+
const sp = Array.isArray(d.source_paths) ? d.source_paths : [];
|
|
116
|
+
const files = Number(d.counts?.files ?? 0);
|
|
117
|
+
let changed = 0;
|
|
118
|
+
try {
|
|
119
|
+
const out = execSync("git status --porcelain", { cwd: process.argv[2], encoding: "utf8", timeout: 3000 });
|
|
120
|
+
for (const line of out.split("\n")) {
|
|
121
|
+
if (!line || line.length < 4) continue;
|
|
122
|
+
const p = line.slice(3).trim().split(" -> ").pop();
|
|
123
|
+
if (p.startsWith(".context/")) continue;
|
|
124
|
+
if (sp.length === 0 || sp.some(s => p === s || p.startsWith(s + "/"))) changed++;
|
|
125
|
+
}
|
|
126
|
+
} catch {}
|
|
127
|
+
const base = Math.max(files, changed, 1);
|
|
128
|
+
const pct = Math.round(Math.max(0, (base - changed) / base) * 100);
|
|
129
|
+
console.log(pct);
|
|
130
|
+
' "$INGEST_MANIFEST" "$REPO_ROOT" 2>/dev/null || echo "-1")
|
|
131
|
+
if [[ "$FRESHNESS" != "-1" ]]; then
|
|
132
|
+
if [[ "$FRESHNESS" -ge 90 ]]; then
|
|
133
|
+
pass "Freshness: ${FRESHNESS}%"
|
|
134
|
+
elif [[ "$FRESHNESS" -ge 50 ]]; then
|
|
135
|
+
warn "Freshness: ${FRESHNESS}% — run: cortex update"
|
|
136
|
+
else
|
|
137
|
+
fail "Freshness: ${FRESHNESS}% — run: cortex update"
|
|
138
|
+
fi
|
|
139
|
+
fi
|
|
140
|
+
fi
|
|
141
|
+
|
|
142
|
+
# ── MCP Server ──────────────────────────────────────────
|
|
143
|
+
|
|
144
|
+
echo ""
|
|
145
|
+
echo " MCP Server"
|
|
146
|
+
|
|
147
|
+
if [[ -f "$MCP_DIR/dist/server.js" ]]; then
|
|
148
|
+
pass "mcp/dist/server.js exists"
|
|
149
|
+
else
|
|
150
|
+
fail "mcp/dist/server.js missing — run: cd mcp && npm run build"
|
|
151
|
+
fi
|
|
152
|
+
|
|
153
|
+
if [[ -d "$MCP_DIR/node_modules" ]]; then
|
|
154
|
+
pass "mcp/node_modules present"
|
|
155
|
+
else
|
|
156
|
+
fail "mcp/node_modules missing — run: cd mcp && npm install"
|
|
157
|
+
fi
|
|
158
|
+
|
|
159
|
+
# Quick MCP import check
|
|
160
|
+
if [[ -f "$MCP_DIR/dist/server.js" ]] && [[ -d "$MCP_DIR/node_modules" ]]; then
|
|
161
|
+
MCP_CHECK=$(cd "$REPO_ROOT" && timeout 10 node -e '
|
|
162
|
+
const start = Date.now();
|
|
163
|
+
try {
|
|
164
|
+
require("./mcp/dist/graph.js");
|
|
165
|
+
console.log("ok " + (Date.now() - start));
|
|
166
|
+
} catch(e) {
|
|
167
|
+
console.log("fail " + e.message);
|
|
168
|
+
}
|
|
169
|
+
' 2>/dev/null || echo "fail timeout")
|
|
170
|
+
if [[ "$MCP_CHECK" == ok* ]]; then
|
|
171
|
+
MS="${MCP_CHECK#ok }"
|
|
172
|
+
pass "Graph module loads (${MS}ms)"
|
|
173
|
+
else
|
|
174
|
+
warn "Graph module failed to load: ${MCP_CHECK#fail }"
|
|
175
|
+
fi
|
|
176
|
+
fi
|
|
177
|
+
|
|
178
|
+
# ── Enterprise ──────────────────────────────────────────
|
|
179
|
+
|
|
180
|
+
if [[ -n "$ENTERPRISE_CONFIG" ]]; then
|
|
181
|
+
echo ""
|
|
182
|
+
echo " Enterprise"
|
|
183
|
+
|
|
184
|
+
# Plugin installed?
|
|
185
|
+
ENTERPRISE_PKG="$MCP_DIR/node_modules/@danielblomma/cortex-enterprise/package.json"
|
|
186
|
+
if [[ -f "$ENTERPRISE_PKG" ]]; then
|
|
187
|
+
ENT_VERSION=$(node -e 'console.log(JSON.parse(require("fs").readFileSync(process.argv[1],"utf8")).version)' "$ENTERPRISE_PKG" 2>/dev/null || echo "unknown")
|
|
188
|
+
pass "Plugin installed: v${ENT_VERSION}"
|
|
189
|
+
else
|
|
190
|
+
fail "Plugin not installed — run: cortex bootstrap"
|
|
191
|
+
fi
|
|
192
|
+
|
|
193
|
+
# Parse enterprise config for checks
|
|
194
|
+
TELEMETRY_ENDPOINT=$(node -e '
|
|
195
|
+
const fs = require("node:fs");
|
|
196
|
+
const raw = fs.readFileSync(process.argv[1], "utf8");
|
|
197
|
+
let section = "", fields = {};
|
|
198
|
+
for (const line of raw.split("\n")) {
|
|
199
|
+
const t = line.trimEnd();
|
|
200
|
+
if (!t || t.startsWith("#")) continue;
|
|
201
|
+
const sm = t.match(/^(\w+):\s*$/);
|
|
202
|
+
if (sm) { section = sm[1]; continue; }
|
|
203
|
+
const kv = t.match(/^\s+(\w+):\s*(.+?)\s*$/);
|
|
204
|
+
if (kv && section) fields[section + "." + kv[1]] = kv[2].replace(/^["\x27]|["\x27]$/g, "");
|
|
205
|
+
}
|
|
206
|
+
console.log(fields["telemetry.endpoint"] || "");
|
|
207
|
+
' "$ENTERPRISE_CONFIG" 2>/dev/null || echo "")
|
|
208
|
+
|
|
209
|
+
POLICY_ENDPOINT=$(node -e '
|
|
210
|
+
const fs = require("node:fs");
|
|
211
|
+
const raw = fs.readFileSync(process.argv[1], "utf8");
|
|
212
|
+
let section = "", fields = {};
|
|
213
|
+
for (const line of raw.split("\n")) {
|
|
214
|
+
const t = line.trimEnd();
|
|
215
|
+
if (!t || t.startsWith("#")) continue;
|
|
216
|
+
const sm = t.match(/^(\w+):\s*$/);
|
|
217
|
+
if (sm) { section = sm[1]; continue; }
|
|
218
|
+
const kv = t.match(/^\s+(\w+):\s*(.+?)\s*$/);
|
|
219
|
+
if (kv && section) fields[section + "." + kv[1]] = kv[2].replace(/^["\x27]|["\x27]$/g, "");
|
|
220
|
+
}
|
|
221
|
+
console.log(fields["policy.endpoint"] || "");
|
|
222
|
+
' "$ENTERPRISE_CONFIG" 2>/dev/null || echo "")
|
|
223
|
+
|
|
224
|
+
# Telemetry
|
|
225
|
+
if [[ -n "$TELEMETRY_ENDPOINT" ]]; then
|
|
226
|
+
pass "Telemetry: endpoint configured"
|
|
227
|
+
HTTP_CODE=$(curl -so /dev/null -w '%{http_code}' --max-time 5 -X POST \
|
|
228
|
+
-H "Content-Type: application/json" \
|
|
229
|
+
-d '{}' \
|
|
230
|
+
"$TELEMETRY_ENDPOINT" 2>/dev/null | tail -c 3 || echo "000")
|
|
231
|
+
if [[ "$HTTP_CODE" == "000" ]]; then
|
|
232
|
+
fail "Telemetry: endpoint not reachable (timeout/DNS)"
|
|
233
|
+
elif [[ "$HTTP_CODE" =~ ^[23] ]]; then
|
|
234
|
+
pass "Telemetry: endpoint reachable (HTTP ${HTTP_CODE})"
|
|
235
|
+
elif [[ "$HTTP_CODE" == "401" ]]; then
|
|
236
|
+
pass "Telemetry: endpoint reachable (auth required — expected)"
|
|
237
|
+
else
|
|
238
|
+
warn "Telemetry: endpoint returned HTTP ${HTTP_CODE}"
|
|
239
|
+
fi
|
|
240
|
+
else
|
|
241
|
+
warn "Telemetry: no endpoint configured"
|
|
242
|
+
fi
|
|
243
|
+
|
|
244
|
+
# Policy
|
|
245
|
+
POLICY_COUNT=0
|
|
246
|
+
if [[ -f "$CONTEXT_DIR/rules.yaml" ]]; then
|
|
247
|
+
LOCAL_RULES=$(grep -c "^ - id:" "$CONTEXT_DIR/rules.yaml" 2>/dev/null || echo "0")
|
|
248
|
+
POLICY_COUNT=$((POLICY_COUNT + LOCAL_RULES))
|
|
249
|
+
fi
|
|
250
|
+
if [[ -f "$CONTEXT_DIR/policies/org-rules.yaml" ]]; then
|
|
251
|
+
ORG_RULES=$(grep -c "^ - id:" "$CONTEXT_DIR/policies/org-rules.yaml" 2>/dev/null || echo "0")
|
|
252
|
+
POLICY_COUNT=$((POLICY_COUNT + ORG_RULES))
|
|
253
|
+
fi
|
|
254
|
+
if [[ "$POLICY_COUNT" -gt 0 ]]; then
|
|
255
|
+
pass "Policies: ${POLICY_COUNT} loaded"
|
|
256
|
+
else
|
|
257
|
+
info "Policies: none loaded"
|
|
258
|
+
fi
|
|
259
|
+
|
|
260
|
+
if [[ -n "$POLICY_ENDPOINT" ]]; then
|
|
261
|
+
POLICY_HTTP=$(curl -so /dev/null -w '%{http_code}' --max-time 5 "$POLICY_ENDPOINT" 2>/dev/null | tail -c 3 || echo "000")
|
|
262
|
+
if [[ "$POLICY_HTTP" == "000" ]]; then
|
|
263
|
+
fail "Policy: endpoint not reachable (timeout/DNS)"
|
|
264
|
+
elif [[ "$POLICY_HTTP" =~ ^[23] ]] || [[ "$POLICY_HTTP" == "401" ]]; then
|
|
265
|
+
pass "Policy: endpoint reachable (HTTP ${POLICY_HTTP})"
|
|
266
|
+
else
|
|
267
|
+
warn "Policy: endpoint returned HTTP ${POLICY_HTTP}"
|
|
268
|
+
fi
|
|
269
|
+
fi
|
|
270
|
+
|
|
271
|
+
# Audit
|
|
272
|
+
LATEST_AUDIT=$(ls -t "$CONTEXT_DIR/audit/"*.jsonl 2>/dev/null | head -1 || echo "")
|
|
273
|
+
if [[ -n "$LATEST_AUDIT" ]]; then
|
|
274
|
+
AUDIT_AGE=$(node -e '
|
|
275
|
+
const fs = require("node:fs");
|
|
276
|
+
const stat = fs.statSync(process.argv[1]);
|
|
277
|
+
const mins = Math.round((Date.now() - stat.mtimeMs) / 60000);
|
|
278
|
+
if (mins < 60) console.log(mins + " min ago");
|
|
279
|
+
else console.log(Math.round(mins / 60) + "h ago");
|
|
280
|
+
' "$LATEST_AUDIT" 2>/dev/null || echo "unknown")
|
|
281
|
+
pass "Audit: last entry ${AUDIT_AGE}"
|
|
282
|
+
else
|
|
283
|
+
info "Audit: no entries yet"
|
|
284
|
+
fi
|
|
285
|
+
fi
|
|
286
|
+
|
|
287
|
+
# ── Summary ─────────────────────────────────────────────
|
|
288
|
+
|
|
289
|
+
echo ""
|
|
290
|
+
TOTAL=$((PASS + FAIL + WARN))
|
|
291
|
+
if [[ "$FAIL" -eq 0 ]]; then
|
|
292
|
+
echo "[cortex] ${PASS}/${TOTAL} checks passed"
|
|
293
|
+
else
|
|
294
|
+
echo "[cortex] ${PASS}/${TOTAL} checks passed, ${FAIL} failed, ${WARN} warnings"
|
|
295
|
+
fi
|
|
296
|
+
echo ""
|
|
297
|
+
|
|
298
|
+
exit "$FAIL"
|