openhermes 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +281 -0
- package/autorecall.mjs +167 -0
- package/bootstrap.mjs +255 -0
- package/curator.mjs +470 -0
- package/harness/commands/build-fix.md +60 -0
- package/harness/commands/code-review.md +71 -0
- package/harness/commands/doctor.md +42 -0
- package/harness/commands/learn.md +37 -0
- package/harness/commands/memory-search.md +37 -0
- package/harness/commands/plan.md +53 -0
- package/harness/commands/security.md +93 -0
- package/harness/constitution/soul.md +76 -0
- package/harness/instructions/RUNTIME.md +21 -0
- package/harness/prompts/architect.txt +175 -0
- package/harness/prompts/build-error-resolver.md +37 -0
- package/harness/prompts/code-reviewer.md +33 -0
- package/harness/prompts/e2e-runner.txt +305 -0
- package/harness/prompts/explore.md +29 -0
- package/harness/prompts/planner.md +30 -0
- package/harness/prompts/security-reviewer.md +35 -0
- package/harness/rules/audit.md +84 -0
- package/harness/rules/checkpointing.md +75 -0
- package/harness/rules/context-loading.md +33 -0
- package/harness/rules/credential-exposure.md +0 -0
- package/harness/rules/delegation.md +76 -0
- package/harness/rules/memory-management.md +28 -0
- package/harness/rules/precedence.md +52 -0
- package/harness/rules/promotion.md +46 -0
- package/harness/rules/ranking.md +64 -0
- package/harness/rules/retrieval.md +94 -0
- package/harness/rules/runtime-guards.md +196 -0
- package/harness/rules/self-heal.md +79 -0
- package/harness/rules/session-start.md +34 -0
- package/harness/rules/skills-management.md +165 -0
- package/harness/rules/state-drift.md +192 -0
- package/harness/rules/verification.md +88 -0
- package/harness/skills/.bundled_manifest +17 -0
- package/harness/skills/.usage.json +6 -0
- package/harness/skills/api-design/SKILL.md +523 -0
- package/harness/skills/backend-patterns/SKILL.md +598 -0
- package/harness/skills/coding-standards/SKILL.md +549 -0
- package/harness/skills/e2e-testing/SKILL.md +326 -0
- package/harness/skills/frontend-patterns/SKILL.md +642 -0
- package/harness/skills/frontend-slides/SKILL.md +184 -0
- package/harness/skills/security-review/SKILL.md +495 -0
- package/harness/skills/strategic-compact/SKILL.md +131 -0
- package/harness/skills/tdd-workflow/SKILL.md +463 -0
- package/harness/skills/verification-loop/SKILL.md +126 -0
- package/index.mjs +5 -0
- package/lib/hardening.mjs +113 -0
- package/lib/memory-tools-plugin.mjs +265 -0
- package/lib/schema-validator.mjs +77 -0
- package/lib/tools/_memory.mjs +230 -0
- package/lib/tools/hm_get.mjs +13 -0
- package/lib/tools/hm_latest.mjs +12 -0
- package/lib/tools/hm_list.mjs +13 -0
- package/lib/tools/hm_put.mjs +14 -0
- package/lib/tools/hm_search.mjs +16 -0
- package/package.json +49 -0
- package/schemas/audit.schema.json +61 -0
- package/schemas/backlog.schema.json +42 -0
- package/schemas/checkpoint.schema.json +44 -0
- package/schemas/constraint.schema.json +41 -0
- package/schemas/decision.schema.json +42 -0
- package/schemas/instinct.schema.json +42 -0
- package/schemas/loop-state.schema.json +33 -0
- package/schemas/mistake.schema.json +43 -0
- package/schemas/verification_receipt.schema.json +67 -0
- package/skill-builder.mjs +113 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: verification-loop
|
|
3
|
+
description: "A comprehensive verification system for Claude Code sessions."
|
|
4
|
+
origin: ECC
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# Verification Loop Skill
|
|
8
|
+
|
|
9
|
+
A comprehensive verification system for Claude Code sessions.
|
|
10
|
+
|
|
11
|
+
## When to Use
|
|
12
|
+
|
|
13
|
+
Invoke this skill:
|
|
14
|
+
- After completing a feature or significant code change
|
|
15
|
+
- Before creating a PR
|
|
16
|
+
- When you want to ensure quality gates pass
|
|
17
|
+
- After refactoring
|
|
18
|
+
|
|
19
|
+
## Verification Phases
|
|
20
|
+
|
|
21
|
+
### Phase 1: Build Verification
|
|
22
|
+
```bash
|
|
23
|
+
# Check if project builds
|
|
24
|
+
npm run build 2>&1 | tail -20
|
|
25
|
+
# OR
|
|
26
|
+
pnpm build 2>&1 | tail -20
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
If build fails, STOP and fix before continuing.
|
|
30
|
+
|
|
31
|
+
### Phase 2: Type Check
|
|
32
|
+
```bash
|
|
33
|
+
# TypeScript projects
|
|
34
|
+
npx tsc --noEmit 2>&1 | head -30
|
|
35
|
+
|
|
36
|
+
# Python projects
|
|
37
|
+
pyright . 2>&1 | head -30
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
Report all type errors. Fix critical ones before continuing.
|
|
41
|
+
|
|
42
|
+
### Phase 3: Lint Check
|
|
43
|
+
```bash
|
|
44
|
+
# JavaScript/TypeScript
|
|
45
|
+
npm run lint 2>&1 | head -30
|
|
46
|
+
|
|
47
|
+
# Python
|
|
48
|
+
ruff check . 2>&1 | head -30
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
### Phase 4: Test Suite
|
|
52
|
+
```bash
|
|
53
|
+
# Run tests with coverage
|
|
54
|
+
npm run test -- --coverage 2>&1 | tail -50
|
|
55
|
+
|
|
56
|
+
# Check coverage threshold
|
|
57
|
+
# Target: 80% minimum
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
Report:
|
|
61
|
+
- Total tests: X
|
|
62
|
+
- Passed: X
|
|
63
|
+
- Failed: X
|
|
64
|
+
- Coverage: X%
|
|
65
|
+
|
|
66
|
+
### Phase 5: Security Scan
|
|
67
|
+
```bash
|
|
68
|
+
# Check for secrets
|
|
69
|
+
grep -rn "sk-" --include="*.ts" --include="*.js" . 2>/dev/null | head -10
|
|
70
|
+
grep -rn "api_key" --include="*.ts" --include="*.js" . 2>/dev/null | head -10
|
|
71
|
+
|
|
72
|
+
# Check for console.log
|
|
73
|
+
grep -rn "console\.log" --include="*.ts" --include="*.tsx" src/ 2>/dev/null | head -10
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Phase 6: Diff Review
|
|
77
|
+
```bash
|
|
78
|
+
# Show what changed
|
|
79
|
+
git diff --stat
|
|
80
|
+
git diff HEAD~1 --name-only
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
Review each changed file for:
|
|
84
|
+
- Unintended changes
|
|
85
|
+
- Missing error handling
|
|
86
|
+
- Potential edge cases
|
|
87
|
+
|
|
88
|
+
## Output Format
|
|
89
|
+
|
|
90
|
+
After running all phases, produce a verification report:
|
|
91
|
+
|
|
92
|
+
```
|
|
93
|
+
VERIFICATION REPORT
|
|
94
|
+
==================
|
|
95
|
+
|
|
96
|
+
Build: [PASS/FAIL]
|
|
97
|
+
Types: [PASS/FAIL] (X errors)
|
|
98
|
+
Lint: [PASS/FAIL] (X warnings)
|
|
99
|
+
Tests: [PASS/FAIL] (X/Y passed, Z% coverage)
|
|
100
|
+
Security: [PASS/FAIL] (X issues)
|
|
101
|
+
Diff: [X files changed]
|
|
102
|
+
|
|
103
|
+
Overall: [READY/NOT READY] for PR
|
|
104
|
+
|
|
105
|
+
Issues to Fix:
|
|
106
|
+
1. ...
|
|
107
|
+
2. ...
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
## Continuous Mode
|
|
111
|
+
|
|
112
|
+
For long sessions, run verification every 15 minutes or after major changes:
|
|
113
|
+
|
|
114
|
+
```markdown
|
|
115
|
+
Set a mental checkpoint:
|
|
116
|
+
- After completing each function
|
|
117
|
+
- After finishing a component
|
|
118
|
+
- Before moving to next task
|
|
119
|
+
|
|
120
|
+
Run: /verify
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
## Integration with Hooks
|
|
124
|
+
|
|
125
|
+
This skill complements PostToolUse hooks but provides deeper verification.
|
|
126
|
+
Hooks catch issues immediately; this skill provides comprehensive review.
|
package/index.mjs
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
// Public plugin surface for the openhermes package. Each export is an
// OpenCode plugin factory re-exported from its implementation module.
export { AutorecallPlugin } from "./autorecall.mjs"
export { CuratorPlugin } from "./curator.mjs"
export { SkillBuilderPlugin } from "./skill-builder.mjs"
export { BootstrapPlugin } from "./bootstrap.mjs"
export { MemoryToolsPlugin } from "./lib/memory-tools-plugin.mjs"
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import crypto from "node:crypto"
|
|
2
|
+
import fs from "node:fs"
|
|
3
|
+
import os from "node:os"
|
|
4
|
+
import path from "node:path"
|
|
5
|
+
|
|
6
|
+
// Object keys matching this pattern are considered secret-bearing; their
// values are replaced wholesale with "[REDACTED]" during sanitization.
const SECRET_KEY_PATTERN = /(token|secret|password|passwd|passphrase|api[-_ ]?key|access[-_ ]?key|refresh[-_ ]?token|authorization|cookie|bearer)/i

// [pattern, replacement] pairs applied to free text. Shapes covered:
// sk-* keys, GitHub gh?_ tokens, Slack xox* tokens, Bearer header values,
// and JWT-like dot-separated triples.
const TEXT_REDACTIONS = [
  [/\bsk-[A-Za-z0-9]{16,}\b/g, "[REDACTED]"],
  [/\bgh[pousr]_[A-Za-z0-9_]{20,}\b/g, "[REDACTED]"],
  [/\bxox[baprs]-[A-Za-z0-9-]+\b/g, "[REDACTED]"],
  [/\bBearer\s+[A-Za-z0-9._~+/=-]{8,}\b/gi, "Bearer [REDACTED]"],
  [/\b[A-Za-z0-9_-]{20,}\.[A-Za-z0-9_-]{20,}\.[A-Za-z0-9_-]{20,}\b/g, "[REDACTED]"],
]

/** True for object literals/records; false for null, arrays, and primitives. */
function isPlainObject(value) {
  if (value === null || value === undefined) return false
  return typeof value === "object" && !Array.isArray(value)
}

/**
 * Clamp `text` to `limit` characters, appending a truncation marker when cut.
 * A non-positive limit disables truncation entirely; null/undefined become "".
 */
function truncateText(text, limit = 12000) {
  const str = String(text ?? "")
  if (limit <= 0) return str
  if (str.length <= limit) return str
  const marker = "...[truncated]"
  const keep = Math.max(0, limit - marker.length)
  return `${str.slice(0, keep)}${marker}`
}

/** Apply every TEXT_REDACTIONS rule in order and return the scrubbed copy. */
function redactSensitiveText(text) {
  return TEXT_REDACTIONS.reduce(
    (scrubbed, [pattern, replacement]) => scrubbed.replace(pattern, replacement),
    String(text ?? ""),
  )
}

/**
 * Recursively sanitize an arbitrary value: object values under secret-named
 * keys become "[REDACTED]", and every string is redacted then truncated to
 * `options.maxStringLength` (default 12000). Numbers, booleans, and
 * null/undefined pass through untouched; anything else is stringified first.
 */
function sanitizeValue(value, key = "", options = {}) {
  const cap = Number.isFinite(options.maxStringLength) ? options.maxStringLength : 12000
  if (value == null) return value
  if (typeof value === "string") return truncateText(redactSensitiveText(value), cap)
  if (typeof value === "number" || typeof value === "boolean") return value
  if (Array.isArray(value)) {
    return value.map(entry => sanitizeValue(entry, key, options))
  }
  if (isPlainObject(value)) {
    const clean = {}
    for (const [childKey, childValue] of Object.entries(value)) {
      clean[childKey] = SECRET_KEY_PATTERN.test(childKey)
        ? "[REDACTED]"
        : sanitizeValue(childValue, childKey, options)
    }
    return clean
  }
  // Non-plain objects (Dates, class instances, functions, ...) are treated
  // as text.
  return truncateText(redactSensitiveText(String(value)), cap)
}

/** Sanitize a whole memory record (convenience wrapper over sanitizeValue). */
function sanitizeRecord(record, options = {}) {
  return sanitizeValue(record, "", options)
}
|
|
56
|
+
|
|
57
|
+
/**
 * Build a fingerprint of the current execution environment (paths, session
 * identity, OS, shell, provider/model) plus a sha256 of the whole payload so
 * environment drift between sessions can be detected cheaply.
 *
 * Fix: the shell fallback previously hard-coded "cmd.exe" on every platform;
 * it now only does so on Windows and falls back to $SHELL (or /bin/sh) on
 * POSIX systems.
 *
 * @param {object} [input] optional overrides: cwd, harnessRoot, projectRoot,
 *   project, sessionId.
 * @returns {object} fingerprint fields plus a `sha256` hex digest.
 */
function fingerprintEnvironment(input = {}) {
  const fingerprint = {
    cwd: path.resolve(input.cwd || process.cwd()),
    harness_root: input.harnessRoot ? path.resolve(input.harnessRoot) : null,
    project_root: input.projectRoot ? path.resolve(input.projectRoot) : null,
    project: input.project || null,
    session_id: input.sessionId || null,
    os: process.platform,
    release: os.release(),
    arch: process.arch,
    // Platform-aware shell detection: ComSpec is a Windows-only variable.
    shell: process.platform === "win32"
      ? (process.env.ComSpec || process.env.COMSPEC || "cmd.exe")
      : (process.env.SHELL || "/bin/sh"),
    provider: process.env.OPENCODE_PROVIDER || "lmstudio",
    model: process.env.OPENCODE_MODEL || null,
  }
  const sha256 = crypto.createHash("sha256").update(JSON.stringify(fingerprint)).digest("hex")
  return { ...fingerprint, sha256 }
}
|
|
74
|
+
|
|
75
|
+
/**
 * Snapshot a file's identity: normalized path, mtime (ISO), byte size, and a
 * sha256 of its contents. Returns null when the file cannot be read (missing,
 * permission denied, ...) instead of throwing.
 */
function fingerprintFile(filePath) {
  try {
    const stats = fs.statSync(filePath)
    const bytes = fs.readFileSync(filePath)
    const digest = crypto.createHash("sha256").update(bytes).digest("hex")
    return {
      path: path.normalize(filePath),
      mtime: stats.mtime.toISOString(),
      size: stats.size,
      sha256: digest,
    }
  } catch {
    // Unreadable files are reported as absent rather than fatal.
    return null
  }
}
|
|
89
|
+
|
|
90
|
+
/**
 * Write `data` as pretty-printed JSON to `filePath` as atomically as the
 * platform allows: the payload is written to a unique temp file in the same
 * directory, then renamed over the destination.
 *
 * On Windows, rename-over-existing can fail; in that case the destination is
 * removed and the rename retried once. If the retry also fails, the temp
 * file is cleaned up and the error rethrown, so the destination never holds
 * a partial write.
 */
function atomicWriteJson(filePath, data) {
  const dir = path.dirname(filePath)
  fs.mkdirSync(dir, { recursive: true })
  // PID + timestamp keeps concurrent writers from sharing a temp file.
  const tmpPath = path.join(dir, `.${path.basename(filePath)}.${process.pid}.${Date.now()}.tmp`)
  const payload = `${JSON.stringify(data, null, 2)}\n`
  fs.writeFileSync(tmpPath, payload, "utf8")
  try {
    fs.renameSync(tmpPath, filePath)
  } catch (err) {
    // Fallback (e.g. Windows EPERM/EEXIST): drop the existing destination,
    // then retry the rename once; failures here are deliberately swallowed.
    try { if (fs.existsSync(filePath)) fs.rmSync(filePath, { force: true }) } catch {}
    try {
      fs.renameSync(tmpPath, filePath)
    } catch (retryErr) {
      // Give up: remove the orphaned temp file and surface the failure.
      try { fs.rmSync(tmpPath, { force: true }) } catch {}
      throw retryErr
    }
  }
}
|
|
108
|
+
|
|
109
|
+
/**
 * Interpret a flag-ish value ("1", "true", "yes", "on" in any case) as true;
 * everything else — including undefined, "", "0", and "off" — is false.
 */
function isTruthy(value) {
  const normalized = String(value || "").toLowerCase()
  return normalized === "1" || normalized === "true" || normalized === "yes" || normalized === "on"
}
|
|
112
|
+
|
|
113
|
+
export { atomicWriteJson, fingerprintEnvironment, fingerprintFile, isTruthy, redactSensitiveText, sanitizeRecord, truncateText }
|
|
@@ -0,0 +1,265 @@
|
|
|
1
|
+
import { tool } from "@opencode-ai/plugin"
|
|
2
|
+
import { fileURLToPath } from "url"
|
|
3
|
+
import path from "path"
|
|
4
|
+
import fs from "fs"
|
|
5
|
+
import os from "os"
|
|
6
|
+
|
|
7
|
+
import { atomicWriteJson, fingerprintEnvironment, sanitizeRecord, truncateText } from "./hardening.mjs"
|
|
8
|
+
import { findUnsupportedSchemaKeywords, validateSchema } from "./schema-validator.mjs"
|
|
9
|
+
|
|
10
|
+
// Resolve package-relative paths from this module's location.
const __dirname = path.dirname(fileURLToPath(import.meta.url))
// JSON Schemas shipped with the package, one file per memory class.
const PACKAGE_SCHEMAS = path.resolve(__dirname, "..", "schemas")
// Per-user state root: ~/.config/opencode/openhermes
const ROOT = path.join(os.homedir(), ".config", "opencode", "openhermes")
const MEMORY_DIR = path.join(ROOT, "memory")

// Memory classes accepted by the hm_* tools, and the directory name each
// class's records live under (note "backlog" keeps its singular form).
const CLASSES = ["audit", "checkpoint", "mistake", "instinct", "decision", "constraint", "backlog", "verification_receipt"]
const PLURALS = { audit: "audits", checkpoint: "checkpoints", mistake: "mistakes", instinct: "instincts", decision: "decisions", constraint: "constraints", backlog: "backlog", verification_receipt: "verification_receipts" }
|
|
17
|
+
|
|
18
|
+
/** True only for non-null, non-array objects. */
function isPlainObject(v) {
  return v !== null && v !== undefined && typeof v === "object" && !Array.isArray(v)
}

/** Absolute directory holding records for a memory class. */
function classDir(cls) {
  return path.join(MEMORY_DIR, PLURALS[cls])
}

/**
 * JSON.stringify with object keys sorted recursively, so identical data
 * always serializes to identical bytes (stable for diffing and dedupe).
 */
function stableStringify(v, space = 0) {
  const canonicalize = (node) => {
    if (Array.isArray(node)) return node.map(canonicalize)
    if (!isPlainObject(node)) return node
    const ordered = {}
    for (const key of Object.keys(node).sort()) {
      ordered[key] = canonicalize(node[key])
    }
    return ordered
  }
  return JSON.stringify(canonicalize(v), null, space)
}
|
|
32
|
+
|
|
33
|
+
function buildEntry(cls, r) {
|
|
34
|
+
const e = { id: r.id, summary: r.summary, status: r.status, updated_at: r.updated_at ?? r.created_at, path: path.join("openhermes", "memory", PLURALS[cls], `${r.id}.json`), scope: r.scope ?? null, project: r.project ?? null }
|
|
35
|
+
if (cls === "audit") { e.target = r.target; e.overall_score = r.overall_score }
|
|
36
|
+
if (cls === "backlog") { e.priority = r.priority; e.trigger = r.trigger }
|
|
37
|
+
return e
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
/**
 * A record is expired when it is explicitly marked expired/decayed, or when
 * its decay_at / expires_at timestamp lies in the past. Records without any
 * of those fields (or with unparseable timestamps) never expire.
 */
function hasExpired(r) {
  const status = r?.status
  if (status === "expired" || status === "decayed") return true
  const now = Date.now()
  for (const field of ["decay_at", "expires_at"]) {
    const stamp = r?.[field]
    // Date.parse on garbage yields NaN, and NaN < now is false — safe.
    if (stamp && Date.parse(stamp) < now) return true
  }
  return false
}
|
|
46
|
+
|
|
47
|
+
/** Parse a JSON file, returning `fallback` on any read or parse failure. */
function readJSON(fp, fallback) {
  try {
    return JSON.parse(fs.readFileSync(fp, "utf8"))
  } catch {
    return fallback
  }
}

/**
 * Parse a JSONL file into an array of records; blank lines are skipped.
 * Returns [] when the file is missing or unreadable; a malformed line throws
 * inside the try, so a corrupt file also yields [].
 */
function readJSONL(fp) {
  try {
    const raw = fs.readFileSync(fp, "utf8")
    const records = []
    for (const line of raw.split(/\r?\n/)) {
      const trimmed = line.trim()
      if (trimmed) records.push(JSON.parse(trimmed))
    }
    return records
  } catch {
    return []
  }
}
|
|
56
|
+
|
|
57
|
+
/**
 * Return a new array sorted newest-first by updated_at (falling back to
 * created_at). Numeric timestamp comparison is used when both sides parse
 * and differ; ties and unparseable stamps fall back to reverse lexicographic
 * order. The input array is not mutated.
 */
function sortRecent(entries) {
  const stamp = (entry) => entry?.updated_at ?? entry?.created_at ?? ""
  const copy = entries.slice()
  copy.sort((left, right) => {
    const leftMs = Date.parse(stamp(left))
    const rightMs = Date.parse(stamp(right))
    if (!Number.isNaN(leftMs) && !Number.isNaN(rightMs) && leftMs !== rightMs) {
      return rightMs - leftMs
    }
    return String(stamp(right)).localeCompare(String(stamp(left)))
  })
  return copy
}

/** Drop expired/decayed records, keeping only live entries. */
function filterActive(entries) {
  return entries.filter(entry => !hasExpired(entry))
}
|
|
67
|
+
|
|
68
|
+
function writeObject(cls, record) {
|
|
69
|
+
const dir = classDir(cls)
|
|
70
|
+
fs.mkdirSync(dir, { recursive: true })
|
|
71
|
+
const fp = path.join(dir, `${record.id}.json`)
|
|
72
|
+
atomicWriteJson(fp, record)
|
|
73
|
+
const indexPath = path.join(dir, "index.json")
|
|
74
|
+
let index = readJSON(indexPath, [])
|
|
75
|
+
if (!Array.isArray(index)) index = []
|
|
76
|
+
const idx = index.findIndex(e => e?.id === record.id)
|
|
77
|
+
const entry = buildEntry(cls, record)
|
|
78
|
+
if (idx >= 0) index[idx] = entry; else index.push(entry)
|
|
79
|
+
atomicWriteJson(indexPath, index)
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
function upsertMistake(record) {
|
|
83
|
+
const dir = classDir("mistake")
|
|
84
|
+
fs.mkdirSync(dir, { recursive: true })
|
|
85
|
+
const fp = path.join(dir, "mistakes.jsonl")
|
|
86
|
+
let entries = readJSONL(fp)
|
|
87
|
+
const idx = entries.findIndex(e => e?.id === record.id)
|
|
88
|
+
if (idx >= 0) entries[idx] = record; else entries.push(record)
|
|
89
|
+
const text = entries.map(e => stableStringify(e)).join("\n")
|
|
90
|
+
fs.writeFileSync(fp, text ? `${text}\n` : "", "utf8")
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
function queryList(cls, limit = 10) {
|
|
94
|
+
if (cls === "mistake") {
|
|
95
|
+
return sortRecent(filterActive(readJSONL(path.join(classDir(cls), "mistakes.jsonl")))).slice(0, limit)
|
|
96
|
+
}
|
|
97
|
+
const dir = classDir(cls)
|
|
98
|
+
let files = []
|
|
99
|
+
try { files = fs.readdirSync(dir).filter(f => f.endsWith(".json") && f !== "index.json").map(f => path.join(dir, f)) } catch { return [] }
|
|
100
|
+
const entries = files.map(f => readJSON(f, null)).filter(Boolean).map(r => buildEntry(cls, r))
|
|
101
|
+
return sortRecent(filterActive(entries)).slice(0, limit)
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
 * Fetch one full record by id, or null when absent. Mistakes are looked up
 * in the shared JSONL log; other classes read their per-record JSON file.
 */
function queryGet(cls, id) {
  if (cls === "mistake") {
    const match = readJSONL(path.join(classDir(cls), "mistakes.jsonl")).find(e => e?.id === id)
    return match ?? null
  }
  return readJSON(path.join(classDir(cls), `${id}.json`), null)
}
|
|
108
|
+
|
|
109
|
+
/**
 * Heuristic relevance score for a record against a search query.
 * Components: +10 per occurrence of the query in a searchable field, +5 when
 * a field starts with it, +2 when a field contains it at all; +20 for an
 * exact project match plus +10 for a substring project match; up to +10
 * recency bonus decaying roughly one point per week of age; +3 for active
 * records and -2 for closed ones.
 */
function scoreRelevance(r, query, project) {
  const needle = query.toLowerCase()
  const haystacks = [
    r.summary, r.id, r.description, r.mission, r.current_state, r.failure,
    r.root_cause, r.fix, r.prevention, r.command, r.project, r.scope,
    ...(Array.isArray(r.tags) ? r.tags : []),
    ...(Array.isArray(r.next_actions) ? r.next_actions : []),
    ...(Array.isArray(r.refs) ? r.refs : []),
  ].filter(Boolean)

  let score = 0
  for (const field of haystacks) {
    const text = String(field).toLowerCase()
    let cursor = text.indexOf(needle)
    while (cursor !== -1) {
      score += 10
      cursor = text.indexOf(needle, cursor + needle.length)
    }
    if (text.startsWith(needle)) score += 5
    if (text.includes(needle)) score += 2
  }

  const recordProject = r.project ? r.project.toLowerCase() : ""
  if (r.project && recordProject === (project || "").toLowerCase()) score += 20
  if (r.project && project && recordProject.includes(project.toLowerCase())) score += 10

  // Recency bonus: 604800000 ms = one week.
  const age = Date.now() - Date.parse(r.updated_at || r.created_at || 0)
  if (!Number.isNaN(age)) score += Math.max(0, 10 - age / 604800000)

  if (r.status === "active") score += 3
  if (r.status === "closed") score -= 2
  return score
}
|
|
129
|
+
|
|
130
|
+
/**
 * Audits must cite evidence: at least one non-blank string in provenance
 * db_refs, file_refs, or log_refs.
 */
function enforceAuditEvidence(record) {
  const provenance = isPlainObject(record.provenance) ? record.provenance : {}
  for (const key of ["db_refs", "file_refs", "log_refs"]) {
    const refs = provenance[key]
    if (Array.isArray(refs) && refs.some(ref => typeof ref === "string" && ref.trim())) return true
  }
  return false
}
|
|
134
|
+
|
|
135
|
+
function handlePut(cls, id, dataStr) {
|
|
136
|
+
let parsed
|
|
137
|
+
try { parsed = JSON.parse(dataStr) } catch (e) { return `data must be valid JSON: ${e.message}` }
|
|
138
|
+
if (!isPlainObject(parsed)) return "data must be a JSON object"
|
|
139
|
+
if (!id?.trim()) return "non-blank id is required"
|
|
140
|
+
|
|
141
|
+
const now = new Date().toISOString()
|
|
142
|
+
const record = { ...parsed, id, class: cls, source: parsed.source ?? "agent", status: parsed.status ?? "active", created_at: parsed.created_at ?? now, updated_at: now }
|
|
143
|
+
|
|
144
|
+
const schema = readJSON(path.join(PACKAGE_SCHEMAS, `${cls}.schema.json`), null)
|
|
145
|
+
if (schema) {
|
|
146
|
+
const unsupported = findUnsupportedSchemaKeywords(schema)
|
|
147
|
+
if (unsupported.length) return `Unsupported schema keywords: ${unsupported.join(", ")}`
|
|
148
|
+
const errs = validateSchema(schema, record, "$")
|
|
149
|
+
if (cls === "audit" && !enforceAuditEvidence(record)) errs.push("$.provenance must include at least one non-empty evidence ref")
|
|
150
|
+
if (errs.length) return `Validation errors: ${errs.join("; ")}`
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
if (cls === "mistake") upsertMistake(record)
|
|
154
|
+
else writeObject(cls, record)
|
|
155
|
+
|
|
156
|
+
return stableStringify({ ok: true, id })
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
/**
 * Tool handler: fetch one record by id. Returns a JSON string with the
 * record, a not-found marker, or a plain error message for a blank id.
 */
function handleGet(cls, id) {
  if (!id?.trim()) return "non-blank id is required"
  const record = queryGet(cls, id.trim())
  if (!record) return stableStringify({ ok: false, found: false })
  return stableStringify({ ok: true, record })
}

/**
 * Tool handler: list index entries for a class, newest first, capped at 100.
 */
function handleList(cls, limit = 10) {
  const entries = queryList(cls, Math.min(limit, 100))
  return stableStringify({ ok: true, count: entries.length, entries })
}

/**
 * Tool handler: return the most recently updated active record of a class.
 * queryList already filters out expired records and sorts newest-first, so
 * the previous extra filterActive pass over its result was redundant and the
 * list can be capped at a single entry.
 */
function handleLatest(cls) {
  const latest = queryList(cls, 1)[0]
  if (!latest?.id) return stableStringify({ ok: false, found: false })
  const record = queryGet(cls, latest.id)
  if (!record) return stableStringify({ ok: false, found: false })
  return stableStringify({ ok: true, record })
}
|
|
179
|
+
|
|
180
|
+
function handleSearch(query, scope, classes, project, limit) {
|
|
181
|
+
const q = (query || "").trim()
|
|
182
|
+
if (!q) return "non-blank query is required"
|
|
183
|
+
const clsList = Array.isArray(classes) && classes.length ? classes : CLASSES
|
|
184
|
+
const lim = Math.min(limit ?? 10, 50)
|
|
185
|
+
let records = []
|
|
186
|
+
for (const cls of clsList) {
|
|
187
|
+
if (cls === "mistake") {
|
|
188
|
+
for (const m of readJSONL(path.join(classDir(cls), "mistakes.jsonl"))) {
|
|
189
|
+
if (!hasExpired(m)) records.push(m)
|
|
190
|
+
}
|
|
191
|
+
} else {
|
|
192
|
+
const dir = classDir(cls)
|
|
193
|
+
let files = []
|
|
194
|
+
try { files = fs.readdirSync(dir).filter(f => f.endsWith(".json") && f !== "index.json") } catch { continue }
|
|
195
|
+
for (const f of files) {
|
|
196
|
+
const r = readJSON(path.join(dir, f), null)
|
|
197
|
+
if (r && !hasExpired(r)) records.push(r)
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
}
|
|
201
|
+
if (scope === "global") records = records.filter(r => r.scope === "global" || !r.scope)
|
|
202
|
+
else if (scope === "local") records = records.filter(r => r.scope === "project" || r.scope === "session")
|
|
203
|
+
const scored = records.map(r => ({ ...buildEntry(r.class || "verification_receipt", r), score: scoreRelevance(r, q, project || "") }))
|
|
204
|
+
.filter(e => e.score > 0)
|
|
205
|
+
.sort((a, b) => b.score - a.score)
|
|
206
|
+
.slice(0, lim)
|
|
207
|
+
return stableStringify({ ok: true, count: scored.length, query: q, results: scored })
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
/**
 * OpenCode plugin factory exposing the hm_* memory tools
 * (put/get/list/latest/search). Ensures the memory and runtime directories
 * exist before registering the tools. Each handler returns a JSON string
 * (or a plain error-message string for invalid arguments).
 */
export const MemoryToolsPlugin = async () => {
  fs.mkdirSync(MEMORY_DIR, { recursive: true })
  fs.mkdirSync(path.join(ROOT, "runtime"), { recursive: true })

  return {
    tool: {
      // Create/update a record; `data` is a JSON string validated per class.
      hm_put: tool({
        description: "Create or update an OpenHermes memory record",
        args: {
          class: tool.schema.enum(CLASSES),
          id: tool.schema.string(),
          data: tool.schema.string(),
        },
        async execute(args) { return handlePut(args.class, args.id, args.data) },
      }),

      hm_get: tool({
        description: "Get a specific OpenHermes memory record by ID",
        args: {
          class: tool.schema.enum(CLASSES).describe("Memory class"),
          id: tool.schema.string().describe("Record ID"),
        },
        async execute(args) { return handleGet(args.class, args.id) },
      }),

      hm_list: tool({
        description: "List OpenHermes memory records by class, sorted by recency",
        args: {
          class: tool.schema.enum(CLASSES).describe("Memory class"),
          limit: tool.schema.number().optional().default(10).describe("Max results (max 100)"),
        },
        async execute(args) { return handleList(args.class, args.limit) },
      }),

      hm_latest: tool({
        description: "Get the latest active OpenHermes memory record by class",
        args: {
          class: tool.schema.enum(CLASSES).describe("Memory class"),
        },
        async execute(args) { return handleLatest(args.class) },
      }),

      hm_search: tool({
        description: "Search OpenHermes memory records with keyword matching and relevance ranking",
        args: {
          query: tool.schema.string().describe("Search query string"),
          scope: tool.schema.enum(["global", "local", "auto"]).optional().default("auto").describe("Search scope"),
          classes: tool.schema.array(tool.schema.enum(CLASSES)).optional().describe("Memory classes to search (default: all)"),
          project: tool.schema.string().optional().describe("Project filter"),
          limit: tool.schema.number().optional().default(10).describe("Max results (max 50)"),
        },
        async execute(args) { return handleSearch(args.query, args.scope, args.classes, args.project, args.limit) },
      }),
    },
  }
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
// The only JSON-Schema keywords this mini-validator actually enforces.
const SUPPORTED_SCHEMA_KEYS = new Set(["type", "const", "enum", "format", "minimum", "maximum", "required", "properties", "items"])
// Keywords that are tolerated but carry no validation semantics here.
const SCHEMA_METADATA_KEYS = new Set(["$schema", "title", "description", "default"])
|
|
3
|
+
|
|
4
|
+
/** True for non-null, non-array objects. */
function isPlainObject(value) {
  return !!value && typeof value === "object" && !Array.isArray(value)
}

/**
 * Check a value against a single JSON-Schema `type` string.
 * "number" additionally requires finiteness; "integer" uses Number.isInteger.
 */
function matchesType(type, value) {
  if (type === "null") return value === null
  if (type === "array") return Array.isArray(value)
  if (type === "object") return isPlainObject(value)
  if (type === "integer") return Number.isInteger(value)
  if (type === "number") return typeof value === "number" && Number.isFinite(value)
  return typeof value === type
}

/**
 * Recursively validate `value` against a (mini) JSON Schema node, appending
 * human-readable messages to `errors` with `at` as the JSON-path prefix.
 * `undefined` values are skipped — required-ness is enforced by the parent.
 * A type mismatch short-circuits the node so nested checks don't cascade.
 */
function validateNode(schema, value, at, errors) {
  if (!isPlainObject(schema) || value === undefined) return
  if (schema.const !== undefined && JSON.stringify(value) !== JSON.stringify(schema.const)) errors.push(`${at} must equal ${JSON.stringify(schema.const)}`)
  if (Array.isArray(schema.enum) && !schema.enum.some(option => JSON.stringify(option) === JSON.stringify(value))) errors.push(`${at} must be one of ${schema.enum.map(option => JSON.stringify(option)).join(", ")}`)

  const types = Array.isArray(schema.type) ? schema.type : (schema.type ? [schema.type] : [])
  if (types.length && !types.some(type => matchesType(type, value))) {
    errors.push(`${at} must be ${types.join(" or ")}`)
    return
  }

  if (typeof value === "string" && schema.format === "date-time" && Number.isNaN(Date.parse(value))) errors.push(`${at} must be a valid date-time string`)
  // Fix: the old guard was `typeof value === "number" || Number.isInteger(value)`;
  // Number.isInteger only ever returns true for numbers, so the second clause
  // was dead code. Bounds apply to numbers only.
  if (typeof value === "number") {
    if (schema.minimum !== undefined && value < schema.minimum) errors.push(`${at} must be >= ${schema.minimum}`)
    if (schema.maximum !== undefined && value > schema.maximum) errors.push(`${at} must be <= ${schema.maximum}`)
  }

  if (Array.isArray(value)) {
    value.forEach((item, index) => validateNode(schema.items, item, `${at}[${index}]`, errors))
    return
  }

  if (isPlainObject(value)) {
    for (const key of Array.isArray(schema.required) ? schema.required : []) {
      if (value[key] === undefined) errors.push(`${at}.${key} is required`)
    }
    for (const [key, child] of Object.entries(isPlainObject(schema.properties) ? schema.properties : {})) {
      if (value[key] !== undefined) validateNode(child, value[key], `${at}.${key}`, errors)
    }
  }
}

/**
 * Validate `value` against `schema`; returns an array of error messages
 * (empty when valid). `at` labels the root in reported JSON paths.
 */
function validateSchema(schema, value, at = "$") {
  const errors = []
  validateNode(schema, value, at, errors)
  return errors
}
|
|
54
|
+
|
|
55
|
+
/**
 * Walk a schema node, recording every keyword the mini-validator does not
 * implement so callers can refuse to give a false sense of validation.
 * `properties` and `items` are recursed into; metadata keywords are ignored.
 */
function visitSchemaNode(schema, at, unsupported) {
  if (!isPlainObject(schema)) return
  for (const [key, value] of Object.entries(schema)) {
    if (key === "properties") {
      if (!isPlainObject(value)) {
        unsupported.push(`${at}.properties must be an object`)
      } else {
        for (const [propertyName, propertySchema] of Object.entries(value)) {
          visitSchemaNode(propertySchema, `${at}.properties.${propertyName}`, unsupported)
        }
      }
    } else if (key === "items") {
      visitSchemaNode(value, `${at}.items`, unsupported)
    } else if (!SUPPORTED_SCHEMA_KEYS.has(key) && !SCHEMA_METADATA_KEYS.has(key)) {
      unsupported.push(`${at} uses unsupported schema keyword "${key}"`)
    }
  }
}

/** Collect all unsupported-keyword findings for a schema (empty = clean). */
function findUnsupportedSchemaKeywords(schema) {
  const findings = []
  visitSchemaNode(schema, "$", findings)
  return findings
}
|
|
76
|
+
|
|
77
|
+
export { findUnsupportedSchemaKeywords, isPlainObject, validateSchema }
|