@agntk/agent-harness 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/NOTICE +41 -0
- package/README.md +445 -0
- package/defaults/agents/summarizer.md +49 -0
- package/defaults/instincts/lead-with-answer.md +24 -0
- package/defaults/instincts/qualify-before-recommending.md +40 -0
- package/defaults/instincts/read-before-edit.md +23 -0
- package/defaults/instincts/search-before-create.md +23 -0
- package/defaults/playbooks/ship-feature.md +31 -0
- package/defaults/rules/ask-before-assuming.md +35 -0
- package/defaults/rules/operations.md +35 -0
- package/defaults/rules/respect-the-user.md +39 -0
- package/defaults/skills/business-analyst.md +181 -0
- package/defaults/skills/content-marketer.md +184 -0
- package/defaults/skills/research.md +34 -0
- package/defaults/tools/example-web-search.md +60 -0
- package/defaults/workflows/daily-reflection.md +54 -0
- package/dist/agent-framework-K4GUIICH.js +344 -0
- package/dist/agent-framework-K4GUIICH.js.map +1 -0
- package/dist/analytics-RPT73WNM.js +12 -0
- package/dist/analytics-RPT73WNM.js.map +1 -0
- package/dist/auto-processor-OLE45UI3.js +13 -0
- package/dist/auto-processor-OLE45UI3.js.map +1 -0
- package/dist/chunk-274RV3YO.js +162 -0
- package/dist/chunk-274RV3YO.js.map +1 -0
- package/dist/chunk-4CWAGBNS.js +168 -0
- package/dist/chunk-4CWAGBNS.js.map +1 -0
- package/dist/chunk-4FDUOGSZ.js +69 -0
- package/dist/chunk-4FDUOGSZ.js.map +1 -0
- package/dist/chunk-5H34JPMB.js +199 -0
- package/dist/chunk-5H34JPMB.js.map +1 -0
- package/dist/chunk-6EMOEYGU.js +102 -0
- package/dist/chunk-6EMOEYGU.js.map +1 -0
- package/dist/chunk-A7BJPQQ6.js +236 -0
- package/dist/chunk-A7BJPQQ6.js.map +1 -0
- package/dist/chunk-AGAAFJEO.js +76 -0
- package/dist/chunk-AGAAFJEO.js.map +1 -0
- package/dist/chunk-BSKDOFRT.js +65 -0
- package/dist/chunk-BSKDOFRT.js.map +1 -0
- package/dist/chunk-CHJ5GNZC.js +100 -0
- package/dist/chunk-CHJ5GNZC.js.map +1 -0
- package/dist/chunk-CSL3ERUI.js +307 -0
- package/dist/chunk-CSL3ERUI.js.map +1 -0
- package/dist/chunk-DA7IKHC4.js +229 -0
- package/dist/chunk-DA7IKHC4.js.map +1 -0
- package/dist/chunk-DGUM43GV.js +11 -0
- package/dist/chunk-DGUM43GV.js.map +1 -0
- package/dist/chunk-DTTXPHFW.js +211 -0
- package/dist/chunk-DTTXPHFW.js.map +1 -0
- package/dist/chunk-FD55B3IO.js +204 -0
- package/dist/chunk-FD55B3IO.js.map +1 -0
- package/dist/chunk-FLZU44SV.js +230 -0
- package/dist/chunk-FLZU44SV.js.map +1 -0
- package/dist/chunk-GJNNR2RA.js +200 -0
- package/dist/chunk-GJNNR2RA.js.map +1 -0
- package/dist/chunk-GNUSHD2Y.js +111 -0
- package/dist/chunk-GNUSHD2Y.js.map +1 -0
- package/dist/chunk-GUJTBGVS.js +2212 -0
- package/dist/chunk-GUJTBGVS.js.map +1 -0
- package/dist/chunk-IZ6UZ3ZL.js +207 -0
- package/dist/chunk-IZ6UZ3ZL.js.map +1 -0
- package/dist/chunk-JKMGYWXB.js +197 -0
- package/dist/chunk-JKMGYWXB.js.map +1 -0
- package/dist/chunk-KFX54TQM.js +165 -0
- package/dist/chunk-KFX54TQM.js.map +1 -0
- package/dist/chunk-M7NXUK55.js +199 -0
- package/dist/chunk-M7NXUK55.js.map +1 -0
- package/dist/chunk-MPZ3BPUI.js +374 -0
- package/dist/chunk-MPZ3BPUI.js.map +1 -0
- package/dist/chunk-OC6YSTDX.js +119 -0
- package/dist/chunk-OC6YSTDX.js.map +1 -0
- package/dist/chunk-RC6MEZB6.js +469 -0
- package/dist/chunk-RC6MEZB6.js.map +1 -0
- package/dist/chunk-RY3ZFII7.js +3440 -0
- package/dist/chunk-RY3ZFII7.js.map +1 -0
- package/dist/chunk-TAT6JU3X.js +167 -0
- package/dist/chunk-TAT6JU3X.js.map +1 -0
- package/dist/chunk-UDZIS2AQ.js +79 -0
- package/dist/chunk-UDZIS2AQ.js.map +1 -0
- package/dist/chunk-UPLBF4RZ.js +115 -0
- package/dist/chunk-UPLBF4RZ.js.map +1 -0
- package/dist/chunk-UWQTZMNI.js +154 -0
- package/dist/chunk-UWQTZMNI.js.map +1 -0
- package/dist/chunk-W4T7PGI2.js +346 -0
- package/dist/chunk-W4T7PGI2.js.map +1 -0
- package/dist/chunk-XTBKL5BI.js +111 -0
- package/dist/chunk-XTBKL5BI.js.map +1 -0
- package/dist/chunk-YIJY5DBV.js +399 -0
- package/dist/chunk-YIJY5DBV.js.map +1 -0
- package/dist/chunk-YUFNYN2H.js +242 -0
- package/dist/chunk-YUFNYN2H.js.map +1 -0
- package/dist/chunk-Z2PUCXTZ.js +94 -0
- package/dist/chunk-Z2PUCXTZ.js.map +1 -0
- package/dist/chunk-ZZJOFKAT.js +13 -0
- package/dist/chunk-ZZJOFKAT.js.map +1 -0
- package/dist/cli/index.js +3661 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config-WVMRUOCA.js +13 -0
- package/dist/config-WVMRUOCA.js.map +1 -0
- package/dist/context-loader-3ORBPMHJ.js +13 -0
- package/dist/context-loader-3ORBPMHJ.js.map +1 -0
- package/dist/conversation-QDEIDQPH.js +22 -0
- package/dist/conversation-QDEIDQPH.js.map +1 -0
- package/dist/cost-tracker-RS3W7SVY.js +24 -0
- package/dist/cost-tracker-RS3W7SVY.js.map +1 -0
- package/dist/delegate-VJCJLYEK.js +29 -0
- package/dist/delegate-VJCJLYEK.js.map +1 -0
- package/dist/emotional-state-VQVRA6ED.js +206 -0
- package/dist/emotional-state-VQVRA6ED.js.map +1 -0
- package/dist/env-discovery-2BLVMAIM.js +251 -0
- package/dist/env-discovery-2BLVMAIM.js.map +1 -0
- package/dist/export-6GCYHEHQ.js +165 -0
- package/dist/export-6GCYHEHQ.js.map +1 -0
- package/dist/graph-YUIPOSOO.js +14 -0
- package/dist/graph-YUIPOSOO.js.map +1 -0
- package/dist/harness-LCHA3DWP.js +10 -0
- package/dist/harness-LCHA3DWP.js.map +1 -0
- package/dist/harness-WE4SLCML.js +26 -0
- package/dist/harness-WE4SLCML.js.map +1 -0
- package/dist/health-NZ6WNIMV.js +23 -0
- package/dist/health-NZ6WNIMV.js.map +1 -0
- package/dist/index.d.ts +3612 -0
- package/dist/index.js +13501 -0
- package/dist/index.js.map +1 -0
- package/dist/indexer-LONANRRM.js +16 -0
- package/dist/indexer-LONANRRM.js.map +1 -0
- package/dist/instinct-learner-SRM72DHF.js +20 -0
- package/dist/instinct-learner-SRM72DHF.js.map +1 -0
- package/dist/intake-4M3HNU43.js +21 -0
- package/dist/intake-4M3HNU43.js.map +1 -0
- package/dist/intelligence-HJOCA4SJ.js +1081 -0
- package/dist/intelligence-HJOCA4SJ.js.map +1 -0
- package/dist/journal-WANJL3MI.js +24 -0
- package/dist/journal-WANJL3MI.js.map +1 -0
- package/dist/loader-C3TKIKZR.js +23 -0
- package/dist/loader-C3TKIKZR.js.map +1 -0
- package/dist/mcp-WTQJJZAO.js +15 -0
- package/dist/mcp-WTQJJZAO.js.map +1 -0
- package/dist/mcp-discovery-WPAQFL6S.js +377 -0
- package/dist/mcp-discovery-WPAQFL6S.js.map +1 -0
- package/dist/mcp-installer-6O2XXD3V.js +394 -0
- package/dist/mcp-installer-6O2XXD3V.js.map +1 -0
- package/dist/metrics-KXGNFAAB.js +20 -0
- package/dist/metrics-KXGNFAAB.js.map +1 -0
- package/dist/primitive-registry-I6VTIR4W.js +512 -0
- package/dist/primitive-registry-I6VTIR4W.js.map +1 -0
- package/dist/project-discovery-C4UMD7JI.js +246 -0
- package/dist/project-discovery-C4UMD7JI.js.map +1 -0
- package/dist/provider-LQHQX7Z7.js +26 -0
- package/dist/provider-LQHQX7Z7.js.map +1 -0
- package/dist/provider-SXPQZ74H.js +28 -0
- package/dist/provider-SXPQZ74H.js.map +1 -0
- package/dist/rate-limiter-RLRVM325.js +22 -0
- package/dist/rate-limiter-RLRVM325.js.map +1 -0
- package/dist/rule-engine-YGQ3RYZM.js +182 -0
- package/dist/rule-engine-YGQ3RYZM.js.map +1 -0
- package/dist/scaffold-A3VRRCBV.js +347 -0
- package/dist/scaffold-A3VRRCBV.js.map +1 -0
- package/dist/scheduler-XHHIVHRI.js +397 -0
- package/dist/scheduler-XHHIVHRI.js.map +1 -0
- package/dist/search-V3W5JMJG.js +75 -0
- package/dist/search-V3W5JMJG.js.map +1 -0
- package/dist/semantic-search-2DTOO5UX.js +241 -0
- package/dist/semantic-search-2DTOO5UX.js.map +1 -0
- package/dist/serve-DTQ3HENY.js +291 -0
- package/dist/serve-DTQ3HENY.js.map +1 -0
- package/dist/sessions-CZGVXKQE.js +21 -0
- package/dist/sessions-CZGVXKQE.js.map +1 -0
- package/dist/sources-RW5DT56F.js +32 -0
- package/dist/sources-RW5DT56F.js.map +1 -0
- package/dist/starter-packs-76YUVHEU.js +893 -0
- package/dist/starter-packs-76YUVHEU.js.map +1 -0
- package/dist/state-GMXILIHW.js +13 -0
- package/dist/state-GMXILIHW.js.map +1 -0
- package/dist/state-merge-NKO5FRBA.js +174 -0
- package/dist/state-merge-NKO5FRBA.js.map +1 -0
- package/dist/telemetry-UC6PBXC7.js +22 -0
- package/dist/telemetry-UC6PBXC7.js.map +1 -0
- package/dist/tool-executor-MJ7IG7PQ.js +28 -0
- package/dist/tool-executor-MJ7IG7PQ.js.map +1 -0
- package/dist/tools-DZ4KETET.js +20 -0
- package/dist/tools-DZ4KETET.js.map +1 -0
- package/dist/types-EW7AIB3R.js +18 -0
- package/dist/types-EW7AIB3R.js.map +1 -0
- package/dist/types-WGDLSPO6.js +16 -0
- package/dist/types-WGDLSPO6.js.map +1 -0
- package/dist/universal-installer-QGS4SJGX.js +578 -0
- package/dist/universal-installer-QGS4SJGX.js.map +1 -0
- package/dist/validator-7WXMDIHH.js +22 -0
- package/dist/validator-7WXMDIHH.js.map +1 -0
- package/dist/verification-gate-FYXUX6LH.js +246 -0
- package/dist/verification-gate-FYXUX6LH.js.map +1 -0
- package/dist/versioning-Z3XNE2Q2.js +271 -0
- package/dist/versioning-Z3XNE2Q2.js.map +1 -0
- package/dist/watcher-ISJC7YKL.js +109 -0
- package/dist/watcher-ISJC7YKL.js.map +1 -0
- package/dist/web-server-DD7ZOP46.js +28 -0
- package/dist/web-server-DD7ZOP46.js.map +1 -0
- package/package.json +76 -0
- package/sources.yaml +121 -0
- package/templates/assistant/CORE.md +24 -0
- package/templates/assistant/SYSTEM.md +24 -0
- package/templates/assistant/config.yaml +51 -0
- package/templates/base/CORE.md +17 -0
- package/templates/base/SYSTEM.md +24 -0
- package/templates/base/config.yaml +51 -0
- package/templates/claude-opus/config.yaml +51 -0
- package/templates/code-reviewer/CORE.md +25 -0
- package/templates/code-reviewer/SYSTEM.md +30 -0
- package/templates/code-reviewer/config.yaml +51 -0
- package/templates/gpt4/config.yaml +51 -0
- package/templates/local/config.yaml +51 -0
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
listUnjournaled,
|
|
5
|
+
synthesizeJournal
|
|
6
|
+
} from "./chunk-CSL3ERUI.js";
|
|
7
|
+
import {
|
|
8
|
+
learnFromSessions
|
|
9
|
+
} from "./chunk-GJNNR2RA.js";
|
|
10
|
+
import {
|
|
11
|
+
recordRun
|
|
12
|
+
} from "./chunk-6EMOEYGU.js";
|
|
13
|
+
import {
|
|
14
|
+
createHarness
|
|
15
|
+
} from "./chunk-YIJY5DBV.js";
|
|
16
|
+
import "./chunk-AGAAFJEO.js";
|
|
17
|
+
import "./chunk-5H34JPMB.js";
|
|
18
|
+
import {
|
|
19
|
+
delegateTo
|
|
20
|
+
} from "./chunk-YUFNYN2H.js";
|
|
21
|
+
import "./chunk-DA7IKHC4.js";
|
|
22
|
+
import "./chunk-UWQTZMNI.js";
|
|
23
|
+
import "./chunk-UDZIS2AQ.js";
|
|
24
|
+
import {
|
|
25
|
+
archiveOldFiles
|
|
26
|
+
} from "./chunk-DTTXPHFW.js";
|
|
27
|
+
import "./chunk-Z2PUCXTZ.js";
|
|
28
|
+
import {
|
|
29
|
+
recordFailure,
|
|
30
|
+
recordSuccess
|
|
31
|
+
} from "./chunk-TAT6JU3X.js";
|
|
32
|
+
import "./chunk-JKMGYWXB.js";
|
|
33
|
+
import "./chunk-OC6YSTDX.js";
|
|
34
|
+
import "./chunk-XTBKL5BI.js";
|
|
35
|
+
import {
|
|
36
|
+
loadDirectory,
|
|
37
|
+
parseHarnessDocument
|
|
38
|
+
} from "./chunk-UPLBF4RZ.js";
|
|
39
|
+
import {
|
|
40
|
+
log
|
|
41
|
+
} from "./chunk-BSKDOFRT.js";
|
|
42
|
+
import "./chunk-IZ6UZ3ZL.js";
|
|
43
|
+
import {
|
|
44
|
+
loadConfig
|
|
45
|
+
} from "./chunk-CHJ5GNZC.js";
|
|
46
|
+
import "./chunk-4CWAGBNS.js";
|
|
47
|
+
import "./chunk-ZZJOFKAT.js";
|
|
48
|
+
|
|
49
|
+
// src/runtime/scheduler.ts
|
|
50
|
+
import cron from "node-cron";
|
|
51
|
+
import { existsSync } from "fs";
|
|
52
|
+
import { join } from "path";
|
|
53
|
+
/** Resolve after roughly `ms` milliseconds (thin Promise wrapper over setTimeout). */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
56
|
+
/**
 * Check whether the given (or current) time falls inside configured quiet hours.
 *
 * Quiet hours may wrap around midnight: `start: 23, end: 6` means 23:00-05:59.
 * `start === end` means "no quiet window configured".
 *
 * @param {object} config - harness config; reads `config.runtime.quiet_hours`
 *   ({start, end} as 0-23 hours) and `config.runtime.timezone` (IANA name).
 * @param {Date} [now] - time to evaluate; defaults to the current time.
 * @returns {boolean} true when scheduled workflows should be skipped.
 */
function isQuietHours(config, now) {
  const { start, end } = config.runtime.quiet_hours;
  const tz = config.runtime.timezone;
  let hour;
  try {
    // FIX: use hourCycle "h23" to guarantee hours in 0-23. The previous
    // `hour12: false` could select the h24 cycle on some ICU/engine
    // versions, formatting midnight as "24" and breaking the range
    // comparisons below (e.g. quiet hours 0-6 would miss 00:xx).
    const formatter = new Intl.DateTimeFormat("en-US", {
      hour: "numeric",
      hourCycle: "h23",
      timeZone: tz
    });
    hour = parseInt(formatter.format(now ?? new Date()), 10);
  } catch {
    // Invalid/unknown timezone: fall back to the host's local clock.
    hour = (now ?? new Date()).getHours();
  }
  if (start === end) return false; // no quiet window configured
  if (start < end) {
    // Simple same-day range (e.g. 8-17 means 08:00-16:59).
    return hour >= start && hour < end;
  }
  // Range wraps midnight (e.g. 23-6 means 23:00-05:59).
  return hour >= start || hour < end;
}
|
|
76
|
+
/**
 * Cron-driven runtime scheduler for a harness directory.
 *
 * Responsibilities (each optional, controlled by constructor options and
 * workflow frontmatter):
 *  - schedule every workflow doc in `<harnessDir>/workflows` that declares a
 *    `schedule` cron expression;
 *  - daily auto-archival of expired sessions/journals;
 *  - auto-journal synthesis, optionally followed by instinct learning;
 *  - rate limiting / cooldown for workflows flagged `proactive: true`.
 *
 * Every user-supplied hook (onRun, onError, ...) is invoked inside
 * try/catch so a faulty hook can never break the scheduler itself.
 */
var Scheduler = class {
  /** workflowId -> { doc, cronExpression, task } for every scheduled workflow */
  workflows = new Map();
  harnessDir;
  apiKey;
  autoArchival;
  archivalCron;
  archivalTask = null;
  autoJournal;
  autoLearn;
  journalTask = null;
  /** Tracks proactive executions: workflowId -> timestamps of recent runs */
  proactiveHistory = new Map();
  onRun;
  onError;
  onSchedule;
  onSkipQuietHours;
  onArchival;
  onRetry;
  onJournal;
  onLearn;
  running = false;
  constructor(options) {
    this.harnessDir = options.harnessDir;
    this.apiKey = options.apiKey;
    // Archival defaults ON (daily at 23:00); journal/learn default OFF.
    this.autoArchival = options.autoArchival ?? true;
    this.archivalCron = options.archivalCron ?? "0 23 * * *";
    this.autoJournal = options.autoJournal ?? false;
    this.autoLearn = options.autoLearn ?? false;
    this.onRun = options.onRun;
    this.onError = options.onError;
    this.onSchedule = options.onSchedule;
    this.onSkipQuietHours = options.onSkipQuietHours;
    this.onArchival = options.onArchival;
    this.onRetry = options.onRetry;
    this.onJournal = options.onJournal;
    this.onLearn = options.onLearn;
  }
  /**
   * Start all cron tasks: auto-archival, auto-journal, and every workflow
   * under `<harnessDir>/workflows` with a valid `schedule`. Idempotent —
   * calling start() while already running is a no-op.
   */
  start() {
    if (this.running) return;
    this.running = true;
    if (this.autoArchival) {
      if (cron.validate(this.archivalCron)) {
        this.archivalTask = cron.schedule(this.archivalCron, () => {
          this.runArchival();
        });
        log.debug(`Auto-archival scheduled: ${this.archivalCron}`);
      } else {
        // FIX: an invalid archival cron used to be silently ignored;
        // warn for consistency with the auto-journal path below.
        log.warn(`Invalid archival cron: ${this.archivalCron}`);
      }
    }
    if (this.autoJournal) {
      // `autoJournal: true` uses the default nightly schedule.
      const journalCron = typeof this.autoJournal === "string" ? this.autoJournal : "0 22 * * *";
      if (cron.validate(journalCron)) {
        this.journalTask = cron.schedule(journalCron, () => {
          // Fire-and-forget: runJournalSynthesis handles its own errors.
          void this.runJournalSynthesis();
        });
        log.debug(`Auto-journal scheduled: ${journalCron}${this.autoLearn ? " (with auto-learn)" : ""}`);
      } else {
        log.warn(`Invalid auto_journal cron: ${journalCron}`);
      }
    }
    const workflowDir = join(this.harnessDir, "workflows");
    if (!existsSync(workflowDir)) return;
    const docs = loadDirectory(workflowDir);
    for (const doc of docs) {
      const cronExpr = doc.frontmatter.schedule;
      if (!cronExpr) continue;
      if (!cron.validate(cronExpr)) {
        // Surface the bad expression via the onError hook instead of throwing.
        try {
          this.onError?.(doc.frontmatter.id, new Error(`Invalid cron expression: ${cronExpr}`));
        } catch (e) {
          log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);
        }
        continue;
      }
      const task = cron.schedule(cronExpr, async () => {
        await this.executeWorkflow(doc);
      });
      this.workflows.set(doc.frontmatter.id, { doc, cronExpression: cronExpr, task });
      try {
        this.onSchedule?.(doc.frontmatter.id, cronExpr);
      } catch (e) {
        log.warn(`onSchedule hook failed: ${e instanceof Error ? e.message : String(e)}`);
      }
    }
  }
  /** Stop every cron task and clear all scheduler state. Idempotent. */
  stop() {
    if (!this.running) return;
    this.running = false;
    if (this.archivalTask) {
      this.archivalTask.stop();
      this.archivalTask = null;
    }
    if (this.journalTask) {
      this.journalTask.stop();
      this.journalTask = null;
    }
    for (const [, workflow] of this.workflows) {
      workflow.task?.stop();
    }
    this.workflows.clear();
    this.proactiveHistory.clear();
  }
  /**
   * Execute a workflow doc immediately, honoring quiet hours, proactive
   * cooldowns, and retry frontmatter (`max_retries`, `retry_delay_ms` with
   * exponential backoff).
   *
   * @param {object} doc - parsed harness document (frontmatter + body).
   * @returns {Promise<string>} the model's text, or "" when skipped.
   * @throws the last error once every attempt has failed.
   */
  async executeWorkflow(doc) {
    const workflowId = doc.frontmatter.id;
    const config = loadConfig(this.harnessDir);
    if (isQuietHours(config)) {
      log.debug(`Skipping workflow "${workflowId}" \u2014 quiet hours active`);
      try {
        this.onSkipQuietHours?.(workflowId);
      } catch (e) {
        log.warn(`onSkipQuietHours hook failed: ${e instanceof Error ? e.message : String(e)}`);
      }
      return "";
    }
    const isProactive = doc.frontmatter["proactive"] === true;
    if (isProactive && !this.checkProactiveCooldown(workflowId, config)) {
      log.debug(`Skipping proactive workflow "${workflowId}" \u2014 rate limited or in cooldown`);
      return "";
    }
    const maxRetries = doc.frontmatter.max_retries ?? 0;
    const baseDelay = doc.frontmatter.retry_delay_ms ?? 1e3;
    let lastError = null;
    const startTime = Date.now();
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        const prompt = `Execute this workflow:

${doc.body}`;
        let resultText;
        let tokensUsed;
        // Frontmatter `with: <agentId>` delegates instead of running inline.
        const delegateAgentId = doc.frontmatter.with;
        if (delegateAgentId) {
          log.debug(`Workflow "${workflowId}" delegating to agent "${delegateAgentId}"`);
          const delegateResult = await delegateTo({
            harnessDir: this.harnessDir,
            agentId: delegateAgentId,
            prompt,
            apiKey: this.apiKey
          });
          resultText = delegateResult.text;
          tokensUsed = delegateResult.usage.totalTokens;
        } else {
          const agent = createHarness({
            dir: this.harnessDir,
            apiKey: this.apiKey
          });
          // FIX: shut the harness down in `finally` so a failed run no
          // longer leaks it (previously shutdown was skipped when run()
          // threw, leaking one harness per retry attempt).
          let result;
          try {
            result = await agent.run(prompt);
          } finally {
            await agent.shutdown();
          }
          resultText = result.text;
          tokensUsed = result.usage.totalTokens;
        }
        recordSuccess(this.harnessDir);
        const endTime2 = Date.now();
        recordRun(this.harnessDir, {
          workflow_id: workflowId,
          started: new Date(startTime).toISOString(),
          ended: new Date(endTime2).toISOString(),
          duration_ms: endTime2 - startTime,
          success: true,
          tokens_used: tokensUsed,
          attempt: attempt + 1,
          max_retries: maxRetries
        });
        try {
          this.onRun?.(workflowId, resultText);
        } catch (e) {
          log.warn(`onRun hook failed: ${e instanceof Error ? e.message : String(e)}`);
        }
        return resultText;
      } catch (err) {
        lastError = err instanceof Error ? err : new Error(String(err));
        if (attempt < maxRetries) {
          // Exponential backoff: baseDelay * 2^attempt.
          const delay = baseDelay * Math.pow(2, attempt);
          log.debug(`Workflow "${workflowId}" failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms`);
          try {
            this.onRetry?.(workflowId, attempt + 1, maxRetries, lastError);
          } catch (e) {
            log.warn(`onRetry hook failed: ${e instanceof Error ? e.message : String(e)}`);
          }
          await sleep(delay);
        }
      }
    }
    // All attempts exhausted: record the failure, notify, and rethrow.
    recordFailure(this.harnessDir, lastError?.message);
    const endTime = Date.now();
    recordRun(this.harnessDir, {
      workflow_id: workflowId,
      started: new Date(startTime).toISOString(),
      ended: new Date(endTime).toISOString(),
      duration_ms: endTime - startTime,
      success: false,
      error: lastError?.message,
      attempt: maxRetries + 1,
      max_retries: maxRetries
    });
    try {
      this.onError?.(workflowId, lastError);
    } catch (e) {
      log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);
    }
    throw lastError;
  }
  /**
   * Run a single workflow by id, loading `<workflowId>.md` from the
   * workflows directory. Throws when the file does not exist.
   */
  async runOnce(workflowId) {
    const workflowDir = join(this.harnessDir, "workflows");
    const filePath = join(workflowDir, `${workflowId}.md`);
    if (!existsSync(filePath)) {
      throw new Error(`Workflow not found: ${workflowId}`);
    }
    const doc = parseHarnessDocument(filePath);
    return this.executeWorkflow(doc);
  }
  /** List every scheduled workflow as { id, cron, path }. */
  listScheduled() {
    return Array.from(this.workflows.entries()).map(([id, w]) => ({
      id,
      cron: w.cronExpression,
      path: w.doc.path
    }));
  }
  /** Run archival of expired sessions/journals based on config retention policy. */
  runArchival() {
    try {
      const config = loadConfig(this.harnessDir);
      const result = archiveOldFiles(
        this.harnessDir,
        config.memory.session_retention_days,
        config.memory.journal_retention_days
      );
      const total = result.sessionsArchived + result.journalsArchived;
      if (total > 0) {
        log.info(`Archived ${result.sessionsArchived} session(s), ${result.journalsArchived} journal(s)`);
      }
      try {
        this.onArchival?.(result.sessionsArchived, result.journalsArchived);
      } catch (e) {
        log.warn(`onArchival hook failed: ${e instanceof Error ? e.message : String(e)}`);
      }
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err));
      log.error(`Archival failed: ${error.message}`);
      try {
        this.onError?.("__archival__", error);
      } catch (e) {
        log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);
      }
    }
  }
  /**
   * Synthesize today's journal from unjournaled sessions.
   * Optionally runs instinct learning after synthesis if auto_learn is enabled.
   * Never throws — failures are logged and routed to onError("__auto_journal__").
   */
  async runJournalSynthesis() {
    try {
      const unjournaled = listUnjournaled(this.harnessDir);
      if (unjournaled.length === 0) {
        log.debug("Auto-journal: no unjournaled sessions, skipping");
        return;
      }
      const today = new Date().toISOString().slice(0, 10);
      log.info(`Auto-journal: synthesizing ${unjournaled.length} unjournaled date(s)`);
      const entry = await synthesizeJournal(this.harnessDir, today, this.apiKey);
      try {
        this.onJournal?.(today, entry.sessions.length);
      } catch (e) {
        log.warn(`onJournal hook failed: ${e instanceof Error ? e.message : String(e)}`);
      }
      if (this.autoLearn) {
        log.info("Auto-learn: running instinct learning after journal synthesis");
        const learnResult = await learnFromSessions(this.harnessDir, true, this.apiKey);
        try {
          this.onLearn?.(learnResult.installed.length, learnResult.skipped.length);
        } catch (e) {
          log.warn(`onLearn hook failed: ${e instanceof Error ? e.message : String(e)}`);
        }
      }
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err));
      log.error(`Auto-journal failed: ${error.message}`);
      try {
        this.onError?.("__auto_journal__", error);
      } catch (e) {
        log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);
      }
    }
  }
  /**
   * Check if a proactive workflow is allowed to run based on rate limits and cooldown.
   * Returns true if the workflow should proceed, false if it should be skipped.
   * Side effect: records the current timestamp when the run is allowed.
   */
  checkProactiveCooldown(workflowId, config) {
    const proactive = config.proactive;
    // Proactive limiting disabled in config -> always allow.
    if (!proactive?.enabled) return true;
    const now = Date.now();
    const oneHourAgo = now - 36e5;
    const cooldownMs = (proactive.cooldown_minutes ?? 30) * 6e4;
    const maxPerHour = proactive.max_per_hour ?? 5;
    let history = this.proactiveHistory.get(workflowId);
    if (!history) {
      history = [];
      this.proactiveHistory.set(workflowId, history);
    }
    // Drop entries older than one hour and persist the pruned list.
    const recent = history.filter((ts) => ts > oneHourAgo);
    this.proactiveHistory.set(workflowId, recent);
    if (recent.length >= maxPerHour) {
      log.debug(`Proactive cooldown: ${workflowId} hit max_per_hour (${maxPerHour})`);
      return false;
    }
    if (recent.length > 0) {
      const lastRun = recent[recent.length - 1];
      if (now - lastRun < cooldownMs) {
        log.debug(`Proactive cooldown: ${workflowId} within cooldown (${proactive.cooldown_minutes}min)`);
        return false;
      }
    }
    recent.push(now);
    return true;
  }
  /** Whether start() has been called without a matching stop(). */
  isRunning() {
    return this.running;
  }
};
|
|
393
|
+
// Public module surface: the Scheduler class plus the quiet-hours predicate.
export {
  Scheduler,
  isQuietHours
};
|
|
397
|
+
//# sourceMappingURL=scheduler-XHHIVHRI.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/scheduler.ts"],"sourcesContent":["import cron from 'node-cron';\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport { loadDirectory, parseHarnessDocument } from '../primitives/loader.js';\nimport { loadConfig } from '../core/config.js';\nimport { createHarness } from '../core/harness.js';\nimport { delegateTo } from './delegate.js';\nimport { archiveOldFiles } from './sessions.js';\nimport { recordRun } from './metrics.js';\nimport { log } from '../core/logger.js';\nimport { recordSuccess, recordFailure } from './health.js';\nimport { synthesizeJournal, listUnjournaled } from './journal.js';\nimport { learnFromSessions } from './instinct-learner.js';\nimport type { HarnessConfig, HarnessDocument } from '../core/types.js';\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\n/**\n * Check if the current time falls within quiet hours.\n * Quiet hours wrap around midnight (e.g. start: 23, end: 6 means 23:00–05:59).\n * Returns true if the agent should be quiet (no scheduled workflows).\n */\nexport function isQuietHours(\n config: HarnessConfig,\n now?: Date,\n): boolean {\n const { start, end } = config.runtime.quiet_hours;\n const tz = config.runtime.timezone;\n\n // Get current hour in the configured timezone\n let hour: number;\n try {\n const formatter = new Intl.DateTimeFormat('en-US', {\n hour: 'numeric',\n hour12: false,\n timeZone: tz,\n });\n hour = parseInt(formatter.format(now ?? new Date()), 10);\n } catch {\n // Fallback to local time if timezone is invalid\n hour = (now ?? 
new Date()).getHours();\n }\n\n if (start === end) return false; // No quiet hours configured\n if (start < end) {\n // Simple range (e.g., start: 8, end: 17 means 8:00–16:59)\n return hour >= start && hour < end;\n }\n // Wraps midnight (e.g., start: 23, end: 6 means 23:00–05:59)\n return hour >= start || hour < end;\n}\n\nexport interface ScheduledWorkflow {\n doc: HarnessDocument;\n cronExpression: string;\n task: ReturnType<typeof cron.schedule> | null;\n}\n\nexport interface SchedulerOptions {\n harnessDir: string;\n apiKey?: string;\n /** Enable daily auto-archival of expired sessions/journals (default: true) */\n autoArchival?: boolean;\n /** Cron expression for auto-archival (default: \"0 23 * * *\" = daily at 23:00) */\n archivalCron?: string;\n /** Enable auto-journal synthesis (cron string or true for default \"0 22 * * *\") */\n autoJournal?: boolean | string;\n /** Enable auto-learn after journal synthesis (default: false) */\n autoLearn?: boolean;\n onRun?: (workflowId: string, result: string) => void;\n onError?: (workflowId: string, error: Error) => void;\n onSchedule?: (workflowId: string, cron: string) => void;\n onSkipQuietHours?: (workflowId: string) => void;\n onArchival?: (sessionsArchived: number, journalsArchived: number) => void;\n onRetry?: (workflowId: string, attempt: number, maxRetries: number, error: Error) => void;\n onJournal?: (date: string, sessionsCount: number) => void;\n onLearn?: (installed: number, skipped: number) => void;\n}\n\nexport class Scheduler {\n private workflows: Map<string, ScheduledWorkflow> = new Map();\n private harnessDir: string;\n private apiKey?: string;\n private autoArchival: boolean;\n private archivalCron: string;\n private archivalTask: ReturnType<typeof cron.schedule> | null = null;\n private autoJournal: boolean | string;\n private autoLearn: boolean;\n private journalTask: ReturnType<typeof cron.schedule> | null = null;\n /** Tracks proactive executions: workflowId → timestamps of recent runs */\n 
private proactiveHistory: Map<string, number[]> = new Map();\n private onRun?: (workflowId: string, result: string) => void;\n private onError?: (workflowId: string, error: Error) => void;\n private onSchedule?: (workflowId: string, cron: string) => void;\n private onSkipQuietHours?: (workflowId: string) => void;\n private onArchival?: (sessionsArchived: number, journalsArchived: number) => void;\n private onRetry?: (workflowId: string, attempt: number, maxRetries: number, error: Error) => void;\n private onJournal?: (date: string, sessionsCount: number) => void;\n private onLearn?: (installed: number, skipped: number) => void;\n private running = false;\n\n constructor(options: SchedulerOptions) {\n this.harnessDir = options.harnessDir;\n this.apiKey = options.apiKey;\n this.autoArchival = options.autoArchival ?? true;\n this.archivalCron = options.archivalCron ?? '0 23 * * *';\n this.autoJournal = options.autoJournal ?? false;\n this.autoLearn = options.autoLearn ?? false;\n this.onRun = options.onRun;\n this.onError = options.onError;\n this.onSchedule = options.onSchedule;\n this.onSkipQuietHours = options.onSkipQuietHours;\n this.onArchival = options.onArchival;\n this.onRetry = options.onRetry;\n this.onJournal = options.onJournal;\n this.onLearn = options.onLearn;\n }\n\n start(): void {\n if (this.running) return;\n this.running = true;\n\n // Schedule auto-archival\n if (this.autoArchival && cron.validate(this.archivalCron)) {\n this.archivalTask = cron.schedule(this.archivalCron, () => {\n this.runArchival();\n });\n log.debug(`Auto-archival scheduled: ${this.archivalCron}`);\n }\n\n // Schedule auto-journal synthesis\n if (this.autoJournal) {\n const journalCron = typeof this.autoJournal === 'string' ? this.autoJournal : '0 22 * * *';\n if (cron.validate(journalCron)) {\n this.journalTask = cron.schedule(journalCron, () => {\n void this.runJournalSynthesis();\n });\n log.debug(`Auto-journal scheduled: ${journalCron}${this.autoLearn ? 
' (with auto-learn)' : ''}`);\n } else {\n log.warn(`Invalid auto_journal cron: ${journalCron}`);\n }\n }\n\n // Load all workflows\n const workflowDir = join(this.harnessDir, 'workflows');\n if (!existsSync(workflowDir)) return;\n\n const docs = loadDirectory(workflowDir);\n\n for (const doc of docs) {\n const cronExpr = doc.frontmatter.schedule;\n if (!cronExpr) continue;\n\n if (!cron.validate(cronExpr)) {\n try { this.onError?.(doc.frontmatter.id, new Error(`Invalid cron expression: ${cronExpr}`)); } catch (e) {\n log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n continue;\n }\n\n const task = cron.schedule(cronExpr, async () => {\n await this.executeWorkflow(doc);\n });\n\n this.workflows.set(doc.frontmatter.id, { doc, cronExpression: cronExpr, task });\n try { this.onSchedule?.(doc.frontmatter.id, cronExpr); } catch (e) {\n log.warn(`onSchedule hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n }\n }\n\n stop(): void {\n if (!this.running) return;\n this.running = false;\n\n if (this.archivalTask) {\n this.archivalTask.stop();\n this.archivalTask = null;\n }\n\n if (this.journalTask) {\n this.journalTask.stop();\n this.journalTask = null;\n }\n\n for (const [, workflow] of this.workflows) {\n workflow.task?.stop();\n }\n this.workflows.clear();\n this.proactiveHistory.clear();\n }\n\n async executeWorkflow(doc: HarnessDocument): Promise<string> {\n const workflowId = doc.frontmatter.id;\n\n // Check quiet hours — skip scheduled workflows during quiet time\n const config = loadConfig(this.harnessDir);\n if (isQuietHours(config)) {\n log.debug(`Skipping workflow \"${workflowId}\" — quiet hours active`);\n try { this.onSkipQuietHours?.(workflowId); } catch (e) {\n log.warn(`onSkipQuietHours hook failed: ${e instanceof Error ? 
e.message : String(e)}`);\n }\n return '';\n }\n\n // Check proactive cooldown — if workflow has proactive: true in frontmatter\n const isProactive = (doc.frontmatter as Record<string, unknown>)['proactive'] === true;\n if (isProactive && !this.checkProactiveCooldown(workflowId, config)) {\n log.debug(`Skipping proactive workflow \"${workflowId}\" — rate limited or in cooldown`);\n return '';\n }\n\n const maxRetries = doc.frontmatter.max_retries ?? 0;\n const baseDelay = doc.frontmatter.retry_delay_ms ?? 1000;\n let lastError: Error | null = null;\n const startTime = Date.now();\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n try {\n // The workflow body IS the prompt — it describes what to do\n const prompt = `Execute this workflow:\\n\\n${doc.body}`;\n\n let resultText: string;\n let tokensUsed: number;\n\n // If workflow has a `with:` field, delegate to that sub-agent\n const delegateAgentId = doc.frontmatter.with;\n if (delegateAgentId) {\n log.debug(`Workflow \"${workflowId}\" delegating to agent \"${delegateAgentId}\"`);\n const delegateResult = await delegateTo({\n harnessDir: this.harnessDir,\n agentId: delegateAgentId,\n prompt,\n apiKey: this.apiKey,\n });\n resultText = delegateResult.text;\n tokensUsed = delegateResult.usage.totalTokens;\n } else {\n const agent = createHarness({\n dir: this.harnessDir,\n apiKey: this.apiKey,\n });\n const result = await agent.run(prompt);\n await agent.shutdown();\n resultText = result.text;\n tokensUsed = result.usage.totalTokens;\n }\n\n // Record success in health metrics\n recordSuccess(this.harnessDir);\n\n // Record successful run\n const endTime = Date.now();\n recordRun(this.harnessDir, {\n workflow_id: workflowId,\n started: new Date(startTime).toISOString(),\n ended: new Date(endTime).toISOString(),\n duration_ms: endTime - startTime,\n success: true,\n tokens_used: tokensUsed,\n attempt: attempt + 1,\n max_retries: maxRetries,\n });\n\n try { this.onRun?.(workflowId, resultText); } catch (e) 
{\n log.warn(`onRun hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n return resultText;\n } catch (err) {\n lastError = err instanceof Error ? err : new Error(String(err));\n\n if (attempt < maxRetries) {\n // Exponential backoff: baseDelay * 2^attempt\n const delay = baseDelay * Math.pow(2, attempt);\n log.debug(`Workflow \"${workflowId}\" failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms`);\n try { this.onRetry?.(workflowId, attempt + 1, maxRetries, lastError); } catch (e) {\n log.warn(`onRetry hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n await sleep(delay);\n }\n }\n }\n\n // Record failure in health metrics\n recordFailure(this.harnessDir, lastError?.message);\n\n // Record failed run\n const endTime = Date.now();\n recordRun(this.harnessDir, {\n workflow_id: workflowId,\n started: new Date(startTime).toISOString(),\n ended: new Date(endTime).toISOString(),\n duration_ms: endTime - startTime,\n success: false,\n error: lastError?.message,\n attempt: maxRetries + 1,\n max_retries: maxRetries,\n });\n\n // All attempts exhausted\n try { this.onError?.(workflowId, lastError!); } catch (e) {\n log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n throw lastError;\n }\n\n async runOnce(workflowId: string): Promise<string> {\n const workflowDir = join(this.harnessDir, 'workflows');\n const filePath = join(workflowDir, `${workflowId}.md`);\n\n if (!existsSync(filePath)) {\n throw new Error(`Workflow not found: ${workflowId}`);\n }\n\n const doc = parseHarnessDocument(filePath);\n return this.executeWorkflow(doc);\n }\n\n listScheduled(): Array<{ id: string; cron: string; path: string }> {\n return Array.from(this.workflows.entries()).map(([id, w]) => ({\n id,\n cron: w.cronExpression,\n path: w.doc.path,\n }));\n }\n\n /** Run archival of expired sessions/journals based on config retention policy. 
*/\n runArchival(): void {\n try {\n const config = loadConfig(this.harnessDir);\n const result = archiveOldFiles(\n this.harnessDir,\n config.memory.session_retention_days,\n config.memory.journal_retention_days,\n );\n const total = result.sessionsArchived + result.journalsArchived;\n if (total > 0) {\n log.info(`Archived ${result.sessionsArchived} session(s), ${result.journalsArchived} journal(s)`);\n }\n try { this.onArchival?.(result.sessionsArchived, result.journalsArchived); } catch (e) {\n log.warn(`onArchival hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n log.error(`Archival failed: ${error.message}`);\n try { this.onError?.('__archival__', error); } catch (e) {\n log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n }\n }\n\n /**\n * Synthesize today's journal from unjournaled sessions.\n * Optionally runs instinct learning after synthesis if auto_learn is enabled.\n */\n async runJournalSynthesis(): Promise<void> {\n try {\n const unjournaled = listUnjournaled(this.harnessDir);\n if (unjournaled.length === 0) {\n log.debug('Auto-journal: no unjournaled sessions, skipping');\n return;\n }\n\n // Synthesize today's journal\n const today = new Date().toISOString().slice(0, 10);\n log.info(`Auto-journal: synthesizing ${unjournaled.length} unjournaled date(s)`);\n const entry = await synthesizeJournal(this.harnessDir, today, this.apiKey);\n\n try { this.onJournal?.(today, entry.sessions.length); } catch (e) {\n log.warn(`onJournal hook failed: ${e instanceof Error ? 
e.message : String(e)}`);\n }\n\n // Auto-learn if enabled\n if (this.autoLearn) {\n log.info('Auto-learn: running instinct learning after journal synthesis');\n const learnResult = await learnFromSessions(this.harnessDir, true, this.apiKey);\n try { this.onLearn?.(learnResult.installed.length, learnResult.skipped.length); } catch (e) {\n log.warn(`onLearn hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n }\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n log.error(`Auto-journal failed: ${error.message}`);\n try { this.onError?.('__auto_journal__', error); } catch (e) {\n log.warn(`onError hook failed: ${e instanceof Error ? e.message : String(e)}`);\n }\n }\n }\n\n /**\n * Check if a proactive workflow is allowed to run based on rate limits and cooldown.\n * Returns true if the workflow should proceed, false if it should be skipped.\n */\n checkProactiveCooldown(workflowId: string, config: HarnessConfig): boolean {\n const proactive = config.proactive;\n if (!proactive?.enabled) return true; // proactive not enabled — no restrictions\n\n const now = Date.now();\n const oneHourAgo = now - 3_600_000;\n const cooldownMs = (proactive.cooldown_minutes ?? 30) * 60_000;\n const maxPerHour = proactive.max_per_hour ?? 
5;\n\n // Get or create history for this workflow\n let history = this.proactiveHistory.get(workflowId);\n if (!history) {\n history = [];\n this.proactiveHistory.set(workflowId, history);\n }\n\n // Prune entries older than 1 hour\n const recent = history.filter(ts => ts > oneHourAgo);\n this.proactiveHistory.set(workflowId, recent);\n\n // Check hourly rate limit\n if (recent.length >= maxPerHour) {\n log.debug(`Proactive cooldown: ${workflowId} hit max_per_hour (${maxPerHour})`);\n return false;\n }\n\n // Check cooldown since last run\n if (recent.length > 0) {\n const lastRun = recent[recent.length - 1];\n if (now - lastRun < cooldownMs) {\n log.debug(`Proactive cooldown: ${workflowId} within cooldown (${proactive.cooldown_minutes}min)`);\n return false;\n }\n }\n\n // Allowed — record this execution\n recent.push(now);\n return true;\n }\n\n isRunning(): boolean {\n return this.running;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,UAAU;AACjB,SAAS,kBAAkB;AAC3B,SAAS,YAAY;AAarB,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;AAOO,SAAS,aACd,QACA,KACS;AACT,QAAM,EAAE,OAAO,IAAI,IAAI,OAAO,QAAQ;AACtC,QAAM,KAAK,OAAO,QAAQ;AAG1B,MAAI;AACJ,MAAI;AACF,UAAM,YAAY,IAAI,KAAK,eAAe,SAAS;AAAA,MACjD,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,UAAU;AAAA,IACZ,CAAC;AACD,WAAO,SAAS,UAAU,OAAO,OAAO,oBAAI,KAAK,CAAC,GAAG,EAAE;AAAA,EACzD,QAAQ;AAEN,YAAQ,OAAO,oBAAI,KAAK,GAAG,SAAS;AAAA,EACtC;AAEA,MAAI,UAAU,IAAK,QAAO;AAC1B,MAAI,QAAQ,KAAK;AAEf,WAAO,QAAQ,SAAS,OAAO;AAAA,EACjC;AAEA,SAAO,QAAQ,SAAS,OAAO;AACjC;AA6BO,IAAM,YAAN,MAAgB;AAAA,EACb,YAA4C,oBAAI,IAAI;AAAA,EACpD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAAwD;AAAA,EACxD;AAAA,EACA;AAAA,EACA,cAAuD;AAAA;AAAA,EAEvD,mBAA0C,oBAAI,IAAI;AAAA,EAClD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU;AAAA,EAElB,YAAY,SAA2B;AACrC,SAAK,aAAa,QAAQ;AAC1B,SAAK,SAAS,QAAQ;AACtB,SAAK,eAAe,QAAQ,gBAAgB;AAC5C,SAAK,eAAe,QAAQ,gBAAgB;AAC5C,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,YAAY,QAAQ,aAAa;AACtC,SAA
K,QAAQ,QAAQ;AACrB,SAAK,UAAU,QAAQ;AACvB,SAAK,aAAa,QAAQ;AAC1B,SAAK,mBAAmB,QAAQ;AAChC,SAAK,aAAa,QAAQ;AAC1B,SAAK,UAAU,QAAQ;AACvB,SAAK,YAAY,QAAQ;AACzB,SAAK,UAAU,QAAQ;AAAA,EACzB;AAAA,EAEA,QAAc;AACZ,QAAI,KAAK,QAAS;AAClB,SAAK,UAAU;AAGf,QAAI,KAAK,gBAAgB,KAAK,SAAS,KAAK,YAAY,GAAG;AACzD,WAAK,eAAe,KAAK,SAAS,KAAK,cAAc,MAAM;AACzD,aAAK,YAAY;AAAA,MACnB,CAAC;AACD,UAAI,MAAM,4BAA4B,KAAK,YAAY,EAAE;AAAA,IAC3D;AAGA,QAAI,KAAK,aAAa;AACpB,YAAM,cAAc,OAAO,KAAK,gBAAgB,WAAW,KAAK,cAAc;AAC9E,UAAI,KAAK,SAAS,WAAW,GAAG;AAC9B,aAAK,cAAc,KAAK,SAAS,aAAa,MAAM;AAClD,eAAK,KAAK,oBAAoB;AAAA,QAChC,CAAC;AACD,YAAI,MAAM,2BAA2B,WAAW,GAAG,KAAK,YAAY,uBAAuB,EAAE,EAAE;AAAA,MACjG,OAAO;AACL,YAAI,KAAK,8BAA8B,WAAW,EAAE;AAAA,MACtD;AAAA,IACF;AAGA,UAAM,cAAc,KAAK,KAAK,YAAY,WAAW;AACrD,QAAI,CAAC,WAAW,WAAW,EAAG;AAE9B,UAAM,OAAO,cAAc,WAAW;AAEtC,eAAW,OAAO,MAAM;AACtB,YAAM,WAAW,IAAI,YAAY;AACjC,UAAI,CAAC,SAAU;AAEf,UAAI,CAAC,KAAK,SAAS,QAAQ,GAAG;AAC5B,YAAI;AAAE,eAAK,UAAU,IAAI,YAAY,IAAI,IAAI,MAAM,4BAA4B,QAAQ,EAAE,CAAC;AAAA,QAAG,SAAS,GAAG;AACvG,cAAI,KAAK,wBAAwB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,QAC/E;AACA;AAAA,MACF;AAEA,YAAM,OAAO,KAAK,SAAS,UAAU,YAAY;AAC/C,cAAM,KAAK,gBAAgB,GAAG;AAAA,MAChC,CAAC;AAED,WAAK,UAAU,IAAI,IAAI,YAAY,IAAI,EAAE,KAAK,gBAAgB,UAAU,KAAK,CAAC;AAC9E,UAAI;AAAE,aAAK,aAAa,IAAI,YAAY,IAAI,QAAQ;AAAA,MAAG,SAAS,GAAG;AACjE,YAAI,KAAK,2BAA2B,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,MAClF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,OAAa;AACX,QAAI,CAAC,KAAK,QAAS;AACnB,SAAK,UAAU;AAEf,QAAI,KAAK,cAAc;AACrB,WAAK,aAAa,KAAK;AACvB,WAAK,eAAe;AAAA,IACtB;AAEA,QAAI,KAAK,aAAa;AACpB,WAAK,YAAY,KAAK;AACtB,WAAK,cAAc;AAAA,IACrB;AAEA,eAAW,CAAC,EAAE,QAAQ,KAAK,KAAK,WAAW;AACzC,eAAS,MAAM,KAAK;AAAA,IACtB;AACA,SAAK,UAAU,MAAM;AACrB,SAAK,iBAAiB,MAAM;AAAA,EAC9B;AAAA,EAEA,MAAM,gBAAgB,KAAuC;AAC3D,UAAM,aAAa,IAAI,YAAY;AAGnC,UAAM,SAAS,WAAW,KAAK,UAAU;AACzC,QAAI,aAAa,MAAM,GAAG;AACxB,UAAI,MAAM,sBAAsB,UAAU,6BAAwB;AAClE,UAAI;AAAE,aAAK,mBAAmB,UAAU;AAAA,MAAG,SAAS,GAAG;AACrD,YAAI,KAAK,iCAAiC,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,MACxF;AACA,aAAO;AAAA,IACT;AAGA,UAAM,cAA
e,IAAI,YAAwC,WAAW,MAAM;AAClF,QAAI,eAAe,CAAC,KAAK,uBAAuB,YAAY,MAAM,GAAG;AACnE,UAAI,MAAM,gCAAgC,UAAU,sCAAiC;AACrF,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,IAAI,YAAY,eAAe;AAClD,UAAM,YAAY,IAAI,YAAY,kBAAkB;AACpD,QAAI,YAA0B;AAC9B,UAAM,YAAY,KAAK,IAAI;AAE3B,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,UAAI;AAEF,cAAM,SAAS;AAAA;AAAA,EAA6B,IAAI,IAAI;AAEpD,YAAI;AACJ,YAAI;AAGJ,cAAM,kBAAkB,IAAI,YAAY;AACxC,YAAI,iBAAiB;AACnB,cAAI,MAAM,aAAa,UAAU,0BAA0B,eAAe,GAAG;AAC7E,gBAAM,iBAAiB,MAAM,WAAW;AAAA,YACtC,YAAY,KAAK;AAAA,YACjB,SAAS;AAAA,YACT;AAAA,YACA,QAAQ,KAAK;AAAA,UACf,CAAC;AACD,uBAAa,eAAe;AAC5B,uBAAa,eAAe,MAAM;AAAA,QACpC,OAAO;AACL,gBAAM,QAAQ,cAAc;AAAA,YAC1B,KAAK,KAAK;AAAA,YACV,QAAQ,KAAK;AAAA,UACf,CAAC;AACD,gBAAM,SAAS,MAAM,MAAM,IAAI,MAAM;AACrC,gBAAM,MAAM,SAAS;AACrB,uBAAa,OAAO;AACpB,uBAAa,OAAO,MAAM;AAAA,QAC5B;AAGA,sBAAc,KAAK,UAAU;AAG7B,cAAMA,WAAU,KAAK,IAAI;AACzB,kBAAU,KAAK,YAAY;AAAA,UACzB,aAAa;AAAA,UACb,SAAS,IAAI,KAAK,SAAS,EAAE,YAAY;AAAA,UACzC,OAAO,IAAI,KAAKA,QAAO,EAAE,YAAY;AAAA,UACrC,aAAaA,WAAU;AAAA,UACvB,SAAS;AAAA,UACT,aAAa;AAAA,UACb,SAAS,UAAU;AAAA,UACnB,aAAa;AAAA,QACf,CAAC;AAED,YAAI;AAAE,eAAK,QAAQ,YAAY,UAAU;AAAA,QAAG,SAAS,GAAG;AACtD,cAAI,KAAK,sBAAsB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,QAC7E;AACA,eAAO;AAAA,MACT,SAAS,KAAK;AACZ,oBAAY,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAE9D,YAAI,UAAU,YAAY;AAExB,gBAAM,QAAQ,YAAY,KAAK,IAAI,GAAG,OAAO;AAC7C,cAAI,MAAM,aAAa,UAAU,qBAAqB,UAAU,CAAC,IAAI,aAAa,CAAC,kBAAkB,KAAK,IAAI;AAC9G,cAAI;AAAE,iBAAK,UAAU,YAAY,UAAU,GAAG,YAAY,SAAS;AAAA,UAAG,SAAS,GAAG;AAChF,gBAAI,KAAK,wBAAwB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,UAC/E;AACA,gBAAM,MAAM,KAAK;AAAA,QACnB;AAAA,MACF;AAAA,IACF;AAGA,kBAAc,KAAK,YAAY,WAAW,OAAO;AAGjD,UAAM,UAAU,KAAK,IAAI;AACzB,cAAU,KAAK,YAAY;AAAA,MACzB,aAAa;AAAA,MACb,SAAS,IAAI,KAAK,SAAS,EAAE,YAAY;AAAA,MACzC,OAAO,IAAI,KAAK,OAAO,EAAE,YAAY;AAAA,MACrC,aAAa,UAAU;AAAA,MACvB,SAAS;AAAA,MACT,OAAO,WAAW;AAAA,MAClB,SAAS,aAAa;AAAA,MACtB,aAAa;AAAA,IACf,CAAC;AAGD,QAAI;AAAE,WAAK,UAAU,YAAY,SAAU;AAAA,IAAG,SAAS,GAAG;AACxD,UAAI,KAAK,wBAAwB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE
;AAAA,IAC/E;AACA,UAAM;AAAA,EACR;AAAA,EAEA,MAAM,QAAQ,YAAqC;AACjD,UAAM,cAAc,KAAK,KAAK,YAAY,WAAW;AACrD,UAAM,WAAW,KAAK,aAAa,GAAG,UAAU,KAAK;AAErD,QAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,YAAM,IAAI,MAAM,uBAAuB,UAAU,EAAE;AAAA,IACrD;AAEA,UAAM,MAAM,qBAAqB,QAAQ;AACzC,WAAO,KAAK,gBAAgB,GAAG;AAAA,EACjC;AAAA,EAEA,gBAAmE;AACjE,WAAO,MAAM,KAAK,KAAK,UAAU,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO;AAAA,MAC5D;AAAA,MACA,MAAM,EAAE;AAAA,MACR,MAAM,EAAE,IAAI;AAAA,IACd,EAAE;AAAA,EACJ;AAAA;AAAA,EAGA,cAAoB;AAClB,QAAI;AACF,YAAM,SAAS,WAAW,KAAK,UAAU;AACzC,YAAM,SAAS;AAAA,QACb,KAAK;AAAA,QACL,OAAO,OAAO;AAAA,QACd,OAAO,OAAO;AAAA,MAChB;AACA,YAAM,QAAQ,OAAO,mBAAmB,OAAO;AAC/C,UAAI,QAAQ,GAAG;AACb,YAAI,KAAK,YAAY,OAAO,gBAAgB,gBAAgB,OAAO,gBAAgB,aAAa;AAAA,MAClG;AACA,UAAI;AAAE,aAAK,aAAa,OAAO,kBAAkB,OAAO,gBAAgB;AAAA,MAAG,SAAS,GAAG;AACrF,YAAI,KAAK,2BAA2B,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,MAClF;AAAA,IACF,SAAS,KAAK;AACZ,YAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAChE,UAAI,MAAM,oBAAoB,MAAM,OAAO,EAAE;AAC7C,UAAI;AAAE,aAAK,UAAU,gBAAgB,KAAK;AAAA,MAAG,SAAS,GAAG;AACvD,YAAI,KAAK,wBAAwB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,sBAAqC;AACzC,QAAI;AACF,YAAM,cAAc,gBAAgB,KAAK,UAAU;AACnD,UAAI,YAAY,WAAW,GAAG;AAC5B,YAAI,MAAM,iDAAiD;AAC3D;AAAA,MACF;AAGA,YAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAClD,UAAI,KAAK,8BAA8B,YAAY,MAAM,sBAAsB;AAC/E,YAAM,QAAQ,MAAM,kBAAkB,KAAK,YAAY,OAAO,KAAK,MAAM;AAEzE,UAAI;AAAE,aAAK,YAAY,OAAO,MAAM,SAAS,MAAM;AAAA,MAAG,SAAS,GAAG;AAChE,YAAI,KAAK,0BAA0B,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,MACjF;AAGA,UAAI,KAAK,WAAW;AAClB,YAAI,KAAK,+DAA+D;AACxE,cAAM,cAAc,MAAM,kBAAkB,KAAK,YAAY,MAAM,KAAK,MAAM;AAC9E,YAAI;AAAE,eAAK,UAAU,YAAY,UAAU,QAAQ,YAAY,QAAQ,MAAM;AAAA,QAAG,SAAS,GAAG;AAC1F,cAAI,KAAK,wBAAwB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,QAC/E;AAAA,MACF;AAAA,IACF,SAAS,KAAK;AACZ,YAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAChE,UAAI,MAAM,wBAAwB,MAAM,OAAO,EAAE;AACjD,UAAI;AAAE,aAAK,UAAU,oBAAoB,KAAK;AAAA,MAAG,SAAS,GAAG;AAC3D,Y
AAI,KAAK,wBAAwB,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,uBAAuB,YAAoB,QAAgC;AACzE,UAAM,YAAY,OAAO;AACzB,QAAI,CAAC,WAAW,QAAS,QAAO;AAEhC,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,aAAa,MAAM;AACzB,UAAM,cAAc,UAAU,oBAAoB,MAAM;AACxD,UAAM,aAAa,UAAU,gBAAgB;AAG7C,QAAI,UAAU,KAAK,iBAAiB,IAAI,UAAU;AAClD,QAAI,CAAC,SAAS;AACZ,gBAAU,CAAC;AACX,WAAK,iBAAiB,IAAI,YAAY,OAAO;AAAA,IAC/C;AAGA,UAAM,SAAS,QAAQ,OAAO,QAAM,KAAK,UAAU;AACnD,SAAK,iBAAiB,IAAI,YAAY,MAAM;AAG5C,QAAI,OAAO,UAAU,YAAY;AAC/B,UAAI,MAAM,uBAAuB,UAAU,sBAAsB,UAAU,GAAG;AAC9E,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,UAAU,OAAO,OAAO,SAAS,CAAC;AACxC,UAAI,MAAM,UAAU,YAAY;AAC9B,YAAI,MAAM,uBAAuB,UAAU,qBAAqB,UAAU,gBAAgB,MAAM;AAChG,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO,KAAK,GAAG;AACf,WAAO;AAAA,EACT;AAAA,EAEA,YAAqB;AACnB,WAAO,KAAK;AAAA,EACd;AACF;","names":["endTime"]}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
loadDirectory
|
|
5
|
+
} from "./chunk-UPLBF4RZ.js";
|
|
6
|
+
import {
|
|
7
|
+
getPrimitiveDirs
|
|
8
|
+
} from "./chunk-4CWAGBNS.js";
|
|
9
|
+
import "./chunk-ZZJOFKAT.js";
|
|
10
|
+
|
|
11
|
+
// src/runtime/search.ts
|
|
12
|
+
import { existsSync } from "fs";
|
|
13
|
+
import { join } from "path";
|
|
14
|
+
function searchPrimitives(harnessDir, query, options, config) {
|
|
15
|
+
const results = [];
|
|
16
|
+
const dirs = getPrimitiveDirs(config);
|
|
17
|
+
const queryLower = query?.toLowerCase();
|
|
18
|
+
for (const dir of dirs) {
|
|
19
|
+
if (options?.type) {
|
|
20
|
+
const typeNorm = options.type.toLowerCase();
|
|
21
|
+
if (dir !== typeNorm && dir !== typeNorm + "s" && dir.replace(/s$/, "") !== typeNorm) {
|
|
22
|
+
continue;
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
const fullPath = join(harnessDir, dir);
|
|
26
|
+
if (!existsSync(fullPath)) continue;
|
|
27
|
+
const docs = loadDirectory(fullPath);
|
|
28
|
+
for (const doc of docs) {
|
|
29
|
+
if (options?.status && doc.frontmatter.status !== options.status) continue;
|
|
30
|
+
if (options?.author && doc.frontmatter.author !== options.author) continue;
|
|
31
|
+
if (options?.tag) {
|
|
32
|
+
const tagLower = options.tag.toLowerCase();
|
|
33
|
+
const hasTag = doc.frontmatter.tags.some((t) => t.toLowerCase() === tagLower);
|
|
34
|
+
if (!hasTag) continue;
|
|
35
|
+
}
|
|
36
|
+
if (queryLower) {
|
|
37
|
+
const matchReason = matchDocument(doc, queryLower);
|
|
38
|
+
if (!matchReason) continue;
|
|
39
|
+
results.push({ doc, directory: dir, matchReason });
|
|
40
|
+
} else {
|
|
41
|
+
results.push({ doc, directory: dir, matchReason: "filter match" });
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
return results;
|
|
46
|
+
}
|
|
47
|
+
function matchDocument(doc, queryLower) {
|
|
48
|
+
if (doc.frontmatter.id.toLowerCase().includes(queryLower)) {
|
|
49
|
+
return `id: ${doc.frontmatter.id}`;
|
|
50
|
+
}
|
|
51
|
+
for (const tag of doc.frontmatter.tags) {
|
|
52
|
+
if (tag.toLowerCase().includes(queryLower)) {
|
|
53
|
+
return `tag: ${tag}`;
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
if (doc.l0.toLowerCase().includes(queryLower)) {
|
|
57
|
+
return `L0: ${doc.l0.slice(0, 80)}`;
|
|
58
|
+
}
|
|
59
|
+
if (doc.l1.toLowerCase().includes(queryLower)) {
|
|
60
|
+
return `L1 match`;
|
|
61
|
+
}
|
|
62
|
+
const bodyLower = doc.body.toLowerCase();
|
|
63
|
+
const idx = bodyLower.indexOf(queryLower);
|
|
64
|
+
if (idx !== -1) {
|
|
65
|
+
const start = Math.max(0, idx - 20);
|
|
66
|
+
const end = Math.min(bodyLower.length, idx + queryLower.length + 30);
|
|
67
|
+
const snippet = doc.body.slice(start, end).replace(/\n/g, " ").trim();
|
|
68
|
+
return `body: ...${snippet}...`;
|
|
69
|
+
}
|
|
70
|
+
return null;
|
|
71
|
+
}
|
|
72
|
+
export {
|
|
73
|
+
searchPrimitives
|
|
74
|
+
};
|
|
75
|
+
//# sourceMappingURL=search-V3W5JMJG.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/search.ts"],"sourcesContent":["import { existsSync } from 'fs';\nimport { join, basename, relative } from 'path';\nimport { loadDirectory } from '../primitives/loader.js';\nimport { getPrimitiveDirs } from '../core/types.js';\nimport type { HarnessConfig, HarnessDocument } from '../core/types.js';\n\nexport interface SearchOptions {\n /** Filter by tag (case-insensitive) */\n tag?: string;\n /** Filter by primitive type directory (e.g., \"rules\", \"skills\") */\n type?: string;\n /** Filter by status (e.g., \"active\", \"draft\") */\n status?: string;\n /** Filter by author (e.g., \"human\", \"agent\") */\n author?: string;\n}\n\nexport interface SearchResult {\n doc: HarnessDocument;\n directory: string;\n matchReason: string;\n}\n\n/**\n * Search primitives across all directories by query text and/or filters.\n * Query matches against: id, tags, L0 summary, L1 summary, body content.\n */\nexport function searchPrimitives(\n harnessDir: string,\n query?: string,\n options?: SearchOptions,\n config?: HarnessConfig,\n): SearchResult[] {\n const results: SearchResult[] = [];\n const dirs = getPrimitiveDirs(config);\n const queryLower = query?.toLowerCase();\n\n for (const dir of dirs) {\n // Filter by type directory if specified\n if (options?.type) {\n const typeNorm = options.type.toLowerCase();\n // Accept both singular (\"rule\") and plural (\"rules\")\n if (dir !== typeNorm && dir !== typeNorm + 's' && dir.replace(/s$/, '') !== typeNorm) {\n continue;\n }\n }\n\n const fullPath = join(harnessDir, dir);\n if (!existsSync(fullPath)) continue;\n\n const docs = loadDirectory(fullPath);\n\n for (const doc of docs) {\n // Filter by status\n if (options?.status && doc.frontmatter.status !== options.status) continue;\n\n // Filter by author\n if (options?.author && doc.frontmatter.author !== options.author) continue;\n\n // Filter by tag\n if (options?.tag) {\n const tagLower = options.tag.toLowerCase();\n const hasTag = 
doc.frontmatter.tags.some((t) => t.toLowerCase() === tagLower);\n if (!hasTag) continue;\n }\n\n // Match query text\n if (queryLower) {\n const matchReason = matchDocument(doc, queryLower);\n if (!matchReason) continue;\n results.push({ doc, directory: dir, matchReason });\n } else {\n // No query — return all matching filters\n results.push({ doc, directory: dir, matchReason: 'filter match' });\n }\n }\n }\n\n return results;\n}\n\nfunction matchDocument(doc: HarnessDocument, queryLower: string): string | null {\n // Check id\n if (doc.frontmatter.id.toLowerCase().includes(queryLower)) {\n return `id: ${doc.frontmatter.id}`;\n }\n\n // Check tags\n for (const tag of doc.frontmatter.tags) {\n if (tag.toLowerCase().includes(queryLower)) {\n return `tag: ${tag}`;\n }\n }\n\n // Check L0\n if (doc.l0.toLowerCase().includes(queryLower)) {\n return `L0: ${doc.l0.slice(0, 80)}`;\n }\n\n // Check L1\n if (doc.l1.toLowerCase().includes(queryLower)) {\n return `L1 match`;\n }\n\n // Check body content\n const bodyLower = doc.body.toLowerCase();\n const idx = bodyLower.indexOf(queryLower);\n if (idx !== -1) {\n const start = Math.max(0, idx - 20);\n const end = Math.min(bodyLower.length, idx + queryLower.length + 30);\n const snippet = doc.body.slice(start, end).replace(/\\n/g, ' ').trim();\n return `body: ...${snippet}...`;\n }\n\n return 
null;\n}\n"],"mappings":";;;;;;;;;;;AAAA,SAAS,kBAAkB;AAC3B,SAAS,YAAgC;AA0BlC,SAAS,iBACd,YACA,OACA,SACA,QACgB;AAChB,QAAM,UAA0B,CAAC;AACjC,QAAM,OAAO,iBAAiB,MAAM;AACpC,QAAM,aAAa,OAAO,YAAY;AAEtC,aAAW,OAAO,MAAM;AAEtB,QAAI,SAAS,MAAM;AACjB,YAAM,WAAW,QAAQ,KAAK,YAAY;AAE1C,UAAI,QAAQ,YAAY,QAAQ,WAAW,OAAO,IAAI,QAAQ,MAAM,EAAE,MAAM,UAAU;AACpF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,YAAY,GAAG;AACrC,QAAI,CAAC,WAAW,QAAQ,EAAG;AAE3B,UAAM,OAAO,cAAc,QAAQ;AAEnC,eAAW,OAAO,MAAM;AAEtB,UAAI,SAAS,UAAU,IAAI,YAAY,WAAW,QAAQ,OAAQ;AAGlE,UAAI,SAAS,UAAU,IAAI,YAAY,WAAW,QAAQ,OAAQ;AAGlE,UAAI,SAAS,KAAK;AAChB,cAAM,WAAW,QAAQ,IAAI,YAAY;AACzC,cAAM,SAAS,IAAI,YAAY,KAAK,KAAK,CAAC,MAAM,EAAE,YAAY,MAAM,QAAQ;AAC5E,YAAI,CAAC,OAAQ;AAAA,MACf;AAGA,UAAI,YAAY;AACd,cAAM,cAAc,cAAc,KAAK,UAAU;AACjD,YAAI,CAAC,YAAa;AAClB,gBAAQ,KAAK,EAAE,KAAK,WAAW,KAAK,YAAY,CAAC;AAAA,MACnD,OAAO;AAEL,gBAAQ,KAAK,EAAE,KAAK,WAAW,KAAK,aAAa,eAAe,CAAC;AAAA,MACnE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,cAAc,KAAsB,YAAmC;AAE9E,MAAI,IAAI,YAAY,GAAG,YAAY,EAAE,SAAS,UAAU,GAAG;AACzD,WAAO,OAAO,IAAI,YAAY,EAAE;AAAA,EAClC;AAGA,aAAW,OAAO,IAAI,YAAY,MAAM;AACtC,QAAI,IAAI,YAAY,EAAE,SAAS,UAAU,GAAG;AAC1C,aAAO,QAAQ,GAAG;AAAA,IACpB;AAAA,EACF;AAGA,MAAI,IAAI,GAAG,YAAY,EAAE,SAAS,UAAU,GAAG;AAC7C,WAAO,OAAO,IAAI,GAAG,MAAM,GAAG,EAAE,CAAC;AAAA,EACnC;AAGA,MAAI,IAAI,GAAG,YAAY,EAAE,SAAS,UAAU,GAAG;AAC7C,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,IAAI,KAAK,YAAY;AACvC,QAAM,MAAM,UAAU,QAAQ,UAAU;AACxC,MAAI,QAAQ,IAAI;AACd,UAAM,QAAQ,KAAK,IAAI,GAAG,MAAM,EAAE;AAClC,UAAM,MAAM,KAAK,IAAI,UAAU,QAAQ,MAAM,WAAW,SAAS,EAAE;AACnE,UAAM,UAAU,IAAI,KAAK,MAAM,OAAO,GAAG,EAAE,QAAQ,OAAO,GAAG,EAAE,KAAK;AACpE,WAAO,YAAY,OAAO;AAAA,EAC5B;AAEA,SAAO;AACT;","names":[]}
|