nodebench-mcp 2.31.1 → 2.32.0
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +14 -6
- package/dist/engine/server.js +14 -4
- package/dist/engine/server.js.map +1 -1
- package/dist/index.js +1946 -670
- package/dist/index.js.map +1 -1
- package/dist/security/SecurityError.d.ts +18 -0
- package/dist/security/SecurityError.js +22 -0
- package/dist/security/SecurityError.js.map +1 -0
- package/dist/security/__tests__/security.test.d.ts +8 -0
- package/dist/security/__tests__/security.test.js +295 -0
- package/dist/security/__tests__/security.test.js.map +1 -0
- package/dist/security/auditLog.d.ts +36 -0
- package/dist/security/auditLog.js +178 -0
- package/dist/security/auditLog.js.map +1 -0
- package/dist/security/commandSandbox.d.ts +33 -0
- package/dist/security/commandSandbox.js +159 -0
- package/dist/security/commandSandbox.js.map +1 -0
- package/dist/security/config.d.ts +23 -0
- package/dist/security/config.js +43 -0
- package/dist/security/config.js.map +1 -0
- package/dist/security/credentialRedactor.d.ts +22 -0
- package/dist/security/credentialRedactor.js +118 -0
- package/dist/security/credentialRedactor.js.map +1 -0
- package/dist/security/index.d.ts +20 -0
- package/dist/security/index.js +21 -0
- package/dist/security/index.js.map +1 -0
- package/dist/security/pathSandbox.d.ts +23 -0
- package/dist/security/pathSandbox.js +160 -0
- package/dist/security/pathSandbox.js.map +1 -0
- package/dist/security/urlValidator.d.ts +23 -0
- package/dist/security/urlValidator.js +125 -0
- package/dist/security/urlValidator.js.map +1 -0
- package/dist/tools/agentBootstrapTools.js +22 -29
- package/dist/tools/agentBootstrapTools.js.map +1 -1
- package/dist/tools/contextSandboxTools.js +7 -9
- package/dist/tools/contextSandboxTools.js.map +1 -1
- package/dist/tools/deepSimTools.d.ts +2 -0
- package/dist/tools/deepSimTools.js +404 -0
- package/dist/tools/deepSimTools.js.map +1 -0
- package/dist/tools/dimensionTools.d.ts +2 -0
- package/dist/tools/dimensionTools.js +246 -0
- package/dist/tools/dimensionTools.js.map +1 -0
- package/dist/tools/executionTraceTools.d.ts +2 -0
- package/dist/tools/executionTraceTools.js +446 -0
- package/dist/tools/executionTraceTools.js.map +1 -0
- package/dist/tools/founderTools.d.ts +13 -0
- package/dist/tools/founderTools.js +595 -0
- package/dist/tools/founderTools.js.map +1 -0
- package/dist/tools/gitWorkflowTools.js +14 -10
- package/dist/tools/gitWorkflowTools.js.map +1 -1
- package/dist/tools/githubTools.js +19 -2
- package/dist/tools/githubTools.js.map +1 -1
- package/dist/tools/index.d.ts +87 -0
- package/dist/tools/index.js +102 -0
- package/dist/tools/index.js.map +1 -0
- package/dist/tools/localFileTools.js +24 -12
- package/dist/tools/localFileTools.js.map +1 -1
- package/dist/tools/memoryDecay.d.ts +70 -0
- package/dist/tools/memoryDecay.js +247 -0
- package/dist/tools/memoryDecay.js.map +1 -0
- package/dist/tools/missionHarnessTools.d.ts +32 -0
- package/dist/tools/missionHarnessTools.js +972 -0
- package/dist/tools/missionHarnessTools.js.map +1 -0
- package/dist/tools/observabilityTools.d.ts +15 -0
- package/dist/tools/observabilityTools.js +787 -0
- package/dist/tools/observabilityTools.js.map +1 -0
- package/dist/tools/openclawTools.js +151 -36
- package/dist/tools/openclawTools.js.map +1 -1
- package/dist/tools/progressiveDiscoveryTools.js +5 -4
- package/dist/tools/progressiveDiscoveryTools.js.map +1 -1
- package/dist/tools/qualityGateTools.js +118 -2
- package/dist/tools/qualityGateTools.js.map +1 -1
- package/dist/tools/rssTools.js +3 -0
- package/dist/tools/rssTools.js.map +1 -1
- package/dist/tools/scraplingTools.js +15 -0
- package/dist/tools/scraplingTools.js.map +1 -1
- package/dist/tools/seoTools.js +66 -1
- package/dist/tools/seoTools.js.map +1 -1
- package/dist/tools/sessionMemoryTools.js +50 -11
- package/dist/tools/sessionMemoryTools.js.map +1 -1
- package/dist/tools/temporalIntelligenceTools.d.ts +12 -0
- package/dist/tools/temporalIntelligenceTools.js +1068 -0
- package/dist/tools/temporalIntelligenceTools.js.map +1 -0
- package/dist/tools/toolRegistry.d.ts +19 -0
- package/dist/tools/toolRegistry.js +857 -31
- package/dist/tools/toolRegistry.js.map +1 -1
- package/dist/tools/webTools.js +14 -1
- package/dist/tools/webTools.js.map +1 -1
- package/dist/tools/webmcpTools.js +13 -2
- package/dist/tools/webmcpTools.js.map +1 -1
- package/dist/toolsetRegistry.js +13 -0
- package/dist/toolsetRegistry.js.map +1 -1
- package/dist/types.d.ts +10 -0
- package/package.json +124 -124
package/dist/tools/temporalIntelligenceTools.js (new file, @@ -0,0 +1,1068 @@):

```js
/**
 * Temporal Intelligence Tools — MCP bridge for the Unified Temporal Agentic OS.
 *
 * 7 tools that bridge the temporal substrate (Convex tables) with the MCP tool
 * ecosystem. Uses HTTP fetch to Convex for persistence with SQLite fallback
 * for offline/standalone operation.
 *
 * Tables: timeSeriesObservations, timeSeriesSignals, causalChains,
 * zeroDraftArtifacts, proofPacks (see convex/domains/temporal/schema.ts)
 */
import { getDb } from "../db.js";
// ─── SQLite Schema (offline mirror of Convex temporal tables) ────────────────
function ensureTemporalTables() {
    const db = getDb();
    db.exec(`
    CREATE TABLE IF NOT EXISTS temporal_observations (
      id TEXT PRIMARY KEY,
      stream_key TEXT NOT NULL,
      source_type TEXT NOT NULL,
      entity_key TEXT,
      observation_type TEXT NOT NULL,
      observed_at INTEGER NOT NULL,
      value_number REAL,
      value_text TEXT,
      headline TEXT,
      summary TEXT,
      source_excerpt TEXT,
      source_refs TEXT DEFAULT '[]',
      tags TEXT DEFAULT '[]',
      created_at INTEGER NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_obs_stream_time ON temporal_observations(stream_key, observed_at);
    CREATE INDEX IF NOT EXISTS idx_obs_entity_time ON temporal_observations(entity_key, observed_at);

    CREATE TABLE IF NOT EXISTS temporal_signals (
      id TEXT PRIMARY KEY,
      signal_key TEXT NOT NULL,
      stream_key TEXT NOT NULL,
      entity_key TEXT,
      signal_type TEXT NOT NULL,
      status TEXT NOT NULL DEFAULT 'open',
      detected_at INTEGER NOT NULL,
      window_start_at INTEGER,
      window_end_at INTEGER,
      confidence REAL NOT NULL,
      severity TEXT,
      summary TEXT NOT NULL,
      plain_english TEXT NOT NULL,
      evidence_observation_ids TEXT DEFAULT '[]',
      recommended_action TEXT,
      created_at INTEGER NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_sig_stream ON temporal_signals(stream_key, detected_at);
    CREATE INDEX IF NOT EXISTS idx_sig_entity ON temporal_signals(entity_key, detected_at);
    CREATE INDEX IF NOT EXISTS idx_sig_status ON temporal_signals(status, detected_at);

    CREATE TABLE IF NOT EXISTS temporal_causal_chains (
      id TEXT PRIMARY KEY,
      chain_key TEXT NOT NULL,
      title TEXT NOT NULL,
      entity_key TEXT,
      root_question TEXT NOT NULL,
      status TEXT NOT NULL DEFAULT 'draft',
      summary TEXT NOT NULL,
      plain_english TEXT NOT NULL,
      outcome TEXT,
      nodes TEXT NOT NULL DEFAULT '[]',
      source_refs TEXT DEFAULT '[]',
      created_at INTEGER NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_chain_entity ON temporal_causal_chains(entity_key, created_at);

    CREATE TABLE IF NOT EXISTS temporal_zero_drafts (
      id TEXT PRIMARY KEY,
      artifact_key TEXT NOT NULL,
      artifact_type TEXT NOT NULL,
      status TEXT NOT NULL DEFAULT 'draft',
      title TEXT NOT NULL,
      summary TEXT NOT NULL,
      target_audience TEXT,
      body_markdown TEXT NOT NULL,
      linked_signal_ids TEXT DEFAULT '[]',
      linked_chain_id TEXT,
      source_refs TEXT DEFAULT '[]',
      created_at INTEGER NOT NULL
    );

    CREATE TABLE IF NOT EXISTS temporal_proof_packs (
      id TEXT PRIMARY KEY,
      pack_key TEXT NOT NULL,
      subject_type TEXT NOT NULL,
      subject_id TEXT NOT NULL,
      status TEXT NOT NULL DEFAULT 'draft',
      summary TEXT NOT NULL,
      checklist TEXT NOT NULL DEFAULT '[]',
      pass_rate REAL,
      metrics TEXT,
      dogfood_run_id TEXT,
      zero_draft_artifact_ids TEXT DEFAULT '[]',
      created_at INTEGER NOT NULL
    );
  `);
}
function genId(prefix) {
    return `${prefix}_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
}
// ─── Convex HTTP helpers ─────────────────────────────────────────────────────
function getConvexUrl() {
    return process.env.CONVEX_URL || process.env.NEXT_PUBLIC_CONVEX_URL || null;
}
async function convexMutation(fnPath, args) {
    const url = getConvexUrl();
    if (!url)
        return null;
    try {
        const resp = await fetch(`${url}/api/mutation`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ path: fnPath, args }),
            signal: AbortSignal.timeout(8000),
        });
        if (!resp.ok)
            return null;
        return await resp.json();
    }
    catch {
        return null;
    }
}
async function convexQuery(fnPath, args) {
    const url = getConvexUrl();
    if (!url)
        return null;
    try {
        const resp = await fetch(`${url}/api/query`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ path: fnPath, args }),
            signal: AbortSignal.timeout(8000),
        });
        if (!resp.ok)
            return null;
        return await resp.json();
    }
    catch {
        return null;
    }
}
// ─── Statistical helpers ─────────────────────────────────────────────────────
function linearRegression(xs, ys) {
    const n = xs.length;
    if (n < 2)
        return { slope: 0, intercept: ys[0] ?? 0, r2: 0 };
    const sumX = xs.reduce((a, b) => a + b, 0);
    const sumY = ys.reduce((a, b) => a + b, 0);
    const sumXY = xs.reduce((a, x, i) => a + x * ys[i], 0);
    const sumX2 = xs.reduce((a, x) => a + x * x, 0);
    const slope = (n * sumXY - sumX * sumY) / (n * sumX2 - sumX * sumX);
    const intercept = (sumY - slope * sumX) / n;
    // R^2
    const meanY = sumY / n;
    const ssTot = ys.reduce((a, y) => a + (y - meanY) ** 2, 0);
    const ssRes = ys.reduce((a, y, i) => a + (y - (slope * xs[i] + intercept)) ** 2, 0);
    const r2 = ssTot === 0 ? 1 : 1 - ssRes / ssTot;
    return { slope, intercept, r2 };
}
function zScore(value, mean, std) {
    return std === 0 ? 0 : (value - mean) / std;
}
function mean(arr) {
    return arr.length === 0 ? 0 : arr.reduce((a, b) => a + b, 0) / arr.length;
}
function stddev(arr) {
    if (arr.length < 2)
        return 0;
    const m = mean(arr);
    return Math.sqrt(arr.reduce((a, v) => a + (v - m) ** 2, 0) / (arr.length - 1));
}
// ─── Source type / observation type validators ───────────────────────────────
const VALID_SOURCE_TYPES = new Set(["slack", "github", "jira", "web", "document", "manual", "system"]);
const VALID_OBS_TYPES = new Set(["numeric", "categorical", "event", "text"]);
const VALID_SIGNAL_TYPES = new Set(["momentum", "regime_shift", "anomaly", "causal_hint", "opportunity_window", "risk_window"]);
const VALID_ARTIFACT_TYPES = new Set(["slack_message", "email", "spec_doc", "pr_draft", "architecture_note", "career_plan", "content_brief"]);
const VALID_SUBJECT_TYPES = new Set(["deployment", "career_move", "content_release", "research_run", "agent_loop"]);
// ─── Tools ───────────────────────────────────────────────────────────────────
export const temporalIntelligenceTools = [
    // 1. ingest_temporal_observation
    {
        name: "ingest_temporal_observation",
        description: "Ingest a raw observation into the temporal substrate (timeSeriesObservations). Supports numeric, categorical, event, and text observations from any source type. Returns observation ID for linking to signals and causal chains.",
        inputSchema: {
            type: "object",
            properties: {
                streamKey: {
                    type: "string",
                    description: "Stream identifier grouping related observations (e.g. 'github/commits/nodebench', 'jira/velocity/team-alpha')",
                },
                sourceType: {
                    type: "string",
                    enum: ["slack", "github", "jira", "web", "document", "manual", "system"],
                    description: "Where this observation originates",
                },
                observationType: {
                    type: "string",
                    enum: ["numeric", "categorical", "event", "text"],
                    description: "Type of observation value",
                },
                observedAt: {
                    type: "string",
                    description: "ISO timestamp of when the observation was made",
                },
                valueNumber: {
                    type: "number",
                    description: "Numeric value (for observationType='numeric')",
                },
                valueText: {
                    type: "string",
                    description: "Text value (for observationType='text' or 'categorical')",
                },
                headline: {
                    type: "string",
                    description: "Short headline summarizing the observation",
                },
                summary: {
                    type: "string",
                    description: "Longer description of what was observed",
                },
                sourceExcerpt: {
                    type: "string",
                    description: "Verbatim excerpt from the source (for provenance)",
                },
                sourceRefs: {
                    type: "array",
                    items: {
                        type: "object",
                        properties: {
                            url: { type: "string" },
                            title: { type: "string" },
                            lineStart: { type: "number" },
                            lineEnd: { type: "number" },
                        },
                        required: ["url", "title"],
                    },
                    description: "Source references with URLs and optional line ranges",
                },
                entityKey: {
                    type: "string",
                    description: "Entity this observation relates to (e.g. 'company/openai', 'repo/nodebench')",
                },
                tags: {
                    type: "array",
                    items: { type: "string" },
                    description: "Freeform tags for categorization",
                },
            },
            required: ["streamKey", "sourceType", "observationType", "observedAt", "headline"],
        },
        annotations: { readOnlyHint: false, destructiveHint: false, openWorldHint: true },
        handler: async (args) => {
            const streamKey = args.streamKey;
            const sourceType = args.sourceType;
            const observationType = args.observationType;
            const observedAtStr = args.observedAt;
            const headline = args.headline;
            // Validate
            if (!VALID_SOURCE_TYPES.has(sourceType)) {
                return [{ type: "text", text: JSON.stringify({ error: `Invalid sourceType: ${sourceType}. Must be one of: ${[...VALID_SOURCE_TYPES].join(", ")}` }) }];
            }
            if (!VALID_OBS_TYPES.has(observationType)) {
                return [{ type: "text", text: JSON.stringify({ error: `Invalid observationType: ${observationType}. Must be one of: ${[...VALID_OBS_TYPES].join(", ")}` }) }];
            }
            const observedAt = new Date(observedAtStr).getTime();
            if (isNaN(observedAt)) {
                return [{ type: "text", text: JSON.stringify({ error: `Invalid observedAt: ${observedAtStr}. Must be valid ISO date.` }) }];
            }
            if (observationType === "numeric" && args.valueNumber == null) {
                return [{ type: "text", text: JSON.stringify({ error: "observationType='numeric' requires valueNumber" }) }];
            }
            const id = genId("tobs");
            const now = Date.now();
            const sourceRefs = args.sourceRefs || [];
            const tags = args.tags || [];
            // Try Convex first
            const convexResult = await convexMutation("domains/temporal/mutations:ingestObservation", {
                streamKey,
                sourceType,
                observationType,
                observedAt,
                valueNumber: args.valueNumber ?? undefined,
                valueText: args.valueText ?? undefined,
                headline,
                summary: args.summary ?? undefined,
                sourceExcerpt: args.sourceExcerpt ?? undefined,
                sourceRefs: sourceRefs.map((r) => ({ label: r.title, href: r.url, lineStart: r.lineStart, lineEnd: r.lineEnd })),
                entityKey: args.entityKey ?? undefined,
                tags,
            });
            // Always persist locally (offline mirror)
            ensureTemporalTables();
            const db = getDb();
            db.prepare(`
        INSERT INTO temporal_observations (id, stream_key, source_type, entity_key, observation_type,
          observed_at, value_number, value_text, headline, summary, source_excerpt, source_refs, tags, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `).run(id, streamKey, sourceType, args.entityKey ?? null, observationType, observedAt, args.valueNumber ?? null, args.valueText ?? null, headline, args.summary ?? null, args.sourceExcerpt ?? null, JSON.stringify(sourceRefs), JSON.stringify(tags), now);
            return [{
                type: "text",
                text: JSON.stringify({
                    observationId: id,
                    streamKey,
                    sourceType,
                    observationType,
                    observedAt: new Date(observedAt).toISOString(),
                    headline,
                    convexSynced: convexResult != null,
                }),
            }];
        },
    },
    // 2. detect_temporal_signal
    {
        name: "detect_temporal_signal",
        description: "Analyze observations in a stream and detect temporal signals: momentum (sustained directional trend), regime_shift (rolling mean shift), anomaly (z-score outlier), causal_hint, opportunity_window, or risk_window. Returns detected signals with confidence scores.",
        inputSchema: {
            type: "object",
            properties: {
                streamKey: {
                    type: "string",
                    description: "Stream to analyze",
                },
                entityKey: {
                    type: "string",
                    description: "Optional entity filter",
                },
                lookbackDays: {
                    type: "number",
                    description: "How many days of observations to analyze (default: 30)",
                },
                signalTypes: {
                    type: "array",
                    items: {
                        type: "string",
                        enum: ["momentum", "regime_shift", "anomaly", "causal_hint", "opportunity_window", "risk_window"],
                    },
                    description: "Filter to specific signal types (default: all)",
                },
            },
            required: ["streamKey"],
        },
        annotations: { readOnlyHint: true, openWorldHint: false },
        handler: async (args) => {
            ensureTemporalTables();
            const db = getDb();
            const streamKey = args.streamKey;
            const lookbackDays = args.lookbackDays || 30;
            const cutoff = Date.now() - lookbackDays * 86_400_000;
            const filterTypes = args.signalTypes ? new Set(args.signalTypes) : null;
            // Query recent numeric observations
            let sql = "SELECT * FROM temporal_observations WHERE stream_key = ? AND observed_at >= ?";
            const params = [streamKey, cutoff];
            if (args.entityKey) {
                sql += " AND entity_key = ?";
                params.push(args.entityKey);
            }
            sql += " ORDER BY observed_at ASC";
            const observations = db.prepare(sql).all(...params);
            if (observations.length === 0) {
                return [{ type: "text", text: JSON.stringify({
                    signals: [],
                    message: `No observations found for stream '${streamKey}' in last ${lookbackDays} days`,
                    hint: "Ingest observations with ingest_temporal_observation first",
                }) }];
            }
            const detectedSignals = [];
            const now = Date.now();
            // Extract numeric series
            const numericObs = observations.filter((o) => o.observation_type === "numeric" && o.value_number != null);
            const values = numericObs.map((o) => o.value_number);
            const timestamps = numericObs.map((o) => o.observed_at);
            if (values.length >= 3) {
                // ── Momentum detection (linear regression) ──
                if (!filterTypes || filterTypes.has("momentum")) {
                    const { slope, r2 } = linearRegression(timestamps, values);
                    if (Math.abs(r2) > 0.5 && values.length >= 5) {
                        const direction = slope > 0 ? "upward" : "downward";
                        const confidence = Math.min(0.95, Math.abs(r2));
                        const signalId = genId("tsig");
                        detectedSignals.push({
                            signalId,
                            signalType: "momentum",
                            confidence,
                            severity: confidence > 0.8 ? "high" : confidence > 0.6 ? "medium" : "low",
                            summary: `${direction} momentum detected (slope=${slope.toFixed(4)}, R2=${r2.toFixed(3)})`,
                            plainEnglish: `The '${streamKey}' stream shows a consistent ${direction} trend over ${lookbackDays} days with ${(confidence * 100).toFixed(0)}% confidence.`,
                            recommendedAction: `Monitor for continuation. ${direction === "upward" ? "Consider capitalizing on positive momentum." : "Investigate root cause of decline."}`,
                            evidenceCount: numericObs.length,
                        });
                        db.prepare(`
              INSERT INTO temporal_signals (id, signal_key, stream_key, entity_key, signal_type, status,
                detected_at, confidence, severity, summary, plain_english, recommended_action, created_at)
              VALUES (?, ?, ?, ?, 'momentum', 'open', ?, ?, ?, ?, ?, ?, ?)
            `).run(signalId, `momentum_${streamKey}_${now}`, streamKey, args.entityKey ?? null, now, confidence, confidence > 0.8 ? "high" : confidence > 0.6 ? "medium" : "low", `${direction} momentum (slope=${slope.toFixed(4)}, R2=${r2.toFixed(3)})`, `Consistent ${direction} trend over ${lookbackDays} days`, `Monitor for continuation`, now);
                    }
                }
                // ── Anomaly detection (z-score on last value) ──
                if (!filterTypes || filterTypes.has("anomaly")) {
                    const m = mean(values);
                    const s = stddev(values);
                    if (s > 0) {
                        const lastVal = values[values.length - 1];
                        const z = zScore(lastVal, m, s);
                        if (Math.abs(z) > 2) {
                            const direction = z > 0 ? "above" : "below";
                            const confidence = Math.min(0.95, Math.abs(z) / 4);
                            const signalId = genId("tsig");
                            detectedSignals.push({
                                signalId,
                                signalType: "anomaly",
                                confidence,
                                severity: Math.abs(z) > 3 ? "high" : "medium",
                                summary: `Anomaly: latest value ${lastVal.toFixed(2)} is ${Math.abs(z).toFixed(1)} std devs ${direction} mean (${m.toFixed(2)})`,
                                plainEnglish: `The most recent observation in '${streamKey}' is unusually ${z > 0 ? "high" : "low"} compared to the ${lookbackDays}-day baseline.`,
                                recommendedAction: "Investigate whether this is a data quality issue or a genuine outlier requiring action.",
                                zScore: z,
                            });
                            db.prepare(`
                INSERT INTO temporal_signals (id, signal_key, stream_key, entity_key, signal_type, status,
                  detected_at, confidence, severity, summary, plain_english, recommended_action, created_at)
                VALUES (?, ?, ?, ?, 'anomaly', 'open', ?, ?, ?, ?, ?, ?, ?)
              `).run(signalId, `anomaly_${streamKey}_${now}`, streamKey, args.entityKey ?? null, now, confidence, Math.abs(z) > 3 ? "high" : "medium", `z-score=${z.toFixed(2)}, value=${lastVal.toFixed(2)}, mean=${m.toFixed(2)}`, `Unusually ${z > 0 ? "high" : "low"} value detected`, "Investigate data quality or genuine outlier", now);
                        }
                    }
                }
                // ── Regime shift detection (rolling mean comparison) ──
                if ((!filterTypes || filterTypes.has("regime_shift")) && values.length >= 10) {
                    const midpoint = Math.floor(values.length / 2);
                    const firstHalf = values.slice(0, midpoint);
                    const secondHalf = values.slice(midpoint);
                    const meanFirst = mean(firstHalf);
                    const meanSecond = mean(secondHalf);
                    const pooledStd = stddev(values);
                    if (pooledStd > 0) {
                        const effectSize = Math.abs(meanSecond - meanFirst) / pooledStd;
                        if (effectSize > 0.8) {
                            const direction = meanSecond > meanFirst ? "upward" : "downward";
                            const confidence = Math.min(0.9, effectSize / 2);
                            const signalId = genId("tsig");
                            detectedSignals.push({
                                signalId,
                                signalType: "regime_shift",
                                confidence,
                                severity: effectSize > 1.5 ? "high" : "medium",
                                summary: `Regime shift: mean moved ${direction} (${meanFirst.toFixed(2)} -> ${meanSecond.toFixed(2)}, effect size=${effectSize.toFixed(2)})`,
                                plainEnglish: `The '${streamKey}' stream appears to have shifted to a new operating regime. The average changed ${direction} significantly.`,
                                recommendedAction: "Assess whether this shift is intentional or indicates a systemic change requiring response.",
                                windowStartAt: timestamps[midpoint],
                            });
                            db.prepare(`
                INSERT INTO temporal_signals (id, signal_key, stream_key, entity_key, signal_type, status,
                  detected_at, window_start_at, confidence, severity, summary, plain_english, recommended_action, created_at)
                VALUES (?, ?, ?, ?, 'regime_shift', 'open', ?, ?, ?, ?, ?, ?, ?, ?)
              `).run(signalId, `regime_${streamKey}_${now}`, streamKey, args.entityKey ?? null, now, timestamps[midpoint], confidence, effectSize > 1.5 ? "high" : "medium", `Mean shift ${meanFirst.toFixed(2)}->${meanSecond.toFixed(2)} (d=${effectSize.toFixed(2)})`, `Significant ${direction} regime shift detected`, "Assess whether shift is intentional", now);
                        }
                    }
                }
            }
            // ── Event-based signals (opportunity/risk from text observations) ──
            const eventObs = observations.filter((o) => o.observation_type === "event" || o.observation_type === "text");
            if (eventObs.length > 0 && (!filterTypes || filterTypes.has("opportunity_window") || filterTypes.has("risk_window"))) {
                // Simple keyword heuristic for opportunity/risk (placeholder for LLM classification)
                const riskKeywords = ["risk", "decline", "drop", "fail", "critical", "breach", "vulnerability", "loss", "warning"];
                const oppKeywords = ["opportunity", "growth", "launch", "milestone", "breakthrough", "partnership", "funding", "expand"];
                for (const obs of eventObs.slice(-5)) { // Last 5 events
                    const text = `${obs.headline || ""} ${obs.summary || ""} ${obs.value_text || ""}`.toLowerCase();
                    const riskScore = riskKeywords.filter((k) => text.includes(k)).length;
                    const oppScore = oppKeywords.filter((k) => text.includes(k)).length;
                    if (riskScore >= 2 && (!filterTypes || filterTypes.has("risk_window"))) {
                        const signalId = genId("tsig");
                        detectedSignals.push({
                            signalId,
                            signalType: "risk_window",
                            confidence: Math.min(0.8, riskScore * 0.2),
                            severity: riskScore >= 3 ? "high" : "medium",
                            summary: `Risk indicators detected: ${obs.headline || "unnamed event"}`,
                            plainEnglish: `Recent event in '${streamKey}' contains multiple risk indicators.`,
                            sourceObservationId: obs.id,
                        });
                    }
                    if (oppScore >= 2 && (!filterTypes || filterTypes.has("opportunity_window"))) {
                        const signalId = genId("tsig");
                        detectedSignals.push({
                            signalId,
                            signalType: "opportunity_window",
                            confidence: Math.min(0.8, oppScore * 0.2),
                            severity: oppScore >= 3 ? "high" : "medium",
                            summary: `Opportunity indicators detected: ${obs.headline || "unnamed event"}`,
                            plainEnglish: `Recent event in '${streamKey}' suggests an actionable opportunity.`,
                            sourceObservationId: obs.id,
                        });
                    }
                }
            }
            return [{
                type: "text",
                text: JSON.stringify({
                    streamKey,
                    lookbackDays,
                    observationsAnalyzed: observations.length,
                    numericPoints: numericObs.length,
                    eventPoints: eventObs.length,
                    signalsDetected: detectedSignals.length,
                    signals: detectedSignals,
                }),
            }];
        },
    },
    // 3. build_causal_chain
    {
        name: "build_causal_chain",
        description: "Construct a causal chain from temporal observations. Nodes must be in chronological order. Each node represents a cause-effect step with timestamp, label, description, and optional evidence links.",
        inputSchema: {
            type: "object",
            properties: {
                title: {
                    type: "string",
                    description: "Title of the causal chain",
                },
                entityKey: {
                    type: "string",
                    description: "Entity this chain relates to",
                },
                rootQuestion: {
                    type: "string",
                    description: "The question this causal chain answers (e.g. 'Why did deployment latency spike?')",
                },
                nodes: {
                    type: "array",
                    items: {
                        type: "object",
                        properties: {
                            timestamp: { type: "string", description: "ISO timestamp of this node" },
                            label: { type: "string", description: "Short label (e.g. 'Config change pushed')" },
                            description: { type: "string", description: "Detailed description of this causal step" },
                            evidenceObservationIds: {
                                type: "array",
                                items: { type: "string" },
                                description: "IDs of temporal observations supporting this node",
                            },
                        },
                        required: ["timestamp", "label", "description"],
                    },
                    description: "Ordered list of causal nodes (must be chronological)",
                },
                outcome: {
                    type: "string",
                    description: "The outcome or conclusion of the causal chain",
                },
            },
            required: ["title", "entityKey", "rootQuestion", "nodes"],
        },
        annotations: { readOnlyHint: false, destructiveHint: false },
        handler: async (args) => {
            const title = args.title;
            const entityKey = args.entityKey;
            const rootQuestion = args.rootQuestion;
            const rawNodes = args.nodes;
            const outcome = args.outcome ?? null;
            if (!rawNodes || rawNodes.length === 0) {
                return [{ type: "text", text: JSON.stringify({ error: "At least one node is required" }) }];
            }
            // Parse and validate chronological order
            const parsedNodes = rawNodes.map((n) => ({
                timestamp: new Date(n.timestamp).getTime(),
                label: n.label,
                description: n.description,
                evidenceObservationIds: n.evidenceObservationIds || [],
            }));
            for (let i = 1; i < parsedNodes.length; i++) {
                if (parsedNodes[i].timestamp < parsedNodes[i - 1].timestamp) {
                    return [{ type: "text", text: JSON.stringify({
                        error: `Nodes must be in chronological order. Node ${i} ('${parsedNodes[i].label}') is before node ${i - 1} ('${parsedNodes[i - 1].label}')`,
                    }) }];
                }
            }
            if (parsedNodes.some((n) => isNaN(n.timestamp))) {
                return [{ type: "text", text: JSON.stringify({ error: "All node timestamps must be valid ISO dates" }) }];
            }
            const id = genId("tcc");
            const chainKey = `chain_${entityKey}_${Date.now()}`;
            const now = Date.now();
            const summary = `${title}: ${parsedNodes.length} causal steps from '${parsedNodes[0].label}' to '${parsedNodes[parsedNodes.length - 1].label}'`;
            const plainEnglish = `Causal chain answering: ${rootQuestion}. ${parsedNodes.length} steps identified${outcome ? `. Outcome: ${outcome}` : ""}.`;
            // Try Convex
            await convexMutation("domains/temporal/mutations:createCausalChain", {
                chainKey,
                title,
                entityKey,
                rootQuestion,
                status: "draft",
                summary,
                plainEnglish,
                outcome,
                nodes: parsedNodes,
                timeframeStartAt: parsedNodes[0].timestamp,
                timeframeEndAt: parsedNodes[parsedNodes.length - 1].timestamp,
            });
            // Local SQLite
            ensureTemporalTables();
            const db = getDb();
            db.prepare(`
        INSERT INTO temporal_causal_chains (id, chain_key, title, entity_key, root_question, status,
          summary, plain_english, outcome, nodes, created_at)
        VALUES (?, ?, ?, ?, ?, 'draft', ?, ?, ?, ?, ?)
      `).run(id, chainKey, title, entityKey, rootQuestion, summary, plainEnglish, outcome, JSON.stringify(parsedNodes), now);
            return [{
                type: "text",
                text: JSON.stringify({
                    chainId: id,
                    chainKey,
                    title,
                    nodeCount: parsedNodes.length,
                    timeframeStart: new Date(parsedNodes[0].timestamp).toISOString(),
                    timeframeEnd: new Date(parsedNodes[parsedNodes.length - 1].timestamp).toISOString(),
                    status: "draft",
                }),
            }];
        },
    },
    // 4. generate_zero_draft
    {
        name: "generate_zero_draft",
        description: "Auto-draft an artifact (slack message, email, spec doc, PR draft, architecture note, career plan, or content brief) based on detected signals and causal chains. Returns draft for human approval before sending.",
        inputSchema: {
            type: "object",
            properties: {
                artifactType: {
                    type: "string",
                    enum: ["slack_message", "email", "spec_doc", "pr_draft", "architecture_note", "career_plan", "content_brief"],
                    description: "Type of artifact to generate",
                },
                title: {
                    type: "string",
                    description: "Title of the artifact",
                },
                targetAudience: {
                    type: "string",
                    description: "Who this artifact is for (e.g. 'engineering team', 'VP of Product')",
                },
                linkedSignalIds: {
                    type: "array",
                    items: { type: "string" },
                    description: "IDs of temporal signals to incorporate",
                },
                linkedChainId: {
                    type: "string",
                    description: "ID of a causal chain to reference",
                },
                context: {
                    type: "string",
                    description: "Additional context or instructions for draft generation",
                },
            },
            required: ["artifactType", "title"],
        },
        annotations: { readOnlyHint: false, destructiveHint: false },
        handler: async (args) => {
            const artifactType = args.artifactType;
            const title = args.title;
            if (!VALID_ARTIFACT_TYPES.has(artifactType)) {
                return [{ type: "text", text: JSON.stringify({ error: `Invalid artifactType: ${artifactType}` }) }];
            }
            ensureTemporalTables();
            const db = getDb();
            // Gather context from linked signals
            let signalContext = "";
            const linkedSignalIds = args.linkedSignalIds || [];
            if (linkedSignalIds.length > 0) {
                const placeholders = linkedSignalIds.map(() => "?").join(",");
                const signals = db.prepare(`SELECT * FROM temporal_signals WHERE id IN (${placeholders})`).all(...linkedSignalIds);
                signalContext = signals.map((s) => `- [${s.signal_type}] ${s.summary} (confidence: ${s.confidence})`).join("\n");
            }
            // Gather context from linked chain
            let chainContext = "";
            const linkedChainId = args.linkedChainId;
            if (linkedChainId) {
                const chain = db.prepare("SELECT * FROM temporal_causal_chains WHERE id = ?").get(linkedChainId);
                if (chain) {
                    chainContext = `Causal Chain: ${chain.title}\nQuestion: ${chain.root_question}\n${chain.plain_english}`;
                }
            }
            // Generate draft body based on artifact type (template-based, LLM would enhance this)
            const additionalContext = args.context || "";
            const targetAudience = args.targetAudience || "general";
            const sections = [];
            sections.push(`# ${title}\n`);
            sections.push(`**Type:** ${artifactType.replace(/_/g, " ")}`);
            sections.push(`**Audience:** ${targetAudience}`);
            sections.push(`**Generated:** ${new Date().toISOString()}\n`);
            if (signalContext) {
                sections.push(`## Signals\n${signalContext}\n`);
            }
            if (chainContext) {
                sections.push(`## Causal Analysis\n${chainContext}\n`);
            }
            if (additionalContext) {
                sections.push(`## Context\n${additionalContext}\n`);
            }
            // Type-specific scaffolding
            switch (artifactType) {
                case "slack_message":
                    sections.push(`## Draft Message\n[Compose concise message based on signals above]\n`);
                    break;
                case "email":
                    sections.push(`## Subject Line\n[Draft subject]\n\n## Body\n[Draft body with greeting, context, ask, sign-off]\n`);
                    break;
                case "spec_doc":
                    sections.push(`## Problem Statement\n[What problem does this solve?]\n\n## Proposed Solution\n[How?]\n\n## Success Criteria\n[How do we know it worked?]\n`);
                    break;
                case "pr_draft":
                    sections.push(`## Summary\n[What changed and why]\n\n## Test Plan\n[How to verify]\n\n## Risk Assessment\n[What could break]\n`);
                    break;
                case "architecture_note":
                    sections.push(`## Decision\n[The architectural decision]\n\n## Rationale\n[Why this approach]\n\n## Alternatives Considered\n[What else was evaluated]\n\n## Consequences\n[Tradeoffs accepted]\n`);
                    break;
                case "career_plan":
                    sections.push(`## Current State\n[Where you are]\n\n## Target State\n[Where you want to be]\n\n## Gap Analysis\n[What's missing]\n\n## 90-Day Actions\n[Concrete next steps]\n`);
                    break;
                case "content_brief":
                    sections.push(`## Hook\n[Opening angle]\n\n## Key Points\n[3-5 main points]\n\n## Evidence\n[Supporting data]\n\n## CTA\n[Call to action]\n`);
                    break;
            }
            const bodyMarkdown = sections.join("\n");
            const id = genId("tzd");
            const artifactKey = `draft_${artifactType}_${Date.now()}`;
            const now = Date.now();
            const summary = `Zero-draft ${artifactType.replace(/_/g, " ")} — '${title}' for ${targetAudience}`;
            const plainEnglish = `Draft ${artifactType.replace(/_/g, " ")} created with ${linkedSignalIds.length} linked signals${linkedChainId ? " and causal chain analysis" : ""}.`;
            db.prepare(`
        INSERT INTO temporal_zero_drafts (id, artifact_key, artifact_type, status, title, summary,
          target_audience, body_markdown, linked_signal_ids, linked_chain_id, created_at)
        VALUES (?, ?, ?, 'draft', ?, ?, ?, ?, ?, ?, ?)
      `).run(id, artifactKey, artifactType, title, summary, targetAudience, bodyMarkdown, JSON.stringify(linkedSignalIds), linkedChainId ?? null, now);
            return [{
                type: "text",
                text: JSON.stringify({
                    artifactId: id,
                    artifactKey,
                    artifactType,
                    title,
                    status: "draft",
                    bodyMarkdown,
                    linkedSignals: linkedSignalIds.length,
                    linkedChain: linkedChainId ?? null,
                    hint: "Review and edit the draft, then approve it via the Convex UI or a follow-up tool call.",
                }),
            }];
        },
    },
    // 5. create_proof_pack
    {
        name: "create_proof_pack",
        description: "Assemble an immutable proof pack for verification. Bundles a checklist (pass/fail items), optional metrics (tokens, duration, cost), and links to dogfood runs and zero-draft artifacts. Computes pass rate automatically.",
        inputSchema: {
            type: "object",
            properties: {
                subjectType: {
                    type: "string",
                    enum: ["deployment", "career_move", "content_release", "research_run", "agent_loop"],
                    description: "What this proof pack covers",
                },
                subjectId: {
                    type: "string",
                    description: "Identifier for the subject (e.g. deployment version, research run ID)",
                },
                checklist: {
                    type: "array",
                    items: {
                        type: "object",
                        properties: {
                            label: { type: "string", description: "Checklist item label" },
                            passed: { type: "boolean", description: "Whether this item passed" },
                            note: { type: "string", description: "Optional note about this item" },
                        },
                        required: ["label", "passed"],
                    },
                    description: "Checklist items with pass/fail status",
                },
                metrics: {
                    type: "object",
                    properties: {
                        totalTokens: { type: "number" },
                        totalDurationMs: { type: "number" },
                        estimatedCostUsd: { type: "number" },
                    },
                    description: "Optional performance metrics",
                },
                dogfoodRunId: {
                    type: "string",
                    description: "ID of associated dogfood QA run",
                },
                zeroDraftArtifactIds: {
                    type: "array",
                    items: { type: "string" },
                    description: "IDs of zero-draft artifacts included in this proof pack",
                },
            },
            required: ["subjectType", "subjectId", "checklist"],
        },
        annotations: { readOnlyHint: false, destructiveHint: false },
        handler: async (args) => {
            const subjectType = args.subjectType;
            const subjectId = args.subjectId;
            const checklist = args.checklist;
            if (!VALID_SUBJECT_TYPES.has(subjectType)) {
                return [{ type: "text", text: JSON.stringify({ error: `Invalid subjectType: ${subjectType}` }) }];
            }
            if (!checklist || checklist.length === 0) {
                return [{ type: "text", text: JSON.stringify({ error: "Checklist must have at least one item" }) }];
            }
            const passed = checklist.filter((c) => c.passed).length;
            const total = checklist.length;
            const passRate = passed / total;
            const id = genId("tpp");
            const packKey = `proof_${subjectType}_${subjectId}_${Date.now()}`;
            const now = Date.now();
            const metrics = args.metrics ?? null;
            const zeroDraftArtifactIds = args.zeroDraftArtifactIds || [];
            const status = passRate === 1 ? "ready" : "draft";
            const summary = `Proof pack for ${subjectType}/${subjectId}: ${passed}/${total} passed (${(passRate * 100).toFixed(0)}%)`;
            ensureTemporalTables();
            const db = getDb();
            db.prepare(`
        INSERT INTO temporal_proof_packs (id, pack_key, subject_type, subject_id, status, summary,
          checklist, pass_rate, metrics, dogfood_run_id, zero_draft_artifact_ids, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `).run(id, packKey, subjectType, subjectId, status, summary, JSON.stringify(checklist), passRate, metrics ? JSON.stringify(metrics) : null, args.dogfoodRunId ?? null, JSON.stringify(zeroDraftArtifactIds), now);
            return [{
                type: "text",
                text: JSON.stringify({
                    proofPackId: id,
                    packKey,
                    subjectType,
                    subjectId,
                    status,
                    passRate,
                    passed,
                    total,
                    summary,
                    metrics: metrics ?? null,
                }),
            }];
        },
    },
    // 6. query_temporal_signals
    {
        name: "query_temporal_signals",
        description: "Search and retrieve temporal signals with filtering by entity, signal type, status, and date range. Returns formatted signal records from the local temporal store.",
        inputSchema: {
            type: "object",
            properties: {
                entityKey: {
                    type: "string",
                    description: "Filter by entity key",
                },
                signalType: {
                    type: "string",
                    enum: ["momentum", "regime_shift", "anomaly", "causal_hint", "opportunity_window", "risk_window"],
                    description: "Filter by signal type",
                },
                status: {
                    type: "string",
                    enum: ["open", "watch", "resolved", "dismissed"],
                    description: "Filter by status (default: all)",
                },
                startDate: {
                    type: "string",
                    description: "Filter signals detected after this ISO date",
                },
                endDate: {
                    type: "string",
                    description: "Filter signals detected before this ISO date",
                },
                limit: {
                    type: "number",
                    description: "Max results (default: 20)",
                },
            },
            required: [],
        },
        annotations: { readOnlyHint: true, openWorldHint: false },
        handler: async (args) => {
            ensureTemporalTables();
            const db = getDb();
            const limit = args.limit || 20;
            let sql = "SELECT * FROM temporal_signals WHERE 1=1";
            const params = [];
            if (args.entityKey) {
                sql += " AND entity_key = ?";
                params.push(args.entityKey);
            }
            if (args.signalType) {
                sql += " AND signal_type = ?";
                params.push(args.signalType);
            }
            if (args.status) {
                sql += " AND status = ?";
                params.push(args.status);
            }
            if (args.startDate) {
                sql += " AND detected_at >= ?";
                params.push(new Date(args.startDate).getTime());
            }
            if (args.endDate) {
                sql += " AND detected_at <= ?";
                params.push(new Date(args.endDate).getTime());
            }
            sql += " ORDER BY detected_at DESC LIMIT ?";
            params.push(limit);
            const signals = db.prepare(sql).all(...params);
            return [{
                type: "text",
                text: JSON.stringify({
                    signals: signals.map((s) => ({
                        ...s,
                        detectedAtISO: new Date(s.detected_at).toISOString(),
                    })),
                    count: signals.length,
                    filters: {
                        entityKey: args.entityKey ?? null,
                        signalType: args.signalType ?? null,
                        status: args.status ?? null,
                        startDate: args.startDate ?? null,
                        endDate: args.endDate ?? null,
                    },
                }),
            }];
        },
    },
    // 7. forecast_temporal_trend
    {
        name: "forecast_temporal_trend",
        description: "Zero-shot forecasting on numeric time series. Supports naive (last value), linear (regression), and exponential_smoothing methods. Returns predictions with confidence intervals. Placeholder for future TSFM (Chronos/TimesFM) microservice integration.",
        inputSchema: {
            type: "object",
            properties: {
                streamKey: {
                    type: "string",
                    description: "Stream to forecast",
                },
                horizonDays: {
                    type: "number",
                    description: "How many days ahead to forecast",
                },
                method: {
                    type: "string",
                    enum: ["naive", "linear", "exponential_smoothing"],
                    description: "Forecasting method (default: linear)",
                },
            },
            required: ["streamKey", "horizonDays"],
        },
        annotations: { readOnlyHint: true, openWorldHint: false },
        handler: async (args) => {
            ensureTemporalTables();
            const db = getDb();
            const streamKey = args.streamKey;
            const horizonDays = args.horizonDays;
            const method = args.method || "linear";
            if (horizonDays < 1 || horizonDays > 365) {
                return [{ type: "text", text: JSON.stringify({ error: "horizonDays must be between 1 and 365" }) }];
            }
            // Get numeric observations
            const observations = db.prepare(`
        SELECT observed_at, value_number FROM temporal_observations
        WHERE stream_key = ? AND observation_type = 'numeric' AND value_number IS NOT NULL
        ORDER BY observed_at ASC
      `).all(streamKey);
            if (observations.length < 2) {
                return [{ type: "text", text: JSON.stringify({
                    error: `Need at least 2 numeric observations for forecasting. Found ${observations.length} for stream '${streamKey}'.`,
                    hint: "Ingest numeric observations with ingest_temporal_observation first.",
                }) }];
            }
            const values = observations.map((o) => o.value_number);
            const timestamps = observations.map((o) => o.observed_at);
            const lastTs = timestamps[timestamps.length - 1];
            const DAY_MS = 86_400_000;
            const s = stddev(values);
            const predictions = [];
            switch (method) {
                case "naive": {
                    const lastVal = values[values.length - 1];
                    for (let d = 1; d <= horizonDays; d++) {
                        const uncertainty = s * Math.sqrt(d) * 0.5;
                        predictions.push({
                            day: d,
                            date: new Date(lastTs + d * DAY_MS).toISOString().split("T")[0],
                            predicted: lastVal,
                            lower: lastVal - 1.96 * uncertainty,
                            upper: lastVal + 1.96 * uncertainty,
                        });
                    }
                    break;
                }
                case "linear": {
                    const { slope, intercept, r2 } = linearRegression(timestamps, values);
                    for (let d = 1; d <= horizonDays; d++) {
                        const futureTs = lastTs + d * DAY_MS;
                        const predicted = slope * futureTs + intercept;
                        // Prediction interval widens with distance and inversely with R2
                        const uncertainty = s * Math.sqrt(1 + 1 / observations.length + d * 0.1) * (1 + (1 - Math.abs(r2)));
                        predictions.push({
                            day: d,
                            date: new Date(futureTs).toISOString().split("T")[0],
                            predicted,
                            lower: predicted - 1.96 * uncertainty,
                            upper: predicted + 1.96 * uncertainty,
                        });
                    }
                    break;
                }
                case "exponential_smoothing": {
                    // Simple exponential smoothing (alpha=0.3)
                    const alpha = 0.3;
                    let level = values[0];
                    for (const v of values) {
                        level = alpha * v + (1 - alpha) * level;
                    }
                    for (let d = 1; d <= horizonDays; d++) {
                        const uncertainty = s * Math.sqrt(d) * 0.6;
                        predictions.push({
                            day: d,
                            date: new Date(lastTs + d * DAY_MS).toISOString().split("T")[0],
                            predicted: level,
                            lower: level - 1.96 * uncertainty,
                            upper: level + 1.96 * uncertainty,
                        });
                    }
                    break;
                }
                default:
                    return [{ type: "text", text: JSON.stringify({ error: `Unknown method: ${method}. Use naive, linear, or exponential_smoothing.` }) }];
            }
            return [{
                type: "text",
                text: JSON.stringify({
                    streamKey,
                    method,
                    horizonDays,
                    historicalPoints: observations.length,
                    historicalRange: {
                        start: new Date(timestamps[0]).toISOString(),
                        end: new Date(lastTs).toISOString(),
                    },
                    historicalStats: {
                        mean: mean(values),
                        stddev: s,
                        min: Math.min(...values),
                        max: Math.max(...values),
                    },
                    predictions,
                    disclaimer: "Statistical forecast only. For production use, integrate Chronos or TimesFM TSFM microservice.",
                }),
            }];
        },
    },
];
//# sourceMappingURL=temporalIntelligenceTools.js.map
```
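
For orientation, here is a minimal sketch of how the new stream tools compose when driven from an MCP client: ingest a numeric observation, scan the stream for signals, then forecast ahead. The `callTool` helper is hypothetical (substitute your MCP client's tool-invocation method); the argument shapes follow the input schemas in the diff above, and each handler returns a content array of the form `[{ type: "text", text: "<JSON>" }]`.

```js
// Minimal sketch, assuming callTool(name, args) wraps your MCP client's
// tool invocation and resolves to the tool's content array.
async function temporalDemo(callTool) {
    // 1. Ingest one numeric observation into a stream.
    await callTool("ingest_temporal_observation", {
        streamKey: "github/commits/nodebench",
        sourceType: "github",
        observationType: "numeric",
        observedAt: new Date().toISOString(),
        valueNumber: 42,
        headline: "Daily commit count",
    });
    // 2. Scan the last 30 days for momentum / anomaly / regime-shift signals.
    const detectRes = await callTool("detect_temporal_signal", {
        streamKey: "github/commits/nodebench",
        lookbackDays: 30,
    });
    const { signals } = JSON.parse(detectRes[0].text);
    // 3. Forecast 14 days ahead with the default linear method.
    const forecastRes = await callTool("forecast_temporal_trend", {
        streamKey: "github/commits/nodebench",
        horizonDays: 14,
        method: "linear",
    });
    const { predictions } = JSON.parse(forecastRes[0].text);
    return { signals, predictions };
}
```

Note the thresholds in the handlers: numeric detection needs at least 3 points (5 for momentum, 10 for regime shifts) and forecasting needs at least 2, so a single ingest as above would only exercise the empty-result and error paths.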