@dungle-scrubs/hippo 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +439 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +559 -0
- package/dist/cli.js.map +1 -0
- package/dist/db.d.ts +45 -0
- package/dist/db.d.ts.map +1 -0
- package/dist/db.js +80 -0
- package/dist/db.js.map +1 -0
- package/dist/extractor.d.ts +23 -0
- package/dist/extractor.d.ts.map +1 -0
- package/dist/extractor.js +121 -0
- package/dist/extractor.js.map +1 -0
- package/dist/hash.d.ts +11 -0
- package/dist/hash.d.ts.map +1 -0
- package/dist/hash.js +14 -0
- package/dist/hash.js.map +1 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +40 -0
- package/dist/index.js.map +1 -0
- package/dist/providers/embedding.d.ts +27 -0
- package/dist/providers/embedding.d.ts.map +1 -0
- package/dist/providers/embedding.js +41 -0
- package/dist/providers/embedding.js.map +1 -0
- package/dist/providers/llm.d.ts +29 -0
- package/dist/providers/llm.d.ts.map +1 -0
- package/dist/providers/llm.js +74 -0
- package/dist/providers/llm.js.map +1 -0
- package/dist/schema.d.ts +20 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +78 -0
- package/dist/schema.js.map +1 -0
- package/dist/server/config.d.ts +38 -0
- package/dist/server/config.d.ts.map +1 -0
- package/dist/server/config.js +71 -0
- package/dist/server/config.js.map +1 -0
- package/dist/server/index.d.ts +13 -0
- package/dist/server/index.d.ts.map +1 -0
- package/dist/server/index.js +372 -0
- package/dist/server/index.js.map +1 -0
- package/dist/similarity.d.ts +41 -0
- package/dist/similarity.d.ts.map +1 -0
- package/dist/similarity.js +70 -0
- package/dist/similarity.js.map +1 -0
- package/dist/strength.d.ts +59 -0
- package/dist/strength.d.ts.map +1 -0
- package/dist/strength.js +76 -0
- package/dist/strength.js.map +1 -0
- package/dist/tools/append-memory-block.d.ts +22 -0
- package/dist/tools/append-memory-block.d.ts.map +1 -0
- package/dist/tools/append-memory-block.js +45 -0
- package/dist/tools/append-memory-block.js.map +1 -0
- package/dist/tools/forget-memory.d.ts +31 -0
- package/dist/tools/forget-memory.d.ts.map +1 -0
- package/dist/tools/forget-memory.js +77 -0
- package/dist/tools/forget-memory.js.map +1 -0
- package/dist/tools/index.d.ts +9 -0
- package/dist/tools/index.d.ts.map +1 -0
- package/dist/tools/index.js +9 -0
- package/dist/tools/index.js.map +1 -0
- package/dist/tools/recall-conversation.d.ts +21 -0
- package/dist/tools/recall-conversation.d.ts.map +1 -0
- package/dist/tools/recall-conversation.js +93 -0
- package/dist/tools/recall-conversation.js.map +1 -0
- package/dist/tools/recall-memories.d.ts +29 -0
- package/dist/tools/recall-memories.d.ts.map +1 -0
- package/dist/tools/recall-memories.js +106 -0
- package/dist/tools/recall-memories.js.map +1 -0
- package/dist/tools/recall-memory-block.d.ts +21 -0
- package/dist/tools/recall-memory-block.d.ts.map +1 -0
- package/dist/tools/recall-memory-block.js +36 -0
- package/dist/tools/recall-memory-block.js.map +1 -0
- package/dist/tools/remember-facts.d.ts +30 -0
- package/dist/tools/remember-facts.d.ts.map +1 -0
- package/dist/tools/remember-facts.js +235 -0
- package/dist/tools/remember-facts.js.map +1 -0
- package/dist/tools/replace-memory-block.d.ts +25 -0
- package/dist/tools/replace-memory-block.d.ts.map +1 -0
- package/dist/tools/replace-memory-block.js +69 -0
- package/dist/tools/replace-memory-block.js.map +1 -0
- package/dist/tools/store-memory.d.ts +27 -0
- package/dist/tools/store-memory.d.ts.map +1 -0
- package/dist/tools/store-memory.js +129 -0
- package/dist/tools/store-memory.js.map +1 -0
- package/dist/types.d.ts +86 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/ulid.d.ts +13 -0
- package/dist/ulid.d.ts.map +1 -0
- package/dist/ulid.js +39 -0
- package/dist/ulid.js.map +1 -0
- package/package.json +70 -0
package/dist/db.js
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
 * Build all prepared statements for hippo operations.
 *
 * Called once per database handle (see createHippoTools in index.js); the
 * returned cache is reused for every subsequent query.
 *
 * Statements follow better-sqlite3 conventions: "?" for positional binds,
 * "@name" for named binds supplied via an object.
 *
 * @param db - better-sqlite3 Database handle
 * @returns Prepared statement cache
 */
export function prepareStatements(db) {
    return {
        // Detach any chunks that still point at a given chunk as their superseder
        // (used when the superseding chunk itself is removed).
        clearSupersededBy: db.prepare("UPDATE chunks SET superseded_by = NULL WHERE superseded_by = ? AND agent_id = ?"),
        deleteChunk: db.prepare("DELETE FROM chunks WHERE id = ?"),
        // "Active" = not superseded. Most recently accessed first; LIMIT -1 means
        // unlimited in SQLite.
        getAllActiveChunksByAgent: db.prepare(`
            SELECT * FROM chunks
            WHERE agent_id = ? AND superseded_by IS NULL
            ORDER BY last_accessed_at DESC
            LIMIT ?
        `),
        // Same as above but additionally filtered by chunk kind.
        getActiveChunksByAgent: db.prepare(`
            SELECT * FROM chunks
            WHERE agent_id = ? AND kind = ? AND superseded_by IS NULL
            ORDER BY last_accessed_at DESC
            LIMIT ?
        `),
        getBlockByKey: db.prepare("SELECT * FROM memory_blocks WHERE agent_id = ? AND key = ?"),
        // Dedup lookup: active memory chunk with identical content hash.
        getMemoryByHash: db.prepare("SELECT * FROM chunks WHERE agent_id = ? AND content_hash = ? AND kind = 'memory' AND superseded_by IS NULL"),
        insertChunk: db.prepare(`
            INSERT INTO chunks (id, agent_id, content, content_hash, embedding, metadata,
                kind, running_intensity, encounter_count, access_count, last_accessed_at, created_at)
            VALUES (@id, @agent_id, @content, @content_hash, @embedding, @metadata,
                @kind, @running_intensity, @encounter_count, @access_count, @last_accessed_at, @created_at)
        `),
        // Re-encounter of an existing chunk: caller supplies the new running
        // intensity; encounter and access counters bump together.
        reinforceChunk: db.prepare(`
            UPDATE chunks
            SET running_intensity = @running_intensity,
                encounter_count = encounter_count + 1,
                access_count = access_count + 1,
                last_accessed_at = @last_accessed_at
            WHERE id = @id
        `),
        supersedeChunk: db.prepare("UPDATE chunks SET superseded_by = ? WHERE id = ?"),
        // Read-path touch: bumps access_count only (not encounter_count),
        // unlike reinforceChunk.
        touchChunk: db.prepare(`
            UPDATE chunks
            SET access_count = access_count + 1,
                running_intensity = @running_intensity,
                last_accessed_at = @last_accessed_at
            WHERE id = @id
        `),
        // Insert-or-update keyed memory block, scoped per agent.
        upsertBlock: db.prepare(`
            INSERT INTO memory_blocks (agent_id, key, value, updated_at)
            VALUES (@agent_id, @key, @value, @updated_at)
            ON CONFLICT(agent_id, key) DO UPDATE
            SET value = @value, updated_at = @updated_at
        `),
    };
}
|
|
55
|
+
/**
 * Get all active (non-superseded) chunks for an agent by kind.
 *
 * @param stmts - Prepared statements
 * @param agentId - Agent namespace
 * @param kind - Chunk kind ('fact' or 'memory')
 * @param limit - Max rows to return (-1 for unlimited, default -1)
 * @returns Array of chunks
 */
export function getActiveChunks(stmts, agentId, kind, limit = -1) {
    const query = stmts.getActiveChunksByAgent;
    return query.all(agentId, kind, limit);
}
|
|
67
|
+
/**
 * Get all active (non-superseded) chunks for an agent regardless of kind.
 *
 * Single query instead of two separate kind-filtered queries.
 *
 * @param stmts - Prepared statements
 * @param agentId - Agent namespace
 * @param limit - Max rows to return (-1 for unlimited, default -1)
 * @returns Array of chunks (facts and memories combined)
 */
export function getAllActiveChunks(stmts, agentId, limit = -1) {
    const query = stmts.getAllActiveChunksByAgent;
    return query.all(agentId, limit);
}
|
|
80
|
+
//# sourceMappingURL=db.js.map
|
package/dist/db.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"db.js","sourceRoot":"","sources":["../src/db.ts"],"names":[],"mappings":"AAkBA;;;;;GAKG;AACH,MAAM,UAAU,iBAAiB,CAAC,EAAY;IAC7C,OAAO;QACN,iBAAiB,EAAE,EAAE,CAAC,OAAO,CAC5B,iFAAiF,CACjF;QAED,WAAW,EAAE,EAAE,CAAC,OAAO,CAAC,iCAAiC,CAAC;QAE1D,yBAAyB,EAAE,EAAE,CAAC,OAAO,CAAC;;;;;GAKrC,CAAC;QAEF,sBAAsB,EAAE,EAAE,CAAC,OAAO,CAAC;;;;;GAKlC,CAAC;QAEF,aAAa,EAAE,EAAE,CAAC,OAAO,CAAC,4DAA4D,CAAC;QAEvF,eAAe,EAAE,EAAE,CAAC,OAAO,CAC1B,4GAA4G,CAC5G;QAED,WAAW,EAAE,EAAE,CAAC,OAAO,CAAC;;;;;GAKvB,CAAC;QAEF,cAAc,EAAE,EAAE,CAAC,OAAO,CAAC;;;;;;;GAO1B,CAAC;QAEF,cAAc,EAAE,EAAE,CAAC,OAAO,CAAC,kDAAkD,CAAC;QAE9E,UAAU,EAAE,EAAE,CAAC,OAAO,CAAC;;;;;;GAMtB,CAAC;QAEF,WAAW,EAAE,EAAE,CAAC,OAAO,CAAC;;;;;GAKvB,CAAC;KACF,CAAC;AACH,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,eAAe,CAC9B,KAAmB,EACnB,OAAe,EACf,IAAuB,EACvB,KAAK,GAAG,CAAC,CAAC;IAEV,OAAO,KAAK,CAAC,sBAAsB,CAAC,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,KAAK,CAAY,CAAC;AAC1E,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAmB,EAAE,OAAe,EAAE,KAAK,GAAG,CAAC,CAAC;IAClF,OAAO,KAAK,CAAC,yBAAyB,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,CAAY,CAAC;AACvE,CAAC"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
// NOTE(review): compiler-generated declaration file (dist output with
// sourceMappingURL) — edit src/extractor.ts instead of this file.
import type { ConflictClassification, ExtractedFact, LlmClient } from "./types.js";
/**
 * Extract discrete facts from text with intensity ratings.
 *
 * @param text - User text to extract facts from
 * @param llm - LLM client for extraction
 * @param signal - Optional abort signal
 * @returns Array of extracted facts with intensity ratings
 */
export declare function extractFacts(text: string, llm: LlmClient, signal?: AbortSignal): Promise<readonly ExtractedFact[]>;
/**
 * Classify the relationship between a new fact and an existing fact.
 *
 * Only called for candidates in the ambiguous similarity band (0.78–0.93).
 *
 * @param newFact - The newly extracted fact
 * @param existingFact - The existing fact to compare against
 * @param llm - LLM client for classification
 * @param signal - Optional abort signal
 * @returns Classification: DUPLICATE, SUPERSEDES, or DISTINCT
 */
export declare function classifyConflict(newFact: string, existingFact: string, llm: LlmClient, signal?: AbortSignal): Promise<ConflictClassification>;
//# sourceMappingURL=extractor.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"extractor.d.ts","sourceRoot":"","sources":["../src/extractor.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,sBAAsB,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,YAAY,CAAC;AA4CnF;;;;;;;GAOG;AACH,wBAAsB,YAAY,CACjC,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,SAAS,EACd,MAAM,CAAC,EAAE,WAAW,GAClB,OAAO,CAAC,SAAS,aAAa,EAAE,CAAC,CAenC;AAED;;;;;;;;;;GAUG;AACH,wBAAsB,gBAAgB,CACrC,OAAO,EAAE,MAAM,EACf,YAAY,EAAE,MAAM,EACpB,GAAG,EAAE,SAAS,EACd,MAAM,CAAC,EAAE,WAAW,GAClB,OAAO,CAAC,sBAAsB,CAAC,CAgBjC"}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/**
 * Build a pi-ai UserMessage from text content.
 *
 * @param content - Message text
 * @returns UserMessage compatible with pi-ai's Message type
 */
function userMessage(content) {
    const timestamp = Date.now();
    return { content, role: "user", timestamp };
}
|
|
10
|
+
// System prompt for fact extraction. Instructs the model to emit a bare JSON
// array of {fact, intensity} objects; the response is parsed by
// parseJsonArray and validated by isValidExtractedFact in this module.
const EXTRACTION_SYSTEM_PROMPT = `You extract discrete facts from user text and rate each fact's intensity.

Rules:
- Extract ONLY factual claims, preferences, or decisions. Not questions or filler.
- Each fact should be a single, atomic statement.
- Rate intensity 0.0–1.0 based on:
- Emotional charge ("hate", "love", "nightmare") → higher
- Consequence language ("cost us the client") → higher
- Absolute language ("never", "always", "I refuse") → higher
- Identity statements ("I'm a backend person") → higher
- Explicit importance ("remember this", "this is critical") → higher
- Casual aside, no signal → 0.1–0.2
- Clear statement with mild opinion → 0.3–0.5
- Strong conviction or emotional charge → 0.6–0.8
- Sustained pattern + identity-level → 0.85–1.0

Respond with ONLY a JSON array. No markdown, no explanation.
Example:
[{"fact": "User dislikes Redux", "intensity": 0.85}, {"fact": "User tried a café on Sukhumvit", "intensity": 0.15}]

If there are no extractable facts, respond with an empty array: []`;
// System prompt for conflict classification. Expects a single-word reply;
// classifyConflict falls back to DISTINCT on anything unrecognized.
const CLASSIFICATION_SYSTEM_PROMPT = `You classify the relationship between a new fact and an existing fact.

Respond with EXACTLY one word: DUPLICATE, SUPERSEDES, or DISTINCT.

- DUPLICATE: Same information, different wording. Example: "User lives in Berlin" vs "User's city is Berlin"
- SUPERSEDES: Same topic, new value replaces old. Example: "User lives in Berlin" vs "User lives in Bangkok"
- DISTINCT: Related but both can be true simultaneously. Example: "User likes TypeScript" vs "User likes Rust"

Respond with ONLY the classification word. No explanation.`;
|
|
40
|
+
/**
 * Extract discrete facts from text with intensity ratings.
 *
 * @param text - User text to extract facts from
 * @param llm - LLM client for extraction
 * @param signal - Optional abort signal
 * @returns Array of extracted facts, each with intensity clamped to [0, 1]
 */
export async function extractFacts(text, llm, signal) {
    const response = await llm.complete([userMessage(text)], EXTRACTION_SYSTEM_PROMPT, signal);
    const parsed = parseJsonArray(response);
    if (!parsed) {
        return [];
    }
    return parsed
        .filter(isValidExtractedFact)
        // Guard against NaN/Infinity from the LLM: typeof NaN === "number"
        // passes the type guard, and Math.min/Math.max propagate NaN, so a
        // plain clamp would let a NaN intensity into the store.
        .filter((f) => Number.isFinite(f.intensity))
        .map((f) => ({
        fact: f.fact.trim(),
        intensity: Math.max(0, Math.min(1, f.intensity)),
    }))
        .filter((f) => f.fact.length > 0);
}
|
|
62
|
+
/**
 * Classify the relationship between a new fact and an existing fact.
 *
 * Only called for candidates in the ambiguous similarity band (0.78–0.93).
 *
 * @param newFact - The newly extracted fact
 * @param existingFact - The existing fact to compare against
 * @param llm - LLM client for classification
 * @param signal - Optional abort signal
 * @returns Classification: DUPLICATE, SUPERSEDES, or DISTINCT
 */
export async function classifyConflict(newFact, existingFact, llm, signal) {
    const prompt = `New fact: "${newFact}"\nExisting fact: "${existingFact}"`;
    const response = await llm.complete([userMessage(prompt)], CLASSIFICATION_SYSTEM_PROMPT, signal);
    // Take the first whitespace-delimited token, strip punctuation, normalize case.
    const label = response
        .trim()
        .split(/\s/)[0]
        ?.replace(/[^A-Za-z]/g, "")
        .toUpperCase();
    switch (label) {
        case "DUPLICATE":
        case "SUPERSEDES":
        case "DISTINCT":
            return label;
        default:
            // Default to DISTINCT if the LLM returns garbage.
            return "DISTINCT";
    }
}
|
|
87
|
+
/**
 * Parse a JSON array from an LLM response, tolerating markdown fences.
 *
 * @param text - Raw LLM response
 * @returns Parsed array or null
 */
function parseJsonArray(text) {
    const trimmed = text.trim();
    // Tolerate responses wrapped in a ``` or ```json fence.
    const body = trimmed.startsWith("```")
        ? trimmed.replace(/^```(?:json)?\s*\n?/, "").replace(/\n?```\s*$/, "")
        : trimmed;
    try {
        const value = JSON.parse(body);
        if (Array.isArray(value)) {
            return value;
        }
        return null;
    }
    catch {
        return null;
    }
}
|
|
107
|
+
/**
 * Type guard for extracted fact shape.
 *
 * @param value - Unknown value to check
 * @returns True if value matches ExtractedFact shape
 */
function isValidExtractedFact(value) {
    if (typeof value !== "object" || value === null) {
        return false;
    }
    if (!("fact" in value) || typeof value.fact !== "string") {
        return false;
    }
    return "intensity" in value && typeof value.intensity === "number";
}
|
|
121
|
+
//# sourceMappingURL=extractor.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"extractor.js","sourceRoot":"","sources":["../src/extractor.ts"],"names":[],"mappings":"AAGA;;;;;GAKG;AACH,SAAS,WAAW,CAAC,OAAe;IACnC,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,CAAC;AACzD,CAAC;AAED,MAAM,wBAAwB,GAAG;;;;;;;;;;;;;;;;;;;;mEAoBkC,CAAC;AAEpE,MAAM,4BAA4B,GAAG;;;;;;;;2DAQsB,CAAC;AAE5D;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CACjC,IAAY,EACZ,GAAc,EACd,MAAoB;IAEpB,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,EAAE,wBAAwB,EAAE,MAAM,CAAC,CAAC;IAE3F,MAAM,MAAM,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAC;IACxC,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,OAAO,EAAE,CAAC;IACX,CAAC;IAED,OAAO,MAAM;SACX,MAAM,CAAC,oBAAoB,CAAC;SAC5B,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACZ,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE;QACnB,SAAS,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC;KAChD,CAAC,CAAC;SACF,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACpC,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACrC,OAAe,EACf,YAAoB,EACpB,GAAc,EACd,MAAoB;IAEpB,MAAM,MAAM,GAAG,cAAc,OAAO,sBAAsB,YAAY,GAAG,CAAC;IAE1E,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,EAAE,4BAA4B,EAAE,MAAM,CAAC,CAAC;IAEjG,MAAM,SAAS,GAAG,QAAQ;SACxB,IAAI,EAAE;SACN,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACf,EAAE,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC;SAC1B,WAAW,EAAE,CAAC;IAChB,IAAI,SAAS,KAAK,WAAW,IAAI,SAAS,KAAK,YAAY,IAAI,SAAS,KAAK,UAAU,EAAE,CAAC;QACzF,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,6CAA6C;IAC7C,OAAO,UAAU,CAAC;AACnB,CAAC;AAED;;;;;GAKG;AACH,SAAS,cAAc,CAAC,IAAY;IACnC,IAAI,OAAO,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;IAE1B,wCAAwC;IACxC,IAAI,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC;QAC/B,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,qBAAqB,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAChF,CAAC;IAED,IAAI,CAAC;QACJ,MAAM,MAAM,GAAY,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC5C,OAAO,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC;IAC9C,CAAC;IAAC,MAAM,CAAC;QACR,OAAO,IAAI,CAAC;IACb,CAAC
;AACF,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAAC,KAAc;IAC3C,OAAO,CACN,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,MAAM,IAAI,KAAK;QACf,OAAQ,KAAuB,CAAC,IAAI,KAAK,QAAQ;QACjD,WAAW,IAAI,KAAK;QACpB,OAAQ,KAAuB,CAAC,SAAS,KAAK,QAAQ,CACtD,CAAC;AACH,CAAC"}
|
package/dist/hash.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
// NOTE(review): compiler-generated declaration file (dist output with
// sourceMappingURL) — edit src/hash.ts instead of this file.
/**
 * Compute SHA-256 hash of content for dedup.
 *
 * Synchronous — matches the sync SQLite layer. Uses node:crypto
 * instead of the async Web Crypto API.
 *
 * @param content - Text to hash
 * @returns Hex-encoded SHA-256 hash
 */
export declare function contentHash(content: string): string;
//# sourceMappingURL=hash.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AAEA;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAEnD"}
|
package/dist/hash.js
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
/**
 * Compute SHA-256 hash of content for dedup.
 *
 * Synchronous — matches the sync SQLite layer. Uses node:crypto
 * instead of the async Web Crypto API.
 *
 * @param content - Text to hash
 * @returns Hex-encoded SHA-256 hash
 */
export function contentHash(content) {
    const hasher = createHash("sha256");
    hasher.update(content);
    return hasher.digest("hex");
}
|
|
14
|
+
//# sourceMappingURL=hash.js.map
|
package/dist/hash.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hash.js","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CAAC,OAAe;IAC1C,OAAO,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAC3D,CAAC"}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// NOTE(review): compiler-generated declaration file (dist output with
// sourceMappingURL) — edit src/index.ts instead of this file.
import type { AgentTool } from "@mariozechner/pi-agent-core";
import type { HippoOptions } from "./types.js";
/**
 * Create all hippo memory tools for an agent.
 *
 * Initializes the SQLite schema (idempotent) and returns AgentTool
 * instances ready to pass to MarrowAgent via `extraTools`.
 *
 * @param opts - Configuration options
 * @returns Array of AgentTool instances (7 or 8 depending on messagesTable)
 */
export declare function createHippoTools(opts: HippoOptions): AgentTool<any>[];
export type { EmbeddingProviderConfig } from "./providers/embedding.js";
export { createEmbeddingProvider } from "./providers/embedding.js";
export type { LlmProviderConfig } from "./providers/llm.js";
export { createLlmProvider } from "./providers/llm.js";
export { initSchema, verifyEmbeddingModel } from "./schema.js";
export type { Chunk, ChunkKind, EmbedFn, HippoOptions, LlmClient, MemoryBlock, RememberFactAction, RememberFactsResult, SearchResult, } from "./types.js";
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,6BAA6B,CAAC;AAa7D,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAE/C;;;;;;;;GAQG;AAEH,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,YAAY,GAAG,SAAS,CAAC,GAAG,CAAC,EAAE,CA4BrE;AAED,YAAY,EAAE,uBAAuB,EAAE,MAAM,0BAA0B,CAAC;AACxE,OAAO,EAAE,uBAAuB,EAAE,MAAM,0BAA0B,CAAC;AACnE,YAAY,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAC5D,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AACvD,OAAO,EAAE,UAAU,EAAE,oBAAoB,EAAE,MAAM,aAAa,CAAC;AAE/D,YAAY,EACX,KAAK,EACL,SAAS,EACT,OAAO,EACP,YAAY,EACZ,SAAS,EACT,WAAW,EACX,kBAAkB,EAClB,mBAAmB,EACnB,YAAY,GACZ,MAAM,YAAY,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { prepareStatements } from "./db.js";
|
|
2
|
+
import { initSchema } from "./schema.js";
|
|
3
|
+
import { createAppendMemoryBlockTool, createForgetMemoryTool, createRecallConversationTool, createRecallMemoriesTool, createRecallMemoryBlockTool, createRememberFactsTool, createReplaceMemoryBlockTool, createStoreMemoryTool, } from "./tools/index.js";
|
|
4
|
+
/**
 * Create all hippo memory tools for an agent.
 *
 * Initializes the SQLite schema (idempotent) and returns AgentTool
 * instances ready to pass to MarrowAgent via `extraTools`.
 *
 * @param opts - Configuration options
 * @returns Array of AgentTool instances (7 or 8 depending on messagesTable)
 */
// biome-ignore lint/suspicious/noExplicitAny: AgentTool generics don't unify across different parameter schemas
export function createHippoTools(opts) {
    const { agentId, db, embed, llm, messagesTable } = opts;
    initSchema(db);
    const stmts = prepareStatements(db);
    const common = { agentId, stmts };
    const withEmbed = { ...common, embed };
    // biome-ignore lint/suspicious/noExplicitAny: AgentTool generics don't unify across different parameter schemas
    const tools = [
        createRememberFactsTool({ ...withEmbed, db, llm }),
        createStoreMemoryTool(withEmbed),
        createRecallMemoriesTool(withEmbed),
        createRecallMemoryBlockTool(common),
        createReplaceMemoryBlockTool(common),
        createAppendMemoryBlockTool(common),
        createForgetMemoryTool({ ...withEmbed, db }),
    ];
    // The conversation-recall tool only exists when a messages table is configured.
    if (messagesTable) {
        tools.push(createRecallConversationTool({ db, messagesTable }));
    }
    return tools;
}
|
|
37
|
+
export { createEmbeddingProvider } from "./providers/embedding.js";
|
|
38
|
+
export { createLlmProvider } from "./providers/llm.js";
|
|
39
|
+
export { initSchema, verifyEmbeddingModel } from "./schema.js";
|
|
40
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,iBAAiB,EAAE,MAAM,SAAS,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EACN,2BAA2B,EAC3B,sBAAsB,EACtB,4BAA4B,EAC5B,wBAAwB,EACxB,2BAA2B,EAC3B,uBAAuB,EACvB,4BAA4B,EAC5B,qBAAqB,GACrB,MAAM,kBAAkB,CAAC;AAG1B;;;;;;;;GAQG;AACH,gHAAgH;AAChH,MAAM,UAAU,gBAAgB,CAAC,IAAkB;IAClD,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACpB,MAAM,KAAK,GAAG,iBAAiB,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAEzC,MAAM,MAAM,GAAG,EAAE,OAAO,EAAE,IAAI,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC;IAChD,MAAM,SAAS,GAAG,EAAE,GAAG,MAAM,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC;IAEnD,gHAAgH;IAChH,MAAM,KAAK,GAAqB;QAC/B,uBAAuB,CAAC,EAAE,GAAG,SAAS,EAAE,EAAE,EAAE,IAAI,CAAC,EAAE,EAAE,GAAG,EAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACrE,qBAAqB,CAAC,SAAS,CAAC;QAChC,wBAAwB,CAAC,SAAS,CAAC;QACnC,2BAA2B,CAAC,MAAM,CAAC;QACnC,4BAA4B,CAAC,MAAM,CAAC;QACpC,2BAA2B,CAAC,MAAM,CAAC;QACnC,sBAAsB,CAAC,EAAE,GAAG,SAAS,EAAE,EAAE,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC;KACrD,CAAC;IAEF,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC;QACxB,KAAK,CAAC,IAAI,CACT,4BAA4B,CAAC;YAC5B,EAAE,EAAE,IAAI,CAAC,EAAE;YACX,aAAa,EAAE,IAAI,CAAC,aAAa;SACjC,CAAC,CACF,CAAC;IACH,CAAC;IAED,OAAO,KAAK,CAAC;AACd,CAAC;AAGD,OAAO,EAAE,uBAAuB,EAAE,MAAM,0BAA0B,CAAC;AAEnE,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AACvD,OAAO,EAAE,UAAU,EAAE,oBAAoB,EAAE,MAAM,aAAa,CAAC"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
// NOTE(review): compiler-generated declaration file (dist output with
// sourceMappingURL) — edit src/providers/embedding.ts instead of this file.
/**
 * OpenAI-compatible embedding provider.
 *
 * Calls any API that implements the `/v1/embeddings` endpoint
 * (OpenAI, OpenRouter, Ollama, vLLM, etc.) and returns a Float32Array
 * matching hippo's EmbedFn signature.
 */
import type { EmbedFn } from "../types.js";
/** Configuration for the embedding provider. */
export interface EmbeddingProviderConfig {
    /** API key for authentication (sent as Bearer token). */
    readonly apiKey: string;
    /** Base URL of the OpenAI-compatible API (e.g. "https://api.openai.com/v1"). */
    readonly baseUrl: string;
    /** Embedding dimensions to request (optional, model-dependent). */
    readonly dimensions?: number;
    /** Model identifier (e.g. "text-embedding-3-small"). */
    readonly model: string;
}
/**
 * Create an embedding function from OpenAI-compatible API config.
 *
 * @param config - Provider configuration
 * @returns EmbedFn that calls the API and returns Float32Array
 */
export declare function createEmbeddingProvider(config: EmbeddingProviderConfig): EmbedFn;
//# sourceMappingURL=embedding.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"embedding.d.ts","sourceRoot":"","sources":["../../src/providers/embedding.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAE3C,gDAAgD;AAChD,MAAM,WAAW,uBAAuB;IACvC,yDAAyD;IACzD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,gFAAgF;IAChF,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,mEAAmE;IACnE,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;IAC7B,wDAAwD;IACxD,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;CACvB;AASD;;;;;GAKG;AACH,wBAAgB,uBAAuB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CA8BhF"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
 * OpenAI-compatible embedding provider.
 *
 * Calls any API that implements the `/v1/embeddings` endpoint
 * (OpenAI, OpenRouter, Ollama, vLLM, etc.) and returns a Float32Array
 * matching hippo's EmbedFn signature.
 */
/**
 * Create an embedding function from OpenAI-compatible API config.
 *
 * @param config - Provider configuration
 * @returns EmbedFn that calls the API and returns Float32Array
 * @throws Error on non-2xx responses or when the response body lacks an
 *   embedding vector
 */
export function createEmbeddingProvider(config) {
    // Normalize trailing slashes so baseUrl works with or without "/".
    const url = `${config.baseUrl.replace(/\/+$/, "")}/embeddings`;
    return async (text, signal) => {
        const body = {
            input: text,
            model: config.model,
        };
        // Only send dimensions when explicitly configured — some backends
        // reject the field outright.
        if (config.dimensions !== undefined) {
            body.dimensions = config.dimensions;
        }
        const response = await fetch(url, {
            body: JSON.stringify(body),
            headers: {
                Authorization: `Bearer ${config.apiKey}`,
                "Content-Type": "application/json",
            },
            method: "POST",
            signal,
        });
        if (!response.ok) {
            const errorText = await response.text().catch(() => "unknown error");
            throw new Error(`Embedding API error ${response.status}: ${errorText}`);
        }
        const json = (await response.json());
        // A 2xx response with a missing/malformed payload would otherwise
        // surface as an opaque TypeError on json.data[0]; fail descriptively.
        const embedding = json?.data?.[0]?.embedding;
        if (!Array.isArray(embedding)) {
            throw new Error("Embedding API returned no embedding vector");
        }
        return new Float32Array(embedding);
    };
}
|
|
41
|
+
//# sourceMappingURL=embedding.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"embedding.js","sourceRoot":"","sources":["../../src/providers/embedding.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAuBH;;;;;GAKG;AACH,MAAM,UAAU,uBAAuB,CAAC,MAA+B;IACtE,MAAM,GAAG,GAAG,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,aAAa,CAAC;IAE/D,OAAO,KAAK,EAAE,IAAY,EAAE,MAAoB,EAAyB,EAAE;QAC1E,MAAM,IAAI,GAA4B;YACrC,KAAK,EAAE,IAAI;YACX,KAAK,EAAE,MAAM,CAAC,KAAK;SACnB,CAAC;QACF,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;YACrC,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC;QACrC,CAAC;QAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;YACjC,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC1B,OAAO,EAAE;gBACR,aAAa,EAAE,UAAU,MAAM,CAAC,MAAM,EAAE;gBACxC,cAAc,EAAE,kBAAkB;aAClC;YACD,MAAM,EAAE,MAAM;YACd,MAAM;SACN,CAAC,CAAC;QAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;YAClB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,eAAe,CAAC,CAAC;YACrE,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE,CAAC,CAAC;QACzE,CAAC;QAED,MAAM,IAAI,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAsB,CAAC;QAC1D,OAAO,IAAI,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;IACjD,CAAC,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
// NOTE(review): compiler-generated declaration file (dist output with
// sourceMappingURL) — edit src/providers/llm.ts instead of this file.
/**
 * OpenAI-compatible LLM provider.
 *
 * Calls any API implementing `/v1/chat/completions` and implements
 * hippo's LlmClient interface. Used for fact extraction and conflict
 * classification — always non-streaming.
 */
import type { LlmClient } from "../types.js";
/** Configuration for the LLM provider. */
export interface LlmProviderConfig {
    /** API key for authentication (sent as Bearer token). */
    readonly apiKey: string;
    /** Base URL of the OpenAI-compatible API (e.g. "https://openrouter.ai/api/v1"). */
    readonly baseUrl: string;
    /** Model identifier (e.g. "google/gemini-flash-2.0"). */
    readonly model: string;
    /** Maximum tokens for completion (default: 2048). */
    readonly maxTokens?: number;
    /** Temperature (default: 0). */
    readonly temperature?: number;
}
/**
 * Create an LlmClient from OpenAI-compatible API config.
 *
 * @param config - Provider configuration
 * @returns LlmClient instance
 */
export declare function createLlmProvider(config: LlmProviderConfig): LlmClient;
//# sourceMappingURL=llm.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/providers/llm.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAGH,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAE7C,0CAA0C;AAC1C,MAAM,WAAW,iBAAiB;IACjC,yDAAyD;IACzD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,mFAAmF;IACnF,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,yDAAyD;IACzD,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,qDAAqD;IACrD,QAAQ,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IAC5B,gCAAgC;IAChC,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;CAC9B;AA6CD;;;;;GAKG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CAsCtE"}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI-compatible LLM provider.
|
|
3
|
+
*
|
|
4
|
+
* Calls any API implementing `/v1/chat/completions` and implements
|
|
5
|
+
* hippo's LlmClient interface. Used for fact extraction and conflict
|
|
6
|
+
* classification — always non-streaming.
|
|
7
|
+
*/
|
|
8
|
+
/**
 * Convert pi-ai Message[] to OpenAI chat messages.
 *
 * Only "user" and "assistant" messages are forwarded. String content is
 * passed through; content-block arrays are reduced to their text blocks
 * joined with newlines. Messages whose text comes out empty are dropped.
 *
 * @param messages - pi-ai Message array
 * @returns OpenAI-compatible chat messages
 */
function toOpenAIMessages(messages) {
    const converted = [];
    for (const message of messages) {
        const { role, content } = message;
        // Tool/system/other roles are not forwarded to the completion API.
        if (role !== "user" && role !== "assistant") {
            continue;
        }
        const text = typeof content === "string"
            ? content
            : content
                .filter((block) => block.type === "text")
                .map((block) => block.text)
                .join("\n");
        // Skip messages that carried no text (e.g. image-only content).
        if (text) {
            converted.push({ content: text, role });
        }
    }
    return converted;
}
|
|
37
|
+
/**
 * Create an LlmClient from OpenAI-compatible API config.
 *
 * @param config - Provider configuration
 * @returns LlmClient instance
 */
export function createLlmProvider(config) {
    // Strip trailing slashes so the joined path never contains "//".
    const endpoint = `${config.baseUrl.replace(/\/+$/, "")}/chat/completions`;
    return {
        /**
         * Run one non-streaming chat completion.
         *
         * @param messages - Conversation history (pi-ai Message[])
         * @param systemPrompt - Sent as the leading "system" message
         * @param signal - Optional AbortSignal for cancellation
         * @returns The first choice's message content
         * @throws Error when the API responds with a non-2xx status
         */
        async complete(messages, systemPrompt, signal) {
            const payload = {
                max_tokens: config.maxTokens ?? 2048,
                messages: [
                    { content: systemPrompt, role: "system" },
                    ...toOpenAIMessages(messages),
                ],
                model: config.model,
                temperature: config.temperature ?? 0,
            };
            const response = await fetch(endpoint, {
                body: JSON.stringify(payload),
                headers: {
                    Authorization: `Bearer ${config.apiKey}`,
                    "Content-Type": "application/json",
                },
                method: "POST",
                signal,
            });
            if (!response.ok) {
                // Best-effort error body; never let .text() failure mask the status.
                const detail = await response.text().catch(() => "unknown error");
                throw new Error(`LLM API error ${response.status}: ${detail}`);
            }
            const body = await response.json();
            return body.choices[0].message.content;
        },
    };
}
|
|
74
|
+
//# sourceMappingURL=llm.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llm.js","sourceRoot":"","sources":["../../src/providers/llm.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AA8BH;;;;;;;GAOG;AACH,SAAS,gBAAgB,CAAC,QAA4B;IACrD,MAAM,MAAM,GAAkB,EAAE,CAAC;IACjC,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrD,IAAI,IAAY,CAAC;YACjB,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACrC,IAAI,GAAG,GAAG,CAAC,OAAO,CAAC;YACpB,CAAC;iBAAM,CAAC;gBACP,IAAI,GAAG,GAAG,CAAC,OAAO;qBAChB,MAAM,CACN,CAAC,CAAkC,EAAuC,EAAE,CAC3E,CAAC,CAAC,IAAI,KAAK,MAAM,CAClB;qBACA,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;qBAClB,IAAI,CAAC,IAAI,CAAC,CAAC;YACd,CAAC;YACD,IAAI,IAAI,EAAE,CAAC;gBACV,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;YAChD,CAAC;QACF,CAAC;IACF,CAAC;IACD,OAAO,MAAM,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAyB;IAC1D,MAAM,GAAG,GAAG,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,mBAAmB,CAAC;IAErE,OAAO;QACN,KAAK,CAAC,QAAQ,CACb,QAAmB,EACnB,YAAoB,EACpB,MAAoB;YAEpB,MAAM,YAAY,GAAkB;gBACnC,EAAE,OAAO,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACzC,GAAG,gBAAgB,CAAC,QAAQ,CAAC;aAC7B,CAAC;YAEF,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;gBACjC,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACpB,UAAU,EAAE,MAAM,CAAC,SAAS,IAAI,IAAI;oBACpC,QAAQ,EAAE,YAAY;oBACtB,KAAK,EAAE,MAAM,CAAC,KAAK;oBACnB,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,CAAC;iBACpC,CAAC;gBACF,OAAO,EAAE;oBACR,aAAa,EAAE,UAAU,MAAM,CAAC,MAAM,EAAE;oBACxC,cAAc,EAAE,kBAAkB;iBAClC;gBACD,MAAM,EAAE,MAAM;gBACd,MAAM;aACN,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBAClB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,eAAe,CAAC,CAAC;gBACrE,MAAM,IAAI,KAAK,CAAC,iBAAiB,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE,CAAC,CAAC;YACnE,CAAC;YAED,MAAM,IAAI,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAA2B,CAAC;YAC/D,OAAO,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC;QACxC,CAAC;KACD,CAAC;AACH,CAAC"}
|
package/dist/schema.d.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { Database } from "better-sqlite3";
/**
 * Apply WAL mode, busy timeout, and create tables/indexes.
 *
 * Idempotent: all DDL uses IF NOT EXISTS, so this is safe to call on
 * every startup.
 *
 * @param db - better-sqlite3 Database handle
 */
export declare function initSchema(db: Database): void;
/**
 * Verify or record the embedding model for this database.
 *
 * On first call, stores the model name. On subsequent calls, verifies
 * the configured model matches what's stored. Throws if there's a
 * mismatch — mixing embedding models produces incompatible vectors.
 *
 * @param db - better-sqlite3 Database handle
 * @param model - Embedding model identifier (e.g. "text-embedding-3-small")
 * @throws Error if model doesn't match the one already stored
 */
export declare function verifyEmbeddingModel(db: Database, model: string): void;
//# sourceMappingURL=schema.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../src/schema.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAgD/C;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,EAAE,EAAE,QAAQ,GAAG,IAAI,CAI7C;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,oBAAoB,CAAC,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI,CAgBtE"}
|
package/dist/schema.js
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
const SCHEMA_SQL = `
|
|
2
|
+
CREATE TABLE IF NOT EXISTS hippo_meta (
|
|
3
|
+
key TEXT PRIMARY KEY,
|
|
4
|
+
value TEXT NOT NULL
|
|
5
|
+
);
|
|
6
|
+
|
|
7
|
+
CREATE TABLE IF NOT EXISTS chunks (
|
|
8
|
+
id TEXT PRIMARY KEY,
|
|
9
|
+
agent_id TEXT NOT NULL,
|
|
10
|
+
content TEXT NOT NULL,
|
|
11
|
+
content_hash TEXT,
|
|
12
|
+
embedding BLOB NOT NULL,
|
|
13
|
+
metadata TEXT,
|
|
14
|
+
kind TEXT NOT NULL CHECK(kind IN ('fact', 'memory')),
|
|
15
|
+
running_intensity REAL NOT NULL DEFAULT 0.5,
|
|
16
|
+
encounter_count INTEGER NOT NULL DEFAULT 1,
|
|
17
|
+
access_count INTEGER NOT NULL DEFAULT 0,
|
|
18
|
+
last_accessed_at TEXT NOT NULL,
|
|
19
|
+
superseded_by TEXT,
|
|
20
|
+
created_at TEXT NOT NULL
|
|
21
|
+
);
|
|
22
|
+
|
|
23
|
+
CREATE TABLE IF NOT EXISTS memory_blocks (
|
|
24
|
+
agent_id TEXT NOT NULL,
|
|
25
|
+
key TEXT NOT NULL,
|
|
26
|
+
value TEXT NOT NULL,
|
|
27
|
+
updated_at TEXT NOT NULL,
|
|
28
|
+
PRIMARY KEY (agent_id, key)
|
|
29
|
+
);
|
|
30
|
+
|
|
31
|
+
CREATE UNIQUE INDEX IF NOT EXISTS idx_chunks_memory_dedup
|
|
32
|
+
ON chunks(agent_id, content_hash) WHERE kind = 'memory';
|
|
33
|
+
|
|
34
|
+
CREATE INDEX IF NOT EXISTS idx_chunks_agent_kind
|
|
35
|
+
ON chunks(agent_id, kind);
|
|
36
|
+
|
|
37
|
+
CREATE INDEX IF NOT EXISTS idx_chunks_last_accessed
|
|
38
|
+
ON chunks(agent_id, last_accessed_at);
|
|
39
|
+
|
|
40
|
+
CREATE INDEX IF NOT EXISTS idx_chunks_superseded
|
|
41
|
+
ON chunks(superseded_by) WHERE superseded_by IS NOT NULL;
|
|
42
|
+
|
|
43
|
+
CREATE INDEX IF NOT EXISTS idx_chunks_created_at
|
|
44
|
+
ON chunks(agent_id, created_at);
|
|
45
|
+
`;
|
|
46
|
+
/**
 * Apply WAL mode, busy timeout, and create tables/indexes.
 *
 * Idempotent — every statement in SCHEMA_SQL uses IF NOT EXISTS.
 *
 * @param db - better-sqlite3 Database handle
 */
export function initSchema(db) {
    // WAL permits concurrent readers; the busy timeout makes writers
    // wait (up to 5s) on contention instead of failing immediately.
    for (const pragma of ["journal_mode=WAL", "busy_timeout=5000"]) {
        db.pragma(pragma);
    }
    db.exec(SCHEMA_SQL);
}
|
|
56
|
+
/**
 * Verify or record the embedding model for this database.
 *
 * On first call, stores the model name. On subsequent calls, verifies
 * the configured model matches what's stored. Throws if there's a
 * mismatch — mixing embedding models produces incompatible vectors.
 *
 * @param db - better-sqlite3 Database handle
 * @param model - Embedding model identifier (e.g. "text-embedding-3-small")
 * @throws Error if model doesn't match the one already stored
 */
export function verifyEmbeddingModel(db, model) {
    const stored = db
        .prepare("SELECT value FROM hippo_meta WHERE key = 'embedding_model'")
        .get();
    if (stored) {
        // A model was already recorded: the configuration must match it exactly.
        if (stored.value !== model) {
            throw new Error(`Embedding model mismatch: database was created with "${stored.value}" but server is configured with "${model}". ` +
                "Mixing models produces incompatible vectors. Re-embed the database or change the config.");
        }
        return;
    }
    // First use of this database: pin the model for all future checks.
    db.prepare("INSERT INTO hippo_meta (key, value) VALUES ('embedding_model', ?)").run(model);
}
|
|
78
|
+
//# sourceMappingURL=schema.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"schema.js","sourceRoot":"","sources":["../src/schema.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4ClB,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,EAAY;IACtC,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC;IAC9B,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC;IAC/B,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AACrB,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,oBAAoB,CAAC,EAAY,EAAE,KAAa;IAC/D,MAAM,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,4DAA4D,CAAC,CAAC,GAAG,EAE5E,CAAC;IAEb,IAAI,CAAC,GAAG,EAAE,CAAC;QACV,EAAE,CAAC,OAAO,CAAC,mEAAmE,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAC3F,OAAO;IACR,CAAC;IAED,IAAI,GAAG,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QACzB,MAAM,IAAI,KAAK,CACd,wDAAwD,GAAG,CAAC,KAAK,oCAAoC,KAAK,KAAK;YAC9G,0FAA0F,CAC3F,CAAC;IACH,CAAC;AACF,CAAC"}
|