hammer-ai 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/index.d.ts +2231 -0
- package/dist/index.js +7216 -0
- package/dist/index.js.map +1 -0
- package/dist/node.d.ts +102 -0
- package/dist/node.js +47 -0
- package/dist/node.js.map +1 -0
- package/package.json +59 -0
package/dist/node.d.ts
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Base Memory Layer — shared infrastructure for memory compaction.
|
|
3
|
+
*
|
|
4
|
+
* Both workspace agents and the Voice Agent use episodic compaction with
|
|
5
|
+
* structured state extraction. This module provides the shared skeleton:
|
|
6
|
+
*
|
|
7
|
+
* - Generic base class (`BaseMemoryLayer<TState, TMessage>`)
|
|
8
|
+
* - Shared types: `MemoryMessage`, `CompactionCursor`, `MemoryStorage`, etc.
|
|
9
|
+
* - Token estimation strategy (pluggable: tiktoken vs char-based)
|
|
10
|
+
* - Shared algorithms: sliding-window, buildMessages, triggerCompaction
|
|
11
|
+
*
|
|
12
|
+
* Core Invariants (enforced by this base):
|
|
13
|
+
* 1. Raw history is NEVER modified in-place; entries are pruned after compaction
|
|
14
|
+
* 2. Compaction is a state transition, not a view transform
|
|
15
|
+
* 3. Compressed state is NEVER recursively summarized
|
|
16
|
+
* 4. State compaction removes obsolete structure but preserves meaning
|
|
17
|
+
* 5. buildMessages is a PURE function (cache writes are benign memoization)
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
/** Base message stored in raw history. */
interface MemoryMessage {
    /** Stable unique identifier for this message. */
    id: string;
    /** Chat role of the message author. */
    role: "system" | "user" | "assistant" | "tool";
    /** Plain-text body of the message. */
    content: string;
    /** Creation time (presumably epoch milliseconds — TODO confirm unit against writers). */
    timestamp: number;
    /** Monotonic message counter. */
    turn: number;
    /** Cached token count for this message (computed once at creation). */
    tokenCount: number;
}
|
|
31
|
+
/**
 * Strategy for estimating token counts.
 *
 * Returned counts are estimates; exactness depends on the implementation.
 *
 * - Workspace agents share the same estimator strategy by default
 * - CharTokenEstimator is the cross-platform baseline implementation
 */
interface TokenEstimator {
    /** Estimate token count for a string. */
    estimateTokens(text: string): number;
    /** Dispose any resources (e.g. tiktoken WASM). No-op by default. */
    dispose?(): void;
}
|
|
43
|
+
|
|
44
|
+
/**
 * Precise token estimator using tiktoken (cl100k_base encoding).
 *
 * Separated into its own module so environments can opt into the tiktoken WASM
 * dependency explicitly. Browser runtimes should reach it through async
 * estimator helpers instead of importing it from hot paths.
 */
declare class TiktokenEstimator implements TokenEstimator {
    /** Underlying tiktoken encoder; null after {@link dispose} has been called. */
    private encoder;
    /**
     * @throws Error if the tiktoken encoder cannot be initialized (e.g. the
     *   WASM runtime is unavailable in this environment).
     */
    constructor();
    /**
     * Count tokens in `text` with the tiktoken encoder.
     * @throws Error if called after {@link dispose}.
     */
    estimateTokens(text: string): number;
    /** Free the underlying WASM encoder. Safe to call more than once. */
    dispose(): void;
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Shared conversation history utilities.
|
|
61
|
+
*
|
|
62
|
+
* Provides:
|
|
63
|
+
* - `atomicWriteJSON()` — crash-safe file writing (tmp + rename)
|
|
64
|
+
* - `ConversationHistoryEntry` — debug-friendly message format
|
|
65
|
+
* - `formatConversationHistory()` — converts MemoryMessages to debug entries
|
|
66
|
+
*
|
|
67
|
+
* Used by:
|
|
68
|
+
* - Voice agent (Bun server writes conversation_history.json alongside voice-memory.json)
|
|
69
|
+
* - Hammer agent (ConversationManager, UnifiedAgent)
|
|
70
|
+
*
|
|
71
|
+
* NOTE: Server-only module (requires Node/Bun fs). Not imported by browser/mobile code.
|
|
72
|
+
*/
|
|
73
|
+
|
|
74
|
+
/**
 * Atomically write JSON data to a file using tmp + rename.
 *
 * 1. Creates parent directories if needed
 * 2. Writes to `filePath.tmp`
 * 3. Renames to `filePath` (atomic on most filesystems)
 *
 * Prevents data corruption if the process crashes mid-write.
 *
 * @param filePath Destination path; parent directories are created recursively.
 * @param data Any JSON-serializable value (written pretty-printed, 2-space indent).
 * @returns Promise that resolves once the rename has completed.
 */
declare function atomicWriteJSON(filePath: string, data: unknown): Promise<void>;
|
|
84
|
+
/** Debug-friendly conversation history entry. */
interface ConversationHistoryEntry {
    /** Monotonic turn counter copied from the source MemoryMessage. */
    turn: number;
    /** Author role (e.g. "system" | "user" | "assistant" | "tool"). */
    role: string;
    /** Plain-text message body. */
    content: string;
    /** Creation timestamp copied from the source message. */
    timestamp: number;
    /** Original message id, when present on the source message. */
    id?: string;
    /** Cached token count, when present on the source message. */
    tokenCount?: number;
}
|
|
93
|
+
/**
 * Convert raw MemoryMessages to a flat, debug-friendly array.
 *
 * The output is designed for easy reading in a JSON viewer — no nested
 * compressed-state or cursor objects. Each entry is one message with
 * its turn number, role, content, and timestamp.
 *
 * @param messages Raw history entries, in order. The input array is not mutated.
 * @returns One flat entry per input message.
 */
declare function formatConversationHistory(messages: MemoryMessage[]): ConversationHistoryEntry[];
|
|
101
|
+
|
|
102
|
+
export { type ConversationHistoryEntry, TiktokenEstimator, atomicWriteJSON, formatConversationHistory };
|
package/dist/node.js
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { encoding_for_model } from 'tiktoken';
import { mkdir, writeFile, rename, rm } from 'fs/promises';
import { dirname } from 'path';
|
|
4
|
+
|
|
5
|
+
// src/tiktoken-estimator.ts
|
|
6
|
+
// Token estimator backed by the tiktoken WASM encoder (gpt-4 / cl100k_base).
var TiktokenEstimator = class {
  // Underlying Tiktoken handle; becomes null once dispose() runs.
  encoder;
  constructor() {
    try {
      this.encoder = encoding_for_model("gpt-4");
    } catch (err) {
      // Surface a descriptive error: WASM init is the usual failure mode.
      const reason = err instanceof Error ? err.message : err;
      throw new Error(
        `Failed to initialize tiktoken encoder (WASM may not be available in this environment): ${reason}`
      );
    }
  }
  // Exact token count for `text`; throws once the encoder is freed.
  estimateTokens(text) {
    const enc = this.encoder;
    if (!enc) throw new Error("TiktokenEstimator has been disposed");
    return enc.encode(text).length;
  }
  // Release the WASM encoder. Idempotent: later calls are no-ops.
  dispose() {
    this.encoder?.free();
    this.encoder = null;
  }
};
|
|
28
|
+
// Atomically write JSON data to a file using tmp + rename.
//
// 1. Creates parent directories if needed
// 2. Writes pretty-printed JSON to `filePath.tmp`
// 3. Renames to `filePath` (atomic on most filesystems)
//
// Prevents data corruption if the process crashes mid-write. On failure the
// temporary file is removed (best-effort) so stray `.tmp` files do not
// accumulate next to the target; the original error is rethrown.
async function atomicWriteJSON(filePath, data) {
  await mkdir(dirname(filePath), { recursive: true });
  const tmpFile = filePath + ".tmp";
  try {
    await writeFile(tmpFile, JSON.stringify(data, null, 2), "utf-8");
    await rename(tmpFile, filePath);
  } catch (err) {
    // Best-effort cleanup; never mask the original failure.
    await rm(tmpFile, { force: true }).catch(() => {});
    throw err;
  }
}
|
|
34
|
+
// Convert raw MemoryMessages to a flat, debug-friendly array.
// Each entry carries turn/role/content/timestamp, plus id and tokenCount
// when defined on the source message. The input array is not mutated.
function formatConversationHistory(messages) {
  return messages.map((m) => ({
    turn: m.turn,
    role: m.role,
    content: m.content,
    timestamp: m.timestamp,
    // Explicit undefined checks: a truthiness test would silently drop
    // valid falsy values such as tokenCount === 0 or an empty-string id.
    ...(m.id !== undefined ? { id: m.id } : {}),
    ...(m.tokenCount !== undefined ? { tokenCount: m.tokenCount } : {})
  }));
}
|
|
44
|
+
|
|
45
|
+
export { TiktokenEstimator, atomicWriteJSON, formatConversationHistory };
|
|
46
|
+
//# sourceMappingURL=node.js.map
|
package/dist/node.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/tiktoken-estimator.ts","../src/conversation-history-writer.ts"],"names":[],"mappings":";;;;;AAWO,IAAM,oBAAN,MAAkD;AAAA,EAC7C,OAAA;AAAA,EAER,WAAA,GAAc;AACV,IAAA,IAAI;AACA,MAAA,IAAA,CAAK,OAAA,GAAU,mBAAmB,OAAO,CAAA;AAAA,IAC7C,SAAS,GAAA,EAAK;AACV,MAAA,MAAM,IAAI,KAAA;AAAA,QACN,CAAA,uFAAA,EAA0F,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,UAAU,GAAG,CAAA;AAAA,OACtI;AAAA,IACJ;AAAA,EACJ;AAAA,EAEA,eAAe,IAAA,EAAsB;AACjC,IAAA,IAAI,CAAC,IAAA,CAAK,OAAA,EAAS,MAAM,IAAI,MAAM,qCAAqC,CAAA;AACxE,IAAA,OAAO,IAAA,CAAK,OAAA,CAAQ,MAAA,CAAO,IAAI,CAAA,CAAE,MAAA;AAAA,EACrC;AAAA,EAEA,OAAA,GAAgB;AACZ,IAAA,IAAI,KAAK,OAAA,EAAS;AACd,MAAA,IAAA,CAAK,QAAQ,IAAA,EAAK;AAClB,MAAA,IAAA,CAAK,OAAA,GAAU,IAAA;AAAA,IACnB;AAAA,EACJ;AACJ;ACDA,eAAsB,eAAA,CAClB,UACA,IAAA,EACa;AACb,EAAA,MAAM,MAAM,OAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAClD,EAAA,MAAM,UAAU,QAAA,GAAW,MAAA;AAC3B,EAAA,MAAM,SAAA,CAAU,SAAS,IAAA,CAAK,SAAA,CAAU,MAAM,IAAA,EAAM,CAAC,GAAG,OAAO,CAAA;AAC/D,EAAA,MAAM,MAAA,CAAO,SAAS,QAAQ,CAAA;AAClC;AAuBO,SAAS,0BACZ,QAAA,EAC0B;AAC1B,EAAA,OAAO,QAAA,CAAS,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,IACxB,MAAM,CAAA,CAAE,IAAA;AAAA,IACR,MAAM,CAAA,CAAE,IAAA;AAAA,IACR,SAAS,CAAA,CAAE,OAAA;AAAA,IACX,WAAW,CAAA,CAAE,SAAA;AAAA,IACb,GAAI,EAAE,EAAA,GAAK,EAAE,IAAI,CAAA,CAAE,EAAA,KAAO,EAAC;AAAA,IAC3B,GAAI,EAAE,UAAA,GAAa,EAAE,YAAY,CAAA,CAAE,UAAA,KAAe;AAAC,GACvD,CAAE,CAAA;AACN","file":"node.js","sourcesContent":["/**\n * Precise token estimator using tiktoken (cl100k_base encoding).\n *\n * Separated into its own module so environments can opt into the tiktoken WASM\n * dependency explicitly. 
Browser runtimes should reach it through async\n * estimator helpers instead of importing it from hot paths.\n */\n\nimport { encoding_for_model, type Tiktoken } from \"tiktoken\"\nimport type { TokenEstimator } from \"./memory-layer\"\n\nexport class TiktokenEstimator implements TokenEstimator {\n private encoder: Tiktoken | null\n\n constructor() {\n try {\n this.encoder = encoding_for_model(\"gpt-4\")\n } catch (err) {\n throw new Error(\n `Failed to initialize tiktoken encoder (WASM may not be available in this environment): ${err instanceof Error ? err.message : err}`,\n )\n }\n }\n\n estimateTokens(text: string): number {\n if (!this.encoder) throw new Error(\"TiktokenEstimator has been disposed\")\n return this.encoder.encode(text).length\n }\n\n dispose(): void {\n if (this.encoder) {\n this.encoder.free()\n this.encoder = null\n }\n }\n}\n","/**\n * Shared conversation history utilities.\n *\n * Provides:\n * - `atomicWriteJSON()` — crash-safe file writing (tmp + rename)\n * - `ConversationHistoryEntry` — debug-friendly message format\n * - `formatConversationHistory()` — converts MemoryMessages to debug entries\n *\n * Used by:\n * - Voice agent (Bun server writes conversation_history.json alongside voice-memory.json)\n * - Hammer agent (ConversationManager, UnifiedAgent)\n *\n * NOTE: Server-only module (requires Node/Bun fs). Not imported by browser/mobile code.\n */\n\n/// <reference types=\"node\" />\n\nimport { writeFile, rename, mkdir } from \"fs/promises\"\nimport { dirname } from \"path\"\nimport type { MemoryMessage } from \"./memory-layer\"\n\n// ============================================================================\n// Atomic File Writing\n// ============================================================================\n\n/**\n * Atomically write JSON data to a file using tmp + rename.\n *\n * 1. Creates parent directories if needed\n * 2. Writes to `filePath.tmp`\n * 3. 
Renames to `filePath` (atomic on most filesystems)\n *\n * Prevents data corruption if the process crashes mid-write.\n */\nexport async function atomicWriteJSON(\n filePath: string,\n data: unknown,\n): Promise<void> {\n await mkdir(dirname(filePath), { recursive: true })\n const tmpFile = filePath + \".tmp\"\n await writeFile(tmpFile, JSON.stringify(data, null, 2), \"utf-8\")\n await rename(tmpFile, filePath)\n}\n\n// ============================================================================\n// Conversation History\n// ============================================================================\n\n/** Debug-friendly conversation history entry. */\nexport interface ConversationHistoryEntry {\n turn: number\n role: string\n content: string\n timestamp: number\n id?: string\n tokenCount?: number\n}\n\n/**\n * Convert raw MemoryMessages to a flat, debug-friendly array.\n *\n * The output is designed for easy reading in a JSON viewer — no nested\n * compressed-state or cursor objects. Each entry is one message with\n * its turn number, role, content, and timestamp.\n */\nexport function formatConversationHistory(\n messages: MemoryMessage[],\n): ConversationHistoryEntry[] {\n return messages.map((m) => ({\n turn: m.turn,\n role: m.role,\n content: m.content,\n timestamp: m.timestamp,\n ...(m.id ? { id: m.id } : {}),\n ...(m.tokenCount ? { tokenCount: m.tokenCount } : {}),\n }))\n}\n"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "hammer-ai",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Infrastructure for building tool-calling bash-format chat agents: LLM client, agent loop, memory, tool registry, validation, and web runtime.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"import": "./dist/index.js"
|
|
12
|
+
},
|
|
13
|
+
"./node": {
|
|
14
|
+
"types": "./dist/node.d.ts",
|
|
15
|
+
"import": "./dist/node.js"
|
|
16
|
+
}
|
|
17
|
+
},
|
|
18
|
+
"files": [
|
|
19
|
+
"dist",
|
|
20
|
+
"README.md",
|
|
21
|
+
"LICENSE"
|
|
22
|
+
],
|
|
23
|
+
"scripts": {
|
|
24
|
+
"clean": "rm -rf dist",
|
|
25
|
+
"build": "tsup",
|
|
26
|
+
"dev": "tsup --watch",
|
|
27
|
+
"prepublishOnly": "npm run clean && npm run build"
|
|
28
|
+
},
|
|
29
|
+
"keywords": [
|
|
30
|
+
"ai",
|
|
31
|
+
"agent",
|
|
32
|
+
"llm",
|
|
33
|
+
"tool-calling",
|
|
34
|
+
"bash",
|
|
35
|
+
"agentic-loop",
|
|
36
|
+
"memory",
|
|
37
|
+
"compaction"
|
|
38
|
+
],
|
|
39
|
+
"author": "",
|
|
40
|
+
"license": "MIT",
|
|
41
|
+
"dependencies": {
|
|
42
|
+
"xstate": "5",
|
|
43
|
+
"zod": "^3.25.76"
|
|
44
|
+
},
|
|
45
|
+
"devDependencies": {
|
|
46
|
+
"@types/node": "^22.0.0",
|
|
47
|
+
"tiktoken": "^1.0.22",
|
|
48
|
+
"tsup": "^8.4.0",
|
|
49
|
+
"typescript": "^5.9.0"
|
|
50
|
+
},
|
|
51
|
+
"peerDependencies": {
|
|
52
|
+
"tiktoken": "^1.0.22"
|
|
53
|
+
},
|
|
54
|
+
"peerDependenciesMeta": {
|
|
55
|
+
"tiktoken": {
|
|
56
|
+
"optional": true
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
}
|