portable-agent-layer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +80 -0
- package/assets/agents/claude-researcher.md +43 -0
- package/assets/agents/investigative-researcher.md +44 -0
- package/assets/agents/multi-perspective-researcher.md +43 -0
- package/assets/skills/analyze-pdf.md +40 -0
- package/assets/skills/analyze-youtube.md +35 -0
- package/assets/skills/council.md +43 -0
- package/assets/skills/create-skill.md +31 -0
- package/assets/skills/extract-entities.md +63 -0
- package/assets/skills/extract-wisdom.md +18 -0
- package/assets/skills/first-principles.md +17 -0
- package/assets/skills/fyzz-chat-api.md +43 -0
- package/assets/skills/reflect.md +87 -0
- package/assets/skills/research.md +68 -0
- package/assets/skills/review.md +19 -0
- package/assets/skills/summarize.md +15 -0
- package/assets/templates/AGENTS.md.template +45 -0
- package/assets/templates/telos/BELIEFS.md +4 -0
- package/assets/templates/telos/CHALLENGES.md +4 -0
- package/assets/templates/telos/GOALS.md +12 -0
- package/assets/templates/telos/IDEAS.md +4 -0
- package/assets/templates/telos/IDENTITY.md +4 -0
- package/assets/templates/telos/LEARNED.md +4 -0
- package/assets/templates/telos/MISSION.md +4 -0
- package/assets/templates/telos/MODELS.md +4 -0
- package/assets/templates/telos/NARRATIVES.md +4 -0
- package/assets/templates/telos/PROJECTS.md +7 -0
- package/assets/templates/telos/STRATEGIES.md +4 -0
- package/bin/pal +24 -0
- package/bin/pal.bat +8 -0
- package/bin/pal.ps1 +30 -0
- package/package.json +82 -0
- package/src/cli/index.ts +344 -0
- package/src/cli/install.ts +86 -0
- package/src/cli/uninstall.ts +45 -0
- package/src/hooks/LoadContext.ts +41 -0
- package/src/hooks/SecurityValidator.ts +52 -0
- package/src/hooks/SkillGuard.ts +41 -0
- package/src/hooks/StopOrchestrator.ts +35 -0
- package/src/hooks/UserPromptOrchestrator.ts +35 -0
- package/src/hooks/handlers/backup.ts +41 -0
- package/src/hooks/handlers/failure.ts +136 -0
- package/src/hooks/handlers/rating.ts +409 -0
- package/src/hooks/handlers/relationship.ts +113 -0
- package/src/hooks/handlers/session-name.ts +121 -0
- package/src/hooks/handlers/synthesis.ts +109 -0
- package/src/hooks/handlers/tab.ts +8 -0
- package/src/hooks/handlers/update-counts.ts +151 -0
- package/src/hooks/handlers/work-learning.ts +183 -0
- package/src/hooks/handlers/work-session.ts +58 -0
- package/src/hooks/lib/claude-md.ts +121 -0
- package/src/hooks/lib/context.ts +433 -0
- package/src/hooks/lib/entities.ts +304 -0
- package/src/hooks/lib/export.ts +76 -0
- package/src/hooks/lib/inference.ts +91 -0
- package/src/hooks/lib/learning-category.ts +14 -0
- package/src/hooks/lib/log.ts +53 -0
- package/src/hooks/lib/models.ts +16 -0
- package/src/hooks/lib/paths.ts +80 -0
- package/src/hooks/lib/relationship.ts +135 -0
- package/src/hooks/lib/security.ts +122 -0
- package/src/hooks/lib/session-names.ts +247 -0
- package/src/hooks/lib/setup.ts +189 -0
- package/src/hooks/lib/signal-trends.ts +117 -0
- package/src/hooks/lib/signals.ts +37 -0
- package/src/hooks/lib/stdin.ts +18 -0
- package/src/hooks/lib/stop.ts +155 -0
- package/src/hooks/lib/time.ts +19 -0
- package/src/hooks/lib/token-usage.ts +42 -0
- package/src/hooks/lib/transcript.ts +76 -0
- package/src/hooks/lib/wisdom.ts +48 -0
- package/src/hooks/lib/work-tracking.ts +193 -0
- package/src/hooks/setup-check.ts +42 -0
- package/src/targets/claude/install.ts +145 -0
- package/src/targets/claude/uninstall.ts +101 -0
- package/src/targets/lib.ts +337 -0
- package/src/targets/opencode/install.ts +59 -0
- package/src/targets/opencode/plugin.ts +328 -0
- package/src/targets/opencode/uninstall.ts +57 -0
- package/src/tools/entity-save.ts +110 -0
- package/src/tools/export.ts +34 -0
- package/src/tools/fyzz-api.ts +104 -0
- package/src/tools/import.ts +123 -0
- package/src/tools/pattern-synthesis.ts +435 -0
- package/src/tools/pdf-download.ts +102 -0
- package/src/tools/relationship-reflect.ts +362 -0
- package/src/tools/session-summary.ts +206 -0
- package/src/tools/token-cost.ts +301 -0
- package/src/tools/youtube-analyze.ts +105 -0
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Entity Collision Detection — deduplicates people, companies, links,
|
|
3
|
+
* and sources across extracted content, assigning stable UUIDs and
|
|
4
|
+
* tracking occurrences to build a knowledge graph.
|
|
5
|
+
*
|
|
6
|
+
* Ported from ~/git/Personal_AI_Infrastructure/Packs/Utilities/src/Parser/Utils/collision-detection.ts
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { existsSync, readFileSync, renameSync, writeFileSync } from "node:fs";
|
|
10
|
+
import { resolve } from "node:path";
|
|
11
|
+
import { ensureDir, paths } from "./paths";
|
|
12
|
+
|
|
13
|
+
// --- Types ---
|
|
14
|
+
|
|
15
|
+
/** A deduplicated person, keyed in the index by normalized (lowercased, trimmed) name. */
export interface PersonEntity {
  /** Stable UUID assigned on first sighting. */
  id: string;
  /** Original (non-normalized) name as first extracted. */
  name: string;
  /** ISO-8601 timestamp of first sighting. */
  first_seen: string;
  /** Number of distinct sources this entity appeared in. */
  occurrences: number;
  /** Ids of the sources that referenced this entity (deduplicated). */
  source_ids: string[];
}

/** A deduplicated company, keyed by lowercased domain when known, else by normalized name. */
export interface CompanyEntity {
  /** Stable UUID assigned on first sighting. */
  id: string;
  /** Original company name as first extracted. */
  name: string;
  /** Website domain, when extraction provided one. */
  domain: string | null;
  /** ISO-8601 timestamp of first sighting. */
  first_seen: string;
  /** Number of distinct sources this entity appeared in. */
  occurrences: number;
  /** Ids of the sources that referenced this entity (deduplicated). */
  source_ids: string[];
}

/** A deduplicated URL, keyed by normalized URL (lowercased, trimmed, no trailing slash). */
export interface LinkEntity {
  /** Stable UUID assigned on first sighting. */
  id: string;
  /** Original URL as first extracted. */
  url: string;
  /** ISO-8601 timestamp of first sighting. */
  first_seen: string;
  /** Number of distinct sources this entity appeared in. */
  occurrences: number;
  /** Ids of the sources that referenced this entity (deduplicated). */
  source_ids: string[];
}

/** A deduplicated content source, keyed by normalized URL, or "author|publication" when no URL. */
export interface SourceEntity {
  /** Stable UUID assigned on first sighting. */
  id: string;
  url: string | null;
  author: string | null;
  publication: string | null;
  /** ISO-8601 timestamp of first sighting. */
  first_seen: string;
  /** Number of distinct sources this entity appeared in. */
  occurrences: number;
  /** Ids of the sources that referenced this entity (deduplicated). */
  source_ids: string[];
}

/** On-disk index of all known entities, persisted as entity-index.json. */
export interface EntityIndex {
  /** Schema version (currently "1.1.0"); links/sources were added after 1.0. */
  version: string;
  /** ISO-8601 timestamp, refreshed on every save. */
  last_updated: string;
  people: Record<string, PersonEntity>;
  companies: Record<string, CompanyEntity>;
  links: Record<string, LinkEntity>;
  sources: Record<string, SourceEntity>;
}
|
|
58
|
+
|
|
59
|
+
// --- Normalization ---
|
|
60
|
+
|
|
61
|
+
export function normalizeName(name: string): string {
|
|
62
|
+
return name.toLowerCase().trim();
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
export function normalizeCompanyKey(name: string, domain: string | null): string {
|
|
66
|
+
return domain ? domain.toLowerCase().trim() : normalizeName(name);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
export function normalizeUrl(url: string): string {
|
|
70
|
+
return url.toLowerCase().trim().replace(/\/$/, "");
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
export function normalizeSourceKey(
|
|
74
|
+
url: string | null,
|
|
75
|
+
author: string | null,
|
|
76
|
+
publication: string | null
|
|
77
|
+
): string {
|
|
78
|
+
if (url) return normalizeUrl(url);
|
|
79
|
+
const a = author ? normalizeName(author) : "";
|
|
80
|
+
const p = publication ? normalizeName(publication) : "";
|
|
81
|
+
return `${a}|${p}`;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// --- Index I/O ---
|
|
85
|
+
|
|
86
|
+
/** Location of the persisted index: <entities dir>/entity-index.json (dir created on demand). */
function defaultIndexPath(): string {
  return resolve(ensureDir(paths.entities()), "entity-index.json");
}
|
|
89
|
+
|
|
90
|
+
function emptyIndex(): EntityIndex {
|
|
91
|
+
return {
|
|
92
|
+
version: "1.1.0",
|
|
93
|
+
last_updated: new Date().toISOString(),
|
|
94
|
+
people: {},
|
|
95
|
+
companies: {},
|
|
96
|
+
links: {},
|
|
97
|
+
sources: {},
|
|
98
|
+
};
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
/** Migrate older indexes that lack links/sources. */
|
|
102
|
+
function ensureShape(index: EntityIndex): EntityIndex {
|
|
103
|
+
if (!index.links) index.links = {};
|
|
104
|
+
if (!index.sources) index.sources = {};
|
|
105
|
+
return index;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
export function loadEntityIndex(filepath?: string): EntityIndex {
|
|
109
|
+
const p = filepath ?? defaultIndexPath();
|
|
110
|
+
if (!existsSync(p)) return emptyIndex();
|
|
111
|
+
try {
|
|
112
|
+
return ensureShape(JSON.parse(readFileSync(p, "utf-8")));
|
|
113
|
+
} catch {
|
|
114
|
+
return emptyIndex();
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
export function saveEntityIndex(index: EntityIndex, filepath?: string): void {
|
|
119
|
+
const p = filepath ?? defaultIndexPath();
|
|
120
|
+
const tempPath = `${p}.tmp`;
|
|
121
|
+
index.last_updated = new Date().toISOString();
|
|
122
|
+
writeFileSync(tempPath, JSON.stringify(index, null, 2), "utf-8");
|
|
123
|
+
renameSync(tempPath, p);
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// --- Deduplication ---
|
|
127
|
+
|
|
128
|
+
export function getOrCreatePerson(
|
|
129
|
+
person: { name: string },
|
|
130
|
+
index: EntityIndex,
|
|
131
|
+
sourceId: string
|
|
132
|
+
): string {
|
|
133
|
+
const key = normalizeName(person.name);
|
|
134
|
+
const existing = index.people[key];
|
|
135
|
+
|
|
136
|
+
if (existing) {
|
|
137
|
+
if (!existing.source_ids.includes(sourceId)) {
|
|
138
|
+
existing.occurrences++;
|
|
139
|
+
existing.source_ids.push(sourceId);
|
|
140
|
+
}
|
|
141
|
+
return existing.id;
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
const id = crypto.randomUUID();
|
|
145
|
+
index.people[key] = {
|
|
146
|
+
id,
|
|
147
|
+
name: person.name,
|
|
148
|
+
first_seen: new Date().toISOString(),
|
|
149
|
+
occurrences: 1,
|
|
150
|
+
source_ids: [sourceId],
|
|
151
|
+
};
|
|
152
|
+
return id;
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
export function getOrCreateCompany(
|
|
156
|
+
company: { name: string; domain: string | null },
|
|
157
|
+
index: EntityIndex,
|
|
158
|
+
sourceId: string
|
|
159
|
+
): string {
|
|
160
|
+
const key = normalizeCompanyKey(company.name, company.domain);
|
|
161
|
+
const existing = index.companies[key];
|
|
162
|
+
|
|
163
|
+
if (existing) {
|
|
164
|
+
if (!existing.source_ids.includes(sourceId)) {
|
|
165
|
+
existing.occurrences++;
|
|
166
|
+
existing.source_ids.push(sourceId);
|
|
167
|
+
}
|
|
168
|
+
return existing.id;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
const id = crypto.randomUUID();
|
|
172
|
+
index.companies[key] = {
|
|
173
|
+
id,
|
|
174
|
+
name: company.name,
|
|
175
|
+
domain: company.domain,
|
|
176
|
+
first_seen: new Date().toISOString(),
|
|
177
|
+
occurrences: 1,
|
|
178
|
+
source_ids: [sourceId],
|
|
179
|
+
};
|
|
180
|
+
return id;
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
export function getOrCreateLink(
|
|
184
|
+
link: { url: string },
|
|
185
|
+
index: EntityIndex,
|
|
186
|
+
sourceId: string
|
|
187
|
+
): string {
|
|
188
|
+
const key = normalizeUrl(link.url);
|
|
189
|
+
const existing = index.links[key];
|
|
190
|
+
|
|
191
|
+
if (existing) {
|
|
192
|
+
if (!existing.source_ids.includes(sourceId)) {
|
|
193
|
+
existing.occurrences++;
|
|
194
|
+
existing.source_ids.push(sourceId);
|
|
195
|
+
}
|
|
196
|
+
return existing.id;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
const id = crypto.randomUUID();
|
|
200
|
+
index.links[key] = {
|
|
201
|
+
id,
|
|
202
|
+
url: link.url,
|
|
203
|
+
first_seen: new Date().toISOString(),
|
|
204
|
+
occurrences: 1,
|
|
205
|
+
source_ids: [sourceId],
|
|
206
|
+
};
|
|
207
|
+
return id;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
export function getOrCreateSource(
|
|
211
|
+
source: { url: string | null; author: string | null; publication: string | null },
|
|
212
|
+
index: EntityIndex,
|
|
213
|
+
sourceId: string
|
|
214
|
+
): string {
|
|
215
|
+
const key = normalizeSourceKey(source.url, source.author, source.publication);
|
|
216
|
+
const existing = index.sources[key];
|
|
217
|
+
|
|
218
|
+
if (existing) {
|
|
219
|
+
if (!existing.source_ids.includes(sourceId)) {
|
|
220
|
+
existing.occurrences++;
|
|
221
|
+
existing.source_ids.push(sourceId);
|
|
222
|
+
}
|
|
223
|
+
return existing.id;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
const id = crypto.randomUUID();
|
|
227
|
+
index.sources[key] = {
|
|
228
|
+
id,
|
|
229
|
+
url: source.url,
|
|
230
|
+
author: source.author,
|
|
231
|
+
publication: source.publication,
|
|
232
|
+
first_seen: new Date().toISOString(),
|
|
233
|
+
occurrences: 1,
|
|
234
|
+
source_ids: [sourceId],
|
|
235
|
+
};
|
|
236
|
+
return id;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
/** Check if a URL has already been parsed (exists in the links index). */
|
|
240
|
+
export function isUrlAlreadyParsed(url: string, index: EntityIndex): boolean {
|
|
241
|
+
const key = normalizeUrl(url);
|
|
242
|
+
const link = index.links[key];
|
|
243
|
+
return !!link && link.source_ids.length > 0;
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
/** Get the first source_id that referenced this URL, or null. */
|
|
247
|
+
export function getExistingContentId(url: string, index: EntityIndex): string | null {
|
|
248
|
+
const key = normalizeUrl(url);
|
|
249
|
+
const link = index.links[key];
|
|
250
|
+
return link?.source_ids[0] ?? null;
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
// --- Batch processing ---
|
|
254
|
+
|
|
255
|
+
/**
 * Deduplicate one batch of extracted entities against the persisted index.
 *
 * Loads the index (from indexPath, or the default location), assigns every
 * person/company/link/source its stable id via the getOrCreate* helpers
 * (which count one occurrence per distinct sourceId), saves the updated
 * index, and returns the input records with an `id` field attached.
 *
 * NOTE(review): load → mutate → save is not safe under concurrent writers;
 * the atomic rename in saveEntityIndex prevents torn files, not lost updates.
 */
export function processEntities(
  extractedData: {
    people: Array<{ name: string; [key: string]: unknown }>;
    companies: Array<{
      name: string;
      domain: string | null;
      [key: string]: unknown;
    }>;
    // links/sources are optional: older extractors don't emit them.
    links?: Array<{ url: string; [key: string]: unknown }>;
    sources?: Array<{
      url: string | null;
      author: string | null;
      publication: string | null;
      [key: string]: unknown;
    }>;
  },
  sourceId: string,
  indexPath?: string
): {
  people: Array<{ id: string; [key: string]: unknown }>;
  companies: Array<{ id: string; [key: string]: unknown }>;
  links: Array<{ id: string; [key: string]: unknown }>;
  sources: Array<{ id: string; [key: string]: unknown }>;
} {
  const index = loadEntityIndex(indexPath);

  // Spread first, then id — so a stable id always wins over any `id` the
  // extractor may have put on the record.
  const people = extractedData.people.map((person) => ({
    ...person,
    id: getOrCreatePerson(person, index, sourceId),
  }));

  const companies = extractedData.companies.map((company) => ({
    ...company,
    id: getOrCreateCompany(company, index, sourceId),
  }));

  const links = (extractedData.links ?? []).map((link) => ({
    ...link,
    id: getOrCreateLink(link, index, sourceId),
  }));

  const sources = (extractedData.sources ?? []).map((source) => ({
    ...source,
    id: getOrCreateSource(source, index, sourceId),
  }));

  // Persist only after every collection has been folded in.
  saveEntityIndex(index, indexPath);

  return { people, companies, links, sources };
}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared export logic — zips user state directories.
|
|
3
|
+
* Used by tools/export.ts (manual) and handlers/backup.ts (automatic).
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { existsSync, readdirSync } from "node:fs";
|
|
7
|
+
import { relative, resolve } from "node:path";
|
|
8
|
+
import AdmZip from "adm-zip";
|
|
9
|
+
import { palHome } from "./paths";
|
|
10
|
+
|
|
11
|
+
/** Directories within PAL_HOME that contain user state worth exporting. */
|
|
12
|
+
const EXPORT_DIRS = ["telos", "memory"];
|
|
13
|
+
|
|
14
|
+
/** Subdirectories/files to skip during export. */
|
|
15
|
+
const SKIP_PATTERNS = ["memory/downloads"];
|
|
16
|
+
|
|
17
|
+
function shouldSkip(relPath: string): boolean {
|
|
18
|
+
return SKIP_PATTERNS.some((p) => relPath.startsWith(p));
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/** Recursively collect all files under a directory, returning paths relative to root. */
|
|
22
|
+
function walkDir(dir: string, root: string): string[] {
|
|
23
|
+
const files: string[] = [];
|
|
24
|
+
if (!existsSync(dir)) return files;
|
|
25
|
+
|
|
26
|
+
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
27
|
+
const fullPath = resolve(dir, entry.name);
|
|
28
|
+
const relPath = relative(root, fullPath);
|
|
29
|
+
|
|
30
|
+
if (shouldSkip(relPath)) continue;
|
|
31
|
+
|
|
32
|
+
if (entry.isDirectory()) {
|
|
33
|
+
files.push(...walkDir(fullPath, root));
|
|
34
|
+
} else if (entry.isFile()) {
|
|
35
|
+
files.push(relPath);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
return files;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
/** Collect the list of user state files to export. */
|
|
42
|
+
export function collectExportFiles(): string[] {
|
|
43
|
+
const root = palHome();
|
|
44
|
+
const files: string[] = [];
|
|
45
|
+
|
|
46
|
+
for (const dir of EXPORT_DIRS) {
|
|
47
|
+
files.push(...walkDir(resolve(root, dir), root));
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
return files;
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
/** Zip the given files and write to outputPath. Returns file count. */
|
|
54
|
+
export function exportZip(outputPath: string): number {
|
|
55
|
+
const root = palHome();
|
|
56
|
+
const files = collectExportFiles();
|
|
57
|
+
if (files.length === 0) return 0;
|
|
58
|
+
|
|
59
|
+
const zip = new AdmZip();
|
|
60
|
+
for (const file of files) {
|
|
61
|
+
const fullPath = resolve(root, file);
|
|
62
|
+
const dir = file.includes("/") ? file.slice(0, file.lastIndexOf("/")) : "";
|
|
63
|
+
zip.addLocalFile(fullPath, dir);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
zip.writeZip(outputPath);
|
|
67
|
+
return files.length;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/** Generate a timestamped filename prefix. */
|
|
71
|
+
export function timestamp(): string {
|
|
72
|
+
return new Date()
|
|
73
|
+
.toISOString()
|
|
74
|
+
.replace(/[-:T.]/g, "")
|
|
75
|
+
.slice(0, 14);
|
|
76
|
+
}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Lightweight Anthropic API wrapper used by session naming, failure capture, etc.
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { HAIKU_MODEL } from "./models";
|
|
6
|
+
|
|
7
|
+
/** Options for a single one-shot completion request. */
export interface InferenceOptions {
  /** Optional system prompt. */
  system?: string;
  /** User message content (required). */
  user: string;
  /** Model id; defaults to HAIKU_MODEL. */
  model?: string;
  /** Response budget; defaults to 200. */
  maxTokens?: number;
  /** Abort deadline in ms; defaults to 5000. */
  timeout?: number;
  /** JSON schema for structured output — guarantees valid JSON matching the schema */
  jsonSchema?: Record<string, unknown>;
}

/** Outcome of an inference call; success=false covers missing key, HTTP errors, timeouts, and empty responses. */
export interface InferenceResult {
  success: boolean;
  /** Trimmed text of the first content block, when success is true. */
  output?: string;
  /** Token counts, when the API reported them (may be present even on empty output). */
  usage?: { inputTokens: number; outputTokens: number };
}
|
|
22
|
+
|
|
23
|
+
/**
 * One-shot call to the Anthropic Messages API.
 *
 * Returns { success: false } — never throws — when ANTHROPIC_API_KEY is
 * unset, the request times out or fails, the HTTP status is non-OK, or the
 * response has no text content. Errors are appended to the debug log.
 */
export async function inference(opts: InferenceOptions): Promise<InferenceResult> {
  const apiKey = process.env.ANTHROPIC_API_KEY;
  if (!apiKey) return { success: false };

  const {
    system,
    user,
    model = HAIKU_MODEL,
    maxTokens = 200,
    timeout = 5000,
    jsonSchema,
  } = opts;

  try {
    // Abort the fetch if it exceeds the timeout budget; the abort surfaces
    // as a thrown error and lands in the catch below.
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeout);

    const body: Record<string, unknown> = {
      model,
      max_tokens: maxTokens,
      messages: [{ role: "user", content: user }],
    };
    if (system) body.system = system;
    if (jsonSchema) {
      // NOTE(review): structured-output request shape — confirm the field
      // name (`output_config`) against the current Messages API reference.
      body.output_config = {
        format: { type: "json_schema", schema: jsonSchema },
      };
    }

    const response = await fetch("https://api.anthropic.com/v1/messages", {
      method: "POST",
      headers: {
        "x-api-key": apiKey,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json",
      },
      body: JSON.stringify(body),
      signal: controller.signal,
    });

    clearTimeout(timer);

    if (!response.ok) {
      // Lazy import keeps the logger out of the hot path and avoids a
      // static circular dependency with the log module.
      const { logError } = await import("./log");
      const errBody = await response.text().catch(() => "");
      logError("inference", `HTTP ${response.status}: ${errBody.slice(0, 200)}`);
      return { success: false };
    }

    const data = (await response.json()) as Record<string, unknown>;
    // Token usage is optional in the result: only forwarded when both
    // counts are present.
    const rawUsage = data?.usage as
      | { input_tokens?: number; output_tokens?: number }
      | undefined;
    const usage =
      rawUsage?.input_tokens != null && rawUsage?.output_tokens != null
        ? { inputTokens: rawUsage.input_tokens, outputTokens: rawUsage.output_tokens }
        : undefined;

    // Only the first content block is read; an empty/missing text is a
    // failure, but usage (if any) is still reported for cost tracking.
    const content = data?.content as Array<{ text?: string }> | undefined;
    const text = content?.[0]?.text?.trim();
    if (!text) return { success: false, usage };

    return { success: true, output: text, usage };
  } catch (err) {
    const { logError } = await import("./log");
    logError("inference", err);
    return { success: false };
  }
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Learning categorization: SYSTEM (tooling/infra) vs ALGORITHM (approach/design).
|
|
3
|
+
* Used by both learning.ts and work-learning.ts handlers.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
export type LearningCategory = "system" | "algorithm";
|
|
7
|
+
|
|
8
|
+
const SYSTEM_KEYWORDS =
|
|
9
|
+
/\b(config|setting|install|deploy|build|lint|format|biome|typescript|tsc|hook|plugin|ci|cd|pipeline|docker|package|dependency|migration|schema|database|env|permission|security|git|commit|branch|merge)\b/i;
|
|
10
|
+
|
|
11
|
+
/** Classify a learning based on title and summary content */
|
|
12
|
+
export function categorizeLearning(...texts: string[]): LearningCategory {
|
|
13
|
+
return SYSTEM_KEYWORDS.test(texts.join(" ")) ? "system" : "algorithm";
|
|
14
|
+
}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Simple file-based debug logger for PAL hooks.
|
|
3
|
+
* Writes to memory/state/debug.log — rotated on each session start.
|
|
4
|
+
*
|
|
5
|
+
* Only writes when PAL_DEBUG=1 or when called via logError (always logged).
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { appendFileSync, existsSync, renameSync, statSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
import { paths } from "./paths";
|
|
11
|
+
|
|
12
|
+
// Log file lives under memory/state; paths.state() creates the dir on access.
const LOG_FILE = resolve(paths.state(), "debug.log");
const MAX_LOG_SIZE = 50_000; // ~50KB, then rotate
|
|
14
|
+
|
|
15
|
+
function timestamp(): string {
|
|
16
|
+
return new Date().toISOString().replace("T", " ").slice(0, 19);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
function rotateIfNeeded(): void {
|
|
20
|
+
try {
|
|
21
|
+
if (existsSync(LOG_FILE) && statSync(LOG_FILE).size > MAX_LOG_SIZE) {
|
|
22
|
+
const prev = `${LOG_FILE}.prev`;
|
|
23
|
+
writeFileSync(prev, "");
|
|
24
|
+
// Swap: current → prev, start fresh
|
|
25
|
+
const { renameSync } = require("node:fs");
|
|
26
|
+
renameSync(LOG_FILE, prev);
|
|
27
|
+
}
|
|
28
|
+
} catch {
|
|
29
|
+
/* non-critical */
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/** Log a debug message (only when PAL_DEBUG=1) */
|
|
34
|
+
export function logDebug(source: string, message: string): void {
|
|
35
|
+
if (process.env.PAL_DEBUG !== "1") return;
|
|
36
|
+
rotateIfNeeded();
|
|
37
|
+
try {
|
|
38
|
+
appendFileSync(LOG_FILE, `[${timestamp()}] DEBUG ${source}: ${message}\n`);
|
|
39
|
+
} catch {
|
|
40
|
+
/* non-critical */
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/** Log an error (always written, regardless of PAL_DEBUG) */
|
|
45
|
+
export function logError(source: string, error: unknown): void {
|
|
46
|
+
rotateIfNeeded();
|
|
47
|
+
const msg = error instanceof Error ? `${error.message}\n${error.stack}` : String(error);
|
|
48
|
+
try {
|
|
49
|
+
appendFileSync(LOG_FILE, `[${timestamp()}] ERROR ${source}: ${msg}\n`);
|
|
50
|
+
} catch {
|
|
51
|
+
/* non-critical */
|
|
52
|
+
}
|
|
53
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Single source of truth for model IDs and pricing.
 */

/** Default lightweight model used for background inference (session naming etc.). */
export const HAIKU_MODEL = "claude-haiku-4-5-20251001";

/** Pricing per million tokens (USD) — from https://platform.claude.com/docs/en/about-claude/pricing */
export const MODEL_PRICING: Record<
  string,
  { input: number; output: number; cacheWrite: number; cacheRead: number }
> = {
  [HAIKU_MODEL]: { input: 1.0, output: 5.0, cacheWrite: 1.25, cacheRead: 0.1 },
  "claude-opus-4-6": { input: 5.0, output: 25.0, cacheWrite: 6.25, cacheRead: 0.5 },
  "claude-sonnet-4-6": { input: 3.0, output: 15.0, cacheWrite: 3.75, cacheRead: 0.3 },
  "claude-sonnet-4-5": { input: 3.0, output: 15.0, cacheWrite: 3.75, cacheRead: 0.3 },
};
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { existsSync, mkdirSync } from "node:fs";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
import { resolve } from "node:path";
|
|
4
|
+
|
|
5
|
+
/**
 * Root of the PAL package (engine code + shipped assets).
 * In repo mode: the repo root.
 * In package mode: the global node_modules package directory.
 *
 * Resolved as three levels up from this file (src/hooks/lib → package root),
 * overridable via PAL_PKG.
 *
 * NOTE(review): import.meta.dir is a Bun API (Node uses import.meta.dirname);
 * this assumes the hooks always run under Bun — confirm.
 */
export function palPkg(): string {
  return process.env.PAL_PKG || resolve(import.meta.dir, "..", "..", "..");
}
|
|
13
|
+
|
|
14
|
+
/**
 * Root of the user's personal state (telos, memory, etc.).
 * In repo mode: same as palPkg() (the repo root).
 * In package mode: ~/.pal/ (or PAL_HOME override).
 *
 * Repo mode is detected by the presence of .palroot next to the package.
 * This file is not included in the npm package, so it only exists in cloned repos.
 */
export function palHome(): string {
  // Explicit override wins over any detection.
  if (process.env.PAL_HOME) return process.env.PAL_HOME;

  const pkgRoot = palPkg();
  if (existsSync(resolve(pkgRoot, ".palroot"))) return pkgRoot;

  return resolve(homedir(), ".pal");
}
|
|
30
|
+
|
|
31
|
+
/** Ensure a directory exists, creating it recursively if needed */
|
|
32
|
+
export function ensureDir(path: string): string {
|
|
33
|
+
if (!existsSync(path)) mkdirSync(path, { recursive: true });
|
|
34
|
+
return path;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/** Resolve a path relative to the user's PAL home (palHome()). */
function home(...segments: string[]): string {
  return resolve(palHome(), ...segments);
}
|
|
41
|
+
|
|
42
|
+
/** Resolve a path relative to the package root (palPkg()). */
function pkg(...segments: string[]): string {
  return resolve(palPkg(), ...segments);
}
|
|
46
|
+
|
|
47
|
+
// User state paths (in PAL_HOME / repo root).
// Entries wrapped in ensureDir() create their directory on first access;
// telos/memory are plain lookups and do NOT create anything.
export const paths = {
  telos: () => home("telos"),
  memory: () => home("memory"),
  learning: () => ensureDir(home("memory", "learning")),
  signals: () => ensureDir(home("memory", "signals")),
  state: () => ensureDir(home("memory", "state")),
  research: () => ensureDir(home("memory", "research")),
  wisdom: () => ensureDir(home("memory", "wisdom", "frames")),
  wisdomState: () => ensureDir(home("memory", "wisdom", "state")),
  relationship: () => ensureDir(home("memory", "relationship")),
  entities: () => ensureDir(home("memory", "entities")),
  failures: () => ensureDir(home("memory", "learning", "failures")),
  sessionLearning: () => ensureDir(home("memory", "learning", "session")),
  synthesis: () => ensureDir(home("memory", "learning", "synthesis")),
  backups: () => ensureDir(home("backups")),
} as const;
|
|
64
|
+
|
|
65
|
+
// Platform directories (env override or cross-platform defaults).
// homedir() is captured once at module load; the PAL_* env vars are read on
// each call, so tests can override them per-invocation.
const h = homedir();
export const platform = {
  claudeDir: () => process.env.PAL_CLAUDE_DIR || resolve(h, ".claude"),
  opencodeDir: () => process.env.PAL_OPENCODE_DIR || resolve(h, ".config", "opencode"),
  agentsDir: () => process.env.PAL_AGENTS_DIR || resolve(h, ".agents"),
} as const;
|
|
72
|
+
|
|
73
|
+
// Engine/asset paths (in PAL_PKG / repo root) — read-only files shipped with
// the package; none of these create directories.
export const assets = {
  skills: () => pkg("assets", "skills"),
  agents: () => pkg("assets", "agents"),
  hooks: () => pkg("src", "hooks"),
  telosTemplates: () => pkg("assets", "templates", "telos"),
  agentsMdTemplate: () => pkg("assets", "templates", "AGENTS.md.template"),
} as const;
|