omegon 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.gitattributes +3 -0
- package/AGENTS.md +16 -0
- package/LICENSE +15 -0
- package/README.md +289 -0
- package/bin/pi.mjs +30 -0
- package/extensions/00-secrets/index.ts +1126 -0
- package/extensions/01-auth/auth.ts +401 -0
- package/extensions/01-auth/index.ts +289 -0
- package/extensions/auto-compact.ts +42 -0
- package/extensions/bootstrap/deps.ts +291 -0
- package/extensions/bootstrap/index.ts +811 -0
- package/extensions/chronos/chronos.sh +487 -0
- package/extensions/chronos/index.ts +148 -0
- package/extensions/cleave/assessment.ts +754 -0
- package/extensions/cleave/bridge.ts +31 -0
- package/extensions/cleave/conflicts.ts +250 -0
- package/extensions/cleave/dispatcher.ts +808 -0
- package/extensions/cleave/guardrails.ts +426 -0
- package/extensions/cleave/index.ts +3121 -0
- package/extensions/cleave/lifecycle-emitter.ts +20 -0
- package/extensions/cleave/openspec.ts +811 -0
- package/extensions/cleave/planner.ts +260 -0
- package/extensions/cleave/review.ts +579 -0
- package/extensions/cleave/skills.ts +355 -0
- package/extensions/cleave/types.ts +261 -0
- package/extensions/cleave/workspace.ts +861 -0
- package/extensions/cleave/worktree.ts +243 -0
- package/extensions/core-renderers.ts +253 -0
- package/extensions/dashboard/context-gauge.ts +58 -0
- package/extensions/dashboard/file-watch.ts +14 -0
- package/extensions/dashboard/footer.ts +1145 -0
- package/extensions/dashboard/git.ts +185 -0
- package/extensions/dashboard/index.ts +478 -0
- package/extensions/dashboard/memory-audit.ts +34 -0
- package/extensions/dashboard/overlay-data.ts +705 -0
- package/extensions/dashboard/overlay.ts +365 -0
- package/extensions/dashboard/render-utils.ts +54 -0
- package/extensions/dashboard/types.ts +191 -0
- package/extensions/dashboard/uri-helper.ts +45 -0
- package/extensions/debug.ts +69 -0
- package/extensions/defaults.ts +282 -0
- package/extensions/design-tree/dashboard-state.ts +161 -0
- package/extensions/design-tree/design-card.ts +362 -0
- package/extensions/design-tree/index.ts +2130 -0
- package/extensions/design-tree/lifecycle-emitter.ts +41 -0
- package/extensions/design-tree/tree.ts +1607 -0
- package/extensions/design-tree/types.ts +163 -0
- package/extensions/distill.ts +127 -0
- package/extensions/effort/index.ts +395 -0
- package/extensions/effort/tiers.ts +146 -0
- package/extensions/effort/types.ts +105 -0
- package/extensions/lib/git-state.ts +227 -0
- package/extensions/lib/local-models.ts +157 -0
- package/extensions/lib/model-preferences.ts +51 -0
- package/extensions/lib/model-routing.ts +720 -0
- package/extensions/lib/operator-fallback.ts +205 -0
- package/extensions/lib/operator-profile.ts +360 -0
- package/extensions/lib/slash-command-bridge.ts +253 -0
- package/extensions/lib/typebox-helpers.ts +16 -0
- package/extensions/local-inference/index.ts +727 -0
- package/extensions/mcp-bridge/README.md +220 -0
- package/extensions/mcp-bridge/index.ts +951 -0
- package/extensions/mcp-bridge/lib.ts +365 -0
- package/extensions/mcp-bridge/mcp.json +3 -0
- package/extensions/mcp-bridge/package.json +11 -0
- package/extensions/model-budget.ts +752 -0
- package/extensions/offline-driver.ts +403 -0
- package/extensions/openspec/archive-gate.ts +164 -0
- package/extensions/openspec/branch-cleanup.ts +64 -0
- package/extensions/openspec/dashboard-state.ts +50 -0
- package/extensions/openspec/index.ts +1917 -0
- package/extensions/openspec/lifecycle-emitter.ts +65 -0
- package/extensions/openspec/lifecycle-files.ts +70 -0
- package/extensions/openspec/lifecycle.ts +50 -0
- package/extensions/openspec/reconcile.ts +187 -0
- package/extensions/openspec/spec.ts +1385 -0
- package/extensions/openspec/types.ts +98 -0
- package/extensions/project-memory/DESIGN-global-mind.md +198 -0
- package/extensions/project-memory/README.md +202 -0
- package/extensions/project-memory/api-types.ts +382 -0
- package/extensions/project-memory/compaction-policy.ts +29 -0
- package/extensions/project-memory/core.ts +164 -0
- package/extensions/project-memory/embeddings.ts +230 -0
- package/extensions/project-memory/extraction-v2.ts +861 -0
- package/extensions/project-memory/factstore.ts +2177 -0
- package/extensions/project-memory/index.ts +3459 -0
- package/extensions/project-memory/injection-metrics.ts +91 -0
- package/extensions/project-memory/jsonl-io.ts +12 -0
- package/extensions/project-memory/lifecycle.ts +331 -0
- package/extensions/project-memory/migration.ts +293 -0
- package/extensions/project-memory/package.json +9 -0
- package/extensions/project-memory/sci-renderers.ts +7 -0
- package/extensions/project-memory/template.ts +103 -0
- package/extensions/project-memory/triggers.ts +52 -0
- package/extensions/project-memory/types.ts +102 -0
- package/extensions/render/composition/fonts/Inter-Bold.ttf +0 -0
- package/extensions/render/composition/fonts/Inter-Regular.ttf +0 -0
- package/extensions/render/composition/fonts/Tomorrow-Bold.ttf +0 -0
- package/extensions/render/composition/fonts/Tomorrow-Regular.ttf +0 -0
- package/extensions/render/composition/package-lock.json +534 -0
- package/extensions/render/composition/package.json +22 -0
- package/extensions/render/composition/render.mjs +246 -0
- package/extensions/render/composition/test-comp.tsx +87 -0
- package/extensions/render/composition/types.ts +24 -0
- package/extensions/render/excalidraw/UPSTREAM.md +81 -0
- package/extensions/render/excalidraw/elements.ts +764 -0
- package/extensions/render/excalidraw/index.ts +66 -0
- package/extensions/render/excalidraw/types.ts +223 -0
- package/extensions/render/excalidraw-renderer/pyproject.toml +8 -0
- package/extensions/render/excalidraw-renderer/render_excalidraw.py +182 -0
- package/extensions/render/excalidraw-renderer/render_template.html +59 -0
- package/extensions/render/index.ts +830 -0
- package/extensions/render/native-diagrams/index.ts +57 -0
- package/extensions/render/native-diagrams/motifs.ts +542 -0
- package/extensions/render/native-diagrams/raster.ts +8 -0
- package/extensions/render/native-diagrams/scene.ts +75 -0
- package/extensions/render/native-diagrams/spec.ts +204 -0
- package/extensions/render/native-diagrams/svg.ts +116 -0
- package/extensions/sci-ui.ts +304 -0
- package/extensions/session-log.ts +174 -0
- package/extensions/shared-state.ts +146 -0
- package/extensions/spinner-verbs.ts +91 -0
- package/extensions/style.ts +281 -0
- package/extensions/terminal-title.ts +191 -0
- package/extensions/tool-profile/index.ts +291 -0
- package/extensions/tool-profile/profiles.ts +290 -0
- package/extensions/types.d.ts +9 -0
- package/extensions/vault/index.ts +185 -0
- package/extensions/version-check.ts +90 -0
- package/extensions/view/index.ts +859 -0
- package/extensions/view/uri-resolver.ts +148 -0
- package/extensions/web-search/index.ts +182 -0
- package/extensions/web-search/providers.ts +121 -0
- package/extensions/web-ui/index.ts +110 -0
- package/extensions/web-ui/server.ts +265 -0
- package/extensions/web-ui/state.ts +462 -0
- package/extensions/web-ui/static/index.html +145 -0
- package/extensions/web-ui/types.ts +284 -0
- package/package.json +76 -0
- package/prompts/init.md +75 -0
- package/prompts/new-repo.md +54 -0
- package/prompts/oci-login.md +56 -0
- package/prompts/status.md +50 -0
- package/settings.json +4 -0
- package/skills/cleave/SKILL.md +218 -0
- package/skills/git/SKILL.md +209 -0
- package/skills/git/_reference/ci-validation.md +204 -0
- package/skills/oci/SKILL.md +338 -0
- package/skills/openspec/SKILL.md +346 -0
- package/skills/pi-extensions/SKILL.md +191 -0
- package/skills/pi-tui/SKILL.md +517 -0
- package/skills/python/SKILL.md +189 -0
- package/skills/rust/SKILL.md +268 -0
- package/skills/security/SKILL.md +206 -0
- package/skills/style/SKILL.md +264 -0
- package/skills/typescript/SKILL.md +225 -0
- package/skills/vault/SKILL.md +102 -0
- package/themes/alpharius-legacy.json +85 -0
- package/themes/alpharius.conf +59 -0
- package/themes/alpharius.json +88 -0
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Project Memory — Migration
|
|
3
|
+
*
|
|
4
|
+
* Migrates existing markdown-based memory (memory.md, archive/*.md, minds/*)
|
|
5
|
+
* into the SQLite fact store.
|
|
6
|
+
*
|
|
7
|
+
* Migration is idempotent — running it twice won't duplicate facts
|
|
8
|
+
* (dedup via content_hash handles this).
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import * as fs from "node:fs";
|
|
12
|
+
import * as path from "node:path";
|
|
13
|
+
import { FactStore, type StoreFactOptions } from "./factstore.ts";
|
|
14
|
+
import { SECTIONS, type SectionName } from "./template.ts";
|
|
15
|
+
|
|
16
|
+
/** Summary counters produced by a full migration run. */
interface MigrationResult {
  /** Facts imported from memory.md files (default mind + per-mind). */
  factsImported: number;
  /** Facts skipped because the store reported them as duplicates. */
  duplicatesSkipped: number;
  /** Facts imported from archive/*.md files. */
  archiveFactsImported: number;
  /** Minds newly created in the store during migration. */
  mindsImported: number;
  /** Human-readable messages for phases that failed (best-effort migration). */
  errors: string[];
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Parse a markdown memory file into section → bullet[] map.
|
|
26
|
+
*/
|
|
27
|
+
function parseMarkdownMemory(content: string): Map<SectionName, string[]> {
|
|
28
|
+
const sectionBullets = new Map<SectionName, string[]>();
|
|
29
|
+
let currentSection: SectionName | null = null;
|
|
30
|
+
|
|
31
|
+
for (const line of content.split("\n")) {
|
|
32
|
+
const sectionMatch = line.match(/^## (.+)$/);
|
|
33
|
+
if (sectionMatch) {
|
|
34
|
+
const name = sectionMatch[1].trim();
|
|
35
|
+
if ((SECTIONS as readonly string[]).includes(name)) {
|
|
36
|
+
currentSection = name as SectionName;
|
|
37
|
+
} else {
|
|
38
|
+
currentSection = null;
|
|
39
|
+
}
|
|
40
|
+
continue;
|
|
41
|
+
}
|
|
42
|
+
if (currentSection && line.trim().startsWith("- ")) {
|
|
43
|
+
if (!sectionBullets.has(currentSection)) {
|
|
44
|
+
sectionBullets.set(currentSection, []);
|
|
45
|
+
}
|
|
46
|
+
// Strip the bullet prefix
|
|
47
|
+
const content = line.trim().replace(/^-\s*/, "").trim();
|
|
48
|
+
if (content) {
|
|
49
|
+
sectionBullets.get(currentSection)!.push(content);
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
return sectionBullets;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/**
|
|
58
|
+
* Parse an archive file for timestamped sections.
|
|
59
|
+
* Archive files have <!-- Archived YYYY-MM-DD --> markers and
|
|
60
|
+
* optionally [SectionName] prefixes on facts.
|
|
61
|
+
*/
|
|
62
|
+
function parseArchiveFile(content: string): { date: string; section: SectionName; content: string }[] {
|
|
63
|
+
const facts: { date: string; section: SectionName; content: string }[] = [];
|
|
64
|
+
let currentDate = "";
|
|
65
|
+
|
|
66
|
+
for (const line of content.split("\n")) {
|
|
67
|
+
const dateMatch = line.match(/<!--\s*Archived\s+(\d{4}-\d{2}-\d{2})\s*-->/);
|
|
68
|
+
if (dateMatch) {
|
|
69
|
+
currentDate = dateMatch[1];
|
|
70
|
+
continue;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
const trimmed = line.trim();
|
|
74
|
+
if (!trimmed || trimmed.startsWith("<!--") || trimmed.startsWith("##")) continue;
|
|
75
|
+
|
|
76
|
+
// Try to extract [SectionName] prefix
|
|
77
|
+
const sectionMatch = trimmed.match(/^\[([^\]]+)\]\s*(.+)$/);
|
|
78
|
+
let section: SectionName = "Architecture"; // fallback
|
|
79
|
+
let factContent: string;
|
|
80
|
+
|
|
81
|
+
if (sectionMatch) {
|
|
82
|
+
const sectionName = sectionMatch[1].trim();
|
|
83
|
+
if ((SECTIONS as readonly string[]).includes(sectionName)) {
|
|
84
|
+
section = sectionName as SectionName;
|
|
85
|
+
}
|
|
86
|
+
factContent = sectionMatch[2].replace(/^-\s*/, "").trim();
|
|
87
|
+
} else {
|
|
88
|
+
factContent = trimmed.replace(/^-\s*/, "").trim();
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
if (factContent) {
|
|
92
|
+
facts.push({
|
|
93
|
+
date: currentDate || new Date().toISOString().split("T")[0],
|
|
94
|
+
section,
|
|
95
|
+
content: factContent,
|
|
96
|
+
});
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
return facts;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
/**
|
|
104
|
+
* Migrate an entire .pi/memory directory into a FactStore.
|
|
105
|
+
*/
|
|
106
|
+
export function migrateToFactStore(memoryDir: string, store: FactStore): MigrationResult {
|
|
107
|
+
const result: MigrationResult = {
|
|
108
|
+
factsImported: 0,
|
|
109
|
+
duplicatesSkipped: 0,
|
|
110
|
+
archiveFactsImported: 0,
|
|
111
|
+
mindsImported: 0,
|
|
112
|
+
errors: [],
|
|
113
|
+
};
|
|
114
|
+
|
|
115
|
+
// 1. Migrate default memory.md
|
|
116
|
+
const defaultMemoryPath = path.join(memoryDir, "memory.md");
|
|
117
|
+
if (fs.existsSync(defaultMemoryPath)) {
|
|
118
|
+
try {
|
|
119
|
+
const content = fs.readFileSync(defaultMemoryPath, "utf8");
|
|
120
|
+
const sections = parseMarkdownMemory(content);
|
|
121
|
+
|
|
122
|
+
for (const [section, bullets] of sections) {
|
|
123
|
+
for (const bullet of bullets) {
|
|
124
|
+
const { duplicate } = store.storeFact({
|
|
125
|
+
mind: "default",
|
|
126
|
+
section,
|
|
127
|
+
content: bullet,
|
|
128
|
+
source: "migration",
|
|
129
|
+
// Give migrated facts a moderate reinforcement count
|
|
130
|
+
// They've survived in memory, so they're proven durable
|
|
131
|
+
reinforcement_count: 5,
|
|
132
|
+
});
|
|
133
|
+
if (duplicate) {
|
|
134
|
+
result.duplicatesSkipped++;
|
|
135
|
+
} else {
|
|
136
|
+
result.factsImported++;
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
} catch (err: any) {
|
|
141
|
+
result.errors.push(`Failed to migrate default memory: ${err.message}`);
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
// 2. Migrate archive files
|
|
146
|
+
const archiveDir = path.join(memoryDir, "archive");
|
|
147
|
+
if (fs.existsSync(archiveDir)) {
|
|
148
|
+
try {
|
|
149
|
+
const archiveFiles = fs.readdirSync(archiveDir)
|
|
150
|
+
.filter(f => f.endsWith(".md"))
|
|
151
|
+
.sort();
|
|
152
|
+
|
|
153
|
+
for (const file of archiveFiles) {
|
|
154
|
+
const content = fs.readFileSync(path.join(archiveDir, file), "utf8");
|
|
155
|
+
const archiveFacts = parseArchiveFile(content);
|
|
156
|
+
|
|
157
|
+
for (const af of archiveFacts) {
|
|
158
|
+
const { duplicate } = store.storeFact({
|
|
159
|
+
mind: "default",
|
|
160
|
+
section: af.section,
|
|
161
|
+
content: af.content,
|
|
162
|
+
source: "migration",
|
|
163
|
+
reinforcement_count: 2, // archived = less durable
|
|
164
|
+
});
|
|
165
|
+
|
|
166
|
+
if (!duplicate) {
|
|
167
|
+
// Mark as archived since these were already archived
|
|
168
|
+
// Find the fact we just inserted and archive it
|
|
169
|
+
// Actually, store it as active — let decay handle it naturally
|
|
170
|
+
// since these are older facts they'll have lower confidence
|
|
171
|
+
result.archiveFactsImported++;
|
|
172
|
+
} else {
|
|
173
|
+
result.duplicatesSkipped++;
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
} catch (err: any) {
|
|
178
|
+
result.errors.push(`Failed to migrate archive: ${err.message}`);
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
// 3. Migrate minds
|
|
183
|
+
const mindsDir = path.join(memoryDir, "minds");
|
|
184
|
+
if (fs.existsSync(mindsDir)) {
|
|
185
|
+
try {
|
|
186
|
+
// Read registry if it exists
|
|
187
|
+
const registryPath = path.join(mindsDir, "registry.json");
|
|
188
|
+
let registry: Record<string, any> = {};
|
|
189
|
+
try {
|
|
190
|
+
registry = JSON.parse(fs.readFileSync(registryPath, "utf8"));
|
|
191
|
+
} catch {
|
|
192
|
+
// No registry — scan directories
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
const entries = fs.readdirSync(mindsDir, { withFileTypes: true });
|
|
196
|
+
for (const entry of entries) {
|
|
197
|
+
if (!entry.isDirectory()) continue;
|
|
198
|
+
const mindName = entry.name;
|
|
199
|
+
|
|
200
|
+
// Skip state files
|
|
201
|
+
if (mindName === "." || mindName === "..") continue;
|
|
202
|
+
|
|
203
|
+
const metaPath = path.join(mindsDir, mindName, "meta.json");
|
|
204
|
+
const mindMemoryPath = path.join(mindsDir, mindName, "memory.md");
|
|
205
|
+
|
|
206
|
+
if (!fs.existsSync(mindMemoryPath)) continue;
|
|
207
|
+
|
|
208
|
+
try {
|
|
209
|
+
// Read metadata
|
|
210
|
+
let meta: any = {};
|
|
211
|
+
try {
|
|
212
|
+
meta = JSON.parse(fs.readFileSync(metaPath, "utf8"));
|
|
213
|
+
} catch {
|
|
214
|
+
meta = { name: mindName, description: "", status: "active" };
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
const regEntry = registry[mindName];
|
|
218
|
+
|
|
219
|
+
// Create mind in store if it doesn't exist
|
|
220
|
+
if (!store.mindExists(mindName)) {
|
|
221
|
+
store.createMind(mindName, meta.description ?? "", {
|
|
222
|
+
parent: meta.parent,
|
|
223
|
+
origin_type: regEntry?.origin?.type ?? meta.origin?.type ?? "local",
|
|
224
|
+
origin_path: regEntry?.origin?.path ?? meta.origin?.path,
|
|
225
|
+
readonly: meta.readonly ?? regEntry?.readonly ?? false,
|
|
226
|
+
});
|
|
227
|
+
result.mindsImported++;
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
// Migrate facts
|
|
231
|
+
const content = fs.readFileSync(mindMemoryPath, "utf8");
|
|
232
|
+
const sections = parseMarkdownMemory(content);
|
|
233
|
+
|
|
234
|
+
for (const [section, bullets] of sections) {
|
|
235
|
+
for (const bullet of bullets) {
|
|
236
|
+
const { duplicate } = store.storeFact({
|
|
237
|
+
mind: mindName,
|
|
238
|
+
section,
|
|
239
|
+
content: bullet,
|
|
240
|
+
source: "migration",
|
|
241
|
+
reinforcement_count: 5,
|
|
242
|
+
});
|
|
243
|
+
if (duplicate) {
|
|
244
|
+
result.duplicatesSkipped++;
|
|
245
|
+
} else {
|
|
246
|
+
result.factsImported++;
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
} catch (err: any) {
|
|
251
|
+
result.errors.push(`Failed to migrate mind "${mindName}": ${err.message}`);
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// Migrate active mind state
|
|
256
|
+
const activeStatePath = path.join(mindsDir, "active.json");
|
|
257
|
+
if (fs.existsSync(activeStatePath)) {
|
|
258
|
+
try {
|
|
259
|
+
const state = JSON.parse(fs.readFileSync(activeStatePath, "utf8"));
|
|
260
|
+
if (state.activeMind) {
|
|
261
|
+
store.setActiveMind(state.activeMind);
|
|
262
|
+
}
|
|
263
|
+
} catch {
|
|
264
|
+
// Best effort
|
|
265
|
+
}
|
|
266
|
+
}
|
|
267
|
+
} catch (err: any) {
|
|
268
|
+
result.errors.push(`Failed to migrate minds: ${err.message}`);
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
return result;
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
/**
|
|
276
|
+
* Check if migration is needed — does markdown memory exist but no facts.db?
|
|
277
|
+
*/
|
|
278
|
+
export function needsMigration(memoryDir: string): boolean {
|
|
279
|
+
const hasMarkdown = fs.existsSync(path.join(memoryDir, "memory.md"));
|
|
280
|
+
const hasDb = fs.existsSync(path.join(memoryDir, "facts.db"));
|
|
281
|
+
return hasMarkdown && !hasDb;
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
/**
|
|
285
|
+
* Rename old markdown files after successful migration.
|
|
286
|
+
* Appends .migrated suffix so they're preserved but not re-migrated.
|
|
287
|
+
*/
|
|
288
|
+
export function markMigrated(memoryDir: string): void {
|
|
289
|
+
const memoryMd = path.join(memoryDir, "memory.md");
|
|
290
|
+
if (fs.existsSync(memoryMd)) {
|
|
291
|
+
fs.renameSync(memoryMd, memoryMd + ".migrated");
|
|
292
|
+
}
|
|
293
|
+
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sci-UI render helpers for project-memory tools.
|
|
3
|
+
*
|
|
4
|
+
* Re-exports the shared sci-ui primitives and adds memory-specific
|
|
5
|
+
* formatting helpers for structured card rendering.
|
|
6
|
+
*/
|
|
7
|
+
export { sciCall, sciOk, sciErr, sciExpanded, sciLoading } from "../sci-ui.ts";
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
/**
 * Project Memory — Default Template
 */

/**
 * Seed markdown written for a fresh memory file. The `## ` headers here
 * match the SECTIONS list declared below in this module.
 */
export const DEFAULT_TEMPLATE = `<!-- Project Memory — managed by project-memory extension -->
<!-- Do not edit while a pi session is actively running -->

## Architecture
_System structure, component relationships, key abstractions_

## Decisions
_Choices made and their rationale_

## Constraints
_Requirements, limitations, environment details_

## Known Issues
_Bugs, flaky tests, workarounds_

## Patterns & Conventions
_Code style, project conventions, common approaches_

## Specs
_Active specifications, acceptance criteria, and design contracts driving current work_

## Recent Work
_What was done this week — ephemeral session receipts, decays within a business week_
`;
|
|
29
|
+
|
|
30
|
+
/** Canonical memory section names, in the same order as DEFAULT_TEMPLATE. */
export const SECTIONS = [
  "Architecture",
  "Decisions",
  "Constraints",
  "Known Issues",
  "Patterns & Conventions",
  "Specs",
  "Recent Work",
] as const;

/** Union of the literal section names declared in SECTIONS. */
export type SectionName = (typeof SECTIONS)[number];
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Check if a bullet already exists in the section (exact or near-duplicate).
|
|
44
|
+
* Normalizes whitespace and leading "- " for comparison.
|
|
45
|
+
*/
|
|
46
|
+
function isDuplicate(existingLines: string[], bullet: string): boolean {
|
|
47
|
+
const normalize = (s: string) =>
|
|
48
|
+
s.replace(/^-\s*/, "").trim().toLowerCase();
|
|
49
|
+
const normalized = normalize(bullet);
|
|
50
|
+
return existingLines.some((line) => {
|
|
51
|
+
if (!line.trim().startsWith("- ")) return false;
|
|
52
|
+
return normalize(line) === normalized;
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Append a bullet to a specific section in the memory markdown.
|
|
58
|
+
* Returns the updated markdown string.
|
|
59
|
+
*/
|
|
60
|
+
export function appendToSection(markdown: string, section: SectionName, bullet: string): string {
|
|
61
|
+
const sectionHeader = `## ${section}`;
|
|
62
|
+
const lines = markdown.split("\n");
|
|
63
|
+
const headerIdx = lines.findIndex((l) => l.trim() === sectionHeader);
|
|
64
|
+
|
|
65
|
+
if (headerIdx === -1) {
|
|
66
|
+
// Section not found — append it
|
|
67
|
+
return markdown.trimEnd() + `\n\n${sectionHeader}\n\n${bullet}\n`;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
// Find the next section header or end of file
|
|
71
|
+
let insertIdx = lines.length;
|
|
72
|
+
for (let i = headerIdx + 1; i < lines.length; i++) {
|
|
73
|
+
if (lines[i].match(/^## /)) {
|
|
74
|
+
insertIdx = i;
|
|
75
|
+
break;
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
// Walk back past blank lines to insert before the gap
|
|
80
|
+
while (insertIdx > headerIdx + 1 && lines[insertIdx - 1].trim() === "") {
|
|
81
|
+
insertIdx--;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// Check for duplicates in this section
|
|
85
|
+
const sectionLines = lines.slice(headerIdx + 1, insertIdx);
|
|
86
|
+
if (isDuplicate(sectionLines, bullet)) {
|
|
87
|
+
return markdown; // No change — already exists
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
lines.splice(insertIdx, 0, bullet);
|
|
91
|
+
return lines.join("\n");
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
/**
|
|
95
|
+
* Count non-empty, non-comment lines in markdown content.
|
|
96
|
+
* Filters out empty lines and HTML comment lines starting with <!--.
|
|
97
|
+
*/
|
|
98
|
+
export function countContentLines(content: string): number {
|
|
99
|
+
return content.split("\n").filter((l) => {
|
|
100
|
+
const trimmed = l.trim();
|
|
101
|
+
return trimmed !== "" && !trimmed.startsWith("<!--");
|
|
102
|
+
}).length;
|
|
103
|
+
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Extraction trigger logic — determines when background extraction should run.
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import type { MemoryConfig } from "./types.ts";
|
|
6
|
+
|
|
7
|
+
/** Mutable counters and flags gating when background extraction may run. */
export interface ExtractionTriggerState {
  /** Token count recorded at the last extraction (baseline for the delta check). */
  lastExtractedTokens: number;
  /** Tool calls observed since the last extraction. */
  toolCallsSinceExtract: number;
  /** Manual fact stores observed since the last extraction. */
  manualStoresSinceExtract: number;
  /** True once a first extraction has established a baseline. */
  isInitialized: boolean;
  /** True while an extraction is in flight (blocks re-entry). */
  isRunning: boolean;
}
|
|
14
|
+
|
|
15
|
+
export function createTriggerState(): ExtractionTriggerState {
|
|
16
|
+
return {
|
|
17
|
+
lastExtractedTokens: 0,
|
|
18
|
+
toolCallsSinceExtract: 0,
|
|
19
|
+
manualStoresSinceExtract: 0,
|
|
20
|
+
isInitialized: false,
|
|
21
|
+
isRunning: false,
|
|
22
|
+
};
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function shouldExtract(
|
|
26
|
+
state: ExtractionTriggerState,
|
|
27
|
+
currentTokens: number,
|
|
28
|
+
config: MemoryConfig,
|
|
29
|
+
consecutiveFailures: number = 0,
|
|
30
|
+
): boolean {
|
|
31
|
+
if (state.isRunning) return false;
|
|
32
|
+
|
|
33
|
+
// Exponential backoff on consecutive failures: skip 2^n extraction opportunities
|
|
34
|
+
// (1 skip after 1 failure, 2 after 2, 4 after 3, cap at 16)
|
|
35
|
+
if (consecutiveFailures > 0) {
|
|
36
|
+
const backoffSlots = Math.min(1 << consecutiveFailures, 16);
|
|
37
|
+
if (state.toolCallsSinceExtract % backoffSlots !== 0) return false;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
// Only suppress for manual stores after first extraction has established baseline.
|
|
41
|
+
if (state.isInitialized && state.manualStoresSinceExtract >= config.manualStoreThreshold) {
|
|
42
|
+
return false;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const tokenDelta = currentTokens - state.lastExtractedTokens;
|
|
46
|
+
|
|
47
|
+
if (!state.isInitialized) {
|
|
48
|
+
return currentTokens >= config.minimumTokensToInit;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
return tokenDelta >= config.minimumTokensBetweenUpdate && state.toolCallsSinceExtract >= config.toolCallsBetweenUpdates;
|
|
52
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Project Memory — Types
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
/** Message carrying memory candidates produced by a design-tree, openspec, or cleave lifecycle event. */
export interface LifecycleMemoryMessage {
  /** Which extension emitted the message. */
  source: "design-tree" | "openspec" | "cleave";
  /** Free-text context describing the originating event. */
  context: string;
  /** Candidate facts proposed for storage. */
  candidates: LifecycleMemoryCandidate[];
}

/** A single candidate fact derived from a lifecycle artifact. */
export interface LifecycleMemoryCandidate {
  /** Kind of lifecycle artifact the candidate was derived from. */
  sourceKind: "design-decision" | "design-constraint" | "openspec-archive" | "openspec-assess" | "cleave-outcome" | "cleave-bug-fix";
  /** Whether the fact was stated explicitly or merely inferred. */
  authority: "explicit" | "inferred";
  /** Memory section the fact should be filed under. */
  section: "Architecture" | "Decisions" | "Constraints" | "Known Issues" | "Patterns & Conventions" | "Specs";
  /** The fact text itself. */
  content: string;
  /** Optional pointer back to the originating artifact. */
  artifactRef?: {
    type: "design-node" | "openspec-spec" | "openspec-baseline" | "cleave-review";
    /** Path to the artifact file. */
    path: string;
    /** Sub-location within the artifact, when applicable. */
    subRef?: string;
  };
  // NOTE(review): presumably identifies an earlier fact this one replaces —
  // verify the matching semantics against the consumer.
  supersedes?: string;
  /** Originating session identifier, when known. */
  session?: string;
}
|
|
24
|
+
|
|
25
|
+
/** Tunable knobs for the project-memory extension; defaults live in DEFAULT_CONFIG. */
export interface MemoryConfig {
  /** Max lines in active memory before extraction prunes */
  maxLines: number;
  /** Embedding backend used for semantic retrieval */
  embeddingProvider: "voyage" | "openai" | "openai-compatible" | "ollama";
  /** Embedding model to use for fact and episode vectors */
  embeddingModel: string;
  /** Minimum total message tokens before first extraction */
  minimumTokensToInit: number;
  /** Token delta required between extractions */
  minimumTokensBetweenUpdate: number;
  /** Minimum tool calls since last extraction */
  toolCallsBetweenUpdates: number;
  /** Skip auto-extraction if LLM stored >= this many facts since last extraction */
  manualStoreThreshold: number;
  /** Model to use for extraction subagent */
  extractionModel: string;
  /** Timeout for extraction subprocess in ms */
  extractionTimeout: number;
  /** Timeout for shutdown extraction in ms (shorter — blocks exit) */
  shutdownExtractionTimeout: number;
  /** Context % at which degeneracy pressure begins (gradient onset) */
  pressureOnsetPercent: number;
  /** Context % at which to warn the agent to consider compacting */
  compactionWarningPercent: number;
  /** Context % at which to auto-compact without asking */
  compactionAutoPercent: number;
  /** Use local model as fallback when cloud compaction fails */
  compactionLocalFallback: boolean;
  /** Try local model FIRST for compaction (cloud only if local unavailable) */
  compactionLocalFirst: boolean;
  /** Timeout for local model compaction inference in ms */
  compactionLocalTimeout: number;
  /** Enable intelligent fallback chain: local → gpt-5.3-codex-spark → haiku */
  compactionFallbackChain: boolean;
  /** Timeout for gpt-5.3-codex-spark compaction fallback in ms */
  compactionCodexTimeout: number;
  /** Timeout for retribution compaction fallback in ms */
  compactionHaikuTimeout: number;
  /** Max consecutive compaction retry attempts before giving up for the session */
  compactionRetryLimit: number;
  /** Enable Phase 2 global extraction (generalizes project facts to user-level store) */
  globalExtractionEnabled: boolean;
  /** Model to use for episode generation (first cloud attempt in fallback chain) */
  episodeModel: string;
  /** Per-attempt timeout for each step in the episode fallback chain (ms) */
  episodeStepTimeout: number;
  /** Enable fallback chain for episode generation: Ollama → cloud primary → cloud retribution → template */
  episodeFallbackChain: boolean;
}
|
|
75
|
+
|
|
76
|
+
/** Defaults applied when the user configures nothing; see MemoryConfig for per-field docs. */
export const DEFAULT_CONFIG: MemoryConfig = {
  maxLines: 50,
  embeddingProvider: "voyage",
  embeddingModel: "voyage-3-lite",
  minimumTokensToInit: 10_000,
  minimumTokensBetweenUpdate: 5_000,
  toolCallsBetweenUpdates: 8,
  manualStoreThreshold: 3,
  extractionModel: "gpt-5.3-codex-spark",
  extractionTimeout: 60_000,
  shutdownExtractionTimeout: 15_000,
  pressureOnsetPercent: 55,
  compactionWarningPercent: 75,
  compactionAutoPercent: 85,
  compactionLocalFallback: true,
  compactionLocalFirst: false,
  compactionLocalTimeout: 45_000,
  compactionFallbackChain: true,
  compactionCodexTimeout: 60_000,
  compactionHaikuTimeout: 30_000,
  compactionRetryLimit: 3,
  globalExtractionEnabled: false,
  episodeModel: "gpt-5.3-codex-spark",
  episodeStepTimeout: 8_000,
  episodeFallbackChain: true,
};
|
|
102
|
+
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|