@evomap/evolver 1.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52) hide show
  1. package/LICENSE +22 -0
  2. package/README.md +290 -0
  3. package/README.zh-CN.md +236 -0
  4. package/SKILL.md +132 -0
  5. package/assets/gep/capsules.json +79 -0
  6. package/assets/gep/events.jsonl +7 -0
  7. package/assets/gep/genes.json +108 -0
  8. package/index.js +479 -0
  9. package/package.json +38 -0
  10. package/src/canary.js +13 -0
  11. package/src/evolve.js +1704 -0
  12. package/src/gep/a2a.js +173 -0
  13. package/src/gep/a2aProtocol.js +736 -0
  14. package/src/gep/analyzer.js +35 -0
  15. package/src/gep/assetCallLog.js +130 -0
  16. package/src/gep/assetStore.js +297 -0
  17. package/src/gep/assets.js +36 -0
  18. package/src/gep/bridge.js +71 -0
  19. package/src/gep/candidates.js +142 -0
  20. package/src/gep/contentHash.js +65 -0
  21. package/src/gep/deviceId.js +209 -0
  22. package/src/gep/envFingerprint.js +68 -0
  23. package/src/gep/hubReview.js +206 -0
  24. package/src/gep/hubSearch.js +237 -0
  25. package/src/gep/issueReporter.js +262 -0
  26. package/src/gep/llmReview.js +92 -0
  27. package/src/gep/memoryGraph.js +771 -0
  28. package/src/gep/memoryGraphAdapter.js +203 -0
  29. package/src/gep/mutation.js +186 -0
  30. package/src/gep/narrativeMemory.js +108 -0
  31. package/src/gep/paths.js +113 -0
  32. package/src/gep/personality.js +355 -0
  33. package/src/gep/prompt.js +566 -0
  34. package/src/gep/questionGenerator.js +212 -0
  35. package/src/gep/reflection.js +127 -0
  36. package/src/gep/sanitize.js +67 -0
  37. package/src/gep/selector.js +250 -0
  38. package/src/gep/signals.js +417 -0
  39. package/src/gep/skillDistiller.js +499 -0
  40. package/src/gep/solidify.js +1681 -0
  41. package/src/gep/strategy.js +126 -0
  42. package/src/gep/taskReceiver.js +528 -0
  43. package/src/gep/validationReport.js +55 -0
  44. package/src/ops/cleanup.js +80 -0
  45. package/src/ops/commentary.js +60 -0
  46. package/src/ops/health_check.js +106 -0
  47. package/src/ops/index.js +11 -0
  48. package/src/ops/innovation.js +67 -0
  49. package/src/ops/lifecycle.js +168 -0
  50. package/src/ops/self_repair.js +72 -0
  51. package/src/ops/skills_monitor.js +143 -0
  52. package/src/ops/trigger.js +33 -0
@@ -0,0 +1,35 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
+ // Innovation: Self-Correction Analyzer
5
+ // Analyze past failures to suggest better future mutations
6
+ // Pattern: Meta-learning
7
+
8
/**
 * Scan MEMORY.md in the current working directory for recorded fix
 * failures — markdown table rows shaped like
 * `| **F1** | Fix | summary | **detail** (context) |` — and return up to
 * three of them for use as prompt context (meta-learning over past failures).
 *
 * @returns {{status: string, reason?: string, count?: number,
 *            failures?: Array<{summary: string, detail: string}>}}
 */
function analyzeFailures() {
  const memoryFile = path.join(process.cwd(), 'MEMORY.md');
  if (!fs.existsSync(memoryFile)) return { status: 'skipped', reason: 'no_memory' };

  const text = fs.readFileSync(memoryFile, 'utf8');
  // Local regex literal, so the /g lastIndex state never leaks between calls.
  const rowPattern = /\|\s*\*\*F\d+\*\*\s*\|\s*Fix\s*\|\s*(.*?)\s*\|\s*\*\*(.*?)\*\*\s*\((.*?)\)\s*\|/g;

  const found = [];
  for (let m = rowPattern.exec(text); m !== null; m = rowPattern.exec(text)) {
    found.push({ summary: m[1].trim(), detail: m[2].trim() });
  }

  return {
    status: 'success',
    count: found.length,
    failures: found.slice(0, 3), // top 3 only, to keep prompt context small
  };
}
30
+
31
// CLI entry point: when this file is executed directly (node .../analyzer.js),
// print the failure analysis as pretty JSON to stdout.
if (require.main === module) {
  console.log(JSON.stringify(analyzeFailures(), null, 2));
}

module.exports = { analyzeFailures };
@@ -0,0 +1,130 @@
1
+ // Append-only asset call log for tracking Hub asset interactions per evolution run.
2
+ // Log file: {evolution_dir}/asset_call_log.jsonl
3
+
4
+ const fs = require('fs');
5
+ const path = require('path');
6
+ const { getEvolutionDir } = require('./paths');
7
+
8
// Resolve the JSONL log path inside the current evolution directory
// (see module header: {evolution_dir}/asset_call_log.jsonl).
function getLogPath() {
  return path.join(getEvolutionDir(), 'asset_call_log.jsonl');
}
11
+
12
// Create the parent directory of `filePath` if it does not exist yet.
function ensureDir(filePath) {
  const parent = path.dirname(filePath);
  if (fs.existsSync(parent)) return;
  fs.mkdirSync(parent, { recursive: true });
}
18
+
19
/**
 * Append a single asset call record to the log.
 *
 * @param {object} entry
 * @param {string} entry.run_id
 * @param {string} entry.action - hub_search_hit | hub_search_miss | asset_reuse | asset_reference | asset_publish | asset_publish_skip
 * @param {string} [entry.asset_id]
 * @param {string} [entry.asset_type]
 * @param {string} [entry.source_node_id]
 * @param {string} [entry.chain_id]
 * @param {number} [entry.score]
 * @param {string} [entry.mode] - direct | reference
 * @param {string[]} [entry.signals]
 * @param {string} [entry.reason]
 * @param {object} [entry.extra]
 */
function logAssetCall(entry) {
  if (!entry || typeof entry !== 'object') return;
  try {
    const target = getLogPath();
    ensureDir(target);
    // Stamp the record; entry fields win over the timestamp if duplicated.
    const record = { timestamp: new Date().toISOString(), ...entry };
    fs.appendFileSync(target, `${JSON.stringify(record)}\n`, 'utf8');
  } catch (e) {
    // Non-fatal by design: logging must never block an evolution run.
  }
}
49
+
50
/**
 * Read asset call log entries with optional filters.
 *
 * @param {object} [opts]
 * @param {string} [opts.run_id] - filter by run_id
 * @param {string} [opts.action] - filter by action type
 * @param {number} [opts.last] - only return last N entries
 * @param {string} [opts.since] - ISO date string, only entries after this time
 * @returns {object[]}
 */
function readCallLog(opts) {
  const { run_id, action, last, since } = opts || {};
  const logPath = getLogPath();
  if (!fs.existsSync(logPath)) return [];

  // Parse line-by-line, dropping blank and corrupt lines.
  let entries = fs
    .readFileSync(logPath, 'utf8')
    .split('\n')
    .filter(Boolean)
    .map((line) => {
      try {
        return JSON.parse(line);
      } catch (e) {
        return null; // skip corrupt lines
      }
    })
    .filter(Boolean);

  if (since) {
    const cutoff = new Date(since).getTime();
    // Ignore the filter entirely when `since` is not a parseable date.
    if (Number.isFinite(cutoff)) {
      entries = entries.filter((e) => new Date(e.timestamp).getTime() >= cutoff);
    }
  }

  if (run_id) entries = entries.filter((e) => e.run_id === run_id);
  if (action) entries = entries.filter((e) => e.action === action);

  // `last` applies after the other filters: take the tail of the result.
  if (last && Number.isFinite(last) && last > 0) {
    entries = entries.slice(-last);
  }

  return entries;
}
96
+
97
+ /**
98
+ * Summarize asset call log (for CLI display).
99
+ *
100
+ * @param {object} [opts] - same filters as readCallLog
101
+ * @returns {object} summary with totals and per-action counts
102
+ */
103
+ function summarizeCallLog(opts) {
104
+ const entries = readCallLog(opts);
105
+ const actionCounts = {};
106
+ const assetsSeen = new Set();
107
+ const runsSeen = new Set();
108
+
109
+ for (const e of entries) {
110
+ const a = e.action || 'unknown';
111
+ actionCounts[a] = (actionCounts[a] || 0) + 1;
112
+ if (e.asset_id) assetsSeen.add(e.asset_id);
113
+ if (e.run_id) runsSeen.add(e.run_id);
114
+ }
115
+
116
+ return {
117
+ total_entries: entries.length,
118
+ unique_assets: assetsSeen.size,
119
+ unique_runs: runsSeen.size,
120
+ by_action: actionCounts,
121
+ entries,
122
+ };
123
+ }
124
+
125
+ module.exports = {
126
+ logAssetCall,
127
+ readCallLog,
128
+ summarizeCallLog,
129
+ getLogPath,
130
+ };
@@ -0,0 +1,297 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const { getGepAssetsDir } = require('./paths');
4
+ const { computeAssetId, SCHEMA_VERSION } = require('./contentHash');
5
+
6
// Create `dir` (and any missing parents) when absent.
function ensureDir(dir) {
  if (fs.existsSync(dir)) return;
  fs.mkdirSync(dir, { recursive: true });
}
9
+
10
// Parse a JSON file, returning `fallback` when the file is missing,
// blank/whitespace-only, or unparseable. Never throws.
function readJsonIfExists(filePath, fallback) {
  try {
    if (!fs.existsSync(filePath)) return fallback;
    const text = fs.readFileSync(filePath, 'utf8');
    return text.trim() ? JSON.parse(text) : fallback;
  } catch {
    return fallback;
  }
}
20
+
21
// Write `obj` as pretty-printed JSON via a temp file + rename, so readers
// never observe a partially-written file.
// Fixes two defects in the previous version:
//   1. The temp name was the fixed `${filePath}.tmp`, so two concurrent
//      writers to the same target clobbered each other's temp file; the
//      name now includes pid + timestamp to make it unique per call.
//   2. A failed write left the temp file behind; it is now unlinked
//      (best-effort) before the error is rethrown.
function writeJsonAtomic(filePath, obj) {
  // mkdir -p of the parent (no-op when it already exists).
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  const tmp = `${filePath}.${process.pid}.${Date.now()}.tmp`;
  try {
    fs.writeFileSync(tmp, JSON.stringify(obj, null, 2) + '\n', 'utf8');
    fs.renameSync(tmp, filePath); // atomic on POSIX within the same filesystem
  } catch (e) {
    try { fs.unlinkSync(tmp); } catch {} // clean up the orphaned temp file
    throw e;
  }
}
28
+
29
// Build a validation command using repo-root-relative paths.
// runValidations() executes with cwd=repoRoot, so require('./src/...')
// resolves correctly without embedding machine-specific absolute paths.
function buildValidationCmd(relModules) {
  const args = relModules.map((mod) => `./${mod}`).join(' ');
  return `node scripts/validate-modules.js ${args}`;
}
36
+
37
// Built-in seed Genes used when genes.json does not exist yet.
// Each Gene declares: the signals it matches, preconditions, an ordered
// strategy, edit constraints (file budget + forbidden paths), and the
// validation commands that gate solidification.
function getDefaultGenes() {
  return {
    version: 1,
    genes: [
      {
        // Repair gene: triggered by error-like signals in logs/instructions.
        type: 'Gene', id: 'gene_gep_repair_from_errors', category: 'repair',
        signals_match: ['error', 'exception', 'failed', 'unstable'],
        preconditions: ['signals contains error-related indicators'],
        strategy: [
          'Extract structured signals from logs and user instructions',
          'Select an existing Gene by signals match (no improvisation)',
          'Estimate blast radius (files, lines) before editing',
          'Apply smallest reversible patch',
          'Validate using declared validation steps; rollback on failure',
          'Solidify knowledge: append EvolutionEvent, update Gene/Capsule store',
        ],
        constraints: { max_files: 12, forbidden_paths: ['.git', 'node_modules'] },
        validation: [
          buildValidationCmd(['src/evolve', 'src/gep/solidify']),
          buildValidationCmd(['src/gep/selector', 'src/gep/memoryGraph']),
        ],
      },
      {
        // Optimization gene: tightens prompt assembly and asset reuse.
        type: 'Gene', id: 'gene_gep_optimize_prompt_and_assets', category: 'optimize',
        signals_match: ['protocol', 'gep', 'prompt', 'audit', 'reusable'],
        preconditions: ['need stricter, auditable evolution protocol outputs'],
        strategy: [
          'Extract signals and determine selection rationale via Selector JSON',
          'Prefer reusing existing Gene/Capsule; only create if no match exists',
          'Refactor prompt assembly to embed assets (genes, capsules, parent event)',
          'Reduce noise and ambiguity; enforce strict output schema',
          'Validate by running node index.js run and ensuring no runtime errors',
          'Solidify: record EvolutionEvent, update Gene definitions, create Capsule on success',
        ],
        constraints: { max_files: 20, forbidden_paths: ['.git', 'node_modules'] },
        validation: [buildValidationCmd(['src/evolve', 'src/gep/prompt'])],
      },
    ],
  };
}
77
+
78
+ function getDefaultCapsules() { return { version: 1, capsules: [] }; }
79
+ function genesPath() { return path.join(getGepAssetsDir(), 'genes.json'); }
80
+ function capsulesPath() { return path.join(getGepAssetsDir(), 'capsules.json'); }
81
+ function capsulesJsonlPath() { return path.join(getGepAssetsDir(), 'capsules.jsonl'); }
82
+ function eventsPath() { return path.join(getGepAssetsDir(), 'events.jsonl'); }
83
+ function candidatesPath() { return path.join(getGepAssetsDir(), 'candidates.jsonl'); }
84
+ function externalCandidatesPath() { return path.join(getGepAssetsDir(), 'external_candidates.jsonl'); }
85
+ function failedCapsulesPath() { return path.join(getGepAssetsDir(), 'failed_capsules.json'); }
86
+
87
// Load Gene definitions from both stores: the legacy genes.json document and
// the append-only genes.jsonl stream. Results are de-duplicated by id, with
// JSONL entries (merged last) overwriting legacy ones that share an id.
function loadGenes() {
  const fromJson = readJsonIfExists(genesPath(), getDefaultGenes()).genes || [];
  const fromJsonl = [];
  try {
    const jsonlFile = path.join(getGepAssetsDir(), 'genes.jsonl');
    if (fs.existsSync(jsonlFile)) {
      for (const line of fs.readFileSync(jsonlFile, 'utf8').split('\n')) {
        if (!line.trim()) continue;
        try {
          const record = JSON.parse(line);
          // Only accept records explicitly typed as Gene.
          if (record && record.type === 'Gene') fromJsonl.push(record);
        } catch (e) {}
      }
    }
  } catch (e) {}

  const byId = new Map();
  for (const gene of [...fromJson, ...fromJsonl]) {
    if (gene && gene.id) byId.set(String(gene.id), gene);
  }
  return Array.from(byId.values());
}
113
+
114
// Load Capsules from the legacy capsules.json document plus the append-only
// capsules.jsonl stream, de-duplicated by id (JSONL entries are merged after
// the legacy list, so they win when ids collide).
function loadCapsules() {
  const legacy = readJsonIfExists(capsulesPath(), getDefaultCapsules()).capsules || [];
  const jsonlCapsules = [];
  try {
    const p = capsulesJsonlPath();
    if (fs.existsSync(p)) {
      const raw = fs.readFileSync(p, 'utf8');
      raw.split('\n').forEach(line => {
        if (line.trim()) {
          // Corrupt lines are skipped silently (append-only log may be dirty).
          try { jsonlCapsules.push(JSON.parse(line)); } catch(e) {}
        }
      });
    }
  } catch(e) {}

  // Combine and deduplicate by ID
  const combined = [...legacy, ...jsonlCapsules];
  const unique = new Map();
  combined.forEach(c => {
    if (c && c.id) unique.set(String(c.id), c);
  });
  return Array.from(unique.values());
}
137
+
138
// Return the id of the most recent EvolutionEvent, or null when the events
// log is missing/empty, unparseable, or its last record has no string id.
function getLastEventId() {
  try {
    const file = eventsPath();
    if (!fs.existsSync(file)) return null;
    const records = fs.readFileSync(file, 'utf8')
      .split('\n')
      .map((l) => l.trim())
      .filter(Boolean);
    if (records.length === 0) return null;
    const tail = JSON.parse(records[records.length - 1]);
    return tail && typeof tail.id === 'string' ? tail.id : null;
  } catch {
    return null;
  }
}
149
+
150
// Read every parseable event record from events.jsonl; corrupt or blank
// lines are skipped and any I/O failure yields an empty list.
function readAllEvents() {
  try {
    const file = eventsPath();
    if (!fs.existsSync(file)) return [];
    const events = [];
    for (const line of fs.readFileSync(file, 'utf8').split('\n')) {
      const trimmed = line.trim();
      if (!trimmed) continue;
      try {
        events.push(JSON.parse(trimmed));
      } catch {}
    }
    return events;
  } catch {
    return [];
  }
}
160
+
161
// Shared helper: append one record to an append-only JSONL store,
// making sure the assets directory exists first.
function appendJsonlRecord(filePath, obj) {
  ensureDir(getGepAssetsDir());
  fs.appendFileSync(filePath, JSON.stringify(obj) + '\n', 'utf8');
}

// Record an EvolutionEvent.
function appendEventJsonl(eventObj) {
  appendJsonlRecord(eventsPath(), eventObj);
}

// Record a local mutation candidate.
function appendCandidateJsonl(candidateObj) {
  appendJsonlRecord(candidatesPath(), candidateObj);
}

// Record an externally-sourced candidate.
function appendExternalCandidateJsonl(obj) {
  appendJsonlRecord(externalCandidatesPath(), obj);
}
175
+
176
// Shared helper: return the last `limit` parseable records from a JSONL
// file. Missing files, I/O errors, and corrupt lines all degrade to fewer
// (or zero) results rather than throwing.
function readJsonlTail(filePath, limit) {
  try {
    if (!fs.existsSync(filePath)) return [];
    const lines = fs.readFileSync(filePath, 'utf8')
      .split('\n')
      .map((l) => l.trim())
      .filter(Boolean);
    const tail = lines.slice(Math.max(0, lines.length - limit));
    const records = [];
    for (const line of tail) {
      try {
        records.push(JSON.parse(line));
      } catch {}
    }
    return records;
  } catch {
    return [];
  }
}

function readRecentCandidates(limit = 20) {
  return readJsonlTail(candidatesPath(), limit);
}

function readRecentExternalCandidates(limit = 50) {
  return readJsonlTail(externalCandidatesPath(), limit);
}
199
+
200
// Safety net: ensure schema_version and asset_id are present before writing.
// Mutates `obj` in place; a failing computeAssetId leaves asset_id unset
// (best-effort). Non-objects are returned unchanged.
function ensureSchemaFields(obj) {
  if (!obj || typeof obj !== 'object') return obj;
  if (!obj.schema_version) obj.schema_version = SCHEMA_VERSION;
  if (!obj.asset_id) { try { obj.asset_id = computeAssetId(obj); } catch (e) {} }
  return obj;
}
207
+
208
// Insert or replace a Gene (matched by id) in genes.json, then persist
// the whole document atomically.
function upsertGene(geneObj) {
  ensureSchemaFields(geneObj);
  const store = readJsonIfExists(genesPath(), getDefaultGenes());
  const genes = Array.isArray(store.genes) ? store.genes : [];
  const existing = genes.findIndex((g) => g && g.id === geneObj.id);
  if (existing >= 0) {
    genes[existing] = geneObj;
  } else {
    genes.push(geneObj);
  }
  writeJsonAtomic(genesPath(), { version: store.version || 1, genes });
}
216
+
217
// Append a capsule record to capsules.json (legacy document store).
// Fix: guard against null/primitive input — previously such a value was
// pushed straight into the store, corrupting capsules.json; the guard now
// matches upsertCapsule's behavior of silently ignoring invalid input.
function appendCapsule(capsuleObj) {
  if (!capsuleObj || typeof capsuleObj !== 'object') return;
  ensureSchemaFields(capsuleObj);
  const current = readJsonIfExists(capsulesPath(), getDefaultCapsules());
  const capsules = Array.isArray(current.capsules) ? current.capsules : [];
  capsules.push(capsuleObj);
  writeJsonAtomic(capsulesPath(), { version: current.version || 1, capsules });
}
224
+
225
// Insert or replace a Capsule (matched by stringified id) in capsules.json.
// Anything that is not a `type: 'Capsule'` object with an id is ignored.
function upsertCapsule(capsuleObj) {
  if (!capsuleObj || capsuleObj.type !== 'Capsule' || !capsuleObj.id) return;
  ensureSchemaFields(capsuleObj);
  const current = readJsonIfExists(capsulesPath(), getDefaultCapsules());
  const capsules = Array.isArray(current.capsules) ? current.capsules : [];
  const idx = capsules.findIndex(c => c && c.type === 'Capsule' && String(c.id) === String(capsuleObj.id));
  if (idx >= 0) capsules[idx] = capsuleObj; else capsules.push(capsuleObj);
  writeJsonAtomic(capsulesPath(), { version: current.version || 1, capsules });
}
234
+
235
// The failed-capsule store is bounded: once it exceeds MAX entries it is
// trimmed down to the most recent TRIM_TO entries.
const FAILED_CAPSULES_MAX = 200;
const FAILED_CAPSULES_TRIM_TO = 100;

// Default (empty) failed-capsule store shape.
function getDefaultFailedCapsules() { return { version: 1, failed_capsules: [] }; }
239
+
240
// Record a failed capsule, trimming the store to the most recent
// FAILED_CAPSULES_TRIM_TO entries once it grows past FAILED_CAPSULES_MAX.
function appendFailedCapsule(capsuleObj) {
  if (!capsuleObj || typeof capsuleObj !== 'object') return;
  ensureSchemaFields(capsuleObj);
  const store = readJsonIfExists(failedCapsulesPath(), getDefaultFailedCapsules());
  let list = Array.isArray(store.failed_capsules) ? store.failed_capsules : [];
  list.push(capsuleObj);
  if (list.length > FAILED_CAPSULES_MAX) {
    list = list.slice(list.length - FAILED_CAPSULES_TRIM_TO);
  }
  writeJsonAtomic(failedCapsulesPath(), { version: store.version || 1, failed_capsules: list });
}
251
+
252
// Return the last `limit` failed capsules (default 50 when `limit` is
// missing, non-numeric, or non-positive).
function readRecentFailedCapsules(limit) {
  const parsed = Number(limit);
  const count = Number.isFinite(parsed) && parsed > 0 ? parsed : 50;
  try {
    const store = readJsonIfExists(failedCapsulesPath(), getDefaultFailedCapsules());
    const list = Array.isArray(store.failed_capsules) ? store.failed_capsules : [];
    return list.slice(Math.max(0, list.length - count));
  } catch (e) {
    return [];
  }
}
262
+
263
// Ensure all expected asset files exist on startup.
// Creates empty files for optional append-only stores so that
// external grep/read commands never fail with "No such file or directory".
function ensureAssetFiles() {
  const dir = getGepAssetsDir();
  ensureDir(dir);
  const files = [
    { path: genesPath(), defaultContent: JSON.stringify(getDefaultGenes(), null, 2) + '\n' },
    { path: capsulesPath(), defaultContent: JSON.stringify(getDefaultCapsules(), null, 2) + '\n' },
    { path: path.join(dir, 'genes.jsonl'), defaultContent: '' },
    // Fix: capsules.jsonl and external_candidates.jsonl were missing from
    // this list even though loadCapsules()/readRecentExternalCandidates()
    // read them, so fresh installs could still hit "No such file".
    { path: capsulesJsonlPath(), defaultContent: '' },
    { path: externalCandidatesPath(), defaultContent: '' },
    { path: eventsPath(), defaultContent: '' },
    { path: candidatesPath(), defaultContent: '' },
    { path: failedCapsulesPath(), defaultContent: JSON.stringify(getDefaultFailedCapsules(), null, 2) + '\n' },
  ];
  for (const f of files) {
    if (!fs.existsSync(f.path)) {
      try {
        fs.writeFileSync(f.path, f.defaultContent, 'utf8');
      } catch (e) {
        // Non-fatal: log but continue
        console.error(`[AssetStore] Failed to create ${f.path}: ${e.message}`);
      }
    }
  }
}
288
+
289
// Public API of the asset store. Path helpers are exported so callers can
// locate the underlying files; capsulesJsonlPath stays private.
module.exports = {
  loadGenes, loadCapsules, readAllEvents, getLastEventId,
  appendEventJsonl, appendCandidateJsonl, appendExternalCandidateJsonl,
  readRecentCandidates, readRecentExternalCandidates,
  upsertGene, appendCapsule, upsertCapsule,
  appendFailedCapsule, readRecentFailedCapsules,
  genesPath, capsulesPath, eventsPath, candidatesPath, externalCandidatesPath, failedCapsulesPath,
  ensureAssetFiles, buildValidationCmd,
};
@@ -0,0 +1,36 @@
1
+ const { computeAssetId, SCHEMA_VERSION } = require('./contentHash');
2
+
3
/**
 * Format asset preview for prompt inclusion.
 * Strings that parse to a non-empty JSON array are pretty-printed; any
 * other string (empty arrays, non-array JSON, or unparseable text) is
 * returned untouched. Non-string values are pretty-printed as JSON, and
 * falsy previews become '(none)'.
 */
function formatAssetPreview(preview) {
  if (!preview) return '(none)';
  if (typeof preview !== 'string') return JSON.stringify(preview, null, 2);

  let parsed;
  try {
    parsed = JSON.parse(preview);
  } catch (e) {
    return preview; // not JSON — keep as-is
  }
  const isNonEmptyArray = Array.isArray(parsed) && parsed.length > 0;
  return isNonEmptyArray ? JSON.stringify(parsed, null, 2) : preview;
}
22
+
23
/**
 * Validate and normalize an asset object.
 * Ensures schema version and ID are present.
 * Mutates `asset` in place; non-objects are returned unchanged, and a
 * failing computeAssetId leaves asset_id unset (best-effort).
 */
function normalizeAsset(asset) {
  if (!asset || typeof asset !== 'object') return asset;
  if (!asset.schema_version) asset.schema_version = SCHEMA_VERSION;
  if (!asset.asset_id) {
    try { asset.asset_id = computeAssetId(asset); } catch (e) {}
  }
  return asset;
}

module.exports = { formatAssetPreview, normalizeAsset };
@@ -0,0 +1,71 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
// Best-effort recursive mkdir; failures are swallowed because callers
// treat artifact directories as optional.
function ensureDir(dir) {
  try {
    if (fs.existsSync(dir)) return;
    fs.mkdirSync(dir, { recursive: true });
  } catch (e) {
    // ignore — best-effort only
  }
}

// Current time as an ISO-8601 string.
function nowIso() {
  return new Date().toISOString();
}
13
+
14
// Clamp `text` to roughly `maxChars` characters, appending a truncation
// marker when clipped (the kept prefix is maxChars-40 so the marker fits
// well within the budget). Non-numeric or non-positive limits disable
// clipping; null/undefined text becomes ''.
function clip(text, maxChars) {
  const str = String(text || '');
  const limit = Number(maxChars);
  if (!Number.isFinite(limit) || limit <= 0 || str.length <= limit) return str;
  const keep = Math.max(0, limit - 40);
  return `${str.slice(0, keep)}\n...[TRUNCATED]...\n`;
}
21
+
22
// Persist a GEP prompt to disk together with a JSON sidecar describing it
// (cycle/run ids, timestamp, optional metadata). File names are derived
// from sanitized cycle/run ids so they are safe on any filesystem.
// Returns the paths of both artifacts. Throws when memoryDir is missing.
function writePromptArtifact({ memoryDir, cycleId, runId, prompt, meta }) {
  const dir = String(memoryDir || '').trim();
  if (!dir) throw new Error('bridge: missing memoryDir');
  ensureDir(dir);

  const safeCycle = String(cycleId || 'cycle').replace(/[^a-zA-Z0-9_\-#]/g, '_');
  const safeRun = String(runId || Date.now()).replace(/[^a-zA-Z0-9_\-]/g, '_');
  const stem = `gep_prompt_${safeCycle}_${safeRun}`;
  const promptPath = path.join(dir, `${stem}.txt`);
  const metaPath = path.join(dir, `${stem}.json`);

  fs.writeFileSync(promptPath, String(prompt || ''), 'utf8');

  const sidecar = {
    type: 'GepPromptArtifact',
    at: nowIso(),
    cycle_id: cycleId || null,
    run_id: runId || null,
    prompt_path: promptPath,
    meta: meta && typeof meta === 'object' ? meta : null,
  };
  fs.writeFileSync(metaPath, JSON.stringify(sidecar, null, 2) + '\n', 'utf8');

  return { promptPath, metaPath };
}
52
+
53
// Render a sessions_spawn(...) call whose argument is valid JSON, so
// wrapper code can extract the task with JSON.parse rather than regex
// (the wrapper uses lastIndexOf('sessions_spawn(') + JSON.parse).
// Throws when `task` is missing or blank; other fields get defaults.
function renderSessionsSpawnCall({ task, agentId, label, cleanup }) {
  const trimmedTask = String(task || '').trim();
  if (!trimmedTask) throw new Error('bridge: missing task');

  const payload = JSON.stringify({
    task: trimmedTask,
    agentId: String(agentId || 'main'),
    cleanup: cleanup ? String(cleanup) : 'delete',
    label: String(label || 'gep_bridge'),
  });
  return `sessions_spawn(${payload})`;
}
65
+
66
// Public API: text clipping, prompt-artifact writer, and the
// sessions_spawn call renderer. ensureDir/nowIso remain module-private.
module.exports = {
  clip,
  writePromptArtifact,
  renderSessionsSpawnCall,
};