agentic-flow 1.7.0 → 1.7.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/.claude/agents/test-neural.md +5 -0
  2. package/.claude/settings.json +20 -19
  3. package/.claude/skills/agentdb-memory-patterns/SKILL.md +166 -0
  4. package/.claude/skills/agentdb-vector-search/SKILL.md +126 -0
  5. package/.claude/skills/agentic-flow/agentdb-memory-patterns/SKILL.md +166 -0
  6. package/.claude/skills/agentic-flow/agentdb-vector-search/SKILL.md +126 -0
  7. package/.claude/skills/agentic-flow/reasoningbank-intelligence/SKILL.md +201 -0
  8. package/.claude/skills/agentic-flow/swarm-orchestration/SKILL.md +179 -0
  9. package/.claude/skills/reasoningbank-intelligence/SKILL.md +201 -0
  10. package/.claude/skills/skill-builder/README.md +308 -0
  11. package/.claude/skills/skill-builder/SKILL.md +910 -0
  12. package/.claude/skills/skill-builder/docs/SPECIFICATION.md +358 -0
  13. package/.claude/skills/skill-builder/resources/schemas/skill-frontmatter.schema.json +41 -0
  14. package/.claude/skills/skill-builder/resources/templates/full-skill.template +118 -0
  15. package/.claude/skills/skill-builder/resources/templates/minimal-skill.template +38 -0
  16. package/.claude/skills/skill-builder/scripts/generate-skill.sh +334 -0
  17. package/.claude/skills/skill-builder/scripts/validate-skill.sh +198 -0
  18. package/.claude/skills/swarm-orchestration/SKILL.md +179 -0
  19. package/CHANGELOG.md +117 -0
  20. package/README.md +81 -17
  21. package/dist/cli-proxy.js +33 -2
  22. package/dist/mcp/standalone-stdio.js +4 -200
  23. package/dist/reasoningbank/index.js +4 -0
  24. package/dist/utils/cli.js +22 -0
  25. package/docs/AGENTDB_INTEGRATION.md +379 -0
  26. package/package.json +4 -4
  27. package/.claude/answer.md +0 -1
  28. package/dist/agentdb/benchmarks/comprehensive-benchmark.js +0 -664
  29. package/dist/agentdb/benchmarks/frontier-benchmark.js +0 -419
  30. package/dist/agentdb/benchmarks/reflexion-benchmark.js +0 -370
  31. package/dist/agentdb/cli/agentdb-cli.js +0 -717
  32. package/dist/agentdb/controllers/CausalMemoryGraph.js +0 -322
  33. package/dist/agentdb/controllers/CausalRecall.js +0 -281
  34. package/dist/agentdb/controllers/EmbeddingService.js +0 -118
  35. package/dist/agentdb/controllers/ExplainableRecall.js +0 -387
  36. package/dist/agentdb/controllers/NightlyLearner.js +0 -382
  37. package/dist/agentdb/controllers/ReflexionMemory.js +0 -239
  38. package/dist/agentdb/controllers/SkillLibrary.js +0 -276
  39. package/dist/agentdb/controllers/frontier-index.js +0 -9
  40. package/dist/agentdb/controllers/index.js +0 -8
  41. package/dist/agentdb/index.js +0 -32
  42. package/dist/agentdb/optimizations/BatchOperations.js +0 -198
  43. package/dist/agentdb/optimizations/QueryOptimizer.js +0 -225
  44. package/dist/agentdb/optimizations/index.js +0 -7
  45. package/dist/agentdb/tests/frontier-features.test.js +0 -665
  46. package/dist/memory/SharedMemoryPool.js +0 -211
  47. package/dist/memory/index.js +0 -6
  48. package/dist/reasoningbank/AdvancedMemory.js +0 -67
  49. package/dist/reasoningbank/HybridBackend.js +0 -91
  50. package/dist/reasoningbank/index-new.js +0 -87
  51. package/docs/AGENTDB_TESTING.md +0 -411
  52. package/scripts/run-validation.sh +0 -165
  53. package/scripts/test-agentdb.sh +0 -153
package/dist/agentdb/controllers/SkillLibrary.js
@@ -1,276 +0,0 @@
- /**
-  * SkillLibrary - Lifelong Learning Skill Management
-  *
-  * Promotes high-reward trajectories into reusable skills.
-  * Manages skill composition, relationships, and adaptive selection.
-  *
-  * Based on: "Voyager: An Open-Ended Embodied Agent with Large Language Models"
-  * https://arxiv.org/abs/2305.16291
-  */
- export class SkillLibrary {
-     db;
-     embedder;
-     constructor(db, embedder) {
-         this.db = db;
-         this.embedder = embedder;
-     }
-     /**
-      * Create a new skill manually or from an episode
-      */
-     async createSkill(skill) {
-         const stmt = this.db.prepare(`
-       INSERT INTO skills (
-         name, description, signature, code, success_rate, uses,
-         avg_reward, avg_latency_ms, created_from_episode, metadata
-       ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-     `);
-         const result = stmt.run(skill.name, skill.description || null, JSON.stringify(skill.signature), skill.code || null, skill.successRate, skill.uses, skill.avgReward, skill.avgLatencyMs, skill.createdFromEpisode || null, skill.metadata ? JSON.stringify(skill.metadata) : null);
-         const skillId = result.lastInsertRowid;
-         // Generate and store embedding
-         const text = this.buildSkillText(skill);
-         const embedding = await this.embedder.embed(text);
-         this.storeSkillEmbedding(skillId, embedding);
-         return skillId;
-     }
-     /**
-      * Update skill statistics after use
-      */
-     updateSkillStats(skillId, success, reward, latencyMs) {
-         const stmt = this.db.prepare(`
-       UPDATE skills
-       SET
-         uses = uses + 1,
-         success_rate = (success_rate * uses + ?) / (uses + 1),
-         avg_reward = (avg_reward * uses + ?) / (uses + 1),
-         avg_latency_ms = (avg_latency_ms * uses + ?) / (uses + 1)
-       WHERE id = ?
-     `);
-         stmt.run(success ? 1 : 0, reward, latencyMs, skillId);
-     }
-     /**
-      * Retrieve skills relevant to a task
-      */
-     async searchSkills(query) {
-         return this.retrieveSkills(query);
-     }
-     async retrieveSkills(query) {
-         const { task, k = 5, minSuccessRate = 0.5, preferRecent = true } = query;
-         // Generate query embedding
-         const queryEmbedding = await this.embedder.embed(task);
-         // Build filters
-         const filters = ['s.success_rate >= ?'];
-         const params = [minSuccessRate];
-         const stmt = this.db.prepare(`
-       SELECT
-         s.*,
-         se.embedding
-       FROM skills s
-       JOIN skill_embeddings se ON s.id = se.skill_id
-       WHERE ${filters.join(' AND ')}
-       ORDER BY ${preferRecent ? 's.last_used_at DESC,' : ''} s.success_rate DESC
-     `);
-         const rows = stmt.all(...params);
-         // Calculate similarities and rank
-         const skills = rows.map(row => {
-             const embedding = this.deserializeEmbedding(row.embedding);
-             const similarity = this.cosineSimilarity(queryEmbedding, embedding);
-             return {
-                 id: row.id,
-                 name: row.name,
-                 description: row.description,
-                 signature: JSON.parse(row.signature),
-                 code: row.code,
-                 successRate: row.success_rate,
-                 uses: row.uses,
-                 avgReward: row.avg_reward,
-                 avgLatencyMs: row.avg_latency_ms,
-                 createdFromEpisode: row.created_from_episode,
-                 metadata: row.metadata ? JSON.parse(row.metadata) : undefined,
-                 similarity
-             };
-         });
-         // Compute composite scores
-         skills.sort((a, b) => {
-             const scoreA = this.computeSkillScore(a);
-             const scoreB = this.computeSkillScore(b);
-             return scoreB - scoreA;
-         });
-         return skills.slice(0, k);
-     }
-     /**
-      * Link two skills with a relationship
-      */
-     linkSkills(link) {
-         const stmt = this.db.prepare(`
-       INSERT INTO skill_links (parent_skill_id, child_skill_id, relationship, weight, metadata)
-       VALUES (?, ?, ?, ?, ?)
-       ON CONFLICT(parent_skill_id, child_skill_id, relationship)
-       DO UPDATE SET weight = excluded.weight
-     `);
-         stmt.run(link.parentSkillId, link.childSkillId, link.relationship, link.weight, link.metadata ? JSON.stringify(link.metadata) : null);
-     }
-     /**
-      * Get skill composition plan (prerequisites and alternatives)
-      */
-     getSkillPlan(skillId) {
-         // Get main skill
-         const skill = this.getSkillById(skillId);
-         // Get prerequisites
-         const prereqStmt = this.db.prepare(`
-       SELECT s.* FROM skills s
-       JOIN skill_links sl ON s.id = sl.child_skill_id
-       WHERE sl.parent_skill_id = ? AND sl.relationship = 'prerequisite'
-       ORDER BY sl.weight DESC
-     `);
-         const prerequisites = prereqStmt.all(skillId).map(this.rowToSkill);
-         // Get alternatives
-         const altStmt = this.db.prepare(`
-       SELECT s.* FROM skills s
-       JOIN skill_links sl ON s.id = sl.child_skill_id
-       WHERE sl.parent_skill_id = ? AND sl.relationship = 'alternative'
-       ORDER BY sl.weight DESC, s.success_rate DESC
-     `);
-         const alternatives = altStmt.all(skillId).map(this.rowToSkill);
-         // Get refinements
-         const refStmt = this.db.prepare(`
-       SELECT s.* FROM skills s
-       JOIN skill_links sl ON s.id = sl.child_skill_id
-       WHERE sl.parent_skill_id = ? AND sl.relationship = 'refinement'
-       ORDER BY sl.weight DESC, s.created_at DESC
-     `);
-         const refinements = refStmt.all(skillId).map(this.rowToSkill);
-         return { skill, prerequisites, alternatives, refinements };
-     }
-     /**
-      * Consolidate high-reward episodes into skills
-      * This is the core learning mechanism
-      */
-     consolidateEpisodesIntoSkills(config) {
-         const { minAttempts = 3, minReward = 0.7, timeWindowDays = 7 } = config;
-         const stmt = this.db.prepare(`
-       SELECT
-         task,
-         COUNT(*) as attempt_count,
-         AVG(reward) as avg_reward,
-         AVG(success) as success_rate,
-         AVG(latency_ms) as avg_latency,
-         MAX(id) as latest_episode_id,
-         GROUP_CONCAT(id) as episode_ids
-       FROM episodes
-       WHERE ts > strftime('%s', 'now') - ?
-         AND reward >= ?
-       GROUP BY task
-       HAVING attempt_count >= ?
-     `);
-         const candidates = stmt.all(timeWindowDays * 86400, minReward, minAttempts);
-         let created = 0;
-         for (const candidate of candidates) {
-             // Check if skill already exists
-             const existing = this.db.prepare('SELECT id FROM skills WHERE name = ?').get(candidate.task);
-             if (!existing) {
-                 // Create new skill
-                 const skill = {
-                     name: candidate.task,
-                     description: `Auto-generated skill from successful episodes`,
-                     signature: {
-                         inputs: { task: 'string' },
-                         outputs: { result: 'any' }
-                     },
-                     successRate: candidate.success_rate,
-                     uses: candidate.attempt_count,
-                     avgReward: candidate.avg_reward,
-                     avgLatencyMs: candidate.avg_latency || 0,
-                     createdFromEpisode: candidate.latest_episode_id,
-                     metadata: {
-                         sourceEpisodes: candidate.episode_ids.split(',').map(Number),
-                         autoGenerated: true,
-                         consolidatedAt: Date.now()
-                     }
-                 };
-                 this.createSkill(skill).catch(err => {
-                     console.error('Error creating skill:', err);
-                 });
-                 created++;
-             }
-             else {
-                 // Update existing skill stats
-                 this.updateSkillStats(existing.id, candidate.success_rate > 0.5, candidate.avg_reward, candidate.avg_latency || 0);
-             }
-         }
-         return created;
-     }
-     /**
-      * Prune underperforming skills
-      */
-     pruneSkills(config) {
-         const { minUses = 3, minSuccessRate = 0.4, maxAgeDays = 60 } = config;
-         const stmt = this.db.prepare(`
-       DELETE FROM skills
-       WHERE uses < ?
-         AND success_rate < ?
-         AND created_at < strftime('%s', 'now') - ?
-     `);
-         const result = stmt.run(minUses, minSuccessRate, maxAgeDays * 86400);
-         return result.changes;
-     }
-     // ========================================================================
-     // Private Helper Methods
-     // ========================================================================
-     getSkillById(id) {
-         const stmt = this.db.prepare('SELECT * FROM skills WHERE id = ?');
-         const row = stmt.get(id);
-         if (!row)
-             throw new Error(`Skill ${id} not found`);
-         return this.rowToSkill(row);
-     }
-     rowToSkill(row) {
-         return {
-             id: row.id,
-             name: row.name,
-             description: row.description,
-             signature: JSON.parse(row.signature),
-             code: row.code,
-             successRate: row.success_rate,
-             uses: row.uses,
-             avgReward: row.avg_reward,
-             avgLatencyMs: row.avg_latency_ms,
-             createdFromEpisode: row.created_from_episode,
-             metadata: row.metadata ? JSON.parse(row.metadata) : undefined
-         };
-     }
-     buildSkillText(skill) {
-         const parts = [skill.name];
-         if (skill.description)
-             parts.push(skill.description);
-         parts.push(JSON.stringify(skill.signature));
-         return parts.join('\n');
-     }
-     storeSkillEmbedding(skillId, embedding) {
-         const stmt = this.db.prepare(`
-       INSERT INTO skill_embeddings (skill_id, embedding)
-       VALUES (?, ?)
-     `);
-         stmt.run(skillId, Buffer.from(embedding.buffer));
-     }
-     deserializeEmbedding(buffer) {
-         return new Float32Array(buffer.buffer, buffer.byteOffset, buffer.length / 4);
-     }
-     cosineSimilarity(a, b) {
-         let dotProduct = 0;
-         let normA = 0;
-         let normB = 0;
-         for (let i = 0; i < a.length; i++) {
-             dotProduct += a[i] * b[i];
-             normA += a[i] * a[i];
-             normB += b[i] * b[i];
-         }
-         return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
-     }
-     computeSkillScore(skill) {
-         // Composite score: similarity * 0.4 + success_rate * 0.3 + (uses/1000) * 0.1 + avg_reward * 0.2
-         return (skill.similarity * 0.4 +
-             skill.successRate * 0.3 +
-             Math.min(skill.uses / 1000, 1.0) * 0.1 +
-             skill.avgReward * 0.2);
-     }
- }
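The removed SkillLibrary ranked retrieved skills by a fixed composite score (similarity 0.4, success rate 0.3, capped usage 0.1, average reward 0.2). For anyone who consumed this class before 1.7.3, here is a minimal usage sketch. It is not part of the package diff: the import path follows the compatibility shim shown later in this diff, and the `db` handle (better-sqlite3-style, with the skills/skill_embeddings tables already created) and `embedder` (async `embed(text)`) are assumed to be supplied by the caller.

```javascript
// Hypothetical usage sketch of the pre-1.7.3 SkillLibrary API (not from the package).
import { SkillLibrary } from 'agentdb/controllers/SkillLibrary'; // path per the compatibility shim below

export async function demoSkillLibrary(db, embedder) {
  const skills = new SkillLibrary(db, embedder);

  // Promote a known-good trajectory into a reusable skill.
  const skillId = await skills.createSkill({
    name: 'summarize-changelog',
    description: 'Summarize a CHANGELOG.md into release notes',
    signature: { inputs: { task: 'string' }, outputs: { result: 'any' } },
    successRate: 1, uses: 1, avgReward: 0.9, avgLatencyMs: 1200,
  });

  // Record another use; running averages are updated inside the SQL UPDATE.
  skills.updateSkillStats(skillId, true, 0.95, 800);

  // Retrieve skills for a new task, ranked by:
  // similarity*0.4 + successRate*0.3 + min(uses/1000, 1)*0.1 + avgReward*0.2
  return skills.retrieveSkills({ task: 'write release notes', k: 3, minSuccessRate: 0.5 });
}
```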
package/dist/agentdb/controllers/frontier-index.js
@@ -1,9 +0,0 @@
- /**
-  * AgentDB Frontier Features
-  *
-  * State-of-the-art memory capabilities
-  */
- export { CausalMemoryGraph } from './CausalMemoryGraph';
- export { ExplainableRecall } from './ExplainableRecall';
- export { CausalRecall } from './CausalRecall';
- export { NightlyLearner } from './NightlyLearner';
package/dist/agentdb/controllers/index.js
@@ -1,8 +0,0 @@
- /**
-  * AgentDB Controllers - State-of-the-Art Memory Systems
-  *
-  * Export all memory controllers for agent systems
-  */
- export { ReflexionMemory } from './ReflexionMemory';
- export { SkillLibrary } from './SkillLibrary';
- export { EmbeddingService } from './EmbeddingService';
package/dist/agentdb/index.js
@@ -1,32 +0,0 @@
- /**
-  * AgentDB Re-exports for Backwards Compatibility
-  *
-  * This module provides backwards-compatible exports for code that previously
-  * used embedded AgentDB controllers. Now proxies to agentdb npm package.
-  *
-  * @deprecated Import directly from specific agentdb paths for better tree-shaking
-  * @since v1.7.0 - Integrated agentdb as proper dependency
-  *
-  * Example migration:
-  * ```typescript
-  * // Old (still works)
-  * import { ReflexionMemory } from 'agentic-flow/agentdb';
-  *
-  * // New (recommended)
-  * import { ReflexionMemory } from 'agentdb/controllers/ReflexionMemory';
-  * ```
-  */
- // Import from individual controller paths (agentdb v1.3.9 exports pattern)
- export { ReflexionMemory } from 'agentdb/controllers/ReflexionMemory';
- export { SkillLibrary } from 'agentdb/controllers/SkillLibrary';
- export { EmbeddingService } from 'agentdb/controllers/EmbeddingService';
- export { CausalMemoryGraph } from 'agentdb/controllers/CausalMemoryGraph';
- export { CausalRecall } from 'agentdb/controllers/CausalRecall';
- export { NightlyLearner } from 'agentdb/controllers/NightlyLearner';
- export { ExplainableRecall } from 'agentdb/controllers/ExplainableRecall';
- // Note: These are custom types not exported from agentdb v1.3.9
- // Users should import from agentdb directly if needed
- // export type { LearningSystem } from 'agentdb/...';
- // export type { ReasoningBank } from 'agentdb/...';
- // Note: Optimizations not available in agentdb v1.3.9
- // Users can implement custom optimizations or use AgentDB's built-in features
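Because the shim above keeps the deprecated `agentic-flow/agentdb` entry point alive while 1.7.x delegates to the agentdb package, a quick (hypothetical) check in a consuming project is to confirm both specifiers resolve to the same implementation:

```javascript
// Hypothetical sanity check (assumes both specifiers resolve in your project):
// the deprecated shim should re-export the very same class object as agentdb itself.
import { ReflexionMemory as ViaShim } from 'agentic-flow/agentdb';
import { ReflexionMemory as Direct } from 'agentdb/controllers/ReflexionMemory';

console.assert(ViaShim === Direct, 'agentic-flow/agentdb should proxy to the agentdb package');
```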
package/dist/agentdb/optimizations/BatchOperations.js
@@ -1,198 +0,0 @@
- /**
-  * BatchOperations - Optimized Batch Processing for AgentDB
-  *
-  * Implements efficient batch operations:
-  * - Bulk inserts with transactions
-  * - Batch embedding generation
-  * - Parallel processing
-  * - Progress tracking
-  */
- export class BatchOperations {
-     db;
-     embedder;
-     config;
-     constructor(db, embedder, config) {
-         this.db = db;
-         this.embedder = embedder;
-         this.config = {
-             batchSize: 100,
-             parallelism: 4,
-             ...config
-         };
-     }
-     /**
-      * Bulk insert episodes with embeddings
-      */
-     async insertEpisodes(episodes) {
-         const totalBatches = Math.ceil(episodes.length / this.config.batchSize);
-         let completed = 0;
-         for (let i = 0; i < episodes.length; i += this.config.batchSize) {
-             const batch = episodes.slice(i, i + this.config.batchSize);
-             // Generate embeddings in parallel
-             const texts = batch.map(ep => this.buildEpisodeText(ep));
-             const embeddings = await this.embedder.embedBatch(texts);
-             // Insert with transaction
-             const transaction = this.db.transaction(() => {
-                 const episodeStmt = this.db.prepare(`
-         INSERT INTO episodes (
-           session_id, task, input, output, critique, reward, success,
-           latency_ms, tokens_used, tags, metadata
-         ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-       `);
-                 const embeddingStmt = this.db.prepare(`
-         INSERT INTO episode_embeddings (episode_id, embedding)
-         VALUES (?, ?)
-       `);
-                 batch.forEach((episode, idx) => {
-                     const result = episodeStmt.run(episode.sessionId, episode.task, episode.input || null, episode.output || null, episode.critique || null, episode.reward, episode.success ? 1 : 0, episode.latencyMs || null, episode.tokensUsed || null, episode.tags ? JSON.stringify(episode.tags) : null, episode.metadata ? JSON.stringify(episode.metadata) : null);
-                     const episodeId = result.lastInsertRowid;
-                     embeddingStmt.run(episodeId, Buffer.from(embeddings[idx].buffer));
-                 });
-             });
-             transaction();
-             completed += batch.length;
-             if (this.config.progressCallback) {
-                 this.config.progressCallback(completed, episodes.length);
-             }
-         }
-         return completed;
-     }
-     /**
-      * Bulk update embeddings for existing episodes
-      */
-     async regenerateEmbeddings(episodeIds) {
-         let episodes;
-         if (episodeIds) {
-             const placeholders = episodeIds.map(() => '?').join(',');
-             episodes = this.db.prepare(`SELECT id, task, critique, output FROM episodes WHERE id IN (${placeholders})`).all(...episodeIds);
-         }
-         else {
-             episodes = this.db.prepare('SELECT id, task, critique, output FROM episodes').all();
-         }
-         let completed = 0;
-         const totalBatches = Math.ceil(episodes.length / this.config.batchSize);
-         for (let i = 0; i < episodes.length; i += this.config.batchSize) {
-             const batch = episodes.slice(i, i + this.config.batchSize);
-             // Generate embeddings
-             const texts = batch.map((ep) => [ep.task, ep.critique, ep.output].filter(Boolean).join('\n'));
-             const embeddings = await this.embedder.embedBatch(texts);
-             // Update with transaction
-             const transaction = this.db.transaction(() => {
-                 const stmt = this.db.prepare(`
-         INSERT OR REPLACE INTO episode_embeddings (episode_id, embedding)
-         VALUES (?, ?)
-       `);
-                 batch.forEach((episode, idx) => {
-                     stmt.run(episode.id, Buffer.from(embeddings[idx].buffer));
-                 });
-             });
-             transaction();
-             completed += batch.length;
-             if (this.config.progressCallback) {
-                 this.config.progressCallback(completed, episodes.length);
-             }
-         }
-         return completed;
-     }
-     /**
-      * Parallel batch processing with worker pool
-      */
-     async processInParallel(items, processor) {
-         const results = [];
-         const chunks = this.chunkArray(items, this.config.parallelism);
-         for (const chunk of chunks) {
-             const chunkResults = await Promise.all(chunk.map(item => processor(item)));
-             results.push(...chunkResults);
-             if (this.config.progressCallback) {
-                 this.config.progressCallback(results.length, items.length);
-             }
-         }
-         return results;
-     }
-     /**
-      * Bulk delete with conditions
-      */
-     bulkDelete(table, conditions) {
-         const whereClause = Object.keys(conditions)
-             .map(key => `${key} = ?`)
-             .join(' AND ');
-         const values = Object.values(conditions);
-         const stmt = this.db.prepare(`DELETE FROM ${table} WHERE ${whereClause}`);
-         const result = stmt.run(...values);
-         return result.changes;
-     }
-     /**
-      * Bulk update with conditions
-      */
-     bulkUpdate(table, updates, conditions) {
-         const setClause = Object.keys(updates)
-             .map(key => `${key} = ?`)
-             .join(', ');
-         const whereClause = Object.keys(conditions)
-             .map(key => `${key} = ?`)
-             .join(' AND ');
-         const values = [...Object.values(updates), ...Object.values(conditions)];
-         const stmt = this.db.prepare(`UPDATE ${table} SET ${setClause} WHERE ${whereClause}`);
-         const result = stmt.run(...values);
-         return result.changes;
-     }
-     /**
-      * Vacuum and optimize database
-      */
-     optimize() {
-         console.log('🔧 Optimizing database...');
-         // Analyze tables for query planner
-         this.db.exec('ANALYZE');
-         // Rebuild indexes
-         const tables = this.db.prepare(`
-       SELECT name FROM sqlite_master
-       WHERE type='table' AND name NOT LIKE 'sqlite_%'
-     `).all();
-         for (const { name } of tables) {
-             this.db.exec(`REINDEX ${name}`);
-         }
-         // Vacuum to reclaim space
-         this.db.exec('VACUUM');
-         console.log('✅ Database optimized');
-     }
-     /**
-      * Get database statistics
-      */
-     getStats() {
-         const pageSize = this.db.pragma('page_size', { simple: true });
-         const pageCount = this.db.pragma('page_count', { simple: true });
-         const totalSize = pageSize * pageCount;
-         const tables = this.db.prepare(`
-       SELECT name FROM sqlite_master
-       WHERE type='table' AND name NOT LIKE 'sqlite_%'
-     `).all();
-         const tableStats = tables.map(({ name }) => {
-             const count = this.db.prepare(`SELECT COUNT(*) as count FROM ${name}`).get();
-             const pages = this.db.prepare(`SELECT COUNT(*) as count FROM dbstat WHERE name = ?`).get(name);
-             return {
-                 name,
-                 rows: count.count,
-                 size: (pages?.count || 0) * pageSize
-             };
-         });
-         return { totalSize, tableStats };
-     }
-     // ========================================================================
-     // Private Methods
-     // ========================================================================
-     buildEpisodeText(episode) {
-         const parts = [episode.task];
-         if (episode.critique)
-             parts.push(episode.critique);
-         if (episode.output)
-             parts.push(episode.output);
-         return parts.join('\n');
-     }
-     chunkArray(array, size) {
-         const chunks = [];
-         for (let i = 0; i < array.length; i += size) {
-             chunks.push(array.slice(i, i + size));
-         }
-         return chunks;
-     }
- }
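For completeness, a sketch of how the removed BatchOperations bulk-insert path was driven: embeddings are generated per batch via `embedBatch`, then each batch is written inside a single transaction with optional progress reporting. The import path mirrors the pre-1.7.3 dist layout listed above; whether it was reachable as a package subpath is an assumption, and the `db`/`embedder` arguments (better-sqlite3-style handle with the episodes/episode_embeddings tables, async `embedBatch(texts)`) are supplied by the caller.

```javascript
// Hypothetical usage sketch (not from the package); path reflects the pre-1.7.3
// layout removed in this release - adjust to wherever you keep a copy of the class.
import { BatchOperations } from 'agentic-flow/dist/agentdb/optimizations/BatchOperations.js';

export async function demoBatchInsert(db, embedder, episodes) {
  // Each episode: { sessionId, task, reward, success, input?, output?, critique?,
  //                 latencyMs?, tokensUsed?, tags?, metadata? }
  const batch = new BatchOperations(db, embedder, {
    batchSize: 100,
    parallelism: 4,
    progressCallback: (done, total) => console.log(`inserted ${done}/${total}`),
  });

  // Embeddings are generated per batch and written inside one transaction.
  const inserted = await batch.insertEpisodes(episodes);

  // Housekeeping: ANALYZE, REINDEX, VACUUM, then report table sizes.
  batch.optimize();
  return { inserted, stats: batch.getStats() };
}
```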