@ariso-ai/ivan 1.0.23 → 1.0.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185)
  1. package/.github/workflows/ivanagent.yml +1 -1
  2. package/README.md +29 -0
  3. package/dist/__tests__/cli.test.d.ts +2 -0
  4. package/dist/__tests__/cli.test.d.ts.map +1 -0
  5. package/dist/__tests__/cli.test.js +100 -0
  6. package/dist/__tests__/cli.test.js.map +1 -0
  7. package/dist/database/migration.d.ts +3 -2
  8. package/dist/database/migration.d.ts.map +1 -1
  9. package/dist/database/migration.js +4 -2
  10. package/dist/database/migration.js.map +1 -1
  11. package/dist/database/migrations/015_create_learnings_tables.d.ts +3 -0
  12. package/dist/database/migrations/015_create_learnings_tables.d.ts.map +1 -0
  13. package/dist/database/migrations/015_create_learnings_tables.js +32 -0
  14. package/dist/database/migrations/015_create_learnings_tables.js.map +1 -0
  15. package/dist/database/migrations/index.d.ts +1 -0
  16. package/dist/database/migrations/index.d.ts.map +1 -1
  17. package/dist/database/migrations/index.js +2 -0
  18. package/dist/database/migrations/index.js.map +1 -1
  19. package/dist/database/types.d.ts +1 -1
  20. package/dist/database/types.d.ts.map +1 -1
  21. package/dist/database.d.ts +1 -1
  22. package/dist/database.d.ts.map +1 -1
  23. package/dist/index.js +21 -4
  24. package/dist/index.js.map +1 -1
  25. package/dist/learnings/builder.d.ts +25 -0
  26. package/dist/learnings/builder.d.ts.map +1 -0
  27. package/dist/learnings/builder.js +247 -0
  28. package/dist/learnings/builder.js.map +1 -0
  29. package/dist/learnings/database.d.ts +45 -0
  30. package/dist/learnings/database.d.ts.map +1 -0
  31. package/dist/learnings/database.js +63 -0
  32. package/dist/learnings/database.js.map +1 -0
  33. package/dist/learnings/embeddings.d.ts +20 -0
  34. package/dist/learnings/embeddings.d.ts.map +1 -0
  35. package/dist/learnings/embeddings.js +54 -0
  36. package/dist/learnings/embeddings.js.map +1 -0
  37. package/dist/learnings/evidence-writer.d.ts +13 -0
  38. package/dist/learnings/evidence-writer.d.ts.map +1 -0
  39. package/dist/learnings/evidence-writer.js +182 -0
  40. package/dist/learnings/evidence-writer.js.map +1 -0
  41. package/dist/learnings/extractor.d.ts +32 -0
  42. package/dist/learnings/extractor.d.ts.map +1 -0
  43. package/dist/learnings/extractor.js +265 -0
  44. package/dist/learnings/extractor.js.map +1 -0
  45. package/dist/learnings/github-evidence.d.ts +89 -0
  46. package/dist/learnings/github-evidence.d.ts.map +1 -0
  47. package/dist/learnings/github-evidence.js +255 -0
  48. package/dist/learnings/github-evidence.js.map +1 -0
  49. package/dist/learnings/github-ingestion.d.ts +18 -0
  50. package/dist/learnings/github-ingestion.d.ts.map +1 -0
  51. package/dist/learnings/github-ingestion.js +29 -0
  52. package/dist/learnings/github-ingestion.js.map +1 -0
  53. package/dist/learnings/heuristics.d.ts +17 -0
  54. package/dist/learnings/heuristics.d.ts.map +1 -0
  55. package/dist/learnings/heuristics.js +52 -0
  56. package/dist/learnings/heuristics.js.map +1 -0
  57. package/dist/learnings/id.d.ts +11 -0
  58. package/dist/learnings/id.d.ts.map +1 -0
  59. package/dist/learnings/id.js +36 -0
  60. package/dist/learnings/id.js.map +1 -0
  61. package/dist/learnings/index.d.ts +9 -0
  62. package/dist/learnings/index.d.ts.map +1 -0
  63. package/dist/learnings/index.js +55 -0
  64. package/dist/learnings/index.js.map +1 -0
  65. package/dist/learnings/ingest-pr-command.d.ts +8 -0
  66. package/dist/learnings/ingest-pr-command.d.ts.map +1 -0
  67. package/dist/learnings/ingest-pr-command.js +15 -0
  68. package/dist/learnings/ingest-pr-command.js.map +1 -0
  69. package/dist/learnings/ingest-repo-command.d.ts +9 -0
  70. package/dist/learnings/ingest-repo-command.d.ts.map +1 -0
  71. package/dist/learnings/ingest-repo-command.js +62 -0
  72. package/dist/learnings/ingest-repo-command.js.map +1 -0
  73. package/dist/learnings/init-command.d.ts +15 -0
  74. package/dist/learnings/init-command.d.ts.map +1 -0
  75. package/dist/learnings/init-command.js +36 -0
  76. package/dist/learnings/init-command.js.map +1 -0
  77. package/dist/learnings/install-hooks-command.d.ts +17 -0
  78. package/dist/learnings/install-hooks-command.d.ts.map +1 -0
  79. package/dist/learnings/install-hooks-command.js +241 -0
  80. package/dist/learnings/install-hooks-command.js.map +1 -0
  81. package/dist/learnings/learning-writer.d.ts +6 -0
  82. package/dist/learnings/learning-writer.d.ts.map +1 -0
  83. package/dist/learnings/learning-writer.js +42 -0
  84. package/dist/learnings/learning-writer.js.map +1 -0
  85. package/dist/learnings/parser.d.ts +12 -0
  86. package/dist/learnings/parser.d.ts.map +1 -0
  87. package/dist/learnings/parser.js +133 -0
  88. package/dist/learnings/parser.js.map +1 -0
  89. package/dist/learnings/paths.d.ts +6 -0
  90. package/dist/learnings/paths.d.ts.map +1 -0
  91. package/dist/learnings/paths.js +11 -0
  92. package/dist/learnings/paths.js.map +1 -0
  93. package/dist/learnings/query-command.d.ts +9 -0
  94. package/dist/learnings/query-command.d.ts.map +1 -0
  95. package/dist/learnings/query-command.js +37 -0
  96. package/dist/learnings/query-command.js.map +1 -0
  97. package/dist/learnings/query.d.ts +7 -0
  98. package/dist/learnings/query.d.ts.map +1 -0
  99. package/dist/learnings/query.js +65 -0
  100. package/dist/learnings/query.js.map +1 -0
  101. package/dist/learnings/rebuild-command.d.ts +8 -0
  102. package/dist/learnings/rebuild-command.d.ts.map +1 -0
  103. package/dist/learnings/rebuild-command.js +16 -0
  104. package/dist/learnings/rebuild-command.js.map +1 -0
  105. package/dist/learnings/record-types.d.ts +98 -0
  106. package/dist/learnings/record-types.d.ts.map +1 -0
  107. package/dist/learnings/record-types.js +5 -0
  108. package/dist/learnings/record-types.js.map +1 -0
  109. package/dist/learnings/repository.d.ts +18 -0
  110. package/dist/learnings/repository.d.ts.map +1 -0
  111. package/dist/learnings/repository.js +73 -0
  112. package/dist/learnings/repository.js.map +1 -0
  113. package/dist/learnings/types.d.ts +17 -0
  114. package/dist/learnings/types.d.ts.map +1 -0
  115. package/dist/learnings/types.js +4 -0
  116. package/dist/learnings/types.js.map +1 -0
  117. package/dist/learnings/validator.d.ts +14 -0
  118. package/dist/learnings/validator.d.ts.map +1 -0
  119. package/dist/learnings/validator.js +44 -0
  120. package/dist/learnings/validator.js.map +1 -0
  121. package/dist/learnings/weighting.d.ts +33 -0
  122. package/dist/learnings/weighting.d.ts.map +1 -0
  123. package/dist/learnings/weighting.js +106 -0
  124. package/dist/learnings/weighting.js.map +1 -0
  125. package/dist/services/address-executor.d.ts +0 -1
  126. package/dist/services/address-executor.d.ts.map +1 -1
  127. package/dist/services/address-executor.js +1 -4
  128. package/dist/services/address-executor.js.map +1 -1
  129. package/dist/services/address-task-executor.d.ts +0 -1
  130. package/dist/services/address-task-executor.d.ts.map +1 -1
  131. package/dist/services/address-task-executor.js +18 -14
  132. package/dist/services/address-task-executor.js.map +1 -1
  133. package/dist/services/claude-cli-executor.d.ts +1 -1
  134. package/dist/services/claude-cli-executor.d.ts.map +1 -1
  135. package/dist/services/claude-executor.d.ts +1 -1
  136. package/dist/services/claude-executor.d.ts.map +1 -1
  137. package/dist/services/claude-executor.js +2 -2
  138. package/dist/services/claude-executor.js.map +1 -1
  139. package/dist/services/git-interfaces.d.ts +3 -3
  140. package/dist/services/git-interfaces.d.ts.map +1 -1
  141. package/dist/services/git-manager-cli.d.ts +1 -1
  142. package/dist/services/git-manager-cli.d.ts.map +1 -1
  143. package/dist/services/git-manager-cli.js +111 -49
  144. package/dist/services/git-manager-cli.js.map +1 -1
  145. package/dist/services/git-manager-pat.d.ts +2 -2
  146. package/dist/services/git-manager-pat.d.ts.map +1 -1
  147. package/dist/services/git-manager-pat.js +31 -8
  148. package/dist/services/git-manager-pat.js.map +1 -1
  149. package/dist/services/github-api-client.d.ts +39 -3
  150. package/dist/services/github-api-client.d.ts.map +1 -1
  151. package/dist/services/github-api-client.js +76 -8
  152. package/dist/services/github-api-client.js.map +1 -1
  153. package/dist/services/index.d.ts +2 -1
  154. package/dist/services/index.d.ts.map +1 -1
  155. package/dist/services/index.js.map +1 -1
  156. package/dist/services/job-manager.d.ts +1 -1
  157. package/dist/services/job-manager.d.ts.map +1 -1
  158. package/dist/services/job-manager.js.map +1 -1
  159. package/dist/services/openai-service.d.ts +1 -0
  160. package/dist/services/openai-service.d.ts.map +1 -1
  161. package/dist/services/openai-service.js +54 -0
  162. package/dist/services/openai-service.js.map +1 -1
  163. package/dist/services/pr-service-cli.d.ts +1 -1
  164. package/dist/services/pr-service-cli.d.ts.map +1 -1
  165. package/dist/services/pr-service-pat.d.ts +1 -1
  166. package/dist/services/pr-service-pat.d.ts.map +1 -1
  167. package/dist/services/pr-service-pat.js +2 -2
  168. package/dist/services/pr-service-pat.js.map +1 -1
  169. package/dist/services/repository-manager-cli.d.ts +1 -1
  170. package/dist/services/repository-manager-cli.d.ts.map +1 -1
  171. package/dist/services/repository-manager-cli.js.map +1 -1
  172. package/dist/services/repository-manager-pat.d.ts +3 -3
  173. package/dist/services/repository-manager-pat.d.ts.map +1 -1
  174. package/dist/services/repository-manager-pat.js.map +1 -1
  175. package/dist/services/service-factory.d.ts +1 -1
  176. package/dist/services/service-factory.d.ts.map +1 -1
  177. package/dist/services/task-executor.d.ts +3 -2
  178. package/dist/services/task-executor.d.ts.map +1 -1
  179. package/dist/services/task-executor.js +54 -13
  180. package/dist/services/task-executor.js.map +1 -1
  181. package/dist/types/non-interactive-config.d.ts +5 -0
  182. package/dist/types/non-interactive-config.d.ts.map +1 -1
  183. package/dist/web-server.d.ts.map +1 -1
  184. package/dist/web-server.js.map +1 -1
  185. package/package.json +7 -5
@@ -0,0 +1,247 @@
1
+ // Rebuilds the learnings SQLite database from scratch by reading all canonical JSONL files,
2
+ // validating them, and inserting records inside a single transaction.
3
+ // This is intentionally a full-replace build (not an incremental migration).
4
+ import { createHash } from 'crypto';
5
+ import fs from 'fs';
6
+ import path from 'path';
7
+ import { sql } from 'kysely';
8
+ import { createFreshLearningsDatabase, getLearningsDbPath, openLearningsDatabase } from './database.js';
9
+ import { buildEmbeddingInputString, EMBEDDING_MODEL, EMBEDDING_DIMENSIONS, embedTexts } from './embeddings.js';
10
+ import { LESSONS_JSONL_RELATIVE_PATH, resolveCanonicalLearningsPath } from './paths.js';
11
+ import { loadCanonicalRecords, sortByPathThenId } from './parser.js';
12
+ import { validateLearningsDataset } from './validator.js';
13
+ const EMBEDDING_BATCH_MAX_ITEMS = 64;
14
+ const EMBEDDING_BATCH_MAX_CHARS = 200_000;
15
+ /**
16
+ * Validates all canonical JSONL records, then creates a fresh SQLite database
17
+ * and bulk-inserts everything in one transaction.
18
+ */
19
+ export async function rebuildLearningsDatabase(repoPath) {
20
+ const dataset = loadCanonicalRecords(repoPath);
21
+ validateLearningsDataset(dataset);
22
+ const { cached, generated, dirty } = await resolveEmbeddings(dataset.learnings);
23
+ console.log(`Embeddings: ${cached} cached, ${generated} generated`);
24
+ if (dirty)
25
+ writeBackEmbeddings(repoPath, dataset.learnings);
26
+ const dbPath = getLearningsDbPath(repoPath);
27
+ const tmpPath = `${dbPath}.tmp`;
28
+ const db = await createFreshLearningsDatabase(repoPath, tmpPath);
29
+ try {
30
+ await insertDataset(db, dataset);
31
+ await storeJsonlHash(db, computeJsonlHash(repoPath));
32
+ await db.destroy();
33
+ fs.renameSync(tmpPath, dbPath);
34
+ return {
35
+ dbPath,
36
+ learningCount: dataset.learnings.length,
37
+ embeddingsCached: cached,
38
+ embeddingsGenerated: generated
39
+ };
40
+ }
41
+ catch (err) {
42
+ await db.destroy();
43
+ if (fs.existsSync(tmpPath)) {
44
+ fs.unlinkSync(tmpPath);
45
+ }
46
+ throw err;
47
+ }
48
+ }
49
/**
 * Reports whether `.ivan/db.sqlite` needs a rebuild: true when the database
 * file is absent, cannot be opened/queried, or its stored JSONL hash differs
 * from the current hash of the canonical JSONL files. Used by the pre-commit
 * hook to skip unnecessary rebuilds.
 */
export async function isLearningsDatabaseStale(repoPath) {
    if (!fs.existsSync(getLearningsDbPath(repoPath))) {
        return true;
    }
    const expectedHash = computeJsonlHash(repoPath);
    try {
        const db = openLearningsDatabase(repoPath, { readonly: true });
        let stored;
        try {
            stored = await db
                .selectFrom('meta')
                .select('value')
                .where('key', '=', 'jsonl_hash')
                .executeTakeFirst();
        }
        finally {
            await db.destroy();
        }
        return stored === undefined || stored.value !== expectedHash;
    }
    catch {
        // An unreadable database cannot be trusted — treat it as stale.
        return true;
    }
}
78
/**
 * Computes a SHA-256 digest over the relative paths and contents of the
 * canonical JSONL files (currently only the lessons file named by
 * `LESSONS_JSONL_RELATIVE_PATH`; missing files are skipped). Returns an empty
 * string when the learnings directory does not exist at all.
 */
export function computeJsonlHash(repoPath) {
    const root = path.resolve(repoPath);
    if (!fs.existsSync(resolveCanonicalLearningsPath(root))) {
        return '';
    }
    const digest = createHash('sha256');
    for (const relativePath of [LESSONS_JSONL_RELATIVE_PATH]) {
        const absolutePath = path.join(root, relativePath);
        if (!fs.existsSync(absolutePath)) {
            continue;
        }
        // NUL separators keep path/content boundaries unambiguous in the digest.
        digest.update(path.relative(root, absolutePath));
        digest.update('\0');
        digest.update(fs.readFileSync(absolutePath));
        digest.update('\0');
    }
    return digest.digest('hex');
}
105
/**
 * Resolves an embedding vector for every learning: reuses the cached vector
 * when the stored `embeddingInputHash` still matches a SHA-256 of the model
 * version plus the embedding input string, and regenerates the rest in bounded
 * batches so a large rebuild stays within provider request limits.
 * Mutates the learning records in place; returns cache hit/miss counts and
 * whether any record was actually updated.
 */
async function resolveEmbeddings(learnings) {
    let cached = 0;
    const misses = [];
    for (const learning of learnings) {
        const inputString = buildEmbeddingInputString(learning);
        const hash = createHash('sha256')
            .update(`${EMBEDDING_MODEL}@${EMBEDDING_DIMENSIONS}\n`)
            .update(inputString)
            .digest('hex');
        const isCacheHit = learning.embeddingInputHash === hash &&
            learning.embedding?.length === EMBEDDING_DIMENSIONS;
        if (isCacheHit) {
            cached += 1;
            continue;
        }
        misses.push({ learning, inputString, hash });
    }
    let generated = 0;
    if (misses.length > 0) {
        for (const batch of chunkEmbeddingRequests(misses)) {
            try {
                const vectors = await embedTexts(batch.map((item) => item.inputString));
                batch.forEach((item, i) => {
                    item.learning.embedding = vectors[i];
                    item.learning.embeddingInputHash = item.hash;
                });
                generated += batch.length;
            }
            catch (err) {
                // Best-effort: a failed batch is logged and skipped, not fatal.
                console.error(`Warning: could not generate embeddings for batch of ${batch.length} learning(s) (${err.message}). Those rows will be skipped for this rebuild.`);
            }
        }
    }
    return { cached, generated, dirty: generated > 0 };
}
146
/**
 * Splits embedding requests into batches bounded by item count and by total
 * input characters. A batch always contains at least one item, so a single
 * oversized input still gets its own (possibly over-limit) batch rather than
 * being dropped.
 *
 * @param items requests carrying an `inputString` to embed
 * @param maxItems maximum entries per batch (defaults to EMBEDDING_BATCH_MAX_ITEMS)
 * @param maxChars maximum summed `inputString` length per batch (defaults to EMBEDDING_BATCH_MAX_CHARS)
 * @returns the items partitioned, in order, into non-empty batches
 */
function chunkEmbeddingRequests(items, maxItems = EMBEDDING_BATCH_MAX_ITEMS, maxChars = EMBEDDING_BATCH_MAX_CHARS) {
    const batches = [];
    let currentBatch = [];
    let currentChars = 0;
    for (const item of items) {
        const itemChars = item.inputString.length;
        const wouldOverflowItems = currentBatch.length >= maxItems;
        const wouldOverflowChars = currentBatch.length > 0 &&
            currentChars + itemChars > maxChars;
        if (wouldOverflowItems || wouldOverflowChars) {
            batches.push(currentBatch);
            currentBatch = [];
            currentChars = 0;
        }
        currentBatch.push(item);
        currentChars += itemChars;
    }
    if (currentBatch.length > 0) {
        batches.push(currentBatch);
    }
    return batches;
}
168
/**
 * Rewrites the lessons JSONL file, merging the in-memory `embedding` and
 * `embeddingInputHash` fields back onto each line whose `id` matches a
 * learning. Lines without a matching id are re-serialized unchanged; blank
 * lines are dropped. Writes through a `.tmp` file plus rename so the file is
 * never left half-written.
 */
function writeBackEmbeddings(repoPath, learnings) {
    const filePath = resolveCanonicalLearningsPath(path.resolve(repoPath), 'lessons.jsonl');
    if (!fs.existsSync(filePath)) {
        return;
    }
    const learningById = new Map(learnings.map((learning) => [learning.id, learning]));
    const updatedLines = [];
    for (const rawLine of fs.readFileSync(filePath, 'utf8').split('\n')) {
        const trimmed = rawLine.trim();
        if (trimmed === '') {
            continue;
        }
        const parsed = JSON.parse(trimmed);
        const learning = typeof parsed['id'] === 'string'
            ? learningById.get(parsed['id'])
            : undefined;
        if (learning !== undefined) {
            parsed['embedding'] = learning.embedding;
            parsed['embeddingInputHash'] = learning.embeddingInputHash;
        }
        updatedLines.push(JSON.stringify(parsed));
    }
    const tmpPath = `${filePath}.tmp`;
    fs.writeFileSync(tmpPath, updatedLines.map((line) => `${line}\n`).join(''), 'utf8');
    fs.renameSync(tmpPath, filePath);
}
201
/**
 * Inserts all learnings (sorted deterministically by path then id) and their
 * embedding vectors into the database in a single transaction, for atomicity
 * and performance. Learnings without an embedding simply get no
 * `learning_vectors` row.
 */
async function insertDataset(db, dataset) {
    await db.transaction().execute(async (trx) => {
        // Copy before sorting so the caller's array order is left untouched.
        for (const learning of [...dataset.learnings].sort(sortByPathThenId)) {
            await trx
                .insertInto('learnings')
                .values({
                id: learning.id,
                kind: learning.kind,
                source_type: learning.source_type ?? null,
                source_url: learning.source_url ?? null,
                title: learning.title ?? null,
                statement: learning.statement,
                rationale: learning.rationale ?? null,
                applicability: learning.applicability ?? null,
                confidence: learning.confidence ?? null,
                status: learning.status,
                created_at: learning.created_at,
                updated_at: learning.updated_at
            })
                .execute();
            if (learning.embedding && learning.embedding.length > 0) {
                // The vector is stored as its raw float32 bytes; raw SQL is used
                // because the vector table is not part of the Kysely schema type.
                const vectorBuffer = Buffer.from(new Float32Array(learning.embedding).buffer);
                await sql `INSERT INTO learning_vectors (learning_id, vector) VALUES (${learning.id}, ${vectorBuffer})`.execute(trx);
            }
        }
    });
}
232
/** Writes the JSONL content hash into the `meta` table so staleness checks can compare it later. */
async function storeJsonlHash(db, hash) {
    // Compute the timestamp once so the insert path and the conflict-update
    // path cannot disagree when the call straddles a millisecond boundary
    // (the original evaluated `new Date().toISOString()` twice).
    const updatedAt = new Date().toISOString();
    await db
        .insertInto('meta')
        .values({
        key: 'jsonl_hash',
        value: hash,
        updated_at: updatedAt
    })
        .onConflict((oc) => oc.column('key').doUpdateSet({
        value: hash,
        updated_at: updatedAt
    }))
        .execute();
}
247
+ //# sourceMappingURL=builder.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"builder.js","sourceRoot":"","sources":["../../src/learnings/builder.ts"],"names":[],"mappings":"AAAA,4FAA4F;AAC5F,sEAAsE;AACtE,6EAA6E;AAE7E,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AACpC,OAAO,EAAE,MAAM,IAAI,CAAC;AACpB,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,GAAG,EAAe,MAAM,QAAQ,CAAC;AAE1C,OAAO,EACL,4BAA4B,EAC5B,kBAAkB,EAClB,qBAAqB,EAEtB,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,yBAAyB,EACzB,eAAe,EACf,oBAAoB,EACpB,UAAU,EACX,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,2BAA2B,EAC3B,6BAA6B,EAC9B,MAAM,YAAY,CAAC;AACpB,OAAO,EAAE,oBAAoB,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC;AACrE,OAAO,EAAE,wBAAwB,EAAE,MAAM,gBAAgB,CAAC;AAU1D,MAAM,yBAAyB,GAAG,EAAE,CAAC;AACrC,MAAM,yBAAyB,GAAG,OAAO,CAAC;AAE1C;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,wBAAwB,CAC5C,QAAgB;IAEhB,MAAM,OAAO,GAAG,oBAAoB,CAAC,QAAQ,CAAC,CAAC;IAC/C,wBAAwB,CAAC,OAAO,CAAC,CAAC;IAElC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,GAAG,MAAM,iBAAiB,CAC1D,OAAO,CAAC,SAAS,CAClB,CAAC;IACF,OAAO,CAAC,GAAG,CAAC,eAAe,MAAM,YAAY,SAAS,YAAY,CAAC,CAAC;IAEpE,IAAI,KAAK;QAAE,mBAAmB,CAAC,QAAQ,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;IAE5D,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,OAAO,GAAG,GAAG,MAAM,MAAM,CAAC;IAChC,MAAM,EAAE,GAAG,MAAM,4BAA4B,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IAEjE,IAAI,CAAC;QACH,MAAM,aAAa,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;QACjC,MAAM,cAAc,CAAC,EAAE,EAAE,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;QACrD,MAAM,EAAE,CAAC,OAAO,EAAE,CAAC;QACnB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAE/B,OAAO;YACL,MAAM;YACN,aAAa,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM;YACvC,gBAAgB,EAAE,MAAM;YACxB,mBAAmB,EAAE,SAAS;SAC/B,CAAC;IACJ,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM,EAAE,CAAC,OAAO,EAAE,CAAC;QACnB,IAAI,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC3B,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,MAAM,GAAG,CAAC;IACZ,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,wBAAwB,CAC5C,QAAgB;IAEhB,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5C,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,WAAW,GAAG,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAE/C,IAAI,CAAC;QACH,MAAM,EAAE,GA
AG,qBAAqB,CAAC,QAAQ,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC/D,IAAI,CAAC;YACH,MAAM,GAAG,GAAG,MAAM,EAAE;iBACjB,UAAU,CAAC,MAAM,CAAC;iBAClB,MAAM,CAAC,OAAO,CAAC;iBACf,KAAK,CAAC,KAAK,EAAE,GAAG,EAAE,YAAY,CAAC;iBAC/B,gBAAgB,EAAE,CAAC;YACtB,OAAO,CAAC,GAAG,IAAI,GAAG,CAAC,KAAK,KAAK,WAAW,CAAC;QAC3C,CAAC;gBAAS,CAAC;YACT,MAAM,EAAE,CAAC,OAAO,EAAE,CAAC;QACrB,CAAC;IACH,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,IAAI,CAAC;IACd,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB,CAAC,QAAgB;IAC/C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACxC,MAAM,YAAY,GAAG,6BAA6B,CAAC,QAAQ,CAAC,CAAC;IAE7D,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,CAAC;QACjC,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,MAAM,KAAK,GAAa,EAAE,CAAC;IAE3B,KAAK,MAAM,YAAY,IAAI,CAAC,2BAA2B,CAAC,EAAE,CAAC;QACzD,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;QAC/C,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC;YACxB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACnB,CAAC;IACH,CAAC;IAED,MAAM,IAAI,GAAG,UAAU,CAAC,QAAQ,CAAC,CAAC;IAClC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QACzB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAC;QAC3C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAClB,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC;QACnC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;IACpB,CAAC;IAED,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAC5B,CAAC;AAED;;;;;GAKG;AACH,KAAK,UAAU,iBAAiB,CAC9B,SAA2B;IAE3B,MAAM,KAAK,GAIN,EAAE,CAAC;IACR,IAAI,MAAM,GAAG,CAAC,CAAC;IAEf,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,MAAM,WAAW,GAAG,yBAAyB,CAAC,QAAQ,CAAC,CAAC;QACxD,MAAM,IAAI,GAAG,UAAU,CAAC,QAAQ,CAAC;aAC9B,MAAM,CAAC,GAAG,eAAe,IAAI,oBAAoB,IAAI,CAAC;aACtD,MAAM,CAAC,WAAW,CAAC;aACnB,MAAM,CAAC,KAAK,CAAC,CAAC;QAEjB,IACE,QAAQ,CAAC,kBAAkB,KAAK,IAAI;YACpC,QAAQ,CAAC,SAAS,EAAE,MAAM,KAAK,oBAAoB,EACnD,CAAC;YACD,MAAM,IAAI,CAAC,CAAC;QACd,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC,CAAC;QAC9C,CAAC;IACH,CAAC;IAED,IAAI,SAAS,GAAG,CAAC,CAAC;IAElB,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACrB,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,KAAK,CAAC,EAAE,CAAC;
YAClD,IAAI,CAAC;gBACH,MAAM,OAAO,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;gBACxE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACtC,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;oBACzC,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,kBAAkB,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBACvD,CAAC;gBACD,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;YAC5B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,OAAO,CAAC,KAAK,CACX,uDAAuD,KAAK,CAAC,MAAM,iBAAkB,GAAa,CAAC,OAAO,iDAAiD,CAC5J,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAED,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,GAAG,CAAC,EAAE,CAAC;AACrD,CAAC;AAED,SAAS,sBAAsB,CAC7B,KAAU;IAEV,MAAM,OAAO,GAAU,EAAE,CAAC;IAC1B,IAAI,YAAY,GAAQ,EAAE,CAAC;IAC3B,IAAI,YAAY,GAAG,CAAC,CAAC;IAErB,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QACzB,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;QAC1C,MAAM,kBAAkB,GAAG,YAAY,CAAC,MAAM,IAAI,yBAAyB,CAAC;QAC5E,MAAM,kBAAkB,GACtB,YAAY,CAAC,MAAM,GAAG,CAAC;YACvB,YAAY,GAAG,SAAS,GAAG,yBAAyB,CAAC;QAEvD,IAAI,kBAAkB,IAAI,kBAAkB,EAAE,CAAC;YAC7C,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YAC3B,YAAY,GAAG,EAAE,CAAC;YAClB,YAAY,GAAG,CAAC,CAAC;QACnB,CAAC;QAED,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxB,YAAY,IAAI,SAAS,CAAC;IAC5B,CAAC;IAED,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC5B,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;IAC7B,CAAC;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;GAGG;AACH,SAAS,mBAAmB,CAC1B,QAAgB,EAChB,SAA2B;IAE3B,MAAM,QAAQ,GAAG,6BAA6B,CAC5C,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EACtB,eAAe,CAChB,CAAC;IAEF,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC7B,OAAO;IACT,CAAC;IAED,MAAM,YAAY,GAAG,IAAI,GAAG,EAA0B,CAAC;IACvD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAC5D,MAAM,YAAY,GAAa,EAAE,CAAC;IAElC,KAAK,MAAM,OAAO,IAAI,KAAK,EAAE,CAAC;QAC5B,MAAM,OAAO,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;QAC/B,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,SAAS;QACX,CAAC;QAED,M
AAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAA4B,CAAC;QAC9D,MAAM,EAAE,GAAG,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;QACvE,MAAM,QAAQ,GAAG,EAAE,KAAK,SAAS,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;QAErE,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;YAC3B,MAAM,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,SAAS,CAAC;YACzC,MAAM,CAAC,oBAAoB,CAAC,GAAG,QAAQ,CAAC,kBAAkB,CAAC;QAC7D,CAAC;QAED,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;IAC5C,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,QAAQ,MAAM,CAAC;IAClC,EAAE,CAAC,aAAa,CAAC,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IAC9E,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,aAAa,CAC1B,EAA6B,EAC7B,OAAyB;IAEzB,MAAM,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;QAC3C,KAAK,MAAM,QAAQ,IAAI,CAAC,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE,CAAC;YACrE,MAAM,GAAG;iBACN,UAAU,CAAC,WAAW,CAAC;iBACvB,MAAM,CAAC;gBACN,EAAE,EAAE,QAAQ,CAAC,EAAE;gBACf,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,WAAW,EAAE,QAAQ,CAAC,WAAW,IAAI,IAAI;gBACzC,UAAU,EAAE,QAAQ,CAAC,UAAU,IAAI,IAAI;gBACvC,KAAK,EAAE,QAAQ,CAAC,KAAK,IAAI,IAAI;gBAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,IAAI,IAAI;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa,IAAI,IAAI;gBAC7C,UAAU,EAAE,QAAQ,CAAC,UAAU,IAAI,IAAI;gBACvC,MAAM,EAAE,QAAQ,CAAC,MAAM;gBACvB,UAAU,EAAE,QAAQ,CAAC,UAAU;gBAC/B,UAAU,EAAE,QAAQ,CAAC,UAAU;aAChC,CAAC;iBACD,OAAO,EAAE,CAAC;YAEb,IAAI,QAAQ,CAAC,SAAS,IAAI,QAAQ,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACxD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAC9B,IAAI,YAAY,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,MAAM,CAC5C,CAAC;gBACF,MAAM,GAAG,CAAA,8DAA8D,QAAQ,CAAC,EAAE,KAAK,YAAY,GAAG,CAAC,OAAO,CAC5G,GAAG,CACJ,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,oGAAoG;AACpG,KAAK,UAAU,cAAc,CAC3B,EAA6B,EAC7B,IAAY;IAEZ,MAAM,EAAE;SACL,UAAU,CAAC,MAAM,CAAC;SAClB,MAAM,CAAC;QACN,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,IAAI;QACX,UAAU,EAAE,IAAI,IAAI,EAAE,CA
AC,WAAW,EAAE;KACrC,CAAC;SACD,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,CACjB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,WAAW,CAAC;QAC3B,KAAK,EAAE,IAAI;QACX,UAAU,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;KACrC,CAAC,CACH;SACA,OAAO,EAAE,CAAC;AACf,CAAC"}
@@ -0,0 +1,45 @@
1
+ import { Kysely } from 'kysely';
2
/** Kysely schema type for the learnings SQLite database. */
export interface LearningsDatabase {
    /** One row per learning record. */
    learnings: {
        id: string;
        kind: string;
        source_type: string | null;
        source_url: string | null;
        title: string | null;
        statement: string;
        rationale: string | null;
        applicability: string | null;
        confidence: number | null;
        status: string;
        created_at: string;
        updated_at: string;
    };
    /** Key/value build metadata, e.g. the `jsonl_hash` staleness marker. */
    meta: {
        key: string;
        value: string;
        updated_at: string;
    };
    /** Bookkeeping for applied schema migrations. */
    migrations: {
        id: number;
        name: string;
        executed_at: string;
    };
}
/** Returns the absolute path to the learnings SQLite database for the given repo root. */
export declare function getLearningsDbPath(repoPath: string): string;
/**
 * Deletes any existing database (including WAL/SHM files), creates a fresh one,
 * applies migrations, and returns an open Kysely connection.
 * Uses `DELETE` journal mode so no WAL files are created during the bulk rebuild.
 * Pass an explicit `dbPath` to write to a non-default location (e.g. a `.tmp` file).
 */
export declare function createFreshLearningsDatabase(repoPath: string, dbPath?: string): Promise<Kysely<LearningsDatabase>>;
/**
 * Opens an existing learnings database for reading or writing.
 * Throws a descriptive error (with the rebuild command) if the file does not exist.
 */
export declare function openLearningsDatabase(repoPath: string, options?: {
    readonly?: boolean;
}): Kysely<LearningsDatabase>;
45
+ //# sourceMappingURL=database.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"database.d.ts","sourceRoot":"","sources":["../../src/learnings/database.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,MAAM,EAAiB,MAAM,QAAQ,CAAC;AAM/C,4DAA4D;AAC5D,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE;QACT,EAAE,EAAE,MAAM,CAAC;QACX,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;QAC3B,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;QAC1B,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;QACrB,SAAS,EAAE,MAAM,CAAC;QAClB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;QACzB,aAAa,EAAE,MAAM,GAAG,IAAI,CAAC;QAC7B,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;QAC1B,MAAM,EAAE,MAAM,CAAC;QACf,UAAU,EAAE,MAAM,CAAC;QACnB,UAAU,EAAE,MAAM,CAAC;KACpB,CAAC;IACF,IAAI,EAAE;QACJ,GAAG,EAAE,MAAM,CAAC;QACZ,KAAK,EAAE,MAAM,CAAC;QACd,UAAU,EAAE,MAAM,CAAC;KACpB,CAAC;IACF,UAAU,EAAE;QACV,EAAE,EAAE,MAAM,CAAC;QACX,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,EAAE,MAAM,CAAC;KACrB,CAAC;CACH;AAED,0FAA0F;AAC1F,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAE3D;AAED;;;;;GAKG;AACH,wBAAsB,4BAA4B,CAChD,QAAQ,EAAE,MAAM,EAChB,MAAM,CAAC,EAAE,MAAM,GACd,OAAO,CAAC,MAAM,CAAC,iBAAiB,CAAC,CAAC,CAepC;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,QAAQ,EAAE,MAAM,EAChB,OAAO,GAAE;IAAE,QAAQ,CAAC,EAAE,OAAO,CAAA;CAAO,GACnC,MAAM,CAAC,iBAAiB,CAAC,CAkB3B"}
@@ -0,0 +1,63 @@
1
+ // SQLite database lifecycle helpers for the learnings store.
2
+ // The DB is a derived artifact rebuilt from JSONL source files—never edit it directly.
3
+ // WAL files (.db-shm, .db-wal) are removed alongside the main file to avoid stale state.
4
+ import fs from 'fs';
5
+ import path from 'path';
6
+ import BetterSqlite3 from 'better-sqlite3';
7
+ import { Kysely, SqliteDialect } from 'kysely';
8
+ import * as sqliteVec from 'sqlite-vec';
9
+ import { LEARNINGS_DB_RELATIVE_PATH } from './paths.js';
10
+ import { MigrationManager } from '../database/migration.js';
11
+ import { learningsMigrations } from '../database/migrations/index.js';
12
/** Returns the absolute path to the learnings SQLite database for the given repo root. */
export function getLearningsDbPath(repoPath) {
    const repoRoot = path.resolve(repoPath);
    return path.join(repoRoot, LEARNINGS_DB_RELATIVE_PATH);
}
16
/**
 * Deletes any existing database (including WAL/SHM files), creates a fresh one,
 * applies migrations, and returns an open Kysely connection.
 * Uses `DELETE` journal mode so no WAL files are created during the bulk rebuild.
 * Pass an explicit `dbPath` to write to a non-default location (e.g. a `.tmp` file).
 */
export async function createFreshLearningsDatabase(repoPath, dbPath) {
    const targetPath = dbPath ?? getLearningsDbPath(repoPath);
    // Start from a clean slate: drop any previous DB plus its WAL/SHM siblings.
    removeLearningsDatabaseFiles(targetPath);
    fs.mkdirSync(path.dirname(targetPath), { recursive: true });
    const connection = new BetterSqlite3(targetPath);
    sqliteVec.load(connection);
    // DELETE journal mode keeps the rebuild from spawning -wal/-shm sidecar files.
    connection.pragma('journal_mode = DELETE');
    connection.pragma('foreign_keys = ON');
    const db = new Kysely({
        dialect: new SqliteDialect({ database: connection })
    });
    await new MigrationManager(db, learningsMigrations).runMigrations();
    return db;
}
36
/**
 * Opens an existing learnings database for reading or writing.
 * Throws a descriptive error (with the rebuild command) if the file does not exist.
 */
export function openLearningsDatabase(repoPath, options = {}) {
    const dbPath = getLearningsDbPath(repoPath);
    if (!fs.existsSync(dbPath)) {
        throw new Error(`Missing ${dbPath}. Run "ivan learnings rebuild --repo ${path.resolve(repoPath)}" first.`);
    }
    // `fileMustExist` guards against better-sqlite3 silently creating an empty DB
    // if the file disappears between the existence check and the open.
    const connection = new BetterSqlite3(dbPath, {
        readonly: options.readonly ?? false,
        fileMustExist: true
    });
    sqliteVec.load(connection);
    connection.pragma('foreign_keys = ON');
    return new Kysely({
        dialect: new SqliteDialect({ database: connection })
    });
}
55
/** Removes the `.db`, `.db-shm`, and `.db-wal` files if they exist (safe to call when none are present). */
function removeLearningsDatabaseFiles(dbPath) {
    for (const candidate of [dbPath, `${dbPath}-shm`, `${dbPath}-wal`]) {
        // `force: true` makes missing files a no-op, avoiding the
        // existsSync/unlinkSync check-then-act race of the previous version.
        fs.rmSync(candidate, { force: true });
    }
}
63
+ //# sourceMappingURL=database.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"database.js","sourceRoot":"","sources":["../../src/learnings/database.ts"],"names":[],"mappings":"AAAA,6DAA6D;AAC7D,uFAAuF;AACvF,yFAAyF;AAEzF,OAAO,EAAE,MAAM,IAAI,CAAC;AACpB,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,aAAa,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,QAAQ,CAAC;AAC/C,OAAO,KAAK,SAAS,MAAM,YAAY,CAAC;AACxC,OAAO,EAAE,0BAA0B,EAAE,MAAM,YAAY,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AAC5D,OAAO,EAAE,mBAAmB,EAAE,MAAM,iCAAiC,CAAC;AA8BtE,0FAA0F;AAC1F,MAAM,UAAU,kBAAkB,CAAC,QAAgB;IACjD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,0BAA0B,CAAC,CAAC;AACvE,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,4BAA4B,CAChD,QAAgB,EAChB,MAAe;IAEf,MAAM,cAAc,GAAG,MAAM,IAAI,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC9D,4BAA4B,CAAC,cAAc,CAAC,CAAC;IAE7C,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAChE,MAAM,MAAM,GAAG,IAAI,aAAa,CAAC,cAAc,CAAC,CAAC;IACjD,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACvB,MAAM,CAAC,MAAM,CAAC,uBAAuB,CAAC,CAAC;IACvC,MAAM,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC;IAEnC,MAAM,EAAE,GAAG,IAAI,MAAM,CAAoB;QACvC,OAAO,EAAE,IAAI,aAAa,CAAC,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;KACjD,CAAC,CAAC;IACH,MAAM,IAAI,gBAAgB,CAAC,EAAE,EAAE,mBAAmB,CAAC,CAAC,aAAa,EAAE,CAAC;IACpE,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CACnC,QAAgB,EAChB,UAAkC,EAAE;IAEpC,MAAM,MAAM,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5C,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC;QAC3B,MAAM,IAAI,KAAK,CACb,WAAW,MAAM,wCAAwC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAC1F,CAAC;IACJ,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,aAAa,CAAC,MAAM,EAAE;QACvC,QAAQ,EAAE,OAAO,CAAC,QAAQ,IAAI,KAAK;QACnC,aAAa,EAAE,IAAI;KACpB,CAAC,CAAC;IACH,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACvB,MAAM,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC;IAEnC,OAAO,IAAI,MAAM,CAAoB;QACnC,OAAO,EAAE,IAAI,aAAa,CAAC,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;KACjD,CAAC,CAAC;AACL,CAAC;AAED,4GAA4G;AAC5G,SAAS,4BAA4B,CAAC,MAAc;IAClD,KAAK,MAAM,SAAS,IAAI,CAAC,MAAM,EAAE,GAAG,MAAM,MAAM,EAAE,GAAG,MAAM,MAAM,CAAC,EAAE,CAAC;QACnE,IAAI,EAAE,CAAC
,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YAC7B,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;QAC3B,CAAC;IACH,CAAC;AACH,CAAC"}
@@ -0,0 +1,20 @@
1
import type { LearningRecord } from './record-types.js';
/** OpenAI embedding model used for all learnings embeddings. */
export declare const EMBEDDING_MODEL = "text-embedding-3-small";
/** Length of the vectors produced by `EMBEDDING_MODEL`. */
export declare const EMBEDDING_DIMENSIONS = 1536;
/**
 * Returns the text string fed to the embedding model for a given learning.
 * Exported so callers can SHA-256 hash it for cache invalidation without calling the API.
 */
export declare function buildEmbeddingInputString(learning: LearningRecord): string;
/**
 * Embeds a single text string. Used at query time to embed the search prompt.
 * Requires OPENAI_API_KEY.
 */
export declare function embedText(text: string): Promise<number[]>;
/**
 * Embeds multiple texts in a single API call. Used during rebuild to batch all
 * cache-missed learnings into one request.
 * Requires OPENAI_API_KEY.
 */
export declare function embedTexts(texts: string[]): Promise<number[][]>;
//# sourceMappingURL=embeddings.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"embeddings.d.ts","sourceRoot":"","sources":["../../src/learnings/embeddings.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAExD,eAAO,MAAM,eAAe,2BAA2B,CAAC;AACxD,eAAO,MAAM,oBAAoB,OAAO,CAAC;AAQzC;;;GAGG;AACH,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,cAAc,GAAG,MAAM,CAU1E;AAED;;;GAGG;AACH,wBAAsB,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAM/D;AAED;;;;GAIG;AACH,wBAAsB,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CASrE"}
@@ -0,0 +1,54 @@
1
+ // Semantic text embedding using OpenAI text-embedding-3-small.
2
+ // Requires OPENAI_API_KEY in the environment.
3
+ // Embeddings are cached in JSONL source files so the API is only called once per record.
4
+ import OpenAI from 'openai';
5
export const EMBEDDING_MODEL = 'text-embedding-3-small';
export const EMBEDDING_DIMENSIONS = 1536;
// Lazily-constructed OpenAI client shared by all embedding calls in this module.
let _client;
/** Returns the module-wide OpenAI client, creating it on first use. */
function getClient() {
    _client ??= new OpenAI();
    return _client;
}
13
/**
 * Returns the text string fed to the embedding model for a given learning.
 * Exported so callers can SHA-256 hash it for cache invalidation without calling the API.
 * Empty/null fields are skipped; the remaining fields are newline-joined.
 */
export function buildEmbeddingInputString(learning) {
    const { kind, title, statement, rationale, applicability } = learning;
    return [kind, title, statement, rationale, applicability]
        .filter(Boolean)
        .join('\n');
}
27
/**
 * Embeds a single text string. Used at query time to embed the search prompt.
 * Requires OPENAI_API_KEY.
 */
export async function embedText(text) {
    const { data } = await getClient().embeddings.create({
        model: EMBEDDING_MODEL,
        input: text
    });
    // A single input yields a single embedding entry.
    return data[0].embedding;
}
38
/**
 * Embeds multiple texts in a single API call. Used during rebuild to batch all
 * cache-missed learnings into one request.
 * Requires OPENAI_API_KEY.
 */
export async function embedTexts(texts) {
    if (texts.length === 0) {
        return [];
    }
    const response = await getClient().embeddings.create({
        model: EMBEDDING_MODEL,
        input: texts
    });
    // Order results by their reported index so output aligns with the input order.
    const ordered = [...response.data].sort((left, right) => left.index - right.index);
    return ordered.map((item) => item.embedding);
}
54
+ //# sourceMappingURL=embeddings.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"embeddings.js","sourceRoot":"","sources":["../../src/learnings/embeddings.ts"],"names":[],"mappings":"AAAA,+DAA+D;AAC/D,8CAA8C;AAC9C,yFAAyF;AAEzF,OAAO,MAAM,MAAM,QAAQ,CAAC;AAG5B,MAAM,CAAC,MAAM,eAAe,GAAG,wBAAwB,CAAC;AACxD,MAAM,CAAC,MAAM,oBAAoB,GAAG,IAAI,CAAC;AAEzC,IAAI,OAA2B,CAAC;AAChC,SAAS,SAAS;IAChB,IAAI,CAAC,OAAO;QAAE,OAAO,GAAG,IAAI,MAAM,EAAE,CAAC;IACrC,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,yBAAyB,CAAC,QAAwB;IAChE,MAAM,SAAS,GAAG;QAChB,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;QACd,QAAQ,CAAC,SAAS;QAClB,QAAQ,CAAC,SAAS;QAClB,QAAQ,CAAC,aAAa;KACvB,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IAElB,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAAC,IAAY;IAC1C,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;QACnD,KAAK,EAAE,eAAe;QACtB,KAAK,EAAE,IAAI;KACZ,CAAC,CAAC;IACH,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AACpC,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,UAAU,CAAC,KAAe;IAC9C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,EAAE,CAAC;IAClC,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;QACnD,KAAK,EAAE,eAAe;QACtB,KAAK,EAAE,KAAK;KACb,CAAC,CAAC;IACH,OAAO,QAAQ,CAAC,IAAI;SACjB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC;SACjC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;AAC7B,CAAC"}
@@ -0,0 +1,13 @@
1
import type { EvidenceSignal, EvidenceContextCache } from './record-types.js';
import type { GitHubPullRequestEvidence } from './github-evidence.js';
/** Result of building signals from a PR payload. */
export interface BuildSignalsResult {
    /** Lean, in-memory evidence signals derived from the PR payload. */
    signals: EvidenceSignal[];
    /** Maps each signal ID to its in-memory content (title, body, optional file/line context). */
    contextCache: EvidenceContextCache;
}
/**
 * Converts a `GitHubPullRequestEvidence` payload into lean `EvidenceSignal` objects
 * and a parallel `EvidenceContextCache` mapping signal IDs to their in-memory content.
 */
export declare function buildEvidenceSignalsFromPullRequest(payload: GitHubPullRequestEvidence): BuildSignalsResult;
//# sourceMappingURL=evidence-writer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"evidence-writer.d.ts","sourceRoot":"","sources":["../../src/learnings/evidence-writer.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EACV,cAAc,EAEd,oBAAoB,EACrB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,sBAAsB,CAAC;AAStE,oDAAoD;AACpD,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,cAAc,EAAE,CAAC;IAC1B,YAAY,EAAE,oBAAoB,CAAC;CACpC;AAED;;;GAGG;AACH,wBAAgB,mCAAmC,CACjD,OAAO,EAAE,yBAAyB,GACjC,kBAAkB,CAyKpB"}
@@ -0,0 +1,182 @@
1
+ // Builds in-memory EvidenceSignal objects from raw GitHub PR payloads.
2
+ // Signals are purely in-memory — they flow through the extraction pipeline
3
+ // and are never written to disk.
4
+ import { createDeterministicId } from './id.js';
5
+ import { inferAuthorFields, weightCheck, weightIssueComment, weightReview, weightReviewThread } from './weighting.js';
6
/**
 * Converts a `GitHubPullRequestEvidence` payload into lean `EvidenceSignal` objects
 * and a parallel `EvidenceContextCache` mapping signal IDs to their in-memory content.
 *
 * One signal is emitted per PR summary, issue comment, review, review thread
 * (first comment only), and check; all IDs are deterministic so repeated runs
 * over the same PR produce the same signals.
 *
 * @param payload PR payload fetched from GitHub (repository, pullRequest, comments, reviews, threads, checks).
 * @returns `{ signals, contextCache }` — signals sorted by `source_type`.
 */
export function buildEvidenceSignalsFromPullRequest(payload) {
    const signals = [];
    const contextCache = new Map();
    // Single timestamp reused for every created_at/updated_at stamp in this batch.
    const now = new Date().toISOString();
    // Stable prefix shared by every deterministic signal ID for this PR.
    const baseExternalId = `github:${payload.repository.owner}/${payload.repository.name}:pr:${payload.pullRequest.number}`;
    const parentUrl = payload.pullRequest.url;
    // PR summary
    {
        const id = createDeterministicId('ev', `${baseExternalId}:summary`);
        signals.push({
            type: 'evidence',
            id,
            source_system: 'github',
            source_type: 'pull_request',
            external_url: payload.pullRequest.url,
            // Author fields are only attached when the PR has a login to report.
            ...(payload.pullRequest.author?.login && {
                author_type: 'human',
                author_name: payload.pullRequest.author.login
            }),
            // The summary gets a fixed top weight rather than computed weighting.
            base_weight: 5,
            final_weight: 5,
            boosts: ['pr_summary'],
            penalties: [],
            occurred_at: now,
            created_at: now,
            updated_at: now
        });
        contextCache.set(id, {
            title: payload.pullRequest.title,
            content: buildPullRequestSummaryBody(payload)
        });
    }
    // Issue comments
    for (const comment of payload.issueComments) {
        const id = createDeterministicId('ev', `${baseExternalId}:issue-comment:${comment.id}`);
        const weight = weightIssueComment(comment);
        const author = inferAuthorFields(comment.author?.login);
        signals.push({
            type: 'evidence',
            id,
            source_system: 'github',
            source_type: 'pr_issue_comment',
            external_url: comment.url,
            parent_url: parentUrl,
            ...author,
            base_weight: weight.baseWeight,
            final_weight: weight.finalWeight,
            boosts: weight.boosts,
            penalties: weight.penalties,
            // occurred_at reflects when the comment was posted, not when we ingested it.
            occurred_at: comment.createdAt,
            created_at: now,
            updated_at: now
        });
        contextCache.set(id, {
            title: `PR issue comment by ${comment.author?.login ?? 'unknown'}`,
            content: comment.body.trim()
        });
    }
    // Reviews
    for (const review of payload.reviews) {
        const id = createDeterministicId('ev', `${baseExternalId}:review:${review.id}`);
        const weight = weightReview(review);
        const author = inferAuthorFields(review.author?.login);
        signals.push({
            type: 'evidence',
            id,
            source_system: 'github',
            source_type: 'pr_review',
            external_url: review.url,
            parent_url: parentUrl,
            ...author,
            base_weight: weight.baseWeight,
            final_weight: weight.finalWeight,
            boosts: weight.boosts,
            penalties: weight.penalties,
            occurred_at: review.submittedAt,
            created_at: now,
            updated_at: now
        });
        contextCache.set(id, {
            title: `Review ${review.state} by ${review.author?.login ?? 'unknown'}`,
            // Body-less reviews (e.g. a bare APPROVED) fall back to the state string.
            content: review.body.trim() || review.state
        });
    }
    // Review threads
    for (const thread of payload.reviewThreads) {
        // Only the first comment of a thread is turned into a signal.
        const firstComment = thread.comments[0];
        if (!firstComment)
            continue;
        // Fall back to the first comment's id when the thread has none of its own.
        const threadId = thread.id ?? firstComment.id;
        const id = createDeterministicId('ev', `${baseExternalId}:thread:${threadId}`);
        const weight = weightReviewThread(thread);
        const author = inferAuthorFields(firstComment.author?.login);
        signals.push({
            type: 'evidence',
            id,
            source_system: 'github',
            source_type: 'pr_review_thread',
            external_url: firstComment.url,
            parent_url: parentUrl,
            ...author,
            base_weight: weight.baseWeight,
            final_weight: weight.finalWeight,
            boosts: weight.boosts,
            penalties: weight.penalties,
            occurred_at: firstComment.createdAt,
            created_at: now,
            updated_at: now
        });
        // Thread context carries file/line/diff details so consumers can anchor it in code.
        const context = {
            title: buildThreadTitle(firstComment.path, firstComment.line),
            content: firstComment.body.trim(),
            file_path: firstComment.path,
            line_start: firstComment.line,
            line_end: firstComment.line,
            diff_hunk: firstComment.diffHunk
        };
        contextCache.set(id, context);
    }
    // Checks
    for (const check of payload.checks) {
        // Check IDs key on name + state; checks carry no author fields.
        const id = createDeterministicId('ev', `${baseExternalId}:check:${check.name}:${check.state}`);
        const weight = weightCheck(check);
        signals.push({
            type: 'evidence',
            id,
            source_system: 'github',
            source_type: 'pr_check',
            external_url: check.link,
            parent_url: parentUrl,
            base_weight: weight.baseWeight,
            final_weight: weight.finalWeight,
            boosts: weight.boosts,
            penalties: weight.penalties,
            occurred_at: now,
            created_at: now,
            updated_at: now
        });
        contextCache.set(id, {
            title: `Check ${check.state}: ${check.name}`,
            content: `${check.name} -> ${check.state}`
        });
    }
    return {
        // Sort by source_type so output ordering does not depend on payload ordering.
        signals: signals.sort((left, right) => left.source_type.localeCompare(right.source_type)),
        contextCache
    };
}
158
/** Assembles a human-readable summary of the PR (number + title + body + changed files) as the evidence content. */
function buildPullRequestSummaryBody(payload) {
    const { pullRequest, files } = payload;
    const sections = [`PR #${pullRequest.number}: ${pullRequest.title}`];
    const body = pullRequest.body.trim();
    if (body) {
        sections.push(body);
    }
    if (files.length > 0) {
        const fileList = files.map((file) => `- ${file.path}`).join('\n');
        sections.push(`Changed files:\n${fileList}`);
    }
    return sections.join('\n\n');
}
172
/**
 * Formats a human-readable title for a review thread, including file path and
 * line number when available.
 *
 * @param filePath File the thread is anchored to, if any.
 * @param line Line number within the file, if any.
 * @returns A title of the form "Review thread[ on <path>[:<line>]]".
 */
function buildThreadTitle(filePath, line) {
    if (!filePath) {
        return 'Review thread';
    }
    // GitHub can report the line as null (e.g. outdated threads), not just
    // undefined; a nullish check avoids titles like "Review thread on x:null".
    if (line == null) {
        return `Review thread on ${filePath}`;
    }
    return `Review thread on ${filePath}:${line}`;
}
182
+ //# sourceMappingURL=evidence-writer.js.map