optimal-cli 0.1.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/agents/.gitkeep +0 -0
  2. package/agents/content-ops.md +227 -0
  3. package/agents/financial-ops.md +184 -0
  4. package/agents/infra-ops.md +206 -0
  5. package/agents/profiles.json +5 -0
  6. package/bin/optimal.ts +1731 -0
  7. package/docs/CLI-REFERENCE.md +361 -0
  8. package/lib/assets/index.ts +225 -0
  9. package/lib/assets.ts +124 -0
  10. package/lib/auth/index.ts +189 -0
  11. package/lib/board/index.ts +309 -0
  12. package/lib/board/types.ts +124 -0
  13. package/lib/bot/claim.ts +43 -0
  14. package/lib/bot/coordinator.ts +254 -0
  15. package/lib/bot/heartbeat.ts +37 -0
  16. package/lib/bot/index.ts +9 -0
  17. package/lib/bot/protocol.ts +99 -0
  18. package/lib/bot/reporter.ts +42 -0
  19. package/lib/bot/skills.ts +81 -0
  20. package/lib/budget/projections.ts +561 -0
  21. package/lib/budget/scenarios.ts +312 -0
  22. package/lib/cms/publish-blog.ts +129 -0
  23. package/lib/cms/strapi-client.ts +302 -0
  24. package/lib/config/registry.ts +228 -0
  25. package/lib/config/schema.ts +58 -0
  26. package/lib/config.ts +247 -0
  27. package/lib/errors.ts +129 -0
  28. package/lib/format.ts +120 -0
  29. package/lib/infra/.gitkeep +0 -0
  30. package/lib/infra/deploy.ts +70 -0
  31. package/lib/infra/migrate.ts +141 -0
  32. package/lib/newsletter/.gitkeep +0 -0
  33. package/lib/newsletter/distribute.ts +256 -0
  34. package/{dist/lib/newsletter/generate-insurance.d.ts → lib/newsletter/generate-insurance.ts} +24 -7
  35. package/lib/newsletter/generate.ts +735 -0
  36. package/lib/returnpro/.gitkeep +0 -0
  37. package/lib/returnpro/anomalies.ts +258 -0
  38. package/lib/returnpro/audit.ts +194 -0
  39. package/lib/returnpro/diagnose.ts +400 -0
  40. package/lib/returnpro/kpis.ts +255 -0
  41. package/lib/returnpro/templates.ts +323 -0
  42. package/lib/returnpro/upload-income.ts +311 -0
  43. package/lib/returnpro/upload-netsuite.ts +696 -0
  44. package/lib/returnpro/upload-r1.ts +563 -0
  45. package/lib/returnpro/validate.ts +154 -0
  46. package/lib/social/meta.ts +228 -0
  47. package/lib/social/post-generator.ts +468 -0
  48. package/lib/social/publish.ts +301 -0
  49. package/lib/social/scraper.ts +503 -0
  50. package/lib/supabase.ts +25 -0
  51. package/lib/transactions/delete-batch.ts +258 -0
  52. package/lib/transactions/ingest.ts +659 -0
  53. package/lib/transactions/stamp.ts +654 -0
  54. package/package.json +15 -25
  55. package/dist/bin/optimal.d.ts +0 -2
  56. package/dist/bin/optimal.js +0 -995
  57. package/dist/lib/budget/projections.d.ts +0 -115
  58. package/dist/lib/budget/projections.js +0 -384
  59. package/dist/lib/budget/scenarios.d.ts +0 -93
  60. package/dist/lib/budget/scenarios.js +0 -214
  61. package/dist/lib/cms/publish-blog.d.ts +0 -62
  62. package/dist/lib/cms/publish-blog.js +0 -74
  63. package/dist/lib/cms/strapi-client.d.ts +0 -123
  64. package/dist/lib/cms/strapi-client.js +0 -213
  65. package/dist/lib/config.d.ts +0 -55
  66. package/dist/lib/config.js +0 -206
  67. package/dist/lib/infra/deploy.d.ts +0 -29
  68. package/dist/lib/infra/deploy.js +0 -58
  69. package/dist/lib/infra/migrate.d.ts +0 -34
  70. package/dist/lib/infra/migrate.js +0 -103
  71. package/dist/lib/kanban.d.ts +0 -46
  72. package/dist/lib/kanban.js +0 -118
  73. package/dist/lib/newsletter/distribute.d.ts +0 -52
  74. package/dist/lib/newsletter/distribute.js +0 -193
  75. package/dist/lib/newsletter/generate-insurance.js +0 -36
  76. package/dist/lib/newsletter/generate.d.ts +0 -104
  77. package/dist/lib/newsletter/generate.js +0 -571
  78. package/dist/lib/returnpro/anomalies.d.ts +0 -64
  79. package/dist/lib/returnpro/anomalies.js +0 -166
  80. package/dist/lib/returnpro/audit.d.ts +0 -32
  81. package/dist/lib/returnpro/audit.js +0 -147
  82. package/dist/lib/returnpro/diagnose.d.ts +0 -52
  83. package/dist/lib/returnpro/diagnose.js +0 -281
  84. package/dist/lib/returnpro/kpis.d.ts +0 -32
  85. package/dist/lib/returnpro/kpis.js +0 -192
  86. package/dist/lib/returnpro/templates.d.ts +0 -48
  87. package/dist/lib/returnpro/templates.js +0 -229
  88. package/dist/lib/returnpro/upload-income.d.ts +0 -25
  89. package/dist/lib/returnpro/upload-income.js +0 -235
  90. package/dist/lib/returnpro/upload-netsuite.d.ts +0 -37
  91. package/dist/lib/returnpro/upload-netsuite.js +0 -566
  92. package/dist/lib/returnpro/upload-r1.d.ts +0 -48
  93. package/dist/lib/returnpro/upload-r1.js +0 -398
  94. package/dist/lib/social/post-generator.d.ts +0 -83
  95. package/dist/lib/social/post-generator.js +0 -333
  96. package/dist/lib/social/publish.d.ts +0 -66
  97. package/dist/lib/social/publish.js +0 -226
  98. package/dist/lib/social/scraper.d.ts +0 -67
  99. package/dist/lib/social/scraper.js +0 -361
  100. package/dist/lib/supabase.d.ts +0 -4
  101. package/dist/lib/supabase.js +0 -20
  102. package/dist/lib/transactions/delete-batch.d.ts +0 -60
  103. package/dist/lib/transactions/delete-batch.js +0 -203
  104. package/dist/lib/transactions/ingest.d.ts +0 -43
  105. package/dist/lib/transactions/ingest.js +0 -555
  106. package/dist/lib/transactions/stamp.d.ts +0 -51
  107. package/dist/lib/transactions/stamp.js +0 -524
@@ -1,555 +0,0 @@
1
- /**
2
- * Transaction Ingestion — CSV Parsing & Deduplication
3
- *
4
- * Ported from OptimalOS:
5
- * - /home/optimal/optimalos/app/api/csv/ingest/route.ts
6
- * - /home/optimal/optimalos/lib/csv/upload.ts
7
- * - /home/optimal/optimalos/lib/stamp-engine/normalizers/
8
- * - /home/optimal/optimalos/lib/stamp-engine/format-detector.ts
9
- *
10
- * Reads a CSV file from disk, auto-detects bank format, parses into
11
- * normalized transactions, deduplicates against existing rows in Supabase,
12
- * and batch-inserts new records into the `transactions` table.
13
- */
14
- import { readFileSync } from 'node:fs';
15
- import { createHash } from 'node:crypto';
16
- import { getSupabase } from '../supabase.js';
17
- // =============================================================================
18
- // CSV PARSING UTILITIES
19
- // =============================================================================
20
- /**
21
- * Parse a single CSV line, handling quoted fields.
22
- */
23
- function parseCSVLine(line) {
24
- const values = [];
25
- let currentValue = '';
26
- let insideQuotes = false;
27
- for (let i = 0; i < line.length; i++) {
28
- const char = line[i];
29
- const nextChar = line[i + 1];
30
- if (char === '"') {
31
- if (insideQuotes && nextChar === '"') {
32
- currentValue += '"';
33
- i++;
34
- }
35
- else {
36
- insideQuotes = !insideQuotes;
37
- }
38
- }
39
- else if (char === ',' && !insideQuotes) {
40
- values.push(currentValue.trim());
41
- currentValue = '';
42
- }
43
- else {
44
- currentValue += char;
45
- }
46
- }
47
- values.push(currentValue.trim());
48
- return values;
49
- }
50
/**
 * Parse full CSV text into a header row plus data rows.
 * Strips a leading UTF-8 BOM, normalizes CRLF/CR to LF, and drops
 * blank lines before splitting.
 */
function parseCSVContent(content) {
    const text = (content.charCodeAt(0) === 0xfeff ? content.slice(1) : content)
        .replace(/\r\n/g, '\n')
        .replace(/\r/g, '\n');
    const lines = text.split('\n').filter((line) => line.trim());
    if (!lines.length)
        return { headers: [], rows: [] };
    const [first, ...rest] = lines;
    return {
        headers: parseCSVLine(first),
        rows: rest.map((line) => parseCSVLine(line)),
    };
}
65
/**
 * Return the index of the first header matching any candidate name
 * (case-insensitive, exact match after trimming), checking candidates
 * in priority order. Returns -1 when no candidate is present.
 */
function findColumn(headers, names) {
    const normalized = headers.map((h) => h.toLowerCase().trim());
    for (const candidate of names) {
        const found = normalized.indexOf(candidate.toLowerCase());
        if (found >= 0)
            return found;
    }
    return -1;
}
74
/**
 * Convert a date string to ISO format (YYYY-MM-DD).
 *
 * Accepts ISO dates unchanged and M/D/YY[YY] slash dates (two-digit
 * years are assumed to be 20xx). Anything else falls through to the
 * Date constructor; empty or unparseable input yields today's date.
 */
function parseDate(dateStr) {
    const today = () => new Date().toISOString().split('T')[0];
    if (!dateStr?.trim())
        return today();
    const value = dateStr.trim();
    if (/^\d{4}-\d{2}-\d{2}$/.test(value))
        return value;
    const m = /^(\d{1,2})\/(\d{1,2})\/(\d{2,4})$/.exec(value);
    if (m) {
        const year = m[3].length === 2 ? `20${m[3]}` : m[3];
        return `${year}-${m[1].padStart(2, '0')}-${m[2].padStart(2, '0')}`;
    }
    // Date construction never throws; an invalid date is caught by isNaN.
    const parsed = new Date(value);
    return isNaN(parsed.getTime()) ? today() : parsed.toISOString().split('T')[0];
}
99
/**
 * Parse a currency string into a signed number.
 *
 * Strips '$' and thousands separators; accounting-style parentheses
 * denote a negative value. Empty or unparseable input returns 0.
 */
function parseAmount(amountStr) {
    const raw = amountStr?.trim();
    if (!raw)
        return 0;
    const parenthesized = raw.startsWith('(') && raw.endsWith(')');
    const inner = parenthesized ? raw.slice(1, -1) : raw;
    const value = parseFloat(inner.replace(/[$,]/g, ''));
    if (isNaN(value))
        return 0;
    return parenthesized && value > 0 ? -value : value;
}
117
// Header signatures used by detectFormat() to identify the issuing bank
// of a CSV export. Checked in order, so more specific formats come
// first; 'generic' shares its required headers with 'amex' and acts as
// the catch-all, so it must stay last.
const FORMAT_SIGNATURES = [
    {
        format: 'chase_checking',
        requiredHeaders: [
            'details',
            'posting date',
            'description',
            'amount',
            'type',
            'balance',
        ],
    },
    {
        format: 'chase_credit',
        requiredHeaders: [
            'transaction date',
            'post date',
            'description',
            'category',
            'type',
            'amount',
        ],
    },
    {
        format: 'discover',
        requiredHeaders: [
            'trans. date',
            'post date',
            'description',
            'amount',
            'category',
        ],
    },
    {
        format: 'amex',
        requiredHeaders: ['date', 'description', 'amount'],
        // Amex shares its required headers with 'generic'; these extra
        // Amex-specific columns tell the two apart.
        disambiguator: (h) => h.some((x) => x.includes('card member') ||
            x.includes('account #') ||
            x.includes('extended details')),
    },
    {
        format: 'generic',
        requiredHeaders: ['date', 'description', 'amount'],
    },
];
162
/**
 * Detect the bank CSV format from the header row.
 *
 * Returns the first signature whose required headers are at least 80%
 * present (substring match in either direction, after lowercasing),
 * subject to the signature's disambiguator; falls back to 'generic'.
 * Files named *.xlsx are assumed to be Amex exports.
 *
 * @param content  Raw file content; the first line is treated as headers.
 * @param filename Optional file name/path, used only for the .xlsx shortcut.
 * @returns { format, confidence, headers } — confidence is the match ratio.
 */
function detectFormat(content, filename) {
    if (filename?.toLowerCase().endsWith('.xlsx')) {
        return { format: 'amex', confidence: 0.8, headers: [] };
    }
    const lines = content.split(/\r?\n/);
    if (lines.length === 0)
        return { format: 'unknown', confidence: 0, headers: [] };
    const headers = parseCSVLine(lines[0]);
    const normalized = headers.map((h) => h.toLowerCase().trim());
    for (const sig of FORMAT_SIGNATURES) {
        // Fix: skip empty header cells. ''.includes() is vacuously true
        // (req.includes('') always matches), so a single blank header cell
        // previously "matched" every required header and inflated the score.
        const matchCount = sig.requiredHeaders.filter((req) => normalized.some((h) => h.length > 0 && (h.includes(req) || req.includes(h)))).length;
        const ratio = matchCount / sig.requiredHeaders.length;
        if (ratio >= 0.8) {
            if (sig.disambiguator && !sig.disambiguator(normalized))
                continue;
            return { format: sig.format, confidence: ratio, headers: normalized };
        }
    }
    return { format: 'generic', confidence: 0.5, headers: normalized };
}
182
- // =============================================================================
183
- // BANK-SPECIFIC PARSERS
184
- // =============================================================================
185
/**
 * Parse a Chase checking-account CSV export into normalized transactions.
 * Rows with an empty description are skipped with a warning; per-row
 * parse failures are collected as errors rather than aborting.
 */
function parseChaseChecking(content) {
    const result = { transactions: [], errors: [], warnings: [] };
    const { headers, rows } = parseCSVContent(content);
    const cols = {
        postingDate: findColumn(headers, ['posting date', 'date']),
        description: findColumn(headers, ['description']),
        amount: findColumn(headers, ['amount']),
        type: findColumn(headers, ['type']),
        balance: findColumn(headers, ['balance']),
    };
    if (cols.description === -1 || cols.amount === -1) {
        result.errors.push('Missing required columns: description and/or amount');
        return result;
    }
    rows.forEach((row, idx) => {
        try {
            const description = (row[cols.description] || '').trim();
            if (!description) {
                result.warnings.push(`Row ${idx + 2}: Empty description, skipping`);
                return;
            }
            result.transactions.push({
                date: parseDate(cols.postingDate >= 0 ? row[cols.postingDate] : ''),
                description,
                amount: parseAmount(row[cols.amount]),
                transactionType: cols.type >= 0 ? row[cols.type] : undefined,
                balance: cols.balance >= 0 ? parseAmount(row[cols.balance]) : undefined,
            });
        }
        catch (err) {
            result.errors.push(`Row ${idx + 2}: ${err instanceof Error ? err.message : 'Unknown error'}`);
        }
    });
    return result;
}
223
/**
 * Parse a Chase credit-card CSV export into normalized transactions.
 *
 * Fix: the original computed `parseDate(txDate) || parseDate(pDate)`, but
 * parseDate never returns a falsy value (it falls back to today's date),
 * so the post-date fallback was dead code and rows missing a transaction
 * date were silently stamped with today. The fallback to the post date is
 * now applied to the raw string, before parsing.
 */
function parseChaseCredit(content) {
    const transactions = [];
    const errors = [];
    const warnings = [];
    const { headers, rows } = parseCSVContent(content);
    const colMap = {
        transactionDate: findColumn(headers, ['transaction date', 'trans date']),
        postDate: findColumn(headers, ['post date']),
        description: findColumn(headers, ['description']),
        category: findColumn(headers, ['category']),
        type: findColumn(headers, ['type']),
        amount: findColumn(headers, ['amount']),
    };
    if (colMap.description === -1 || colMap.amount === -1) {
        errors.push('Missing required columns: description and/or amount');
        return { transactions, errors, warnings };
    }
    for (let i = 0; i < rows.length; i++) {
        const row = rows[i];
        try {
            const description = (row[colMap.description] || '').trim();
            if (!description) {
                warnings.push(`Row ${i + 2}: Empty description, skipping`);
                continue;
            }
            const txDate = colMap.transactionDate >= 0 ? row[colMap.transactionDate] : '';
            const pDate = colMap.postDate >= 0 ? row[colMap.postDate] : '';
            transactions.push({
                // Prefer the transaction date; fall back to the post date
                // when the transaction date cell is empty.
                date: parseDate(txDate?.trim() ? txDate : pDate),
                description,
                amount: parseAmount(row[colMap.amount]),
                originalCategory: colMap.category >= 0 ? row[colMap.category] || undefined : undefined,
                transactionType: colMap.type >= 0 ? row[colMap.type] : undefined,
                postDate: parseDate(pDate),
            });
        }
        catch (err) {
            errors.push(`Row ${i + 2}: ${err instanceof Error ? err.message : 'Unknown error'}`);
        }
    }
    return { transactions, errors, warnings };
}
265
/**
 * Parse a Discover CSV export into normalized transactions.
 *
 * Discover reports charges as positive amounts, so the sign is flipped
 * unless the row looks like a payment/credit (see isDiscoverPayment).
 *
 * Fix: the original computed `parseDate(transDate) || parseDate(postDate)`,
 * but parseDate never returns a falsy value (it falls back to today's
 * date), so the post-date fallback was dead code. The fallback is now
 * applied to the raw string, before parsing.
 */
function parseDiscover(content) {
    const transactions = [];
    const errors = [];
    const warnings = [];
    const { headers, rows } = parseCSVContent(content);
    const colMap = {
        transDate: findColumn(headers, ['trans. date', 'trans date', 'transaction date']),
        postDate: findColumn(headers, ['post date']),
        description: findColumn(headers, ['description']),
        amount: findColumn(headers, ['amount']),
        category: findColumn(headers, ['category']),
    };
    if (colMap.description === -1 || colMap.amount === -1) {
        errors.push('Missing required columns: description and/or amount');
        return { transactions, errors, warnings };
    }
    for (let i = 0; i < rows.length; i++) {
        const row = rows[i];
        try {
            const description = (row[colMap.description] || '').trim();
            if (!description) {
                warnings.push(`Row ${i + 2}: Empty description, skipping`);
                continue;
            }
            const category = colMap.category >= 0 ? row[colMap.category] || '' : '';
            let amount = parseAmount(row[colMap.amount]);
            // Discover uses positive for charges; flip sign unless it's a payment/credit
            if (amount > 0 && !isDiscoverPayment(description, category)) {
                amount = -amount;
            }
            const tDate = colMap.transDate >= 0 ? row[colMap.transDate] : '';
            const pDate = colMap.postDate >= 0 ? row[colMap.postDate] : '';
            transactions.push({
                // Prefer the transaction date; fall back to the post date
                // when the transaction date cell is empty.
                date: parseDate(tDate?.trim() ? tDate : pDate),
                description,
                amount,
                originalCategory: category || undefined,
                postDate: colMap.postDate >= 0 ? parseDate(row[colMap.postDate]) : undefined,
            });
        }
        catch (err) {
            errors.push(`Row ${i + 2}: ${err instanceof Error ? err.message : 'Unknown error'}`);
        }
    }
    return { transactions, errors, warnings };
}
310
/**
 * Heuristic: does this Discover row represent a payment/credit rather
 * than a charge? Used by parseDiscover to decide whether to flip the
 * amount sign.
 */
function isDiscoverPayment(description, category) {
    const desc = description.toLowerCase();
    const cat = category.toLowerCase();
    const categoryKeywords = ['payment', 'credit', 'rebate'];
    const descriptionKeywords = ['directpay', 'payment', 'statement credit'];
    return (categoryKeywords.some((k) => cat.includes(k)) ||
        descriptionKeywords.some((k) => desc.includes(k)));
}
316
/**
 * Fallback parser for unrecognized bank exports: locates the date,
 * description, amount, and category columns by a list of common header
 * aliases. Description and amount are required; a missing date column
 * only produces a warning (today's date is substituted).
 */
function parseGenericCSV(content) {
    const transactions = [];
    const errors = [];
    const warnings = [];
    const { headers, rows } = parseCSVContent(content);
    const dateCol = findColumn(headers, [
        'date', 'transaction date', 'trans. date', 'trans date',
        'posting date', 'post date',
    ]);
    const descCol = findColumn(headers, [
        'description', 'desc', 'memo', 'narrative', 'details',
        'transaction description', 'merchant',
    ]);
    const amountCol = findColumn(headers, [
        'amount', 'value', 'sum', 'total', 'debit/credit',
    ]);
    const categoryCol = findColumn(headers, ['category', 'type', 'transaction type']);
    if (descCol === -1) {
        errors.push('Missing required column: description');
        return { transactions, errors, warnings };
    }
    if (amountCol === -1) {
        errors.push('Missing required column: amount');
        return { transactions, errors, warnings };
    }
    if (dateCol === -1) {
        warnings.push("No date column found, using today's date for all transactions");
    }
    rows.forEach((row, idx) => {
        try {
            const description = (row[descCol] || '').trim();
            if (!description) {
                warnings.push(`Row ${idx + 2}: Empty description, skipping`);
                return;
            }
            transactions.push({
                date: parseDate(dateCol >= 0 ? row[dateCol] : ''),
                description,
                amount: parseAmount(row[amountCol]),
                originalCategory: categoryCol >= 0 ? row[categoryCol] || undefined : undefined,
            });
        }
        catch (err) {
            errors.push(`Row ${idx + 2}: ${err instanceof Error ? err.message : 'Unknown error'}`);
        }
    });
    return { transactions, errors, warnings };
}
367
/**
 * Route CSV content to the parser for the detected bank format.
 * 'amex' (XLSX-based) and unrecognized formats return an error result
 * with no transactions.
 */
function normalizeTransactions(content, format) {
    if (format === 'amex') {
        return { transactions: [], errors: ['Amex XLSX not supported in CLI yet'], warnings: [] };
    }
    const parsers = {
        chase_checking: parseChaseChecking,
        chase_credit: parseChaseCredit,
        discover: parseDiscover,
        generic: parseGenericCSV,
    };
    const parser = parsers[format];
    if (!parser) {
        return { transactions: [], errors: [`Unknown format: ${format}`], warnings: [] };
    }
    return parser(content);
}
386
- // =============================================================================
387
- // DEDUPLICATION
388
- // =============================================================================
389
/**
 * Deterministic 32-hex-char dedup key for a transaction: the first half
 * of the SHA-256 of "date|amount|description", with the description
 * trimmed and lowercased so formatting differences don't defeat dedup.
 */
function generateTransactionHash(date, amount, description) {
    const key = [date, amount, description.trim().toLowerCase()].join('|');
    return createHash('sha256').update(key).digest('hex').slice(0, 32);
}
398
/**
 * Query Supabase for which of the given dedup hashes already exist in
 * the `transactions` table for this user, batching the IN() lookups
 * 100 hashes at a time. Query errors are silently treated as "no rows".
 */
async function findExistingHashes(userId, hashes) {
    const existing = new Set();
    if (hashes.length === 0)
        return existing;
    const supabase = getSupabase('optimal');
    const BATCH_SIZE = 100;
    for (let start = 0; start < hashes.length; start += BATCH_SIZE) {
        const { data } = await supabase
            .from('transactions')
            .select('dedup_hash')
            .eq('user_id', userId)
            .in('dedup_hash', hashes.slice(start, start + BATCH_SIZE));
        for (const row of data ?? []) {
            if (row.dedup_hash)
                existing.add(row.dedup_hash);
        }
    }
    return existing;
}
423
- // =============================================================================
424
- // MAIN INGESTION FUNCTION
425
- // =============================================================================
426
/**
 * Ingest transactions from a CSV file.
 *
 * 1. Read & detect format
 * 2. Parse into normalized transactions
 * 3. Deduplicate against existing rows (by hash)
 * 4. Batch-insert new rows into `transactions`
 *
 * Side effects: creates an `upload_batches` provenance record and any
 * `categories` rows not already present for this user. Warnings from
 * parsing are folded into the returned `errors` array (non-fatal).
 *
 * @param filePath Path to the CSV file on disk
 * @param userId   Supabase user UUID rows are attributed to
 * @returns count of inserted, skipped (duplicate), and failed rows,
 *          plus accumulated errors/warnings and the detected format
 */
export async function ingestTransactions(filePath, userId) {
    const supabase = getSupabase('optimal');
    // 1. Read file (throws if the path is unreadable)
    const content = readFileSync(filePath, 'utf-8');
    // 2. Detect format (the full path doubles as the filename hint)
    const detection = detectFormat(content, filePath);
    if (detection.format === 'unknown') {
        return { inserted: 0, skipped: 0, failed: 0, errors: ['Could not detect CSV format'], format: 'unknown' };
    }
    // 3. Normalize / parse
    const { transactions, errors: parseErrors, warnings } = normalizeTransactions(content, detection.format);
    if (transactions.length === 0) {
        return {
            inserted: 0,
            skipped: 0,
            failed: 0,
            errors: parseErrors.length > 0 ? parseErrors : ['No transactions parsed from file'],
            format: detection.format,
        };
    }
    // 4. Compute dedup hashes (date + amount + normalized description)
    const withHashes = transactions.map((tx) => ({
        ...tx,
        dedupHash: generateTransactionHash(tx.date, tx.amount, tx.description),
    }));
    // 5. Find existing duplicates
    const allHashes = withHashes.map((t) => t.dedupHash);
    const existingHashes = await findExistingHashes(userId, allHashes);
    const duplicateCount = withHashes.filter((t) => existingHashes.has(t.dedupHash)).length;
    const newTxns = withHashes.filter((t) => !existingHashes.has(t.dedupHash));
    if (newTxns.length === 0) {
        return {
            inserted: 0,
            skipped: duplicateCount,
            failed: 0,
            errors: parseErrors,
            format: detection.format,
        };
    }
    // 6. Create upload batch record for provenance
    // NOTE(review): an insert error here is ignored; rows then carry
    // batch_id = null rather than aborting the ingest.
    const { data: batchRecord } = await supabase
        .from('upload_batches')
        .insert({
        user_id: userId,
        // '/'-only basename split — assumes POSIX paths; TODO confirm
        // Windows paths are out of scope for this CLI.
        file_name: filePath.split('/').pop() || 'unnamed.csv',
        row_count: newTxns.length,
    })
        .select('id')
        .single();
    const batchId = batchRecord?.id ?? null;
    // 7. Resolve categories (find or create), one lookup per unique name
    const uniqueCategories = [
        ...new Set(newTxns.map((t) => t.originalCategory).filter(Boolean)),
    ];
    const categoryMap = new Map();
    for (const catName of uniqueCategories) {
        const { data: existing } = await supabase
            .from('categories')
            .select('id')
            .eq('user_id', userId)
            .eq('name', catName)
            .single();
        if (existing) {
            categoryMap.set(catName, existing.id);
            continue;
        }
        const { data: created, error: createErr } = await supabase
            .from('categories')
            .insert({
            user_id: userId,
            name: catName,
            // new categories get a random 6-digit hex color
            color: `#${Math.floor(Math.random() * 16_777_215).toString(16).padStart(6, '0')}`,
        })
            .select('id')
            .single();
        if (createErr) {
            parseErrors.push(`Failed to create category '${catName}': ${createErr.message}`);
            continue;
        }
        if (created)
            categoryMap.set(catName, created.id);
    }
    // 8. Prepare rows for insert (snake_case column names for Supabase)
    const rows = newTxns.map((txn) => ({
        user_id: userId,
        date: txn.date,
        description: txn.description,
        amount: parseFloat(txn.amount.toString()),
        type: txn.transactionType || null,
        category_id: txn.originalCategory ? categoryMap.get(txn.originalCategory) ?? null : null,
        mode: 'actual',
        provider: 'csv',
        dedup_hash: txn.dedupHash,
        batch_id: batchId,
    }));
    // 9. Batch-insert (50 at a time); a failed batch fails all its rows
    let insertedCount = 0;
    let failedCount = 0;
    const insertBatchSize = 50;
    for (let i = 0; i < rows.length; i += insertBatchSize) {
        const batch = rows.slice(i, i + insertBatchSize);
        const { error: insertErr } = await supabase.from('transactions').insert(batch);
        if (insertErr) {
            failedCount += batch.length;
            parseErrors.push(`Insert batch ${Math.floor(i / insertBatchSize) + 1} failed: ${insertErr.message}`);
        }
        else {
            insertedCount += batch.length;
        }
    }
    // Log warnings as non-fatal errors
    parseErrors.push(...warnings);
    return {
        inserted: insertedCount,
        skipped: duplicateCount,
        failed: failedCount,
        errors: parseErrors,
        format: detection.format,
    };
}
@@ -1,51 +0,0 @@
1
/**
 * Transaction Stamp Engine — Auto-Categorization by Rules
 *
 * Ported from OptimalOS:
 * - /home/optimal/optimalos/lib/stamp-engine/matcher.ts
 * - /home/optimal/optimalos/lib/stamp-engine/patterns.ts
 * - /home/optimal/optimalos/lib/stamp-engine/description-hash.ts
 * - /home/optimal/optimalos/lib/stamp-engine/db/
 * - /home/optimal/optimalos/app/api/stamp/route.ts
 *
 * 4-stage matching algorithm:
 * 1. PATTERN — transfers, P2P, credit card payments (100% confidence)
 * 2. LEARNED — user-confirmed patterns (80-99% confidence)
 * 3. EXACT — provider name found in description (100% confidence)
 * 4. FUZZY — token overlap matching (60-95% confidence)
 * Fallback: CATEGORY_INFER from institution category (50% confidence)
 *
 * Queries unclassified transactions for a given user, loads matching
 * rules from providers / learned_patterns / user_provider_overrides /
 * stamp_categories, then updates `category_id` on matched rows.
 */
/** Which matching stage produced a result; 'NONE' means no stage matched. */
export type MatchType = 'PATTERN' | 'LEARNED' | 'EXACT' | 'FUZZY' | 'CATEGORY_INFER' | 'NONE';
/** Outcome of matching a single transaction against the rule stages. */
export interface MatchResult {
    provider: string | null;
    category: string | null;
    // Match confidence; typical per-stage ranges are listed in the
    // header comment above.
    confidence: number;
    matchType: MatchType;
    // Presumably the rule/pattern text that produced the match, when
    // one applies — verify against the matcher implementation.
    matchedPattern?: string;
}
/** Aggregate counts returned by stampTransactions. */
export interface StampResult {
    stamped: number;
    unmatched: number;
    total: number;
    // Per-stage breakdown of how matched rows were classified.
    byMatchType: Record<MatchType, number>;
    // True when the run was a preview and no rows were written.
    dryRun: boolean;
}
/**
 * Stamp (auto-categorize) unclassified transactions for a user.
 *
 * 1. Fetch unclassified transactions (provider IS NULL or category_id IS NULL)
 * 2. Load matching rules from providers, learned_patterns, user_provider_overrides
 * 3. Run 4-stage matching on each transaction
 * 4. Update matched transactions with provider + category_id
 *
 * @param userId Supabase user UUID
 * @param options dryRun=true to preview without writing
 * @returns counts of stamped, unmatched, and total
 */
export declare function stampTransactions(userId: string, options?: {
    dryRun?: boolean;
}): Promise<StampResult>;