@fuzzle/opencode-accountant 0.0.10 → 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agent/accountant.md +25 -1
- package/dist/index.js +838 -7
- package/package.json +2 -1
package/agent/accountant.md
CHANGED
|
@@ -19,7 +19,7 @@ permission:
|
|
|
19
19
|
todoread: allow
|
|
20
20
|
todowrite: allow
|
|
21
21
|
webfetch: deny
|
|
22
|
-
write: allow
|
|
22
|
+
write: allow
|
|
23
23
|
---
|
|
24
24
|
|
|
25
25
|
## Repository Structure
|
|
@@ -55,3 +55,27 @@ When working with accounting tasks:
|
|
|
55
55
|
1. **Unintended edits** - If a balance is off, check the journal for unintended edits
|
|
56
56
|
1. **Statement tracking** - Move processed statements to `statements/{provider}/YYYY`
|
|
57
57
|
1. **Consistency** - Maintain consistent formatting and naming conventions across all files
|
|
58
|
+
|
|
59
|
+
## Statement Import Workflow
|
|
60
|
+
|
|
61
|
+
Use the `import-statements` tool to import bank statements. The workflow:
|
|
62
|
+
|
|
63
|
+
1. **Prepare**: Drop CSV files into `statements/import/`
|
|
64
|
+
2. **Classify**: Run `classify-statements` tool to move files to `doc/agent/todo/import/<provider>/<currency>/`
|
|
65
|
+
3. **Validate (check mode)**: Run `import-statements(checkOnly: true)` to validate transactions
|
|
66
|
+
- Tool runs `hledger print` dry run to check for unknown postings
|
|
67
|
+
- Unknown postings appear as `income:unknown` or `expenses:unknown`
|
|
68
|
+
4. **Handle unknowns**: If unknown postings found:
|
|
69
|
+
- Tool returns full CSV row data for each unknown posting
|
|
70
|
+
- Analyze the CSV row data to understand the transaction
|
|
71
|
+
- Create or update rules file with `if` directives to match the transaction
|
|
72
|
+
- Repeat step 3 until all postings are matched
|
|
73
|
+
5. **Import**: Once all transactions have matching rules, run `import-statements(checkOnly: false)`
|
|
74
|
+
6. **Complete**: Transactions imported to journal, CSVs moved to `doc/agent/done/import/`
|
|
75
|
+
|
|
76
|
+
### Rules Files
|
|
77
|
+
|
|
78
|
+
- Rules files are in `config/rules/` directory
|
|
79
|
+
- Match CSV to rules file via the `source` directive in each `.rules` file
|
|
80
|
+
- Use field names from the `fields` directive for matching
|
|
81
|
+
- Unknown account pattern: `income:unknown` (positive amounts) / `expenses:unknown` (negative amounts)
|
package/dist/index.js
CHANGED
|
@@ -1340,6 +1340,606 @@ var require_papaparse = __commonJS((exports, module) => {
|
|
|
1340
1340
|
});
|
|
1341
1341
|
});
|
|
1342
1342
|
|
|
1343
|
+
// node_modules/convert-csv-to-json/src/util/fileUtils.js
|
|
1344
|
+
var require_fileUtils = __commonJS((exports, module) => {
|
|
1345
|
+
var fs6 = __require("fs");
|
|
1346
|
+
|
|
1347
|
+
class FileUtils {
|
|
1348
|
+
readFile(fileInputName, encoding) {
|
|
1349
|
+
return fs6.readFileSync(fileInputName, encoding).toString();
|
|
1350
|
+
}
|
|
1351
|
+
readFileAsync(fileInputName, encoding = "utf8") {
|
|
1352
|
+
if (fs6.promises && typeof fs6.promises.readFile === "function") {
|
|
1353
|
+
return fs6.promises.readFile(fileInputName, encoding).then((buf) => buf.toString());
|
|
1354
|
+
}
|
|
1355
|
+
return new Promise((resolve2, reject) => {
|
|
1356
|
+
fs6.readFile(fileInputName, encoding, (err, data) => {
|
|
1357
|
+
if (err) {
|
|
1358
|
+
reject(err);
|
|
1359
|
+
return;
|
|
1360
|
+
}
|
|
1361
|
+
resolve2(data.toString());
|
|
1362
|
+
});
|
|
1363
|
+
});
|
|
1364
|
+
}
|
|
1365
|
+
writeFile(json3, fileOutputName) {
|
|
1366
|
+
fs6.writeFile(fileOutputName, json3, function(err) {
|
|
1367
|
+
if (err) {
|
|
1368
|
+
throw err;
|
|
1369
|
+
} else {
|
|
1370
|
+
console.log("File saved: " + fileOutputName);
|
|
1371
|
+
}
|
|
1372
|
+
});
|
|
1373
|
+
}
|
|
1374
|
+
writeFileAsync(json3, fileOutputName) {
|
|
1375
|
+
if (fs6.promises && typeof fs6.promises.writeFile === "function") {
|
|
1376
|
+
return fs6.promises.writeFile(fileOutputName, json3);
|
|
1377
|
+
}
|
|
1378
|
+
return new Promise((resolve2, reject) => {
|
|
1379
|
+
fs6.writeFile(fileOutputName, json3, (err) => {
|
|
1380
|
+
if (err)
|
|
1381
|
+
return reject(err);
|
|
1382
|
+
resolve2();
|
|
1383
|
+
});
|
|
1384
|
+
});
|
|
1385
|
+
}
|
|
1386
|
+
}
|
|
1387
|
+
module.exports = new FileUtils;
|
|
1388
|
+
});
|
|
1389
|
+
|
|
1390
|
+
// node_modules/convert-csv-to-json/src/util/stringUtils.js
|
|
1391
|
+
var require_stringUtils = __commonJS((exports, module) => {
|
|
1392
|
+
class StringUtils {
|
|
1393
|
+
static PATTERNS = {
|
|
1394
|
+
INTEGER: /^-?\d+$/,
|
|
1395
|
+
FLOAT: /^-?\d*\.\d+$/,
|
|
1396
|
+
WHITESPACE: /\s/g
|
|
1397
|
+
};
|
|
1398
|
+
static BOOLEAN_VALUES = {
|
|
1399
|
+
TRUE: "true",
|
|
1400
|
+
FALSE: "false"
|
|
1401
|
+
};
|
|
1402
|
+
trimPropertyName(shouldTrimAll, propertyName) {
|
|
1403
|
+
if (!propertyName) {
|
|
1404
|
+
return "";
|
|
1405
|
+
}
|
|
1406
|
+
return shouldTrimAll ? propertyName.replace(StringUtils.PATTERNS.WHITESPACE, "") : propertyName.trim();
|
|
1407
|
+
}
|
|
1408
|
+
getValueFormatByType(value) {
|
|
1409
|
+
if (this.isEmpty(value)) {
|
|
1410
|
+
return String();
|
|
1411
|
+
}
|
|
1412
|
+
if (this.isBoolean(value)) {
|
|
1413
|
+
return this.convertToBoolean(value);
|
|
1414
|
+
}
|
|
1415
|
+
if (this.isInteger(value)) {
|
|
1416
|
+
return this.convertInteger(value);
|
|
1417
|
+
}
|
|
1418
|
+
if (this.isFloat(value)) {
|
|
1419
|
+
return this.convertFloat(value);
|
|
1420
|
+
}
|
|
1421
|
+
return String(value);
|
|
1422
|
+
}
|
|
1423
|
+
hasContent(values = []) {
|
|
1424
|
+
return Array.isArray(values) && values.some((value) => Boolean(value));
|
|
1425
|
+
}
|
|
1426
|
+
isEmpty(value) {
|
|
1427
|
+
return value === undefined || value === "";
|
|
1428
|
+
}
|
|
1429
|
+
isBoolean(value) {
|
|
1430
|
+
const normalizedValue = value.toLowerCase();
|
|
1431
|
+
return normalizedValue === StringUtils.BOOLEAN_VALUES.TRUE || normalizedValue === StringUtils.BOOLEAN_VALUES.FALSE;
|
|
1432
|
+
}
|
|
1433
|
+
isInteger(value) {
|
|
1434
|
+
return StringUtils.PATTERNS.INTEGER.test(value);
|
|
1435
|
+
}
|
|
1436
|
+
isFloat(value) {
|
|
1437
|
+
return StringUtils.PATTERNS.FLOAT.test(value);
|
|
1438
|
+
}
|
|
1439
|
+
hasLeadingZero(value) {
|
|
1440
|
+
const isPositiveWithLeadingZero = value.length > 1 && value[0] === "0";
|
|
1441
|
+
const isNegativeWithLeadingZero = value.length > 2 && value[0] === "-" && value[1] === "0";
|
|
1442
|
+
return isPositiveWithLeadingZero || isNegativeWithLeadingZero;
|
|
1443
|
+
}
|
|
1444
|
+
convertToBoolean(value) {
|
|
1445
|
+
return JSON.parse(value.toLowerCase());
|
|
1446
|
+
}
|
|
1447
|
+
convertInteger(value) {
|
|
1448
|
+
if (this.hasLeadingZero(value)) {
|
|
1449
|
+
return String(value);
|
|
1450
|
+
}
|
|
1451
|
+
const num = Number(value);
|
|
1452
|
+
return Number.isSafeInteger(num) ? num : String(value);
|
|
1453
|
+
}
|
|
1454
|
+
convertFloat(value) {
|
|
1455
|
+
const num = Number(value);
|
|
1456
|
+
return Number.isFinite(num) ? num : String(value);
|
|
1457
|
+
}
|
|
1458
|
+
}
|
|
1459
|
+
module.exports = new StringUtils;
|
|
1460
|
+
});
|
|
1461
|
+
|
|
1462
|
+
// node_modules/convert-csv-to-json/src/util/jsonUtils.js
|
|
1463
|
+
var require_jsonUtils = __commonJS((exports, module) => {
|
|
1464
|
+
class JsonUtil {
|
|
1465
|
+
validateJson(json3) {
|
|
1466
|
+
try {
|
|
1467
|
+
JSON.parse(json3);
|
|
1468
|
+
} catch (err) {
|
|
1469
|
+
throw Error(`Parsed csv has generated an invalid json!!!
|
|
1470
|
+
` + err);
|
|
1471
|
+
}
|
|
1472
|
+
}
|
|
1473
|
+
}
|
|
1474
|
+
module.exports = new JsonUtil;
|
|
1475
|
+
});
|
|
1476
|
+
|
|
1477
|
+
// node_modules/convert-csv-to-json/src/csvToJson.js
|
|
1478
|
+
var require_csvToJson = __commonJS((exports, module) => {
|
|
1479
|
+
var fileUtils = require_fileUtils();
|
|
1480
|
+
var stringUtils = require_stringUtils();
|
|
1481
|
+
var jsonUtils = require_jsonUtils();
|
|
1482
|
+
var newLine = /\r?\n/;
|
|
1483
|
+
var defaultFieldDelimiter = ";";
|
|
1484
|
+
|
|
1485
|
+
class CsvToJson {
|
|
1486
|
+
formatValueByType(active) {
|
|
1487
|
+
this.printValueFormatByType = active;
|
|
1488
|
+
return this;
|
|
1489
|
+
}
|
|
1490
|
+
supportQuotedField(active) {
|
|
1491
|
+
this.isSupportQuotedField = active;
|
|
1492
|
+
return this;
|
|
1493
|
+
}
|
|
1494
|
+
fieldDelimiter(delimiter) {
|
|
1495
|
+
this.delimiter = delimiter;
|
|
1496
|
+
return this;
|
|
1497
|
+
}
|
|
1498
|
+
trimHeaderFieldWhiteSpace(active) {
|
|
1499
|
+
this.isTrimHeaderFieldWhiteSpace = active;
|
|
1500
|
+
return this;
|
|
1501
|
+
}
|
|
1502
|
+
indexHeader(indexHeaderValue) {
|
|
1503
|
+
if (isNaN(indexHeaderValue)) {
|
|
1504
|
+
throw new Error("The index Header must be a Number!");
|
|
1505
|
+
}
|
|
1506
|
+
this.indexHeaderValue = indexHeaderValue;
|
|
1507
|
+
return this;
|
|
1508
|
+
}
|
|
1509
|
+
parseSubArray(delimiter = "*", separator = ",") {
|
|
1510
|
+
this.parseSubArrayDelimiter = delimiter;
|
|
1511
|
+
this.parseSubArraySeparator = separator;
|
|
1512
|
+
}
|
|
1513
|
+
encoding(encoding) {
|
|
1514
|
+
this.encoding = encoding;
|
|
1515
|
+
return this;
|
|
1516
|
+
}
|
|
1517
|
+
generateJsonFileFromCsv(fileInputName, fileOutputName) {
|
|
1518
|
+
let jsonStringified = this.getJsonFromCsvStringified(fileInputName);
|
|
1519
|
+
fileUtils.writeFile(jsonStringified, fileOutputName);
|
|
1520
|
+
}
|
|
1521
|
+
getJsonFromCsvStringified(fileInputName) {
|
|
1522
|
+
let json3 = this.getJsonFromCsv(fileInputName);
|
|
1523
|
+
let jsonStringified = JSON.stringify(json3, undefined, 1);
|
|
1524
|
+
jsonUtils.validateJson(jsonStringified);
|
|
1525
|
+
return jsonStringified;
|
|
1526
|
+
}
|
|
1527
|
+
getJsonFromCsv(fileInputName) {
|
|
1528
|
+
let parsedCsv = fileUtils.readFile(fileInputName, this.encoding);
|
|
1529
|
+
return this.csvToJson(parsedCsv);
|
|
1530
|
+
}
|
|
1531
|
+
csvStringToJson(csvString) {
|
|
1532
|
+
return this.csvToJson(csvString);
|
|
1533
|
+
}
|
|
1534
|
+
csvStringToJsonStringified(csvString) {
|
|
1535
|
+
let json3 = this.csvStringToJson(csvString);
|
|
1536
|
+
let jsonStringified = JSON.stringify(json3, undefined, 1);
|
|
1537
|
+
jsonUtils.validateJson(jsonStringified);
|
|
1538
|
+
return jsonStringified;
|
|
1539
|
+
}
|
|
1540
|
+
csvToJson(parsedCsv) {
|
|
1541
|
+
this.validateInputConfig();
|
|
1542
|
+
let lines = parsedCsv.split(newLine);
|
|
1543
|
+
let fieldDelimiter = this.getFieldDelimiter();
|
|
1544
|
+
let index = this.getIndexHeader();
|
|
1545
|
+
let headers;
|
|
1546
|
+
if (this.isSupportQuotedField) {
|
|
1547
|
+
headers = this.split(lines[index]);
|
|
1548
|
+
} else {
|
|
1549
|
+
headers = lines[index].split(fieldDelimiter);
|
|
1550
|
+
}
|
|
1551
|
+
while (!stringUtils.hasContent(headers) && index <= lines.length) {
|
|
1552
|
+
index = index + 1;
|
|
1553
|
+
headers = lines[index].split(fieldDelimiter);
|
|
1554
|
+
}
|
|
1555
|
+
let jsonResult = [];
|
|
1556
|
+
for (let i2 = index + 1;i2 < lines.length; i2++) {
|
|
1557
|
+
let currentLine;
|
|
1558
|
+
if (this.isSupportQuotedField) {
|
|
1559
|
+
currentLine = this.split(lines[i2]);
|
|
1560
|
+
} else {
|
|
1561
|
+
currentLine = lines[i2].split(fieldDelimiter);
|
|
1562
|
+
}
|
|
1563
|
+
if (stringUtils.hasContent(currentLine)) {
|
|
1564
|
+
jsonResult.push(this.buildJsonResult(headers, currentLine));
|
|
1565
|
+
}
|
|
1566
|
+
}
|
|
1567
|
+
return jsonResult;
|
|
1568
|
+
}
|
|
1569
|
+
getFieldDelimiter() {
|
|
1570
|
+
if (this.delimiter) {
|
|
1571
|
+
return this.delimiter;
|
|
1572
|
+
}
|
|
1573
|
+
return defaultFieldDelimiter;
|
|
1574
|
+
}
|
|
1575
|
+
getIndexHeader() {
|
|
1576
|
+
if (this.indexHeaderValue !== null && !isNaN(this.indexHeaderValue)) {
|
|
1577
|
+
return this.indexHeaderValue;
|
|
1578
|
+
}
|
|
1579
|
+
return 0;
|
|
1580
|
+
}
|
|
1581
|
+
buildJsonResult(headers, currentLine) {
|
|
1582
|
+
let jsonObject = {};
|
|
1583
|
+
for (let j = 0;j < headers.length; j++) {
|
|
1584
|
+
let propertyName = stringUtils.trimPropertyName(this.isTrimHeaderFieldWhiteSpace, headers[j]);
|
|
1585
|
+
let value = currentLine[j];
|
|
1586
|
+
if (this.isParseSubArray(value)) {
|
|
1587
|
+
value = this.buildJsonSubArray(value);
|
|
1588
|
+
}
|
|
1589
|
+
if (this.printValueFormatByType && !Array.isArray(value)) {
|
|
1590
|
+
value = stringUtils.getValueFormatByType(currentLine[j]);
|
|
1591
|
+
}
|
|
1592
|
+
jsonObject[propertyName] = value;
|
|
1593
|
+
}
|
|
1594
|
+
return jsonObject;
|
|
1595
|
+
}
|
|
1596
|
+
buildJsonSubArray(value) {
|
|
1597
|
+
let extractedValues = value.substring(value.indexOf(this.parseSubArrayDelimiter) + 1, value.lastIndexOf(this.parseSubArrayDelimiter));
|
|
1598
|
+
extractedValues.trim();
|
|
1599
|
+
value = extractedValues.split(this.parseSubArraySeparator);
|
|
1600
|
+
if (this.printValueFormatByType) {
|
|
1601
|
+
for (let i2 = 0;i2 < value.length; i2++) {
|
|
1602
|
+
value[i2] = stringUtils.getValueFormatByType(value[i2]);
|
|
1603
|
+
}
|
|
1604
|
+
}
|
|
1605
|
+
return value;
|
|
1606
|
+
}
|
|
1607
|
+
isParseSubArray(value) {
|
|
1608
|
+
if (this.parseSubArrayDelimiter) {
|
|
1609
|
+
if (value && (value.indexOf(this.parseSubArrayDelimiter) === 0 && value.lastIndexOf(this.parseSubArrayDelimiter) === value.length - 1)) {
|
|
1610
|
+
return true;
|
|
1611
|
+
}
|
|
1612
|
+
}
|
|
1613
|
+
return false;
|
|
1614
|
+
}
|
|
1615
|
+
validateInputConfig() {
|
|
1616
|
+
if (this.isSupportQuotedField) {
|
|
1617
|
+
if (this.getFieldDelimiter() === '"') {
|
|
1618
|
+
throw new Error('When SupportQuotedFields is enabled you cannot defined the field delimiter as quote -> ["]');
|
|
1619
|
+
}
|
|
1620
|
+
if (this.parseSubArraySeparator === '"') {
|
|
1621
|
+
throw new Error('When SupportQuotedFields is enabled you cannot defined the field parseSubArraySeparator as quote -> ["]');
|
|
1622
|
+
}
|
|
1623
|
+
if (this.parseSubArrayDelimiter === '"') {
|
|
1624
|
+
throw new Error('When SupportQuotedFields is enabled you cannot defined the field parseSubArrayDelimiter as quote -> ["]');
|
|
1625
|
+
}
|
|
1626
|
+
}
|
|
1627
|
+
}
|
|
1628
|
+
hasQuotes(line) {
|
|
1629
|
+
return line.includes('"');
|
|
1630
|
+
}
|
|
1631
|
+
split(line) {
|
|
1632
|
+
if (line.length == 0) {
|
|
1633
|
+
return [];
|
|
1634
|
+
}
|
|
1635
|
+
let delim = this.getFieldDelimiter();
|
|
1636
|
+
let subSplits = [""];
|
|
1637
|
+
if (this.hasQuotes(line)) {
|
|
1638
|
+
let chars = line.split("");
|
|
1639
|
+
let subIndex = 0;
|
|
1640
|
+
let startQuote = false;
|
|
1641
|
+
let isDouble = false;
|
|
1642
|
+
chars.forEach((c, i2, arr) => {
|
|
1643
|
+
if (isDouble) {
|
|
1644
|
+
subSplits[subIndex] += c;
|
|
1645
|
+
isDouble = false;
|
|
1646
|
+
return;
|
|
1647
|
+
}
|
|
1648
|
+
if (c != '"' && c != delim) {
|
|
1649
|
+
subSplits[subIndex] += c;
|
|
1650
|
+
} else if (c == delim && startQuote) {
|
|
1651
|
+
subSplits[subIndex] += c;
|
|
1652
|
+
} else if (c == delim) {
|
|
1653
|
+
subIndex++;
|
|
1654
|
+
subSplits[subIndex] = "";
|
|
1655
|
+
return;
|
|
1656
|
+
} else {
|
|
1657
|
+
if (arr[i2 + 1] === '"') {
|
|
1658
|
+
isDouble = true;
|
|
1659
|
+
} else {
|
|
1660
|
+
if (!startQuote) {
|
|
1661
|
+
startQuote = true;
|
|
1662
|
+
} else {
|
|
1663
|
+
startQuote = false;
|
|
1664
|
+
}
|
|
1665
|
+
}
|
|
1666
|
+
}
|
|
1667
|
+
});
|
|
1668
|
+
if (startQuote) {
|
|
1669
|
+
throw new Error("Row contains mismatched quotes!");
|
|
1670
|
+
}
|
|
1671
|
+
return subSplits;
|
|
1672
|
+
} else {
|
|
1673
|
+
return line.split(delim);
|
|
1674
|
+
}
|
|
1675
|
+
}
|
|
1676
|
+
}
|
|
1677
|
+
module.exports = new CsvToJson;
|
|
1678
|
+
});
|
|
1679
|
+
|
|
1680
|
+
// node_modules/convert-csv-to-json/src/csvToJsonAsync.js
|
|
1681
|
+
var require_csvToJsonAsync = __commonJS((exports, module) => {
|
|
1682
|
+
var fileUtils = require_fileUtils();
|
|
1683
|
+
var csvToJson = require_csvToJson();
|
|
1684
|
+
|
|
1685
|
+
class CsvToJsonAsync {
|
|
1686
|
+
constructor() {
|
|
1687
|
+
this.csvToJson = csvToJson;
|
|
1688
|
+
}
|
|
1689
|
+
formatValueByType(active) {
|
|
1690
|
+
this.csvToJson.formatValueByType(active);
|
|
1691
|
+
return this;
|
|
1692
|
+
}
|
|
1693
|
+
supportQuotedField(active) {
|
|
1694
|
+
this.csvToJson.supportQuotedField(active);
|
|
1695
|
+
return this;
|
|
1696
|
+
}
|
|
1697
|
+
fieldDelimiter(delimiter) {
|
|
1698
|
+
this.csvToJson.fieldDelimiter(delimiter);
|
|
1699
|
+
return this;
|
|
1700
|
+
}
|
|
1701
|
+
trimHeaderFieldWhiteSpace(active) {
|
|
1702
|
+
this.csvToJson.trimHeaderFieldWhiteSpace(active);
|
|
1703
|
+
return this;
|
|
1704
|
+
}
|
|
1705
|
+
indexHeader(indexHeader) {
|
|
1706
|
+
this.csvToJson.indexHeader(indexHeader);
|
|
1707
|
+
return this;
|
|
1708
|
+
}
|
|
1709
|
+
parseSubArray(delimiter = "*", separator = ",") {
|
|
1710
|
+
this.csvToJson.parseSubArray(delimiter, separator);
|
|
1711
|
+
return this;
|
|
1712
|
+
}
|
|
1713
|
+
encoding(encoding) {
|
|
1714
|
+
this.csvToJson.encoding = encoding;
|
|
1715
|
+
return this;
|
|
1716
|
+
}
|
|
1717
|
+
async generateJsonFileFromCsv(fileInputName, fileOutputName) {
|
|
1718
|
+
const jsonStringified = await this.getJsonFromCsvStringified(fileInputName);
|
|
1719
|
+
await fileUtils.writeFileAsync(jsonStringified, fileOutputName);
|
|
1720
|
+
}
|
|
1721
|
+
async getJsonFromCsvStringified(fileInputName) {
|
|
1722
|
+
const json3 = await this.getJsonFromCsvAsync(fileInputName);
|
|
1723
|
+
return JSON.stringify(json3, undefined, 1);
|
|
1724
|
+
}
|
|
1725
|
+
async getJsonFromCsvAsync(inputFileNameOrCsv, options = {}) {
|
|
1726
|
+
if (inputFileNameOrCsv === null || inputFileNameOrCsv === undefined) {
|
|
1727
|
+
throw new Error("inputFileNameOrCsv is not defined!!!");
|
|
1728
|
+
}
|
|
1729
|
+
if (options.raw) {
|
|
1730
|
+
if (inputFileNameOrCsv === "") {
|
|
1731
|
+
return [];
|
|
1732
|
+
}
|
|
1733
|
+
return this.csvToJson.csvToJson(inputFileNameOrCsv);
|
|
1734
|
+
}
|
|
1735
|
+
const parsedCsv = await fileUtils.readFileAsync(inputFileNameOrCsv, this.csvToJson.encoding || "utf8");
|
|
1736
|
+
return this.csvToJson.csvToJson(parsedCsv);
|
|
1737
|
+
}
|
|
1738
|
+
csvStringToJsonAsync(csvString, options = { raw: true }) {
|
|
1739
|
+
return this.getJsonFromCsvAsync(csvString, options);
|
|
1740
|
+
}
|
|
1741
|
+
async csvStringToJsonStringifiedAsync(csvString) {
|
|
1742
|
+
const json3 = await this.csvStringToJsonAsync(csvString);
|
|
1743
|
+
return JSON.stringify(json3, undefined, 1);
|
|
1744
|
+
}
|
|
1745
|
+
}
|
|
1746
|
+
module.exports = new CsvToJsonAsync;
|
|
1747
|
+
});
|
|
1748
|
+
|
|
1749
|
+
// node_modules/convert-csv-to-json/src/browserApi.js
|
|
1750
|
+
var require_browserApi = __commonJS((exports, module) => {
|
|
1751
|
+
var csvToJson = require_csvToJson();
|
|
1752
|
+
|
|
1753
|
+
class BrowserApi {
|
|
1754
|
+
constructor() {
|
|
1755
|
+
this.csvToJson = csvToJson;
|
|
1756
|
+
}
|
|
1757
|
+
formatValueByType(active = true) {
|
|
1758
|
+
this.csvToJson.formatValueByType(active);
|
|
1759
|
+
return this;
|
|
1760
|
+
}
|
|
1761
|
+
supportQuotedField(active = false) {
|
|
1762
|
+
this.csvToJson.supportQuotedField(active);
|
|
1763
|
+
return this;
|
|
1764
|
+
}
|
|
1765
|
+
fieldDelimiter(delimiter) {
|
|
1766
|
+
this.csvToJson.fieldDelimiter(delimiter);
|
|
1767
|
+
return this;
|
|
1768
|
+
}
|
|
1769
|
+
trimHeaderFieldWhiteSpace(active = false) {
|
|
1770
|
+
this.csvToJson.trimHeaderFieldWhiteSpace(active);
|
|
1771
|
+
return this;
|
|
1772
|
+
}
|
|
1773
|
+
indexHeader(index) {
|
|
1774
|
+
this.csvToJson.indexHeader(index);
|
|
1775
|
+
return this;
|
|
1776
|
+
}
|
|
1777
|
+
parseSubArray(delimiter = "*", separator = ",") {
|
|
1778
|
+
this.csvToJson.parseSubArray(delimiter, separator);
|
|
1779
|
+
return this;
|
|
1780
|
+
}
|
|
1781
|
+
csvStringToJson(csvString) {
|
|
1782
|
+
if (csvString === undefined || csvString === null) {
|
|
1783
|
+
throw new Error("csvString is not defined!!!");
|
|
1784
|
+
}
|
|
1785
|
+
return this.csvToJson.csvToJson(csvString);
|
|
1786
|
+
}
|
|
1787
|
+
csvStringToJsonStringified(csvString) {
|
|
1788
|
+
if (csvString === undefined || csvString === null) {
|
|
1789
|
+
throw new Error("csvString is not defined!!!");
|
|
1790
|
+
}
|
|
1791
|
+
return this.csvToJson.csvStringToJsonStringified(csvString);
|
|
1792
|
+
}
|
|
1793
|
+
csvStringToJsonAsync(csvString) {
|
|
1794
|
+
return Promise.resolve(this.csvStringToJson(csvString));
|
|
1795
|
+
}
|
|
1796
|
+
csvStringToJsonStringifiedAsync(csvString) {
|
|
1797
|
+
return Promise.resolve(this.csvStringToJsonStringified(csvString));
|
|
1798
|
+
}
|
|
1799
|
+
parseFile(file2, options = {}) {
|
|
1800
|
+
if (!file2) {
|
|
1801
|
+
return Promise.reject(new Error("file is not defined!!!"));
|
|
1802
|
+
}
|
|
1803
|
+
return new Promise((resolve2, reject) => {
|
|
1804
|
+
if (typeof FileReader === "undefined") {
|
|
1805
|
+
reject(new Error("FileReader is not available in this environment"));
|
|
1806
|
+
return;
|
|
1807
|
+
}
|
|
1808
|
+
const reader = new FileReader;
|
|
1809
|
+
reader.onerror = () => reject(reader.error || new Error("Failed to read file"));
|
|
1810
|
+
reader.onload = () => {
|
|
1811
|
+
try {
|
|
1812
|
+
const text = reader.result;
|
|
1813
|
+
const result = this.csvToJson.csvToJson(String(text));
|
|
1814
|
+
resolve2(result);
|
|
1815
|
+
} catch (err) {
|
|
1816
|
+
reject(err);
|
|
1817
|
+
}
|
|
1818
|
+
};
|
|
1819
|
+
if (options.encoding) {
|
|
1820
|
+
reader.readAsText(file2, options.encoding);
|
|
1821
|
+
} else {
|
|
1822
|
+
reader.readAsText(file2);
|
|
1823
|
+
}
|
|
1824
|
+
});
|
|
1825
|
+
}
|
|
1826
|
+
}
|
|
1827
|
+
module.exports = new BrowserApi;
|
|
1828
|
+
});
|
|
1829
|
+
|
|
1830
|
+
// node_modules/convert-csv-to-json/index.js
|
|
1831
|
+
var require_convert_csv_to_json = __commonJS((exports) => {
|
|
1832
|
+
var csvToJson = require_csvToJson();
|
|
1833
|
+
var encodingOps = {
|
|
1834
|
+
utf8: "utf8",
|
|
1835
|
+
ucs2: "ucs2",
|
|
1836
|
+
utf16le: "utf16le",
|
|
1837
|
+
latin1: "latin1",
|
|
1838
|
+
ascii: "ascii",
|
|
1839
|
+
base64: "base64",
|
|
1840
|
+
hex: "hex"
|
|
1841
|
+
};
|
|
1842
|
+
exports.formatValueByType = function(active = true) {
|
|
1843
|
+
csvToJson.formatValueByType(active);
|
|
1844
|
+
return this;
|
|
1845
|
+
};
|
|
1846
|
+
exports.supportQuotedField = function(active = false) {
|
|
1847
|
+
csvToJson.supportQuotedField(active);
|
|
1848
|
+
return this;
|
|
1849
|
+
};
|
|
1850
|
+
exports.fieldDelimiter = function(delimiter) {
|
|
1851
|
+
csvToJson.fieldDelimiter(delimiter);
|
|
1852
|
+
return this;
|
|
1853
|
+
};
|
|
1854
|
+
exports.trimHeaderFieldWhiteSpace = function(active = false) {
|
|
1855
|
+
csvToJson.trimHeaderFieldWhiteSpace(active);
|
|
1856
|
+
return this;
|
|
1857
|
+
};
|
|
1858
|
+
exports.indexHeader = function(index) {
|
|
1859
|
+
csvToJson.indexHeader(index);
|
|
1860
|
+
return this;
|
|
1861
|
+
};
|
|
1862
|
+
exports.parseSubArray = function(delimiter, separator) {
|
|
1863
|
+
csvToJson.parseSubArray(delimiter, separator);
|
|
1864
|
+
return this;
|
|
1865
|
+
};
|
|
1866
|
+
exports.customEncoding = function(encoding) {
|
|
1867
|
+
csvToJson.encoding = encoding;
|
|
1868
|
+
return this;
|
|
1869
|
+
};
|
|
1870
|
+
exports.utf8Encoding = function utf8Encoding() {
|
|
1871
|
+
csvToJson.encoding = encodingOps.utf8;
|
|
1872
|
+
return this;
|
|
1873
|
+
};
|
|
1874
|
+
exports.ucs2Encoding = function() {
|
|
1875
|
+
csvToJson.encoding = encodingOps.ucs2;
|
|
1876
|
+
return this;
|
|
1877
|
+
};
|
|
1878
|
+
exports.utf16leEncoding = function() {
|
|
1879
|
+
csvToJson.encoding = encodingOps.utf16le;
|
|
1880
|
+
return this;
|
|
1881
|
+
};
|
|
1882
|
+
exports.latin1Encoding = function() {
|
|
1883
|
+
csvToJson.encoding = encodingOps.latin1;
|
|
1884
|
+
return this;
|
|
1885
|
+
};
|
|
1886
|
+
exports.asciiEncoding = function() {
|
|
1887
|
+
csvToJson.encoding = encodingOps.ascii;
|
|
1888
|
+
return this;
|
|
1889
|
+
};
|
|
1890
|
+
exports.base64Encoding = function() {
|
|
1891
|
+
this.csvToJson = encodingOps.base64;
|
|
1892
|
+
return this;
|
|
1893
|
+
};
|
|
1894
|
+
exports.hexEncoding = function() {
|
|
1895
|
+
this.csvToJson = encodingOps.hex;
|
|
1896
|
+
return this;
|
|
1897
|
+
};
|
|
1898
|
+
exports.generateJsonFileFromCsv = function(inputFileName, outputFileName) {
|
|
1899
|
+
if (!inputFileName) {
|
|
1900
|
+
throw new Error("inputFileName is not defined!!!");
|
|
1901
|
+
}
|
|
1902
|
+
if (!outputFileName) {
|
|
1903
|
+
throw new Error("outputFileName is not defined!!!");
|
|
1904
|
+
}
|
|
1905
|
+
csvToJson.generateJsonFileFromCsv(inputFileName, outputFileName);
|
|
1906
|
+
};
|
|
1907
|
+
exports.getJsonFromCsv = function(inputFileName) {
|
|
1908
|
+
if (!inputFileName) {
|
|
1909
|
+
throw new Error("inputFileName is not defined!!!");
|
|
1910
|
+
}
|
|
1911
|
+
return csvToJson.getJsonFromCsv(inputFileName);
|
|
1912
|
+
};
|
|
1913
|
+
var csvToJsonAsync = require_csvToJsonAsync();
|
|
1914
|
+
Object.assign(exports, {
|
|
1915
|
+
getJsonFromCsvAsync: function(input, options) {
|
|
1916
|
+
return csvToJsonAsync.getJsonFromCsvAsync(input, options);
|
|
1917
|
+
},
|
|
1918
|
+
csvStringToJsonAsync: function(input, options) {
|
|
1919
|
+
return csvToJsonAsync.csvStringToJsonAsync(input, options);
|
|
1920
|
+
},
|
|
1921
|
+
csvStringToJsonStringifiedAsync: function(input) {
|
|
1922
|
+
return csvToJsonAsync.csvStringToJsonStringifiedAsync(input);
|
|
1923
|
+
},
|
|
1924
|
+
generateJsonFileFromCsvAsync: function(input, output) {
|
|
1925
|
+
return csvToJsonAsync.generateJsonFileFromCsv(input, output);
|
|
1926
|
+
}
|
|
1927
|
+
});
|
|
1928
|
+
exports.csvStringToJson = function(csvString) {
|
|
1929
|
+
return csvToJson.csvStringToJson(csvString);
|
|
1930
|
+
};
|
|
1931
|
+
exports.csvStringToJsonStringified = function(csvString) {
|
|
1932
|
+
if (csvString === undefined || csvString === null) {
|
|
1933
|
+
throw new Error("csvString is not defined!!!");
|
|
1934
|
+
}
|
|
1935
|
+
return csvToJson.csvStringToJsonStringified(csvString);
|
|
1936
|
+
};
|
|
1937
|
+
exports.jsonToCsv = function(inputFileName, outputFileName) {
|
|
1938
|
+
csvToJson.generateJsonFileFromCsv(inputFileName, outputFileName);
|
|
1939
|
+
};
|
|
1940
|
+
exports.browser = require_browserApi();
|
|
1941
|
+
});
|
|
1942
|
+
|
|
1343
1943
|
// src/index.ts
|
|
1344
1944
|
import { dirname as dirname4, join as join7 } from "path";
|
|
1345
1945
|
import { fileURLToPath } from "url";
|
|
@@ -16976,7 +17576,7 @@ var classify_statements_default = tool({
|
|
|
16976
17576
|
}
|
|
16977
17577
|
});
|
|
16978
17578
|
// src/tools/import-statements.ts
|
|
16979
|
-
import * as
|
|
17579
|
+
import * as fs7 from "fs";
|
|
16980
17580
|
import * as path6 from "path";
|
|
16981
17581
|
|
|
16982
17582
|
// src/utils/rulesMatcher.ts
|
|
@@ -17090,10 +17690,222 @@ function countTransactions(hledgerOutput) {
|
|
|
17090
17690
|
return count;
|
|
17091
17691
|
}
|
|
17092
17692
|
|
|
17693
|
+
// src/utils/rulesParser.ts
|
|
17694
|
+
function parseSkipRows(rulesContent) {
|
|
17695
|
+
const match = rulesContent.match(/^skip\s+(\d+)/m);
|
|
17696
|
+
return match ? parseInt(match[1], 10) : 0;
|
|
17697
|
+
}
|
|
17698
|
+
function parseSeparator(rulesContent) {
|
|
17699
|
+
const match = rulesContent.match(/^separator\s+(.)/m);
|
|
17700
|
+
return match ? match[1] : ",";
|
|
17701
|
+
}
|
|
17702
|
+
function parseFieldNames(rulesContent) {
|
|
17703
|
+
const match = rulesContent.match(/^fields\s+(.+)$/m);
|
|
17704
|
+
if (!match) {
|
|
17705
|
+
return [];
|
|
17706
|
+
}
|
|
17707
|
+
return match[1].split(",").map((field) => field.trim());
|
|
17708
|
+
}
|
|
17709
|
+
function parseDateFormat(rulesContent) {
|
|
17710
|
+
const match = rulesContent.match(/^date-format\s+(.+)$/m);
|
|
17711
|
+
return match ? match[1].trim() : "%Y-%m-%d";
|
|
17712
|
+
}
|
|
17713
|
+
function parseDateField(rulesContent, fieldNames) {
|
|
17714
|
+
const match = rulesContent.match(/^date\s+%(\w+|\d+)/m);
|
|
17715
|
+
if (!match) {
|
|
17716
|
+
return fieldNames[0] || "date";
|
|
17717
|
+
}
|
|
17718
|
+
const value = match[1];
|
|
17719
|
+
if (/^\d+$/.test(value)) {
|
|
17720
|
+
const index = parseInt(value, 10) - 1;
|
|
17721
|
+
return fieldNames[index] || value;
|
|
17722
|
+
}
|
|
17723
|
+
return value;
|
|
17724
|
+
}
|
|
17725
|
+
function parseAmountFields(rulesContent, fieldNames) {
|
|
17726
|
+
const result = {};
|
|
17727
|
+
const simpleMatch = rulesContent.match(/^amount\s+(-?)%(\w+|\d+)/m);
|
|
17728
|
+
if (simpleMatch) {
|
|
17729
|
+
const fieldRef = simpleMatch[2];
|
|
17730
|
+
if (/^\d+$/.test(fieldRef)) {
|
|
17731
|
+
const index = parseInt(fieldRef, 10) - 1;
|
|
17732
|
+
result.single = fieldNames[index] || fieldRef;
|
|
17733
|
+
} else {
|
|
17734
|
+
result.single = fieldRef;
|
|
17735
|
+
}
|
|
17736
|
+
}
|
|
17737
|
+
const debitMatch = rulesContent.match(/if\s+%(\w+)\s+\.\s*\n\s*amount\s+-?%\1/m);
|
|
17738
|
+
if (debitMatch) {
|
|
17739
|
+
result.debit = debitMatch[1];
|
|
17740
|
+
}
|
|
17741
|
+
const creditMatch = rulesContent.match(/if\s+%(\w+)\s+\.\s*\n\s*amount\s+%\1(?!\w)/m);
|
|
17742
|
+
if (creditMatch && creditMatch[1] !== result.debit) {
|
|
17743
|
+
result.credit = creditMatch[1];
|
|
17744
|
+
}
|
|
17745
|
+
if (result.debit || result.credit) {
|
|
17746
|
+
delete result.single;
|
|
17747
|
+
}
|
|
17748
|
+
if (!result.single && !result.debit && !result.credit) {
|
|
17749
|
+
result.single = "amount";
|
|
17750
|
+
}
|
|
17751
|
+
return result;
|
|
17752
|
+
}
|
|
17753
|
+
function parseRulesFile(rulesContent) {
|
|
17754
|
+
const fieldNames = parseFieldNames(rulesContent);
|
|
17755
|
+
return {
|
|
17756
|
+
skipRows: parseSkipRows(rulesContent),
|
|
17757
|
+
separator: parseSeparator(rulesContent),
|
|
17758
|
+
fieldNames,
|
|
17759
|
+
dateFormat: parseDateFormat(rulesContent),
|
|
17760
|
+
dateField: parseDateField(rulesContent, fieldNames),
|
|
17761
|
+
amountFields: parseAmountFields(rulesContent, fieldNames)
|
|
17762
|
+
};
|
|
17763
|
+
}
|
|
17764
|
+
|
|
17765
|
+
// src/utils/csvParser.ts
|
|
17766
|
+
var import_convert_csv_to_json = __toESM(require_convert_csv_to_json(), 1);
|
|
17767
|
+
import * as fs6 from "fs";
|
|
17768
|
+
function parseCsvFile(csvPath, config2) {
|
|
17769
|
+
const csvContent = fs6.readFileSync(csvPath, "utf-8");
|
|
17770
|
+
const lines = csvContent.split(`
|
|
17771
|
+
`);
|
|
17772
|
+
const headerIndex = config2.skipRows;
|
|
17773
|
+
if (headerIndex >= lines.length) {
|
|
17774
|
+
return [];
|
|
17775
|
+
}
|
|
17776
|
+
const headerLine = lines[headerIndex];
|
|
17777
|
+
const dataLines = lines.slice(headerIndex + 1).filter((line) => line.trim() !== "");
|
|
17778
|
+
const csvWithHeader = [headerLine, ...dataLines].join(`
|
|
17779
|
+
`);
|
|
17780
|
+
const rawRows = import_convert_csv_to_json.default.indexHeader(0).fieldDelimiter(config2.separator).supportQuotedField(true).csvStringToJson(csvWithHeader);
|
|
17781
|
+
const fieldNames = config2.fieldNames.length > 0 ? config2.fieldNames : Object.keys(rawRows[0] || {});
|
|
17782
|
+
const mappedRows = [];
|
|
17783
|
+
for (const parsedRow of rawRows) {
|
|
17784
|
+
const row = {};
|
|
17785
|
+
const values = Object.values(parsedRow);
|
|
17786
|
+
for (let i2 = 0;i2 < fieldNames.length && i2 < values.length; i2++) {
|
|
17787
|
+
row[fieldNames[i2]] = values[i2];
|
|
17788
|
+
}
|
|
17789
|
+
mappedRows.push(row);
|
|
17790
|
+
}
|
|
17791
|
+
return mappedRows;
|
|
17792
|
+
}
|
|
17793
|
+
function parseAmountValue(amountStr) {
|
|
17794
|
+
const cleaned = amountStr.replace(/[A-Z]{3}\s*/g, "").trim();
|
|
17795
|
+
return parseFloat(cleaned) || 0;
|
|
17796
|
+
}
|
|
17797
|
+
// Computes the signed amount of a CSV row from the rules file's amount
// configuration. With a single amount column the value is used as-is;
// with separate debit/credit columns, debit is an outflow (negative) and
// credit an inflow (positive). Debit wins when both are populated.
function getRowAmount(row, amountFields) {
  if (amountFields.single) {
    return parseAmountValue(row[amountFields.single] || "0");
  }
  const debit = amountFields.debit ? parseAmountValue(row[amountFields.debit] || "0") : 0;
  const credit = amountFields.credit ? parseAmountValue(row[amountFields.credit] || "0") : 0;
  if (debit !== 0) {
    return -Math.abs(debit);
  }
  return credit !== 0 ? Math.abs(credit) : 0;
}
|
|
17811
|
+
// Normalizes a date string to ISO (YYYY-MM-DD) using the rules file's
// hledger-style date-format directive. Unknown formats and malformed
// values fall through to the trimmed original string.
function parseDateToIso(dateStr, dateFormat) {
  if (!dateStr)
    return "";
  const trimmed = dateStr.trim();
  // Already ISO: nothing to rearrange.
  if (dateFormat === "%Y-%m-%d" || dateFormat === "%F") {
    return trimmed;
  }
  // Supported three-part layouts: separator plus [year, month, day] part indices.
  const layouts = {
    "%d.%m.%Y": { sep: ".", order: [2, 1, 0] },
    "%m/%d/%Y": { sep: "/", order: [2, 0, 1] },
    "%d/%m/%Y": { sep: "/", order: [2, 1, 0] }
  };
  const layout = layouts[dateFormat];
  if (layout) {
    const parts = dateStr.split(layout.sep);
    if (parts.length === 3) {
      const [year, month, day] = layout.order.map((i) => parts[i]);
      return `${year}-${month.padStart(2, "0")}-${day.padStart(2, "0")}`;
    }
  }
  return trimmed;
}
|
|
17837
|
+
// Heuristic: does this CSV field look like it holds a transaction id?
// Requires both an id-ish field name (transaction/reference/booking/... )
// and an id-ish value (>= 3 chars of [A-Za-z0-9_-]).
function looksLikeTransactionId(fieldName, value) {
  if (!value || value.trim() === "")
    return false;
  // Alternation over the usual id column names, case-insensitive.
  const idFieldName = /transaction|trans_?no|trans_?id|reference|ref_?no|ref_?id|booking_?id|payment_?id|order_?id/i;
  if (!idFieldName.test(fieldName))
    return false;
  const candidate = value.trim();
  return candidate.length >= 3 && /^[A-Za-z0-9_-]+$/.test(candidate);
}
|
|
17858
|
+
// Scans a CSV row for the first field whose name and value both look like
// a transaction id. Returns { field, value } (value trimmed) or null.
function findTransactionId(row) {
  const match = Object.entries(row).find(([field, value]) => looksLikeTransactionId(field, value));
  if (!match) {
    return null;
  }
  const [field, value] = match;
  return { field, value: value.trim() };
}
|
|
17866
|
+
// Locates the CSV source row behind an unknown hledger posting so the agent
// can inspect the raw statement data when writing new rules.
//
// Matching strategy, in order:
//   1. Filter rows to exact (date, amount) matches.
//   2. If several remain, disambiguate via a transaction-id column that is
//      unique among the candidates.
//   3. Fall back to a description substring match, then to the first candidate.
//
// Throws when no row matches at all — that indicates a mismatch between
// hledger's dry-run output and our own CSV parsing, i.e. a bug.
function findMatchingCsvRow(posting, csvRows, config2) {
  const postingAmount = parseAmountValue(posting.amount);
  const candidates = csvRows.filter((row) => {
    const rowDate = parseDateToIso(row[config2.dateField] || "", config2.dateFormat);
    if (rowDate !== posting.date)
      return false;
    // Both sides pass through parseFloat; compare with a small tolerance.
    const rowAmount = getRowAmount(row, config2.amountFields);
    return Math.abs(rowAmount - postingAmount) <= 0.001;
  });
  if (candidates.length === 1) {
    return candidates[0];
  }
  if (candidates.length === 0) {
    throw new Error(`Bug: Could not find CSV row for posting: ${posting.date} ${posting.description} ${posting.amount}. ` + `This indicates a mismatch between hledger output and CSV parsing.`);
  }
  // Several rows share this date/amount: a candidate whose transaction id is
  // unique among the candidates identifies the row unambiguously.
  for (const candidate of candidates) {
    const txId = findTransactionId(candidate);
    if (txId) {
      const withSameTxId = candidates.filter((row) => row[txId.field] === txId.value);
      if (withSameTxId.length === 1) {
        return withSameTxId[0];
      }
    }
  }
  // Last resort: any field containing the posting description (case-insensitive).
  const descriptionLower = posting.description.toLowerCase();
  const descMatches = candidates.filter((row) => {
    return Object.values(row).some((value) => value && value.toLowerCase().includes(descriptionLower));
  });
  // One-or-more description hits beat a blind pick; take the first either way.
  if (descMatches.length > 0) {
    return descMatches[0];
  }
  return candidates[0];
}
|
|
17904
|
+
|
|
17093
17905
|
// src/tools/import-statements.ts
|
|
17094
17906
|
function findPendingCsvFiles(pendingDir, provider, currency) {
|
|
17095
17907
|
const csvFiles = [];
|
|
17096
|
-
if (!
|
|
17908
|
+
if (!fs7.existsSync(pendingDir)) {
|
|
17097
17909
|
return csvFiles;
|
|
17098
17910
|
}
|
|
17099
17911
|
let searchPath = pendingDir;
|
|
@@ -17103,11 +17915,11 @@ function findPendingCsvFiles(pendingDir, provider, currency) {
|
|
|
17103
17915
|
searchPath = path6.join(searchPath, currency);
|
|
17104
17916
|
}
|
|
17105
17917
|
}
|
|
17106
|
-
if (!
|
|
17918
|
+
if (!fs7.existsSync(searchPath)) {
|
|
17107
17919
|
return csvFiles;
|
|
17108
17920
|
}
|
|
17109
17921
|
function scanDirectory(directory) {
|
|
17110
|
-
const entries =
|
|
17922
|
+
const entries = fs7.readdirSync(directory, { withFileTypes: true });
|
|
17111
17923
|
for (const entry of entries) {
|
|
17112
17924
|
const fullPath = path6.join(directory, entry.name);
|
|
17113
17925
|
if (entry.isDirectory()) {
|
|
@@ -17194,6 +18006,25 @@ async function importStatementsCore(directory, agent, options, configLoader = lo
|
|
|
17194
18006
|
const unknownPostings = parseUnknownPostings(result.stdout);
|
|
17195
18007
|
const transactionCount = countTransactions(result.stdout);
|
|
17196
18008
|
const matchedCount = transactionCount - unknownPostings.length;
|
|
18009
|
+
if (unknownPostings.length > 0) {
|
|
18010
|
+
try {
|
|
18011
|
+
const rulesContent = fs7.readFileSync(rulesFile, "utf-8");
|
|
18012
|
+
const rulesConfig = parseRulesFile(rulesContent);
|
|
18013
|
+
const csvRows = parseCsvFile(csvFile, rulesConfig);
|
|
18014
|
+
for (const posting of unknownPostings) {
|
|
18015
|
+
const csvRow = findMatchingCsvRow({
|
|
18016
|
+
date: posting.date,
|
|
18017
|
+
description: posting.description,
|
|
18018
|
+
amount: posting.amount
|
|
18019
|
+
}, csvRows, rulesConfig);
|
|
18020
|
+
posting.csvRow = csvRow;
|
|
18021
|
+
}
|
|
18022
|
+
} catch {
|
|
18023
|
+
for (const posting of unknownPostings) {
|
|
18024
|
+
posting.csvRow = undefined;
|
|
18025
|
+
}
|
|
18026
|
+
}
|
|
18027
|
+
}
|
|
17197
18028
|
totalTransactions += transactionCount;
|
|
17198
18029
|
totalMatched += matchedCount;
|
|
17199
18030
|
totalUnknown += unknownPostings.length;
|
|
@@ -17272,10 +18103,10 @@ async function importStatementsCore(directory, agent, options, configLoader = lo
|
|
|
17272
18103
|
const relativePath = path6.relative(pendingDir, csvFile);
|
|
17273
18104
|
const destPath = path6.join(doneDir, relativePath);
|
|
17274
18105
|
const destDir = path6.dirname(destPath);
|
|
17275
|
-
if (!
|
|
17276
|
-
|
|
18106
|
+
if (!fs7.existsSync(destDir)) {
|
|
18107
|
+
fs7.mkdirSync(destDir, { recursive: true });
|
|
17277
18108
|
}
|
|
17278
|
-
|
|
18109
|
+
fs7.renameSync(csvFile, destPath);
|
|
17279
18110
|
}
|
|
17280
18111
|
return JSON.stringify({
|
|
17281
18112
|
success: true,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@fuzzle/opencode-accountant",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.11",
|
|
4
4
|
"description": "An OpenCode accounting agent, specialized in double-entry-bookkepping with hledger",
|
|
5
5
|
"author": {
|
|
6
6
|
"name": "ali bengali",
|
|
@@ -28,6 +28,7 @@
|
|
|
28
28
|
],
|
|
29
29
|
"dependencies": {
|
|
30
30
|
"@opencode-ai/plugin": "latest",
|
|
31
|
+
"convert-csv-to-json": "^3.20.0",
|
|
31
32
|
"js-yaml": "^4.1.0",
|
|
32
33
|
"papaparse": "^5.5.3"
|
|
33
34
|
},
|