@fuzzle/opencode-accountant 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +469 -88
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -1342,18 +1342,18 @@ var require_papaparse = __commonJS((exports, module) => {
|
|
|
1342
1342
|
|
|
1343
1343
|
// node_modules/convert-csv-to-json/src/util/fileUtils.js
|
|
1344
1344
|
var require_fileUtils = __commonJS((exports, module) => {
|
|
1345
|
-
var
|
|
1345
|
+
var fs9 = __require("fs");
|
|
1346
1346
|
|
|
1347
1347
|
class FileUtils {
|
|
1348
1348
|
readFile(fileInputName, encoding) {
|
|
1349
|
-
return
|
|
1349
|
+
return fs9.readFileSync(fileInputName, encoding).toString();
|
|
1350
1350
|
}
|
|
1351
1351
|
readFileAsync(fileInputName, encoding = "utf8") {
|
|
1352
|
-
if (
|
|
1353
|
-
return
|
|
1352
|
+
if (fs9.promises && typeof fs9.promises.readFile === "function") {
|
|
1353
|
+
return fs9.promises.readFile(fileInputName, encoding).then((buf) => buf.toString());
|
|
1354
1354
|
}
|
|
1355
1355
|
return new Promise((resolve2, reject) => {
|
|
1356
|
-
|
|
1356
|
+
fs9.readFile(fileInputName, encoding, (err, data) => {
|
|
1357
1357
|
if (err) {
|
|
1358
1358
|
reject(err);
|
|
1359
1359
|
return;
|
|
@@ -1363,7 +1363,7 @@ var require_fileUtils = __commonJS((exports, module) => {
|
|
|
1363
1363
|
});
|
|
1364
1364
|
}
|
|
1365
1365
|
writeFile(json3, fileOutputName) {
|
|
1366
|
-
|
|
1366
|
+
fs9.writeFile(fileOutputName, json3, function(err) {
|
|
1367
1367
|
if (err) {
|
|
1368
1368
|
throw err;
|
|
1369
1369
|
} else {
|
|
@@ -1372,11 +1372,11 @@ var require_fileUtils = __commonJS((exports, module) => {
|
|
|
1372
1372
|
});
|
|
1373
1373
|
}
|
|
1374
1374
|
writeFileAsync(json3, fileOutputName) {
|
|
1375
|
-
if (
|
|
1376
|
-
return
|
|
1375
|
+
if (fs9.promises && typeof fs9.promises.writeFile === "function") {
|
|
1376
|
+
return fs9.promises.writeFile(fileOutputName, json3);
|
|
1377
1377
|
}
|
|
1378
1378
|
return new Promise((resolve2, reject) => {
|
|
1379
|
-
|
|
1379
|
+
fs9.writeFile(fileOutputName, json3, (err) => {
|
|
1380
1380
|
if (err)
|
|
1381
1381
|
return reject(err);
|
|
1382
1382
|
resolve2();
|
|
@@ -1941,7 +1941,7 @@ var require_convert_csv_to_json = __commonJS((exports) => {
|
|
|
1941
1941
|
});
|
|
1942
1942
|
|
|
1943
1943
|
// src/index.ts
|
|
1944
|
-
import { dirname as
|
|
1944
|
+
import { dirname as dirname6, join as join11 } from "path";
|
|
1945
1945
|
import { fileURLToPath } from "url";
|
|
1946
1946
|
|
|
1947
1947
|
// src/utils/agentLoader.ts
|
|
@@ -17273,8 +17273,8 @@ var fetch_currency_prices_default = tool({
|
|
|
17273
17273
|
}
|
|
17274
17274
|
});
|
|
17275
17275
|
// src/tools/classify-statements.ts
|
|
17276
|
-
import * as
|
|
17277
|
-
import * as
|
|
17276
|
+
import * as fs6 from "fs";
|
|
17277
|
+
import * as path7 from "path";
|
|
17278
17278
|
|
|
17279
17279
|
// src/utils/importConfig.ts
|
|
17280
17280
|
import * as fs3 from "fs";
|
|
@@ -17543,6 +17543,63 @@ function detectProvider(filename, content, config2) {
|
|
|
17543
17543
|
|
|
17544
17544
|
// src/utils/worktreeManager.ts
|
|
17545
17545
|
import { spawnSync } from "child_process";
|
|
17546
|
+
|
|
17547
|
+
// node_modules/uuid/dist-node/stringify.js
|
|
17548
|
+
var byteToHex = [];
|
|
17549
|
+
for (let i2 = 0;i2 < 256; ++i2) {
|
|
17550
|
+
byteToHex.push((i2 + 256).toString(16).slice(1));
|
|
17551
|
+
}
|
|
17552
|
+
function unsafeStringify(arr, offset = 0) {
|
|
17553
|
+
return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase();
|
|
17554
|
+
}
|
|
17555
|
+
|
|
17556
|
+
// node_modules/uuid/dist-node/rng.js
|
|
17557
|
+
import { randomFillSync } from "crypto";
|
|
17558
|
+
var rnds8Pool = new Uint8Array(256);
|
|
17559
|
+
var poolPtr = rnds8Pool.length;
|
|
17560
|
+
function rng() {
|
|
17561
|
+
if (poolPtr > rnds8Pool.length - 16) {
|
|
17562
|
+
randomFillSync(rnds8Pool);
|
|
17563
|
+
poolPtr = 0;
|
|
17564
|
+
}
|
|
17565
|
+
return rnds8Pool.slice(poolPtr, poolPtr += 16);
|
|
17566
|
+
}
|
|
17567
|
+
|
|
17568
|
+
// node_modules/uuid/dist-node/native.js
|
|
17569
|
+
import { randomUUID } from "crypto";
|
|
17570
|
+
var native_default = { randomUUID };
|
|
17571
|
+
|
|
17572
|
+
// node_modules/uuid/dist-node/v4.js
|
|
17573
|
+
function _v4(options, buf, offset) {
|
|
17574
|
+
options = options || {};
|
|
17575
|
+
const rnds = options.random ?? options.rng?.() ?? rng();
|
|
17576
|
+
if (rnds.length < 16) {
|
|
17577
|
+
throw new Error("Random bytes length must be >= 16");
|
|
17578
|
+
}
|
|
17579
|
+
rnds[6] = rnds[6] & 15 | 64;
|
|
17580
|
+
rnds[8] = rnds[8] & 63 | 128;
|
|
17581
|
+
if (buf) {
|
|
17582
|
+
offset = offset || 0;
|
|
17583
|
+
if (offset < 0 || offset + 16 > buf.length) {
|
|
17584
|
+
throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`);
|
|
17585
|
+
}
|
|
17586
|
+
for (let i2 = 0;i2 < 16; ++i2) {
|
|
17587
|
+
buf[offset + i2] = rnds[i2];
|
|
17588
|
+
}
|
|
17589
|
+
return buf;
|
|
17590
|
+
}
|
|
17591
|
+
return unsafeStringify(rnds);
|
|
17592
|
+
}
|
|
17593
|
+
function v4(options, buf, offset) {
|
|
17594
|
+
if (native_default.randomUUID && !buf && !options) {
|
|
17595
|
+
return native_default.randomUUID();
|
|
17596
|
+
}
|
|
17597
|
+
return _v4(options, buf, offset);
|
|
17598
|
+
}
|
|
17599
|
+
var v4_default = v4;
|
|
17600
|
+
// src/utils/worktreeManager.ts
|
|
17601
|
+
import * as fs4 from "fs";
|
|
17602
|
+
import * as path5 from "path";
|
|
17546
17603
|
function execGit(args, cwd) {
|
|
17547
17604
|
const result = spawnSync("git", args, { cwd, encoding: "utf-8" });
|
|
17548
17605
|
if (result.status !== 0) {
|
|
@@ -17550,6 +17607,70 @@ function execGit(args, cwd) {
|
|
|
17550
17607
|
}
|
|
17551
17608
|
return (result.stdout || "").trim();
|
|
17552
17609
|
}
|
|
17610
|
+
function execGitSafe(args, cwd) {
|
|
17611
|
+
const result = spawnSync("git", args, { cwd, encoding: "utf-8" });
|
|
17612
|
+
if (result.status !== 0) {
|
|
17613
|
+
return { success: false, output: result.stderr || result.stdout || `git ${args[0]} failed` };
|
|
17614
|
+
}
|
|
17615
|
+
return { success: true, output: (result.stdout || "").trim() };
|
|
17616
|
+
}
|
|
17617
|
+
function createImportWorktree(mainRepoPath, options = {}) {
|
|
17618
|
+
const baseDir = options.baseDir ?? "/tmp";
|
|
17619
|
+
const uuid3 = v4_default();
|
|
17620
|
+
const branch = `import-${uuid3}`;
|
|
17621
|
+
const worktreePath = path5.join(baseDir, `import-worktree-${uuid3}`);
|
|
17622
|
+
try {
|
|
17623
|
+
execGit(["rev-parse", "--git-dir"], mainRepoPath);
|
|
17624
|
+
} catch {
|
|
17625
|
+
throw new Error(`Not a git repository: ${mainRepoPath}`);
|
|
17626
|
+
}
|
|
17627
|
+
execGit(["branch", branch], mainRepoPath);
|
|
17628
|
+
try {
|
|
17629
|
+
execGit(["worktree", "add", worktreePath, branch], mainRepoPath);
|
|
17630
|
+
} catch (error45) {
|
|
17631
|
+
execGitSafe(["branch", "-D", branch], mainRepoPath);
|
|
17632
|
+
throw error45;
|
|
17633
|
+
}
|
|
17634
|
+
return {
|
|
17635
|
+
path: worktreePath,
|
|
17636
|
+
branch,
|
|
17637
|
+
uuid: uuid3,
|
|
17638
|
+
mainRepoPath
|
|
17639
|
+
};
|
|
17640
|
+
}
|
|
17641
|
+
function mergeWorktree(context, commitMessage) {
|
|
17642
|
+
const status = execGit(["status", "--porcelain"], context.path);
|
|
17643
|
+
if (status.length > 0) {
|
|
17644
|
+
execGit(["add", "-A"], context.path);
|
|
17645
|
+
execGit(["commit", "-m", commitMessage], context.path);
|
|
17646
|
+
}
|
|
17647
|
+
const currentBranch = execGit(["rev-parse", "--abbrev-ref", "HEAD"], context.mainRepoPath);
|
|
17648
|
+
execGit(["merge", "--no-ff", context.branch, "-m", commitMessage], context.mainRepoPath);
|
|
17649
|
+
if (currentBranch !== "main" && currentBranch !== "master") {
|
|
17650
|
+
execGit(["checkout", currentBranch], context.mainRepoPath);
|
|
17651
|
+
}
|
|
17652
|
+
}
|
|
17653
|
+
function removeWorktree(context, force = false) {
|
|
17654
|
+
const forceFlag = force ? "--force" : "";
|
|
17655
|
+
const args = ["worktree", "remove", context.path];
|
|
17656
|
+
if (forceFlag) {
|
|
17657
|
+
args.push(forceFlag);
|
|
17658
|
+
}
|
|
17659
|
+
const removeResult = execGitSafe(args, context.mainRepoPath);
|
|
17660
|
+
if (!removeResult.success) {
|
|
17661
|
+
if (!fs4.existsSync(context.path)) {} else {
|
|
17662
|
+
return { success: false, error: `Failed to remove worktree: ${removeResult.output}` };
|
|
17663
|
+
}
|
|
17664
|
+
}
|
|
17665
|
+
execGitSafe(["worktree", "prune"], context.mainRepoPath);
|
|
17666
|
+
const branchResult = execGitSafe(["branch", "-D", context.branch], context.mainRepoPath);
|
|
17667
|
+
if (!branchResult.success) {
|
|
17668
|
+
if (!branchResult.output.includes("not found")) {
|
|
17669
|
+
return { success: false, error: `Failed to delete branch: ${branchResult.output}` };
|
|
17670
|
+
}
|
|
17671
|
+
}
|
|
17672
|
+
return { success: true };
|
|
17673
|
+
}
|
|
17553
17674
|
function isInWorktree(directory) {
|
|
17554
17675
|
try {
|
|
17555
17676
|
const gitDir = execGit(["rev-parse", "--git-dir"], directory);
|
|
@@ -17558,22 +17679,34 @@ function isInWorktree(directory) {
|
|
|
17558
17679
|
return false;
|
|
17559
17680
|
}
|
|
17560
17681
|
}
|
|
17682
|
+
async function withWorktree(directory, operation) {
|
|
17683
|
+
let createdWorktree = null;
|
|
17684
|
+
try {
|
|
17685
|
+
createdWorktree = createImportWorktree(directory);
|
|
17686
|
+
const result = await operation(createdWorktree);
|
|
17687
|
+
return result;
|
|
17688
|
+
} finally {
|
|
17689
|
+
if (createdWorktree) {
|
|
17690
|
+
removeWorktree(createdWorktree, true);
|
|
17691
|
+
}
|
|
17692
|
+
}
|
|
17693
|
+
}
|
|
17561
17694
|
|
|
17562
17695
|
// src/utils/fileUtils.ts
|
|
17563
|
-
import * as
|
|
17564
|
-
import * as
|
|
17696
|
+
import * as fs5 from "fs";
|
|
17697
|
+
import * as path6 from "path";
|
|
17565
17698
|
function findCSVFiles(importsDir) {
|
|
17566
|
-
if (!
|
|
17699
|
+
if (!fs5.existsSync(importsDir)) {
|
|
17567
17700
|
return [];
|
|
17568
17701
|
}
|
|
17569
|
-
return
|
|
17570
|
-
const fullPath =
|
|
17571
|
-
return
|
|
17702
|
+
return fs5.readdirSync(importsDir).filter((file2) => file2.toLowerCase().endsWith(".csv")).filter((file2) => {
|
|
17703
|
+
const fullPath = path6.join(importsDir, file2);
|
|
17704
|
+
return fs5.statSync(fullPath).isFile();
|
|
17572
17705
|
});
|
|
17573
17706
|
}
|
|
17574
17707
|
function ensureDirectory(dirPath) {
|
|
17575
|
-
if (!
|
|
17576
|
-
|
|
17708
|
+
if (!fs5.existsSync(dirPath)) {
|
|
17709
|
+
fs5.mkdirSync(dirPath, { recursive: true });
|
|
17577
17710
|
}
|
|
17578
17711
|
}
|
|
17579
17712
|
|
|
@@ -17614,20 +17747,20 @@ function planMoves(csvFiles, importsDir, pendingDir, unrecognizedDir, config2) {
|
|
|
17614
17747
|
const plannedMoves = [];
|
|
17615
17748
|
const collisions = [];
|
|
17616
17749
|
for (const filename of csvFiles) {
|
|
17617
|
-
const sourcePath =
|
|
17618
|
-
const content =
|
|
17750
|
+
const sourcePath = path7.join(importsDir, filename);
|
|
17751
|
+
const content = fs6.readFileSync(sourcePath, "utf-8");
|
|
17619
17752
|
const detection = detectProvider(filename, content, config2);
|
|
17620
17753
|
let targetPath;
|
|
17621
17754
|
let targetFilename;
|
|
17622
17755
|
if (detection) {
|
|
17623
17756
|
targetFilename = detection.outputFilename || filename;
|
|
17624
|
-
const targetDir =
|
|
17625
|
-
targetPath =
|
|
17757
|
+
const targetDir = path7.join(pendingDir, detection.provider, detection.currency);
|
|
17758
|
+
targetPath = path7.join(targetDir, targetFilename);
|
|
17626
17759
|
} else {
|
|
17627
17760
|
targetFilename = filename;
|
|
17628
|
-
targetPath =
|
|
17761
|
+
targetPath = path7.join(unrecognizedDir, filename);
|
|
17629
17762
|
}
|
|
17630
|
-
if (
|
|
17763
|
+
if (fs6.existsSync(targetPath)) {
|
|
17631
17764
|
collisions.push({
|
|
17632
17765
|
filename,
|
|
17633
17766
|
existingPath: targetPath
|
|
@@ -17648,22 +17781,22 @@ function executeMoves(plannedMoves, config2, unrecognizedDir) {
|
|
|
17648
17781
|
const unrecognized = [];
|
|
17649
17782
|
for (const move of plannedMoves) {
|
|
17650
17783
|
if (move.detection) {
|
|
17651
|
-
const targetDir =
|
|
17784
|
+
const targetDir = path7.dirname(move.targetPath);
|
|
17652
17785
|
ensureDirectory(targetDir);
|
|
17653
|
-
|
|
17786
|
+
fs6.renameSync(move.sourcePath, move.targetPath);
|
|
17654
17787
|
classified.push({
|
|
17655
17788
|
filename: move.targetFilename,
|
|
17656
17789
|
originalFilename: move.detection.outputFilename ? move.filename : undefined,
|
|
17657
17790
|
provider: move.detection.provider,
|
|
17658
17791
|
currency: move.detection.currency,
|
|
17659
|
-
targetPath:
|
|
17792
|
+
targetPath: path7.join(config2.paths.pending, move.detection.provider, move.detection.currency, move.targetFilename)
|
|
17660
17793
|
});
|
|
17661
17794
|
} else {
|
|
17662
17795
|
ensureDirectory(unrecognizedDir);
|
|
17663
|
-
|
|
17796
|
+
fs6.renameSync(move.sourcePath, move.targetPath);
|
|
17664
17797
|
unrecognized.push({
|
|
17665
17798
|
filename: move.filename,
|
|
17666
|
-
targetPath:
|
|
17799
|
+
targetPath: path7.join(config2.paths.unrecognized, move.filename)
|
|
17667
17800
|
});
|
|
17668
17801
|
}
|
|
17669
17802
|
}
|
|
@@ -17687,9 +17820,9 @@ async function classifyStatements(directory, agent, configLoader = loadImportCon
|
|
|
17687
17820
|
const errorMessage = err instanceof Error ? err.message : String(err);
|
|
17688
17821
|
return buildErrorResult2(errorMessage);
|
|
17689
17822
|
}
|
|
17690
|
-
const importsDir =
|
|
17691
|
-
const pendingDir =
|
|
17692
|
-
const unrecognizedDir =
|
|
17823
|
+
const importsDir = path7.join(directory, config2.paths.import);
|
|
17824
|
+
const pendingDir = path7.join(directory, config2.paths.pending);
|
|
17825
|
+
const unrecognizedDir = path7.join(directory, config2.paths.unrecognized);
|
|
17693
17826
|
const csvFiles = findCSVFiles(importsDir);
|
|
17694
17827
|
if (csvFiles.length === 0) {
|
|
17695
17828
|
return buildSuccessResult2([], [], `No CSV files found in ${config2.paths.import}`);
|
|
@@ -17710,12 +17843,12 @@ var classify_statements_default = tool({
|
|
|
17710
17843
|
}
|
|
17711
17844
|
});
|
|
17712
17845
|
// src/tools/import-statements.ts
|
|
17713
|
-
import * as
|
|
17714
|
-
import * as
|
|
17846
|
+
import * as fs10 from "fs";
|
|
17847
|
+
import * as path9 from "path";
|
|
17715
17848
|
|
|
17716
17849
|
// src/utils/rulesMatcher.ts
|
|
17717
|
-
import * as
|
|
17718
|
-
import * as
|
|
17850
|
+
import * as fs7 from "fs";
|
|
17851
|
+
import * as path8 from "path";
|
|
17719
17852
|
function parseSourceDirective(content) {
|
|
17720
17853
|
const match = content.match(/^source\s+([^\n#]+)/m);
|
|
17721
17854
|
if (!match) {
|
|
@@ -17724,28 +17857,28 @@ function parseSourceDirective(content) {
|
|
|
17724
17857
|
return match[1].trim();
|
|
17725
17858
|
}
|
|
17726
17859
|
function resolveSourcePath(sourcePath, rulesFilePath) {
|
|
17727
|
-
if (
|
|
17860
|
+
if (path8.isAbsolute(sourcePath)) {
|
|
17728
17861
|
return sourcePath;
|
|
17729
17862
|
}
|
|
17730
|
-
const rulesDir =
|
|
17731
|
-
return
|
|
17863
|
+
const rulesDir = path8.dirname(rulesFilePath);
|
|
17864
|
+
return path8.resolve(rulesDir, sourcePath);
|
|
17732
17865
|
}
|
|
17733
17866
|
function loadRulesMapping(rulesDir) {
|
|
17734
17867
|
const mapping = {};
|
|
17735
|
-
if (!
|
|
17868
|
+
if (!fs7.existsSync(rulesDir)) {
|
|
17736
17869
|
return mapping;
|
|
17737
17870
|
}
|
|
17738
|
-
const files =
|
|
17871
|
+
const files = fs7.readdirSync(rulesDir);
|
|
17739
17872
|
for (const file2 of files) {
|
|
17740
17873
|
if (!file2.endsWith(".rules")) {
|
|
17741
17874
|
continue;
|
|
17742
17875
|
}
|
|
17743
|
-
const rulesFilePath =
|
|
17744
|
-
const stat =
|
|
17876
|
+
const rulesFilePath = path8.join(rulesDir, file2);
|
|
17877
|
+
const stat = fs7.statSync(rulesFilePath);
|
|
17745
17878
|
if (!stat.isFile()) {
|
|
17746
17879
|
continue;
|
|
17747
17880
|
}
|
|
17748
|
-
const content =
|
|
17881
|
+
const content = fs7.readFileSync(rulesFilePath, "utf-8");
|
|
17749
17882
|
const sourcePath = parseSourceDirective(content);
|
|
17750
17883
|
if (!sourcePath) {
|
|
17751
17884
|
continue;
|
|
@@ -17759,9 +17892,9 @@ function findRulesForCsv(csvPath, mapping) {
|
|
|
17759
17892
|
if (mapping[csvPath]) {
|
|
17760
17893
|
return mapping[csvPath];
|
|
17761
17894
|
}
|
|
17762
|
-
const normalizedCsvPath =
|
|
17895
|
+
const normalizedCsvPath = path8.normalize(csvPath);
|
|
17763
17896
|
for (const [mappedCsv, rulesFile] of Object.entries(mapping)) {
|
|
17764
|
-
if (
|
|
17897
|
+
if (path8.normalize(mappedCsv) === normalizedCsvPath) {
|
|
17765
17898
|
return rulesFile;
|
|
17766
17899
|
}
|
|
17767
17900
|
}
|
|
@@ -17887,7 +18020,7 @@ async function getAccountBalance(mainJournalPath, account, asOfDate, executor =
|
|
|
17887
18020
|
}
|
|
17888
18021
|
|
|
17889
18022
|
// src/utils/rulesParser.ts
|
|
17890
|
-
import * as
|
|
18023
|
+
import * as fs8 from "fs";
|
|
17891
18024
|
function parseSkipRows(rulesContent) {
|
|
17892
18025
|
const match = rulesContent.match(/^skip\s+(\d+)/m);
|
|
17893
18026
|
return match ? parseInt(match[1], 10) : 0;
|
|
@@ -17953,7 +18086,7 @@ function parseAccount1(rulesContent) {
|
|
|
17953
18086
|
}
|
|
17954
18087
|
function getAccountFromRulesFile(rulesFilePath) {
|
|
17955
18088
|
try {
|
|
17956
|
-
const content =
|
|
18089
|
+
const content = fs8.readFileSync(rulesFilePath, "utf-8");
|
|
17957
18090
|
return parseAccount1(content);
|
|
17958
18091
|
} catch {
|
|
17959
18092
|
return null;
|
|
@@ -17973,7 +18106,7 @@ function parseRulesFile(rulesContent) {
|
|
|
17973
18106
|
|
|
17974
18107
|
// src/utils/csvParser.ts
|
|
17975
18108
|
var import_convert_csv_to_json = __toESM(require_convert_csv_to_json(), 1);
|
|
17976
|
-
import * as
|
|
18109
|
+
import * as fs9 from "fs";
|
|
17977
18110
|
|
|
17978
18111
|
// src/utils/balanceUtils.ts
|
|
17979
18112
|
function parseAmountValue(amountStr) {
|
|
@@ -18022,7 +18155,7 @@ function balancesMatch(balance1, balance2) {
|
|
|
18022
18155
|
|
|
18023
18156
|
// src/utils/csvParser.ts
|
|
18024
18157
|
function parseCsvFile(csvPath, config2) {
|
|
18025
|
-
const csvContent =
|
|
18158
|
+
const csvContent = fs9.readFileSync(csvPath, "utf-8");
|
|
18026
18159
|
const lines = csvContent.split(`
|
|
18027
18160
|
`);
|
|
18028
18161
|
const headerIndex = config2.skipRows;
|
|
@@ -18182,8 +18315,8 @@ function buildSuccessResult3(files, summary, message) {
|
|
|
18182
18315
|
async function executeImports(fileResults, directory, pendingDir, doneDir, hledgerExecutor) {
|
|
18183
18316
|
const importedFiles = [];
|
|
18184
18317
|
for (const fileResult of fileResults) {
|
|
18185
|
-
const csvFile =
|
|
18186
|
-
const rulesFile = fileResult.rulesFile ?
|
|
18318
|
+
const csvFile = path9.join(directory, fileResult.csv);
|
|
18319
|
+
const rulesFile = fileResult.rulesFile ? path9.join(directory, fileResult.rulesFile) : null;
|
|
18187
18320
|
if (!rulesFile)
|
|
18188
18321
|
continue;
|
|
18189
18322
|
const year = fileResult.transactionYear;
|
|
@@ -18219,7 +18352,7 @@ async function executeImports(fileResults, directory, pendingDir, doneDir, hledg
|
|
|
18219
18352
|
}
|
|
18220
18353
|
importedFiles.push(csvFile);
|
|
18221
18354
|
}
|
|
18222
|
-
const mainJournalPath =
|
|
18355
|
+
const mainJournalPath = path9.join(directory, ".hledger.journal");
|
|
18223
18356
|
const validationResult = await validateLedger(mainJournalPath, hledgerExecutor);
|
|
18224
18357
|
if (!validationResult.valid) {
|
|
18225
18358
|
return {
|
|
@@ -18229,13 +18362,13 @@ async function executeImports(fileResults, directory, pendingDir, doneDir, hledg
|
|
|
18229
18362
|
};
|
|
18230
18363
|
}
|
|
18231
18364
|
for (const csvFile of importedFiles) {
|
|
18232
|
-
const relativePath =
|
|
18233
|
-
const destPath =
|
|
18234
|
-
const destDir =
|
|
18235
|
-
if (!
|
|
18236
|
-
|
|
18365
|
+
const relativePath = path9.relative(pendingDir, csvFile);
|
|
18366
|
+
const destPath = path9.join(doneDir, relativePath);
|
|
18367
|
+
const destDir = path9.dirname(destPath);
|
|
18368
|
+
if (!fs10.existsSync(destDir)) {
|
|
18369
|
+
fs10.mkdirSync(destDir, { recursive: true });
|
|
18237
18370
|
}
|
|
18238
|
-
|
|
18371
|
+
fs10.renameSync(csvFile, destPath);
|
|
18239
18372
|
}
|
|
18240
18373
|
return {
|
|
18241
18374
|
success: true,
|
|
@@ -18246,7 +18379,7 @@ async function processCsvFile(csvFile, rulesMapping, directory, hledgerExecutor)
|
|
|
18246
18379
|
const rulesFile = findRulesForCsv(csvFile, rulesMapping);
|
|
18247
18380
|
if (!rulesFile) {
|
|
18248
18381
|
return {
|
|
18249
|
-
csv:
|
|
18382
|
+
csv: path9.relative(directory, csvFile),
|
|
18250
18383
|
rulesFile: null,
|
|
18251
18384
|
totalTransactions: 0,
|
|
18252
18385
|
matchedTransactions: 0,
|
|
@@ -18257,8 +18390,8 @@ async function processCsvFile(csvFile, rulesMapping, directory, hledgerExecutor)
|
|
|
18257
18390
|
const result = await hledgerExecutor(["print", "-f", csvFile, "--rules-file", rulesFile]);
|
|
18258
18391
|
if (result.exitCode !== 0) {
|
|
18259
18392
|
return {
|
|
18260
|
-
csv:
|
|
18261
|
-
rulesFile:
|
|
18393
|
+
csv: path9.relative(directory, csvFile),
|
|
18394
|
+
rulesFile: path9.relative(directory, rulesFile),
|
|
18262
18395
|
totalTransactions: 0,
|
|
18263
18396
|
matchedTransactions: 0,
|
|
18264
18397
|
unknownPostings: [],
|
|
@@ -18272,8 +18405,8 @@ async function processCsvFile(csvFile, rulesMapping, directory, hledgerExecutor)
|
|
|
18272
18405
|
if (years.size > 1) {
|
|
18273
18406
|
const yearList = Array.from(years).sort().join(", ");
|
|
18274
18407
|
return {
|
|
18275
|
-
csv:
|
|
18276
|
-
rulesFile:
|
|
18408
|
+
csv: path9.relative(directory, csvFile),
|
|
18409
|
+
rulesFile: path9.relative(directory, rulesFile),
|
|
18277
18410
|
totalTransactions: transactionCount,
|
|
18278
18411
|
matchedTransactions: matchedCount,
|
|
18279
18412
|
unknownPostings: [],
|
|
@@ -18283,7 +18416,7 @@ async function processCsvFile(csvFile, rulesMapping, directory, hledgerExecutor)
|
|
|
18283
18416
|
const transactionYear = years.size === 1 ? Array.from(years)[0] : undefined;
|
|
18284
18417
|
if (unknownPostings.length > 0) {
|
|
18285
18418
|
try {
|
|
18286
|
-
const rulesContent =
|
|
18419
|
+
const rulesContent = fs10.readFileSync(rulesFile, "utf-8");
|
|
18287
18420
|
const rulesConfig = parseRulesFile(rulesContent);
|
|
18288
18421
|
const csvRows = parseCsvFile(csvFile, rulesConfig);
|
|
18289
18422
|
for (const posting of unknownPostings) {
|
|
@@ -18300,8 +18433,8 @@ async function processCsvFile(csvFile, rulesMapping, directory, hledgerExecutor)
|
|
|
18300
18433
|
}
|
|
18301
18434
|
}
|
|
18302
18435
|
return {
|
|
18303
|
-
csv:
|
|
18304
|
-
rulesFile:
|
|
18436
|
+
csv: path9.relative(directory, csvFile),
|
|
18437
|
+
rulesFile: path9.relative(directory, rulesFile),
|
|
18305
18438
|
totalTransactions: transactionCount,
|
|
18306
18439
|
matchedTransactions: matchedCount,
|
|
18307
18440
|
unknownPostings,
|
|
@@ -18323,9 +18456,9 @@ async function importStatements(directory, agent, options, configLoader = loadIm
|
|
|
18323
18456
|
const errorMessage = `Failed to load configuration: ${error45 instanceof Error ? error45.message : String(error45)}`;
|
|
18324
18457
|
return buildErrorResult3(errorMessage, 'Ensure config/import/providers.yaml exists with required paths including "rules"');
|
|
18325
18458
|
}
|
|
18326
|
-
const pendingDir =
|
|
18327
|
-
const rulesDir =
|
|
18328
|
-
const doneDir =
|
|
18459
|
+
const pendingDir = path9.join(directory, config2.paths.pending);
|
|
18460
|
+
const rulesDir = path9.join(directory, config2.paths.rules);
|
|
18461
|
+
const doneDir = path9.join(directory, config2.paths.done);
|
|
18329
18462
|
const rulesMapping = loadRulesMapping(rulesDir);
|
|
18330
18463
|
const csvFiles = findCsvFiles(pendingDir, options.provider, options.currency);
|
|
18331
18464
|
if (csvFiles.length === 0) {
|
|
@@ -18450,8 +18583,8 @@ This tool processes CSV files in the pending import directory and uses hledger's
|
|
|
18450
18583
|
}
|
|
18451
18584
|
});
|
|
18452
18585
|
// src/tools/reconcile-statement.ts
|
|
18453
|
-
import * as
|
|
18454
|
-
import * as
|
|
18586
|
+
import * as fs11 from "fs";
|
|
18587
|
+
import * as path10 from "path";
|
|
18455
18588
|
function buildErrorResult4(params) {
|
|
18456
18589
|
return JSON.stringify({
|
|
18457
18590
|
success: false,
|
|
@@ -18499,14 +18632,14 @@ function findCsvToReconcile(doneDir, options) {
|
|
|
18499
18632
|
};
|
|
18500
18633
|
}
|
|
18501
18634
|
const csvFile = csvFiles[csvFiles.length - 1];
|
|
18502
|
-
const relativePath =
|
|
18635
|
+
const relativePath = path10.relative(path10.dirname(path10.dirname(doneDir)), csvFile);
|
|
18503
18636
|
return { csvFile, relativePath };
|
|
18504
18637
|
}
|
|
18505
18638
|
function determineClosingBalance(csvFile, config2, options, relativeCsvPath) {
|
|
18506
18639
|
let metadata;
|
|
18507
18640
|
try {
|
|
18508
|
-
const content =
|
|
18509
|
-
const filename =
|
|
18641
|
+
const content = fs11.readFileSync(csvFile, "utf-8");
|
|
18642
|
+
const filename = path10.basename(csvFile);
|
|
18510
18643
|
const detectionResult = detectProvider(filename, content, config2);
|
|
18511
18644
|
metadata = detectionResult?.metadata;
|
|
18512
18645
|
} catch {
|
|
@@ -18514,9 +18647,10 @@ function determineClosingBalance(csvFile, config2, options, relativeCsvPath) {
|
|
|
18514
18647
|
}
|
|
18515
18648
|
let closingBalance = options.closingBalance;
|
|
18516
18649
|
if (!closingBalance && metadata?.closing_balance) {
|
|
18517
|
-
|
|
18518
|
-
|
|
18519
|
-
|
|
18650
|
+
const { closing_balance, currency } = metadata;
|
|
18651
|
+
closingBalance = closing_balance;
|
|
18652
|
+
if (currency && !closingBalance.includes(currency)) {
|
|
18653
|
+
closingBalance = `${currency} ${closingBalance}`;
|
|
18520
18654
|
}
|
|
18521
18655
|
}
|
|
18522
18656
|
if (!closingBalance) {
|
|
@@ -18555,7 +18689,7 @@ function determineAccount(csvFile, rulesDir, options, relativeCsvPath, metadata)
|
|
|
18555
18689
|
}
|
|
18556
18690
|
return { account };
|
|
18557
18691
|
}
|
|
18558
|
-
async function
|
|
18692
|
+
async function reconcileStatement(directory, agent, options, configLoader = loadImportConfig, hledgerExecutor = defaultHledgerExecutor, worktreeChecker = isInWorktree) {
|
|
18559
18693
|
const restrictionError = checkAccountantAgent(agent, "reconcile statement");
|
|
18560
18694
|
if (restrictionError) {
|
|
18561
18695
|
return restrictionError;
|
|
@@ -18569,9 +18703,9 @@ async function reconcileStatementCore(directory, agent, options, configLoader =
|
|
|
18569
18703
|
return configResult.error;
|
|
18570
18704
|
}
|
|
18571
18705
|
const { config: config2 } = configResult;
|
|
18572
|
-
const doneDir =
|
|
18573
|
-
const rulesDir =
|
|
18574
|
-
const mainJournalPath =
|
|
18706
|
+
const doneDir = path10.join(directory, config2.paths.done);
|
|
18707
|
+
const rulesDir = path10.join(directory, config2.paths.rules);
|
|
18708
|
+
const mainJournalPath = path10.join(directory, ".hledger.journal");
|
|
18575
18709
|
const csvResult = findCsvToReconcile(doneDir, options);
|
|
18576
18710
|
if ("error" in csvResult) {
|
|
18577
18711
|
return csvResult.error;
|
|
@@ -18686,7 +18820,7 @@ It must be run inside an import worktree (use import-pipeline for the full workf
|
|
|
18686
18820
|
},
|
|
18687
18821
|
async execute(params, context) {
|
|
18688
18822
|
const { directory, agent } = context;
|
|
18689
|
-
return
|
|
18823
|
+
return reconcileStatement(directory, agent, {
|
|
18690
18824
|
provider: params.provider,
|
|
18691
18825
|
currency: params.currency,
|
|
18692
18826
|
closingBalance: params.closingBalance,
|
|
@@ -18694,9 +18828,255 @@ It must be run inside an import worktree (use import-pipeline for the full workf
|
|
|
18694
18828
|
});
|
|
18695
18829
|
}
|
|
18696
18830
|
});
|
|
18831
|
+
// src/tools/import-pipeline.ts
|
|
18832
|
+
class NoTransactionsError extends Error {
|
|
18833
|
+
constructor() {
|
|
18834
|
+
super("No transactions to import");
|
|
18835
|
+
this.name = "NoTransactionsError";
|
|
18836
|
+
}
|
|
18837
|
+
}
|
|
18838
|
+
function buildStepResult(success2, message, details) {
|
|
18839
|
+
const result = { success: success2, message };
|
|
18840
|
+
if (details !== undefined) {
|
|
18841
|
+
result.details = details;
|
|
18842
|
+
}
|
|
18843
|
+
return result;
|
|
18844
|
+
}
|
|
18845
|
+
function buildSuccessResult5(result, summary) {
|
|
18846
|
+
result.success = true;
|
|
18847
|
+
result.summary = summary;
|
|
18848
|
+
return JSON.stringify(result);
|
|
18849
|
+
}
|
|
18850
|
+
function buildErrorResult5(result, error45, hint) {
|
|
18851
|
+
result.success = false;
|
|
18852
|
+
result.error = error45;
|
|
18853
|
+
if (hint) {
|
|
18854
|
+
result.hint = hint;
|
|
18855
|
+
}
|
|
18856
|
+
return JSON.stringify(result);
|
|
18857
|
+
}
|
|
18858
|
+
function buildCommitMessage(provider, currency, fromDate, untilDate, transactionCount) {
|
|
18859
|
+
const providerStr = provider?.toUpperCase() || "statements";
|
|
18860
|
+
const currencyStr = currency?.toUpperCase();
|
|
18861
|
+
const dateRange = fromDate && untilDate ? ` ${fromDate} to ${untilDate}` : "";
|
|
18862
|
+
const txStr = transactionCount > 0 ? ` (${transactionCount} transactions)` : "";
|
|
18863
|
+
const parts = ["Import:", providerStr];
|
|
18864
|
+
if (currencyStr) {
|
|
18865
|
+
parts.push(currencyStr);
|
|
18866
|
+
}
|
|
18867
|
+
return `${parts.join(" ")}${dateRange}${txStr}`;
|
|
18868
|
+
}
|
|
18869
|
+
async function executeClassifyStep(context, worktree) {
|
|
18870
|
+
if (context.options.skipClassify) {
|
|
18871
|
+
context.result.steps.classify = buildStepResult(true, "Classification skipped (skipClassify: true)");
|
|
18872
|
+
return;
|
|
18873
|
+
}
|
|
18874
|
+
const inWorktree = () => true;
|
|
18875
|
+
const classifyResult = await classifyStatements(worktree.path, context.agent, context.configLoader, inWorktree);
|
|
18876
|
+
const classifyParsed = JSON.parse(classifyResult);
|
|
18877
|
+
const success2 = classifyParsed.success !== false;
|
|
18878
|
+
let message = success2 ? "Classification complete" : "Classification had issues";
|
|
18879
|
+
if (classifyParsed.unrecognized?.length > 0) {
|
|
18880
|
+
message = `Classification complete with ${classifyParsed.unrecognized.length} unrecognized file(s)`;
|
|
18881
|
+
}
|
|
18882
|
+
const details = {
|
|
18883
|
+
success: success2,
|
|
18884
|
+
unrecognized: classifyParsed.unrecognized,
|
|
18885
|
+
classified: classifyParsed
|
|
18886
|
+
};
|
|
18887
|
+
context.result.steps.classify = buildStepResult(success2, message, details);
|
|
18888
|
+
}
|
|
18889
|
+
/**
 * Runs the dry-run (check-only) import step inside the worktree.
 *
 * Calls `importStatements` with `checkOnly: true`, parses its JSON result and
 * records a step result on `context.result.steps.dryRun`. On failure it sets
 * `context.result.error`/`hint` and throws to abort the pipeline; when the dry
 * run succeeds but finds zero transactions it throws `NoTransactionsError` so
 * the caller can short-circuit to the "nothing to do" path.
 *
 * @param {object} context  Pipeline context ({ options, agent, configLoader, hledgerExecutor, result }).
 * @param {object} worktree Worktree handle; only `worktree.path` is used here.
 * @throws {Error} When the dry run reports failure (unknown accounts).
 * @throws {NoTransactionsError} When the dry run finds exactly zero transactions.
 */
async function executeDryRunStep(context, worktree) {
  const { provider, currency } = context.options;
  const raw = await importStatements(
    worktree.path,
    context.agent,
    { provider, currency, checkOnly: true },
    context.configLoader,
    context.hledgerExecutor,
    () => true // signals worktree context to the importer
  );
  const parsed = JSON.parse(raw);
  const summary = parsed.summary;
  let message;
  if (parsed.success) {
    message = `Dry run passed: ${summary?.totalTransactions || 0} transactions ready`;
  } else {
    message = `Dry run failed: ${summary?.unknown || 0} unknown account(s)`;
  }
  context.result.steps.dryRun = buildStepResult(parsed.success, message, {
    success: parsed.success,
    summary
  });
  if (!parsed.success) {
    context.result.error = "Dry run found unknown accounts or errors";
    context.result.hint = "Add rules to categorize unknown transactions, then retry";
    throw new Error("Dry run failed");
  }
  // A strict === 0 check: a missing summary/count does NOT trigger this path.
  if (summary?.totalTransactions === 0) {
    throw new NoTransactionsError();
  }
}
|
|
18911
|
+
/**
 * Runs the real (non-dry-run) import step inside the worktree.
 *
 * Calls `importStatements` with `checkOnly: false`, parses its JSON result,
 * records a step result on `context.result.steps.import`, and on failure sets
 * `context.result.error` and throws to abort the pipeline.
 *
 * @param {object} context  Pipeline context ({ options, agent, configLoader, hledgerExecutor, result }).
 * @param {object} worktree Worktree handle; only `worktree.path` is used here.
 * @throws {Error} When the import reports failure.
 */
async function executeImportStep(context, worktree) {
  const { provider, currency } = context.options;
  const raw = await importStatements(
    worktree.path,
    context.agent,
    { provider, currency, checkOnly: false },
    context.configLoader,
    context.hledgerExecutor,
    () => true // signals worktree context to the importer
  );
  const parsed = JSON.parse(raw);
  const message = parsed.success
    ? `Imported ${parsed.summary?.totalTransactions || 0} transactions`
    : `Import failed: ${parsed.error || "Unknown error"}`;
  context.result.steps.import = buildStepResult(parsed.success, message, {
    success: parsed.success,
    summary: parsed.summary,
    error: parsed.error
  });
  if (parsed.success) {
    return;
  }
  context.result.error = `Import failed: ${parsed.error || "Unknown error"}`;
  throw new Error("Import failed");
}
|
|
18930
|
+
/**
 * Runs the balance-reconciliation step inside the worktree.
 *
 * Calls `reconcileStatement`, parses its JSON result and records a step
 * result on `context.result.steps.reconcile` (including actual/expected
 * balances and any CSV metadata). On mismatch it sets
 * `context.result.error`/`hint` and throws to abort the pipeline.
 *
 * @param {object} context  Pipeline context ({ options, agent, configLoader, hledgerExecutor, result }).
 * @param {object} worktree Worktree handle; only `worktree.path` is used here.
 * @throws {Error} When reconciliation reports failure.
 */
async function executeReconcileStep(context, worktree) {
  const { provider, currency, closingBalance, account } = context.options;
  const raw = await reconcileStatement(
    worktree.path,
    context.agent,
    { provider, currency, closingBalance, account },
    context.configLoader,
    context.hledgerExecutor,
    () => true // signals worktree context
  );
  const parsed = JSON.parse(raw);
  const message = parsed.success
    ? `Balance reconciled: ${parsed.actualBalance}`
    : `Balance mismatch: expected ${parsed.expectedBalance}, got ${parsed.actualBalance}`;
  context.result.steps.reconcile = buildStepResult(parsed.success, message, {
    success: parsed.success,
    actualBalance: parsed.actualBalance,
    expectedBalance: parsed.expectedBalance,
    metadata: parsed.metadata,
    error: parsed.error
  });
  if (parsed.success) {
    return;
  }
  context.result.error = `Reconciliation failed: ${parsed.error || "Balance mismatch"}`;
  context.result.hint = "Check for missing transactions or incorrect rules";
  throw new Error("Reconciliation failed");
}
|
|
18953
|
+
/**
 * Merges the worktree back to the main branch with a descriptive commit.
 *
 * Requires the import and reconcile steps to have recorded their details
 * first (date range comes from reconcile metadata, transaction count from the
 * import summary). Builds the commit message via `buildCommitMessage`, merges
 * via `mergeWorktree`, and records a step result on
 * `context.result.steps.merge`. On merge failure it sets
 * `context.result.error` and throws to abort the pipeline.
 *
 * @param {object} context  Pipeline context ({ options, result }).
 * @param {object} worktree Worktree handle passed through to `mergeWorktree`.
 * @throws {Error} When prerequisites are missing or the merge fails.
 */
async function executeMergeStep(context, worktree) {
  const importDetails = context.result.steps.import?.details;
  const reconcileDetails = context.result.steps.reconcile?.details;
  if (!importDetails || !reconcileDetails) {
    throw new Error("Import or reconcile step not completed before merge");
  }
  const meta = reconcileDetails.metadata;
  const txCount = importDetails.summary?.totalTransactions || 0;
  const commitMessage = buildCommitMessage(
    context.options.provider,
    context.options.currency,
    meta?.from_date,
    meta?.until_date,
    txCount
  );
  try {
    mergeWorktree(worktree, commitMessage);
    context.result.steps.merge = buildStepResult(true, `Merged to main: "${commitMessage}"`, { commitMessage });
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    context.result.steps.merge = buildStepResult(false, `Merge failed: ${reason}`);
    context.result.error = "Merge to main branch failed";
    throw new Error("Merge failed");
  }
}
|
|
18976
|
+
/**
 * Finalizes the pipeline result for the "nothing to import" case.
 *
 * Marks the import/reconcile/merge steps as successfully skipped and wraps
 * the result via `buildSuccessResult5`.
 *
 * @param {object} result Mutable pipeline result object ({ steps, ... }).
 * @returns {*} Whatever `buildSuccessResult5` produces for this result.
 */
function handleNoTransactions(result) {
  const { steps } = result;
  steps.import = buildStepResult(true, "No transactions to import");
  steps.reconcile = buildStepResult(true, "Reconciliation skipped (no transactions)");
  steps.merge = buildStepResult(true, "Merge skipped (no changes)");
  return buildSuccessResult5(result, "No transactions found to import");
}
|
|
18982
|
+
/**
 * Orchestrates the full import pipeline inside an isolated git worktree.
 *
 * Flow: agent check -> create worktree (via `withWorktree`) -> classify ->
 * dry run -> import -> reconcile -> merge. Each step mutates the shared
 * `result.steps` record; a thrown `NoTransactionsError` short-circuits to the
 * "nothing to import" success path, any other step failure yields an error
 * result. Worktree creation failures are caught separately in the outer
 * try/catch.
 *
 * @param {string} directory Repository root directory.
 * @param {*} agent Agent identity checked by `checkAccountantAgent`.
 * @param {object} options Pipeline options (provider, currency, closingBalance, account, skipClassify).
 * @param {Function} [configLoader=loadImportConfig] Config loader passed to steps.
 * @param {Function} [hledgerExecutor=defaultHledgerExecutor] hledger executor passed to steps.
 * @returns {Promise<*>} A success/error result object (or the restriction error).
 */
async function importPipeline(directory, agent, options, configLoader = loadImportConfig, hledgerExecutor = defaultHledgerExecutor) {
  const restriction = checkAccountantAgent(agent, "import pipeline");
  if (restriction) {
    return restriction;
  }
  const result = { success: false, steps: {} };
  const context = { directory, agent, options, configLoader, hledgerExecutor, result };
  try {
    return await withWorktree(directory, async (worktree) => {
      result.worktreeId = worktree.uuid;
      result.steps.worktree = buildStepResult(true, `Created worktree at ${worktree.path}`, {
        path: worktree.path,
        branch: worktree.branch
      });
      try {
        // Steps run strictly in order; each throws on failure to abort the rest.
        const steps = [
          executeClassifyStep,
          executeDryRunStep,
          executeImportStep,
          executeReconcileStep,
          executeMergeStep
        ];
        for (const step of steps) {
          await step(context, worktree);
        }
        // `withWorktree` removes the worktree on exit; record that here.
        result.steps.cleanup = buildStepResult(true, "Worktree cleaned up", {
          cleanedAfterSuccess: true
        });
        const txCount = result.steps.import?.details?.summary?.totalTransactions || 0;
        return buildSuccessResult5(result, `Successfully imported ${txCount} transaction(s)`);
      } catch (err) {
        result.steps.cleanup = buildStepResult(true, "Worktree cleaned up after failure", { cleanedAfterFailure: true });
        if (err instanceof NoTransactionsError) {
          // Zero transactions is a success, not a failure.
          return handleNoTransactions(result);
        }
        if (!result.error) {
          // Steps that fail deliberately set result.error themselves; this
          // covers unexpected exceptions.
          result.error = err instanceof Error ? err.message : String(err);
        }
        return buildErrorResult5(result, result.error, result.hint);
      }
    });
  } catch (err) {
    // Only worktree creation/teardown errors reach here; step failures are
    // handled (and converted to results) inside the worktree callback.
    const reason = err instanceof Error ? err.message : String(err);
    result.steps.worktree = buildStepResult(false, `Failed to create worktree: ${reason}`);
    result.error = "Failed to create worktree";
    return buildErrorResult5(result, result.error);
  }
}
|
|
19034
|
+
// Tool definition for the full import pipeline. Declarative config passed to
// `tool(...)`: the description and arg descriptions are user-facing strings
// surfaced to the agent, so they must not be reworded casually.
var import_pipeline_default = tool({
  description: `ACCOUNTANT AGENT ONLY: Complete import pipeline with git worktree isolation and balance reconciliation.

This tool orchestrates the full import workflow in an isolated git worktree:

**Pipeline Steps:**
1. **Create Worktree**: Creates an isolated git worktree for safe import
2. **Classify**: Moves CSVs from import to pending directory (optional, skip with skipClassify)
3. **Dry Run**: Validates all transactions have known accounts
4. **Import**: Imports transactions to the journal
5. **Reconcile**: Validates closing balance matches CSV metadata
6. **Merge**: Merges worktree to main with --no-ff
7. **Cleanup**: Removes worktree

**Safety Features:**
- All changes happen in isolated worktree
- If any step fails, worktree is discarded (main branch untouched)
- Balance reconciliation ensures data integrity
- Atomic commit with merge --no-ff preserves history

**Usage:**
- Basic: import-pipeline (processes all pending CSVs)
- Filtered: import-pipeline --provider ubs --currency chf
- With manual balance: import-pipeline --closingBalance "CHF 1234.56"
- Skip classify: import-pipeline --skipClassify true`,
  // All arguments are optional; omitting them processes every pending CSV.
  args: {
    provider: tool.schema.string().optional().describe('Filter by provider (e.g., "ubs", "revolut")'),
    currency: tool.schema.string().optional().describe('Filter by currency (e.g., "chf", "eur")'),
    closingBalance: tool.schema.string().optional().describe("Manual closing balance override (if not in CSV metadata)"),
    account: tool.schema.string().optional().describe("Manual account override (auto-detected from rules file if not provided)"),
    skipClassify: tool.schema.boolean().optional().describe("Skip the classify step (default: false)")
  },
  // Thin adapter: forwards the tool invocation context and params to
  // importPipeline, which performs the agent restriction check itself.
  async execute(params, context) {
    const { directory, agent } = context;
    return importPipeline(directory, agent, {
      provider: params.provider,
      currency: params.currency,
      closingBalance: params.closingBalance,
      account: params.account,
      skipClassify: params.skipClassify
    });
  }
});
|
|
18697
19077
|
// src/index.ts
|
|
18698
|
-
var __dirname2 =
|
|
18699
|
-
var AGENT_FILE =
|
|
19078
|
+
var __dirname2 = dirname6(fileURLToPath(import.meta.url));
|
|
19079
|
+
var AGENT_FILE = join11(__dirname2, "..", "agent", "accountant.md");
|
|
18700
19080
|
var AccountantPlugin = async () => {
|
|
18701
19081
|
const agent = loadAgent(AGENT_FILE);
|
|
18702
19082
|
return {
|
|
@@ -18704,7 +19084,8 @@ var AccountantPlugin = async () => {
|
|
|
18704
19084
|
"fetch-currency-prices": fetch_currency_prices_default,
|
|
18705
19085
|
"classify-statements": classify_statements_default,
|
|
18706
19086
|
"import-statements": import_statements_default,
|
|
18707
|
-
"reconcile-statements": reconcile_statement_default
|
|
19087
|
+
"reconcile-statements": reconcile_statement_default,
|
|
19088
|
+
"import-piprline": import_pipeline_default
|
|
18708
19089
|
},
|
|
18709
19090
|
config: async (config2) => {
|
|
18710
19091
|
if (agent) {
|