@fuzzle/opencode-accountant 0.6.1 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +125 -18
- package/docs/tools/import-pipeline.md +46 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -4249,7 +4249,7 @@ __export(exports_accountSuggester, {
|
|
|
4249
4249
|
extractRulePatternsFromFile: () => extractRulePatternsFromFile,
|
|
4250
4250
|
clearSuggestionCache: () => clearSuggestionCache
|
|
4251
4251
|
});
|
|
4252
|
-
import * as
|
|
4252
|
+
import * as fs16 from "fs";
|
|
4253
4253
|
import * as crypto from "crypto";
|
|
4254
4254
|
function clearSuggestionCache() {
|
|
4255
4255
|
Object.keys(suggestionCache).forEach((key) => delete suggestionCache[key]);
|
|
@@ -4259,10 +4259,10 @@ function hashTransaction(posting) {
|
|
|
4259
4259
|
return crypto.createHash("md5").update(data).digest("hex");
|
|
4260
4260
|
}
|
|
4261
4261
|
function loadExistingAccounts(yearJournalPath) {
|
|
4262
|
-
if (!
|
|
4262
|
+
if (!fs16.existsSync(yearJournalPath)) {
|
|
4263
4263
|
return [];
|
|
4264
4264
|
}
|
|
4265
|
-
const content =
|
|
4265
|
+
const content = fs16.readFileSync(yearJournalPath, "utf-8");
|
|
4266
4266
|
const lines = content.split(`
|
|
4267
4267
|
`);
|
|
4268
4268
|
const accounts = [];
|
|
@@ -4278,10 +4278,10 @@ function loadExistingAccounts(yearJournalPath) {
|
|
|
4278
4278
|
return accounts.sort();
|
|
4279
4279
|
}
|
|
4280
4280
|
function extractRulePatternsFromFile(rulesPath) {
|
|
4281
|
-
if (!
|
|
4281
|
+
if (!fs16.existsSync(rulesPath)) {
|
|
4282
4282
|
return [];
|
|
4283
4283
|
}
|
|
4284
|
-
const content =
|
|
4284
|
+
const content = fs16.readFileSync(rulesPath, "utf-8");
|
|
4285
4285
|
const lines = content.split(`
|
|
4286
4286
|
`);
|
|
4287
4287
|
const patterns = [];
|
|
@@ -24581,7 +24581,7 @@ function formatJournalEntry(match2) {
|
|
|
24581
24581
|
if (hasFees) {
|
|
24582
24582
|
const feeAmount = formatAmount(btcRow.fees.amount);
|
|
24583
24583
|
const feeCurrency = btcRow.fees.currency;
|
|
24584
|
-
lines.push(` expenses:fees:
|
|
24584
|
+
lines.push(` expenses:fees:btc ${feeAmount} ${feeCurrency}`, ` equity:bitcoin:conversion -${feeAmount} ${feeCurrency}`);
|
|
24585
24585
|
}
|
|
24586
24586
|
return lines.join(`
|
|
24587
24587
|
`);
|
|
@@ -24666,6 +24666,80 @@ function generateBtcPurchaseJournal(fiatCsvPaths, btcCsvPath, yearJournalPath, l
|
|
|
24666
24666
|
};
|
|
24667
24667
|
}
|
|
24668
24668
|
|
|
24669
|
+
// src/utils/btcCsvPreprocessor.ts
|
|
24670
|
+
import * as fs15 from "fs";
|
|
24671
|
+
var ORIGINAL_HEADER = "Symbol,Type,Quantity,Price,Value,Fees,Date";
var PREPROCESSED_HEADER = "Symbol,Type,Quantity,Price,Value,Fees,Date,Fees_BTC,Total_BTC,Price_Amount";
var FEE_BTC_DECIMALS = 8;
// Convert a fiat fee amount into BTC at the given fiat-per-BTC price.
// A zero price maps to a zero fee so callers never divide by zero.
function calculateFeeBtc(fees, price) {
  const rate = price.amount;
  if (rate === 0) {
    return 0;
  }
  return fees.amount / rate;
}
|
|
24679
|
+
// Render a price object as the "<amount> <currency>" string stored in the
// Price_Amount CSV column.
function formatPriceAmount(price) {
  const { amount, currency } = price;
  return `${amount} ${currency}`;
}
|
|
24682
|
+
// Preprocess a Revolut BTC CSV in place, appending three computed columns
// (Fees_BTC, Total_BTC, Price_Amount) that hledger CSV rules cannot derive
// themselves (rules support no division; see docs/tools/import-pipeline.md).
//
// Params:
//   csvPath - path to the CSV; the file is rewritten in place.
//   logger  - optional logger exposing info/debug.
// Returns: { rowsProcessed, sendRowsEnriched, alreadyPreprocessed }
// Throws: Error when the header matches neither the original nor the
//         already-preprocessed format.
function preprocessRevolutBtcCsv(csvPath, logger) {
  const content = fs15.readFileSync(csvPath, "utf-8");
  const lines = content.trim().split(`
`);
  if (lines.length < 2) {
    // Header only (or empty file): nothing to enrich.
    return { rowsProcessed: 0, sendRowsEnriched: 0, alreadyPreprocessed: false };
  }
  const header = lines[0].trim();
  if (header === PREPROCESSED_HEADER) {
    // Idempotency: a second run must not append the columns again.
    logger?.info("CSV already preprocessed, skipping");
    return { rowsProcessed: lines.length - 1, sendRowsEnriched: 0, alreadyPreprocessed: true };
  }
  if (header !== ORIGINAL_HEADER) {
    throw new Error(`Unexpected CSV header. Expected:
${ORIGINAL_HEADER}
Got:
${header}`);
  }
  const outputLines = [PREPROCESSED_HEADER];
  let sendRowsEnriched = 0;
  for (let i2 = 1; i2 < lines.length; i2++) {
    const line = lines[i2];
    if (line.trim() === "")
      continue;
    const fields = parseCryptoCsvLine(line);
    if (fields.length < 7) {
      // Short/malformed row: keep it verbatim, pad the three new columns empty.
      outputLines.push(line + ",,,");
      continue;
    }
    const type2 = fields[1];
    const quantityStr = fields[2];
    const priceStr = fields[3];
    const feesStr = fields[5];
    let feesBtc = "";
    let totalBtc = "";
    let priceAmount = "";
    try {
      const price = parseBtcPrice(priceStr);
      priceAmount = formatPriceAmount(price);
      if (type2 === "Send") {
        const fees = parseBtcPrice(feesStr);
        const feeBtc = calculateFeeBtc(fees, price);
        const quantity = Number.parseFloat(quantityStr);
        // Fix: a non-numeric Quantity previously wrote the literal string
        // "NaN" into Total_BTC (parseFloat never throws, so the catch below
        // did not cover it) and still counted the row as enriched. Treat it
        // like any other unparseable row and leave the columns empty.
        if (Number.isFinite(quantity)) {
          feesBtc = feeBtc.toFixed(FEE_BTC_DECIMALS);
          totalBtc = (quantity + feeBtc).toFixed(FEE_BTC_DECIMALS);
          sendRowsEnriched++;
        } else {
          logger?.debug(`Row ${i2}: quantity "${quantityStr}" is not numeric, leaving computed columns empty`);
        }
      }
    } catch {
      // parseBtcPrice rejected the Price or Fees field; keep the raw row.
      logger?.debug(`Row ${i2}: could not parse price/fees, leaving computed columns empty`);
    }
    outputLines.push(`${line},${feesBtc},${totalBtc},${priceAmount}`);
  }
  const outputContent = outputLines.join(`
`) + `
`;
  fs15.writeFileSync(csvPath, outputContent);
  const rowsProcessed = outputLines.length - 1;
  logger?.info(`Preprocessed ${rowsProcessed} rows (${sendRowsEnriched} Send rows enriched)`);
  return { rowsProcessed, sendRowsEnriched, alreadyPreprocessed: false };
}
|
|
24742
|
+
|
|
24669
24743
|
// src/tools/import-pipeline.ts
|
|
24670
24744
|
class NoTransactionsError extends Error {
|
|
24671
24745
|
constructor() {
|
|
@@ -24733,6 +24807,38 @@ async function executeClassifyStep(context, logger) {
|
|
|
24733
24807
|
logger?.endSection();
|
|
24734
24808
|
return contextIds;
|
|
24735
24809
|
}
|
|
24810
|
+
// Step 1a of the import pipeline: locate the classified Revolut BTC CSV,
// if any, and run the in-place column preprocessing on it. The step is
// skipped when no revolut/btc context exists; a failure is recorded on the
// pipeline result and rethrown (fail-fast), and the log section is always
// closed.
function executePreprocessBtcStep(context, contextIds, logger) {
  let csvToProcess;
  for (const id of contextIds) {
    const imported = loadContext(context.directory, id);
    if (imported.provider === "revolut" && imported.currency === "btc") {
      csvToProcess = path12.join(context.directory, imported.filePath);
      break;
    }
  }
  if (!csvToProcess) {
    logger?.info("No revolut BTC CSV found, skipping preprocessing");
    return;
  }
  logger?.startSection("Step 1a: Preprocess BTC CSV");
  logger?.logStep("BTC Preprocess", "start");
  try {
    const outcome = preprocessRevolutBtcCsv(csvToProcess, logger);
    let message;
    if (outcome.alreadyPreprocessed) {
      message = "BTC CSV already preprocessed";
    } else {
      message = `Preprocessed ${outcome.rowsProcessed} rows (${outcome.sendRowsEnriched} Send rows enriched)`;
    }
    logger?.logStep("BTC Preprocess", "success", message);
    context.result.steps.btcPreprocess = buildStepResult(true, message, outcome);
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    const errorMessage = `BTC CSV preprocessing failed: ${detail}`;
    logger?.error(errorMessage);
    logger?.logStep("BTC Preprocess", "error", errorMessage);
    context.result.steps.btcPreprocess = buildStepResult(false, errorMessage);
    context.result.error = errorMessage;
    context.result.hint = "Check the BTC CSV format \u2014 expected Revolut crypto account statement";
    throw new Error(errorMessage);
  } finally {
    logger?.endSection();
  }
}
|
|
24736
24842
|
async function executeBtcPurchaseStep(context, contextIds, logger) {
|
|
24737
24843
|
const fiatCsvPaths = [];
|
|
24738
24844
|
let btcCsvPath;
|
|
@@ -25077,6 +25183,7 @@ async function importPipeline(directory, agent, options, configLoader = loadImpo
|
|
|
25077
25183
|
logger.info("No files classified, nothing to import");
|
|
25078
25184
|
return buildPipelineSuccessResult(result, "No files to import");
|
|
25079
25185
|
}
|
|
25186
|
+
executePreprocessBtcStep(context, contextIds, logger);
|
|
25080
25187
|
await executeBtcPurchaseStep(context, contextIds, logger);
|
|
25081
25188
|
let totalTransactions = 0;
|
|
25082
25189
|
for (const contextId of contextIds) {
|
|
@@ -25158,7 +25265,7 @@ This tool orchestrates the full import workflow:
|
|
|
25158
25265
|
}
|
|
25159
25266
|
});
|
|
25160
25267
|
// src/tools/init-directories.ts
|
|
25161
|
-
import * as
|
|
25268
|
+
import * as fs17 from "fs";
|
|
25162
25269
|
import * as path13 from "path";
|
|
25163
25270
|
async function initDirectories(directory) {
|
|
25164
25271
|
try {
|
|
@@ -25166,8 +25273,8 @@ async function initDirectories(directory) {
|
|
|
25166
25273
|
const directoriesCreated = [];
|
|
25167
25274
|
const gitkeepFiles = [];
|
|
25168
25275
|
const importBase = path13.join(directory, "import");
|
|
25169
|
-
if (!
|
|
25170
|
-
|
|
25276
|
+
if (!fs17.existsSync(importBase)) {
|
|
25277
|
+
fs17.mkdirSync(importBase, { recursive: true });
|
|
25171
25278
|
directoriesCreated.push("import");
|
|
25172
25279
|
}
|
|
25173
25280
|
const pathsToCreate = [
|
|
@@ -25178,19 +25285,19 @@ async function initDirectories(directory) {
|
|
|
25178
25285
|
];
|
|
25179
25286
|
for (const { path: dirPath } of pathsToCreate) {
|
|
25180
25287
|
const fullPath = path13.join(directory, dirPath);
|
|
25181
|
-
if (!
|
|
25182
|
-
|
|
25288
|
+
if (!fs17.existsSync(fullPath)) {
|
|
25289
|
+
fs17.mkdirSync(fullPath, { recursive: true });
|
|
25183
25290
|
directoriesCreated.push(dirPath);
|
|
25184
25291
|
}
|
|
25185
25292
|
const gitkeepPath = path13.join(fullPath, ".gitkeep");
|
|
25186
|
-
if (!
|
|
25187
|
-
|
|
25293
|
+
if (!fs17.existsSync(gitkeepPath)) {
|
|
25294
|
+
fs17.writeFileSync(gitkeepPath, "");
|
|
25188
25295
|
gitkeepFiles.push(path13.join(dirPath, ".gitkeep"));
|
|
25189
25296
|
}
|
|
25190
25297
|
}
|
|
25191
25298
|
const gitignorePath = path13.join(importBase, ".gitignore");
|
|
25192
25299
|
let gitignoreCreated = false;
|
|
25193
|
-
if (!
|
|
25300
|
+
if (!fs17.existsSync(gitignorePath)) {
|
|
25194
25301
|
const gitignoreContent = `# Ignore CSV/PDF files in temporary directories
|
|
25195
25302
|
/incoming/*.csv
|
|
25196
25303
|
/incoming/*.pdf
|
|
@@ -25208,7 +25315,7 @@ async function initDirectories(directory) {
|
|
|
25208
25315
|
.DS_Store
|
|
25209
25316
|
Thumbs.db
|
|
25210
25317
|
`;
|
|
25211
|
-
|
|
25318
|
+
fs17.writeFileSync(gitignorePath, gitignoreContent);
|
|
25212
25319
|
gitignoreCreated = true;
|
|
25213
25320
|
}
|
|
25214
25321
|
const parts = [];
|
|
@@ -25285,13 +25392,13 @@ You can now drop CSV files into import/incoming/ and run import-pipeline.`);
|
|
|
25285
25392
|
});
|
|
25286
25393
|
// src/tools/generate-btc-purchases.ts
|
|
25287
25394
|
import * as path14 from "path";
|
|
25288
|
-
import * as
|
|
25395
|
+
import * as fs18 from "fs";
|
|
25289
25396
|
function findFiatCsvPaths(directory, pendingDir, provider) {
|
|
25290
25397
|
const providerDir = path14.join(directory, pendingDir, provider);
|
|
25291
|
-
if (!
|
|
25398
|
+
if (!fs18.existsSync(providerDir))
|
|
25292
25399
|
return [];
|
|
25293
25400
|
const csvPaths = [];
|
|
25294
|
-
const entries =
|
|
25401
|
+
const entries = fs18.readdirSync(providerDir, { withFileTypes: true });
|
|
25295
25402
|
for (const entry of entries) {
|
|
25296
25403
|
if (!entry.isDirectory())
|
|
25297
25404
|
continue;
|
|
@@ -6,14 +6,18 @@ This tool is **restricted to the accountant agent only**.
|
|
|
6
6
|
|
|
7
7
|
## Overview
|
|
8
8
|
|
|
9
|
-
The pipeline automates
|
|
9
|
+
The pipeline automates these sequential steps:
|
|
10
10
|
|
|
11
11
|
1. **Classify** - Detect provider/currency, create contexts, organize files
|
|
12
|
+
1a. **Preprocess BTC CSV** _(Revolut only)_ - Add computed columns (fees in BTC, total, clean price) for hledger rules
|
|
13
|
+
1b. **Generate BTC Purchases** _(Revolut only)_ - Cross-reference fiat + BTC CSVs for equity conversion entries
|
|
12
14
|
2. **Account Declarations** - Ensure all accounts exist in year journals
|
|
13
15
|
3. **Dry Run** - Validate transactions, check for unknown accounts
|
|
14
16
|
4. **Import** - Add transactions to journals, move files to done
|
|
15
17
|
5. **Reconcile** - Verify balances match expectations
|
|
16
18
|
|
|
19
|
+
Steps 1a and 1b are Revolut-specific and are automatically skipped when no Revolut BTC CSV is present.
|
|
20
|
+
|
|
17
21
|
**Key behavior**: The pipeline processes files via **import contexts**. Each classified CSV gets a unique context ID, and subsequent steps operate on these contexts **sequentially** with **fail-fast** error handling.
|
|
18
22
|
|
|
19
23
|
## Arguments
|
|
@@ -156,6 +160,34 @@ When reconciliation fails:
|
|
|
156
160
|
|
|
157
161
|
See [classify-statements](classify-statements.md) for details.
|
|
158
162
|
|
|
163
|
+
### Step 1a: Preprocess BTC CSV
|
|
164
|
+
|
|
165
|
+
**Purpose**: Add computed columns to Revolut BTC CSVs for use in hledger rules
|
|
166
|
+
|
|
167
|
+
**What happens**:
|
|
168
|
+
|
|
169
|
+
1. Finds Revolut BTC contexts from classification
|
|
170
|
+
2. For each Send row, calculates: `fee_btc = fees_chf / price_chf_per_btc`
|
|
171
|
+
3. Adds three columns to the CSV (in-place): `Fees_BTC`, `Total_BTC`, `Price_Amount`
|
|
172
|
+
4. Idempotent — skips if CSV is already preprocessed
|
|
173
|
+
|
|
174
|
+
**Why**: hledger CSV rules don't support division. BTC Send transactions need the fee split into a separate BTC amount, which requires dividing the CHF fee by the CHF/BTC price.
|
|
175
|
+
|
|
176
|
+
**Skipped when**: No Revolut BTC CSV is present among the classified files.
|
|
177
|
+
|
|
178
|
+
### Step 1b: Generate BTC Purchase Entries
|
|
179
|
+
|
|
180
|
+
**Purpose**: Cross-reference Revolut fiat and BTC CSVs to generate equity conversion journal entries for BTC purchases
|
|
181
|
+
|
|
182
|
+
**What happens**:
|
|
183
|
+
|
|
184
|
+
1. Finds matching Revolut fiat and BTC CSV pairs from contexts
|
|
185
|
+
2. Matches fiat transfer rows to BTC Buy rows by timestamp
|
|
186
|
+
3. Generates equity conversion journal entries (fiat → equity → BTC)
|
|
187
|
+
4. Appends entries to year journal, skipping duplicates
|
|
188
|
+
|
|
189
|
+
**Skipped when**: No fiat+BTC CSV pair is found among the classified files.
|
|
190
|
+
|
|
159
191
|
### Step 2: Account Declarations
|
|
160
192
|
|
|
161
193
|
**Purpose**: Ensure all accounts referenced in rules files are declared in year journals
|
|
@@ -262,6 +294,19 @@ See [reconcile-statement](reconcile-statement.md) for details.
|
|
|
262
294
|
│ └──────────────────────────────────────────────────────────┘ │
|
|
263
295
|
│ │
|
|
264
296
|
│ OUTPUT: ["uuid-1", "uuid-2"] │
|
|
297
|
+
└────────────────────────────────────────────────────────────────┘
|
|
298
|
+
│
|
|
299
|
+
▼
|
|
300
|
+
┌────────────────────────────────────────────────────────────────┐
|
|
301
|
+
│ STEP 1a: Preprocess BTC CSV (if revolut/btc context exists) │
|
|
302
|
+
│ • Add Fees_BTC, Total_BTC, Price_Amount columns │
|
|
303
|
+
└────────────────────────────────────────────────────────────────┘
|
|
304
|
+
│
|
|
305
|
+
▼
|
|
306
|
+
┌────────────────────────────────────────────────────────────────┐
|
|
307
|
+
│ STEP 1b: Generate BTC Purchase Entries │
|
|
308
|
+
│ • Cross-reference fiat + BTC CSVs │
|
|
309
|
+
│ • Generate equity conversion journal entries │
|
|
265
310
|
└────────────────────────────────────────────────────────────────┘
|
|
266
311
|
│
|
|
267
312
|
▼
|