@fuzzle/opencode-accountant 0.1.2 → 0.1.3-next.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -199,13 +199,15 @@ The `import-pipeline` tool provides an atomic, safe import workflow using git wo
199
199
  2. Run `import-pipeline` tool with optional provider/currency filters
200
200
  3. The tool automatically:
201
201
  - Creates an isolated git worktree
202
+ - Syncs CSV files from main repo to worktree
202
203
  - Classifies CSV files by provider/currency
203
204
  - Validates all transactions have matching rules
204
205
  - Imports transactions to the appropriate year journal
205
206
  - Reconciles closing balance (if available in CSV metadata)
206
207
  - Merges changes back to main branch with `--no-ff`
208
+ - Deletes processed CSV files from main repo's import/incoming
207
209
  - Cleans up the worktree
208
- 4. If any step fails, the worktree is discarded and main branch remains untouched
210
+ 4. If any step fails, the worktree is discarded and main branch remains untouched (CSV files are preserved for retry)
209
211
 
210
212
  ### Statement Import
211
213
 
@@ -56,10 +56,10 @@ When working with accounting tasks:
56
56
 
57
57
  You have access to specialized MCP tools that MUST be used for their designated tasks. Do NOT attempt to replicate their functionality with bash commands, direct hledger CLI calls, or manual file edits.
58
58
 
59
- | Tool | Use For | NEVER Do Instead |
60
- | ----------------- | ---------------------------------------------------- | --------------------------------------------------------- |
61
- | `import-pipeline` | Full import workflow (classify → import → reconcile) | Manual file moves, `hledger import`, manual journal edits |
62
- | `fetch-currency-prices` | Fetching exchange rates | `curl` to price APIs, manual price entries |
59
+ | Tool | Use For | NEVER Do Instead |
60
+ | ----------------------- | ---------------------------------------------------- | --------------------------------------------------------- |
61
+ | `import-pipeline` | Full import workflow (classify → import → reconcile) | Manual file moves, `hledger import`, manual journal edits |
62
+ | `fetch-currency-prices` | Fetching exchange rates | `curl` to price APIs, manual price entries |
63
63
 
64
64
  These tools handle validation, deduplication, error checking, and file organization automatically. Bypassing them risks data corruption, duplicate transactions, and inconsistent state.
65
65
 
@@ -87,11 +87,13 @@ The `import-pipeline` tool provides an **atomic, safe workflow** using git workt
87
87
  1. **Prepare**: Drop CSV files into `{paths.import}` (configured in `config/import/providers.yaml`, default: `import/incoming`)
88
88
  2. **Run Pipeline**: Execute `import-pipeline` (optionally filter by `provider` and `currency`)
89
89
  3. **Automatic Processing**: The tool creates an isolated git worktree and:
90
+ - Syncs CSV files from main repo to worktree
90
91
  - Classifies CSV files by provider/currency
91
92
  - Validates all transactions have matching rules
92
93
  - Imports transactions to the appropriate year journal
93
94
  - Reconciles closing balance (if available in CSV metadata)
94
95
  - Merges changes back to main branch with `--no-ff`
96
+ - Deletes processed CSV files from main repo's import/incoming
95
97
  - Cleans up the worktree
96
98
  4. **Handle Failures**: If any step fails (e.g., unknown postings found):
97
99
  - Worktree is discarded, main branch remains untouched
@@ -134,12 +136,14 @@ The following are MCP tools available to you. Always call these tools directly -
134
136
  **Behavior:**
135
137
 
136
138
  1. Creates isolated git worktree
137
- 2. Classifies CSV files (unless `skipClassify: true`)
138
- 3. Validates all transactions have matching rules (dry run)
139
- 4. Imports transactions to year journal
140
- 5. Reconciles closing balance against CSV metadata or manual value
141
- 6. Merges to main with `--no-ff` commit
142
- 7. Cleans up worktree
139
+ 2. Syncs CSV files from main repo to worktree
140
+ 3. Classifies CSV files (unless `skipClassify: true`)
141
+ 4. Validates all transactions have matching rules (dry run)
142
+ 5. Imports transactions to year journal
143
+ 6. Reconciles closing balance against CSV metadata or manual value
144
+ 7. Merges to main with `--no-ff` commit
145
+ 8. Deletes processed CSV files from main repo's import/incoming
146
+ 9. Cleans up worktree
143
147
 
144
148
  **Output:** Returns step-by-step results with success/failure for each phase
145
149
 
package/dist/index.js CHANGED
@@ -1941,7 +1941,7 @@ var require_convert_csv_to_json = __commonJS((exports) => {
1941
1941
  });
1942
1942
 
1943
1943
  // src/index.ts
1944
- import { dirname as dirname6, join as join11 } from "path";
1944
+ import { dirname as dirname6, join as join13 } from "path";
1945
1945
  import { fileURLToPath } from "url";
1946
1946
 
1947
1947
  // src/utils/agentLoader.ts
@@ -17614,6 +17614,27 @@ function execGitSafe(args, cwd) {
17614
17614
  }
17615
17615
  return { success: true, output: (result.stdout || "").trim() };
17616
17616
  }
17617
/**
 * Mirror the visible files from the main repo's import/incoming directory
 * into the same relative location inside a freshly created worktree.
 *
 * Dotfiles (e.g. .gitkeep, .DS_Store) are skipped; only regular files are
 * copied. A missing source directory is treated as "nothing to do".
 *
 * @param {string} mainRepoPath - Root of the main repository.
 * @param {string} worktreePath - Root of the import worktree.
 * @returns {void} Logs an [INFO] line when at least one file was copied.
 */
function copyIncomingFiles(mainRepoPath, worktreePath) {
  const incoming = path5.join(mainRepoPath, "import/incoming");
  const destination = path5.join(worktreePath, "import/incoming");
  if (!fs4.existsSync(incoming)) {
    return;
  }
  fs4.mkdirSync(destination, { recursive: true });
  // Select regular, non-hidden entries up front so the count and the copy
  // loop operate on the same list.
  const visibleFiles = fs4
    .readdirSync(incoming, { withFileTypes: true })
    .filter((entry) => entry.isFile() && !entry.name.startsWith("."));
  for (const entry of visibleFiles) {
    fs4.copyFileSync(
      path5.join(incoming, entry.name),
      path5.join(destination, entry.name)
    );
  }
  if (visibleFiles.length > 0) {
    console.log(`[INFO] Copied ${visibleFiles.length} file(s) from import/incoming/ to worktree`);
  }
}
17617
17638
  function createImportWorktree(mainRepoPath, options = {}) {
17618
17639
  const baseDir = options.baseDir ?? "/tmp";
17619
17640
  const uuid3 = v4_default();
@@ -17631,6 +17652,7 @@ function createImportWorktree(mainRepoPath, options = {}) {
17631
17652
  execGitSafe(["branch", "-D", branch], mainRepoPath);
17632
17653
  throw error45;
17633
17654
  }
17655
+ copyIncomingFiles(mainRepoPath, worktreePath);
17634
17656
  return {
17635
17657
  path: worktreePath,
17636
17658
  branch,
@@ -17709,6 +17731,53 @@ function ensureDirectory(dirPath) {
17709
17731
  fs5.mkdirSync(dirPath, { recursive: true });
17710
17732
  }
17711
17733
  }
17734
/**
 * Copy every CSV file found in the main repo's import directory into the
 * matching directory inside the worktree.
 *
 * Per-file failures are collected rather than thrown, so one unreadable
 * file does not abort the sync of the others.
 *
 * @param {string} mainRepoPath - Root of the main repository.
 * @param {string} worktreePath - Root of the import worktree.
 * @param {string} importDir - Relative import path from the config.
 * @returns {{synced: string[], errors: {file: string, error: string}[]}}
 */
function syncCSVFilesToWorktree(mainRepoPath, worktreePath, importDir) {
  const outcome = {
    synced: [],
    errors: []
  };
  const sourceDir = path6.join(mainRepoPath, importDir);
  const targetDir = path6.join(worktreePath, importDir);
  const candidates = findCSVFiles(sourceDir);
  if (candidates.length === 0) {
    return outcome;
  }
  // Only create the target directory when there is something to copy.
  ensureDirectory(targetDir);
  for (const name of candidates) {
    try {
      fs5.copyFileSync(path6.join(sourceDir, name), path6.join(targetDir, name));
      outcome.synced.push(name);
    } catch (error45) {
      const reason = error45 instanceof Error ? error45.message : String(error45);
      outcome.errors.push({ file: name, error: reason });
    }
  }
  return outcome;
}
17759
/**
 * Delete every CSV file in the main repo's import directory after a
 * successful import, recording which deletions succeeded and which failed.
 *
 * Failures are captured per file instead of thrown, so the pipeline can
 * report partial cleanup rather than aborting.
 *
 * @param {string} mainRepoPath - Root of the main repository.
 * @param {string} importDir - Relative import path from the config.
 * @returns {{deleted: string[], errors: {file: string, error: string}[]}}
 */
function cleanupProcessedCSVFiles(mainRepoPath, importDir) {
  const outcome = {
    deleted: [],
    errors: []
  };
  const importPath = path6.join(mainRepoPath, importDir);
  const targets = findCSVFiles(importPath);
  if (targets.length === 0) {
    return outcome;
  }
  for (const name of targets) {
    try {
      fs5.unlinkSync(path6.join(importPath, name));
      outcome.deleted.push(name);
    } catch (error45) {
      const reason = error45 instanceof Error ? error45.message : String(error45);
      outcome.errors.push({ file: name, error: reason });
    }
  }
  return outcome;
}
17712
17781
 
17713
17782
  // src/tools/classify-statements.ts
17714
17783
  function buildSuccessResult2(classified, unrecognized, message) {
@@ -18829,6 +18898,8 @@ It must be run inside an import worktree (use import-pipeline for the full workf
18829
18898
  }
18830
18899
  });
18831
18900
  // src/tools/import-pipeline.ts
18901
+ import * as fs12 from "fs";
18902
+ import * as path11 from "path";
18832
18903
  class NoTransactionsError extends Error {
18833
18904
  constructor() {
18834
18905
  super("No transactions to import");
@@ -18866,6 +18937,38 @@ function buildCommitMessage(provider, currency, fromDate, untilDate, transaction
18866
18937
  }
18867
18938
  return `${parts.join(" ")}${dateRange}${txStr}`;
18868
18939
  }
18940
/**
 * After a successful merge, delete from the main repo's import/incoming the
 * CSV files that the import step actually processed (matched by basename
 * against the import step's per-file results).
 *
 * Returns silently when the incoming directory is missing, the import step
 * did not succeed, or no file list is available — in those cases nothing
 * should be deleted.
 *
 * Fix: the error log previously used shell-style `$(unknown)` inside a JS
 * template literal, printing the literal text "$(unknown)" instead of the
 * file name; it now interpolates the failing filename.
 *
 * @param {{mainRepoPath: string}} worktree - Worktree handle holding the main repo path.
 * @param {{result: {steps: {import?: {success?: boolean, details?: {files?: {csv?: string}[]}}}}}} context - Pipeline context.
 * @returns {void} Logs an [INFO] line when at least one file was deleted.
 */
function cleanupIncomingFiles(worktree, context) {
  const incomingDir = path11.join(worktree.mainRepoPath, "import/incoming");
  if (!fs12.existsSync(incomingDir)) {
    return;
  }
  const importStep = context.result.steps.import;
  // Only clean up files that a *successful* import step reported.
  if (!importStep?.success || !importStep.details) {
    return;
  }
  const importResult = importStep.details;
  if (!importResult.files || !Array.isArray(importResult.files)) {
    return;
  }
  let deletedCount = 0;
  for (const fileResult of importResult.files) {
    if (!fileResult.csv)
      continue;
    // The step reports worktree paths; match by basename in the main repo.
    const filename = path11.basename(fileResult.csv);
    const filePath = path11.join(incomingDir, filename);
    if (fs12.existsSync(filePath)) {
      try {
        fs12.unlinkSync(filePath);
        deletedCount++;
      } catch (error45) {
        // Best-effort: a failed delete is logged but never aborts the pipeline.
        console.error(`[ERROR] Failed to delete ${filename}: ${error45 instanceof Error ? error45.message : String(error45)}`);
      }
    }
  }
  if (deletedCount > 0) {
    console.log(`[INFO] Cleaned up ${deletedCount} file(s) from import/incoming/`);
  }
}
18869
18972
  async function executeClassifyStep(context, worktree) {
18870
18973
  if (context.options.skipClassify) {
18871
18974
  context.result.steps.classify = buildStepResult(true, "Classification skipped (skipClassify: true)");
@@ -18966,6 +19069,7 @@ async function executeMergeStep(context, worktree) {
18966
19069
  mergeWorktree(worktree, commitMessage);
18967
19070
  const mergeDetails = { commitMessage };
18968
19071
  context.result.steps.merge = buildStepResult(true, `Merged to main: "${commitMessage}"`, mergeDetails);
19072
+ cleanupIncomingFiles(worktree, context);
18969
19073
  } catch (error45) {
18970
19074
  const message = `Merge failed: ${error45 instanceof Error ? error45.message : String(error45)}`;
18971
19075
  context.result.steps.merge = buildStepResult(false, message);
@@ -19003,19 +19107,67 @@ async function importPipeline(directory, agent, options, configLoader = loadImpo
19003
19107
  path: worktree.path,
19004
19108
  branch: worktree.branch
19005
19109
  });
19110
+ try {
19111
+ const config2 = configLoader(directory);
19112
+ const syncResult = syncCSVFilesToWorktree(directory, worktree.path, config2.paths.import);
19113
+ if (syncResult.synced.length === 0 && syncResult.errors.length === 0) {
19114
+ result.steps.sync = buildStepResult(true, "No CSV files to sync", {
19115
+ synced: []
19116
+ });
19117
+ } else if (syncResult.errors.length > 0) {
19118
+ result.steps.sync = buildStepResult(true, `Synced ${syncResult.synced.length} file(s) with ${syncResult.errors.length} error(s)`, { synced: syncResult.synced, errors: syncResult.errors });
19119
+ } else {
19120
+ result.steps.sync = buildStepResult(true, `Synced ${syncResult.synced.length} CSV file(s) to worktree`, { synced: syncResult.synced });
19121
+ }
19122
+ } catch (error45) {
19123
+ const errorMsg = error45 instanceof Error ? error45.message : String(error45);
19124
+ result.steps.sync = buildStepResult(false, `Failed to sync CSV files: ${errorMsg}`, { synced: [], errors: [{ file: "unknown", error: errorMsg }] });
19125
+ }
19006
19126
  try {
19007
19127
  await executeClassifyStep(context, worktree);
19008
19128
  await executeDryRunStep(context, worktree);
19009
19129
  await executeImportStep(context, worktree);
19010
19130
  await executeReconcileStep(context, worktree);
19131
+ try {
19132
+ const config2 = configLoader(directory);
19133
+ const cleanupResult = cleanupProcessedCSVFiles(directory, config2.paths.import);
19134
+ if (cleanupResult.deleted.length === 0 && cleanupResult.errors.length === 0) {
19135
+ result.steps.cleanup = buildStepResult(true, "No CSV files to cleanup", { csvCleanup: { deleted: [] } });
19136
+ } else if (cleanupResult.errors.length > 0) {
19137
+ result.steps.cleanup = buildStepResult(true, `Deleted ${cleanupResult.deleted.length} CSV file(s) with ${cleanupResult.errors.length} error(s)`, {
19138
+ csvCleanup: {
19139
+ deleted: cleanupResult.deleted,
19140
+ errors: cleanupResult.errors
19141
+ }
19142
+ });
19143
+ } else {
19144
+ result.steps.cleanup = buildStepResult(true, `Deleted ${cleanupResult.deleted.length} CSV file(s) from main repo`, { csvCleanup: { deleted: cleanupResult.deleted } });
19145
+ }
19146
+ } catch (error45) {
19147
+ const errorMsg = error45 instanceof Error ? error45.message : String(error45);
19148
+ result.steps.cleanup = buildStepResult(false, `Failed to cleanup CSV files: ${errorMsg}`, {
19149
+ csvCleanup: {
19150
+ deleted: [],
19151
+ errors: [{ file: "unknown", error: errorMsg }]
19152
+ }
19153
+ });
19154
+ }
19011
19155
  await executeMergeStep(context, worktree);
19012
- result.steps.cleanup = buildStepResult(true, "Worktree cleaned up", {
19013
- cleanedAfterSuccess: true
19014
- });
19156
+ const existingCleanup = result.steps.cleanup;
19157
+ if (existingCleanup) {
19158
+ existingCleanup.message += ", worktree cleaned up";
19159
+ existingCleanup.details = {
19160
+ ...existingCleanup.details,
19161
+ cleanedAfterSuccess: true
19162
+ };
19163
+ }
19015
19164
  const transactionCount = context.result.steps.import?.details?.summary?.totalTransactions || 0;
19016
19165
  return buildSuccessResult5(result, `Successfully imported ${transactionCount} transaction(s)`);
19017
19166
  } catch (error45) {
19018
- result.steps.cleanup = buildStepResult(true, "Worktree cleaned up after failure", { cleanedAfterFailure: true });
19167
+ result.steps.cleanup = buildStepResult(true, "Worktree cleaned up after failure (CSV files preserved for retry)", {
19168
+ cleanedAfterFailure: true,
19169
+ csvCleanup: { deleted: [] }
19170
+ });
19019
19171
  if (error45 instanceof NoTransactionsError) {
19020
19172
  return handleNoTransactions(result);
19021
19173
  }
@@ -19074,9 +19226,135 @@ This tool orchestrates the full import workflow in an isolated git worktree:
19074
19226
  });
19075
19227
  }
19076
19228
  });
19229
+ // src/tools/init-directories.ts
19230
+ import * as fs13 from "fs";
19231
+ import * as path12 from "path";
19232
/**
 * Create the on-disk import directory layout used by the statement-import
 * workflow: the base `import/` directory, the configured import/pending/
 * done/unrecognized paths, a `.gitkeep` in each, and an `import/.gitignore`.
 *
 * Idempotent: existing directories and files are left untouched, so this is
 * safe to run repeatedly (as the tool description advertises).
 *
 * @param {string} directory - Root of the accounting repository.
 * @returns {Promise<{success: boolean, directoriesCreated: string[], gitkeepFiles: string[], gitignoreCreated: boolean, error?: string, message: string}>}
 *   Never rejects; failures are reported via `success: false` plus `error`.
 */
async function initDirectories(directory) {
  try {
    // Directory paths come from config/import/providers.yaml (per the tool
    // description); presumably config2.paths.* are repo-relative strings.
    const config2 = loadImportConfig(directory);
    const directoriesCreated = [];
    const gitkeepFiles = [];
    // Create the base "import" directory first so the .gitignore written
    // below always has a home, even before the configured subpaths exist.
    const importBase = path12.join(directory, "import");
    if (!fs13.existsSync(importBase)) {
      fs13.mkdirSync(importBase, { recursive: true });
      directoriesCreated.push("import");
    }
    const pathsToCreate = [
      { key: "import", path: config2.paths.import },
      { key: "pending", path: config2.paths.pending },
      { key: "done", path: config2.paths.done },
      { key: "unrecognized", path: config2.paths.unrecognized }
    ];
    for (const { path: dirPath } of pathsToCreate) {
      const fullPath = path12.join(directory, dirPath);
      if (!fs13.existsSync(fullPath)) {
        fs13.mkdirSync(fullPath, { recursive: true });
        directoriesCreated.push(dirPath);
      }
      // .gitkeep ensures the (otherwise empty) directory is tracked by git.
      const gitkeepPath = path12.join(fullPath, ".gitkeep");
      if (!fs13.existsSync(gitkeepPath)) {
        fs13.writeFileSync(gitkeepPath, "");
        gitkeepFiles.push(path12.join(dirPath, ".gitkeep"));
      }
    }
    // Write .gitignore only if absent, so user customizations survive reruns.
    const gitignorePath = path12.join(importBase, ".gitignore");
    let gitignoreCreated = false;
    if (!fs13.existsSync(gitignorePath)) {
      const gitignoreContent = `# Ignore CSV/PDF files in temporary directories
/incoming/*.csv
/incoming/*.pdf
/pending/**/*.csv
/pending/**/*.pdf
/unrecognized/**/*.csv
/unrecognized/**/*.pdf

# Track processed files in done/ (audit trail)
# No ignore rule needed - tracked by default

# Ignore temporary files
*.tmp
*.temp
.DS_Store
Thumbs.db
`;
      fs13.writeFileSync(gitignorePath, gitignoreContent);
      gitignoreCreated = true;
    }
    // Build a human-readable summary of what (if anything) changed.
    const parts = [];
    if (directoriesCreated.length > 0) {
      parts.push(`Created ${directoriesCreated.length} director${directoriesCreated.length === 1 ? "y" : "ies"}`);
    }
    if (gitkeepFiles.length > 0) {
      parts.push(`added ${gitkeepFiles.length} .gitkeep file${gitkeepFiles.length === 1 ? "" : "s"}`);
    }
    if (gitignoreCreated) {
      parts.push("created .gitignore");
    }
    const message = parts.length > 0 ? `Import directory structure initialized: ${parts.join(", ")}` : "Import directory structure already exists (no changes needed)";
    return {
      success: true,
      directoriesCreated,
      gitkeepFiles,
      gitignoreCreated,
      message
    };
  } catch (error45) {
    // Config loading or any fs call may throw; surface it as a result object
    // instead of rejecting.
    return {
      success: false,
      directoriesCreated: [],
      gitkeepFiles: [],
      gitignoreCreated: false,
      error: error45 instanceof Error ? error45.message : String(error45),
      message: "Failed to initialize import directory structure"
    };
  }
}
19312
// MCP tool wrapper around initDirectories: restricted to the accountant
// agent, reports results as a human-readable multi-line string.
var init_directories_default = tool({
  description: "ACCOUNTANT AGENT ONLY: Initialize the import directory structure needed for processing bank statements. Creates import/incoming, import/pending, import/done, and import/unrecognized directories with .gitkeep files and appropriate .gitignore rules. Reads directory paths from config/import/providers.yaml. Safe to run multiple times (idempotent).",
  args: {},
  async execute(_params, context) {
    // Reject callers that are not the accountant agent.
    const restrictionError = checkAccountantAgent(context.agent, "init directories");
    if (restrictionError) {
      throw new Error(restrictionError);
    }
    const { directory } = context;
    const result = await initDirectories(directory);
    // initDirectories never throws; failure is signalled via result.success.
    if (!result.success) {
      return `Error: ${result.error}

${result.message}`;
    }
    // Assemble the success report section by section.
    const output = [];
    output.push(result.message || "");
    if (result.directoriesCreated.length > 0) {
      output.push(`
Directories created:`);
      for (const dir of result.directoriesCreated) {
        output.push(` - ${dir}`);
      }
    }
    if (result.gitkeepFiles.length > 0) {
      output.push(`
.gitkeep files added:`);
      for (const file2 of result.gitkeepFiles) {
        output.push(` - ${file2}`);
      }
    }
    if (result.gitignoreCreated) {
      output.push(`
Created import/.gitignore with rules to:`);
      output.push(" - Ignore CSV/PDF files in incoming/, pending/, unrecognized/");
      output.push(" - Track processed files in done/ for audit trail");
    }
    output.push(`
You can now drop CSV files into import/incoming/ and run import-pipeline.`);
    return output.join(`
`);
  }
});
19077
19355
  // src/index.ts
19078
19356
  var __dirname2 = dirname6(fileURLToPath(import.meta.url));
19079
- var AGENT_FILE = join11(__dirname2, "..", "agent", "accountant.md");
19357
+ var AGENT_FILE = join13(__dirname2, "..", "agent", "accountant.md");
19080
19358
  var AccountantPlugin = async () => {
19081
19359
  const agent = loadAgent(AGENT_FILE);
19082
19360
  return {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fuzzle/opencode-accountant",
3
- "version": "0.1.2",
3
+ "version": "0.1.3-next.1",
4
4
  "description": "An OpenCode accounting agent, specialized in double-entry bookkeeping with hledger",
5
5
  "author": {
6
6
  "name": "ali bengali",