@moatless/bookkeeping 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/accounting/index.d.ts +9 -0
- package/dist/accounting/index.js +14 -0
- package/dist/accounting/line-generator.d.ts +34 -0
- package/dist/accounting/line-generator.js +136 -0
- package/dist/accounting/tax-codes.d.ts +32 -0
- package/dist/accounting/tax-codes.js +279 -0
- package/dist/accounting/traktamente-rates.d.ts +48 -0
- package/dist/accounting/traktamente-rates.js +325 -0
- package/dist/accounting/types.d.ts +69 -0
- package/dist/accounting/types.js +5 -0
- package/dist/accounting/validation.d.ts +41 -0
- package/dist/accounting/validation.js +118 -0
- package/dist/auth/fortnox-login.d.ts +15 -0
- package/dist/auth/fortnox-login.js +170 -0
- package/dist/auth/index.d.ts +3 -0
- package/dist/auth/index.js +3 -0
- package/dist/auth/prompts.d.ts +6 -0
- package/dist/auth/prompts.js +56 -0
- package/dist/auth/token-store.d.ts +19 -0
- package/dist/auth/token-store.js +54 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +21 -0
- package/dist/progress/index.d.ts +1 -0
- package/dist/progress/index.js +1 -0
- package/dist/progress/sync-progress.d.ts +31 -0
- package/dist/progress/sync-progress.js +65 -0
- package/dist/services/bokio-journal.d.ts +29 -0
- package/dist/services/bokio-journal.js +175 -0
- package/dist/services/document-download.d.ts +46 -0
- package/dist/services/document-download.js +105 -0
- package/dist/services/fortnox-inbox.d.ts +18 -0
- package/dist/services/fortnox-inbox.js +150 -0
- package/dist/services/fortnox-journal.d.ts +22 -0
- package/dist/services/fortnox-journal.js +166 -0
- package/dist/services/index.d.ts +6 -0
- package/dist/services/index.js +6 -0
- package/dist/services/journal-sync.d.ts +23 -0
- package/dist/services/journal-sync.js +124 -0
- package/dist/services/journal.service.d.ts +45 -0
- package/dist/services/journal.service.js +204 -0
- package/dist/storage/filesystem.d.ts +49 -0
- package/dist/storage/filesystem.js +122 -0
- package/dist/storage/index.d.ts +2 -0
- package/dist/storage/index.js +1 -0
- package/dist/storage/interface.d.ts +48 -0
- package/dist/storage/interface.js +5 -0
- package/dist/sync-types.d.ts +61 -0
- package/dist/sync-types.js +1 -0
- package/dist/transformers/bokio.d.ts +10 -0
- package/dist/transformers/bokio.js +56 -0
- package/dist/transformers/fortnox.d.ts +6 -0
- package/dist/transformers/fortnox.js +39 -0
- package/dist/transformers/index.d.ts +3 -0
- package/dist/transformers/index.js +2 -0
- package/dist/types/discarded-item.d.ts +29 -0
- package/dist/types/discarded-item.js +1 -0
- package/dist/types/document.d.ts +63 -0
- package/dist/types/document.js +9 -0
- package/dist/types/exported-document.d.ts +61 -0
- package/dist/types/exported-document.js +9 -0
- package/dist/types/exported-fiscal-year.d.ts +10 -0
- package/dist/types/exported-fiscal-year.js +1 -0
- package/dist/types/exported-inbox-document.d.ts +14 -0
- package/dist/types/exported-inbox-document.js +10 -0
- package/dist/types/fiscal-year.d.ts +10 -0
- package/dist/types/fiscal-year.js +1 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/index.js +10 -0
- package/dist/types/journal-entry.d.ts +79 -0
- package/dist/types/journal-entry.js +12 -0
- package/dist/types/ledger-account.d.ts +5 -0
- package/dist/types/ledger-account.js +1 -0
- package/dist/utils/file-namer.d.ts +48 -0
- package/dist/utils/file-namer.js +80 -0
- package/dist/utils/git.d.ts +9 -0
- package/dist/utils/git.js +41 -0
- package/dist/utils/index.d.ts +6 -0
- package/dist/utils/index.js +6 -0
- package/dist/utils/paths.d.ts +17 -0
- package/dist/utils/paths.js +24 -0
- package/dist/utils/retry.d.ts +17 -0
- package/dist/utils/retry.js +48 -0
- package/dist/utils/templates.d.ts +12 -0
- package/dist/utils/templates.js +222 -0
- package/dist/utils/yaml.d.ts +12 -0
- package/dist/utils/yaml.js +47 -0
- package/dist/yaml/entry-helpers.d.ts +57 -0
- package/dist/yaml/entry-helpers.js +125 -0
- package/dist/yaml/index.d.ts +2 -0
- package/dist/yaml/index.js +2 -0
- package/dist/yaml/yaml-serializer.d.ts +21 -0
- package/dist/yaml/yaml-serializer.js +60 -0
- package/package.json +37 -0

package/dist/services/document-download.d.ts
@@ -0,0 +1,46 @@
import type { IStorageService } from "../storage/interface";
export interface DocumentMetadata {
    fileName: string;
    mimeType?: string;
    sourceIntegration: string;
    sourceId: string;
}
export interface DownloadableFile {
    id: string;
    contentType: string;
    description?: string;
}
export interface FileDownloader {
    /**
     * Get files linked to a journal entry
     * @param journalEntryId - The journal entry ID to filter by
     * @returns List of downloadable files
     */
    getFilesForEntry(journalEntryId: string): Promise<DownloadableFile[]>;
    /**
     * Download a file by ID
     * @param id - The file/upload ID
     * @returns Binary data and content type
     */
    downloadFile(id: string): Promise<{
        data: ArrayBuffer;
        contentType: string;
    }>;
}
export interface DownloadFilesOptions {
    storage: IStorageService;
    repoPath: string;
    entryDir: string;
    journalEntryId: string;
    downloader: FileDownloader;
    sourceIntegration: string;
}
/**
 * Download files for a journal entry and save them to documents.yaml
 * Returns the number of files downloaded
 */
export declare function downloadFilesForEntry(options: DownloadFilesOptions): Promise<number>;
/**
 * Get extension from MIME type
 */
export declare function getExtensionFromMimeType(mimeType: string): string;
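
A minimal usage sketch of the API above, assuming downloadFilesForEntry and its types are re-exported from the package root and that an IStorageService instance is constructed elsewhere; the HTTP endpoints and paths are placeholders, not part of the package.

import { downloadFilesForEntry, type DownloadableFile, type FileDownloader, type IStorageService } from "@moatless/bookkeeping";

declare const storage: IStorageService; // assumed: constructed elsewhere (e.g. the filesystem storage service)

// A FileDownloader backed by plain fetch() against a hypothetical file API.
const httpDownloader: FileDownloader = {
    async getFilesForEntry(journalEntryId) {
        const res = await fetch(`https://files.example.invalid/entries/${journalEntryId}`);
        return (await res.json()) as DownloadableFile[];
    },
    async downloadFile(id) {
        const res = await fetch(`https://files.example.invalid/files/${id}`);
        return {
            data: await res.arrayBuffer(),
            contentType: res.headers.get("content-type") ?? "application/octet-stream",
        };
    },
};

const count = await downloadFilesForEntry({
    storage,
    repoPath: "/home/user/bookkeeping",              // placeholder repo path
    entryDir: "journal-entries/fy-2024/A-1-invoice", // placeholder entry directory
    journalEntryId: "A-1",
    downloader: httpDownloader,
    sourceIntegration: "example",
});
console.log(`Downloaded ${count} new file(s)`);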

package/dist/services/document-download.js
@@ -0,0 +1,105 @@
import * as fs from "node:fs/promises";
import * as path from "node:path";
import { toYaml, parseYaml } from "../utils/yaml";
/**
 * Slugify text for filenames
 */
function slugify(text, maxLength = 40) {
    return text
        .toLowerCase()
        .slice(0, maxLength)
        .replace(/[^a-z0-9\s-]/g, "")
        .replace(/\s+/g, "-")
        .replace(/-+/g, "-")
        .replace(/^-|-$/g, "");
}
/** Map MIME types to file extensions */
const EXTENSION_MAP = {
    "application/pdf": ".pdf",
    "image/jpeg": ".jpg",
    "image/png": ".png",
    "image/gif": ".gif",
    "image/webp": ".webp",
    "image/tiff": ".tiff",
    "application/octet-stream": ".bin",
};
/**
 * Download files for a journal entry and save them to documents.yaml
 * Returns the number of files downloaded
 */
export async function downloadFilesForEntry(options) {
    const { storage, repoPath, entryDir, journalEntryId, downloader, sourceIntegration } = options;
    // Get files linked to this journal entry
    const linkedFiles = await downloader.getFilesForEntry(journalEntryId);
    if (linkedFiles.length === 0) {
        return 0;
    }
    // Load existing documents.yaml if present
    const documentsPath = `${entryDir}/documents.yaml`;
    let existingDocs = [];
    try {
        const { content } = await storage.readFile(documentsPath);
        existingDocs = parseYaml(content) || [];
    }
    catch {
        // documents.yaml doesn't exist yet
    }
    // Track which sourceIds are already downloaded
    const downloadedSourceIds = new Set(existingDocs
        .filter((d) => d.sourceIntegration === sourceIntegration)
        .map((d) => d.sourceId));
    let filesDownloaded = 0;
    const absoluteDir = path.join(repoPath, entryDir);
    // Ensure directory exists
    await fs.mkdir(absoluteDir, { recursive: true });
    for (const file of linkedFiles) {
        // Skip if already downloaded
        if (downloadedSourceIds.has(file.id)) {
            continue;
        }
        try {
            // Download the file
            const result = await downloader.downloadFile(file.id);
            // Determine extension from content type
            const ext = EXTENSION_MAP[file.contentType || result.contentType] || ".bin";
            // Generate filename from description or ID (never use generic "document")
            const baseName = file.description
                ? slugify(file.description)
                : file.id.slice(0, 8);
            const existingFilenames = existingDocs.map((d) => d.fileName);
            let filename = `${baseName}${ext}`;
            let counter = 1;
            while (existingFilenames.includes(filename)) {
                filename = `${baseName}-${counter}${ext}`;
                counter++;
            }
            // Save file
            const filePath = path.join(absoluteDir, filename);
            const buffer = Buffer.from(result.data);
            await fs.writeFile(filePath, buffer);
            // Add to documents list
            existingDocs.push({
                fileName: filename,
                mimeType: file.contentType || result.contentType,
                sourceIntegration,
                sourceId: file.id,
            });
            filesDownloaded++;
        }
        catch (error) {
            // Log but continue with other files
            console.error(`\n Warning: Failed to download file ${file.id}: ${error instanceof Error ? error.message : "Unknown error"}`);
        }
    }
    // Write updated documents.yaml if any files were downloaded
    if (filesDownloaded > 0) {
        await storage.writeFile(documentsPath, toYaml(existingDocs));
    }
    return filesDownloaded;
}
/**
 * Get extension from MIME type
 */
export function getExtensionFromMimeType(mimeType) {
    return EXTENSION_MAP[mimeType] || ".bin";
}
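
For reference, the mapping above behaves as follows (a small sketch, assuming getExtensionFromMimeType is re-exported from the package root):

import { getExtensionFromMimeType } from "@moatless/bookkeeping";

getExtensionFromMimeType("application/pdf"); // ".pdf"
getExtensionFromMimeType("image/jpeg");      // ".jpg"
getExtensionFromMimeType("text/plain");      // ".bin" (unknown MIME types fall back to .bin)

Filenames are derived the same way inside downloadFilesForEntry: a slugified description (or the first 8 characters of the file ID) plus the extension, with -1, -2, ... suffixes appended on collision.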

package/dist/services/fortnox-inbox.d.ts
@@ -0,0 +1,18 @@
import type { FortnoxClient } from "@moatless/fortnox-client";
import type { FilesystemStorageService } from "../storage/filesystem";
export interface FortnoxInboxSyncProgress {
    current: number;
    total: number;
    message?: string;
}
export interface FortnoxInboxSyncOptions {
    onProgress?: (progress: FortnoxInboxSyncProgress) => void;
}
export interface FortnoxInboxSyncResult {
    newCount: number;
    existingCount: number;
}
/**
 * Sync inbox files from Fortnox to local filesystem
 */
export declare function syncFortnoxInbox(client: FortnoxClient, storage: FilesystemStorageService, options?: FortnoxInboxSyncOptions): Promise<FortnoxInboxSyncResult>;
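
A usage sketch, assuming syncFortnoxInbox and FilesystemStorageService are re-exported from the package root and that an authenticated FortnoxClient and a storage service rooted at the bookkeeping repo are constructed elsewhere:

import { syncFortnoxInbox } from "@moatless/bookkeeping";
import type { FilesystemStorageService } from "@moatless/bookkeeping";
import type { FortnoxClient } from "@moatless/fortnox-client";

declare const client: FortnoxClient;             // assumed: authenticated Fortnox client
declare const storage: FilesystemStorageService; // assumed: rooted at the bookkeeping repo

const { newCount, existingCount } = await syncFortnoxInbox(client, storage, {
    onProgress: ({ current, total, message }) => {
        process.stdout.write(`\r${message ?? "Syncing inbox"} ${current}/${total}`);
    },
});
console.log(`\nInbox sync done: ${newCount} new, ${existingCount} already downloaded`);

Each inbox file ends up in inbox/<date>-<slug>/ alongside a documents.yaml recording its Fortnox source ID, which is how reruns detect already-downloaded files.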

package/dist/services/fortnox-inbox.js
@@ -0,0 +1,150 @@
import { toYaml, parseYaml } from "../utils/yaml";
/**
 * Slugify text for terminal-friendly filenames
 */
function slugify(text, maxLength = 30) {
    return text
        .toLowerCase()
        .slice(0, maxLength)
        .replace(/[^a-z0-9\s-]/g, "")
        .replace(/\s+/g, "-")
        .replace(/-+/g, "-")
        .replace(/^-|-$/g, "");
}
function _getExtensionFromContentType(contentType) {
    const map = {
        "application/pdf": ".pdf",
        "image/jpeg": ".jpg",
        "image/png": ".png",
        "image/gif": ".gif",
        "image/webp": ".webp",
        "image/tiff": ".tiff",
        "application/octet-stream": ".bin",
    };
    return map[contentType] || ".bin";
}
/**
 * Check if a file was already downloaded by scanning existing documents.yaml files
 */
async function isAlreadyDownloaded(storage, inboxDir, sourceId) {
    try {
        const entries = await storage.listDirectory(inboxDir);
        for (const entry of entries) {
            if (entry.type !== "dir")
                continue;
            const documentsPath = `${inboxDir}/${entry.name}/documents.yaml`;
            try {
                const fileContent = await storage.readFile(documentsPath);
                if (fileContent?.content) {
                    const documents = parseYaml(fileContent.content);
                    if (documents?.some((doc) => doc.sourceId === sourceId)) {
                        return true;
                    }
                }
            }
            catch {
                // documents.yaml doesn't exist or can't be read, skip
            }
        }
    }
    catch {
        // Inbox directory doesn't exist
    }
    return false;
}
/**
 * Sync inbox files from Fortnox to local filesystem
 */
export async function syncFortnoxInbox(client, storage, options = {}) {
    const { onProgress = () => { } } = options;
    // Fetch inbox files
    const allFiles = [];
    // Get root inbox
    const rootResponse = await client.getInboxFiles();
    const rootFiles = rootResponse.Folder?.Files ?? rootResponse.Files ?? [];
    const folders = rootResponse.Folder?.Folders ?? [];
    allFiles.push(...rootFiles);
    // Fetch files from each folder
    if (Array.isArray(folders)) {
        for (const folder of folders) {
            if (folder?.Id) {
                try {
                    const folderResponse = await client.getInboxFolder(folder.Id);
                    const folderFiles = folderResponse.Folder?.Files ?? folderResponse.Files ?? [];
                    allFiles.push(...folderFiles);
                }
                catch {
                    // Skip folders that can't be read
                }
            }
        }
    }
    const totalFiles = allFiles.length;
    onProgress({ current: 0, total: totalFiles, message: "Fetching inbox files..." });
    if (totalFiles === 0) {
        return { newCount: 0, existingCount: 0 };
    }
    // Download each file
    let newCount = 0;
    let existingCount = 0;
    const today = new Date().toISOString().slice(0, 10);
    // Track used directory names to handle duplicates
    const usedDirNames = new Set();
    // First, scan existing directories
    try {
        const entries = await storage.listDirectory("inbox");
        for (const entry of entries) {
            if (entry.type === "dir") {
                usedDirNames.add(entry.name);
            }
        }
    }
    catch {
        // Inbox directory doesn't exist yet
    }
    for (const file of allFiles) {
        // Check if this file was already downloaded
        const alreadyDownloaded = await isAlreadyDownloaded(storage, "inbox", file.Id);
        if (alreadyDownloaded) {
            existingCount++;
            onProgress({ current: newCount + existingCount, total: totalFiles });
            continue;
        }
        // Generate directory name from filename
        const baseName = file.Name.replace(/\.[^/.]+$/, ""); // Remove extension
        const slug = slugify(baseName) || file.Id.slice(0, 8);
        let dirName = `${today}-${slug}`;
        // Handle duplicate directory names
        if (usedDirNames.has(dirName)) {
            dirName = `${dirName}-${file.Id.slice(0, 8)}`;
        }
        usedDirNames.add(dirName);
        const uploadDir = `inbox/${dirName}`;
        try {
            // Download file
            const result = await client.downloadFromInbox(file.Id);
            // Use original filename
            const filename = file.Name;
            // Save file
            const filePath = `${uploadDir}/${filename}`;
            await storage.writeBinaryFile(filePath, Buffer.from(result.data));
            // Write documents.yaml
            const documents = [
                {
                    fileName: filename,
                    mimeType: result.contentType,
                    sourceIntegration: "fortnox",
                    sourceId: file.Id,
                },
            ];
            const documentsPath = `${uploadDir}/documents.yaml`;
            await storage.writeFile(documentsPath, toYaml(documents));
            newCount++;
        }
        catch {
            // Skip files that can't be downloaded
        }
        onProgress({ current: newCount + existingCount, total: totalFiles });
    }
    return { newCount, existingCount };
}

package/dist/services/fortnox-journal.d.ts
@@ -0,0 +1,22 @@
import type { FortnoxClient } from "@moatless/fortnox-client";
import type { FilesystemStorageService } from "../storage/filesystem";
export interface FortnoxSyncProgress {
    current: number;
    total: number;
    message?: string;
    phase?: "discovering" | "fetching";
}
export interface FortnoxJournalSyncOptions {
    downloadFiles?: boolean;
    targetYear?: number;
    onProgress?: (progress: FortnoxSyncProgress) => void;
}
export interface FortnoxJournalSyncResult {
    entriesCount: number;
    fiscalYearsCount: number;
    entriesWithFilesDownloaded: number;
}
export declare function syncFortnoxJournalEntries(client: FortnoxClient, storage: FilesystemStorageService, options?: FortnoxJournalSyncOptions): Promise<FortnoxJournalSyncResult>;
export declare function syncFortnoxChartOfAccounts(client: FortnoxClient, storage: FilesystemStorageService): Promise<{
    accountsCount: number;
}>;
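
A sketch of driving a full journal sync with the declarations above, under the same assumptions (root re-exports, a client and storage constructed elsewhere); targetYear and downloadFiles are optional:

import { syncFortnoxJournalEntries, syncFortnoxChartOfAccounts } from "@moatless/bookkeeping";
import type { FilesystemStorageService } from "@moatless/bookkeeping";
import type { FortnoxClient } from "@moatless/fortnox-client";

declare const client: FortnoxClient;             // assumed: authenticated Fortnox client
declare const storage: FilesystemStorageService; // assumed: rooted at the bookkeeping repo

const result = await syncFortnoxJournalEntries(client, storage, {
    targetYear: 2024,    // only fiscal years starting in 2024
    downloadFiles: true, // also fetch voucher attachments into each entry directory
    onProgress: ({ current, total, phase }) => {
        if (phase === "discovering") process.stdout.write(`\rDiscovered ${current} vouchers`);
        else process.stdout.write(`\rFetched ${current}/${total} vouchers`);
    },
});
console.log(`\n${result.entriesCount} entries across ${result.fiscalYearsCount} fiscal year(s)`);

const { accountsCount } = await syncFortnoxChartOfAccounts(client, storage);
console.log(`Wrote ${accountsCount} accounts to accounts.yaml`);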

package/dist/services/fortnox-journal.js
@@ -0,0 +1,166 @@
import { fiscalYearDirName } from "../utils/file-namer";
import { toYaml } from "../utils/yaml";
import { mapFortnoxVoucherToJournalEntry } from "../transformers/fortnox";
import { JournalService } from "./journal.service";
export async function syncFortnoxJournalEntries(client, storage, options = {}) {
    const { downloadFiles = false, targetYear, onProgress = () => { } } = options;
    const journalService = new JournalService(storage);
    // 1. Fetch fiscal years
    const fiscalYearsResponse = await client.getFinancialYears();
    const allFiscalYears = fiscalYearsResponse.data;
    // Filter fiscal years if targeting a specific year
    const fiscalYears = targetYear
        ? allFiscalYears.filter((fy) => parseInt(fy.FromDate.slice(0, 4), 10) === targetYear)
        : allFiscalYears;
    if (targetYear && fiscalYears.length === 0) {
        return {
            entriesCount: 0,
            fiscalYearsCount: 0,
            entriesWithFilesDownloaded: 0,
        };
    }
    // 2. Fetch voucher list for each fiscal year
    const voucherRefs = [];
    const existingEntriesByYear = new Map();
    for (const fy of fiscalYears) {
        const fyYear = fy.Id;
        const pathYear = parseInt(fy.FromDate.slice(0, 4), 10);
        const existingRecords = await journalService.listJournalEntryRecords(pathYear);
        existingEntriesByYear.set(pathYear, existingRecords);
        let hasMore = true;
        let page = 1;
        while (hasMore) {
            const response = await client.getVouchers(fyYear, { page, pageSize: 100 });
            for (const voucher of response.data) {
                voucherRefs.push({
                    series: voucher.VoucherSeries,
                    number: voucher.VoucherNumber ?? 0,
                    year: voucher.Year ?? fyYear,
                    fiscalYear: fy,
                    pathYear,
                });
            }
            hasMore = response.pagination.hasNextPage;
            page++;
            // Report discovery progress
            onProgress({
                current: voucherRefs.length,
                total: 0,
                message: "Discovering entries...",
                phase: "discovering",
            });
        }
    }
    const totalEntries = voucherRefs.length;
    onProgress({ current: 0, total: totalEntries, message: "Fetching entry details...", phase: "fetching" });
    if (totalEntries === 0) {
        await writeFortnoxFiscalYearsMetadata(storage, fiscalYears);
        return {
            entriesCount: 0,
            fiscalYearsCount: fiscalYears.length,
            entriesWithFilesDownloaded: 0,
        };
    }
    // 3. Fetch full voucher details and write entries
    const entryDirs = new Map();
    const allVouchers = [];
    let processed = 0;
    for (const ref of voucherRefs) {
        // Fetch full voucher with VoucherRows
        const fullVoucherResponse = await client.getVoucher(ref.series, ref.number, ref.year);
        const voucher = fullVoucherResponse.Voucher;
        const journalEntry = mapFortnoxVoucherToJournalEntry(voucher);
        const entryKey = journalEntry.externalId ??
            `${journalEntry.series ?? ""}-${journalEntry.entryNumber}`;
        const existingRecords = existingEntriesByYear.get(ref.pathYear);
        const existingRecord = existingRecords?.get(entryKey);
        const result = await journalService.upsertJournalEntry(ref.pathYear, journalEntry, existingRecord);
        const entryDir = result.entryDir;
        const entryId = `${voucher.VoucherSeries}-${voucher.VoucherNumber ?? 0}-${voucher.Year ?? 0}`;
        entryDirs.set(entryId, entryDir);
        allVouchers.push({ voucher, fiscalYear: ref.fiscalYear });
        processed++;
        onProgress({ current: processed, total: totalEntries, phase: "fetching" });
    }
    // 4. Write fiscal year metadata
    await writeFortnoxFiscalYearsMetadata(storage, fiscalYears);
    // 5. Download files for entries (if enabled)
    let entriesWithFilesDownloaded = 0;
    if (downloadFiles) {
        for (const { voucher } of allVouchers) {
            const entryId = `${voucher.VoucherSeries}-${voucher.VoucherNumber ?? 0}-${voucher.Year ?? 0}`;
            const entryDir = entryDirs.get(entryId);
            if (!entryDir)
                continue;
            try {
                const connections = await client.getVoucherFileConnectionsForVoucher(voucher.VoucherSeries, voucher.VoucherNumber ?? 0, voucher.Year ?? 0);
                for (const conn of connections) {
                    try {
                        const fileResult = await client.downloadFromArchive(conn.FileId);
                        const filename = `attachment-${conn.FileId}.${getFortnoxExtension(fileResult.contentType)}`;
                        const filePath = `${entryDir}/${filename}`;
                        await storage.writeBinaryFile(filePath, Buffer.from(fileResult.data));
                        entriesWithFilesDownloaded++;
                    }
                    catch {
                        // Skip file download errors
                    }
                }
            }
            catch {
                // Skip if can't get file connections
            }
        }
    }
    return {
        entriesCount: allVouchers.length,
        fiscalYearsCount: fiscalYears.length,
        entriesWithFilesDownloaded,
    };
}
export async function syncFortnoxChartOfAccounts(client, storage) {
    // Fetch all accounts with pagination
    const allAccounts = [];
    let hasMore = true;
    let page = 1;
    while (hasMore) {
        const response = await client.getAccounts({ page, pageSize: 500 });
        for (const account of response.data) {
            allAccounts.push({
                code: account.Number.toString(),
                name: account.Description,
                description: account.Description,
            });
        }
        hasMore = response.pagination.hasNextPage;
        page++;
    }
    // Sort by account number
    allAccounts.sort((a, b) => parseInt(a.code, 10) - parseInt(b.code, 10));
    // Write accounts.yaml
    const yamlContent = toYaml({ accounts: allAccounts });
    await storage.writeFile("accounts.yaml", yamlContent);
    return { accountsCount: allAccounts.length };
}
function getFortnoxExtension(contentType) {
    const map = {
        "application/pdf": "pdf",
        "image/jpeg": "jpg",
        "image/png": "png",
        "image/gif": "gif",
    };
    return map[contentType] ?? "bin";
}
async function writeFortnoxFiscalYearsMetadata(storage, fiscalYears) {
    for (const fy of fiscalYears) {
        const fyDir = fiscalYearDirName({ start_date: fy.FromDate });
        const metadataPath = `journal-entries/${fyDir}/_fiscal-year.yaml`;
        const metadata = {
            id: fy.Id,
            startDate: fy.FromDate,
            endDate: fy.ToDate,
        };
        const yamlContent = toYaml(metadata);
        await storage.writeFile(metadataPath, yamlContent);
    }
}

package/dist/services/index.d.ts
@@ -0,0 +1,6 @@
export { JournalService } from "./journal.service";
export { syncJournalEntries, type SyncJournalConfig, } from "./journal-sync";
export { downloadFilesForEntry, getExtensionFromMimeType, type DocumentMetadata, type DownloadableFile, type FileDownloader, type DownloadFilesOptions, } from "./document-download";
export { syncFortnoxJournalEntries, syncFortnoxChartOfAccounts, type FortnoxSyncProgress, type FortnoxJournalSyncOptions, type FortnoxJournalSyncResult, } from "./fortnox-journal";
export { syncFortnoxInbox, type FortnoxInboxSyncProgress, type FortnoxInboxSyncOptions, type FortnoxInboxSyncResult, } from "./fortnox-inbox";
export { syncBokioJournalEntries, syncBokioChartOfAccounts, type BokioSyncProgress, type BokioJournalSyncOptions, type BokioJournalSyncResult, } from "./bokio-journal";

package/dist/services/index.js
@@ -0,0 +1,6 @@
export { JournalService } from "./journal.service";
export { syncJournalEntries, } from "./journal-sync";
export { downloadFilesForEntry, getExtensionFromMimeType, } from "./document-download";
export { syncFortnoxJournalEntries, syncFortnoxChartOfAccounts, } from "./fortnox-journal";
export { syncFortnoxInbox, } from "./fortnox-inbox";
export { syncBokioJournalEntries, syncBokioChartOfAccounts, } from "./bokio-journal";

package/dist/services/journal-sync.d.ts
@@ -0,0 +1,23 @@
import type { FortnoxClient } from "@moatless/fortnox-client";
import type { IStorageService } from "../storage/interface";
import type { SyncJournalResult, FiscalYearInfo } from "../sync-types";
/**
 * Sync configuration for journal entries
 */
export interface SyncJournalConfig {
    storage: IStorageService;
    fortnoxClient: FortnoxClient;
    organizationId: string;
    financialYears: FiscalYearInfo[];
    /** Optional logger */
    log?: {
        info: (data: object, msg: string) => void;
        warn: (data: object, msg: string) => void;
        error: (data: object, msg: string) => void;
    };
}
/**
 * Sync journal entries from Fortnox to storage
 * Returns detailed stats grouped by fiscal year
 */
export declare function syncJournalEntries(config: SyncJournalConfig): Promise<SyncJournalResult>;
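
A configuration sketch for syncJournalEntries. The FiscalYearInfo shape used here ({ externalId, year }) is inferred from the implementation that follows and may carry additional fields; the organization ID is a placeholder and the imports assume root re-exports:

import { syncJournalEntries } from "@moatless/bookkeeping";
import type { IStorageService } from "@moatless/bookkeeping";
import type { FortnoxClient } from "@moatless/fortnox-client";

declare const client: FortnoxClient;    // assumed: authenticated Fortnox client
declare const storage: IStorageService; // assumed: constructed elsewhere

const result = await syncJournalEntries({
    storage,
    fortnoxClient: client,
    organizationId: "org_123",                       // placeholder
    financialYears: [{ externalId: 7, year: 2024 }], // externalId = Fortnox financial year ID, year = calendar year used for paths
    // log is optional; a console logger is used when omitted
});
if (!result.success) {
    console.error(`${result.errors.length} voucher(s) failed`, result.errors);
}
console.log(result.totals, `in ${result.durationMs} ms`);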

package/dist/services/journal-sync.js
@@ -0,0 +1,124 @@
import { JournalService } from "./journal.service";
import { mapFortnoxVoucherToJournalEntry } from "../transformers/fortnox";
/**
 * Default console logger
 */
const defaultLog = {
    info: (data, msg) => console.log(`[INFO] ${msg}`, data),
    warn: (data, msg) => console.warn(`[WARN] ${msg}`, data),
    error: (data, msg) => console.error(`[ERROR] ${msg}`, data),
};
/**
 * Sync journal entries from Fortnox to storage
 * Returns detailed stats grouped by fiscal year
 */
export async function syncJournalEntries(config) {
    const startTime = Date.now();
    const { storage, fortnoxClient, organizationId, financialYears } = config;
    const log = config.log ?? defaultLog;
    const journalService = new JournalService(storage);
    const fiscalYearStats = [];
    const errors = [];
    log.info({ organizationId, financialYears }, "Starting journal entries sync");
    for (const fy of financialYears) {
        const { externalId: fortnoxYearId, year } = fy;
        const stats = {
            year,
            fetched: 0,
            created: 0,
            updated: 0,
            unchanged: 0,
            failed: 0,
        };
        try {
            // Get existing entries from storage (uses calendar year for paths)
            log.info({ year, fortnoxYearId }, "Loading existing entries from storage");
            const existingEntries = await journalService.listJournalEntryRecords(year);
            log.info({ year, existingCount: existingEntries.size }, "Loaded existing entries");
            // Fetch all vouchers from Fortnox (uses Fortnox external ID)
            log.info({ year, fortnoxYearId }, "Fetching vouchers from Fortnox");
            const allVouchers = [];
            let page = 1;
            const pageSize = 100;
            let hasNextPage = true;
            while (hasNextPage) {
                const response = await fortnoxClient.getVouchers(fortnoxYearId, { page, pageSize });
                allVouchers.push(...response.data);
                if (!response.pagination.hasNextPage) {
                    hasNextPage = false;
                }
                else {
                    page++;
                }
            }
            stats.fetched = allVouchers.length;
            log.info({ year, fetched: allVouchers.length }, "Fetched vouchers from Fortnox");
            // Process each voucher
            for (const voucherSummary of allVouchers) {
                if (!voucherSummary.VoucherNumber) {
                    log.warn({ voucherSeries: voucherSummary.VoucherSeries }, "Skipping voucher without number");
                    stats.failed++;
                    continue;
                }
                const externalId = `${voucherSummary.VoucherSeries}-${voucherSummary.VoucherNumber}`;
                try {
                    // Get full voucher details (list response doesn't include rows)
                    const fullVoucherResponse = await fortnoxClient.getVoucher(voucherSummary.VoucherSeries, voucherSummary.VoucherNumber, fortnoxYearId);
                    if (!fullVoucherResponse.Voucher) {
                        log.warn({ externalId }, "Voucher not found");
                        stats.failed++;
                        continue;
                    }
                    const newEntry = mapFortnoxVoucherToJournalEntry(fullVoucherResponse.Voucher, year);
                    const entryKey = newEntry.externalId ??
                        `${newEntry.series ?? ""}-${newEntry.entryNumber}`;
                    const existingRecord = existingEntries.get(entryKey);
                    const result = await journalService.upsertJournalEntry(year, newEntry, existingRecord);
                    if (result.action === "created") {
                        stats.created++;
                        log.info({ externalId }, "Created journal entry");
                    }
                    else if (result.action === "updated") {
                        stats.updated++;
                        log.info({ externalId }, "Updated journal entry");
                    }
                    else {
                        stats.unchanged++;
                    }
                }
                catch (err) {
                    const message = err instanceof Error ? err.message : "Unknown error";
                    log.error({ externalId, error: message }, "Failed to process voucher");
                    errors.push({ year, externalId, message });
                    stats.failed++;
                }
            }
        }
        catch (err) {
            const message = err instanceof Error ? err.message : "Unknown error";
            log.error({ year, error: message }, "Failed to sync fiscal year");
            errors.push({ year, externalId: "*", message });
            stats.failed++;
        }
        fiscalYearStats.push(stats);
        log.info({ year, stats }, "Completed fiscal year sync");
    }
    // Calculate totals
    const totals = fiscalYearStats.reduce((acc, fy) => ({
        fetched: acc.fetched + fy.fetched,
        created: acc.created + fy.created,
        updated: acc.updated + fy.updated,
        unchanged: acc.unchanged + fy.unchanged,
        failed: acc.failed + fy.failed,
    }), { fetched: 0, created: 0, updated: 0, unchanged: 0, failed: 0 });
    const durationMs = Date.now() - startTime;
    const success = errors.length === 0;
    log.info({ organizationId, totals, errors: errors.length, durationMs }, "Journal entries sync completed");
    return {
        success,
        fiscalYears: fiscalYearStats,
        totals,
        errors,
        durationMs,
    };
}