rustfava 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rustfava/__init__.py +30 -0
- rustfava/_ctx_globals_class.py +55 -0
- rustfava/api_models.py +36 -0
- rustfava/application.py +534 -0
- rustfava/beans/__init__.py +6 -0
- rustfava/beans/abc.py +327 -0
- rustfava/beans/account.py +79 -0
- rustfava/beans/create.py +377 -0
- rustfava/beans/flags.py +20 -0
- rustfava/beans/funcs.py +38 -0
- rustfava/beans/helpers.py +52 -0
- rustfava/beans/ingest.py +75 -0
- rustfava/beans/load.py +31 -0
- rustfava/beans/prices.py +151 -0
- rustfava/beans/protocols.py +82 -0
- rustfava/beans/str.py +454 -0
- rustfava/beans/types.py +63 -0
- rustfava/cli.py +187 -0
- rustfava/context.py +13 -0
- rustfava/core/__init__.py +729 -0
- rustfava/core/accounts.py +161 -0
- rustfava/core/attributes.py +145 -0
- rustfava/core/budgets.py +207 -0
- rustfava/core/charts.py +301 -0
- rustfava/core/commodities.py +37 -0
- rustfava/core/conversion.py +229 -0
- rustfava/core/documents.py +87 -0
- rustfava/core/extensions.py +132 -0
- rustfava/core/fava_options.py +255 -0
- rustfava/core/file.py +542 -0
- rustfava/core/filters.py +484 -0
- rustfava/core/group_entries.py +97 -0
- rustfava/core/ingest.py +509 -0
- rustfava/core/inventory.py +167 -0
- rustfava/core/misc.py +105 -0
- rustfava/core/module_base.py +18 -0
- rustfava/core/number.py +106 -0
- rustfava/core/query.py +180 -0
- rustfava/core/query_shell.py +301 -0
- rustfava/core/tree.py +265 -0
- rustfava/core/watcher.py +219 -0
- rustfava/ext/__init__.py +232 -0
- rustfava/ext/auto_commit.py +61 -0
- rustfava/ext/portfolio_list/PortfolioList.js +34 -0
- rustfava/ext/portfolio_list/__init__.py +29 -0
- rustfava/ext/portfolio_list/templates/PortfolioList.html +15 -0
- rustfava/ext/rustfava_ext_test/RustfavaExtTest.js +42 -0
- rustfava/ext/rustfava_ext_test/__init__.py +207 -0
- rustfava/ext/rustfava_ext_test/templates/RustfavaExtTest.html +45 -0
- rustfava/ext/rustfava_ext_test/templates/RustfavaExtTestInclude.html +1 -0
- rustfava/help/__init__.py +15 -0
- rustfava/help/_index.md +29 -0
- rustfava/help/beancount_syntax.md +156 -0
- rustfava/help/budgets.md +31 -0
- rustfava/help/conversion.md +29 -0
- rustfava/help/extensions.md +111 -0
- rustfava/help/features.md +179 -0
- rustfava/help/filters.md +103 -0
- rustfava/help/import.md +27 -0
- rustfava/help/options.md +289 -0
- rustfava/helpers.py +30 -0
- rustfava/internal_api.py +221 -0
- rustfava/json_api.py +952 -0
- rustfava/plugins/__init__.py +3 -0
- rustfava/plugins/link_documents.py +107 -0
- rustfava/plugins/tag_discovered_documents.py +44 -0
- rustfava/py.typed +0 -0
- rustfava/rustledger/__init__.py +31 -0
- rustfava/rustledger/constants.py +76 -0
- rustfava/rustledger/engine.py +485 -0
- rustfava/rustledger/loader.py +273 -0
- rustfava/rustledger/options.py +202 -0
- rustfava/rustledger/query.py +331 -0
- rustfava/rustledger/types.py +830 -0
- rustfava/serialisation.py +220 -0
- rustfava/static/app.css +2988 -0
- rustfava/static/app.css.map +7 -0
- rustfava/static/app.js +12854 -0
- rustfava/static/app.js.map +7 -0
- rustfava/static/beancount-JFV44ZVZ.css +5 -0
- rustfava/static/beancount-JFV44ZVZ.css.map +7 -0
- rustfava/static/beancount-VTTKRGSK.js +4642 -0
- rustfava/static/beancount-VTTKRGSK.js.map +7 -0
- rustfava/static/bql-MGFRUMBP.js +333 -0
- rustfava/static/bql-MGFRUMBP.js.map +7 -0
- rustfava/static/chunk-E7ZF4ASL.js +23061 -0
- rustfava/static/chunk-E7ZF4ASL.js.map +7 -0
- rustfava/static/chunk-V24TLQHT.js +12673 -0
- rustfava/static/chunk-V24TLQHT.js.map +7 -0
- rustfava/static/favicon.ico +0 -0
- rustfava/static/fira-mono-cyrillic-400-normal-BLAGXRCE.woff2 +0 -0
- rustfava/static/fira-mono-cyrillic-500-normal-EN7JUAAW.woff2 +0 -0
- rustfava/static/fira-mono-cyrillic-ext-400-normal-EX7VARTS.woff2 +0 -0
- rustfava/static/fira-mono-cyrillic-ext-500-normal-ZDPTUPRR.woff2 +0 -0
- rustfava/static/fira-mono-greek-400-normal-COGHKMOA.woff2 +0 -0
- rustfava/static/fira-mono-greek-500-normal-4EN2PKZT.woff2 +0 -0
- rustfava/static/fira-mono-greek-ext-400-normal-DYEQIJH7.woff2 +0 -0
- rustfava/static/fira-mono-greek-ext-500-normal-SG73CVKQ.woff2 +0 -0
- rustfava/static/fira-mono-latin-400-normal-NA3VLV7E.woff2 +0 -0
- rustfava/static/fira-mono-latin-500-normal-YC77GFWD.woff2 +0 -0
- rustfava/static/fira-mono-latin-ext-400-normal-DIKTZ5PW.woff2 +0 -0
- rustfava/static/fira-mono-latin-ext-500-normal-ZWY4UO4V.woff2 +0 -0
- rustfava/static/fira-mono-symbols2-400-normal-UITXT77Q.woff2 +0 -0
- rustfava/static/fira-mono-symbols2-500-normal-VWPC2EFN.woff2 +0 -0
- rustfava/static/fira-sans-cyrillic-400-normal-KLQMBCA6.woff2 +0 -0
- rustfava/static/fira-sans-cyrillic-500-normal-NFG7UD6J.woff2 +0 -0
- rustfava/static/fira-sans-cyrillic-ext-400-normal-GWO44OPC.woff2 +0 -0
- rustfava/static/fira-sans-cyrillic-ext-500-normal-SP47E5SC.woff2 +0 -0
- rustfava/static/fira-sans-greek-400-normal-UMQBTLC3.woff2 +0 -0
- rustfava/static/fira-sans-greek-500-normal-4ZKHN4FQ.woff2 +0 -0
- rustfava/static/fira-sans-greek-ext-400-normal-O2DVJAJZ.woff2 +0 -0
- rustfava/static/fira-sans-greek-ext-500-normal-SK6GNWGO.woff2 +0 -0
- rustfava/static/fira-sans-latin-400-normal-OYYTPMAV.woff2 +0 -0
- rustfava/static/fira-sans-latin-500-normal-SMQPZW5A.woff2 +0 -0
- rustfava/static/fira-sans-latin-ext-400-normal-OAUP3WK5.woff2 +0 -0
- rustfava/static/fira-sans-latin-ext-500-normal-LY3YDR5Y.woff2 +0 -0
- rustfava/static/fira-sans-vietnamese-400-normal-OBMQ72MR.woff2 +0 -0
- rustfava/static/fira-sans-vietnamese-500-normal-Y4NZR5EU.woff2 +0 -0
- rustfava/static/source-code-pro-cyrillic-400-normal-TO22V6M3.woff2 +0 -0
- rustfava/static/source-code-pro-cyrillic-500-normal-OGBWWWYW.woff2 +0 -0
- rustfava/static/source-code-pro-cyrillic-ext-400-normal-XH44UCIA.woff2 +0 -0
- rustfava/static/source-code-pro-cyrillic-ext-500-normal-3Z6MMVM6.woff2 +0 -0
- rustfava/static/source-code-pro-greek-400-normal-OUXXUQWK.woff2 +0 -0
- rustfava/static/source-code-pro-greek-500-normal-JA2Z5UXO.woff2 +0 -0
- rustfava/static/source-code-pro-greek-ext-400-normal-WCDKMX7U.woff2 +0 -0
- rustfava/static/source-code-pro-greek-ext-500-normal-ZHVI4VKW.woff2 +0 -0
- rustfava/static/source-code-pro-latin-400-normal-QOGTXED5.woff2 +0 -0
- rustfava/static/source-code-pro-latin-500-normal-X57QEOLQ.woff2 +0 -0
- rustfava/static/source-code-pro-latin-ext-400-normal-QXC74NBF.woff2 +0 -0
- rustfava/static/source-code-pro-latin-ext-500-normal-QGOY7MTT.woff2 +0 -0
- rustfava/static/source-code-pro-vietnamese-400-normal-NPDCDTBA.woff2 +0 -0
- rustfava/static/source-code-pro-vietnamese-500-normal-M6PJKTR5.woff2 +0 -0
- rustfava/static/tree-sitter-beancount-MLXFQBZ5.wasm +0 -0
- rustfava/static/web-tree-sitter-RNOQ6E74.wasm +0 -0
- rustfava/template_filters.py +64 -0
- rustfava/templates/_journal_table.html +156 -0
- rustfava/templates/_layout.html +26 -0
- rustfava/templates/_query_table.html +88 -0
- rustfava/templates/beancount_file +18 -0
- rustfava/templates/help.html +23 -0
- rustfava/templates/macros/_account_macros.html +5 -0
- rustfava/templates/macros/_commodity_macros.html +13 -0
- rustfava/translations/bg/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/bg/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/ca/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/ca/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/de/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/de/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/es/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/es/LC_MESSAGES/messages.po +619 -0
- rustfava/translations/fa/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/fa/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/fr/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/fr/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/ja/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/ja/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/nl/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/nl/LC_MESSAGES/messages.po +617 -0
- rustfava/translations/pt/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/pt/LC_MESSAGES/messages.po +617 -0
- rustfava/translations/pt_BR/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/pt_BR/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/ru/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/ru/LC_MESSAGES/messages.po +617 -0
- rustfava/translations/sk/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/sk/LC_MESSAGES/messages.po +623 -0
- rustfava/translations/sv/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/sv/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/uk/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/uk/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/zh/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/zh/LC_MESSAGES/messages.po +618 -0
- rustfava/translations/zh_Hant_TW/LC_MESSAGES/messages.mo +0 -0
- rustfava/translations/zh_Hant_TW/LC_MESSAGES/messages.po +618 -0
- rustfava/util/__init__.py +157 -0
- rustfava/util/date.py +576 -0
- rustfava/util/excel.py +118 -0
- rustfava/util/ranking.py +79 -0
- rustfava/util/sets.py +18 -0
- rustfava/util/unreachable.py +20 -0
- rustfava-0.1.0.dist-info/METADATA +102 -0
- rustfava-0.1.0.dist-info/RECORD +187 -0
- rustfava-0.1.0.dist-info/WHEEL +5 -0
- rustfava-0.1.0.dist-info/entry_points.txt +2 -0
- rustfava-0.1.0.dist-info/licenses/AUTHORS +11 -0
- rustfava-0.1.0.dist-info/licenses/LICENSE +21 -0
- rustfava-0.1.0.dist-info/top_level.txt +1 -0
rustfava/json_api.py
ADDED
|
@@ -0,0 +1,952 @@
|
|
|
1
|
+
"""JSON API.
|
|
2
|
+
|
|
3
|
+
This module contains the url endpoints of the JSON API that is used by the web
|
|
4
|
+
interface for asynchronous functionality.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import hashlib
|
|
10
|
+
import logging
|
|
11
|
+
import shutil
|
|
12
|
+
from abc import abstractmethod
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from dataclasses import fields
|
|
15
|
+
from functools import wraps
|
|
16
|
+
from http import HTTPStatus
|
|
17
|
+
from inspect import Parameter
|
|
18
|
+
from inspect import signature
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from pprint import pformat
|
|
21
|
+
from typing import Any
|
|
22
|
+
from typing import TYPE_CHECKING
|
|
23
|
+
|
|
24
|
+
from flask import Blueprint
|
|
25
|
+
from flask import get_template_attribute
|
|
26
|
+
from flask import jsonify
|
|
27
|
+
from flask import request
|
|
28
|
+
from flask_babel import gettext
|
|
29
|
+
from pydantic import BaseModel
|
|
30
|
+
from pydantic import ValidationError as PydanticValidationError
|
|
31
|
+
|
|
32
|
+
from rustfava.api_models import FormatSourceRequest
|
|
33
|
+
from rustfava.api_models import SaveEntrySliceRequest
|
|
34
|
+
from rustfava.api_models import SaveSourceRequest
|
|
35
|
+
from rustfava.beans.abc import Document
|
|
36
|
+
from rustfava.beans.abc import Event
|
|
37
|
+
from rustfava.context import g
|
|
38
|
+
from rustfava.core import EntryNotFoundForHashError
|
|
39
|
+
from rustfava.core.conversion import UNITS
|
|
40
|
+
from rustfava.core.documents import filepath_in_document_folder
|
|
41
|
+
from rustfava.core.documents import is_document_or_import_file
|
|
42
|
+
from rustfava.core.file import GeneratedEntryError
|
|
43
|
+
from rustfava.core.file import get_entry_slice
|
|
44
|
+
from rustfava.core.filters import FilterError
|
|
45
|
+
from rustfava.core.group_entries import group_entries_by_type
|
|
46
|
+
from rustfava.core.ingest import filepath_in_primary_imports_folder
|
|
47
|
+
from rustfava.core.misc import align
|
|
48
|
+
from rustfava.helpers import RustfavaAPIError
|
|
49
|
+
from rustfava.internal_api import ChartApi
|
|
50
|
+
from rustfava.internal_api import get_errors
|
|
51
|
+
from rustfava.internal_api import get_ledger_data
|
|
52
|
+
from rustfava.serialisation import deserialise
|
|
53
|
+
from rustfava.serialisation import serialise
|
|
54
|
+
|
|
55
|
+
if TYPE_CHECKING: # pragma: no cover
|
|
56
|
+
from collections.abc import Callable
|
|
57
|
+
from collections.abc import Mapping
|
|
58
|
+
from collections.abc import Sequence
|
|
59
|
+
from datetime import date
|
|
60
|
+
from decimal import Decimal
|
|
61
|
+
|
|
62
|
+
from flask.wrappers import Response
|
|
63
|
+
|
|
64
|
+
from rustfava.beans.abc import Directive
|
|
65
|
+
from rustfava.core.ingest import FileImporters
|
|
66
|
+
from rustfava.core.inventory import SimpleCounterInventory
|
|
67
|
+
from rustfava.core.query import QueryResultTable
|
|
68
|
+
from rustfava.core.query import QueryResultText
|
|
69
|
+
from rustfava.core.tree import SerialisedTreeNode
|
|
70
|
+
from rustfava.internal_api import ChartData
|
|
71
|
+
from rustfava.util.date import DateRange
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
json_api = Blueprint("json_api", __name__)
|
|
75
|
+
log = logging.getLogger(__name__)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class ValidationError(Exception):
    """Validation of request data failed.

    Base class for the parameter-validation errors; the blueprint error
    handler maps any of these to a 400 BAD REQUEST response.
    """
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class MissingParameterValidationError(ValidationError):
    """Raised when a required request parameter is absent."""

    def __init__(self, param: str) -> None:
        msg = f"Parameter `{param}` is missing."
        super().__init__(msg)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
class IncorrectTypeValidationError(ValidationError):
    """Raised when a request parameter has an unexpected type."""

    def __init__(self, param: str, expected: type) -> None:
        msg = f"Parameter `{param}` of incorrect type - expected {expected}."
        super().__init__(msg)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class InvalidJsonRequestError(ValidationError):
    """Raised when the request body could not be parsed as JSON."""

    def __init__(self) -> None:
        msg = "Invalid JSON body."
        super().__init__(msg)
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def json_err(msg: str, status: HTTPStatus) -> Response:
    """Jsonify the error message.

    Args:
        msg: Error message; wrapped as ``{"error": msg}``.
        status: HTTP status code to set on the response.
    """
    res = jsonify({"error": msg})
    res.status = status
    return res
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def json_success(data: Any) -> Response:
    """Jsonify the response with caching headers.

    Wraps ``data`` together with the ledger's mtime. For GET requests it
    additionally sets a short-lived private Cache-Control header, an
    mtime-derived ETag, and answers If-None-Match conditional requests
    with an empty 304 response.
    """
    response = jsonify(
        {"data": data, "mtime": str(g.ledger.mtime)},
    )

    # Add Cache-Control for GET requests (private cache, short TTL)
    if request.method == "GET":
        response.headers["Cache-Control"] = "private, max-age=5"

        # Compute ETag from mtime for cache validation.  md5 is acceptable
        # here since it is not used for security (see usedforsecurity).
        etag = hashlib.md5(  # noqa: S324
            f"{g.ledger.mtime}".encode(),
            usedforsecurity=False,
        ).hexdigest()
        response.headers["ETag"] = f'"{etag}"'

        # Check If-None-Match for conditional requests; on a match, strip
        # the body and signal "not modified".
        if_none_match = request.headers.get("If-None-Match")
        if if_none_match and if_none_match.strip('"') == etag:
            response.status_code = 304
            response.data = b""

    return response
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
class FavaJSONAPIError(RustfavaAPIError):
    """An error with a HTTPStatus.

    Subclasses provide ``status``; the blueprint error handler uses it as
    the HTTP status code of the JSON error response.
    """

    @property
    @abstractmethod
    def status(self) -> HTTPStatus:
        """HTTP status that should be used for the response."""
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
class NotFoundError(FavaJSONAPIError):
    """Generic "not found" API error (404)."""

    status = HTTPStatus.NOT_FOUND

    def __init__(self) -> None:
        msg = "Not found."
        super().__init__(msg)
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
class TargetPathAlreadyExistsError(FavaJSONAPIError):
    """The given path already exists (409)."""

    status = HTTPStatus.CONFLICT

    def __init__(self, path: Path) -> None:
        msg = f"{path} already exists."
        super().__init__(msg)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
class DocumentDirectoryMissingError(FavaJSONAPIError):
    """No document directory was configured for the ledger (422)."""

    status = HTTPStatus.UNPROCESSABLE_ENTITY

    def __init__(self) -> None:
        msg = "You need to set a documents folder."
        super().__init__(msg)
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
class NoFileUploadedError(FavaJSONAPIError):
    """The request contained no uploaded file (400)."""

    status = HTTPStatus.BAD_REQUEST

    def __init__(self) -> None:
        msg = "No file uploaded."
        super().__init__(msg)
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
class UploadedFileIsMissingFilenameError(FavaJSONAPIError):
    """The uploaded file has no filename (400)."""

    status = HTTPStatus.BAD_REQUEST

    def __init__(self) -> None:
        msg = "Uploaded file is missing filename."
        super().__init__(msg)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
class NotAValidDocumentOrImportFileError(FavaJSONAPIError):
    """The path is neither a ledger document nor an import file (400)."""

    status = HTTPStatus.BAD_REQUEST

    def __init__(self, filename: str) -> None:
        # Bug fix: `filename` was previously unused and the message
        # contained a literal "(unknown)" placeholder.
        super().__init__(f"Not valid document or import file: '{filename}'.")
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
class NotAFileError(FavaJSONAPIError):
    """The given path does not refer to a regular file (422)."""

    status = HTTPStatus.UNPROCESSABLE_ENTITY

    def __init__(self, filename: str) -> None:
        # Bug fix: `filename` was previously unused and the message
        # contained a literal "(unknown)" placeholder.
        super().__init__(f"Not a file: '{filename}'")
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
# Blueprint-level error handlers: translate the exception types that the
# endpoint handlers may raise into JSON error responses with a fitting HTTP
# status code.  All handlers are named `_` on purpose; they are only ever
# invoked through the blueprint's errorhandler registry.


@json_api.errorhandler(RustfavaAPIError)
def _(error: RustfavaAPIError) -> Response:
    # Catch-all for API errors without a more specific status -> 500.
    log.error("Encountered RustfavaAPIError.", exc_info=error)
    return json_err(error.message, HTTPStatus.INTERNAL_SERVER_ERROR)


@json_api.errorhandler(FavaJSONAPIError)
def _(error: FavaJSONAPIError) -> Response:
    # Errors that carry their own HTTP status (see FavaJSONAPIError).
    return json_err(error.message, error.status)


@json_api.errorhandler(FilterError)
def _(error: FilterError) -> Response:
    # A malformed entry filter in the request -> client error.
    return json_err(error.message, HTTPStatus.BAD_REQUEST)


@json_api.errorhandler(OSError)
def _(error: OSError) -> Response:  # pragma: no cover
    # Filesystem failures (move/delete/save of documents and sources).
    log.error("Encountered OSError.", exc_info=error)
    return json_err(error.strerror or "", HTTPStatus.INTERNAL_SERVER_ERROR)


@json_api.errorhandler(ValidationError)
def _(error: ValidationError) -> Response:
    # Parameter validation failures from validate_func_arguments.
    return json_err(f"Invalid API request: {error!s}", HTTPStatus.BAD_REQUEST)


@json_api.errorhandler(PydanticValidationError)
def _(error: PydanticValidationError) -> Response:
    # Flatten pydantic's structured error list into one readable string.
    errors = "; ".join(
        f"{'.'.join(str(loc) for loc in e['loc'])}: {e['msg']}"
        for e in error.errors()
    )
    return json_err(f"Validation error: {errors}", HTTPStatus.BAD_REQUEST)


@json_api.errorhandler(EntryNotFoundForHashError)
def _(error: EntryNotFoundForHashError) -> Response:
    # The entry hash in the request does not match any ledger entry.
    return json_err(error.message, HTTPStatus.NOT_FOUND)


@json_api.errorhandler(GeneratedEntryError)
def _(error: GeneratedEntryError) -> Response:
    # Generated (plugin-created) entries have no source slice to edit.
    return json_err(error.message, HTTPStatus.UNPROCESSABLE_ENTITY)
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def validate_func_arguments(
    func: Callable[..., Any],
) -> Callable[[Mapping[str, str]], list[str]] | None:
    """Validate arguments for a function.

    This currently only works for strings and lists (but only does a shallow
    validation for lists).

    Args:
        func: The function to check parameters for.

    Returns:
        A function, which takes a Mapping and tries to construct a list of
        positional parameters for the given function or None if the function
        has no parameters.

    Raises:
        ValueError: If a parameter has an unsupported annotation or kind.
    """
    sig = signature(func)
    params: list[tuple[str, Any]] = []
    for param in sig.parameters.values():
        # Annotations are strings due to `from __future__ import annotations`.
        if param.annotation not in {"str", "list[Any]"}:  # pragma: no cover
            # Bug fix: `msg` was previously built as a one-element tuple
            # (trailing comma), so the ValueError rendered as "('...',)";
            # also fixed the grammar ("needs to str").
            msg = f"Type of param {param.name} needs to be str or list"
            raise ValueError(msg)
        if param.kind != Parameter.POSITIONAL_OR_KEYWORD:  # pragma: no cover
            msg2 = f"Param {param.name} should be positional"
            raise ValueError(msg2)
        params.append((param.name, str if param.annotation == "str" else list))

    if not params:
        return None

    def validator(mapping: Mapping[str, str]) -> list[str]:
        # Build the positional argument list in declaration order.
        args: list[str] = []
        for param, type_ in params:
            val = mapping.get(param, None)
            if val is None:
                raise MissingParameterValidationError(param)
            if not isinstance(val, type_):
                raise IncorrectTypeValidationError(param, type_)
            args.append(val)
        return args

    return validator
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def api_endpoint(func: Callable[..., Any]) -> Callable[[], Response]:
    """Register an API endpoint.

    The part of the function name up to the first underscore determines
    the accepted HTTP method. For GET and DELETE endpoints, the function
    parameters are extracted from the URL query string and passed to the
    decorated endpoint handler.
    """
    # e.g. "get_source_slice" -> method "get", route name "source_slice".
    method, _, name = func.__name__.partition("_")  # ty:ignore[unresolved-attribute]
    if method not in {"get", "delete", "put"}:  # pragma: no cover
        msg = f"Invalid endpoint function name: {func.__name__}"  # ty:ignore[unresolved-attribute]
        raise ValueError(msg)
    # None when `func` takes no parameters (see validate_func_arguments).
    validator = validate_func_arguments(func)

    @json_api.route(f"/{name}", methods=[method])
    @wraps(func)
    def _wrapper() -> Response:
        if validator is not None:
            if method == "put":
                # PUT parameters come from the JSON request body.
                request_json = request.get_json(silent=True)
                if request_json is None:
                    raise InvalidJsonRequestError
                data = request_json
            else:
                # GET/DELETE parameters come from the query string.
                data = request.args
            res = func(*validator(data))
        else:
            res = func()
        return json_success(res)

    return _wrapper
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
def pydantic_api_endpoint(
    model: type[BaseModel],
    method: str = "put",
) -> Callable[[Callable[..., Any]], Callable[[], Response]]:
    """Register an API endpoint with Pydantic validation.

    The decorated handler receives a validated instance of ``model`` built
    from the JSON request body.  Validation errors are turned into 400
    responses by the PydanticValidationError error handler.

    Args:
        model: Pydantic model class for request validation.
        method: HTTP method (put, delete).
    """

    def decorator(func: Callable[..., Any]) -> Callable[[], Response]:
        # Route name is the function name after the method prefix,
        # e.g. "put_source" -> "/source".
        name = func.__name__.partition("_")[2]

        @json_api.route(f"/{name}", methods=[method])
        @wraps(func)
        def _wrapper() -> Response:
            request_json = request.get_json(silent=True)
            if request_json is None:
                raise InvalidJsonRequestError
            validated = model.model_validate(request_json)
            res = func(validated)
            return json_success(res)

        return _wrapper

    return decorator
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
@api_endpoint
def get_changed() -> bool:
    """Whether any of the ledger's source files changed on disk."""
    has_changed = g.ledger.changed()
    return has_changed
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
# Register the GET endpoints for the error list and the ledger data,
# which are implemented in `rustfava.internal_api`.
api_endpoint(get_errors)
api_endpoint(get_ledger_data)
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
@api_endpoint
def get_payee_accounts(payee: str) -> Sequence[str]:
    """Rank accounts for the given payee."""
    ranked_accounts = g.ledger.attributes.payee_accounts(payee)
    return ranked_accounts
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
@api_endpoint
def get_query(query_string: str) -> QueryResultTable | QueryResultText:
    """Run a Beancount query over the filtered entries."""
    shell = g.ledger.query_shell
    entries = g.filtered.entries_with_all_prices
    return shell.execute_query_serialised(entries, query_string)
|
|
384
|
+
|
|
385
|
+
|
|
386
|
+
@api_endpoint
def get_extract(filename: str, importer: str) -> Sequence[Any]:
    """Extract entries using the ingest framework."""
    extracted = g.ledger.ingest.extract(filename, importer)
    return [serialise(entry) for entry in extracted]
|
|
391
|
+
|
|
392
|
+
|
|
393
|
+
@dataclass(frozen=True)
class Context:
    """Context for an entry."""

    # Serialised entry (the output of `serialise`).
    entry: Any
    # Account -> balance strings before/after the entry; may be None
    # (presumably when no balances apply to this entry type — see
    # g.ledger.context).
    balances_before: Mapping[str, Sequence[str]] | None
    balances_after: Mapping[str, Sequence[str]] | None
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
@api_endpoint
def get_context(entry_hash: str) -> Context:
    """Return the entry for the given hash with surrounding balances."""
    entry, balances_before, balances_after = g.ledger.context(entry_hash)
    serialised_entry = serialise(entry)
    return Context(serialised_entry, balances_before, balances_after)
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
@dataclass(frozen=True)
class SourceSlice:
    """Source slice for an entry."""

    # Checksum of the slice; passed back on save/delete calls (see
    # put_source_slice / delete_source_slice).
    sha256sum: str
    # Raw source text of the entry.
    slice: str
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
@api_endpoint
def get_source_slice(entry_hash: str) -> SourceSlice:
    """Return the source slice for the entry with the given hash."""
    target_entry = g.ledger.get_entry(entry_hash)
    slice_text, checksum = get_entry_slice(target_entry)
    return SourceSlice(checksum, slice_text)
|
|
423
|
+
|
|
424
|
+
|
|
425
|
+
@api_endpoint
def put_move(account: str, new_name: str, filename: str) -> str:
    """Move a document into the documents folder.

    Args:
        account: Account under which to file the document.
        new_name: New filename.
        filename: Current path of the file to move.

    Raises:
        DocumentDirectoryMissingError: If no documents folder is set.
        NotAFileError: If `filename` is not an existing file.
        TargetPathAlreadyExistsError: If the target path already exists.
    """
    if not g.ledger.options["documents"]:
        raise DocumentDirectoryMissingError

    new_path = filepath_in_document_folder(
        g.ledger.options["documents"][0],
        account,
        new_name,
        g.ledger,
    )
    file_path = Path(filename)

    if not file_path.is_file():
        raise NotAFileError(filename)
    if new_path.exists():
        raise TargetPathAlreadyExistsError(new_path)

    new_path.parent.mkdir(parents=True, exist_ok=True)
    shutil.move(filename, new_path)

    # Bug fix: the message previously contained a literal "(unknown)"
    # placeholder instead of the moved file's path.
    return f"Moved {filename} to {new_path}."
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
@api_endpoint
def get_payee_transaction(payee: str) -> Any:
    """Serialise the last transaction for the given payee (or None)."""
    last_txn = g.ledger.attributes.payee_transaction(payee)
    if not last_txn:
        return None
    return serialise(last_txn)
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
@api_endpoint
def get_narration_transaction(narration: str) -> Any:
    """Serialise the last transaction for the given narration (or None)."""
    last_txn = g.ledger.attributes.narration_transaction(narration)
    if not last_txn:
        return None
    return serialise(last_txn)
|
|
462
|
+
|
|
463
|
+
|
|
464
|
+
@api_endpoint
def get_narrations() -> Sequence[str]:
    """All narration strings used in the ledger."""
    attributes = g.ledger.attributes
    return attributes.narrations
|
|
468
|
+
|
|
469
|
+
|
|
470
|
+
@dataclass(frozen=True)
class SourceFile:
    """A source file: its path, checksum, and full contents.

    (Docstring previously copy-pasted from SourceSlice.)
    """

    # Path of the file, as passed back to put_source for saving.
    file_path: str
    # Checksum of the file contents.
    sha256sum: str
    # Full text of the file.
    source: str
|
|
477
|
+
|
|
478
|
+
|
|
479
|
+
@api_endpoint
def get_source() -> SourceFile:
    """Load one of the source files.

    Falls back to the configured default file and then to the main
    beancount file when no filename is given in the query string.
    """
    requested = (
        request.args.get("filename", "")
        or g.ledger.fava_options.default_file
        or g.ledger.beancount_file_path
    )
    contents, checksum = g.ledger.file.get_source(Path(requested))
    return SourceFile(file_path=requested, sha256sum=checksum, source=contents)
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
@pydantic_api_endpoint(SaveSourceRequest)
def put_source(req: SaveSourceRequest) -> str:
    """Write one of the source files and return the updated sha256sum."""
    target = Path(req.file_path)
    return g.ledger.file.set_source(target, req.source, req.sha256sum)
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
@pydantic_api_endpoint(SaveEntrySliceRequest)
def put_source_slice(req: SaveEntrySliceRequest) -> str:
    """Write an entry source slice and return the updated sha256sum."""
    ledger_file = g.ledger.file
    return ledger_file.save_entry_slice(req.entry_hash, req.source, req.sha256sum)
|
|
501
|
+
|
|
502
|
+
|
|
503
|
+
@api_endpoint
def delete_source_slice(entry_hash: str, sha256sum: str) -> str:
    """Remove the entry with the given hash from its source file."""
    ledger_file = g.ledger.file
    ledger_file.delete_entry_slice(entry_hash, sha256sum)
    return f"Deleted entry {entry_hash}."
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
@pydantic_api_endpoint(FormatSourceRequest)
def put_format_source(req: FormatSourceRequest) -> str:
    """Align the given Beancount source at the configured currency column."""
    column = g.ledger.fava_options.currency_column
    return align(req.source, column)
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
class FileDoesNotExistError(RustfavaAPIError):
    """The given file does not exist."""

    def __init__(self, filename: str) -> None:
        # Bug fix: `filename` was previously unused and the message
        # contained a literal "(unknown)" placeholder.
        super().__init__(f"{filename} does not exist.")
|
|
521
|
+
|
|
522
|
+
|
|
523
|
+
@api_endpoint
def delete_document(filename: str) -> str:
    """Delete a document.

    Raises:
        NotAValidDocumentOrImportFileError: If `filename` is neither a
            ledger document nor an import file.
        FileDoesNotExistError: If the file does not exist.
    """
    if not is_document_or_import_file(filename, g.ledger):
        raise NotAValidDocumentOrImportFileError(filename)

    file_path = Path(filename)
    if not file_path.exists():
        raise FileDoesNotExistError(filename)

    file_path.unlink()
    # Bug fix: the message previously contained a literal "(unknown)"
    # placeholder instead of the deleted file's path.
    return f"Deleted {filename}."
|
|
535
|
+
|
|
536
|
+
|
|
537
|
+
@api_endpoint
def put_add_document() -> str:
    """Upload a document.

    Reads the uploaded file from the multipart form data and stores it in
    the configured documents folder (under the `folder`/`account` form
    fields).  If a `hash` form field is present, a `document` metadata
    line pointing at the new file is inserted for the entry with that
    hash.

    Raises:
        DocumentDirectoryMissingError: If no documents folder is set.
        NoFileUploadedError: If no file is included in the request.
        UploadedFileIsMissingFilenameError: If the upload has no filename.
        TargetPathAlreadyExistsError: If the target path already exists.
    """
    if not g.ledger.options["documents"]:
        raise DocumentDirectoryMissingError

    upload = request.files.get("file", None)

    if upload is None:
        raise NoFileUploadedError
    if not upload.filename:
        raise UploadedFileIsMissingFilenameError

    filepath = filepath_in_document_folder(
        request.form["folder"],
        request.form["account"],
        upload.filename,
        g.ledger,
    )

    if filepath.exists():
        raise TargetPathAlreadyExistsError(filepath)

    filepath.parent.mkdir(parents=True, exist_ok=True)
    upload.save(filepath)

    # Optionally link the document to an entry via metadata.
    if request.form.get("hash"):
        g.ledger.file.insert_metadata(
            request.form["hash"],
            "document",
            filepath.name,
        )
    return f"Uploaded to {filepath}"
|
|
570
|
+
|
|
571
|
+
|
|
572
|
+
@api_endpoint
def put_attach_document(filename: str, entry_hash: str) -> str:
    """Attach a document to an entry by inserting `document` metadata.

    Args:
        filename: Path of the document to attach.
        entry_hash: Hash of the entry to attach it to.
    """
    g.ledger.file.insert_metadata(entry_hash, "document", filename)
    # Bug fix: the message previously contained a literal "(unknown)"
    # placeholder instead of the attached filename.
    return f"Attached '{filename}' to entry."
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
@api_endpoint
def put_add_entries(entries: list[Any]) -> str:
    """Add multiple entries.

    Args:
        entries: Serialised entries to deserialise and insert.

    Returns:
        A confirmation message with the number of stored entries.

    Raises:
        RustfavaAPIError: If deserialisation fails with a KeyError.
    """
    try:
        deserialised = [deserialise(entry) for entry in entries]
    except KeyError as error:  # pragma: no cover
        raise RustfavaAPIError(f"KeyError: {error}") from error

    g.ledger.file.insert_entries(deserialised)

    return f"Stored {len(deserialised)} entries."
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
@api_endpoint
def put_upload_import_file() -> str:
    """Upload a file for importing.

    Stores the uploaded file in the primary imports folder of the ledger.

    Returns:
        A confirmation message with the target path.

    Raises:
        NoFileUploadedError: If the request contains no file.
        UploadedFileIsMissingFilenameError: If the upload has no filename.
        TargetPathAlreadyExistsError: If the target path already exists.
    """
    upload = request.files.get("file", None)
    if upload is None:
        raise NoFileUploadedError
    if not upload.filename:
        raise UploadedFileIsMissingFilenameError

    target = filepath_in_primary_imports_folder(upload.filename, g.ledger)
    if target.exists():
        raise TargetPathAlreadyExistsError(target)

    target.parent.mkdir(parents=True, exist_ok=True)
    upload.save(target)

    return f"Uploaded to {target}"
|
|
611
|
+
|
|
612
|
+
|
|
613
|
+
########################################################################
|
|
614
|
+
# Reports
|
|
615
|
+
|
|
616
|
+
|
|
617
|
+
@api_endpoint
def get_journal() -> Sequence[Directive]:
    """Get all (filtered) entries, serialised for the frontend."""
    g.ledger.changed()
    return [serialise(entry) for entry in g.filtered.entries]
|
|
622
|
+
|
|
623
|
+
|
|
624
|
+
@dataclass(frozen=True)
class JournalPage:
    """A rendered journal page."""

    # Page number of this page (parsed from the request in get_journal_page).
    page: int
    # Total number of pages of the paginated journal.
    total_pages: int
    # Rendered HTML of the journal table for this page.
    journal: str
|
|
631
|
+
|
|
632
|
+
|
|
633
|
+
@api_endpoint
def get_journal_page(page: str, order: str) -> JournalPage:
    """Get the HTML contents for a Journal page.

    Args:
        page: Page number as a string.
        order: Sort order; anything other than "asc" means "desc".

    Raises:
        NotFoundError: If the requested page does not exist.
    """
    page_number = int(page)
    journal_table_contents = get_template_attribute(
        "_journal_table.html", "journal_table_contents"
    )
    # Only reload the ledger when the first page is requested.
    if page == "1":
        g.ledger.changed()
    sort_order = "asc" if order == "asc" else "desc"
    journal_page = g.filtered.paginate_journal(page_number, order=sort_order)
    if journal_page is None:
        raise NotFoundError
    return JournalPage(
        page=page_number,
        total_pages=journal_page.total_pages,
        journal=journal_table_contents(journal_page.entries),
    )
|
|
652
|
+
|
|
653
|
+
|
|
654
|
+
@api_endpoint
def get_events() -> Sequence[Event]:
    """Get all (filtered) events, serialised for the frontend."""
    g.ledger.changed()
    return [
        serialise(entry)
        for entry in g.filtered.entries
        if isinstance(entry, Event)
    ]
|
|
659
|
+
|
|
660
|
+
|
|
661
|
+
@api_endpoint
def get_imports() -> Sequence[FileImporters]:
    """Get a list of the importable files."""
    g.ledger.changed()
    return g.ledger.ingest.import_data()
|
|
666
|
+
|
|
667
|
+
|
|
668
|
+
@api_endpoint
def get_documents() -> Sequence[Document]:
    """Get all (filtered) documents, serialised for the frontend."""
    g.ledger.changed()
    return [
        serialise(entry)
        for entry in g.filtered.entries
        if isinstance(entry, Document)
    ]
|
|
675
|
+
|
|
676
|
+
|
|
677
|
+
@dataclass(frozen=True)
class Options:
    """Fava and Beancount options as strings."""

    # Fava options, pretty-printed; keys use dashes instead of underscores
    # (see get_options, which builds them via field.name.replace("_", "-")).
    fava_options: Mapping[str, str]
    # Beancount ledger options, values stringified with str().
    beancount_options: Mapping[str, str]
|
|
683
|
+
|
|
684
|
+
|
|
685
|
+
@api_endpoint
def get_options() -> Options:
    """Get all options, rendered to strings for displaying in the frontend."""
    g.ledger.changed()

    opts = g.ledger.fava_options
    # Fava option keys are exposed with dashes; values are pretty-printed.
    rendered_fava_options = {}
    for option_field in fields(opts):
        key = option_field.name.replace("_", "-")
        rendered_fava_options[key] = pformat(getattr(opts, option_field.name))

    rendered_beancount_options = {
        name: str(value) for name, value in g.ledger.options.items()
    }
    return Options(rendered_fava_options, rendered_beancount_options)
|
|
701
|
+
|
|
702
|
+
|
|
703
|
+
@dataclass(frozen=True)
class CommodityPairWithPrices:
    """A pair of commodities and prices for them."""

    # Base commodity of the pair.
    base: str
    # Quote commodity of the pair.
    quote: str
    # (date, price) points for base/quote from the filtered ledger.
    prices: Sequence[tuple[date, Decimal]]
|
|
710
|
+
|
|
711
|
+
|
|
712
|
+
@api_endpoint
def get_commodities() -> Sequence[CommodityPairWithPrices]:
    """Get the prices for all commodity pairs.

    Pairs without any price points in the filtered ledger are omitted.
    """
    g.ledger.changed()
    return [
        CommodityPairWithPrices(base, quote, price_points)
        for base, quote in g.ledger.commodity_pairs()
        if (price_points := g.filtered.prices(base, quote))
    ]
|
|
723
|
+
|
|
724
|
+
|
|
725
|
+
@dataclass(frozen=True)
class TreeReport:
    """Data for the tree reports."""

    # Date range of the filtered ledger; may be None.
    date_range: DateRange | None
    # Chart data to render alongside the trees.
    charts: Sequence[ChartData]
    # Serialised account trees for the report.
    trees: Sequence[SerialisedTreeNode]
|
|
732
|
+
|
|
733
|
+
|
|
734
|
+
@api_endpoint
def get_income_statement() -> TreeReport:
    """Get the data for the income statement.

    Builds three interval-total charts (net profit, income, expenses) and
    the income / net-profit / expenses account trees.
    """
    g.ledger.changed()
    options = g.ledger.options
    invert = g.ledger.fava_options.invert_income_liabilities_equity
    income = options["name_income"]
    expenses = options["name_expenses"]

    net_profit_label = gettext("Net Profit")
    charts = [
        ChartApi.interval_totals(
            g.interval,
            (income, expenses),
            label=net_profit_label,
            invert=invert,
        ),
        ChartApi.interval_totals(
            g.interval,
            income,
            label=f"{gettext('Income')} ({g.interval.label})",
            invert=invert,
        ),
        ChartApi.interval_totals(
            g.interval,
            expenses,
            label=f"{gettext('Expenses')} ({g.interval.label})",
        ),
    ]

    root_tree = g.filtered.root_tree
    trees = [
        root_tree.get(income),
        root_tree.net_profit(options, net_profit_label),
        root_tree.get(expenses),
    ]

    return TreeReport(
        g.filtered.date_range,
        charts,
        trees=[tree.serialise_with_context() for tree in trees],
    )
|
|
772
|
+
|
|
773
|
+
|
|
774
|
+
@api_endpoint
def get_balance_sheet() -> TreeReport:
    """Get the data for the balance sheet.

    Uses the closed root tree so equity includes the closing balances.
    """
    g.ledger.changed()
    options = g.ledger.options

    closed = g.filtered.root_tree_closed
    trees = [
        closed.get(options["name_assets"]),
        closed.get(options["name_liabilities"]),
        closed.get(options["name_equity"]),
    ]

    return TreeReport(
        g.filtered.date_range,
        [ChartApi.net_worth()],
        trees=[tree.serialise_with_context() for tree in trees],
    )
|
|
793
|
+
|
|
794
|
+
|
|
795
|
+
@api_endpoint
def get_trial_balance() -> TreeReport:
    """Get the data for the trial balance (the whole account tree)."""
    g.ledger.changed()

    root = g.filtered.root_tree.get("")

    return TreeReport(
        g.filtered.date_range,
        charts=[],
        trees=[root.serialise_with_context()],
    )
|
|
807
|
+
|
|
808
|
+
|
|
809
|
+
@dataclass(frozen=True)
class AccountBudget:
    """Budgets for an account."""

    # Budget for the account itself (from BudgetModule.calculate);
    # presumably keyed by currency — confirm against the budgets module.
    budget: Mapping[str, Decimal]
    # Budget including child accounts (from calculate_children).
    budget_children: Mapping[str, Decimal]
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
@dataclass(frozen=True)
class AccountReportJournal:
    """Data for the journal account report."""

    # Chart data for the account (balance and interval changes).
    charts: Sequence[ChartData]
    # Rendered HTML of the account's journal table.
    journal: str
|
|
823
|
+
|
|
824
|
+
|
|
825
|
+
@dataclass(frozen=True)
class AccountReportTree:
    """Data for the tree account reports."""

    # Chart data for the account (balance and interval changes).
    charts: Sequence[ChartData]
    # One serialised tree per interval, aligned with `dates`.
    interval_balances: Sequence[SerialisedTreeNode]
    # Per-account budgets, one AccountBudget per interval in `dates`.
    budgets: Mapping[str, Sequence[AccountBudget]]
    # The date ranges of the intervals.
    dates: Sequence[DateRange]
|
|
833
|
+
|
|
834
|
+
|
|
835
|
+
@api_endpoint
def get_account_report() -> AccountReportJournal | AccountReportTree:
    """Get the data for the account report.

    Reads the account name from the ``a`` query parameter and the subreport
    kind from ``r``. For ``r`` in {"changes", "balances"} an
    AccountReportTree with per-interval balances and budgets is returned;
    otherwise an AccountReportJournal with the rendered journal table.
    """
    g.ledger.changed()

    account_name = request.args.get("a", "")
    subreport = request.args.get("r")

    # Charts shown for every subreport variant.
    charts = [
        ChartApi.account_balance(account_name),
        ChartApi.interval_totals(
            g.interval,
            account_name,
            label=gettext("Changes"),
        ),
    ]

    if subreport in {"changes", "balances"}:
        # "balances" accumulates balances over the intervals instead of
        # showing per-interval changes.
        accumulate = subreport == "balances"
        interval_balances, dates = g.ledger.interval_balances(
            g.filtered,
            g.interval,
            account_name,
            accumulate=accumulate,
        )

        all_accounts = (
            interval_balances[0].accounts if interval_balances else []
        )
        # Only compute budgets for the account and its descendants.
        budget_accounts = [
            a for a in all_accounts if a.startswith(account_name)
        ]
        budgets_mod = g.ledger.budgets
        # NOTE(review): named "first" but takes dates[-1] — the intervals
        # presumably run newest-first, so the last element is the earliest
        # range; confirm against interval_balances.
        first_date_range = dates[-1]
        budgets = {
            account: [
                AccountBudget(
                    budgets_mod.calculate(
                        account,
                        # When accumulating, budgets always start from the
                        # earliest interval's begin date.
                        (first_date_range if accumulate else date_range).begin,
                        date_range.end,
                    ),
                    budgets_mod.calculate_children(
                        account,
                        (first_date_range if accumulate else date_range).begin,
                        date_range.end,
                    ),
                )
                for date_range in dates
            ]
            for account in budget_accounts
        }

        return AccountReportTree(
            charts,
            # Serialise the subtree rooted at the account for each interval,
            # valued at the interval's inclusive end date.
            interval_balances=[
                tree.get(account_name).serialise(
                    g.conv,
                    g.ledger.prices,
                    date_range.end_inclusive,
                    with_cost=False,
                )
                for tree, date_range in zip(
                    interval_balances, dates, strict=True
                )
            ],
            dates=dates,
            budgets=budgets,
        )

    # Default subreport: the rendered journal for the account.
    journal_table_contents = get_template_attribute(
        "_journal_table.html", "journal_table_contents"
    )
    # Render newest entries first.
    entries = reversed(
        g.ledger.account_journal(
            g.filtered,
            account_name,
            g.conv,
            with_children=g.ledger.fava_options.account_journal_include_children,
        )
    )
    return AccountReportJournal(
        charts,
        journal=journal_table_contents(entries, show_change_and_balance=True),
    )
|
|
920
|
+
|
|
921
|
+
|
|
922
|
+
@dataclass(frozen=True)
class Statistics:
    """Data for the statistics report."""

    # Rendered balance directives for all accounts
    # (from LedgerAccounts.all_balance_directives).
    all_balance_directives: str
    # Per-account balances in units (UNITS.apply on each tree node balance).
    balances: Mapping[str, SimpleCounterInventory]
    # Number of (filtered) entries per directive type name.
    entries_by_type: Mapping[str, int]
|
|
929
|
+
|
|
930
|
+
|
|
931
|
+
@api_endpoint
def get_statistics() -> Statistics:
    """Get the data for the statistics report."""
    g.ledger.changed()

    # Count filtered entries per directive type.
    grouped = group_entries_by_type(g.filtered.entries)
    entries_by_type = {
        type_name: len(group)
        for type_name, group in grouped._asdict().items()
    }

    # Balances in units for every account in the filtered tree.
    balances = {
        name: UNITS.apply(node.balance)
        for name, node in g.filtered.root_tree.items()
    }

    return Statistics(
        all_balance_directives=g.ledger.accounts.all_balance_directives(),
        balances=balances,
        entries_by_type=entries_by_type,
    )
|