tradedangerous 12.0.0__py3-none-any.whl → 12.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tradedangerous might be problematic. Click here for more details.
- tradedangerous/db/__init__.py +27 -0
- tradedangerous/db/adapter.py +191 -0
- tradedangerous/db/config.py +107 -0
- tradedangerous/db/engine.py +246 -0
- tradedangerous/db/lifecycle.py +332 -0
- tradedangerous/db/locks.py +208 -0
- tradedangerous/db/orm_models.py +455 -0
- tradedangerous/db/paths.py +112 -0
- tradedangerous/db/utils.py +661 -0
- tradedangerous/tradedb.py +6 -3
- tradedangerous/version.py +1 -1
- {tradedangerous-12.0.0.dist-info → tradedangerous-12.0.2.dist-info}/METADATA +6 -4
- {tradedangerous-12.0.0.dist-info → tradedangerous-12.0.2.dist-info}/RECORD +17 -8
- {tradedangerous-12.0.0.dist-info → tradedangerous-12.0.2.dist-info}/WHEEL +0 -0
- {tradedangerous-12.0.0.dist-info → tradedangerous-12.0.2.dist-info}/entry_points.txt +0 -0
- {tradedangerous-12.0.0.dist-info → tradedangerous-12.0.2.dist-info}/licenses/LICENSE +0 -0
- {tradedangerous-12.0.0.dist-info → tradedangerous-12.0.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"""TradeDangerous SQLAlchemy bootstrap package (Stage 3A, Part 1).
|
|
2
|
+
|
|
3
|
+
Side-effect free on import. Provides a minimal, cross-platform API
|
|
4
|
+
for config loading, path resolution, and engine/session bootstrap.
|
|
5
|
+
|
|
6
|
+
Usage:
|
|
7
|
+
from tradedangerous.db import (
|
|
8
|
+
load_config, resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path,
|
|
9
|
+
make_engine_from_config, get_session_factory, healthcheck,
|
|
10
|
+
)
|
|
11
|
+
"""
|
|
12
|
+
from .config import load_config
|
|
13
|
+
from .paths import resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path
|
|
14
|
+
from .engine import make_engine_from_config, get_session_factory, healthcheck
|
|
15
|
+
from .lifecycle import ensure_fresh_db
|
|
16
|
+
|
|
17
|
+
__all__ = [
|
|
18
|
+
"load_config",
|
|
19
|
+
"resolve_data_dir",
|
|
20
|
+
"resolve_tmp_dir",
|
|
21
|
+
"ensure_dir",
|
|
22
|
+
"resolve_db_config_path",
|
|
23
|
+
"make_engine_from_config",
|
|
24
|
+
"get_session_factory",
|
|
25
|
+
"healthcheck",
|
|
26
|
+
"ensure_fresh_db",
|
|
27
|
+
]
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
# tradedangerous/db/adapter.py
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
from contextlib import contextmanager
|
|
5
|
+
from typing import Dict, Generator, Iterable, Optional, Tuple
|
|
6
|
+
|
|
7
|
+
from sqlalchemy import select, func
|
|
8
|
+
from sqlalchemy.engine import Engine
|
|
9
|
+
from sqlalchemy.orm import Session
|
|
10
|
+
|
|
11
|
+
# Local engine + ORM (authoritative)
|
|
12
|
+
from .engine import make_engine_from_config, get_session_factory # uses env/CWD-resolved db_config.ini by default
|
|
13
|
+
from .orm_models import System, Station, Item, StationItem # canonical models
|
|
14
|
+
from .paths import resolve_db_config_path
|
|
15
|
+
|
|
16
|
+
# ---- Public factory ---------------------------------------------------------
|
|
17
|
+
|
|
18
|
+
def get_adapter_if_enabled(cfg_path: Optional[str] = None) -> "TradeDBReadAdapter | None":
    """
    Return a read adapter when [database] backend != 'sqlite', else None.

    Called by tradedb.py as a thin gate; construction is lazy, so no
    engine or session is built here (nor at import time).
    """
    import configparser, os
    path = cfg_path if cfg_path is not None else str(resolve_db_config_path())
    if not os.path.exists(path):
        return None
    parser = configparser.ConfigParser()
    with open(path, "r", encoding="utf-8") as stream:
        parser.read_file(stream)
    raw_backend = parser.get("database", "backend", fallback="sqlite") or "sqlite"
    if raw_backend.strip().lower() == "sqlite":
        return None
    # The adapter builds its engine lazily (Session property) to honour
    # "no side-effects at import".
    return TradeDBReadAdapter(path)
|
|
38
|
+
|
|
39
|
+
# ---- Adapter (read-only) ----------------------------------------------------
|
|
40
|
+
|
|
41
|
+
class TradeDBReadAdapter:
    """
    Very small, read-only façade over SQLAlchemy for legacy TradeDB reads:
      - systems() list
      - lookup system by name (case-insensitive)
      - station by (system_id, station_name) (case-insensitive)
      - average selling/buying prices (used by trade_cmd at detail>1)

    Construction is cheap: the engine and sessionmaker are only built on
    first access of the ``Session`` property, honouring the package's
    "no side-effects at import" rule.
    """
    def __init__(self, cfg_path: str):
        # Path to db_config.ini; engine construction is deferred.
        self._cfg_path = cfg_path
        self._engine: Optional[Engine] = None
        self._Session = None  # sessionmaker, built lazily

    # Lazy engine/session factory (no import-time work)
    @property
    def Session(self):
        if self._Session is None:
            engine = make_engine_from_config(self._cfg_path)
            self._engine = engine
            self._Session = get_session_factory(engine)
        return self._Session

    @contextmanager
    def session(self) -> Generator[Session, None, None]:
        """Yield an ORM session that is closed automatically on exit."""
        Session = self.Session
        with Session() as s:
            yield s

    @staticmethod
    def _ci_key(name: str) -> str:
        """
        Normalise a lookup value for case-insensitive comparison.

        Computed in Python rather than SQL: the previous implementation used
        ``func.cast(value, column.type)``, which SQLAlchemy renders as a
        generic two-argument ``cast(x, y)`` function call -- NOT the standard
        ``CAST(x AS type)`` -- and therefore fails at execution time on
        MariaDB and SQLite. Stripping + uppercasing here preserves the
        intended TRIM/UPPER semantics of the original query.
        """
        return name.strip().upper()

    # ---- Reads mapped to ORM ------------------------------------------------

    def list_system_rows(self) -> Iterable[Tuple[int, str, float, float, float, Optional[int]]]:
        """
        Shape matches legacy _loadSystems SELECT:
        (system_id, name, pos_x, pos_y, pos_z, added_id)
        """
        with self.session() as s:
            rows = s.execute(
                select(
                    System.system_id,
                    System.name,
                    System.pos_x,
                    System.pos_y,
                    System.pos_z,
                    System.added_id,
                )
            )
            for r in rows:
                yield (r.system_id, r.name, r.pos_x, r.pos_y, r.pos_z, r.added_id)

    def system_by_name(self, name_ci: str) -> Optional[Tuple[int, str, float, float, float, Optional[int]]]:
        """
        Case-insensitive (whitespace-trimmed) name match for System.
        Returns the same tuple shape as list_system_rows(), or None.
        """
        with self.session() as s:
            row = s.execute(
                select(
                    System.system_id, System.name, System.pos_x, System.pos_y, System.pos_z, System.added_id
                ).where(func.upper(System.name) == self._ci_key(name_ci))
            ).first()
            if not row:
                return None
            return (row.system_id, row.name, row.pos_x, row.pos_y, row.pos_z, row.added_id)

    def station_by_system_and_name(
        self, system_id: int, station_name_ci: str
    ) -> Optional[Tuple[int, int, str, int, str, str, str, str, str, str, str, str, str, int]]:
        """
        Return the single Station row by system + name (case-insensitive).
        Shape matches legacy _loadStations row consumed by Station(...):
        (station_id, system_id, name,
         ls_from_star, market, blackmarket, shipyard,
         max_pad_size, outfitting, rearm, refuel, repair, planetary, type_id)
        """
        with self.session() as s:
            r = s.execute(
                select(
                    Station.station_id,
                    Station.system_id,
                    Station.name,
                    Station.ls_from_star,
                    Station.market,
                    Station.blackmarket,
                    Station.shipyard,
                    Station.max_pad_size,
                    Station.outfitting,
                    Station.rearm,
                    Station.refuel,
                    Station.repair,
                    Station.planetary,
                    Station.type_id,
                ).where(
                    Station.system_id == system_id,
                    func.upper(Station.name) == self._ci_key(station_name_ci),
                )
            ).first()
            if not r:
                return None
            return (
                r.station_id,
                r.system_id,
                r.name,
                r.ls_from_star,
                r.market,
                r.blackmarket,
                r.shipyard,
                r.max_pad_size,
                r.outfitting,
                r.rearm,
                r.refuel,
                r.repair,
                r.planetary,
                r.type_id,
            )

    def average_selling(self) -> Dict[int, int]:
        """
        {item_id: avg_supply_price>0}
        Mirrors the legacy SQL used in TradeDB.getAverageSelling.

        NOTE(review): the WHERE on supply_price filters out the NULL rows the
        OUTER JOIN produces, so items with no positive price are dropped
        (the IFNULL is then effectively dead). This matches the legacy query;
        confirm before changing.
        """
        with self.session() as s:
            rows = s.execute(
                select(
                    Item.item_id,
                    func.IFNULL(func.avg(StationItem.supply_price), 0),
                )
                .select_from(Item.__table__.outerjoin(
                    StationItem, (Item.item_id == StationItem.item_id) & (StationItem.supply_price > 0)
                ))
                .where(StationItem.supply_price > 0)
                .group_by(Item.item_id)
            )
            return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}

    def average_buying(self) -> Dict[int, int]:
        """
        {item_id: avg_demand_price>0}
        Mirrors the legacy SQL used in TradeDB.getAverageBuying.
        (Same OUTER JOIN / WHERE caveat as average_selling.)
        """
        with self.session() as s:
            rows = s.execute(
                select(
                    Item.item_id,
                    func.IFNULL(func.avg(StationItem.demand_price), 0),
                )
                .select_from(Item.__table__.outerjoin(
                    StationItem, (Item.item_id == StationItem.item_id) & (StationItem.demand_price > 0)
                ))
                .where(StationItem.demand_price > 0)
                .group_by(Item.item_id)
            )
            return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
import configparser
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any, Dict
|
|
5
|
+
|
|
6
|
+
# In-code fallback configuration. Used when no db_config.ini is found, and
# as the per-key source of defaults during type coercion (_coerce_types).
DEFAULTS: Dict[str, Dict[str, Any]] = {
    "database": {"backend": "sqlite"},
    "mariadb": {
        "host": "127.0.0.1",
        "port": 3306,
        "user": "",
        "password": "",
        "name": "tradedangerous",
        "driver": "mariadbconnector",  # or 'pymysql'
        "charset": "utf8mb4",
    },
    "sqlite": {"sqlite_filename": "TradeDangerous.db"},
    "paths": {"data_dir": "./data", "tmp_dir": "./tmp"},
    "engine": {
        "pool_size": 10,
        "max_overflow": 20,
        "pool_timeout": 30,
        "pool_recycle": 1800,
        "isolation_level": "READ COMMITTED",
        "echo": False,
        "connect_timeout": 10,
    },
}
# --- Runtime default path correction ----------------------------------------
# Convert relative defaults ("./data", "./tmp") into absolute paths under the
# current working directory. This prevents first-run installs from resolving
# relative to the package install directory or venv when no db_config.ini exists.
# NOTE: this mutates DEFAULTS once, at import time of this module.
try:
    _cwd = Path.cwd()
    DEFAULTS["paths"]["data_dir"] = str((_cwd / "data").resolve())
    DEFAULTS["paths"]["tmp_dir"] = str((_cwd / "tmp").resolve())
except Exception:
    # Best effort; fall back to shipped defaults if CWD is inaccessible
    pass
# ---------------------------------------------------------------------------

# Hardened parser: allow inline comments and disable interpolation.
# Passed as **kwargs to every ConfigParser built by load_config().
CFG_KW = dict(inline_comment_prefixes=(";", "#"), interpolation=None)
|
|
44
|
+
|
|
45
|
+
def _parse_bool(s: str) -> bool:
|
|
46
|
+
return str(s).strip().lower() in {"1", "true", "yes", "on"}
|
|
47
|
+
|
|
48
|
+
def _as_int(s: str, default: int | None = None) -> int | None:
|
|
49
|
+
try:
|
|
50
|
+
return int(str(s).strip())
|
|
51
|
+
except (TypeError, ValueError):
|
|
52
|
+
return default
|
|
53
|
+
|
|
54
|
+
def _coerce_types(d: Dict[str, Any]) -> Dict[str, Any]:
|
|
55
|
+
eng = d.get("engine", {})
|
|
56
|
+
if "echo" in eng:
|
|
57
|
+
eng["echo"] = _parse_bool(eng["echo"]) if isinstance(eng["echo"], str) else bool(eng["echo"])
|
|
58
|
+
for k in ("pool_size", "max_overflow", "pool_timeout", "pool_recycle", "connect_timeout"):
|
|
59
|
+
if k in eng:
|
|
60
|
+
eng[k] = _as_int(eng[k], DEFAULTS["engine"][k])
|
|
61
|
+
if "mariadb" in d and "port" in d["mariadb"]:
|
|
62
|
+
d["mariadb"]["port"] = _as_int(d["mariadb"]["port"], DEFAULTS["mariadb"]["port"])
|
|
63
|
+
return d
|
|
64
|
+
|
|
65
|
+
def load_config(path: str | Path | None = None) -> Dict[str, Any]:
    """Load configuration as a dict with typed values.

    Search order:
      1) explicit *path* if provided
      2) TD_DB_CONFIG env (if file exists)
      3) ./db_config.ini (cwd)
      4) in-code DEFAULTS

    Returns a dict-of-sections; known engine/mariadb values are coerced to
    int/bool by _coerce_types(); everything else stays as read (strings).
    """
    cfg_path: Path | None = None
    if path is not None:
        p = Path(path)
        if p.exists():
            cfg_path = p
        # NOTE(review): an explicit *path* that does not exist silently falls
        # through to the CWD fallback below -- confirm this is intended.
    else:
        # Prefer environment variable if it points to an existing file
        try:
            from .paths import resolve_db_config_path
            env_candidate = resolve_db_config_path()
            if env_candidate.exists():
                cfg_path = env_candidate
        except Exception:
            # If anything goes wrong resolving the env, fall back to defaults below
            pass

    # Fall back to local file in CWD
    if cfg_path is None:
        p = Path.cwd() / "db_config.ini"
        if p.exists():
            cfg_path = p

    # start with defaults (shallow per-section copies so the file overlay
    # cannot mutate the module-level DEFAULTS)
    result: Dict[str, Any] = {k: (v.copy() if isinstance(v, dict) else v) for k, v in DEFAULTS.items()}

    if cfg_path:
        parser = configparser.ConfigParser(**CFG_KW)
        with cfg_path.open("r", encoding="utf-8") as fh:
            parser.read_file(fh)
        # Overlay file values (still raw strings at this point).
        for section in parser.sections():
            result.setdefault(section, {})
            for key, val in parser.items(section):
                result[section][key] = val

    return _coerce_types(result)
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
# tradedangerous/db/engine.py
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
import os, time
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, Dict, Mapping
|
|
6
|
+
import configparser
|
|
7
|
+
|
|
8
|
+
from sqlalchemy import create_engine, event, text
|
|
9
|
+
from sqlalchemy.engine import Engine, URL
|
|
10
|
+
from sqlalchemy.orm import sessionmaker
|
|
11
|
+
from sqlalchemy.pool import NullPool
|
|
12
|
+
from sqlalchemy.exc import OperationalError
|
|
13
|
+
|
|
14
|
+
from .paths import resolve_data_dir, resolve_tmp_dir, resolve_db_config_path
|
|
15
|
+
|
|
16
|
+
# ---------- config normalization & helpers ----------
|
|
17
|
+
|
|
18
|
+
def _ensure_default_config_file(target_path: Path | None) -> Path | None:
|
|
19
|
+
"""
|
|
20
|
+
If *target_path* is provided and no file exists there, write a minimal db_config.ini
|
|
21
|
+
built from in-code DEFAULTS. Returns the path if created, else None.
|
|
22
|
+
"""
|
|
23
|
+
if not target_path:
|
|
24
|
+
return None
|
|
25
|
+
if target_path.exists():
|
|
26
|
+
return target_path
|
|
27
|
+
# Build from DEFAULTS
|
|
28
|
+
from .config import DEFAULTS # typed defaults live here
|
|
29
|
+
target_path.parent.mkdir(parents=True, exist_ok=True)
|
|
30
|
+
cp = configparser.ConfigParser()
|
|
31
|
+
for section, mapping in DEFAULTS.items():
|
|
32
|
+
cp[section] = {}
|
|
33
|
+
if isinstance(mapping, Mapping):
|
|
34
|
+
for k, v in mapping.items():
|
|
35
|
+
cp[section][k] = str(v)
|
|
36
|
+
with target_path.open("w", encoding="utf-8") as fh:
|
|
37
|
+
cp.write(fh)
|
|
38
|
+
return target_path
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _cfg_to_dict(cfg: configparser.ConfigParser | Mapping[str, Any] | str | os.PathLike) -> Dict[str, Dict[str, Any]]:
|
|
42
|
+
"""
|
|
43
|
+
Normalise configuration input into a dict-of-sections.
|
|
44
|
+
|
|
45
|
+
Accepted inputs:
|
|
46
|
+
* dict-like mapping → returned as {section: {key: value}}
|
|
47
|
+
* ConfigParser → converted to nested dict (sections overlay DEFAULT section)
|
|
48
|
+
* str/Path → if file exists, read it; if missing, fall back to load_config()
|
|
49
|
+
|
|
50
|
+
NOTE:
|
|
51
|
+
- We do NOT raise on a missing path; we delegate to load_config() to honour the
|
|
52
|
+
documented resolution order (ENV → CWD → DEFAULTS).
|
|
53
|
+
"""
|
|
54
|
+
if isinstance(cfg, (str, os.PathLike)):
|
|
55
|
+
p = Path(cfg)
|
|
56
|
+
if p.exists():
|
|
57
|
+
cp = configparser.ConfigParser()
|
|
58
|
+
with p.open("r", encoding="utf-8") as fh:
|
|
59
|
+
cp.read_file(fh)
|
|
60
|
+
return _cfg_to_dict(cp)
|
|
61
|
+
# Missing provided path → use canonical loader with fallbacks
|
|
62
|
+
from .config import load_config
|
|
63
|
+
return load_config(None)
|
|
64
|
+
|
|
65
|
+
if isinstance(cfg, configparser.ConfigParser):
|
|
66
|
+
out: Dict[str, Dict[str, Any]] = {}
|
|
67
|
+
defaults = dict(cfg.defaults())
|
|
68
|
+
for sec in cfg.sections():
|
|
69
|
+
d = dict(defaults)
|
|
70
|
+
d.update({k: v for k, v in cfg.items(sec)})
|
|
71
|
+
out[sec] = d
|
|
72
|
+
for sec in ("database", "engine", "sqlite", "mariadb", "paths"):
|
|
73
|
+
out.setdefault(sec, dict(defaults))
|
|
74
|
+
return out
|
|
75
|
+
|
|
76
|
+
# Already a dict-like mapping of sections
|
|
77
|
+
return {k: dict(v) if isinstance(v, Mapping) else dict() for k, v in cfg.items()} # type: ignore[arg-type]
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _get(cfg: Dict[str, Any], section: str, key: str, default=None):
|
|
81
|
+
if section in cfg and key in cfg[section]:
|
|
82
|
+
return cfg[section][key]
|
|
83
|
+
if "database" in cfg and key in cfg["database"]:
|
|
84
|
+
return cfg["database"][key]
|
|
85
|
+
return default
|
|
86
|
+
|
|
87
|
+
def _get_int(cfg: Dict[str, Any], section: str, key: str, default=None):
    """_get() plus int() coercion; *default* when missing or non-numeric."""
    raw = _get(cfg, section, key, default)
    try:
        return int(raw)
    except (TypeError, ValueError):
        return default
|
|
92
|
+
|
|
93
|
+
def _get_bool(cfg: Dict[str, Any], section: str, key: str, default=None):
    """_get() plus boolean interpretation; *default* when the value is neither bool nor str."""
    truthy = {"1", "true", "yes", "on"}
    value = _get(cfg, section, key, default)
    if isinstance(value, bool):
        return value
    if not isinstance(value, str):
        return default
    return value.strip().lower() in truthy
|
|
100
|
+
|
|
101
|
+
# ---------- URL builders ----------
|
|
102
|
+
|
|
103
|
+
def _redact(url: str) -> str:
|
|
104
|
+
if "://" not in url:
|
|
105
|
+
return url
|
|
106
|
+
head, rest = url.split("://", 1)
|
|
107
|
+
if "@" in rest and ":" in rest.split("@", 1)[0]:
|
|
108
|
+
user_pass, host = rest.split("@", 1)
|
|
109
|
+
user = user_pass.split(":", 1)[0]
|
|
110
|
+
return f"{head}://{user}:***@{host}"
|
|
111
|
+
return f"{head}://{rest}"
|
|
112
|
+
|
|
113
|
+
def _make_mariadb_url(cfg: Dict[str, Any]) -> URL:
    """Assemble the SQLAlchemy URL for MariaDB/MySQL from the [mariadb] section."""
    driver = str(_get(cfg, "mariadb", "driver", "mariadbconnector")).strip().lower()
    # The native connector uses the 'mariadb' dialect; everything else (e.g.
    # pymysql) goes through the 'mysql' dialect.
    if driver == "mariadbconnector":
        drivername = "mariadb+" + driver
    else:
        drivername = "mysql+" + driver
    charset = str(_get(cfg, "mariadb", "charset", "utf8mb4"))
    return URL.create(
        drivername=drivername,
        username=str(_get(cfg, "mariadb", "user", "")),
        password=str(_get(cfg, "mariadb", "password", "")),
        host=str(_get(cfg, "mariadb", "host", "127.0.0.1")),
        port=int(_get(cfg, "mariadb", "port", 3306)),
        database=str(_get(cfg, "mariadb", "name", "tradedangerous")),
        query={"charset": charset},
    )
|
|
125
|
+
|
|
126
|
+
def _make_sqlite_url(cfg: Dict[str, Any]) -> str:
    """Build a sqlite+pysqlite URL pointing at the DB file in the resolved data dir."""
    base_dir = resolve_data_dir(cfg)
    # Honour legacy filename
    db_name = str(_get(cfg, "sqlite", "sqlite_filename", "TradeDangerous.db"))
    full_path = (base_dir / db_name).resolve()
    return f"sqlite+pysqlite:///{full_path.as_posix()}"
|
|
132
|
+
|
|
133
|
+
# ---------- Engine construction ----------
|
|
134
|
+
|
|
135
|
+
def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str, Any] | str | os.PathLike | None = None) -> Engine:
    """
    Build a SQLAlchemy Engine for either MariaDB or SQLite.

    Accepts: ConfigParser, dict-like {section:{k:v}}, path to INI file, or None.
    First-run behaviour:
      - If a path is provided but missing, or if no path is provided and no config is found,
        a default db_config.ini is CREATED in the resolved default location (CWD unless TD_DB_CONFIG
        points elsewhere), then loaded.

    Raises:
        ValueError: when [database] backend is neither 'mariadb' nor 'sqlite'.
    """
    ini_target: Path | None = None

    # If caller gave a specific path, prefer to materialise a default file there.
    if isinstance(cfg_or_path, (str, os.PathLike)):
        ini_target = Path(cfg_or_path)
        _ensure_default_config_file(ini_target)
    else:
        # No specific path: create (if missing) at the standard location
        # (CWD/db_config.ini by default, or the file pointed to by TD_DB_CONFIG).
        ini_target = resolve_db_config_path("db_config.ini")
        _ensure_default_config_file(ini_target)

    cfg = _cfg_to_dict(cfg_or_path if cfg_or_path is not None else str(ini_target))

    # Ensure dirs exist (used by various parts of the app)
    _ = resolve_data_dir(cfg)
    _ = resolve_tmp_dir(cfg)

    backend = str(_get(cfg, "database", "backend", "sqlite")).strip().lower()
    echo = bool(_get_bool(cfg, "engine", "echo", False))
    isolation = _get(cfg, "engine", "isolation_level", None)

    if backend == "mariadb":
        url = _make_mariadb_url(cfg)
        # Pool tuning: every knob falls back to the shipped default when unset
        # (the `or` also replaces explicit 0 with the default).
        connect_timeout = _get_int(cfg, "engine", "connect_timeout", 10) or 10
        pool_size = _get_int(cfg, "engine", "pool_size", 10) or 10
        max_overflow = _get_int(cfg, "engine", "max_overflow", 20) or 20
        pool_timeout = _get_int(cfg, "engine", "pool_timeout", 30) or 30
        pool_recycle = _get_int(cfg, "engine", "pool_recycle", 1800) or 1800
        engine = create_engine(
            url,
            echo=echo,
            pool_pre_ping=True,  # transparently replace stale pooled connections
            pool_size=pool_size,
            max_overflow=max_overflow,
            pool_timeout=pool_timeout,
            pool_recycle=pool_recycle,
            isolation_level=isolation or "READ COMMITTED",
            connect_args={"connect_timeout": connect_timeout},
        )
    elif backend == "sqlite":
        url = _make_sqlite_url(cfg)
        engine = create_engine(
            url,
            echo=echo,
            poolclass=NullPool,  # no connection pooling for file-based sqlite
            connect_args={"check_same_thread": False},
        )

        # Registered per-engine: runs on every new DBAPI connection.
        @event.listens_for(engine, "connect")
        def _set_sqlite_pragmas(dbapi_conn, _):
            cur = dbapi_conn.cursor()
            cur.execute("PRAGMA foreign_keys=ON")
            # NOTE: synchronous=OFF trades crash durability for write speed.
            cur.execute("PRAGMA synchronous=OFF")
            cur.execute("PRAGMA temp_store=MEMORY")
            cur.execute("PRAGMA auto_vacuum=INCREMENTAL")
            cur.close()
    else:
        raise ValueError(f"Unsupported backend: {backend}")

    try:
        # Stash a password-redacted URL on the engine for logging/diagnostics.
        engine._td_redacted_url = _redact(str(url))  # type: ignore[attr-defined]
    except Exception:
        pass
    return engine
|
|
210
|
+
# ---------- Session factory ----------
|
|
211
|
+
|
|
212
|
+
def get_session_factory(engine: Engine):
    """Return a sessionmaker bound to *engine* (expire_on_commit=False, autoflush=True)."""
    factory = sessionmaker(bind=engine, expire_on_commit=False, autoflush=True)
    return factory
|
|
214
|
+
|
|
215
|
+
# ---------- Health helpers ----------
|
|
216
|
+
|
|
217
|
+
def healthcheck(engine: Engine, retries: int = 0) -> bool:
    """
    Probe connectivity with ``SELECT 1``.

    On OperationalError the probe is retried up to *retries* more times,
    sleeping with exponential backoff (0.25s, 0.5s, 1s, ...) between tries.
    Returns True on the first successful round-trip, else False.
    """
    backoff = 0.25
    for attempt in range(retries + 1):
        try:
            with engine.connect() as conn:
                conn.execute(text("SELECT 1"))
            return True
        except OperationalError:
            if attempt == retries:
                return False
            time.sleep(backoff)
            backoff *= 2
    return False  # unreachable; satisfies static analysis
|
|
231
|
+
|
|
232
|
+
def read_sqlite_pragmas(engine: Engine) -> Dict[str, Any]:
    """
    Return the active PRAGMA settings for a SQLite engine.
    Safe no-op ({}) for any other dialect.
    """
    pragmas: Dict[str, Any] = {}
    with engine.connect() as conn:
        if conn.dialect.name != "sqlite":
            return pragmas
        for name in ("foreign_keys", "synchronous", "temp_store", "auto_vacuum"):
            pragmas[name] = conn.execute(text(f"PRAGMA {name}")).scalar()
    return pragmas
|