tradedangerous 12.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. py.typed +1 -0
  2. trade.py +49 -0
  3. tradedangerous/__init__.py +43 -0
  4. tradedangerous/cache.py +1381 -0
  5. tradedangerous/cli.py +136 -0
  6. tradedangerous/commands/TEMPLATE.py +74 -0
  7. tradedangerous/commands/__init__.py +244 -0
  8. tradedangerous/commands/buildcache_cmd.py +102 -0
  9. tradedangerous/commands/buy_cmd.py +427 -0
  10. tradedangerous/commands/commandenv.py +372 -0
  11. tradedangerous/commands/exceptions.py +94 -0
  12. tradedangerous/commands/export_cmd.py +150 -0
  13. tradedangerous/commands/import_cmd.py +222 -0
  14. tradedangerous/commands/local_cmd.py +243 -0
  15. tradedangerous/commands/market_cmd.py +207 -0
  16. tradedangerous/commands/nav_cmd.py +252 -0
  17. tradedangerous/commands/olddata_cmd.py +270 -0
  18. tradedangerous/commands/parsing.py +221 -0
  19. tradedangerous/commands/rares_cmd.py +298 -0
  20. tradedangerous/commands/run_cmd.py +1521 -0
  21. tradedangerous/commands/sell_cmd.py +262 -0
  22. tradedangerous/commands/shipvendor_cmd.py +60 -0
  23. tradedangerous/commands/station_cmd.py +68 -0
  24. tradedangerous/commands/trade_cmd.py +181 -0
  25. tradedangerous/commands/update_cmd.py +67 -0
  26. tradedangerous/corrections.py +55 -0
  27. tradedangerous/csvexport.py +234 -0
  28. tradedangerous/db/__init__.py +27 -0
  29. tradedangerous/db/adapter.py +192 -0
  30. tradedangerous/db/config.py +107 -0
  31. tradedangerous/db/engine.py +259 -0
  32. tradedangerous/db/lifecycle.py +332 -0
  33. tradedangerous/db/locks.py +208 -0
  34. tradedangerous/db/orm_models.py +500 -0
  35. tradedangerous/db/paths.py +113 -0
  36. tradedangerous/db/utils.py +661 -0
  37. tradedangerous/edscupdate.py +565 -0
  38. tradedangerous/edsmupdate.py +474 -0
  39. tradedangerous/formatting.py +210 -0
  40. tradedangerous/fs.py +156 -0
  41. tradedangerous/gui.py +1146 -0
  42. tradedangerous/mapping.py +133 -0
  43. tradedangerous/mfd/__init__.py +103 -0
  44. tradedangerous/mfd/saitek/__init__.py +3 -0
  45. tradedangerous/mfd/saitek/directoutput.py +678 -0
  46. tradedangerous/mfd/saitek/x52pro.py +195 -0
  47. tradedangerous/misc/checkpricebounds.py +287 -0
  48. tradedangerous/misc/clipboard.py +49 -0
  49. tradedangerous/misc/coord64.py +83 -0
  50. tradedangerous/misc/csvdialect.py +57 -0
  51. tradedangerous/misc/derp-sentinel.py +35 -0
  52. tradedangerous/misc/diff-system-csvs.py +159 -0
  53. tradedangerous/misc/eddb.py +81 -0
  54. tradedangerous/misc/eddn.py +349 -0
  55. tradedangerous/misc/edsc.py +437 -0
  56. tradedangerous/misc/edsm.py +121 -0
  57. tradedangerous/misc/importeddbstats.py +54 -0
  58. tradedangerous/misc/prices-json-exp.py +179 -0
  59. tradedangerous/misc/progress.py +194 -0
  60. tradedangerous/plugins/__init__.py +249 -0
  61. tradedangerous/plugins/edcd_plug.py +371 -0
  62. tradedangerous/plugins/eddblink_plug.py +861 -0
  63. tradedangerous/plugins/edmc_batch_plug.py +133 -0
  64. tradedangerous/plugins/spansh_plug.py +2647 -0
  65. tradedangerous/prices.py +211 -0
  66. tradedangerous/submit-distances.py +422 -0
  67. tradedangerous/templates/Added.csv +37 -0
  68. tradedangerous/templates/Category.csv +17 -0
  69. tradedangerous/templates/RareItem.csv +143 -0
  70. tradedangerous/templates/TradeDangerous.sql +338 -0
  71. tradedangerous/tools.py +40 -0
  72. tradedangerous/tradecalc.py +1302 -0
  73. tradedangerous/tradedb.py +2320 -0
  74. tradedangerous/tradeenv.py +313 -0
  75. tradedangerous/tradeenv.pyi +109 -0
  76. tradedangerous/tradeexcept.py +131 -0
  77. tradedangerous/tradeorm.py +183 -0
  78. tradedangerous/transfers.py +192 -0
  79. tradedangerous/utils.py +243 -0
  80. tradedangerous/version.py +16 -0
  81. tradedangerous-12.7.6.dist-info/METADATA +106 -0
  82. tradedangerous-12.7.6.dist-info/RECORD +87 -0
  83. tradedangerous-12.7.6.dist-info/WHEEL +5 -0
  84. tradedangerous-12.7.6.dist-info/entry_points.txt +3 -0
  85. tradedangerous-12.7.6.dist-info/licenses/LICENSE +373 -0
  86. tradedangerous-12.7.6.dist-info/top_level.txt +2 -0
  87. tradegui.py +24 -0
@@ -0,0 +1,234 @@
1
+ from pathlib import Path
2
+ import csv
3
+
4
+ from sqlalchemy import inspect, text
5
+ from sqlalchemy.orm import Session
6
+
7
+ from .tradeexcept import TradeException
8
+ from .db import utils as db_utils
9
+
10
+
11
+ ######################################################################
12
+ # TradeDangerous :: Modules :: CSV Exporter
13
+ #
14
+ # Generate CSV files for database tables.
15
+ #
16
+ # Assumptions:
17
+ # * Each table has at most one UNIQUE index.
18
+ # * Referenced tables also have a UNIQUE index.
19
+ # * Only single-column foreign keys are supported.
20
+ # * Single-column primary keys are inferred automatically by SQLAlchemy.
21
+ #
22
+ # CAUTION: If the schema changes this module may require updates.
23
+ ######################################################################
24
+
25
+ ######################################################################
26
+ # Default values
27
+
28
+ # For some tables the first two columns will be reversed
29
+ reverseList = []
30
+
31
+ ######################################################################
32
+ # Helpers
33
+ ######################################################################
34
+
35
def search_keyList(items, val):
    """
    Return the first entry whose 'from' and 'to' keys BOTH equal *val*,
    or None when no such entry exists.
    """
    return next(
        (entry for entry in items if entry["from"] == val and entry["to"] == val),
        None,
    )
40
+
41
+
42
def getUniqueIndex(session, tableName):
    """
    Collect the column names of every UNIQUE index on *tableName*,
    using the SQLAlchemy inspector bound to the session's engine.
    Columns from multiple unique indexes are concatenated in
    inspector order.
    """
    inspector = inspect(session.get_bind())
    return [
        column
        for index in inspector.get_indexes(tableName)
        if index.get("unique")
        for column in index.get("column_names", [])
    ]
50
+
51
+
52
def getFKeyList(session, tableName):
    """
    List the single-column foreign keys of *tableName*.

    Each result entry has the shape:
        {"table": referred_table, "from": local_column, "to": referred_column}
    Composite (multi-column) foreign keys are skipped entirely.
    """
    inspector = inspect(session.get_bind())
    result = []
    for fkInfo in inspector.get_foreign_keys(tableName):
        local = fkInfo.get("constrained_columns", [])
        remote = fkInfo.get("referred_columns", [])
        # Only single-column FKs are supported by this exporter.
        if len(local) != 1 or len(remote) != 1:
            continue
        result.append({
            "table": fkInfo.get("referred_table"),
            "from": local[0],
            "to": remote[0],
        })
    return result
66
+
67
+
68
def buildFKeyStmt(session, tableName, key):
    """
    Resolve the FK constraint against the UNIQUE index of the
    referenced table.

    Multicolumn UNIQUEs are allowed, but only the last column
    may be treated as a single-column join target.
    """
    refTable = key["table"]
    uniqueCols = getUniqueIndex(session, refTable)
    refFKeys = getFKeyList(session, refTable)

    statements = []
    for colName in uniqueCols:
        nested = search_keyList(refFKeys, colName)
        if nested is None:
            # Plain unique column: join directly against it.
            statements.append({
                "table": tableName,
                "column": colName,
                "joinTable": refTable,
                "joinColumn": key["to"],
            })
        else:
            # The unique column is itself a foreign key: follow it recursively.
            statements += buildFKeyStmt(session, refTable, nested)
    return statements
94
+
95
+
96
+ ######################################################################
97
+ # Code
98
+ ######################################################################
99
+
100
def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
    """
    Generate the CSV file for tableName in csvPath.
    Returns (lineCount, exportPath).

    Behaviour:
      - Prefix unique columns with "unq:".
      - Foreign keys are exported as "<col>@<joinTable>.<uniqueCol>".
      - Datetime-like values for 'modified' columns are exported as
        "YYYY-MM-DD HH:MM:SS" (no microseconds).

    Compatible with either:
      * a SQLAlchemy Session
      * a TradeDB wrapper exposing .engine

    Raises TradeException when the DB object is unsupported, the save
    location is missing, introspection fails, or a referenced table has
    no unique column.
    """
    # --- Resolve a SQLAlchemy session ---
    # Track whether WE created the session, so only ours gets closed.
    ownsSession = False
    if hasattr(tdb_or_session, "engine"):
        # Likely a TradeDB instance
        session = Session(tdb_or_session.engine)
        ownsSession = True
    elif hasattr(tdb_or_session, "get_bind"):
        # Already a Session
        session = tdb_or_session
    else:
        raise TradeException(
            f"Unsupported DB object passed to exportTableToFile: {type(tdb_or_session)}"
        )

    # BUGFIX: the session we create must be closed even when an exception
    # escapes (previously it was closed only on the success path, leaking
    # a connection on any error).
    try:
        csvPath = csvPath or Path(tdenv.csvDir)
        if not Path(csvPath).is_dir():
            raise TradeException(f"Save location '{csvPath}' not found.")

        uniquePfx = "unq:"
        exportPath = (Path(csvPath) / Path(tableName)).with_suffix(".csv")
        tdenv.DEBUG0(f"Export Table '{tableName}' to '{exportPath}'")

        def _fmt_ts(val) -> str:
            # Normalise datetime-like objects (anything with strftime) or
            # ISO-ish strings to "YYYY-MM-DD HH:MM:SS"; other values pass
            # through unchanged.
            if (formatter := getattr(val, "strftime", None)):
                try:
                    return formatter("%Y-%m-%d %H:%M:%S")
                except Exception:
                    pass

            if isinstance(val, str) and len(val) >= 19:
                if val[10] == "T":  # ISO-8601 "T" separator -> space
                    val = f"{val[:10]} {val[11:]}"
                # check the punctuation in YYYY-MM-DD HH:mm:ss
                if val[4] == "-" and val[7] == "-" and val[10] == " " and val[13] == ":" and val[16] == ":":
                    return val[:19]

            return val

        lineCount = 0
        with exportPath.open("w", encoding="utf-8", newline="\n") as exportFile:
            exportOut = csv.writer(
                exportFile,
                delimiter=",",
                quotechar="'",
                doublequote=True,
                quoting=csv.QUOTE_NONNUMERIC,
                lineterminator="\n",
            )

            inspector = inspect(session.get_bind())

            try:
                unique_cols = db_utils.get_unique_columns(session, tableName)
                fk_list = db_utils.get_foreign_keys(session, tableName)
            except Exception as e:
                raise TradeException(f"Failed to introspect table '{tableName}': {e!r}") from None

            csvHead = []              # header cells, in SELECT order
            stmtColumn = []           # SELECT column expressions
            stmtTable = [tableName]   # FROM clause + JOINs
            stmtOrder = []            # ORDER BY expressions
            is_modified_col = []      # parallel to stmtColumn: run _fmt_ts on this column?

            for col in inspector.get_columns(tableName):
                col_name = col["name"]
                fk = next((fk for fk in fk_list if fk["from"] == col_name), None)
                if fk:
                    # FK column: export the referenced table's first unique
                    # column instead of the raw id.
                    joinTable = fk["table"]
                    joinColumn = fk["to"]
                    join_unique_cols = db_utils.get_unique_columns(session, joinTable)
                    if not join_unique_cols:
                        raise TradeException(
                            f"No unique column found in referenced table '{joinTable}'"
                        )
                    export_col = join_unique_cols[0]
                    csvPfx = uniquePfx if col_name in unique_cols else ""
                    csvHead.append(f"{csvPfx}{col_name}@{joinTable}.{export_col}")
                    stmtColumn.append(f"{joinTable}.{export_col}")
                    is_modified_col.append(export_col == "modified")
                    # Nullable FKs must not drop rows, hence the OUTER join.
                    nullable = bool(col.get("nullable", True))
                    join_type = "LEFT OUTER JOIN" if nullable else "INNER JOIN"
                    stmtTable.append(
                        f"{join_type} {joinTable} ON {tableName}.{col_name} = {joinTable}.{joinColumn}"
                    )
                    stmtOrder.append(f"{joinTable}.{export_col}")
                else:
                    if col_name in unique_cols:
                        csvHead.append(uniquePfx + col_name)
                        stmtOrder.append(f"{tableName}.{col_name}")
                    else:
                        csvHead.append(col_name)
                    stmtColumn.append(f"{tableName}.{col_name}")
                    is_modified_col.append(col_name == "modified")

            sqlStmt = f"SELECT {','.join(stmtColumn)} FROM {' '.join(stmtTable)}"
            if stmtOrder:
                sqlStmt += f" ORDER BY {','.join(stmtOrder)}"
            tdenv.DEBUG1(f"SQL: {sqlStmt}")

            # Header row is written raw (unquoted), matching legacy format.
            exportFile.write(f"{','.join(csvHead)}\n")

            for row in session.execute(text(sqlStmt)):
                lineCount += 1
                row_out = [
                    _fmt_ts(val) if is_modified_col[i] else val
                    for i, val in enumerate(row)
                ]
                tdenv.DEBUG2(f"{lineCount}: {row_out}")
                exportOut.writerow(row_out)

        tdenv.DEBUG1(f"{lineCount} {tableName}s exported")
    finally:
        # Close session only if we created it; callers own theirs.
        if ownsSession:
            session.close()

    return lineCount, exportPath
@@ -0,0 +1,27 @@
1
+ """TradeDangerous SQLAlchemy bootstrap package (Stage 3A, Part 1).
2
+
3
+ Side-effect free on import. Provides a minimal, cross-platform API
4
+ for config loading, path resolution, and engine/session bootstrap.
5
+
6
+ Usage:
7
+ from tradedangerous.db import (
8
+ load_config, resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path,
9
+ make_engine_from_config, get_session_factory, healthcheck,
10
+ )
11
+ """
12
+ from .config import load_config
13
+ from .paths import resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path
14
+ from .engine import make_engine_from_config, get_session_factory, healthcheck
15
+ from .lifecycle import ensure_fresh_db
16
+
17
+ __all__ = [
18
+ "load_config",
19
+ "resolve_data_dir",
20
+ "resolve_tmp_dir",
21
+ "ensure_dir",
22
+ "resolve_db_config_path",
23
+ "make_engine_from_config",
24
+ "get_session_factory",
25
+ "healthcheck",
26
+ "ensure_fresh_db",
27
+ ]
@@ -0,0 +1,192 @@
1
+ # tradedangerous/db/adapter.py
2
+ from __future__ import annotations
3
+
4
+ from contextlib import contextmanager
5
+ from typing import Dict, Generator, Iterable, Optional, Tuple
6
+ import configparser
7
+ import os
8
+
9
+ from sqlalchemy import select, func
10
+ from sqlalchemy.engine import Engine
11
+
12
+ # Local engine + ORM (authoritative)
13
+ from .engine import make_engine_from_config, get_session_factory # uses env/CWD-resolved db_config.ini by default
14
+ from .orm_models import System, Station, Item, StationItem # canonical models
15
+ from .paths import resolve_db_config_path
16
+
17
+
18
+ # ---- Public factory ---------------------------------------------------------
19
+
20
def get_adapter_if_enabled(cfg_path: Optional[str] = None) -> "TradeDBReadAdapter | None":
    """
    Return an adapter when [database] backend != 'sqlite', else None.
    - No engine/session created at import: construction is lazy.
    - This is called by tradedb.py (thin gate).
    """
    if cfg_path is None:
        cfg_path = str(resolve_db_config_path())
    if not os.path.exists(cfg_path):
        return None

    parser = configparser.ConfigParser()
    with open(cfg_path, "r", encoding="utf-8") as fh:
        parser.read_file(fh)

    raw_backend = parser.get("database", "backend", fallback="sqlite") or "sqlite"
    if raw_backend.strip().lower() == "sqlite":
        return None

    # Engine is created lazily via the adapter's property to honour
    # "no side-effects at import".
    return TradeDBReadAdapter(cfg_path)
39
+
40
+ # ---- Adapter (read-only) ----------------------------------------------------
41
+
42
class TradeDBReadAdapter:
    """
    Very small, read-only façade over SQLAlchemy for legacy TradeDB reads:
      - systems() list
      - lookup system by name (case-insensitive)
      - station by (system_id, station_name) (case-insensitive)
      - average selling/buying prices (used by trade_cmd at detail>1)

    Construction is cheap: no engine or connection is created until the
    first query via the lazy `Session` property.
    """
    def __init__(self, cfg_path: str):
        # Path to db_config.ini; engine/session creation is deferred.
        self._cfg_path = cfg_path
        self._engine: Optional[Engine] = None
        self._Session = None  # sessionmaker

    # Lazy engine/session factory (no import-time work)
    @property
    def Session(self):
        # First access builds the engine from config and caches the
        # sessionmaker; later accesses reuse both.
        if self._Session is None:
            engine = make_engine_from_config(self._cfg_path)
            self._engine = engine
            self._Session = get_session_factory(engine)
        return self._Session

    @contextmanager
    def session(self) -> Generator[Session, None, None]:
        # Context manager yielding a fresh ORM session, closed on exit.
        # NOTE(review): 'Session' in this annotation is not imported at module
        # level; it only works because `from __future__ import annotations`
        # keeps annotations lazy — confirm before evaluating annotations at runtime.
        Session = self.Session
        with Session() as s:
            yield s

    # ---- Reads mapped to ORM ------------------------------------------------

    def list_system_rows(self) -> Iterable[Tuple[int, str, float, float, float, Optional[int]]]:
        """
        Shape matches legacy _loadSystems SELECT:
        (system_id, name, pos_x, pos_y, pos_z, added_id)
        """
        # Generator: the session stays open while rows are being consumed.
        with self.session() as s:
            rows = s.execute(
                select(
                    System.system_id,
                    System.name,
                    System.pos_x,
                    System.pos_y,
                    System.pos_z,
                    System.added_id,
                )
            )
            for r in rows:
                yield (r.system_id, r.name, r.pos_x, r.pos_y, r.pos_z, r.added_id)

    def system_by_name(self, name_ci: str) -> Optional[Tuple[int, str, float, float, float, Optional[int]]]:
        """
        Case-insensitive name match for System.

        Matching is done in SQL: the input is cast to the column's type,
        trimmed, and upper-cased on both sides. Returns the legacy tuple
        shape or None when no system matches.
        """
        with self.session() as s:
            row = s.execute(
                select(
                    System.system_id, System.name, System.pos_x, System.pos_y, System.pos_z, System.added_id
                ).where(func.upper(System.name) == func.upper(func.trim(func.cast(name_ci, System.name.type))))
            ).first()
            if not row:
                return None
            return (row.system_id, row.name, row.pos_x, row.pos_y, row.pos_z, row.added_id)

    def station_by_system_and_name(
        self, system_id: int, station_name_ci: str
    ) -> Optional[Tuple[int, int, str, int, str, str, str, str, str, str, str, str, str, int]]:
        """
        Return the single Station row by system + name (CI).
        Shape matches legacy _loadStations row consumed by Station(...):
          (station_id, system_id, name,
           ls_from_star, market, blackmarket, shipyard,
           max_pad_size, outfitting, rearm, refuel, repair, planetary, type_id)
        """
        with self.session() as s:
            r = s.execute(
                select(
                    Station.station_id,
                    Station.system_id,
                    Station.name,
                    Station.ls_from_star,
                    Station.market,
                    Station.blackmarket,
                    Station.shipyard,
                    Station.max_pad_size,
                    Station.outfitting,
                    Station.rearm,
                    Station.refuel,
                    Station.repair,
                    Station.planetary,
                    Station.type_id,
                ).where(
                    Station.system_id == system_id,
                    # Same SQL-side case-insensitive comparison as system_by_name.
                    func.upper(Station.name) == func.upper(func.trim(func.cast(station_name_ci, Station.name.type))),
                )
            ).first()
            if not r:
                return None
            return (
                r.station_id,
                r.system_id,
                r.name,
                r.ls_from_star,
                r.market,
                r.blackmarket,
                r.shipyard,
                r.max_pad_size,
                r.outfitting,
                r.rearm,
                r.refuel,
                r.repair,
                r.planetary,
                r.type_id,
            )

    def average_selling(self) -> Dict[int, int]:
        """
        {item_id: avg_supply_price>0}
        Mirrors the legacy SQL used in TradeDB.getAverageSelling.

        NOTE(review): the WHERE on supply_price > 0 filters out the NULL
        rows the OUTER join would otherwise produce, so the IFNULL default
        never fires — kept as-is to mirror the legacy query; confirm.
        """
        with self.session() as s:
            rows = s.execute(
                select(
                    Item.item_id,
                    func.IFNULL(func.avg(StationItem.supply_price), 0),
                )
                .select_from(Item.__table__.outerjoin(
                    StationItem, (Item.item_id == StationItem.item_id) & (StationItem.supply_price > 0)
                ))
                .where(StationItem.supply_price > 0)
                .group_by(Item.item_id)
            )
            return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}

    def average_buying(self) -> Dict[int, int]:
        """
        {item_id: avg_demand_price>0}
        Mirrors the legacy SQL used in TradeDB.getAverageBuying.

        NOTE(review): same OUTER-join/WHERE interplay as average_selling;
        IFNULL is MariaDB/MySQL-specific, which matches this adapter only
        being active for non-sqlite backends — confirm.
        """
        with self.session() as s:
            rows = s.execute(
                select(
                    Item.item_id,
                    func.IFNULL(func.avg(StationItem.demand_price), 0),
                )
                .select_from(Item.__table__.outerjoin(
                    StationItem, (Item.item_id == StationItem.item_id) & (StationItem.demand_price > 0)
                ))
                .where(StationItem.demand_price > 0)
                .group_by(Item.item_id)
            )
            return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}
@@ -0,0 +1,107 @@
1
+ from __future__ import annotations
2
+ import configparser
3
+ from pathlib import Path
4
+ from typing import Any, Dict
5
+
6
# Shipped configuration defaults; db_config.ini values override these
# section-by-section in load_config().
DEFAULTS: Dict[str, Any] = {
    # Backend selector: 'sqlite' (zero-config default) or a server backend.
    "database": {"backend": "sqlite"},
    # MariaDB/MySQL connection settings, used when backend != 'sqlite'.
    "mariadb": {
        "host": "127.0.0.1",
        "port": 3306,
        "user": "",
        "password": "",
        "name": "tradedangerous",
        "driver": "mariadbconnector",  # or 'pymysql'
        "charset": "utf8mb4",
    },
    "sqlite": {"sqlite_filename": "TradeDangerous.db"},
    "paths": {"data_dir": "./data", "tmp_dir": "./tmp"},
    # Engine/pool knobs; string values read from INI are coerced to
    # int/bool by _coerce_types().
    "engine": {
        "pool_size": 10,
        "max_overflow": 20,
        "pool_timeout": 30,
        "pool_recycle": 1800,
        "isolation_level": "READ COMMITTED",
        "echo": False,
        "connect_timeout": 10,
    },
}
# --- Runtime default path correction ----------------------------------------
# Convert relative defaults ("./data", "./tmp") into absolute paths under the
# current working directory. This prevents first-run installs from resolving
# relative to the package install directory or venv when no db_config.ini exists.
# NOTE(review): the CWD is captured once at import time, so later chdir()
# calls do not move these defaults — confirm that is intended.
try:
    _cwd = Path.cwd()
    DEFAULTS["paths"]["data_dir"] = str((_cwd / "data").resolve())
    DEFAULTS["paths"]["tmp_dir"] = str((_cwd / "tmp").resolve())
except Exception:
    # Best effort; fall back to shipped defaults if CWD is inaccessible
    pass
# ---------------------------------------------------------------------------

# Hardened parser: allow inline comments and disable interpolation
CFG_KW = {"inline_comment_prefixes": (";", "#"), "interpolation": None}
44
+
45
def _parse_bool(s: str) -> bool:
    """Interpret common truthy spellings ('1', 'true', 'yes', 'on'); anything else is False."""
    normalised = str(s).strip().lower()
    return normalised in ("1", "true", "yes", "on")
47
+
48
def _as_int(s: str, default: int | None = None) -> int | None:
    """Best-effort integer conversion; returns *default* when conversion fails."""
    try:
        value = int(str(s).strip())
    except (TypeError, ValueError):
        value = default
    return value
53
+
54
def _coerce_types(d: Dict[str, Any]) -> Dict[str, Any]:
    """
    Coerce string config values (as read by configparser) into their
    proper types, mutating *d* in place and returning it.
    """
    engine_cfg = d.get("engine", {})
    if "echo" in engine_cfg:
        echo = engine_cfg["echo"]
        engine_cfg["echo"] = _parse_bool(echo) if isinstance(echo, str) else bool(echo)
    for key in ("pool_size", "max_overflow", "pool_timeout", "pool_recycle", "connect_timeout"):
        if key in engine_cfg:
            engine_cfg[key] = _as_int(engine_cfg[key], DEFAULTS["engine"][key])
    if "mariadb" in d and "port" in d["mariadb"]:
        d["mariadb"]["port"] = _as_int(d["mariadb"]["port"], DEFAULTS["mariadb"]["port"])
    return d
64
+
65
def load_config(path: str | Path | None = None) -> Dict[str, Any]:
    """Load configuration as a dict with typed values.

    Search order:
      1) explicit *path* if provided (used only when it exists)
      2) TD_DB_CONFIG env (if file exists)
      3) ./db_config.ini (cwd)
      4) in-code DEFAULTS
    """
    cfg_path: Path | None = None

    if path is not None:
        candidate = Path(path)
        if candidate.exists():
            cfg_path = candidate
    else:
        # No explicit path: prefer the environment-resolved location
        # when it points at an existing file.
        try:
            from .paths import resolve_db_config_path
            env_candidate = resolve_db_config_path()
            if env_candidate.exists():
                cfg_path = env_candidate
        except Exception:
            # If anything goes wrong resolving the env, fall through
            # to the CWD/default fallbacks below.
            pass

    # Fall back to a config file in the current working directory.
    if cfg_path is None:
        local = Path.cwd() / "db_config.ini"
        if local.exists():
            cfg_path = local

    # Start from a per-section copy of DEFAULTS so callers cannot
    # mutate the module-level constant.
    result: Dict[str, Any] = {
        section: (values.copy() if isinstance(values, dict) else values)
        for section, values in DEFAULTS.items()
    }

    if cfg_path:
        parser = configparser.ConfigParser(**CFG_KW)
        with cfg_path.open("r", encoding="utf-8") as fh:
            parser.read_file(fh)
        # Overlay file values on top of the defaults, section by section.
        for section in parser.sections():
            target = result.setdefault(section, {})
            for key, val in parser.items(section):
                target[key] = val

    return _coerce_types(result)