tradedangerous 12.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. py.typed +1 -0
  2. trade.py +49 -0
  3. tradedangerous/__init__.py +43 -0
  4. tradedangerous/cache.py +1381 -0
  5. tradedangerous/cli.py +136 -0
  6. tradedangerous/commands/TEMPLATE.py +74 -0
  7. tradedangerous/commands/__init__.py +244 -0
  8. tradedangerous/commands/buildcache_cmd.py +102 -0
  9. tradedangerous/commands/buy_cmd.py +427 -0
  10. tradedangerous/commands/commandenv.py +372 -0
  11. tradedangerous/commands/exceptions.py +94 -0
  12. tradedangerous/commands/export_cmd.py +150 -0
  13. tradedangerous/commands/import_cmd.py +222 -0
  14. tradedangerous/commands/local_cmd.py +243 -0
  15. tradedangerous/commands/market_cmd.py +207 -0
  16. tradedangerous/commands/nav_cmd.py +252 -0
  17. tradedangerous/commands/olddata_cmd.py +270 -0
  18. tradedangerous/commands/parsing.py +221 -0
  19. tradedangerous/commands/rares_cmd.py +298 -0
  20. tradedangerous/commands/run_cmd.py +1521 -0
  21. tradedangerous/commands/sell_cmd.py +262 -0
  22. tradedangerous/commands/shipvendor_cmd.py +60 -0
  23. tradedangerous/commands/station_cmd.py +68 -0
  24. tradedangerous/commands/trade_cmd.py +181 -0
  25. tradedangerous/commands/update_cmd.py +67 -0
  26. tradedangerous/corrections.py +55 -0
  27. tradedangerous/csvexport.py +234 -0
  28. tradedangerous/db/__init__.py +27 -0
  29. tradedangerous/db/adapter.py +192 -0
  30. tradedangerous/db/config.py +107 -0
  31. tradedangerous/db/engine.py +259 -0
  32. tradedangerous/db/lifecycle.py +332 -0
  33. tradedangerous/db/locks.py +208 -0
  34. tradedangerous/db/orm_models.py +500 -0
  35. tradedangerous/db/paths.py +113 -0
  36. tradedangerous/db/utils.py +661 -0
  37. tradedangerous/edscupdate.py +565 -0
  38. tradedangerous/edsmupdate.py +474 -0
  39. tradedangerous/formatting.py +210 -0
  40. tradedangerous/fs.py +156 -0
  41. tradedangerous/gui.py +1146 -0
  42. tradedangerous/mapping.py +133 -0
  43. tradedangerous/mfd/__init__.py +103 -0
  44. tradedangerous/mfd/saitek/__init__.py +3 -0
  45. tradedangerous/mfd/saitek/directoutput.py +678 -0
  46. tradedangerous/mfd/saitek/x52pro.py +195 -0
  47. tradedangerous/misc/checkpricebounds.py +287 -0
  48. tradedangerous/misc/clipboard.py +49 -0
  49. tradedangerous/misc/coord64.py +83 -0
  50. tradedangerous/misc/csvdialect.py +57 -0
  51. tradedangerous/misc/derp-sentinel.py +35 -0
  52. tradedangerous/misc/diff-system-csvs.py +159 -0
  53. tradedangerous/misc/eddb.py +81 -0
  54. tradedangerous/misc/eddn.py +349 -0
  55. tradedangerous/misc/edsc.py +437 -0
  56. tradedangerous/misc/edsm.py +121 -0
  57. tradedangerous/misc/importeddbstats.py +54 -0
  58. tradedangerous/misc/prices-json-exp.py +179 -0
  59. tradedangerous/misc/progress.py +194 -0
  60. tradedangerous/plugins/__init__.py +249 -0
  61. tradedangerous/plugins/edcd_plug.py +371 -0
  62. tradedangerous/plugins/eddblink_plug.py +861 -0
  63. tradedangerous/plugins/edmc_batch_plug.py +133 -0
  64. tradedangerous/plugins/spansh_plug.py +2647 -0
  65. tradedangerous/prices.py +211 -0
  66. tradedangerous/submit-distances.py +422 -0
  67. tradedangerous/templates/Added.csv +37 -0
  68. tradedangerous/templates/Category.csv +17 -0
  69. tradedangerous/templates/RareItem.csv +143 -0
  70. tradedangerous/templates/TradeDangerous.sql +338 -0
  71. tradedangerous/tools.py +40 -0
  72. tradedangerous/tradecalc.py +1302 -0
  73. tradedangerous/tradedb.py +2320 -0
  74. tradedangerous/tradeenv.py +313 -0
  75. tradedangerous/tradeenv.pyi +109 -0
  76. tradedangerous/tradeexcept.py +131 -0
  77. tradedangerous/tradeorm.py +183 -0
  78. tradedangerous/transfers.py +192 -0
  79. tradedangerous/utils.py +243 -0
  80. tradedangerous/version.py +16 -0
  81. tradedangerous-12.7.6.dist-info/METADATA +106 -0
  82. tradedangerous-12.7.6.dist-info/RECORD +87 -0
  83. tradedangerous-12.7.6.dist-info/WHEEL +5 -0
  84. tradedangerous-12.7.6.dist-info/entry_points.txt +3 -0
  85. tradedangerous-12.7.6.dist-info/licenses/LICENSE +373 -0
  86. tradedangerous-12.7.6.dist-info/top_level.txt +2 -0
  87. tradegui.py +24 -0
@@ -0,0 +1,259 @@
1
+ # tradedangerous/db/engine.py
2
+ from __future__ import annotations
3
+ from pathlib import Path
4
+ from typing import Any, Dict, Mapping
5
+ import configparser
6
+ import os
7
+ import time
8
+
9
+ from sqlalchemy import create_engine, event, text
10
+ from sqlalchemy.engine import Engine, URL
11
+ from sqlalchemy.orm import sessionmaker, Session # type: ignore
12
+ from sqlalchemy.pool import NullPool
13
+ from sqlalchemy.exc import OperationalError
14
+
15
+ from .config import DEFAULTS, load_config
16
+ from .paths import resolve_data_dir, resolve_tmp_dir, resolve_db_config_path
17
+
18
+
19
+ # ---------- config normalization & helpers ----------
20
+
21
def _ensure_default_config_file(target_path: Path | None) -> Path | None:
    """
    Materialise a minimal db_config.ini at *target_path* from in-code DEFAULTS.

    Returns *target_path* when the file is present afterwards (pre-existing
    or freshly written), or None when no path was supplied.
    """
    if not target_path:
        return None
    if target_path.exists():
        return target_path

    # First run: synthesize the file from the in-code defaults.
    target_path.parent.mkdir(parents=True, exist_ok=True)
    parser = configparser.ConfigParser()
    for section, values in DEFAULTS.items():
        if isinstance(values, Mapping):
            parser[section] = {key: str(val) for key, val in values.items()}
        else:
            # Non-mapping defaults produce an empty section.
            parser[section] = {}
    with target_path.open("w", encoding="utf-8") as out:
        parser.write(out)
    return target_path
41
+
42
+
43
def _cfg_to_dict(cfg: configparser.ConfigParser | Mapping[str, Any] | str | os.PathLike) -> Dict[str, Dict[str, Any]]:
    """
    Normalise any accepted configuration input into {section: {key: value}}.

    Accepted inputs:
      * str / os.PathLike — read the INI file when it exists; otherwise
        delegate to load_config() so the documented resolution order
        (ENV -> CWD -> DEFAULTS) is honoured instead of raising.
      * ConfigParser — flattened to nested dicts; each section is overlaid
        on the parser's DEFAULT section, and the well-known sections are
        guaranteed to be present in the result.
      * Mapping — shallow-copied into plain dicts.
    """
    if isinstance(cfg, (str, os.PathLike)):
        path = Path(cfg)
        if not path.exists():
            # Missing explicit path: fall back to the canonical loader.
            return load_config(None)
        parser = configparser.ConfigParser()
        with path.open("r", encoding="utf-8") as fh:
            parser.read_file(fh)
        return _cfg_to_dict(parser)

    if isinstance(cfg, configparser.ConfigParser):
        base = dict(cfg.defaults())
        result: Dict[str, Dict[str, Any]] = {
            name: {**base, **dict(cfg.items(name))} for name in cfg.sections()
        }
        # Guarantee all well-known sections exist (DEFAULT section as seed).
        for name in ("database", "engine", "sqlite", "mariadb", "paths"):
            result.setdefault(name, dict(base))
        return result

    # Plain mapping of sections; non-mapping values collapse to empty dicts.
    return {key: dict(val) if isinstance(val, Mapping) else {} for key, val in cfg.items()}  # type: ignore[arg-type]
79
+
80
+
81
def _get(cfg: Dict[str, Any], section: str, key: str, default=None):
    """Fetch cfg[section][key], falling back to the [database] section, then *default*."""
    for candidate in (section, "database"):
        block = cfg.get(candidate)
        if block is not None and key in block:
            return block[key]
    return default
87
+
88
+
89
def _get_int(cfg: Dict[str, Any], section: str, key: str, default=None):
    """Like _get(), but coerce the value to int; return *default* on failure."""
    raw = _get(cfg, section, key, default)
    try:
        return int(raw)
    except (TypeError, ValueError):
        # Missing value (None) or non-numeric string.
        return default
94
+
95
+
96
def _get_bool(cfg: Dict[str, Any], section: str, key: str, default=None):
    """
    Like _get(), but interpret the value as a boolean.

    Accepts real bools, numeric values (so dict-based configs carrying 1/0
    work — previously these fell through to *default*), and the usual truthy
    strings ("1", "true", "yes", "on", case-insensitive). Anything else
    yields *default*.
    """
    value = _get(cfg, section, key, default)
    if isinstance(value, bool):
        return value
    if isinstance(value, (int, float)):
        # BUGFIX: numeric config values were previously ignored entirely.
        return bool(value)
    if isinstance(value, str):
        return value.strip().lower() in {"1", "true", "yes", "on"}
    return default
103
+
104
+
105
+ # ---------- URL builders ----------
106
+
107
def _redact(url: str) -> str:
    """Return *url* with any password portion replaced by '***' (log-safe)."""
    if "://" not in url:
        return url
    scheme, _, remainder = url.partition("://")
    userinfo, sep, hostpart = remainder.partition("@")
    if sep and ":" in userinfo:
        username = userinfo.partition(":")[0]
        return f"{scheme}://{username}:***@{hostpart}"
    # No credentials embedded: reassemble unchanged.
    return f"{scheme}://{remainder}"
116
+
117
def _make_mariadb_url(cfg: Dict[str, Any]) -> URL:
    """
    Build a SQLAlchemy URL for MariaDB/MySQL from the [mariadb] section.

    driver == "mariadbconnector" selects the mariadb dialect; any other
    driver string is routed through the mysql dialect (e.g. pymysql).
    """
    driver = str(_get(cfg, "mariadb", "driver", "mariadbconnector")).strip().lower()
    drivername = "mariadb+" + driver if driver == "mariadbconnector" else "mysql+" + driver
    # BUGFIX: use _get_int so a malformed port value falls back to 3306
    # instead of raising ValueError during engine construction, consistent
    # with how the other numeric [engine] options are handled.
    port = _get_int(cfg, "mariadb", "port", 3306) or 3306
    return URL.create(
        drivername=drivername,
        username=str(_get(cfg, "mariadb", "user", "")),
        password=str(_get(cfg, "mariadb", "password", "")),
        host=str(_get(cfg, "mariadb", "host", "127.0.0.1")),
        port=port,
        database=str(_get(cfg, "mariadb", "name", "tradedangerous")),
        query={"charset": str(_get(cfg, "mariadb", "charset", "utf8mb4"))},
    )
129
+
130
+
131
def _make_sqlite_url(cfg: Dict[str, Any]) -> str:
    """Build the sqlite+pysqlite URL for the configured data directory."""
    # Honour the legacy filename unless [sqlite].sqlite_filename overrides it.
    filename = str(_get(cfg, "sqlite", "sqlite_filename", "TradeDangerous.db"))
    full_path = (resolve_data_dir(cfg) / filename).resolve()
    return f"sqlite+pysqlite:///{full_path.as_posix()}"
137
+
138
+
139
+ # ---------- Engine construction ----------
140
+
141
def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str, Any] | str | os.PathLike | None = None) -> Engine:
    """
    Build a SQLAlchemy Engine for either MariaDB or SQLite.

    Accepts: ConfigParser, dict-like {section:{k:v}}, path to INI file, or None.

    First-run behaviour:
        - If a path is provided but missing, or if no path is provided and no
          config is found, a default db_config.ini is CREATED in the resolved
          default location (CWD unless TD_DB_CONFIG points elsewhere), then loaded.

    Raises:
        ValueError: when [database].backend is neither "mariadb" nor "sqlite".
    """
    ini_target: Path | None = None

    # If caller gave a specific path, prefer to materialise a default file there.
    if isinstance(cfg_or_path, (str, os.PathLike)):
        ini_target = Path(cfg_or_path)
        _ensure_default_config_file(ini_target)
    else:
        # No specific path: create (if missing) at the standard location
        # (CWD/db_config.ini by default, or the file pointed to by TD_DB_CONFIG).
        # NOTE(review): this branch also runs when a ConfigParser/dict was
        # passed in, so a default ini may be written even though it is not
        # read below — confirm this side effect is intended.
        ini_target = resolve_db_config_path("db_config.ini")
        _ensure_default_config_file(ini_target)

    cfg = _cfg_to_dict(cfg_or_path if cfg_or_path is not None else str(ini_target))

    # Ensure dirs exist (used by various parts of the app); return values unused.
    _ = resolve_data_dir(cfg)
    _ = resolve_tmp_dir(cfg)

    backend = str(_get(cfg, "database", "backend", "sqlite")).strip().lower()
    echo = bool(_get_bool(cfg, "engine", "echo", False))
    isolation = _get(cfg, "engine", "isolation_level", None)

    if backend == "mariadb":
        url = _make_mariadb_url(cfg)
        # `or N` also restores the default when the config held 0/None.
        connect_timeout = _get_int(cfg, "engine", "connect_timeout", 10) or 10
        pool_size = _get_int(cfg, "engine", "pool_size", 10) or 10
        max_overflow = _get_int(cfg, "engine", "max_overflow", 20) or 20
        pool_timeout = _get_int(cfg, "engine", "pool_timeout", 30) or 30
        pool_recycle = _get_int(cfg, "engine", "pool_recycle", 1800) or 1800
        engine = create_engine(
            url,
            echo=echo,
            pool_pre_ping=True,  # validate pooled connections before use
            pool_size=pool_size,
            max_overflow=max_overflow,
            pool_timeout=pool_timeout,
            pool_recycle=pool_recycle,
            isolation_level=isolation or "READ COMMITTED",
            connect_args={"connect_timeout": connect_timeout},
        )
    elif backend == "sqlite":
        url = _make_sqlite_url(cfg)
        engine = create_engine(
            url,
            echo=echo,
            poolclass=NullPool,  # no pooling for sqlite file databases
            connect_args={"check_same_thread": False},
        )

        # Applied to every new DB-API connection this engine opens.
        @event.listens_for(engine, "connect")
        def _set_sqlite_pragmas(dbapi_conn, _):
            cur = dbapi_conn.cursor()
            cur.execute("PRAGMA foreign_keys=ON")
            cur.execute("PRAGMA synchronous=OFF")
            cur.execute("PRAGMA temp_store=MEMORY")
            cur.execute("PRAGMA auto_vacuum=INCREMENTAL")
            cur.close()
    else:
        raise ValueError(f"Unsupported backend: {backend}")

    # Stash a password-redacted URL on the engine for logging/diagnostics;
    # best-effort only, never fatal.
    try:
        engine._td_redacted_url = _redact(str(url))  # type: ignore[attr-defined]
    except Exception:
        pass
    return engine
216
+
217
+
218
+ # ---------- Session factory ----------
219
+
220
def get_session_factory(engine: Engine) -> sessionmaker[Session]:
    """Return a sessionmaker bound to *engine* (autoflush on, no expiry on commit)."""
    factory: sessionmaker[Session] = sessionmaker(
        bind=engine,
        expire_on_commit=False,
        autoflush=True,
    )
    return factory
222
+
223
+
224
+ # ---------- Health helpers ----------
225
+
226
def healthcheck(engine: Engine, retries: int = 0) -> bool:
    """
    Probe the database with SELECT 1.

    On OperationalError, retries up to *retries* additional times with
    exponential backoff starting at 0.25s. Returns True on the first
    successful probe, False once all attempts are exhausted.
    """
    delay = 0.25
    for attempt in range(retries + 1):
        try:
            with engine.connect() as conn:
                conn.execute(text("SELECT 1"))
            return True
        except OperationalError:
            if attempt == retries:
                return False
            time.sleep(delay)
            delay *= 2
    return False
240
+
241
+
242
def read_sqlite_pragmas(engine: Engine) -> Dict[str, Any]:
    """
    Return the active PRAGMA values for a SQLite engine.

    For any non-sqlite dialect this is a safe no-op returning an empty dict.
    """
    pragmas: Dict[str, Any] = {}
    with engine.connect() as conn:
        if conn.dialect.name != "sqlite":
            return pragmas
        for name in ("foreign_keys", "synchronous", "temp_store", "auto_vacuum"):
            pragmas[name] = conn.execute(text(f"PRAGMA {name}")).scalar()
    return pragmas
@@ -0,0 +1,332 @@
1
+ # tradedangerous/db/lifecycle.py
2
+ from __future__ import annotations
3
+
4
+ from pathlib import Path
5
+ from typing import Iterable, List, Tuple, Optional, Dict
6
+
7
+ from sqlalchemy import inspect, text
8
+ from sqlalchemy.engine import Engine
9
+ from sqlalchemy.schema import MetaData
10
+
11
+
12
+ # --------------------------------------------------------------------
13
+ # Utilities
14
+ # --------------------------------------------------------------------
15
+
16
def is_sqlite(engine: Engine) -> bool:
    """True when *engine* speaks the SQLite dialect."""
    dialect_name = engine.dialect.name
    return dialect_name == "sqlite"
19
+
20
+
21
def _user_tables(engine: Engine) -> Iterable[str]:
    """List user (non-internal) tables present in the current database."""
    table_names = inspect(engine).get_table_names()
    if not is_sqlite(engine):
        return table_names
    # SQLite exposes internal bookkeeping tables prefixed "sqlite_"; hide them.
    return [name for name in table_names if not name.startswith("sqlite_")]
28
+
29
+
30
def is_empty(engine: Engine) -> bool:
    """True when the database holds no user tables at all."""
    remaining = list(_user_tables(engine))
    return not remaining
33
+
34
+
35
+ # --------------------------------------------------------------------
36
+ # (Re)creation helpers — prefer explicit paths; discovery is fallback
37
+ # --------------------------------------------------------------------
38
+
39
def _read_sql_file(sql_path: Path) -> str:
    """
    Load the authoritative SQLite schema from *sql_path* (UTF-8).

    Raises FileNotFoundError when the file is absent.
    """
    if sql_path.exists():
        return sql_path.read_text(encoding="utf-8")
    raise FileNotFoundError(f"SQLite schema file not found: {sql_path}")
44
+
45
+
46
def _read_legacy_sql() -> str:
    """
    Fallback discovery of the legacy schema SQL when no explicit path was given.

    Searches the package templates dir, then CWD-relative locations;
    raises FileNotFoundError when none of them holds the file.
    """
    package_root = Path(__file__).resolve().parents[1]
    search_order = (
        package_root / "templates" / "TradeDangerous.sql",
        Path.cwd() / "tradedangerous" / "templates" / "TradeDangerous.sql",
        Path.cwd() / "TradeDangerous.sql",
    )
    for candidate in search_order:
        if candidate.exists():
            return candidate.read_text(encoding="utf-8")
    raise FileNotFoundError("TradeDangerous.sql not found in expected locations.")
57
+
58
+
59
def _execute_sql_script(engine: Engine, script: str) -> None:
    """Execute a multi-statement SQL script using sqlite3's executescript()."""
    with engine.begin() as conn:
        raw_conn = conn.connection  # DB-API connection (sqlite3.Connection)
        # NOTE(review): relies on the pool's connection proxy forwarding
        # executescript() to the underlying sqlite3 connection; sqlite-only —
        # confirm behaviour if this is ever called with another dialect.
        raw_conn.executescript(script)
64
+
65
+
66
def _create_sqlite_from_legacy(engine: Engine, sql_path: Optional[Path] = None) -> None:
    """Create the SQLite schema, preferring an explicit SQL file over discovery."""
    script = _read_sql_file(sql_path) if sql_path is not None else _read_legacy_sql()
    _execute_sql_script(engine, script)
73
+
74
+
75
+ # --------------------------------------------------------------------
76
+ # Public resets
77
+ # --------------------------------------------------------------------
78
+
79
def reset_sqlite(engine: Engine, db_path: Path, sql_path: Optional[Path] = None) -> None:
    """
    Reset the SQLite schema by rotating the DB file and recreating from legacy SQL.

    Steps:
      1) Dispose the SQLAlchemy engine to release pooled sqlite file handles.
      2) Rotate the on-disk database file to a .old sibling (idempotent; cross-device safe).
      3) Ensure the target directory exists.
      4) Recreate the schema using the provided canonical SQL file (or fallback discovery).

    Notes:
      - Rotation naming preserves the historic convention:
        TradeDangerous.db -> TradeDangerous.old
      - If no DB file exists, rotation is a no-op.
      - Steps 1-3 are deliberately best-effort: only step 4 may raise.
    """
    # 1) Release any open file handles held by the connection pool
    try:
        engine.dispose()
    except Exception:
        pass  # best-effort
    
    # 2) Rotate DB → .old (idempotent, cross-device safe)
    db_path = db_path.resolve()
    old_path = db_path.with_suffix(".old")
    try:
        if db_path.exists():
            try:
                # Drop any stale backup first so rename can succeed.
                if old_path.exists():
                    old_path.unlink()
            except Exception:
                # If removal of old backup fails, continue and let rename/copy raise if necessary
                pass
            
            try:
                db_path.rename(old_path)
            except OSError:
                # Cross-device or locked: copy then unlink
                import shutil
                shutil.copy2(db_path, old_path)
                try:
                    db_path.unlink()
                except Exception:
                    # If unlink fails, leave both; schema recreate will still run on db_path
                    pass
    except Exception:
        # Rotation shouldn't prevent schema recreation; continue
        pass
    
    # 3) Make sure parent directory exists
    try:
        db_path.parent.mkdir(parents=True, exist_ok=True)
    except Exception:
        pass
    
    # 4) Recreate schema from canonical SQL — the only step allowed to raise.
    _create_sqlite_from_legacy(engine, sql_path=sql_path)
135
+
136
def reset_mariadb(engine: Engine, metadata: MetaData) -> None:
    """
    Drop all tables and recreate using ORM metadata (MariaDB/MySQL),
    with FOREIGN_KEY_CHECKS disabled during the operation.

    This avoids FK-ordering issues and makes resets deterministic.

    NOTE(review): MySQL/MariaDB DDL statements auto-commit, so the
    surrounding transaction does not make the drop/create atomic —
    confirm that a partially reset schema on failure is acceptable.
    """
    # Use a transactional connection for the whole reset
    with engine.begin() as conn:
        # Disable FK checks for the duration of drop/create
        conn.execute(text("SET FOREIGN_KEY_CHECKS=0"))
        try:
            metadata.drop_all(bind=conn)
            metadata.create_all(bind=conn)
        finally:
            # Always restore FK checks, even if drop/create raised
            conn.execute(text("SET FOREIGN_KEY_CHECKS=1"))
153
+
154
+
155
+
156
+ # --------------------------------------------------------------------
157
+ # Unified reset entry point (dialect hidden from callers)
158
+ # --------------------------------------------------------------------
159
+
160
def reset_db(engine: Engine, *, db_path: Path, sql_path: Optional[Path] = None) -> str:
    """
    Reset the database schema for *engine* in a dialect-appropriate way.

    SQLite: rotate the on-disk file and recreate from the canonical SQL
    (caller MUST pass `db_path`, SHOULD pass `sql_path`). MySQL/MariaDB:
    drop and recreate from ORM metadata. Any other dialect raises
    RuntimeError.

    Returns a short action string for logs/tests.
    """
    dialect = engine.dialect.name.lower()

    if dialect in ("mysql", "mariadb"):
        # Resolve ORM metadata here so call sites stay dialect-agnostic.
        from tradedangerous.db import orm_models
        reset_mariadb(engine, orm_models.Base.metadata)
        return f"{dialect}:reset"

    if dialect == "sqlite":
        reset_sqlite(engine, db_path=db_path, sql_path=sql_path)
        return "sqlite:rotated+recreated"

    raise RuntimeError(f"Unsupported database backend: {engine.dialect.name}")
182
+
183
+
184
+ # --------------------------------------------------------------------
185
+ # Sanity checks (seconds-only, no deep I/O)
186
+ # --------------------------------------------------------------------
187
+
188
# NOTE: 'Added' removed from core set (being deprecated)
# Tables whose presence (with primary keys) defines a structurally sane DB;
# consumed by _core_tables_and_pks_ok() below.
_CORE_TABLES: Tuple[str, ...] = (
    "System",
    "Station",
    "Category",
    "Item",
    "StationItem",
)
196
+
197
def _core_tables_and_pks_ok(engine: Engine) -> Tuple[bool, List[str]]:
    """
    T1 + T2 sanity: every core table exists and carries a primary key.

    Returns (ok, problems) — problems is empty when ok is True.
    """
    inspector = inspect(engine)
    present = set(inspector.get_table_names())

    absent = [name for name in _CORE_TABLES if name not in present]
    if absent:
        # Missing tables make the PK check pointless; bail out early.
        return False, [f"missing tables: {', '.join(absent)}"]

    issues: List[str] = []
    for name in _CORE_TABLES:
        constraint = inspector.get_pk_constraint(name) or {}
        if not (constraint.get("constrained_columns") or []):
            issues.append(f"missing primary key on {name}")

    return not issues, issues
218
+
219
+
220
def _seed_counts_ok(engine: Engine) -> Tuple[bool, List[str]]:
    """
    T4 sanity: minimal seed/anchor rows must exist.
      - Category > 0
      - System > 0
    Returns (ok, problems).
    """
    issues: List[str] = []
    with engine.connect() as conn:
        for table in ("Category", "System"):
            # Table names are hard-coded above, never user input, so the
            # f-string SQL here is injection-safe.
            rows = conn.execute(text(f"SELECT COUNT(*) FROM {table}")).scalar() or 0
            if rows <= 0:
                issues.append(f"{table} is empty")

    return not issues, issues
234
+
235
+
236
def _connectivity_ok(engine: Engine) -> bool:
    """T0: cheap connectivity probe — can we execute SELECT 1 at all?"""
    try:
        with engine.connect() as conn:
            conn.execute(text("SELECT 1"))
    except Exception:
        return False
    return True
244
+
245
+
246
+ # --------------------------------------------------------------------
247
+ # Orchestration (policy) — may call buildCache
248
+ # --------------------------------------------------------------------
249
+
250
def ensure_fresh_db(
    backend: str,
    engine: Engine,
    data_dir: Path,
    metadata: MetaData | None,
    mode: str = "auto",
    *,
    tdb=None,
    tdenv=None,
    rebuild: bool = True,
) -> Dict[str, str]:
    """
    Ensure a *sane, populated* database exists (seconds-only checks).

    Checks:
      - T0: connectivity
      - T1/T2: core tables exist and have PKs
      - T4: seed rows exist in Category and System

    Actions:
      - mode == "force" -> rebuild via buildCache(...) (if rebuild=True)
      - mode == "auto" and not sane -> rebuild via buildCache(...) (if rebuild=True)
      - not sane and rebuild == False -> action = "needs_rebuild" (NEVER rebuild)
      - sane and mode != "force" -> action = "kept"

    Returns a summary dict including:
      - backend, mode, action, sane (Y/N), and optional reason.

    NOTE:
      - When a rebuild is required but rebuild=True and (tdb/tdenv) are missing,
        a ValueError is raised (preserves current semantics).
      - When rebuild=False, the function NEVER calls buildCache and never raises
        for missing tdb/tdenv. It simply reports the status.
      - `data_dir` and `metadata` are accepted but not referenced in this body;
        NOTE(review): presumably kept for interface compatibility — confirm.
    """
    summary: Dict[str, str] = {
        "backend": (backend or engine.dialect.name).lower(),
        "mode": mode,
        "action": "kept",
        "sane": "Y",
    }
    
    # T0: cheap connectivity — failure escalates "auto" to "force" so the
    # rebuild path below is taken.
    if not _connectivity_ok(engine):
        summary["reason"] = "connectivity-failed"
        summary["sane"] = "N"
        if mode == "auto":
            mode = "force"
    
    # T1+T2: structure; T4: seeds — only probed when connectivity passed.
    if summary["sane"] == "Y":
        structure_ok, struct_problems = _core_tables_and_pks_ok(engine)
        if not structure_ok:
            summary["sane"] = "N"
            summary["reason"] = "; ".join(struct_problems) or "structure-invalid"
        else:
            seeds_ok, seed_problems = _seed_counts_ok(engine)
            if not seeds_ok:
                summary["sane"] = "N"
                reason = "; ".join(seed_problems) or "seeds-missing"
                # Append to any earlier reason, trimming separator artifacts.
                summary["reason"] = f"{summary.get('reason','')}; {reason}".strip("; ").strip()
    
    sane = (summary["sane"] == "Y")
    must_rebuild = (mode == "force") or (not sane)
    
    # If nothing to do, return immediately.
    if not must_rebuild:
        summary["action"] = "kept"
        return summary
    
    # Caller explicitly requested no rebuild: report and exit.
    if not rebuild:
        summary["action"] = "needs_rebuild"
        return summary
    
    # From here on, behavior matches the original: rebuild via buildCache.
    if tdb is None or tdenv is None:
        raise ValueError("ensure_fresh_db needs `tdb` and `tdenv` to rebuild via buildCache")
    
    # Imported lazily to avoid a circular import at module load time.
    from tradedangerous.cache import buildCache
    
    buildCache(tdb, tdenv)
    summary["action"] = "rebuilt"
    return summary