tradedangerous-12.0.5-py3-none-any.whl → tradedangerous-12.0.7-py3-none-any.whl

This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in those public registries.

Files changed (30)
  1. tradedangerous/cache.py +135 -133
  2. tradedangerous/commands/buildcache_cmd.py +7 -7
  3. tradedangerous/commands/buy_cmd.py +4 -4
  4. tradedangerous/commands/export_cmd.py +11 -11
  5. tradedangerous/commands/import_cmd.py +12 -12
  6. tradedangerous/commands/market_cmd.py +17 -17
  7. tradedangerous/commands/olddata_cmd.py +18 -18
  8. tradedangerous/commands/rares_cmd.py +30 -30
  9. tradedangerous/commands/run_cmd.py +21 -21
  10. tradedangerous/commands/sell_cmd.py +5 -5
  11. tradedangerous/corrections.py +1 -1
  12. tradedangerous/csvexport.py +20 -20
  13. tradedangerous/db/adapter.py +9 -9
  14. tradedangerous/db/config.py +4 -4
  15. tradedangerous/db/engine.py +12 -12
  16. tradedangerous/db/lifecycle.py +28 -28
  17. tradedangerous/db/orm_models.py +42 -42
  18. tradedangerous/db/paths.py +3 -3
  19. tradedangerous/plugins/eddblink_plug.py +108 -253
  20. tradedangerous/plugins/spansh_plug.py +254 -254
  21. tradedangerous/prices.py +21 -21
  22. tradedangerous/tradedb.py +85 -85
  23. tradedangerous/tradeenv.py +2 -2
  24. tradedangerous/version.py +1 -1
  25. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/METADATA +1 -1
  26. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/RECORD +30 -30
  27. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/WHEEL +0 -0
  28. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/entry_points.txt +0 -0
  29. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/licenses/LICENSE +0 -0
  30. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/top_level.txt +0 -0
tradedangerous/commands/run_cmd.py
@@ -386,9 +386,9 @@ def expandForJumps(tdb, cmdenv, calc, origin, jumps, srcName, purpose):
  Find all the stations you could reach if you made a given
  number of jumps away from the origin list.
  """
-
+
  assert jumps
-
+
  maxLyPer = cmdenv.emptyLyPer or cmdenv.maxLyPer
  avoidPlaces = cmdenv.avoidPlaces
  cmdenv.DEBUG0(
@@ -398,7 +398,7 @@ def expandForJumps(tdb, cmdenv, calc, origin, jumps, srcName, purpose):
  jumps,
  maxLyPer,
  )
-
+
  # Preserve original behavior: --to uses stationsSelling, --from uses stationsBuying
  if srcName == "--to":
  tradingList = calc.stationsSelling
@@ -406,13 +406,13 @@ def expandForJumps(tdb, cmdenv, calc, origin, jumps, srcName, purpose):
  tradingList = calc.stationsBuying
  else:
  raise Exception("Unknown src")
-
+
  # Ensure O(1) membership checks regardless of the underlying container type.
  trading_ids = tradingList if isinstance(tradingList, set) else set(tradingList)
-
+
  stations = set()
  origins, avoid = set((origin,)), set(place for place in avoidPlaces)
-
+
  for jump in range(jumps):
  if not origins:
  break
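
The trading_ids line above normalizes whatever container calc.stationsSelling or calc.stationsBuying happens to be into a set, so every later membership test is an O(1) hash probe instead of an O(n) scan. A minimal sketch of the pattern, with invented station IDs:

    # Sketch of the membership-check normalization; the IDs are illustrative.
    def normalize_ids(trading_list):
        # Reuse the container if it is already a set; otherwise copy it into one.
        return trading_list if isinstance(trading_list, set) else set(trading_list)

    station_ids = [128, 512, 1024]        # could equally be a tuple or dict view
    trading_ids = normalize_ids(station_ids)

    assert 512 in trading_ids             # O(1) lookup
    assert 9000 not in trading_ids
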
@@ -449,14 +449,14 @@ def expandForJumps(tdb, cmdenv, calc, origin, jumps, srcName, purpose):
  for dest, dist in tdb.genSystemsInRange(system, maxLyPer):
  if dest not in avoid:
  origins.add(dest)
-
+
  if getattr(cmdenv, "debug", False):
  cmdenv.DEBUG0(
  "Expanded {} stations: {}",
  srcName,
  [stn.name() for stn in stations]
  )
-
+
  if not stations:
  if not cmdenv.emptyLyPer:
  extra = (
@@ -475,10 +475,10 @@ def expandForJumps(tdb, cmdenv, calc, origin, jumps, srcName, purpose):
  extra,
  )
  )
-
+
  stations = list(stations)
  stations.sort(key=lambda stn: stn.ID)
-
+
  return stations

  def checkForEmptyStationList(category, focusPlace, stationList, jumps):
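
Taken together, these hunks leave expandForJumps doing a breadth-first expansion: the origins frontier grows one jump per iteration via tdb.genSystemsInRange, avoided places are skipped, and the surviving stations are returned sorted by ID. A simplified, runnable sketch, assuming neighbours() stands in for genSystemsInRange (the real function also filters stations by trading eligibility):

    # Toy jump expansion: grow the reachable set one jump at a time.
    def expand(origin, jumps, neighbours, avoid=frozenset()):
        origins = {origin}
        for _ in range(jumps):
            if not origins:
                break
            for system in tuple(origins):      # snapshot: safe to add while looping
                for dest in neighbours(system):
                    if dest not in avoid:
                        origins.add(dest)
        return origins

    graph = {"Sol": ["Barnard's Star"], "Barnard's Star": ["Wolf 359"], "Wolf 359": []}
    print(sorted(expand("Sol", 2, lambda s: graph.get(s, ()))))
    # ["Barnard's Star", 'Sol', 'Wolf 359']
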
@@ -692,7 +692,7 @@ def filterStationSet(src, cmdenv, calc, stnList):
  def checkOrigins(tdb, cmdenv, calc):
  # Compute eligibility once: stations must both sell and buy (same as suitability with src=None).
  eligible_ids = set(calc.stationsSelling) & set(calc.stationsBuying)
-
+
  if cmdenv.origPlace:
  if cmdenv.startJumps and cmdenv.startJumps > 0:
  cmdenv.origins = expandForJumps(
@@ -732,12 +732,12 @@ def checkOrigins(tdb, cmdenv, calc):
  for station in tdb.stationByID.values()
  if station.ID in eligible_ids and checkStationSuitability(cmdenv, calc, station)
  )
-
+
  if not cmdenv.startJumps and isinstance(cmdenv.origPlace, System):
  cmdenv.origins = filterStationSet(
  '--from', cmdenv, calc, cmdenv.origins
  )
-
+
  cmdenv.origSystems = tuple(set(
  stn.system for stn in cmdenv.origins
  ))
@@ -749,7 +749,7 @@ def checkDestinations(tdb, cmdenv, calc):
  last_hb = 0.0
  spinner = ("|", "/", "-", "\\")
  spin_i = 0
-
+
  def heartbeat(seen, kept):
  nonlocal last_hb, spin_i
  if not showProgress:
@@ -764,7 +764,7 @@ def checkDestinations(tdb, cmdenv, calc):
  f"\r{s} Scanning stations… examined {seen:n} kept {kept:n}"
  )
  sys.stdout.flush()
-
+
  if cmdenv.destPlace:
  if cmdenv.endJumps and cmdenv.endJumps > 0:
  cmdenv.destinations = expandForJumps(
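
The heartbeat closure introduced in these hunks redraws a single status line with a spinner while stations are scanned. A self-contained sketch of the same throttled-redraw idea; the 0.5-second interval is an assumption, since the diff does not show the threshold:

    import sys, time

    _last_hb, _spin_i = 0.0, 0
    _SPINNER = ("|", "/", "-", "\\")

    def heartbeat(seen: int, kept: int) -> None:
        """Redraw a one-line status, at most a couple of times per second."""
        global _last_hb, _spin_i
        now = time.monotonic()
        if now - _last_hb < 0.5:      # throttle: skip if drawn very recently
            return
        _last_hb = now
        s = _SPINNER[_spin_i % len(_SPINNER)]
        _spin_i += 1
        sys.stdout.write(f"\r{s} Scanning stations… examined {seen:n} kept {kept:n}")
        sys.stdout.flush()

    for i in range(200_000):
        heartbeat(i, i // 3)
    sys.stdout.write("\n")
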
@@ -802,17 +802,17 @@ def checkDestinations(tdb, cmdenv, calc):
  if cmdenv.goalSystem:
  dest = tdb.lookupPlace(cmdenv.goalSystem)
  cmdenv.goalSystem = dest.system
-
+
  if cmdenv.origPlace and cmdenv.maxJumpsPer == 0:
  stationSrc = chain.from_iterable(
  system.stations for system in cmdenv.origSystems
  )
  else:
  stationSrc = tdb.stationByID.values()
-
+
  # Pre-filter by eligibility to skip obviously ineligible stations
  eligible_ids = set(calc.stationsSelling) & set(calc.stationsBuying)
-
+
  # Iterate with heartbeat
  dests, seen, kept = [], 0, 0
  for station in stationSrc:
@@ -824,12 +824,12 @@ def checkDestinations(tdb, cmdenv, calc):
  cmdenv.destinations = tuple(dests)
  if showProgress:
  sys.stdout.write("\n"); sys.stdout.flush()
-
+
  if not cmdenv.endJumps and isinstance(cmdenv.destPlace, System):
  cmdenv.destinations = filterStationSet(
  '--to', cmdenv, calc, cmdenv.destinations
  )
-
+
  cmdenv.destSystems = tuple(set(
  stn.system for stn in cmdenv.destinations
  ))
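
Both checkOrigins and checkDestinations now compute eligible_ids as the intersection of the selling and buying indexes, so a cheap set test culls stations before the heavier suitability check runs. Illustrated with made-up IDs and a stubbed suitability predicate:

    stations_selling = {101, 102, 103, 104}
    stations_buying = {103, 104, 105}
    eligible_ids = stations_selling & stations_buying    # {103, 104}

    def check_station_suitability(station_id):
        return True    # stand-in for checkStationSuitability(cmdenv, calc, station)

    dests = [sid for sid in (101, 103, 105, 104)
             if sid in eligible_ids and check_station_suitability(sid)]
    print(dests)    # [103, 104]
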
@@ -1189,7 +1189,7 @@ def run(results, cmdenv, tdb):

  if tdb.tradingCount == 0:
  raise NoDataError("Database does not contain any profitable trades.")
-
+
  # Always show a friendly heads-up before heavy work begins.
  print("Searching for quality trades. This may take a few minutes. Please be patient.", flush=True)

tradedangerous/commands/sell_cmd.py
@@ -89,7 +89,7 @@ def run(results, cmdenv, tdb: TradeDB):
  to avoid closed-cursor errors when iterating later.
  - Preserve all existing filters, sorting, and output fields.
  """
-
+
  if cmdenv.lt and cmdenv.gt:
  if cmdenv.lt <= cmdenv.gt:
  raise CommandLineError("--gt must be lower than --lt")
@@ -122,7 +122,7 @@ def run(results, cmdenv, tdb: TradeDB):
  columns = "si.station_id, si.demand_price, si.demand_units"
  where = ["si.item_id = :item_id", "si.demand_price > 0"]
  params = {"item_id": item.ID}
-
+
  if cmdenv.demand:
  where.append("si.demand_units >= :demand")
  params["demand"] = cmdenv.demand
@@ -132,14 +132,14 @@ def run(results, cmdenv, tdb: TradeDB):
  if cmdenv.gt:
  where.append("si.demand_price > :gt")
  params["gt"] = cmdenv.gt
-
+
  stmt = f"""
  SELECT DISTINCT {columns}
  FROM StationItem AS si
  WHERE {' AND '.join(where)}
  """
  cmdenv.DEBUG0('SQL: {} ; params={}', stmt, params)
-
+
  # Execute and eagerly fetch rows
  with tdb.engine.connect() as conn:
  cur_rows = conn.execute(text(stmt), params).fetchall()
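
The docstring change earlier in this file explains the point of fetchall() here: rows are materialised while the connection is still open, so iterating them later cannot hit a closed cursor. A runnable miniature of the same pattern against an in-memory SQLite database (the schema below only mimics StationItem):

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        conn.execute(text("CREATE TABLE StationItem "
                          "(station_id INT, item_id INT, demand_price INT, demand_units INT)"))
        conn.execute(text("INSERT INTO StationItem VALUES (1, 7, 1200, 50), (2, 7, 900, 10)"))

        stmt = text("""
            SELECT DISTINCT station_id, demand_price, demand_units
            FROM StationItem
            WHERE item_id = :item_id AND demand_price > 0 AND demand_units >= :demand
        """)
        # fetchall() eagerly copies every row out of the cursor.
        rows = conn.execute(stmt, {"item_id": 7, "demand": 20}).fetchall()

    print(rows)    # [(1, 1200, 50)]; safe to use after the connection has closed
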
@@ -151,7 +151,7 @@ def run(results, cmdenv, tdb: TradeDB):
  odyssey = cmdenv.odyssey
  wantNoPlanet = cmdenv.noPlanet
  wantBlackMarket = cmdenv.blackMarket
-
+
  # System-based search
  nearSystem = cmdenv.nearSystem
  if nearSystem:
tradedangerous/corrections.py
@@ -27,7 +27,7 @@ items = {
  'SALVAGEABLE WRECKAGE': 'Wreckage Components',
  'POLITICAL PRISONER': 'Political Prisoners',
  'HOSTAGE': 'Hostages',
- "VOID OPAL": "Void Opals",
+ "VOID OPALS": "Void Opal",
  }

  def correctSystem(oldName):
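
This is the only behavioural change in corrections.py: the mapping direction of the Void Opal fixup is reversed, so the plural form "VOID OPALS" now corrects to the singular "Void Opal". For context, a hedged sketch of how such a corrections table is typically consulted (the helper name correct_item is hypothetical):

    items = {
        'HOSTAGE': 'Hostages',
        'VOID OPALS': 'Void Opal',
    }

    def correct_item(old_name):
        # Look up the upper-cased legacy name; fall back to the name unchanged.
        return items.get(old_name.upper(), old_name)

    print(correct_item('Void Opals'))    # Void Opal
    print(correct_item('Gold'))          # Gold (no correction entry)
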
tradedangerous/csvexport.py
@@ -69,14 +69,14 @@ def buildFKeyStmt(session, tableName, key):
  """
  Resolve the FK constraint against the UNIQUE index of the
  referenced table.
-
+
  Multicolumn UNIQUEs are allowed, but only the last column
  may be treated as a single-column join target.
  """
  unqIndex = getUniqueIndex(session, key["table"])
  keyList = getFKeyList(session, key["table"])
  keyStmt = []
-
+
  for colName in unqIndex:
  # If this unique column is itself a foreign key, recurse
  keyKey = search_keyList(keyList, colName)
@@ -89,7 +89,7 @@ def buildFKeyStmt(session, tableName, key):
  "joinTable": key["table"],
  "joinColumn": key["to"],
  })
-
+
  return keyStmt


@@ -101,19 +101,19 @@ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
  """
  Generate the CSV file for tableName in csvPath.
  Returns (lineCount, exportPath).
-
+
  Behaviour:
  - Prefix unique columns with "unq:".
  - Foreign keys are exported as "<col>@<joinTable>.<uniqueCol>".
  - Datetime-like values for 'modified' columns are exported as
  "YYYY-MM-DD HH:MM:SS" (no microseconds).
-
+
  Compatible with either:
  * a SQLAlchemy Session
  * a TradeDB wrapper exposing .engine
  """
  from sqlalchemy.orm import Session
-
+
  # --- Resolve a SQLAlchemy session ---
  if hasattr(tdb_or_session, "engine"):
  # Likely a TradeDB instance
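
exportTableToFile now accepts either a live Session or a TradeDB-style wrapper, resolved by duck typing on the .engine attribute. A sketch of that resolution, with a stand-in class rather than the real TradeDB:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    def resolve_session(tdb_or_session):
        """Return (session, created); created means we own its lifetime."""
        if hasattr(tdb_or_session, "engine"):
            return Session(tdb_or_session.engine), True    # wrapper: open our own
        if isinstance(tdb_or_session, Session):
            return tdb_or_session, False                   # caller-owned session
        raise TypeError(f"Unsupported DB object: {type(tdb_or_session)}")

    class FakeTradeDB:
        engine = create_engine("sqlite://")

    session, created = resolve_session(FakeTradeDB())
    try:
        pass    # ... reads would happen here ...
    finally:
        if created:
            session.close()    # only close what we created, as the export code does
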
@@ -126,15 +126,15 @@ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
  raise TradeException(
  f"Unsupported DB object passed to exportTableToFile: {type(tdb_or_session)}"
  )
-
+
  csvPath = csvPath or Path(tdenv.csvDir)
  if not Path(csvPath).is_dir():
  raise TradeException(f"Save location '{csvPath}' not found.")
-
+
  uniquePfx = "unq:"
  exportPath = (Path(csvPath) / Path(tableName)).with_suffix(".csv")
  tdenv.DEBUG0(f"Export Table '{tableName}' to '{exportPath}'")
-
+
  def _fmt_ts(val):
  if hasattr(val, "strftime"):
  try:
@@ -148,7 +148,7 @@ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
  if len(s) >= 19 and s[4] == "-" and s[7] == "-" and s[10] == " " and s[13] == ":" and s[16] == ":":
  return s[:19]
  return val
-
+
  lineCount = 0
  with exportPath.open("w", encoding="utf-8", newline="\n") as exportFile:
  exportOut = csv.writer(
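
Piecing the _fmt_ts helper together from this hunk and the previous one: datetime-like values go through strftime, strings already shaped like "YYYY-MM-DD HH:MM:SS..." are truncated to 19 characters, and anything else passes through. A self-contained reproduction (the exact strftime format string is an assumption based on the docstring):

    from datetime import datetime

    def fmt_ts(val):
        if hasattr(val, "strftime"):
            return val.strftime("%Y-%m-%d %H:%M:%S")    # assumed format, per docstring
        s = str(val)
        if (len(s) >= 19 and s[4] == "-" and s[7] == "-"
                and s[10] == " " and s[13] == ":" and s[16] == ":"):
            return s[:19]
        return val

    print(fmt_ts(datetime(2024, 5, 1, 12, 30, 45, 999999)))    # 2024-05-01 12:30:45
    print(fmt_ts("2024-05-01 12:30:45.123456"))                # 2024-05-01 12:30:45
    print(fmt_ts(42))                                          # 42, passed through
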
@@ -159,22 +159,22 @@ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
  quoting=csv.QUOTE_NONNUMERIC,
  lineterminator="\n",
  )
-
+
  bind = session.get_bind()
  inspector = inspect(bind)
-
+
  try:
  unique_cols = db_utils.get_unique_columns(session, tableName)
  fk_list = db_utils.get_foreign_keys(session, tableName)
  except Exception as e:
  raise TradeException(f"Failed to introspect table '{tableName}': {e!r}")
-
+
  csvHead = []
  stmtColumn = []
  stmtTable = [tableName]
  stmtOrder = []
  is_modified_col = []
-
+
  for col in inspector.get_columns(tableName):
  col_name = col["name"]
  fk = next((fk for fk in fk_list if fk["from"] == col_name), None)
@@ -205,14 +205,14 @@ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
  csvHead.append(col_name)
  stmtColumn.append(f"{tableName}.{col_name}")
  is_modified_col.append(col_name == "modified")
-
+
  sqlStmt = f"SELECT {','.join(stmtColumn)} FROM {' '.join(stmtTable)}"
  if stmtOrder:
  sqlStmt += f" ORDER BY {','.join(stmtOrder)}"
  tdenv.DEBUG1(f"SQL: {sqlStmt}")
-
+
  exportFile.write(f"{','.join(csvHead)}\n")
-
+
  for row in session.execute(text(sqlStmt)):
  lineCount += 1
  row_out = [
@@ -221,11 +221,11 @@ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
  ]
  tdenv.DEBUG2(f"{lineCount}: {row_out}")
  exportOut.writerow(row_out)
-
+
  tdenv.DEBUG1(f"{lineCount} {tableName}s exported")
-
+
  # Close session if we created it
  if hasattr(tdb_or_session, "engine"):
  session.close()
-
+
  return lineCount, exportPath
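
The header row written above follows the annotation rules from the docstring: unique columns carry the unq: prefix and foreign-key columns are rewritten as <col>@<joinTable>.<uniqueCol>. A toy illustration of assembling such a header; the table metadata below is invented rather than introspected:

    unique_cols = {"name"}
    foreign_keys = {"system_id": ("System", "system_id")}   # col -> (joinTable, uniqueCol)

    def header_for(col):
        if col in foreign_keys:
            join_table, unique_col = foreign_keys[col]
            col = f"{col}@{join_table}.{unique_col}"
        if col.split("@")[0] in unique_cols:
            col = f"unq:{col}"
        return col

    cols = ["station_id", "name", "system_id", "modified"]
    print(",".join(header_for(c) for c in cols))
    # station_id,unq:name,system_id@System.system_id,modified
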
tradedangerous/db/adapter.py
@@ -32,7 +32,7 @@ def get_adapter_if_enabled(cfg_path: Optional[str] = None) -> "TradeDBReadAdapte
  backend = (cfg.get("database", "backend", fallback="sqlite") or "sqlite").strip().lower()
  if backend == "sqlite":
  return None
-
+
  # Engine is created lazily via the property below to honour "no side-effects at import".
  return TradeDBReadAdapter(cfg_path)

@@ -50,7 +50,7 @@ class TradeDBReadAdapter:
  self._cfg_path = cfg_path
  self._engine: Optional[Engine] = None
  self._Session = None # sessionmaker
-
+
  # Lazy engine/session factory (no import-time work)
  @property
  def Session(self):
@@ -59,15 +59,15 @@ class TradeDBReadAdapter:
  self._engine = engine
  self._Session = get_session_factory(engine)
  return self._Session
-
+
  @contextmanager
  def session(self) -> Generator[Session, None, None]:
  Session = self.Session
  with Session() as s:
  yield s
-
+
  # ---- Reads mapped to ORM ------------------------------------------------
-
+
  def list_system_rows(self) -> Iterable[Tuple[int, str, float, float, float, Optional[int]]]:
  """
  Shape matches legacy _loadSystems SELECT:
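
The adapter defers all engine and sessionmaker construction to the first use of the Session property, keeping module import free of side effects as the comments promise. A condensed sketch of the same lazy pattern; make_engine() is a stub standing in for make_engine_from_config()/get_session_factory():

    from contextlib import contextmanager
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    def make_engine():
        return create_engine("sqlite://")    # stub for the configured engine

    class LazyAdapter:
        def __init__(self):
            self._engine = None
            self._Session = None    # sessionmaker, built on first use

        @property
        def Session(self):
            if self._Session is None:            # first access pays the cost
                self._engine = make_engine()
                self._Session = sessionmaker(self._engine)
            return self._Session

        @contextmanager
        def session(self):
            with self.Session() as s:            # a sessionmaker is callable
                yield s

    adapter = LazyAdapter()          # no engine exists yet
    with adapter.session() as s:     # engine and factory are built here, once
        print(type(s).__name__)      # Session
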
@@ -86,7 +86,7 @@ class TradeDBReadAdapter:
  )
  for r in rows:
  yield (r.system_id, r.name, r.pos_x, r.pos_y, r.pos_z, r.added_id)
-
+
  def system_by_name(self, name_ci: str) -> Optional[Tuple[int, str, float, float, float, Optional[int]]]:
  """
  Case-insensitive name match for System.
@@ -100,7 +100,7 @@ class TradeDBReadAdapter:
  if not row:
  return None
  return (row.system_id, row.name, row.pos_x, row.pos_y, row.pos_z, row.added_id)
-
+
  def station_by_system_and_name(
  self, system_id: int, station_name_ci: str
  ) -> Optional[Tuple[int, int, str, int, str, str, str, str, str, str, str, str, str, int]]:
@@ -151,7 +151,7 @@ class TradeDBReadAdapter:
  r.planetary,
  r.type_id,
  )
-
+
  def average_selling(self) -> Dict[int, int]:
  """
  {item_id: avg_supply_price>0}
@@ -170,7 +170,7 @@ class TradeDBReadAdapter:
  .group_by(Item.item_id)
  )
  return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}
-
+
  def average_buying(self) -> Dict[int, int]:
  """
  {item_id: avg_demand_price>0}
tradedangerous/db/config.py
@@ -85,16 +85,16 @@ def load_config(path: str | Path | None = None) -> Dict[str, Any]:
  except Exception:
  # If anything goes wrong resolving the env, fall back to defaults below
  pass
-
+
  # Fall back to local file in CWD
  if cfg_path is None:
  p = Path.cwd() / "db_config.ini"
  if p.exists():
  cfg_path = p
-
+
  # start with defaults
  result: Dict[str, Any] = {k: (v.copy() if isinstance(v, dict) else v) for k, v in DEFAULTS.items()}
-
+
  if cfg_path:
  parser = configparser.ConfigParser(**CFG_KW)
  with cfg_path.open("r", encoding="utf-8") as fh:
@@ -103,5 +103,5 @@ def load_config(path: str | Path | None = None) -> Dict[str, Any]:
  result.setdefault(section, {})
  for key, val in parser.items(section):
  result[section][key] = val
-
+
  return _coerce_types(result)
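
load_config therefore resolves its file in the documented order: an environment override first, then db_config.ini in the working directory, then built-in defaults. A sketch of just the path-resolution step; TD_DB_CONFIG is the variable named in the engine module below, and DEFAULTS is a stand-in for the module's real defaults table:

    import os
    from pathlib import Path

    def resolve_cfg_path():
        env = os.environ.get("TD_DB_CONFIG")
        if env and Path(env).exists():       # 1. explicit environment override
            return Path(env)
        p = Path.cwd() / "db_config.ini"
        if p.exists():                       # 2. local file in the working dir
            return p
        return None                          # 3. fall through to DEFAULTS

    print(resolve_cfg_path() or "using built-in defaults")
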
tradedangerous/db/engine.py
@@ -41,12 +41,12 @@ def _ensure_default_config_file(target_path: Path | None) -> Path | None:
  def _cfg_to_dict(cfg: configparser.ConfigParser | Mapping[str, Any] | str | os.PathLike) -> Dict[str, Dict[str, Any]]:
  """
  Normalise configuration input into a dict-of-sections.
-
+
  Accepted inputs:
  * dict-like mapping → returned as {section: {key: value}}
  * ConfigParser → converted to nested dict (sections overlay DEFAULT section)
  * str/Path → if file exists, read it; if missing, fall back to load_config()
-
+
  NOTE:
  - We do NOT raise on a missing path; we delegate to load_config() to honour the
  documented resolution order (ENV → CWD → DEFAULTS).
@@ -61,7 +61,7 @@ def _cfg_to_dict(cfg: configparser.ConfigParser | Mapping[str, Any] | str | os.P
  # Missing provided path → use canonical loader with fallbacks
  from .config import load_config
  return load_config(None)
-
+
  if isinstance(cfg, configparser.ConfigParser):
  out: Dict[str, Dict[str, Any]] = {}
  defaults = dict(cfg.defaults())
@@ -72,7 +72,7 @@ def _cfg_to_dict(cfg: configparser.ConfigParser | Mapping[str, Any] | str | os.P
  for sec in ("database", "engine", "sqlite", "mariadb", "paths"):
  out.setdefault(sec, dict(defaults))
  return out
-
+
  # Already a dict-like mapping of sections
  return {k: dict(v) if isinstance(v, Mapping) else dict() for k, v in cfg.items()} # type: ignore[arg-type]

@@ -135,7 +135,7 @@ def _make_sqlite_url(cfg: Dict[str, Any]) -> str:
  def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str, Any] | str | os.PathLike | None = None) -> Engine:
  """
  Build a SQLAlchemy Engine for either MariaDB or SQLite.
-
+
  Accepts: ConfigParser, dict-like {section:{k:v}}, path to INI file, or None.
  First-run behaviour:
  - If a path is provided but missing, or if no path is provided and no config is found,
@@ -143,7 +143,7 @@ def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str
  points elsewhere), then loaded.
  """
  ini_target: Path | None = None
-
+
  # If caller gave a specific path, prefer to materialise a default file there.
  if isinstance(cfg_or_path, (str, os.PathLike)):
  ini_target = Path(cfg_or_path)
@@ -153,17 +153,17 @@ def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str
  # (CWD/db_config.ini by default, or the file pointed to by TD_DB_CONFIG).
  ini_target = resolve_db_config_path("db_config.ini")
  _ensure_default_config_file(ini_target)
-
+
  cfg = _cfg_to_dict(cfg_or_path if cfg_or_path is not None else str(ini_target))
-
+
  # Ensure dirs exist (used by various parts of the app)
  _ = resolve_data_dir(cfg)
  _ = resolve_tmp_dir(cfg)
-
+
  backend = str(_get(cfg, "database", "backend", "sqlite")).strip().lower()
  echo = bool(_get_bool(cfg, "engine", "echo", False))
  isolation = _get(cfg, "engine", "isolation_level", None)
-
+
  if backend == "mariadb":
  url = _make_mariadb_url(cfg)
  connect_timeout = _get_int(cfg, "engine", "connect_timeout", 10) or 10
@@ -190,7 +190,7 @@ def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str
  poolclass=NullPool,
  connect_args={"check_same_thread": False},
  )
-
+
  @event.listens_for(engine, "connect")
  def _set_sqlite_pragmas(dbapi_conn, _):
  cur = dbapi_conn.cursor()
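
The SQLite branch attaches a "connect" listener so every new DBAPI connection gets its pragmas applied. A runnable sketch of the same hook; the specific pragma is an assumption, since the diff only shows the cursor being opened and closed around it:

    from sqlalchemy import create_engine, event, text
    from sqlalchemy.pool import NullPool

    engine = create_engine(
        "sqlite://",
        poolclass=NullPool,
        connect_args={"check_same_thread": False},
    )

    @event.listens_for(engine, "connect")
    def _set_sqlite_pragmas(dbapi_conn, _):
        cur = dbapi_conn.cursor()
        cur.execute("PRAGMA foreign_keys=ON")    # assumed pragma, for illustration
        cur.close()

    with engine.connect() as conn:
        print(conn.execute(text("PRAGMA foreign_keys")).scalar())    # 1
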
@@ -201,7 +201,7 @@ def make_engine_from_config(cfg_or_path: configparser.ConfigParser | Mapping[str
  cur.close()
  else:
  raise ValueError(f"Unsupported backend: {backend}")
-
+
  try:
  engine._td_redacted_url = _redact(str(url)) # type: ignore[attr-defined]
  except Exception: