tradedangerous-12.0.5-py3-none-any.whl → tradedangerous-12.0.7-py3-none-any.whl

This diff shows the content of publicly available package versions as released to their respective public registries. It is provided for informational purposes only and reflects the changes between the two published versions.

Potentially problematic release.



Files changed (30)
  1. tradedangerous/cache.py +135 -133
  2. tradedangerous/commands/buildcache_cmd.py +7 -7
  3. tradedangerous/commands/buy_cmd.py +4 -4
  4. tradedangerous/commands/export_cmd.py +11 -11
  5. tradedangerous/commands/import_cmd.py +12 -12
  6. tradedangerous/commands/market_cmd.py +17 -17
  7. tradedangerous/commands/olddata_cmd.py +18 -18
  8. tradedangerous/commands/rares_cmd.py +30 -30
  9. tradedangerous/commands/run_cmd.py +21 -21
  10. tradedangerous/commands/sell_cmd.py +5 -5
  11. tradedangerous/corrections.py +1 -1
  12. tradedangerous/csvexport.py +20 -20
  13. tradedangerous/db/adapter.py +9 -9
  14. tradedangerous/db/config.py +4 -4
  15. tradedangerous/db/engine.py +12 -12
  16. tradedangerous/db/lifecycle.py +28 -28
  17. tradedangerous/db/orm_models.py +42 -42
  18. tradedangerous/db/paths.py +3 -3
  19. tradedangerous/plugins/eddblink_plug.py +108 -253
  20. tradedangerous/plugins/spansh_plug.py +254 -254
  21. tradedangerous/prices.py +21 -21
  22. tradedangerous/tradedb.py +85 -85
  23. tradedangerous/tradeenv.py +2 -2
  24. tradedangerous/version.py +1 -1
  25. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/METADATA +1 -1
  26. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/RECORD +30 -30
  27. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/WHEEL +0 -0
  28. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/entry_points.txt +0 -0
  29. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/licenses/LICENSE +0 -0
  30. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/top_level.txt +0 -0
@@ -56,7 +56,7 @@ class ImportPlugin(plugins.ImportPluginBase):
56
56
  - Imports RareItem.csv via cache.processImportFile() AFTER systems/stations exist
57
57
  - Exports CSVs (+RareItem) and rebuilds TradeDangerous.prices
58
58
  """
59
-
59
+
60
60
  pluginInfo = {
61
61
  "name": "spansh",
62
62
  "author": "TD Team",
@@ -64,7 +64,7 @@ class ImportPlugin(plugins.ImportPluginBase):
64
64
  "minimum-tb-version": "1.76",
65
65
  "description": "Imports Spansh galaxy dump and refreshes cache artefacts.",
66
66
  }
67
-
67
+
68
68
  # Correct option contract: dict name -> help text
69
69
  pluginOptions = {
70
70
  "url": "Remote URL to galaxy_stations.json (default if neither url nor file is given)",
@@ -84,7 +84,7 @@ class ImportPlugin(plugins.ImportPluginBase):
84
84
  "only_system": "Process only the system with this name or id64; still stream the real file.",
85
85
  "debug_trace": "Emit compact JSONL decision logs to tmp/spansh_trace.jsonl (1 line per decision).",
86
86
  }
87
-
87
+
88
88
  # Hardcoded EDCD sources (raw GitHub)
89
89
  EDCD_URLS = {
90
90
  "commodity": "https://raw.githubusercontent.com/EDCD/FDevIDs/master/commodity.csv",
@@ -92,7 +92,7 @@ class ImportPlugin(plugins.ImportPluginBase):
92
92
  "shipyard": "https://raw.githubusercontent.com/EDCD/FDevIDs/master/shipyard.csv",
93
93
  "rares": "https://raw.githubusercontent.com/EDCD/FDevIDs/master/rare_commodity.csv",
94
94
  }
95
-
95
+
96
96
  # ------------------------------
97
97
  # Construction & plumbing
98
98
  # ------------------------------
@@ -104,7 +104,7 @@ class ImportPlugin(plugins.ImportPluginBase):
104
104
  self.tdb = tdb
105
105
  self.tdenv = cmdenv
106
106
  self.session: Optional[Session] = None
107
-
107
+
108
108
  # Paths (data/tmp) from env/config; fall back defensively
109
109
  self.data_dir = Path(getattr(self.tdenv, "dataDir", getattr(self.tdb, "dataDir", "data"))).resolve()
110
110
  self.tmp_dir = Path(getattr(self.tdenv, "tmpDir", getattr(self.tdb, "tmpDir", "tmp"))).resolve()
@@ -113,25 +113,25 @@ class ImportPlugin(plugins.ImportPluginBase):
113
113
  p.mkdir(parents=True, exist_ok=True)
114
114
  except Exception as e:
115
115
  raise CleanExit(f"Failed to create directory {p}: {e!r}")
116
-
116
+
117
117
  # Batch size decided AFTER session is opened (see finish())
118
118
  self.batch_size: Optional[int] = None
119
-
119
+
120
120
  # Verbosity gates
121
121
  self._is_tty = sys.stderr.isatty() or sys.stdout.isatty()
122
122
  self._debug_level = int(getattr(self.tdenv, "debug", 0) or 0) # -v levels
123
123
  self._warn_enabled = bool(getattr(self.tdenv, "warn", None)) or (self._debug_level >= 3)
124
-
124
+
125
125
  # Progress state
126
126
  self._last_progress_time = 0.0
127
-
127
+
128
128
  # Station type mapping (existing helper in this module)
129
129
  self._station_type_map = self._build_station_type_map()
130
-
130
+
131
131
  # Debug trace option
132
132
  self.debug_trace = str(self.getOption("debug_trace") or "0").strip().lower() not in ("0", "", "false", "no")
133
133
  self._trace_fp = None
134
-
134
+
135
135
  # --------------------------------------
136
136
  # Small tracing helper
137
137
  # --------------------------------------
@@ -150,28 +150,28 @@ class ImportPlugin(plugins.ImportPluginBase):
150
150
  tmp = getattr(self, "tmp_dir", Path("tmp"))
151
151
  tmp.mkdir(parents=True, exist_ok=True)
152
152
  self._trace_fp = (tmp / "spansh_trace.jsonl").open("a", encoding="utf-8")
153
-
153
+
154
154
  # sanitize datetimes
155
155
  for k, v in list(evt.items()):
156
156
  if hasattr(v, "isoformat"):
157
157
  evt[k] = v.isoformat()
158
-
158
+
159
159
  self._trace_fp.write(json.dumps(evt, ensure_ascii=False) + "\n")
160
160
  self._trace_fp.flush()
161
161
  except Exception:
162
162
  pass # never break main flow
163
-
163
+
164
164
  # --- TD shim: seed 'Added' from templates (idempotent) ---
165
165
  def _seed_added_from_templates(self, session) -> None:
166
166
  """
167
167
  Seed the legacy 'Added' table from the packaged CSV:
168
168
  tradedangerous/templates/Added.csv
169
-
169
+
170
170
  DB-agnostic; uses cache.processImportFile. No reliance on any templatesDir.
171
171
  """
172
172
  from importlib.resources import files, as_file
173
173
  from tradedangerous.cache import processImportFile
174
-
174
+
175
175
  # Obtain a Traversable for the packaged resource and materialize to a real path
176
176
  res = files("tradedangerous").joinpath("templates", "Added.csv")
177
177
  with as_file(res) as csv_path:
@@ -196,7 +196,7 @@ class ImportPlugin(plugins.ImportPluginBase):
196
196
  # --------------------------------------
197
197
  # EDCD Import Functions
198
198
  # --------------------------------------
199
-
199
+
200
200
  # ---------- Download from EDCD ----------
201
201
  def _acquire_edcd_files(self) -> Dict[str, Optional[Path]]:
202
202
  """
@@ -210,11 +210,11 @@ class ImportPlugin(plugins.ImportPluginBase):
210
210
  override = self.getOption(opt_key)
211
211
  target = self.tmp_dir / f"edcd_{basename}.csv"
212
212
  label = f"EDCD {basename}.csv"
213
-
213
+
214
214
  # Explicit disable via empty override
215
215
  if override is not None and str(override).strip() == "":
216
216
  return None
217
-
217
+
218
218
  # Local path override
219
219
  if override and ("://" not in override):
220
220
  p = Path(override)
@@ -225,7 +225,7 @@ class ImportPlugin(plugins.ImportPluginBase):
225
225
  if p.exists() and p.is_file():
226
226
  return p.resolve()
227
227
  override = None # fall back to URL
228
-
228
+
229
229
  # URL (override or default)
230
230
  url = override or default_url
231
231
  try:
@@ -234,10 +234,10 @@ class ImportPlugin(plugins.ImportPluginBase):
234
234
  return target if target.exists() else None
235
235
  except Exception:
236
236
  return target if target.exists() else None
237
-
237
+
238
238
  if self.getOption("no_edcd"):
239
239
  return {"commodity": None, "outfitting": None, "shipyard": None, "rares": None}
240
-
240
+
241
241
  return {
242
242
  "commodity": _resolve_one("edcd_commodity", self.EDCD_URLS["commodity"], "commodity"),
243
243
  "outfitting": _resolve_one("edcd_outfitting", self.EDCD_URLS["outfitting"], "outfitting"),
@@ -256,22 +256,22 @@ class ImportPlugin(plugins.ImportPluginBase):
256
256
  """
257
257
  Read EDCD commodity.csv, extract distinct category names, and add any
258
258
  missing Category rows. No updates, no deletes. Cross-dialect safe.
259
-
259
+
260
260
  Returns: number of rows inserted.
261
261
  """
262
262
  t_cat = tables["Category"]
263
-
263
+
264
264
  # Load existing category names (case-insensitive) to avoid duplicates.
265
265
  existing_lc = {
266
266
  (str(n) or "").strip().lower()
267
267
  for (n,) in session.execute(select(t_cat.c.name)).all()
268
268
  if n is not None
269
269
  }
270
-
270
+
271
271
  # Parse the CSV and collect unique category names.
272
272
  with open(commodity_csv, "r", encoding="utf-8", newline="") as fh:
273
273
  reader = csv.DictReader(fh)
274
-
274
+
275
275
  # Find the 'category' column (case-insensitive).
276
276
  cat_col = None
277
277
  for h in (reader.fieldnames or []):
@@ -280,10 +280,10 @@ class ImportPlugin(plugins.ImportPluginBase):
280
280
  break
281
281
  if cat_col is None:
282
282
  raise CleanExit(f"EDCD commodity.csv missing 'category' column: {commodity_csv}")
283
-
283
+
284
284
  seen_lc: set[str] = set()
285
285
  to_add: list[dict] = []
286
-
286
+
287
287
  for row in reader:
288
288
  raw = row.get(cat_col)
289
289
  if not raw:
@@ -291,17 +291,17 @@ class ImportPlugin(plugins.ImportPluginBase):
291
291
  name = str(raw).strip()
292
292
  if not name:
293
293
  continue
294
-
294
+
295
295
  lk = name.lower()
296
296
  if lk in existing_lc or lk in seen_lc:
297
297
  continue
298
-
298
+
299
299
  seen_lc.add(lk)
300
300
  to_add.append({"name": name})
301
-
301
+
302
302
  if not to_add:
303
303
  return 0
304
-
304
+
305
305
  # Cross-dialect safe "add-only": bulk insert the missing names.
306
306
  session.execute(insert(t_cat), to_add)
307
307
  return len(to_add)
@@ -319,7 +319,7 @@ class ImportPlugin(plugins.ImportPluginBase):
319
319
  # --- choose key columns for upsert ---
320
320
  pk_cols = tuple(c.name for c in table.primary_key.columns)
321
321
  key_cols: tuple[str, ...] = pk_cols
322
-
322
+
323
323
  if not key_cols:
324
324
  # Common case for EDCD FDev tables: UNIQUE(id) but no PK
325
325
  if "id" in table.c:
@@ -337,10 +337,10 @@ class ImportPlugin(plugins.ImportPluginBase):
337
337
  key_cols = (uniq_single[0],)
338
338
  except Exception:
339
339
  pass
340
-
340
+
341
341
  if not key_cols:
342
342
  raise CleanExit(f"Table {table.name} has neither a primary key nor a single-column UNIQUE key; cannot upsert from EDCD")
343
-
343
+
344
344
  # --- read CSV ---
345
345
  with open(csv_path, "r", encoding="utf-8", newline="") as fh:
346
346
  reader = csv.DictReader(fh)
@@ -348,15 +348,15 @@ class ImportPlugin(plugins.ImportPluginBase):
348
348
  if not cols:
349
349
  return 0
350
350
  rows = [{k: row.get(k) for k in cols} for row in reader]
351
-
351
+
352
352
  if not rows:
353
353
  return 0
354
-
354
+
355
355
  # --- table-specific sanitation (fixes ck_fdo_mount / ck_fdo_guidance) ---
356
356
  if table.name == "FDevOutfitting":
357
357
  allowed_mount = {"Fixed", "Gimballed", "Turreted"}
358
358
  allowed_guid = {"Dumbfire", "Seeker", "Swarm"}
359
-
359
+
360
360
  def _norm(val, allowed):
361
361
  if val is None:
362
362
  return None
@@ -364,24 +364,24 @@ class ImportPlugin(plugins.ImportPluginBase):
364
364
  if not s or s not in allowed:
365
365
  return None
366
366
  return s
367
-
367
+
368
368
  for r in rows:
369
369
  if "mount" in r:
370
370
  r["mount"] = _norm(r["mount"], allowed_mount)
371
371
  if "guidance" in r:
372
372
  r["guidance"] = _norm(r["guidance"], allowed_guid)
373
-
373
+
374
374
  # --- perform upsert using chosen key columns ---
375
375
  upd_cols = tuple(c for c in cols if c not in key_cols)
376
-
376
+
377
377
  if db_utils.is_sqlite(session):
378
378
  db_utils.sqlite_upsert_simple(session, table, rows=rows, key_cols=key_cols, update_cols=upd_cols)
379
379
  return len(rows)
380
-
380
+
381
381
  if db_utils.is_mysql(session):
382
382
  db_utils.mysql_upsert_simple(session, table, rows=rows, key_cols=key_cols, update_cols=upd_cols)
383
383
  return len(rows)
384
-
384
+
385
385
  # Generic backend (read-then-insert/update)
386
386
  for r in rows:
387
387
  cond = and_(*[getattr(table.c, k) == r[k] for k in key_cols])
@@ -391,12 +391,12 @@ class ImportPlugin(plugins.ImportPluginBase):
391
391
  elif upd_cols:
392
392
  session.execute(update(table).where(cond).values(**{k: r[k] for k in upd_cols}))
393
393
  return len(rows)
394
-
394
+
395
395
  def _edcd_import_fdev_catalogs(self, session: Session, tables: Dict[str, Table], *, outfitting_csv: Path, shipyard_csv: Path) -> Tuple[int, int]:
396
396
  u = self._edcd_import_table_direct(session, tables["FDevOutfitting"], outfitting_csv)
397
397
  s = self._edcd_import_table_direct(session, tables["FDevShipyard"], shipyard_csv)
398
398
  return (u, s)
399
-
399
+
400
400
  # --------------------------------------
401
401
  # Comparison Helpers
402
402
  # --------------------------------------
@@ -421,7 +421,7 @@ class ImportPlugin(plugins.ImportPluginBase):
421
421
  """
422
422
  keep_ids = {int(x) for x in ids if x is not None}
423
423
  inserts = updates = deletes = 0
424
-
424
+
425
425
  # --- INSERT missing (batch) ---
426
426
  if keep_ids:
427
427
  # Find which of keep_ids are missing
@@ -440,7 +440,7 @@ class ImportPlugin(plugins.ImportPluginBase):
440
440
  [{id_col: vid, "station_id": station_id, "modified": ts_sp} for vid in to_insert]
441
441
  )
442
442
  inserts = len(to_insert)
443
-
443
+
444
444
  # --- UPDATE only those with modified < ts_sp (batch) ---
445
445
  if keep_ids:
446
446
  res = self.session.execute(
@@ -456,7 +456,7 @@ class ImportPlugin(plugins.ImportPluginBase):
456
456
  )
457
457
  # rowcount includes both existing rows (not inserts) whose modified was < ts_sp
458
458
  updates = int(res.rowcount or 0)
459
-
459
+
460
460
  # --- DELETE rows NOT in keep_ids, but only if <= ts_sp (single statement) ---
461
461
  res = self.session.execute(
462
462
  t_vendor.delete().where(
@@ -468,7 +468,7 @@ class ImportPlugin(plugins.ImportPluginBase):
468
468
  )
469
469
  )
470
470
  deletes = int(res.rowcount or 0)
471
-
471
+
472
472
  return inserts, updates, deletes
473
473
 
474
474
 
@@ -483,12 +483,12 @@ class ImportPlugin(plugins.ImportPluginBase):
483
483
  ) -> Tuple[int, int]:
484
484
  """
485
485
  Fast, set-based vendor sync for a single station and one service (shipyard/outfitting).
486
-
486
+
487
487
  Returns: (number_of_inserts_or_updates_on_vendor_links, deletions_count).
488
488
  """
489
489
  # Ensure we never write NULL into NOT NULL 'modified' columns.
490
490
  ts_eff = (ts_sp or datetime.utcnow().replace(microsecond=0))
491
-
491
+
492
492
  if kind == "ship":
493
493
  t_master = tables["Ship"]
494
494
  t_vendor = tables["ShipVendor"]
@@ -505,7 +505,7 @@ class ImportPlugin(plugins.ImportPluginBase):
505
505
  continue
506
506
  keep_ids.add(int(ship_id))
507
507
  master_rows.append({"ship_id": ship_id, "name": name})
508
-
508
+
509
509
  elif kind == "module":
510
510
  t_master = tables["Upgrade"]
511
511
  t_vendor = tables["UpgradeVendor"]
@@ -530,7 +530,7 @@ class ImportPlugin(plugins.ImportPluginBase):
530
530
  })
531
531
  else:
532
532
  raise CleanExit(f"_sync_vendor_block_fast: unknown kind={kind!r}")
533
-
533
+
534
534
  # 1) Ensure master rows exist (simple upsert, no timestamp guards).
535
535
  if master_rows:
536
536
  key_name = list(master_rows[0].keys())[0]
@@ -557,7 +557,7 @@ class ImportPlugin(plugins.ImportPluginBase):
557
557
  upd = {k: v for k, v in r.items() if k != key_name}
558
558
  if upd:
559
559
  self.session.execute(update(t_master).where(cond).values(**upd))
560
-
560
+
561
561
  # 2) Link rows with timestamp guard for vendor tables.
562
562
  wrote = 0
563
563
  delc = 0
@@ -576,7 +576,7 @@ class ImportPlugin(plugins.ImportPluginBase):
576
576
  if (mod is None) or (ts_eff > mod)
577
577
  }
578
578
  wrote = len(to_insert) + len(to_update)
579
-
579
+
580
580
  vendor_rows = [{id_col: vid, "station_id": station_id, "modified": ts_eff} for vid in keep_ids]
581
581
  if db_utils.is_sqlite(self.session):
582
582
  db_utils.sqlite_upsert_modified(
@@ -602,9 +602,9 @@ class ImportPlugin(plugins.ImportPluginBase):
602
602
  mod = cur[0]
603
603
  if (mod is None) or (ts_eff > mod):
604
604
  self.session.execute(update(t_vendor).where(cond).values(modified=ts_eff))
605
-
606
- return wrote, delc
607
605
 
606
+ return wrote, delc
607
+
608
608
  def _cleanup_absent_stations(self, tables: Dict[str, Table], present_station_ids: set[int], json_ts: datetime) -> Tuple[int, int, int]:
609
609
  """
610
610
  After streaming, delete baseline rows for stations absent from the JSON
@@ -612,13 +612,13 @@ class ImportPlugin(plugins.ImportPluginBase):
612
612
  Returns (market_del, outfit_del, ship_del) counts.
613
613
  """
614
614
  t_si, t_uv, t_sv, t_st = tables["StationItem"], tables["UpgradeVendor"], tables["ShipVendor"], tables["Station"]
615
-
615
+
616
616
  # All station ids in DB
617
617
  all_sids = [int(r[0]) for r in self.session.execute(select(t_st.c.station_id)).all()]
618
618
  absent = [sid for sid in all_sids if sid not in present_station_ids]
619
619
  if not absent:
620
620
  return (0, 0, 0)
621
-
621
+
622
622
  # Markets: delete baseline rows (from_live=0) with modified <= json_ts
623
623
  del_m = self.session.execute(
624
624
  t_si.delete().where(
@@ -629,7 +629,7 @@ class ImportPlugin(plugins.ImportPluginBase):
629
629
  )
630
630
  )
631
631
  ).rowcount or 0
632
-
632
+
633
633
  # Vendors: delete rows with modified <= json_ts
634
634
  del_u = self.session.execute(
635
635
  tables["UpgradeVendor"].delete().where(
@@ -641,9 +641,9 @@ class ImportPlugin(plugins.ImportPluginBase):
641
641
  and_(t_sv.c.station_id.in_(absent), or_(t_sv.c.modified == None, t_sv.c.modified <= json_ts))
642
642
  )
643
643
  ).rowcount or 0
644
-
645
- return (int(del_m), int(del_u), int(del_s))
646
644
 
645
+ return (int(del_m), int(del_u), int(del_s))
646
+
647
647
  def _sync_market_block_fast(
648
648
  self,
649
649
  tables: Dict[str, Table],
@@ -655,15 +655,15 @@ class ImportPlugin(plugins.ImportPluginBase):
655
655
  ) -> Tuple[int, int]:
656
656
  """
657
657
  Fast, set-based market sync for one station.
658
-
658
+
659
659
  Returns: (number_of_inserts_or_updates_on_StationItem, deletions_count).
660
660
  """
661
661
  t_item, t_si = tables["Item"], tables["StationItem"]
662
-
662
+
663
663
  item_rows: List[Dict[str, Any]] = []
664
664
  link_rows: List[Dict[str, Any]] = []
665
665
  keep_ids: set[int] = set()
666
-
666
+
667
667
  for co in commodities:
668
668
  if not isinstance(co, dict):
669
669
  continue
@@ -672,11 +672,11 @@ class ImportPlugin(plugins.ImportPluginBase):
672
672
  cat_name = co.get("category")
673
673
  if fdev_id is None or name is None or cat_name is None:
674
674
  continue
675
-
675
+
676
676
  cat_id = categories.get(str(cat_name).lower())
677
677
  if cat_id is None:
678
678
  raise CleanExit(f'Unknown commodity category "{cat_name}"')
679
-
679
+
680
680
  keep_ids.add(int(fdev_id))
681
681
  item_rows.append({
682
682
  "item_id": fdev_id,
@@ -685,12 +685,12 @@ class ImportPlugin(plugins.ImportPluginBase):
685
685
  "fdev_id": fdev_id,
686
686
  "ui_order": 0,
687
687
  })
688
-
688
+
689
689
  demand = co.get("demand")
690
690
  supply = co.get("supply")
691
691
  buy = co.get("buyPrice")
692
692
  sell = co.get("sellPrice")
693
-
693
+
694
694
  link_rows.append({
695
695
  "station_id": station_id,
696
696
  "item_id": fdev_id,
@@ -703,7 +703,7 @@ class ImportPlugin(plugins.ImportPluginBase):
703
703
  "from_live": 0,
704
704
  "modified": ts_sp,
705
705
  })
706
-
706
+
707
707
  # 1) Upsert Items (simple)
708
708
  if item_rows:
709
709
  if db_utils.is_sqlite(self.session):
@@ -731,7 +731,7 @@ class ImportPlugin(plugins.ImportPluginBase):
731
731
  name=r["name"], category_id=r["category_id"], fdev_id=r["fdev_id"], ui_order=r["ui_order"]
732
732
  )
733
733
  )
734
-
734
+
735
735
  # 2) Compute effective inserts/updates for StationItem (pre-check modified), then upsert
736
736
  wrote = 0
737
737
  if link_rows:
@@ -752,7 +752,7 @@ class ImportPlugin(plugins.ImportPluginBase):
752
752
  if (mod is None) or (ts_sp is not None and ts_sp > mod)
753
753
  }
754
754
  wrote = len(to_insert) + len(to_update)
755
-
755
+
756
756
  if db_utils.is_sqlite(self.session):
757
757
  db_utils.sqlite_upsert_modified(
758
758
  self.session, t_si, rows=link_rows,
@@ -787,7 +787,7 @@ class ImportPlugin(plugins.ImportPluginBase):
787
787
  .where(and_(t_si.c.station_id == r["station_id"], t_si.c.item_id == r["item_id"]))
788
788
  .values(**r)
789
789
  )
790
-
790
+
791
791
  # 3) Delete baseline rows missing from JSON, not newer than ts_sp
792
792
  delc = 0
793
793
  base_where = and_(
@@ -799,27 +799,27 @@ class ImportPlugin(plugins.ImportPluginBase):
799
799
  delete_stmt = t_si.delete().where(and_(base_where, ~t_si.c.item_id.in_(keep_ids)))
800
800
  else:
801
801
  delete_stmt = t_si.delete().where(base_where)
802
-
802
+
803
803
  res = self.session.execute(delete_stmt)
804
804
  try:
805
805
  delc = int(res.rowcount or 0)
806
806
  except Exception:
807
807
  delc = 0
808
-
808
+
809
809
  return wrote, delc
810
810
 
811
811
 
812
812
  # ------------------------------
813
813
  # Lifecycle hooks
814
814
  # ------------------------------
815
-
815
+
816
816
  def run(self) -> bool:
817
817
  """
818
818
  Full orchestrator: acquisition → bootstrap → EDCD preload → import → rares → export.
819
819
  Returns False to keep default flow suppressed.
820
820
  """
821
821
  started = time.time()
822
-
822
+
823
823
  if self.getOption("pricesonly"):
824
824
  try:
825
825
  self._print("Regenerating TradeDangerous.prices …")
@@ -829,7 +829,7 @@ class ImportPlugin(plugins.ImportPluginBase):
829
829
  self._error(f"Prices regeneration failed: {e!r}")
830
830
  return False
831
831
  return False
832
-
832
+
833
833
  # Acquire Spansh JSON
834
834
  try:
835
835
  source_path = self._acquire_source()
@@ -837,13 +837,13 @@ class ImportPlugin(plugins.ImportPluginBase):
837
837
  self._warn(str(ce)); return False
838
838
  except Exception as e:
839
839
  self._error(f"Acquisition failed: {e!r}"); return False
840
-
840
+
841
841
  # -------- Bootstrap DB (no cache rebuild here) --------
842
842
  try:
843
843
  backend = self.tdb.engine.dialect.name.lower()
844
844
  data_dir = Path(getattr(self.tdenv, "dataDir", getattr(self.tdb, "dataDir", "data")))
845
845
  metadata = getattr(self.tdb, "metadata", None)
846
-
846
+
847
847
  summary = ensure_fresh_db(
848
848
  backend=backend,
849
849
  engine=self.tdb.engine,
@@ -858,24 +858,24 @@ class ImportPlugin(plugins.ImportPluginBase):
858
858
  f"DB bootstrap: action={summary.get('action','kept')} "
859
859
  f"reason={summary.get('reason','ok')} backend={summary.get('backend')}"
860
860
  )
861
-
861
+
862
862
  # No valid DB? Create full schema now (SQLite from canonical SQL; MariaDB via ORM)
863
863
  if summary.get("action") == "needs_rebuild":
864
864
  from tradedangerous.db.lifecycle import reset_db
865
865
  db_path = Path(self.tdb.engine.url.database or (data_dir / "TradeDangerous.db")) # SQLite only
866
866
  self._print("No valid DB detected — creating full schema…")
867
867
  reset_db(self.tdb.engine, db_path=db_path)
868
-
868
+
869
869
  # Seed 'Added' once on a fresh schema
870
870
  self.session = self._open_session()
871
871
  self._seed_added_from_templates(self.session)
872
872
  self.session.commit()
873
873
  self._safe_close_session()
874
-
874
+
875
875
  except Exception as e:
876
876
  self._error(f"Database bootstrap failed: {e!r}")
877
877
  return False
878
-
878
+
879
879
  # -------- Session + batch + reflection --------
880
880
  try:
881
881
  self.session = self._open_session()
@@ -884,10 +884,10 @@ class ImportPlugin(plugins.ImportPluginBase):
884
884
  except Exception as e:
885
885
  self._error(f"Failed to open/reflect DB session: {e!r}")
886
886
  return False
887
-
887
+
888
888
  # -------- EDCD preloads (hardcoded URLs; can be disabled) --------
889
889
  edcd = self._acquire_edcd_files()
890
-
890
+
891
891
  # Categories (add-only) — COMMIT immediately so they persist even if later phases fail.
892
892
  try:
893
893
  if edcd.get("commodity"):
@@ -899,7 +899,7 @@ class ImportPlugin(plugins.ImportPluginBase):
899
899
  self._warn(str(ce)); return False
900
900
  except Exception as e:
901
901
  self._warn(f"EDCD categories skipped due to error: {e!r}")
902
-
902
+
903
903
  # FDev catalogs (outfitting, shipyard) — COMMIT immediately as well.
904
904
  try:
905
905
  if edcd.get("outfitting") and edcd.get("shipyard"):
@@ -913,14 +913,14 @@ class ImportPlugin(plugins.ImportPluginBase):
913
913
  self.session.commit()
914
914
  except Exception as e:
915
915
  self._warn(f"EDCD FDev catalogs skipped due to error: {e!r}")
916
-
916
+
917
917
  # Load categories (may have grown) before Spansh import
918
918
  try:
919
919
  categories = self._load_categories(self.session, tables)
920
920
  except Exception as e:
921
921
  self._error(f"Failed to load categories: {e!r}")
922
922
  return False
923
-
923
+
924
924
  # -------- Import Spansh JSON --------
925
925
  try:
926
926
  if self._debug_level < 1:
@@ -929,7 +929,7 @@ class ImportPlugin(plugins.ImportPluginBase):
929
929
  self._print("Importing spansh data")
930
930
  stats = self._import_stream(source_path, categories, tables)
931
931
  self._end_live_status()
932
-
932
+
933
933
  mk_e = stats.get("market_writes", 0) + stats.get("market_stations", 0)
934
934
  of_e = stats.get("outfit_writes", 0) + stats.get("outfit_stations", 0)
935
935
  sh_e = stats.get("ship_writes", 0) + stats.get("ship_stations", 0)
@@ -943,7 +943,7 @@ class ImportPlugin(plugins.ImportPluginBase):
943
943
  self._warn(str(ce)); self._safe_close_session(); return False
944
944
  except Exception as e:
945
945
  self._error(f"Import failed: {e!r}"); self._safe_close_session(); return False
946
-
946
+
947
947
  # Enforce Item.ui_order
948
948
  try:
949
949
  t0 = time.time()
@@ -952,16 +952,16 @@ class ImportPlugin(plugins.ImportPluginBase):
952
952
  except Exception as e:
953
953
  self._error(f"ui_order enforcement failed: {e!r}")
954
954
  self._safe_close_session(); return False
955
-
955
+
956
956
  # Final commit for import phase
957
957
  try:
958
958
  self.session.commit()
959
959
  except Exception as e:
960
960
  self._warn(f"Commit failed at end of import; rolling back. Cause: {e!r}")
961
961
  self.session.rollback(); self._safe_close_session(); return False
962
-
962
+
963
963
  self._safe_close_session()
964
-
964
+
965
965
  # -------- Rares (prefer EDCD; fallback to template) --------
966
966
  try:
967
967
  t0 = time.time()
@@ -974,33 +974,33 @@ class ImportPlugin(plugins.ImportPluginBase):
974
974
  self._warn(str(ce)); return False
975
975
  except Exception as e:
976
976
  self._error(f"RareItem import failed: {e!r}"); return False
977
-
977
+
978
978
  # -------- Export (uses your parallel exporter already present) --------
979
979
  try:
980
980
  self._export_and_mirror() # timing + final print handled inside
981
981
  except Exception as e:
982
982
  self._error(f"Export failed: {e!r}"); return False
983
-
983
+
984
984
  elapsed = self._format_hms(time.time() - started)
985
985
  self._print(f"{elapsed} Done")
986
986
  return False
987
987
 
988
988
 
989
-
989
+
990
990
  def finish(self) -> bool:
991
991
  """No-op: handled in run(); finish() won’t be called."""
992
992
  return True
993
-
993
+
994
994
  # ------------------------------
995
995
  # Acquisition (url/file/stdin)
996
996
  # ------------------------------
997
-
997
+
998
998
  def _acquire_source(self) -> Path:
999
999
  """Return a readable filesystem path to the JSON source (tmp/)."""
1000
1000
  url = self.getOption("url")
1001
1001
  file_ = self.getOption("file")
1002
1002
  cache_path = self.tmp_dir / "galaxy_stations.json"
1003
-
1003
+
1004
1004
  if file_:
1005
1005
  if file_ == "-":
1006
1006
  self._print("Reading Spansh dump from stdin …")
@@ -1010,18 +1010,18 @@ class ImportPlugin(plugins.ImportPluginBase):
1010
1010
  if not src.exists() or not src.is_file():
1011
1011
  raise CleanExit(f"Local file not found: {src}")
1012
1012
  return src.resolve()
1013
-
1013
+
1014
1014
  if not url:
1015
1015
  url = DEFAULT_URL
1016
-
1016
+
1017
1017
  # Pass a friendly label so progress says “Spansh dump”
1018
1018
  return self._download_with_cache(url, cache_path, label="Spansh dump")
1019
-
1019
+
1020
1020
  def _download_with_cache(self, url: str, cache_path: Path, *, label: str = "download") -> Path:
1021
1021
  """Conditional download with HEAD Last-Modified and atomic .part."""
1022
1022
  import urllib.request
1023
1023
  from email.utils import parsedate_to_datetime
1024
-
1024
+
1025
1025
  remote_lm: Optional[datetime] = None
1026
1026
  try:
1027
1027
  req = urllib.request.Request(url, method="HEAD")
@@ -1034,13 +1034,13 @@ class ImportPlugin(plugins.ImportPluginBase):
1034
1034
  remote_lm = None
1035
1035
  except Exception:
1036
1036
  pass
1037
-
1037
+
1038
1038
  if cache_path.exists() and remote_lm:
1039
1039
  local_mtime = datetime.fromtimestamp(cache_path.stat().st_mtime, tz=timezone.utc).replace(tzinfo=None)
1040
1040
  if local_mtime >= remote_lm:
1041
1041
  self._print(f"Remote not newer; using cached {label}")
1042
1042
  return cache_path
1043
-
1043
+
1044
1044
  self._print(f"Downloading {label} from {url} …")
1045
1045
  part = cache_path.with_suffix(cache_path.suffix + ".part")
1046
1046
  if part.exists():
@@ -1048,18 +1048,18 @@ class ImportPlugin(plugins.ImportPluginBase):
1048
1048
  part.unlink()
1049
1049
  except Exception:
1050
1050
  pass
1051
-
1051
+
1052
1052
  req = urllib.request.Request(url, method="GET")
1053
1053
  connect_timeout = 30
1054
1054
  chunk = 8 * 1024 * 1024 # 8 MiB
1055
-
1055
+
1056
1056
  try:
1057
1057
  with urllib.request.urlopen(req, timeout=connect_timeout) as resp, open(part, "wb") as fh:
1058
1058
  total_hdr = resp.headers.get("Content-Length")
1059
1059
  total = int(total_hdr) if total_hdr and total_hdr.isdigit() else None
1060
1060
  downloaded = 0
1061
1061
  start = time.time()
1062
-
1062
+
1063
1063
  while True:
1064
1064
  data = resp.read(chunk)
1065
1065
  if not data:
@@ -1067,9 +1067,9 @@ class ImportPlugin(plugins.ImportPluginBase):
1067
1067
  fh.write(data)
1068
1068
  downloaded += len(data)
1069
1069
  self._download_progress(downloaded, total, start, label=label)
1070
-
1070
+
1071
1071
  part.replace(cache_path)
1072
-
1072
+
1073
1073
  # Set mtime to Last-Modified if present on GET
1074
1074
  lm_header = None
1075
1075
  try:
@@ -1084,7 +1084,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1084
1084
  os.utime(cache_path, (ts, ts))
1085
1085
  except Exception:
1086
1086
  pass
1087
-
1087
+
1088
1088
  except Exception as e:
1089
1089
  try:
1090
1090
  if part.exists():
@@ -1092,16 +1092,16 @@ class ImportPlugin(plugins.ImportPluginBase):
1092
1092
  except Exception:
1093
1093
  pass
1094
1094
  raise CleanExit(f"Download failed or timed out for {label}; skipping run ({e!r})")
1095
-
1095
+
1096
1096
  self._print(f'Download complete: {label} → "{cache_path}"')
1097
1097
  return cache_path
1098
-
1098
+
1099
1099
  def _download_progress(self, downloaded: int, total: Optional[int], start_ts: float, *, label: str = "download") -> None:
1100
1100
  now = time.time()
1101
1101
  if now - self._last_progress_time < 0.5 and self._debug_level < 1:
1102
1102
  return
1103
1103
  self._last_progress_time = now
1104
-
1104
+
1105
1105
  rate = downloaded / max(now - start_ts, 1e-9)
1106
1106
  if total:
1107
1107
  pct = (downloaded / total) * 100.0
@@ -1109,7 +1109,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1109
1109
  else:
1110
1110
  msg = f"{label}: {self._fmt_bytes(downloaded)} read {self._fmt_bytes(rate)}/s"
1111
1111
  self._live_status(msg)
1112
-
1112
+
1113
1113
  def _write_stream_to_file(self, stream: io.BufferedReader, dest: Path) -> None:
1114
1114
  part = dest.with_suffix(dest.suffix + ".part")
1115
1115
  if part.exists():
@@ -1136,7 +1136,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1136
1136
  except Exception:
1137
1137
  pass
1138
1138
  raise CleanExit(f"Failed to read stdin into tmp file: {e!r})")
1139
-
1139
+
1140
1140
  # ------------------------------
1141
1141
  # DB session / reflection
1142
1142
  # ------------------------------
@@ -1150,23 +1150,23 @@ class ImportPlugin(plugins.ImportPluginBase):
1150
1150
  sess = db_utils.get_session(self.tdb.engine)
1151
1151
  else:
1152
1152
  raise RuntimeError("No Session factory available")
1153
-
1153
+
1154
1154
  # SQLite pragmas (non-fatal)
1155
1155
  try:
1156
1156
  if db_utils.is_sqlite(sess):
1157
1157
  db_utils.sqlite_set_bulk_pragmas(sess)
1158
1158
  except Exception:
1159
1159
  pass
1160
-
1160
+
1161
1161
  # MySQL/MariaDB session tuning (non-fatal)
1162
1162
  try:
1163
1163
  if db_utils.is_mysql(sess):
1164
1164
  db_utils.mysql_set_bulk_session(sess)
1165
1165
  except Exception:
1166
1166
  pass
1167
-
1167
+
1168
1168
  return sess
1169
-
1169
+
1170
1170
  def _reflect_tables(self, engine: Engine) -> Dict[str, Table]:
1171
1171
  meta = MetaData()
1172
1172
  names = [
@@ -1175,14 +1175,14 @@ class ImportPlugin(plugins.ImportPluginBase):
1175
1175
  "FDevOutfitting", "FDevShipyard", "RareItem",
1176
1176
  ]
1177
1177
  return {n: Table(n, meta, autoload_with=engine) for n in names}
1178
-
1178
+
1179
1179
  # ------------------------------
1180
1180
  # Import (streaming JSON → upserts)
1181
1181
  # ------------------------------
1182
1182
  def _import_stream(self, source_path: Path, categories: Dict[str, int], tables: Dict[str, Table]) -> Dict[str, int]:
1183
1183
  """
1184
1184
  Streaming importer with service-level maxage gating (FK-safe), using per-row rules.
1185
-
1185
+
1186
1186
  FIXES:
1187
1187
  - Batch commits now honor utils.get_import_batch_size() across *all* parent/child ops.
1188
1188
  - System/Station increments are counted in stats and batch_ops.
@@ -1196,36 +1196,36 @@ class ImportPlugin(plugins.ImportPluginBase):
1196
1196
  "market_writes": 0, "outfit_writes": 0, "ship_writes": 0,
1197
1197
  "commodities": 0,
1198
1198
  }
1199
-
1199
+
1200
1200
  # NEW: initialize parse metrics for _progress_line(); iterator keeps these updated
1201
1201
  self._parse_bytes = 0
1202
1202
  self._parse_rate = 0.0
1203
-
1203
+
1204
1204
  maxage_days = float(self.getOption("maxage")) if self.getOption("maxage") else None
1205
1205
  maxage_td = timedelta(days=maxage_days) if maxage_days is not None else None
1206
1206
  now_utc = datetime.utcnow()
1207
-
1207
+
1208
1208
  try:
1209
1209
  json_ts = datetime.fromtimestamp(os.path.getmtime(source_path), tz=timezone.utc).replace(tzinfo=None)
1210
1210
  except Exception:
1211
1211
  json_ts = datetime.utcfromtimestamp(0)
1212
-
1212
+
1213
1213
  seen_station_ids: set[int] = set()
1214
1214
  force_baseline = bool(self.getOption("force_baseline"))
1215
-
1215
+
1216
1216
  def recent(ts: Optional[datetime]) -> bool:
1217
1217
  if ts is None:
1218
1218
  return False if maxage_td is not None else True
1219
1219
  if maxage_td is None:
1220
1220
  return True
1221
1221
  return (now_utc - ts) <= maxage_td
1222
-
1222
+
1223
1223
  def svc_ts(st: Dict[str, Any], key: str) -> Optional[datetime]:
1224
1224
  obj = st.get(key) or {}
1225
1225
  if not isinstance(obj, dict):
1226
1226
  return None
1227
1227
  return self._parse_ts(obj.get("updateTime"))
1228
-
1228
+
1229
1229
  with open(source_path, "rb") as fh:
1230
1230
  for sys_idx, system_obj in enumerate(self._iter_top_level_json_array(fh), 1):
1231
1231
  sys_id64 = system_obj.get("id64")
@@ -1235,9 +1235,9 @@ class ImportPlugin(plugins.ImportPluginBase):
1235
1235
  if self._debug_level >= 3:
1236
1236
  self._warn(f"Skipping malformed system object at index {sys_idx}")
1237
1237
  continue
1238
-
1238
+
1239
1239
  self._trace(phase="system", decision="consider", name=sys_name, id64=sys_id64)
1240
-
1240
+
1241
1241
  # Collect stations (top-level + body-embedded)
1242
1242
  stations: List[Dict[str, Any]] = []
1243
1243
  if isinstance(system_obj.get("stations"), list):
@@ -1249,19 +1249,19 @@ class ImportPlugin(plugins.ImportPluginBase):
1249
1249
  stl = b.get("stations")
1250
1250
  if isinstance(stl, list):
1251
1251
  stations.extend(stl)
1252
-
1252
+
1253
1253
  # --- System upsert ---
1254
1254
  t_system = tables["System"]
1255
1255
  x = coords.get("x"); y = coords.get("y"); z = coords.get("z")
1256
1256
  sys_modified = self._parse_ts(system_obj.get("updateTime"))
1257
1257
  self._upsert_system(t_system, int(sys_id64), str(sys_name), x, y, z, sys_modified)
1258
-
1258
+
1259
1259
  # Count system progress and participate in batching
1260
1260
  stats["systems"] += 1
1261
1261
  batch_ops += 1
1262
-
1262
+
1263
1263
  imported_station_modifieds: list[datetime] = []
1264
-
1264
+
1265
1265
  for st in stations:
1266
1266
  # Periodic commit BEFORE processing the next station (outside any advisory locks)
1267
1267
  if (self.batch_size is not None) and (batch_ops >= self.batch_size):
@@ -1271,7 +1271,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1271
1271
  except Exception as e:
1272
1272
  self._warn(f"Batch commit failed; rolling back. Cause: {e!r}")
1273
1273
  self.session.rollback()
1274
-
1274
+
1275
1275
  name = st.get("name")
1276
1276
  sid = st.get("id")
1277
1277
  if not isinstance(name, str) or sid is None:
@@ -1281,10 +1281,10 @@ class ImportPlugin(plugins.ImportPluginBase):
1281
1281
  stats["stations"] += 1
1282
1282
  # Count at least one op per station so batching still progresses even if no vendor writes occur
1283
1283
  batch_ops += 1
1284
-
1284
+
1285
1285
  # NEW: drive live progress from here (throttled inside _progress_line)
1286
1286
  self._progress_line(stats)
1287
-
1287
+
1288
1288
  # Flags/timestamps
1289
1289
  has_market = bool(st.get("hasMarket") or ("market" in st))
1290
1290
  has_outfit = bool(st.get("hasOutfitting") or ("outfitting" in st))
@@ -1295,7 +1295,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1295
1295
  mkt_fresh = recent(mkt_ts)
1296
1296
  outf_fresh = recent(outf_ts)
1297
1297
  ship_fresh = recent(ship_ts)
1298
-
1298
+
1299
1299
  # Station upsert (idempotent)
1300
1300
  t_station = tables["Station"]
1301
1301
  type_id, planetary = self._map_station_type(st.get("type"))
@@ -1313,7 +1313,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1313
1313
  st_modified = self._parse_ts(st.get("updateTime"))
1314
1314
  if st_modified:
1315
1315
  imported_station_modifieds.append(st_modified)
1316
-
1316
+
1317
1317
  ls_from_star_val = st.get("distanceToArrival", 0)
1318
1318
  try:
1319
1319
  if ls_from_star_val is None:
@@ -1324,13 +1324,13 @@ class ImportPlugin(plugins.ImportPluginBase):
1324
1324
  ls_from_star_val = 0
1325
1325
  except Exception:
1326
1326
  ls_from_star_val = 0
1327
-
1327
+
1328
1328
  self._upsert_station(
1329
1329
  t_station, station_id=int(station_id), system_id=int(sys_id64), name=name,
1330
1330
  ls_from_star=ls_from_star_val, max_pad=max_pad,
1331
1331
  type_id=int(type_id), planetary=planetary, sflags=sflags, modified=st_modified
1332
1332
  )
1333
-
1333
+
1334
1334
  # ----------------------------
1335
1335
  # Ship vendor
1336
1336
  # ----------------------------
@@ -1356,7 +1356,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1356
1356
  stats["ship_stations"] += 1
1357
1357
  else:
1358
1358
  stats["ship_stations"] += 1
1359
-
1359
+
1360
1360
  # ----------------------------
1361
1361
  # Outfitting vendor
1362
1362
  # ----------------------------
@@ -1384,7 +1384,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1384
1384
  stats["outfit_stations"] += 1
1385
1385
  else:
1386
1386
  stats["outfit_stations"] += 1
1387
-
1387
+
1388
1388
  # ----------------------------
1389
1389
  # Market (commit check already happened before this station)
1390
1390
  # ----------------------------
@@ -1397,10 +1397,10 @@ class ImportPlugin(plugins.ImportPluginBase):
1397
1397
  if not got:
1398
1398
  # Could not acquire; try this station on a later pass
1399
1399
  continue
1400
-
1400
+
1401
1401
  self._trace(phase="market", decision="process",
1402
1402
  station_id=station_id, commodities=len(commodities))
1403
-
1403
+
1404
1404
  if force_baseline:
1405
1405
  wrote_i, wrote_si = self._upsert_market(
1406
1406
  tables, categories, station_id, commodities, mkt_ts
@@ -1436,7 +1436,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1436
1436
  stats["market_stations"] += 1
1437
1437
  else:
1438
1438
  stats["market_stations"] += 1
1439
-
1439
+
1440
1440
  # Baseline absent-station cleanup (global, after full stream)
1441
1441
  # We only remove baseline content (from_live=0 for markets; vendor links)
1442
1442
  # and only where modified <= json_ts, so anything newer (e.g. live/ZMQ) is preserved.
@@ -1453,7 +1453,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1453
1453
  )
1454
1454
  except Exception as e:
1455
1455
  self._warn(f"Absent-station cleanup skipped due to error: {e!r}")
1456
-
1456
+
1457
1457
  return stats
1458
1458
 
1459
1459
 
@@ -1473,9 +1473,9 @@ class ImportPlugin(plugins.ImportPluginBase):
1473
1473
  """
1474
1474
  if modified is None:
1475
1475
  modified = datetime.utcfromtimestamp(0)
1476
-
1476
+
1477
1477
  has_added_col = hasattr(t_system.c, "added")
1478
-
1478
+
1479
1479
  row = {
1480
1480
  "system_id": system_id,
1481
1481
  "name": name,
@@ -1484,7 +1484,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1484
1484
  }
1485
1485
  if has_added_col:
1486
1486
  row["added"] = 20 # EDSM on INSERT
1487
-
1487
+
1488
1488
  if db_utils.is_sqlite(self.session):
1489
1489
  db_utils.sqlite_upsert_modified(
1490
1490
  self.session, t_system,
@@ -1500,7 +1500,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1500
1500
  .values(added=20)
1501
1501
  )
1502
1502
  return
1503
-
1503
+
1504
1504
  if db_utils.is_mysql(self.session):
1505
1505
  db_utils.mysql_upsert_modified(
1506
1506
  self.session, t_system,
@@ -1516,7 +1516,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1516
1516
  .values(added=20)
1517
1517
  )
1518
1518
  return
1519
-
1519
+
1520
1520
  # Generic fallback
1521
1521
  sel_cols = [t_system.c.modified]
1522
1522
  if has_added_col:
@@ -1524,7 +1524,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1524
1524
  existing = self.session.execute(
1525
1525
  select(*sel_cols).where(t_system.c.system_id == system_id)
1526
1526
  ).first()
1527
-
1527
+
1528
1528
  if existing is None:
1529
1529
  self.session.execute(insert(t_system).values(**row))
1530
1530
  else:
@@ -1545,7 +1545,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1545
1545
  .where((t_system.c.system_id == system_id) & (t_system.c.added.is_(None)))
1546
1546
  .values(added=20)
1547
1547
  )
1548
-
1548
+
1549
1549
  def _upsert_station(
1550
1550
  self, t_station: Table, station_id: int, system_id: int, name: str,
1551
1551
  ls_from_star: Optional[float], max_pad: str, type_id: int, planetary: str,
@@ -1556,7 +1556,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1556
1556
  """
1557
1557
  if modified is None:
1558
1558
  modified = datetime.utcfromtimestamp(0)
1559
-
1559
+
1560
1560
  if db_utils.is_sqlite(self.session):
1561
1561
  db_utils.sqlite_upsert_modified(
1562
1562
  self.session, t_station,
@@ -1585,7 +1585,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1585
1585
  ),
1586
1586
  )
1587
1587
  return
1588
-
1588
+
1589
1589
  if db_utils.is_mysql(self.session):
1590
1590
  db_utils.mysql_upsert_modified(
1591
1591
  self.session, t_station,
@@ -1614,13 +1614,13 @@ class ImportPlugin(plugins.ImportPluginBase):
1614
1614
  ),
1615
1615
  )
1616
1616
  return
1617
-
1617
+
1618
1618
  # Generic fallback
1619
1619
  row = self.session.execute(
1620
1620
  select(t_station.c.system_id, t_station.c.modified)
1621
1621
  .where(t_station.c.station_id == station_id)
1622
1622
  ).first()
1623
-
1623
+
1624
1624
  if row is None:
1625
1625
  self.session.execute(
1626
1626
  insert(t_station).values(
@@ -1661,17 +1661,17 @@ class ImportPlugin(plugins.ImportPluginBase):
1661
1661
  values["system_id"] = system_id
1662
1662
  if db_modified is None or modified > db_modified:
1663
1663
  values["modified"] = modified
1664
-
1664
+
1665
1665
  self.session.execute(
1666
1666
  update(t_station)
1667
1667
  .where(t_station.c.station_id == station_id)
1668
1668
  .values(**values)
1669
1669
  )
1670
-
1670
+
1671
1671
  def _upsert_shipyard(self, tables: Dict[str, Table], station_id: int, ships: List[Dict[str, Any]], ts: datetime) -> int:
1672
1672
  t_ship, t_vendor = tables["Ship"], tables["ShipVendor"]
1673
1673
  ship_rows, vendor_rows = [], []
1674
-
1674
+
1675
1675
  for sh in ships:
1676
1676
  ship_id = sh.get("shipId")
1677
1677
  name = sh.get("name")
@@ -1679,7 +1679,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1679
1679
  continue
1680
1680
  ship_rows.append({"ship_id": ship_id, "name": name})
1681
1681
  vendor_rows.append({"ship_id": ship_id, "station_id": station_id, "modified": ts})
1682
-
1682
+
1683
1683
  if ship_rows:
1684
1684
  if db_utils.is_sqlite(self.session):
1685
1685
  db_utils.sqlite_upsert_simple(self.session, t_ship, rows=ship_rows, key_cols=("ship_id",), update_cols=("name",))
@@ -1692,7 +1692,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1692
1692
  self.session.execute(insert(t_ship).values(**r))
1693
1693
  elif exists[0] != r["name"]:
1694
1694
  self.session.execute(update(t_ship).where(t_ship.c.ship_id == r["ship_id"]).values(name=r["name"]))
1695
-
1695
+
1696
1696
  wrote = 0
1697
1697
  if vendor_rows:
1698
1698
  if db_utils.is_sqlite(self.session):
@@ -1720,11 +1720,11 @@ class ImportPlugin(plugins.ImportPluginBase):
1720
1720
  )
1721
1721
  wrote += 1
1722
1722
  return wrote
1723
-
1723
+
1724
1724
  def _upsert_outfitting(self, tables: Dict[str, Table], station_id: int, modules: List[Dict[str, Any]], ts: datetime) -> int:
1725
1725
  t_up, t_vendor = tables["Upgrade"], tables["UpgradeVendor"]
1726
1726
  up_rows, vendor_rows = [], []
1727
-
1727
+
1728
1728
  for mo in modules:
1729
1729
  up_id = mo.get("moduleId")
1730
1730
  name = mo.get("name")
@@ -1733,10 +1733,10 @@ class ImportPlugin(plugins.ImportPluginBase):
1733
1733
  ship = mo.get("ship")
1734
1734
  if up_id is None or name is None:
1735
1735
  continue
1736
-
1736
+
1737
1737
  up_rows.append({"upgrade_id": up_id, "name": name, "class": cls, "rating": rating, "ship": ship})
1738
1738
  vendor_rows.append({"upgrade_id": up_id, "station_id": station_id, "modified": ts})
1739
-
1739
+
1740
1740
  if up_rows:
1741
1741
  if db_utils.is_sqlite(self.session):
1742
1742
  db_utils.sqlite_upsert_simple(self.session, t_up, rows=up_rows, key_cols=("upgrade_id",),
@@ -1755,7 +1755,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1755
1755
  name=r["name"], **{"class": r["class"]}, rating=r["rating"], ship=r["ship"]
1756
1756
  )
1757
1757
  )
1758
-
1758
+
1759
1759
  wrote = 0
1760
1760
  if vendor_rows:
1761
1761
  if db_utils.is_sqlite(self.session):
@@ -1783,7 +1783,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1783
1783
  )
1784
1784
  wrote += 1
1785
1785
  return wrote
1786
-
1786
+
1787
1787
  def _upsert_market(
1788
1788
  self,
1789
1789
  tables: Dict[str, Table],
@@ -1795,25 +1795,25 @@ class ImportPlugin(plugins.ImportPluginBase):
1795
1795
  t_item, t_si = tables["Item"], tables["StationItem"]
1796
1796
  item_rows, link_rows = [], []
1797
1797
  wrote_items = 0
1798
-
1798
+
1799
1799
  for co in commodities:
1800
1800
  fdev_id = co.get("commodityId")
1801
1801
  name = co.get("name")
1802
1802
  cat_name = co.get("category")
1803
1803
  if fdev_id is None or name is None or cat_name is None:
1804
1804
  continue
1805
-
1805
+
1806
1806
  cat_id = categories.get(str(cat_name).lower())
1807
1807
  if cat_id is None:
1808
1808
  raise CleanExit(f'Unknown commodity category "{cat_name}"')
1809
-
1809
+
1810
1810
  item_rows.append({"item_id": fdev_id, "name": name, "category_id": cat_id, "fdev_id": fdev_id, "ui_order": 0})
1811
-
1811
+
1812
1812
  demand = co.get("demand")
1813
1813
  supply = co.get("supply")
1814
1814
  buy = co.get("buyPrice")
1815
1815
  sell = co.get("sellPrice")
1816
-
1816
+
1817
1817
  link_rows.append(dict(
1818
1818
  station_id=station_id,
1819
1819
  item_id=fdev_id,
@@ -1826,7 +1826,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1826
1826
  from_live=0,
1827
1827
  modified=ts,
1828
1828
  ))
1829
-
1829
+
1830
1830
  if item_rows:
1831
1831
  if db_utils.is_sqlite(self.session):
1832
1832
  db_utils.sqlite_upsert_simple(self.session, t_item, rows=item_rows, key_cols=("item_id",),
@@ -1850,7 +1850,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1850
1850
  name=r["name"], category_id=r["category_id"]
1851
1851
  )
1852
1852
  )
1853
-
1853
+
1854
1854
  wrote_links = 0
1855
1855
  if link_rows:
1856
1856
  if db_utils.is_sqlite(self.session):
@@ -1881,9 +1881,9 @@ class ImportPlugin(plugins.ImportPluginBase):
1881
1881
  .values(**r)
1882
1882
  )
1883
1883
  wrote_links += 1
1884
-
1884
+
1885
1885
  return (wrote_items, wrote_links)
1886
-
1886
+
1887
1887
  # ------------------------------
1888
1888
  # UI ordering
1889
1889
  # ------------------------------
@@ -1901,22 +1901,22 @@ class ImportPlugin(plugins.ImportPluginBase):
1901
1901
  if ui_order != expected:
1902
1902
  session.execute(update(t_item).where(t_item.c.item_id == item_id).values(ui_order=expected))
1903
1903
  expected += 1
1904
-
1904
+
1905
1905
  # ------------------------------
1906
1906
  # Rares import (via cache.processImportFile)
1907
1907
  # ------------------------------
1908
1908
  def _import_rareitems_edcd(self, rares_csv: Path, commodity_csv: Optional[Path] = None) -> None:
1909
1909
  """
1910
1910
  EDCD rares → TD.RareItem
1911
-
1911
+
1912
1912
  Supports CSV shapes:
1913
1913
  A) name, system, station
1914
1914
  B) id, symbol, market_id, category, name (FDevIDs canonical)
1915
-
1915
+
1916
1916
  Shape B maps: station_id = int(market_id), category by name.
1917
1917
  Clears RareItem then upserts by UNIQUE(name). Writes a CSV of skipped rows to tmp/.
1918
1918
  """
1919
-
1919
+
1920
1920
  def _norm(s: Optional[str]) -> str:
1921
1921
  if s is None: return ""
1922
1922
  s = s.strip().strip("'").strip('"')
@@ -1924,7 +1924,7 @@ class ImportPlugin(plugins.ImportPluginBase):
1924
1924
  s = s.replace("–", "-").replace("—", "-")
1925
1925
  s = " ".join(s.split())
1926
1926
  return s.casefold()
1927
-
1927
+
1928
1928
  def _kwant(fieldnames, *aliases) -> Optional[str]:
1929
1929
  if not fieldnames: return None
1930
1930
  canon = {}
@@ -1936,13 +1936,13 @@ class ImportPlugin(plugins.ImportPluginBase):
1936
1936
  k = a.strip().lower().replace("_", "").replace(" ", "")
1937
1937
  if k in canon: return canon[k]
1938
1938
  return None
1939
-
1939
+
1940
1940
  sess = None
1941
1941
  try:
1942
1942
  sess = self._open_session()
1943
1943
  tables = self._reflect_tables(sess.get_bind())
1944
1944
  t_sys, t_stn, t_cat, t_rare = tables["System"], tables["Station"], tables["Category"], tables["RareItem"]
1945
-
1945
+
1946
1946
  # Build lookups for Shape A
1947
1947
  stn_by_names: Dict[tuple[str, str], int] = {}
1948
1948
  for sid, sys_name, stn_name in sess.execute(
@@ -1950,50 +1950,50 @@ class ImportPlugin(plugins.ImportPluginBase):
1950
1950
  ).all():
1951
1951
  if sys_name and stn_name:
1952
1952
  stn_by_names[(_norm(sys_name), _norm(stn_name))] = int(sid)
1953
-
1953
+
1954
1954
  # Category name -> id (from DB)
1955
1955
  cat_id_by_name = {
1956
1956
  _norm(n): int(cid)
1957
1957
  for cid, n in sess.execute(select(t_cat.c.category_id, t_cat.c.name)).all()
1958
1958
  if n is not None
1959
1959
  }
1960
-
1960
+
1961
1961
  kept = skipped = 0
1962
1962
  skipped_no_station = 0
1963
1963
  skipped_no_category = 0
1964
1964
  out_rows: list[dict] = []
1965
1965
  skipped_rows: list[dict] = [] # <-- record details
1966
-
1966
+
1967
1967
  with open(rares_csv, "r", encoding="utf-8", newline="") as fh:
1968
1968
  reader = csv.DictReader(fh)
1969
1969
  hdr = [h for h in (reader.fieldnames or []) if h]
1970
1970
  hdr_canon = [h.lower().replace("_", "").replace(" ", "") for h in hdr]
1971
-
1971
+
1972
1972
  has_market_shape = all(x in hdr_canon for x in ["id", "symbol", "marketid", "category", "name"])
1973
1973
  has_name_shape = all(x in hdr_canon for x in ["name", "system", "station"])
1974
-
1974
+
1975
1975
  if not (has_market_shape or has_name_shape):
1976
1976
  raise CleanExit(
1977
1977
  "rare_commodity.csv headers not recognized. "
1978
1978
  f"Seen headers: {', '.join(reader.fieldnames or [])}. File: {rares_csv}"
1979
1979
  )
1980
-
1980
+
1981
1981
  if has_market_shape:
1982
1982
  # FDevIDs: station_id = int(market_id)
1983
1983
  k_name = _kwant(reader.fieldnames, "name")
1984
1984
  k_market = _kwant(reader.fieldnames, "market_id", "marketid")
1985
1985
  k_cat = _kwant(reader.fieldnames, "category", "categoryname")
1986
-
1986
+
1987
1987
  for row in reader:
1988
1988
  rn_raw = row.get(k_name)
1989
1989
  mk_raw = row.get(k_market)
1990
1990
  cat_raw= row.get(k_cat)
1991
-
1991
+
1992
1992
  try:
1993
1993
  station_id = int(mk_raw) if mk_raw is not None else None
1994
1994
  except (TypeError, ValueError):
1995
1995
  station_id = None
1996
-
1996
+
1997
1997
  # validate station exists
1998
1998
  if station_id is None or sess.execute(
1999
1999
  select(t_stn.c.station_id).where(t_stn.c.station_id == station_id)
@@ -2001,13 +2001,13 @@ class ImportPlugin(plugins.ImportPluginBase):
2001
2001
  skipped += 1; skipped_no_station += 1
2002
2002
  skipped_rows.append({"reason":"no_station","name":rn_raw,"market_id":mk_raw,"category":cat_raw})
2003
2003
  continue
2004
-
2004
+
2005
2005
  cid = cat_id_by_name.get(_norm(cat_raw))
2006
2006
  if cid is None:
2007
2007
  skipped += 1; skipped_no_category += 1
2008
2008
  skipped_rows.append({"reason":"no_category","name":rn_raw,"market_id":mk_raw,"category":cat_raw})
2009
2009
  continue
2010
-
2010
+
2011
2011
  out_rows.append({
2012
2012
  "name": rn_raw,
2013
2013
  "station_id": station_id,
@@ -2016,7 +2016,7 @@ class ImportPlugin(plugins.ImportPluginBase):
2016
2016
  "max_allocation": None,
2017
2017
  })
2018
2018
  kept += 1
2019
-
2019
+
2020
2020
  else:
2021
2021
  # Legacy/community: need commodity.csv to map product -> category
2022
2022
  name_to_catid: Dict[str, int] = {}
@@ -2025,7 +2025,7 @@ class ImportPlugin(plugins.ImportPluginBase):
2025
2025
  commodity_csv = files.get("commodity")
2026
2026
  if commodity_csv and Path(commodity_csv).exists():
2027
2027
  with open(commodity_csv, "r", encoding="utf-8", newline="") as fh2:
2028
- rd2 = _csv.DictReader(fh2)
2028
+ rd2 = csv.DictReader(fh2)
2029
2029
  k2_name = _kwant(rd2.fieldnames, "name","commodity","commodityname","product")
2030
2030
  k2_cat = _kwant(rd2.fieldnames, "category","categoryname")
2031
2031
  if k2_name and k2_cat:
@@ -2035,34 +2035,34 @@ class ImportPlugin(plugins.ImportPluginBase):
2035
2035
  cid = cat_id_by_name.get(c)
2036
2036
  if cid is not None:
2037
2037
  name_to_catid[n] = cid
-
+
  k_name = _kwant(reader.fieldnames, "name","commodity","commodityname","product")
  k_system = _kwant(reader.fieldnames, "system","systemname")
  k_station = _kwant(reader.fieldnames, "station","stationname")
-
+
  for row in reader:
  rn_raw = row.get(k_name)
  sys_raw = row.get(k_system)
  stn_raw = row.get(k_station)
  rn = _norm(rn_raw); sysn = _norm(sys_raw); stnn = _norm(stn_raw)
-
+
  if not rn or not sysn or not stnn:
  skipped += 1
  skipped_rows.append({"reason":"missing_fields","name":rn_raw,"system":sys_raw,"station":stn_raw})
  continue
-
+
  station_id = stn_by_names.get((sysn, stnn))
  if station_id is None:
  skipped += 1; skipped_no_station += 1
  skipped_rows.append({"reason":"no_station","name":rn_raw,"system":sys_raw,"station":stn_raw})
  continue
-
+
  cid = name_to_catid.get(rn)
  if cid is None:
  skipped += 1; skipped_no_category += 1
  skipped_rows.append({"reason":"no_category","name":rn_raw,"system":sys_raw,"station":stn_raw})
  continue
-
+
  out_rows.append({
  "name": rn_raw,
  "station_id": station_id,
@@ -2071,13 +2071,13 @@ class ImportPlugin(plugins.ImportPluginBase):
  "max_allocation": None,
  })
  kept += 1
-
+
  # Clear → upsert
  try:
  sess.execute(text('DELETE FROM "RareItem"'))
  except Exception:
  sess.execute(text("DELETE FROM RareItem"))
-
+
  if out_rows:
  if db_utils.is_sqlite(sess):
  db_utils.sqlite_upsert_simple(
@@ -2100,7 +2100,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  .values({k: r[k] for k in r.keys() if k != "name"})
  )
  sess.commit()
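The clear-then-upsert step above can be pictured with the stdlib sqlite3 module. The column list and the unique key on name are assumptions here, and sqlite_upsert_simple is the project's own helper rather than what is shown below:

# Illustrative sketch, not part of the diff: clear RareItem, then upsert the new rows.
import sqlite3

def replace_rare_items(db_path: str, rows: list) -> None:
    con = sqlite3.connect(db_path)
    try:
        con.execute('DELETE FROM "RareItem"')  # clear existing rows first
        con.executemany(
            'INSERT INTO "RareItem" (name, station_id, category_id, cost, max_allocation) '
            'VALUES (:name, :station_id, :category_id, :cost, :max_allocation) '
            'ON CONFLICT(name) DO UPDATE SET station_id = excluded.station_id',  # assumes unique name
            rows,
        )
        con.commit()
    finally:
        con.close()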
-
+
  # Write a CSV with skipped details
  if skipped_rows:
  outp = self.tmp_dir / "edcd_rares_skipped.csv"
@@ -2114,7 +2114,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  else:
  self._print(f"EDCD Rares: imported={kept:,} skipped={skipped:,} "
  f"(no_station={skipped_no_station:,}, no_category={skipped_no_category:,})")
-
+
  except Exception as e:
  if sess is not None:
  try: sess.rollback()
@@ -2131,7 +2131,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  # ------------------------------
  def _export_cache(self) -> None:
  """Export CSVs and regenerate TradeDangerous.prices — concurrently, with optional StationItem gating."""
-
+
  # Option/env gate for StationItem export (large file)
  def _opt_true(val: Optional[str]) -> bool:
  if val is None:
@@ -2139,9 +2139,9 @@ class ImportPlugin(plugins.ImportPluginBase):
  if isinstance(val, str):
  return val.strip().lower() in ("1", "true", "yes", "on", "y")
  return bool(val)
-
+
  skip_stationitems = _opt_true(self.getOption("skip_stationitems")) or _opt_true(os.environ.get("TD_SKIP_STATIONITEM_EXPORT"))
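The _opt_true gate above treats only an explicit whitelist of strings as true, so an unset option or env var never skips the StationItem export by accident. A standalone sketch of the same check:

# Illustrative sketch, not part of the diff: the truthy whitelist used for option/env gates.
import os

def flag_true(val) -> bool:
    if val is None:
        return False
    if isinstance(val, str):
        return val.strip().lower() in ("1", "true", "yes", "on", "y")
    return bool(val)

# Honoured from either the plugin option or the environment:
skip_stationitems = flag_true("yes") or flag_true(os.environ.get("TD_SKIP_STATIONITEM_EXPORT"))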
-
+
  # Heaviest tables first to maximize overlap
  tables = [
  "StationItem",
@@ -2158,14 +2158,14 @@ class ImportPlugin(plugins.ImportPluginBase):
  ]
  if skip_stationitems:
  tables = [t for t in tables if t != "StationItem"]
-
+
  # Worker count (env override allowed); +1 slot reserved for prices task
  try:
  workers = int(os.environ.get("TD_EXPORT_WORKERS", "4"))
  except ValueError:
  workers = 4
  workers = max(1, workers) + 1 # extra slot for the prices job
-
+
  def _export_one(table_name: str) -> str:
  sess = None
  try:
@@ -2178,18 +2178,18 @@ class ImportPlugin(plugins.ImportPluginBase):
  sess.close()
  except Exception:
  pass
-
+
  def _regen_prices() -> str:
  cache.regeneratePricesFile(self.tdb, self.tdenv)
  return "TradeDangerous.prices"
-
+
  self._print("Exporting to cache...")
  for t in tables:
  self._print(f" - {t}.csv")
  if skip_stationitems:
  self._warn("Skipping StationItem.csv export (requested).")
  self._print("Regenerating TradeDangerous.prices …")
-
+
  # Parallel export + prices regen, with conservative fallback
  try:
  with ThreadPoolExecutor(max_workers=workers) as ex:
@@ -2202,9 +2202,9 @@ class ImportPlugin(plugins.ImportPluginBase):
  for t in tables:
  _export_one(t)
  _regen_prices()
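The exporter runs each table in its own worker plus one reserved slot for the prices regeneration, and drops back to a plain sequential pass if the pool itself fails. A self-contained sketch of that shape, with export_one and regen_prices standing in for the real jobs:

# Illustrative sketch, not part of the diff: bounded parallel export with a sequential fallback.
import os
from concurrent.futures import ThreadPoolExecutor, as_completed

def export_all(tables, export_one, regen_prices):
    try:
        workers = int(os.environ.get("TD_EXPORT_WORKERS", "4"))
    except ValueError:
        workers = 4
    workers = max(1, workers) + 1  # extra slot reserved for the prices job
    try:
        with ThreadPoolExecutor(max_workers=workers) as ex:
            futures = [ex.submit(export_one, t) for t in tables]
            futures.append(ex.submit(regen_prices))
            for fut in as_completed(futures):
                fut.result()  # surface any worker exception
    except Exception:
        for t in tables:  # conservative fallback: do everything sequentially
            export_one(t)
        regen_prices()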
-
- self._print("Cache export completed.")
 
+ self._print("Cache export completed.")
+
  def _mirror_csv_exports(self) -> None:
  """
  If TD_CSV is set, mirror all CSVs emitted into tdenv.dataDir to TD_CSV.
@@ -2220,7 +2220,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  except Exception as e:
  self._warn(f"TD_CSV mirror: unable to create destination {dst_dir}: {e!r}")
  return
-
+
  copied = 0
  for src in src_dir.glob("*.csv"):
  try:
@@ -2228,9 +2228,9 @@ class ImportPlugin(plugins.ImportPluginBase):
  copied += 1
  except Exception as e:
  self._warn(f"TD_CSV mirror: failed to copy {src.name}: {e!r}")
-
+
  self._print(f"TD_CSV mirror: copied {copied} csv file(s) → {dst_dir}")
-
+
  def _export_and_mirror(self) -> None:
  """
  Run the normal cache/CSV export, then mirror CSVs to TD_CSV if set.
@@ -2241,7 +2241,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  self._export_cache() # existing exporter (unchanged)
  self._print(f"Cache export completed in {time.time()-t0:.2f}s")
  self._mirror_csv_exports()
-
+
  # ------------------------------
  # Categories cache
  # ------------------------------
@@ -2249,7 +2249,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  t_cat = tables["Category"]
  rows = session.execute(select(t_cat.c.category_id, t_cat.c.name)).all()
  return {str(name).lower(): int(cid) for (cid, name) in rows}
-
+
  # ------------------------------
  # Streaming JSON reader
  # ------------------------------
@@ -2276,7 +2276,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  pass
  # Fallback to whatever was imported at module top
  return ijson.items(fh, prefix)
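The streaming reader that follows walks the galaxy dump one system at a time instead of loading the whole array, which is what keeps memory flat on multi-gigabyte files. The core ijson pattern, shown standalone:

# Illustrative sketch, not part of the diff: stream elements of a huge top-level JSON array.
import ijson

def iter_systems(path: str):
    with open(path, "rb") as fh:
        # prefix "item" yields each element of the top-level array as a Python dict
        for system in ijson.items(fh, "item"):
            yield system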
-
+
  def _iter_top_level_json_array(self, fh: io.BufferedReader) -> Generator[Dict[str, Any], None, None]:
  """
  High-performance streaming reader for a huge top-level JSON array of systems.
@@ -2286,7 +2286,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  start_ts = time.time()
  last_tick_systems = 0
  TICK_EVERY = 256
-
+
  it = self._ijson_items(fh, 'item')
  for idx, obj in enumerate(it, 1):
  if (idx - last_tick_systems) >= TICK_EVERY:
@@ -2300,7 +2300,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  except Exception:
  pass
  yield obj
-
+
  # Final metric update at EOF
  try:
  pos = fh.tell()
@@ -2309,7 +2309,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  self._parse_rate = pos / elapsed
  except Exception:
  pass
-
+
  if self._is_tty:
  self._live_status("")
 
@@ -2333,7 +2333,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  "Drake-Class Carrier": (24, False),
  "Settlement": (25, True),
  }
-
+
  def _map_station_type(self, type_name: Optional[str]) -> Tuple[int, str]:
  if isinstance(type_name, str):
  res = self._station_type_map.get(type_name)
@@ -2341,7 +2341,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  type_id, is_planetary = res
  return type_id, "Y" if is_planetary else "N"
  return (0, "?")
-
+
  @staticmethod
  def _derive_pad_size(landing: Mapping[str, Any]) -> str:
  try:
@@ -2354,7 +2354,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  except Exception:
  pass
  return "?"
-
+
  def _resolve_batch_size(self) -> Optional[int]:
  """
  Decide commit batch size for *spansh* profile.
@@ -2366,7 +2366,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  return val
  except Exception:
  pass
-
+
  raw = os.environ.get("TD_LISTINGS_BATCH")
  if raw is not None:
  try:
@@ -2374,7 +2374,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  return envv if envv > 0 else None
  except ValueError:
  pass
-
+
  try:
  if db_utils.is_sqlite(self.session):
  return None
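_resolve_batch_size picks the commit batch in a fixed order: an explicit option first, then TD_LISTINGS_BATCH, then a backend default (no batching on SQLite, 50,000 rows otherwise, 5,000 if the backend cannot be determined). A sketch of that precedence with the option value passed in directly:

# Illustrative sketch, not part of the diff: option > env > backend default.
import os

def resolve_batch(option_value, is_sqlite: bool):
    if option_value is not None:
        try:
            val = int(option_value)
            if val > 0:
                return val
        except (TypeError, ValueError):
            pass
    raw = os.environ.get("TD_LISTINGS_BATCH")
    if raw is not None:
        try:
            envv = int(raw)
            return envv if envv > 0 else None
        except ValueError:
            pass
    # SQLite commits once at the end (None); server backends chunk their commits.
    return None if is_sqlite else 50_000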
@@ -2382,22 +2382,22 @@ class ImportPlugin(plugins.ImportPluginBase):
  return 50_000
  except Exception:
  pass
-
+
  return 5_000
-
+
  # ---- ts/format/logging helpers ----
  def _parse_ts(self, value: Any) -> Optional[datetime]:
  try:
  return db_utils.parse_ts(value) # UTC-naive, μs=0
  except Exception:
  return None
-
+
  @staticmethod
  def _format_hms(seconds: float) -> str:
  m, s = divmod(int(seconds), 60)
  h, m = divmod(m, 60)
  return f"{h}:{m:02d}:{s:02d}"
-
+
  def _fmt_bytes(self, n: float) -> str:
  units = ["B", "KiB", "MiB", "GiB", "TiB"]
  i = 0
@@ -2405,11 +2405,11 @@ class ImportPlugin(plugins.ImportPluginBase):
  n /= 1024.0
  i += 1
  return f"{int(n)} {units[i]}" if i == 0 else f"{n:.1f} {units[i]}"
-
+
  def _progress_line(self, stats: Dict[str, int]) -> None:
  """
  Single-line live status while importing.
-
+
  Modes:
  - default (verbose-ish): rich long line
  - compact: shorter, log-friendly line (enable with -O progress_compact=1 or TD_PROGRESS_COMPACT=1)
@@ -2419,7 +2419,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  return
  self._last_progress_time = now
  self._started_importing = True
-
+
  # Determine compact mode (CLI overrides env; default is rich/False)
  # Truthy whitelist: 1, true, yes, on, y (case-insensitive)
  _opt = self.getOption("progress_compact")
@@ -2429,20 +2429,20 @@ class ImportPlugin(plugins.ImportPluginBase):
  _env = os.getenv("TD_PROGRESS_COMPACT")
  _val = "" if _env is None else str(_env).strip().lower()
  compact = _val in {"1", "true", "yes", "on", "y"}
-
+
  parse_bytes = getattr(self, "_parse_bytes", 0)
  parse_rate = getattr(self, "_parse_rate", 0.0)
  systems = stats.get("systems", 0)
  stations = stats.get("stations", 0)
-
+
  wm = stats.get("market_writes", 0)
  wo = stats.get("outfit_writes", 0)
  ws = stats.get("ship_writes", 0)
-
+
  km = stats.get("market_stations", 0)
  ko = stats.get("outfit_stations", 0)
  ks = stats.get("ship_stations", 0)
-
+
  if compact:
  # Compact, log-friendly (newline prints)
  msg = (
@@ -2452,20 +2452,20 @@ class ImportPlugin(plugins.ImportPluginBase):
  )
  self._print(msg)
  return
-
+
  # Rich/long line (TTY-optimized; truncated only on TTY)
  msg = (
  f"Importing… {parse_bytes/1048576:.1f} MiB read {parse_rate/1048576:.1f} MiB/s "
  f"[Parsed - Systems: {systems:,} Stations: {stations:,}] "
  f"Checked(stations): mkt={km:,} outf={ko:,} shp={ks:,} "
  f"Written(stations): mkt={wm:,} outf={wo:,} shp={ws:,}"
-
+
  )
  self._live_status(msg)
-
+
  def _live_line(self, msg: str) -> None:
  self._live_status(msg)
-
+
  def _live_status(self, msg: str) -> None:
  """
  Live status line for TTY; plain prints for non-TTY.
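_live_status rewrites a single stderr line on a TTY and degrades to plain prints otherwise; the exact truncation logic is in the elided lines, so the sketch below only shows the general carriage-return technique:

# Illustrative sketch, not part of the diff: a single-line live status for TTYs.
import shutil
import sys

def live_status(msg: str) -> None:
    if sys.stderr.isatty():
        width = shutil.get_terminal_size().columns
        line = msg[: max(0, width - 1)]
        sys.stderr.write("\r" + line.ljust(max(0, width - 1)))  # overwrite the previous line
        sys.stderr.flush()
    else:
        print(msg)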
@@ -2485,7 +2485,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  self._print(msg)
  except Exception:
  self._print(msg)
-
+
  def _end_live_status(self) -> None:
  try:
  if self._is_tty:
@@ -2493,7 +2493,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  sys.stderr.flush()
  except Exception:
  pass
-
+
  # ---- printing/warnings ----
  def _print(self, *args, **kwargs):
  printer = getattr(self.tdenv, "print", None)
@@ -2501,14 +2501,14 @@ class ImportPlugin(plugins.ImportPluginBase):
  printer(*args, **kwargs)
  else:
  print(*args, **kwargs)
-
+
  def _warn(self, msg: str):
  if self._warn_enabled:
  self._print(f"WARNING: {msg}")
-
+
  def _error(self, msg: str):
  self._print(f"ERROR: {msg}")
-
+
  def _safe_close_session(self):
  try:
  if self.session is not None:
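_safe_close_session closes the session defensively so that cleanup never masks the error that led here; the pattern in isolation:

# Illustrative sketch, not part of the diff: best-effort session close.
def safe_close(session) -> None:
    if session is None:
        return
    try:
        session.close()
    except Exception:
        pass  # never raise from cleanup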