tradedangerous 11.5.2-py3-none-any.whl → 12.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tradedangerous might be problematic.

Files changed (39)
  1. tradedangerous/cache.py +567 -395
  2. tradedangerous/cli.py +2 -2
  3. tradedangerous/commands/TEMPLATE.py +25 -26
  4. tradedangerous/commands/__init__.py +8 -16
  5. tradedangerous/commands/buildcache_cmd.py +40 -10
  6. tradedangerous/commands/buy_cmd.py +57 -46
  7. tradedangerous/commands/commandenv.py +0 -2
  8. tradedangerous/commands/export_cmd.py +78 -50
  9. tradedangerous/commands/import_cmd.py +70 -34
  10. tradedangerous/commands/market_cmd.py +52 -19
  11. tradedangerous/commands/olddata_cmd.py +120 -107
  12. tradedangerous/commands/rares_cmd.py +122 -110
  13. tradedangerous/commands/run_cmd.py +118 -66
  14. tradedangerous/commands/sell_cmd.py +52 -45
  15. tradedangerous/commands/shipvendor_cmd.py +49 -234
  16. tradedangerous/commands/station_cmd.py +55 -485
  17. tradedangerous/commands/update_cmd.py +56 -420
  18. tradedangerous/csvexport.py +173 -162
  19. tradedangerous/gui.py +2 -2
  20. tradedangerous/plugins/eddblink_plug.py +389 -252
  21. tradedangerous/plugins/spansh_plug.py +2488 -821
  22. tradedangerous/prices.py +124 -142
  23. tradedangerous/templates/TradeDangerous.sql +6 -6
  24. tradedangerous/tradecalc.py +1227 -1109
  25. tradedangerous/tradedb.py +533 -384
  26. tradedangerous/tradeenv.py +12 -1
  27. tradedangerous/version.py +1 -1
  28. {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/METADATA +17 -4
  29. {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/RECORD +33 -39
  30. {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/WHEEL +1 -1
  31. tradedangerous/commands/update_gui.py +0 -721
  32. tradedangerous/jsonprices.py +0 -254
  33. tradedangerous/plugins/edapi_plug.py +0 -1071
  34. tradedangerous/plugins/journal_plug.py +0 -537
  35. tradedangerous/plugins/netlog_plug.py +0 -316
  36. tradedangerous/templates/database_changes.json +0 -6
  37. {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/entry_points.txt +0 -0
  38. {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info/licenses}/LICENSE +0 -0
  39. {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/top_level.txt +0 -0
tradedangerous/plugins/eddblink_plug.py

@@ -1,12 +1,14 @@
+ from __future__ import annotations
+
  """
  Import plugin that uses data files from
  https://elite.tromador.com/ to update the Database.
  """
- from __future__ import annotations
+

  from email.utils import parsedate_to_datetime
  from pathlib import Path
- from .. fs import file_line_count
+ from ..fs import file_line_count
  from .. import plugins, cache, transfers
  from ..misc import progress as pbar
  from ..plugins import PluginException
@@ -15,13 +17,15 @@ import csv
  import datetime
  import os
  import requests
- import sqlite3
  import typing

+ from sqlalchemy.orm import Session
+ from sqlalchemy import func, delete, select, exists, text
+ from ..db import orm_models as SA, lifecycle

  if typing.TYPE_CHECKING:
      from typing import Optional
-     from .. tradeenv import TradeEnv
+     from ..tradeenv import TradeEnv

  # Constants
  BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"
@@ -36,7 +40,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
      if not listings.exists():
          tdenv.NOTE("File not found, aborting: {}", listings)
          return 0
-
+
      tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
      count = file_line_count(listings)
      if count <= 1:
@@ -45,31 +49,45 @@
      else:
          tdenv.DEBUG0("Listings file is empty, nothing to do.")
          return 0
-
+
      return count + 1  # kfsone: Doesn't the header already make this + 1?


- def _make_item_id_lookup(tdenv: TradeEnv, db: sqlite3.Cursor) -> frozenset[int]:
-     """ helper: retrieve the list of commodities in database. """
+ def _make_item_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
+     """Helper: retrieve the list of commodities in database."""
      tdenv.DEBUG0("Getting list of commodities...")
-     return frozenset(cols[0] for cols in db.execute("SELECT item_id FROM Item"))
+     rows = session.query(SA.Item.item_id).all()
+     return frozenset(r[0] for r in rows)


- def _make_station_id_lookup(tdenv: TradeEnv, db: sqlite3.Cursor) -> frozenset[int]:
-     """ helper: retrieve the list of station IDs in database. """
+ def _make_station_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
+     """Helper: retrieve the list of station IDs in database."""
      tdenv.DEBUG0("Getting list of stations...")
-     return frozenset(cols[0] for cols in db.execute("SELECT station_id FROM Station"))
+     rows = session.query(SA.Station.station_id).all()
+     return frozenset(r[0] for r in rows)


- def _collect_station_modified_times(tdenv: TradeEnv, db: sqlite3.Cursor) -> dict[int, int]:
-     """ helper: build a list of the last modified time for all stations by id. """
+ def _collect_station_modified_times(tdenv: TradeEnv, session: Session) -> dict[int, int]:
+     """Helper: build a list of the last modified time for all stations by id (epoch seconds)."""
      tdenv.DEBUG0("Getting last-update times for stations...")
-     return dict(db.execute("SELECT station_id, strftime('%s', MIN(modified)) FROM StationItem GROUP BY station_id"))
+     rows = (
+         session.query(
+             SA.StationItem.station_id,
+             func.min(SA.StationItem.modified),
+         )
+         .group_by(SA.StationItem.station_id)
+         .all()
+     )
+     return {
+         station_id: int(modified.timestamp()) if modified else 0
+         for station_id, modified in rows
+     }


  class ImportPlugin(plugins.ImportPluginBase):
      """
-     Plugin that downloads data from eddb.
+     Import plugin that uses data files from
+     https://elite.tromador.com/ to update the Database.
      """
      pluginOptions = {
          'item': "Update Items using latest file from server. (Implies '-O system,station')",
@@ -90,10 +108,10 @@ class ImportPlugin(plugins.ImportPluginBase):
          'optimize': "Optimize ('vacuum') database after processing.",
          'solo': "Don't download crowd-sourced market data. (Implies '-O skipvend', supercedes '-O all', '-O clean', '-O listings'.)",
      }
-
+
      def __init__(self, tdb, tdenv):
          super().__init__(tdb, tdenv)
-
+
          self.dataPath = os.environ.get('TD_EDDB') or self.tdenv.tmpDir
          self.categoriesPath = Path("Category.csv")
          self.commoditiesPath = Path("Item.csv")
@@ -111,10 +129,10 @@ class ImportPlugin(plugins.ImportPluginBase):
          self.listingsPath = Path("listings.csv")
          self.liveListingsPath = Path("listings-live.csv")
          self.pricesPath = Path("listings.prices")
-
+
      def now(self):
          return datetime.datetime.now()
-
+
      def downloadFile(self, path):
          """
          Fetch the latest dumpfile from the website if newer than local copy.
@@ -123,9 +141,9 @@ class ImportPlugin(plugins.ImportPluginBase):
              localPath = Path(self.tdb.dataPath, path)
          else:
              localPath = Path(self.dataPath, path)
-
-         url = BASE_URL + str(path)
-
+
+         url = BASE_URL + str(path)
+
          self.tdenv.NOTE("Checking for update to '{}'.", path)
          # Use an HTTP Request header to obtain the Last-Modified and Content-Length headers.
          # Also, tell the server to give us the un-compressed length of the file by saying
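
The freshness logic in downloadFile, spread across this hunk and the next, boils down to: request the headers only, parse Last-Modified, and compare against the local file's mtime. A standalone sketch of that check, using the same requests and parsedate_to_datetime imports as the plugin (the function name and the 30-second timeout are illustrative):

    from email.utils import parsedate_to_datetime
    from pathlib import Path

    import requests

    def is_remote_newer(url: str, local: Path) -> bool:
        # HEAD fetches headers without the body; the plugin also sends
        # Accept-Encoding so the server reports a usable content-length.
        resp = requests.head(url, headers={"Accept-Encoding": "gzip"}, timeout=30)
        last_modified = resp.headers.get("last-modified")
        if not last_modified:
            return True  # nothing to compare against; treat remote as newer
        remote_mtime = parsedate_to_datetime(last_modified).timestamp()
        return (not local.exists()) or local.stat().st_mtime < remote_mtime
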
@@ -136,277 +154,264 @@
          except Exception as e:  # pylint: disable=broad-exception-caught
              self.tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", url, str(e))
              return False
-
+
          last_modified = response.headers.get("last-modified")
          dump_mod_time = parsedate_to_datetime(last_modified).timestamp()
-
+
          if Path.exists(localPath):
              local_mod_time = localPath.stat().st_mtime
              if local_mod_time >= dump_mod_time:
                  self.tdenv.DEBUG0("'{}': Dump is not more recent than Local.", path)
                  return False
-
+
          # The server doesn't know the gzip'd length, and we won't see the gzip'd data,
          # so we want the actual text-only length. Capture it here so we can tell the
          # transfer mechanism how big the file is going to be.
          length = response.headers.get("content-length")
-
+
          self.tdenv.NOTE("Downloading file '{}'.", path)
          transfers.download(self.tdenv, url, localPath, chunkSize=16384, length=length)
-
+
          # Change the timestamps on the file so they match the website
          os.utime(localPath, (dump_mod_time, dump_mod_time))
-
+
          return True
-
+
      def purgeSystems(self):
          """
          Purges systems from the System table that do not have any stations claiming to be in them.
          Keeps table from becoming too large because of fleet carriers moving to unpopulated systems.
          """
-         db = self.tdb.getDB()
          self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
-
-         db.execute("""
-             DELETE FROM System
-             WHERE NOT EXISTS(SELECT 1 FROM Station WHERE Station.system_id = System.system_id)
-         """)
-         db.commit()
-
+
+         Session = self.tdb.Session
+         with Session.begin() as session:
+             subq = select(SA.Station.system_id).where(SA.Station.system_id == SA.System.system_id)
+             stmt = delete(SA.System).where(~exists(subq))
+             session.execute(stmt)
+
          self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
-
+
      def importListings(self, listings_file):
          """
-         Updates the market data (AKA the StationItem table) using listings_file
-         Writes directly to database.
+         Updates the market data (StationItem) using `listings_file`.
+
+         Rules:
+           - If a row doesn't exist in DB → insert (copy CSV exactly).
+           - If it exists → update only when CSV.modified > DB.modified.
+           - If CSV.modified <= DB.modified → do nothing (no field changes).
          """
+         from tradedangerous.db.utils import (
+             get_import_batch_size,
+             begin_bulk_mode,
+             end_bulk_mode,
+             get_upsert_fn,
+         )
+
          listings_path = Path(self.dataPath, listings_file).absolute()
-         from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
-
+         from_live = int(listings_path != Path(self.dataPath, self.listingsPath).absolute())
+
          self.tdenv.NOTE("Checking listings")
          total = _count_listing_entries(self.tdenv, listings_path)
          if not total:
              self.tdenv.NOTE("No listings")
              return
-
-         self.tdenv.NOTE("Processing market data from {}: Start time = {}. Live = {}", listings_file, self.now(), from_live)
-
-         db = self.tdb.getDB()
-         stmt_unliven_station = """UPDATE StationItem SET from_live = 0 WHERE station_id = ?"""
-         stmt_flush_station = """DELETE from StationItem WHERE station_id = ?"""
-         stmt_add_listing = """
-             INSERT OR IGNORE INTO StationItem (
-                 station_id, item_id, modified, from_live,
-                 demand_price, demand_units, demand_level,
-                 supply_price, supply_units, supply_level
-             )
-             VALUES (
-                 ?, ?, datetime(?, 'unixepoch'), ?,
-                 ?, ?, ?,
-                 ?, ?, ?
-             )
-         """
-
-         # Fetch all the items IDS
-         item_lookup = _make_item_id_lookup(self.tdenv, db.cursor())
-         station_lookup = _make_station_id_lookup(self.tdenv, db.cursor())
-         last_station_update_times = _collect_station_modified_times(self.tdenv, db.cursor())
-
-         cur_station = None
+
+         self.tdenv.NOTE(
+             "Processing market data from {}: Start time = {}. Live = {}",
+             listings_file, self.now(), bool(from_live)
+         )
+
+         Session = self.tdb.Session
+
+         # Prefetch item/station IDs for early filtering
+         with Session.begin() as session:
+             item_lookup = _make_item_id_lookup(self.tdenv, session)
+             station_lookup = _make_station_id_lookup(self.tdenv, session)
+
          is_debug = self.tdenv.debug > 0
          self.tdenv.DEBUG0("Processing entries...")
-
-         # Try to find a balance between doing too many commits where we fail
-         # to get any benefits from constructing transactions, and blowing up
-         # the WAL and memory usage by making massive transactions.
-         max_transaction_items, transaction_items = 32 * 1024, 0
-         with pbar.Progress(total, 40, prefix="Processing", style=pbar.LongRunningCountBar) as prog,\
-                 listings_path.open("r", encoding="utf-8", errors="ignore") as fh:
-             cursor = db.cursor()
-             cursor.execute("BEGIN TRANSACTION")
-
-             for listing in csv.DictReader(fh):
-                 prog.increment(1)
-
-                 station_id = int(listing['station_id'])
-                 if station_id not in station_lookup:
-                     continue
-
-                 listing_time = int(listing['collected_at'])
-
-                 if station_id != cur_station:
-                     # commit anything from the previous station, get a new cursor
-                     if transaction_items >= max_transaction_items:
-                         cursor.execute("COMMIT")
-                         transaction_items = 0
-                         cursor.execute("BEGIN TRANSACTION")
-                     cur_station, skip_station = station_id, False
-
-                     # Check if listing already exists in DB and needs updated.
-                     last_modified: int = int(last_station_update_times.get(station_id, 0))
-                     if last_modified:
-                         # When the listings.csv data matches the database, update to make from_live == 0.
-                         if listing_time == last_modified and not from_live:
-                             if is_debug:
-                                 self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live' (old={last_modified}, listing={listing_time}).")
-                             cursor.execute(stmt_unliven_station, (cur_station,))
-                             transaction_items += 1
-                             skip_station = True
-                             continue
-
-                         # Unless the import file data is newer, nothing else needs to be done for this station,
-                         # so the rest of the listings for this station can be skipped.
-                         if listing_time <= last_modified:
-                             skip_station = True
+
+         with pbar.Progress(total, 40, prefix="Processing", style=pbar.LongRunningCountBar) as prog, \
+                 listings_path.open("r", encoding="utf-8", errors="ignore") as fh, \
+                 Session() as session:
+
+             token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
+             try:
+                 commit_batch = get_import_batch_size(session, profile="eddblink")
+                 execute_batch = commit_batch or 10000  # cap statement size even if single final commit
+
+                 # Upsert: keys + guarded fields (including from_live), guarded by 'modified'
+                 table = SA.StationItem.__table__
+                 key_cols = ("station_id", "item_id")
+                 update_cols = (
+                     "demand_price", "demand_units", "demand_level",
+                     "supply_price", "supply_units", "supply_level",
+                     "from_live",
+                 )
+                 upsert = get_upsert_fn(
+                     session,
+                     table,
+                     key_cols=key_cols,
+                     update_cols=update_cols,
+                     modified_col="modified",
+                     always_update=(),  # IMPORTANT: no unconditional updates
+                 )
+
+                 batch_rows = []
+                 since_commit = 0
+
+                 for listing in csv.DictReader(fh):
+                     prog.increment(1)
+                     try:
+                         station_id = int(listing["station_id"])
+                         if station_id not in station_lookup:
                              continue
-
-                         # The data from the import file is newer, so we need to delete the old data for this station.
-                         if is_debug:
-                             self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station} (old={last_modified}, listing={listing_time}).")
-                         cursor.execute(stmt_flush_station, (cur_station,))
-                         transaction_items += 1
-                         last_station_update_times[station_id] = listing_time
-
-                 # station skip lasts until we change station id.
-                 if skip_station:
-                     continue
-
-                 # Since this station is not being skipped, get the data and prepare for insertion into the DB.
-                 item_id = int(listing['commodity_id'])
-                 # listings.csv includes rare items, which we are ignoring.
-                 if item_id not in item_lookup:
-                     continue
-
-                 demand_price = int(listing['sell_price'])
-                 demand_units = int(listing['demand'])
-                 demand_level = int(listing.get('demand_bracket') or '-1')
-                 supply_price = int(listing['buy_price'])
-                 supply_units = int(listing['supply'])
-                 supply_level = int(listing.get('supply_bracket') or '-1')
-
-                 if is_debug:
-                     self.tdenv.DEBUG1(f"Inserting new listing data for {station_id}.")
-                 cursor.execute(stmt_add_listing, (
-                     station_id, item_id, listing_time, from_live,
-                     demand_price, demand_units, demand_level,
-                     supply_price, supply_units, supply_level,
-                 ))
-                 transaction_items += 1
-
-         # These will take a little while, which has four steps, so we'll make it a counter.
+
+                         item_id = int(listing["commodity_id"])
+                         if item_id not in item_lookup:
+                             continue  # skip rare items (not in Item table)
+
+                         listing_time = int(listing["collected_at"])
+                         dt_listing_time = datetime.datetime.utcfromtimestamp(listing_time)
+
+                         row = {
+                             "station_id": station_id,
+                             "item_id": item_id,
+                             "modified": dt_listing_time,  # guard column
+                             "from_live": from_live,  # copied exactly when updating/inserting
+                             "demand_price": int(listing["sell_price"]),
+                             "demand_units": int(listing["demand"]),
+                             "demand_level": int(listing.get("demand_bracket") or "-1"),
+                             "supply_price": int(listing["buy_price"]),
+                             "supply_units": int(listing["supply"]),
+                             "supply_level": int(listing.get("supply_bracket") or "-1"),
+                         }
+                         batch_rows.append(row)
+                         since_commit += 1
+
+                         if len(batch_rows) >= execute_batch:
+                             upsert(batch_rows)
+                             batch_rows.clear()
+
+                             if commit_batch and since_commit >= commit_batch:
+                                 session.commit()
+                                 since_commit = 0
+
+                     except Exception as e:  # pylint: disable=broad-exception-caught
+                         self.tdenv.WARN("Bad listing row (skipped): {} error: {}", listing, e)
+                         continue
+
+                 if batch_rows:
+                     upsert(batch_rows)
+                     batch_rows.clear()
+
+                 session.commit()
+
+             finally:
+                 end_bulk_mode(session, token)
+
          with pbar.Progress(1, 40, prefix="Saving"):
-             # Do a final commit to be sure
-             cursor.execute("COMMIT")
-
+             pass
+
          if self.getOption("optimize"):
              with pbar.Progress(1, 40, prefix="Optimizing"):
-                 db.execute("VACUUM")
-
-         self.tdb.close()
-
+                 if self.tdb.engine.dialect.name == "sqlite":
+                     with Session.begin() as session:
+                         session.execute(text("VACUUM"))
+
          self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
-
+
+
      def run(self):
+         """
+         EDDN/EDDB link importer.
+
+         Refactored DB flow:
+           - No dialect-specific logic in the plugin.
+           - Preflight uses TradeDB.reloadCache() (which centralizes sanity via lifecycle.ensure_fresh_db).
+           - For '--clean' → do a single full rebuild with the RareItem dance.
+           - Otherwise, if static CSVs changed → incrementally import only those tables (no drop/recreate).
+           - Listings import and .prices regeneration unchanged.
+         """
+         import os
+         import time
+         from pathlib import Path
+         from tradedangerous import cache
+         # bulk-mode helpers for the incremental static import session
+         from tradedangerous.db.utils import begin_bulk_mode, end_bulk_mode
+
          self.tdenv.ignoreUnknown = True
-
-         # Create the /eddb folder for downloading the source files if it doesn't exist.
-         try:
-             Path(str(self.dataPath)).mkdir()
-         except FileExistsError:
-             pass
-
-         # Run 'listings' by default:
-         # If no options, or if only 'force', and/or 'skipvend',
-         # have been passed, enable 'listings'.
+         self.tdb.dataPath.mkdir(parents=True, exist_ok=True)
+
+         # Enable 'listings' by default unless other explicit options are present
          default = True
          for option in self.options:
              if option not in ('force', 'skipvend', 'purge'):
                  default = False
          if default:
              self.options["listings"] = True
-
-         # We can probably safely assume that the plugin
-         # has never been run if the db file doesn't exist.
-         if not self.tdb.dbPath.exists():
-             self.options["clean"] = True
-
+
+         # -----------------------------
+         # Optional CLEAN: prepare inputs
+         # -----------------------------
          if self.getOption("clean"):
-             # Rebuild the tables from scratch. Must be done on first run of plugin.
-             # Can be done at anytime with the "clean" option.
+             # Remove CSVs so downloads become the new source of truth
              for name in [
-                 "Category",
-                 "Item",
-                 "RareItem",
-                 "Ship",
-                 "ShipVendor",
-                 "Station",
-                 "System",
-                 "Upgrade",
-                 "UpgradeVendor",
-                 "FDevShipyard",
-                 "FDevOutfitting",
+                 "Category", "Item", "RareItem",
+                 "Ship", "ShipVendor",
+                 "Station", "System",
+                 "Upgrade", "UpgradeVendor",
+                 "FDevShipyard", "FDevOutfitting",
              ]:
-                 file = self.tdb.dataPath / Path(name + ".csv")
+                 f = self.tdb.dataPath / f"{name}.csv"
                  try:
-                     os.remove(str(file))
+                     os.remove(str(f))
                  except FileNotFoundError:
                      pass
-
-             try:
-                 os.remove(str(self.tdb.dataPath) + "/TradeDangerous.db")
-             except FileNotFoundError:
-                 pass
+
+             # Remove .prices (will be regenerated later)
              try:
-                 os.remove(str(self.tdb.dataPath) + "/TradeDangerous.prices")
+                 os.remove(str(self.tdb.dataPath / "TradeDangerous.prices"))
              except FileNotFoundError:
                  pass
-
-             # Because this is a clean run, we need to temporarily rename the RareItem.csv,
-             # otherwise TD will crash trying to insert the rare items to the database,
-             # because there's nothing in the Station table it tries to pull from.
-             ri_path = self.tdb.dataPath / Path("RareItem.csv")
-             rib_path = ri_path.with_suffix(".tmp")
-             if ri_path.exists():
-                 if rib_path.exists():
-                     rib_path.unlink()
-                 ri_path.rename(rib_path)
-
-             self.tdb.close()
-
-             self.tdb.reloadCache()
-             self.tdb.close()
-
-             # Now it's safe to move RareItems back.
-             if ri_path.exists():
-                 ri_path.unlink()
-             if rib_path.exists():
-                 rib_path.rename(ri_path)
-
+
+             # Stash RareItem.csv so a full rebuild doesn't hit FK issues
+             self._ri_path = self.tdb.dataPath / "RareItem.csv"
+             self._rib_path = self._ri_path.with_suffix(".tmp")
+             if self._ri_path.exists():
+                 if self._rib_path.exists():
+                     self._rib_path.unlink()
+                 self._ri_path.rename(self._rib_path)
+
+             # Full update after downloads
              self.options["all"] = True
             self.options["force"] = True
-
-         # Select which options will be updated
+
+         # --------------------------------
+         # Option cascade (unchanged logic)
+         # --------------------------------
          if self.getOption("listings"):
              self.options["item"] = True
              self.options["station"] = True
-
+
          if self.getOption("shipvend"):
              self.options["ship"] = True
              self.options["station"] = True
-
+
          if self.getOption("upvend"):
              self.options["upgrade"] = True
              self.options["station"] = True
-
+
          if self.getOption("item"):
              self.options["station"] = True
-
+
          if self.getOption("rare"):
              self.options["station"] = True
-
+
          if self.getOption("station"):
              self.options["system"] = True
-
+
          if self.getOption("all"):
              self.options["item"] = True
              self.options["rare"] = True
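
The helpers used in the hunk above (get_upsert_fn, get_import_batch_size, begin_bulk_mode, end_bulk_mode) come from the new tradedangerous.db.utils module, whose source is not included in this file's diff. On SQLite, the guarded upsert the docstring describes (insert when the row is missing; update only when the incoming modified value is strictly newer) maps onto INSERT ... ON CONFLICT DO UPDATE ... WHERE. A minimal sketch of what such a helper could emit, reusing the SA.StationItem table from this file (an assumption for illustration, not the shipped implementation):

    from sqlalchemy.dialects.sqlite import insert as sqlite_insert

    def upsert_station_items(session, rows: list[dict]) -> None:
        # rows: dicts shaped like the 'row' built in the CSV loop above.
        table = SA.StationItem.__table__
        stmt = sqlite_insert(table).values(rows)
        stmt = stmt.on_conflict_do_update(
            index_elements=["station_id", "item_id"],
            set_={
                col: stmt.excluded[col]
                for col in (
                    "modified", "from_live",
                    "demand_price", "demand_units", "demand_level",
                    "supply_price", "supply_units", "supply_level",
                )
            },
            # Guard: only overwrite rows the CSV data is strictly newer than.
            where=stmt.excluded.modified > table.c.modified,
        )
        session.execute(stmt)
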
@@ -417,73 +422,205 @@
              self.options["upgrade"] = True
              self.options["upvend"] = True
              self.options["listings"] = True
-
+
          if self.getOption("solo"):
              self.options["listings"] = False
              self.options["skipvend"] = True
-
+
          if self.getOption("skipvend"):
              self.options["shipvend"] = False
              self.options["upvend"] = False
-
-         # Download required files and update tables.
-         buildCache = False
+
+         # ---------------------------------------------
+         # Downloads — track which static CSVs changed
+         # ---------------------------------------------
+         changed = {
+             "System": False,
+             "Station": False,
+             "Category": False,
+             "Item": False,
+             "RareItem": False,
+             "Ship": False,
+             "ShipVendor": False,
+             "Upgrade": False,
+             "UpgradeVendor": False,
+             "FDevShipyard": False,
+             "FDevOutfitting": False,
+         }
+
+         # EDCD mirrors
          if self.getOption("upgrade"):
              if self.downloadFile(self.upgradesPath) or self.getOption("force"):
                  transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
-                 buildCache = True
-
+                 changed["Upgrade"] = True
+                 changed["FDevOutfitting"] = True
+
          if self.getOption("ship"):
              if self.downloadFile(self.shipPath) or self.getOption("force"):
                  transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
-                 buildCache = True
-
+                 changed["Ship"] = True
+                 changed["FDevShipyard"] = True
+
+         # Core static tables
          if self.getOption("rare"):
              if self.downloadFile(self.rareItemPath) or self.getOption("force"):
-                 buildCache = True
-
+                 changed["RareItem"] = True
+
          if self.getOption("shipvend"):
              if self.downloadFile(self.shipVendorPath) or self.getOption("force"):
-                 buildCache = True
-
+                 changed["ShipVendor"] = True
+
          if self.getOption("upvend"):
              if self.downloadFile(self.upgradeVendorPath) or self.getOption("force"):
-                 buildCache = True
-
+                 changed["UpgradeVendor"] = True
+
          if self.getOption("system"):
              if self.downloadFile(self.sysPath) or self.getOption("force"):
-                 buildCache = True
-
+                 changed["System"] = True
+
          if self.getOption("station"):
              if self.downloadFile(self.stationsPath) or self.getOption("force"):
-                 buildCache = True
-
+                 changed["Station"] = True
+
          if self.getOption("item"):
              if self.downloadFile(self.commoditiesPath) or self.getOption("force"):
                  self.downloadFile(self.categoriesPath)
-                 buildCache = True
-
-         # Remake the .db files with the updated info.
-         if buildCache:
-             self.tdb.close()
+                 changed["Item"] = True
+                 changed["Category"] = True
+
+         # -------------------------------------------------------------
+         # Preflight sanity (user-visible): make the pause explicit
+         # -------------------------------------------------------------
+         ri_path = getattr(self, "_ri_path", self.tdb.dataPath / "RareItem.csv")
+         rib_path = getattr(self, "_rib_path", ri_path.with_suffix(".tmp"))
+         rareitem_stashed = False
+         self.tdenv.NOTE("Preflight: verifying database (this can take a while on first run)...")
+         t0 = time.monotonic()
+         try:
+             if ri_path.exists():
+                 if not rib_path.exists() and not self.getOption("clean"):
+                     ri_path.rename(rib_path)
+                     rareitem_stashed = True
+
+             # This may no-op or may call buildCache() internally
              self.tdb.reloadCache()
+         finally:
+             if rib_path.exists() and (self.getOption("clean") or rareitem_stashed):
+                 if ri_path.exists():
+                     ri_path.unlink()
+                 rib_path.rename(ri_path)
+         t1 = time.monotonic()
+         self.tdenv.NOTE("Preflight complete in {:.1f}s.", (t1 - t0))
+
+         # -----------------------------------------------------
+         # Rebuild or Incremental Import?
+         # -----------------------------------------------------
+         if self.getOption("clean"):
+             self.tdenv.NOTE("Performing full rebuild...")
+             self.tdb.close()
+             cache.buildCache(self.tdb, self.tdenv)
              self.tdb.close()
-
+             self.tdenv.NOTE("Full rebuild complete.")
+         else:
+             # Incremental import of only changed tables (no schema drop)
+             IMPORT_ORDER = [
+                 "System",
+                 "Station",
+                 "Category",
+                 "Item",
+                 "RareItem",
+                 "Ship",
+                 "ShipVendor",
+                 "Upgrade",
+                 "UpgradeVendor",
+                 "FDevShipyard",
+                 "FDevOutfitting",
+             ]
+
+             any_changed = any(changed.values())
+             if any_changed:
+                 self.tdenv.NOTE("Incremental import starting ({} tables changed).", sum(1 for v in changed.values() if v))
+                 with self.tdb.Session() as session:
+                     token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
+                     try:
+                         for table_name in IMPORT_ORDER:
+                             if not changed.get(table_name):
+                                 continue
+                             import_path = (self.tdb.dataPath / f"{table_name}.csv").resolve()
+                             try:
+                                 # Determine a cheap per-table total (header-aware) for display only.
+                                 try:
+                                     total = max(file_line_count(import_path) - 1, 0)
+                                 except Exception:
+                                     total = 0
+
+                                 prefix = f"Processing {table_name}"
+                                 # Mirror listings-style progress: single-line if TTY, periodic otherwise.
+                                 with pbar.Progress(total or 1, 40, prefix=prefix, style=pbar.LongRunningCountBar) as prog:
+
+                                     def _cb(stats=None, **kwargs):
+                                         """
+                                         Liberal progress callback used by cache.processImportFile.
+                                         Accepts either:
+                                           - int → increment by that many rows
+                                           - dict with keys inc/rows/count → increment by that value
+                                           - anything else → default increment of 1
+                                         """
+                                         inc = 1
+                                         if isinstance(stats, int):
+                                             inc = max(int(stats), 1)
+                                         elif isinstance(stats, dict):
+                                             for k in ("inc", "rows", "count"):
+                                                 if k in stats:
+                                                     try:
+                                                         inc = max(int(stats[k]), 1)
+                                                         break
+                                                     except Exception:
+                                                         pass
+                                         prog.increment(inc)
+
+                                     cache.processImportFile(
+                                         self.tdenv,
+                                         session,
+                                         import_path,
+                                         table_name,
+                                         line_callback=_cb,
+                                         call_args={"table": table_name, "total": total},
+                                     )
+
+                                 session.commit()
+                                 self.tdenv.DEBUG0("Incremental import OK: {} ({})", table_name, import_path)
+
+                             except FileNotFoundError:
+                                 self.tdenv.NOTE("{} missing; skipped incremental import ({})", table_name, import_path)
+                             except StopIteration:
+                                 self.tdenv.NOTE("{} exists but is empty; skipped incremental import ({})", table_name, import_path)
+                             except Exception as e:
+                                 self.tdenv.WARN("Incremental import failed for {}: {} ({})", table_name, e, import_path)
+                                 session.rollback()
+                                 self.tdenv.NOTE("Escalating to full rebuild due to import failure.")
+                                 self.tdb.close()
+                                 cache.buildCache(self.tdb, self.tdenv)
+                                 self.tdb.close()
+                                 break
+                     finally:
+                         end_bulk_mode(session, token)
+                 self.tdenv.NOTE("Incremental import finished.")
+
+
          if self.getOption("purge"):
              self.purgeSystems()
-             self.tdb.close()
-
+
+         # Listings import (prices)
          if self.getOption("listings"):
              if self.downloadFile(self.listingsPath) or self.getOption("force"):
                  self.importListings(self.listingsPath)
              if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
                  self.importListings(self.liveListingsPath)
-
+
          if self.getOption("listings"):
              self.tdenv.NOTE("Regenerating .prices file.")
              cache.regeneratePricesFile(self.tdb, self.tdenv)
-
+
          self.tdenv.NOTE("Import completed.")
-
-         # TD doesn't need to do anything, tell it to just quit.
          return False
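
The bulk-mode helpers are likewise only called in this file, never defined. A hypothetical sketch of the begin/end token pattern they imply, assuming SQLite PRAGMA tuning for the duration of the import (the shipped tradedangerous.db.utils helpers may do something different entirely):

    from sqlalchemy import text

    def begin_bulk_mode(session, profile: str = "", phase: str = "") -> dict:
        # Capture current settings so end_bulk_mode can restore them.
        token = {}
        if session.get_bind().dialect.name == "sqlite":
            token["synchronous"] = session.execute(text("PRAGMA synchronous")).scalar()
            # Trade durability for throughput while the import runs.
            session.execute(text("PRAGMA synchronous = OFF"))
        return token

    def end_bulk_mode(session, token: dict) -> None:
        # Restore whatever begin_bulk_mode changed.
        if "synchronous" in token:
            session.execute(text(f"PRAGMA synchronous = {token['synchronous']}"))
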