tradedangerous 12.0.5-py3-none-any.whl → 12.0.6-py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.


This version of tradedangerous might be problematic.

Files changed (30)
  1. tradedangerous/cache.py +135 -133
  2. tradedangerous/commands/buildcache_cmd.py +7 -7
  3. tradedangerous/commands/buy_cmd.py +4 -4
  4. tradedangerous/commands/export_cmd.py +11 -11
  5. tradedangerous/commands/import_cmd.py +12 -12
  6. tradedangerous/commands/market_cmd.py +17 -17
  7. tradedangerous/commands/olddata_cmd.py +18 -18
  8. tradedangerous/commands/rares_cmd.py +30 -30
  9. tradedangerous/commands/run_cmd.py +21 -21
  10. tradedangerous/commands/sell_cmd.py +5 -5
  11. tradedangerous/corrections.py +1 -1
  12. tradedangerous/csvexport.py +20 -20
  13. tradedangerous/db/adapter.py +9 -9
  14. tradedangerous/db/config.py +4 -4
  15. tradedangerous/db/engine.py +12 -12
  16. tradedangerous/db/lifecycle.py +28 -28
  17. tradedangerous/db/orm_models.py +42 -42
  18. tradedangerous/db/paths.py +3 -3
  19. tradedangerous/plugins/eddblink_plug.py +106 -251
  20. tradedangerous/plugins/spansh_plug.py +253 -253
  21. tradedangerous/prices.py +21 -21
  22. tradedangerous/tradedb.py +85 -85
  23. tradedangerous/tradeenv.py +2 -2
  24. tradedangerous/version.py +1 -1
  25. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.6.dist-info}/METADATA +1 -1
  26. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.6.dist-info}/RECORD +30 -30
  27. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.6.dist-info}/WHEEL +0 -0
  28. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.6.dist-info}/entry_points.txt +0 -0
  29. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.6.dist-info}/licenses/LICENSE +0 -0
  30. {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.6.dist-info}/top_level.txt +0 -0
tradedangerous/plugins/eddblink_plug.py
@@ -1,10 +1,8 @@
- from __future__ import annotations
-
  """
  Import plugin that uses data files from
  https://elite.tromador.com/ to update the Database.
  """
-
+ from __future__ import annotations

  from email.utils import parsedate_to_datetime
  from pathlib import Path
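
The reordering above is about module docstrings: Python only binds a string literal to __doc__ when it is the very first statement in the file, and a from __future__ import is still legal when it follows the docstring. A minimal sketch (a hypothetical module, not part of this package):

"""Module docstring: only the first statement in a file becomes __doc__."""
from __future__ import annotations  # future imports may follow the docstring, but nothing else

def scale(v: Vector) -> Vector:  # forward reference is fine: annotations stay unevaluated
    return v

class Vector:
    pass

if __name__ == "__main__":
    print(__doc__)  # prints the docstring; with code above the string literal this would be None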
@@ -40,7 +38,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
  if not listings.exists():
  tdenv.NOTE("File not found, aborting: {}", listings)
  return 0
-
+
  tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
  count = file_line_count(listings)
  if count <= 1:
@@ -49,7 +47,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
  else:
  tdenv.DEBUG0("Listings file is empty, nothing to do.")
  return 0
-
+
  return count + 1 # kfsone: Doesn't the header already make this + 1?


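
_count_listing_entries supplies the progress total for the listings import (a zero means there is nothing to do); file_line_count is the project's own helper and its implementation is not part of this diff. A rough sketch of the chunked newline count it presumably wraps (count_lines and listing_total are illustrative names; note the plugin itself returns count + 1, which the kfsone comment above questions):

from pathlib import Path

def count_lines(path: Path, chunk_size: int = 1 << 20) -> int:
    """Count newlines by scanning the file in binary chunks (no per-line objects)."""
    total = 0
    with path.open("rb") as fh:
        while chunk := fh.read(chunk_size):
            total += chunk.count(b"\n")
    return total

def listing_total(path: Path) -> int:
    """Rows a progress bar should expect: all lines minus the CSV header."""
    if not path.exists():
        return 0
    return max(count_lines(path) - 1, 0)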
@@ -108,10 +106,10 @@ class ImportPlugin(plugins.ImportPluginBase):
  'optimize': "Optimize ('vacuum') database after processing.",
  'solo': "Don't download crowd-sourced market data. (Implies '-O skipvend', supercedes '-O all', '-O clean', '-O listings'.)",
  }
-
+
  def __init__(self, tdb, tdenv):
  super().__init__(tdb, tdenv)
-
+
  self.dataPath = os.environ.get('TD_EDDB') or self.tdenv.tmpDir
  self.categoriesPath = Path("Category.csv")
  self.commoditiesPath = Path("Item.csv")
@@ -129,10 +127,10 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.listingsPath = Path("listings.csv")
  self.liveListingsPath = Path("listings-live.csv")
  self.pricesPath = Path("listings.prices")
-
+
  def now(self):
  return datetime.datetime.now()
-
+
  def downloadFile(self, path):
  """
  Fetch the latest dumpfile from the website if newer than local copy.
@@ -141,9 +139,9 @@ class ImportPlugin(plugins.ImportPluginBase):
  localPath = Path(self.tdb.dataPath, path)
  else:
  localPath = Path(self.dataPath, path)
-
+
  url = BASE_URL + str(path)
-
+
  self.tdenv.NOTE("Checking for update to '{}'.", path)
  # Use an HTTP Request header to obtain the Last-Modified and Content-Length headers.
  # Also, tell the server to give us the un-compressed length of the file by saying
@@ -154,48 +152,48 @@ class ImportPlugin(plugins.ImportPluginBase):
  except Exception as e: # pylint: disable=broad-exception-caught
  self.tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", url, str(e))
  return False
-
+
  last_modified = response.headers.get("last-modified")
  dump_mod_time = parsedate_to_datetime(last_modified).timestamp()
-
+
  if Path.exists(localPath):
  local_mod_time = localPath.stat().st_mtime
  if local_mod_time >= dump_mod_time:
  self.tdenv.DEBUG0("'{}': Dump is not more recent than Local.", path)
  return False
-
+
  # The server doesn't know the gzip'd length, and we won't see the gzip'd data,
  # so we want the actual text-only length. Capture it here so we can tell the
  # transfer mechanism how big the file is going to be.
  length = response.headers.get("content-length")
-
+
  self.tdenv.NOTE("Downloading file '{}'.", path)
  transfers.download(self.tdenv, url, localPath, chunkSize=16384, length=length)
-
+
  # Change the timestamps on the file so they match the website
  os.utime(localPath, (dump_mod_time, dump_mod_time))
-
+
  return True
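
downloadFile, shown above, skips the transfer when the server's Last-Modified header is not newer than the local file's mtime, then stamps the downloaded file with the server time so the next run can compare again. A standalone sketch of the same idea using requests (an assumption for illustration; the plugin itself goes through tradedangerous.transfers and its own header handling):

import os
from email.utils import parsedate_to_datetime
from pathlib import Path

import requests  # assumption: plain requests stands in for the plugin's transfer layer

def fetch_if_newer(url: str, local: Path) -> bool:
    """Download url to local only if the remote copy is newer; return True if downloaded."""
    head = requests.head(url, timeout=30)
    head.raise_for_status()
    remote_mtime = parsedate_to_datetime(head.headers["Last-Modified"]).timestamp()

    if local.exists() and local.stat().st_mtime >= remote_mtime:
        return False  # local copy is already up to date

    with requests.get(url, stream=True, timeout=60) as resp:
        resp.raise_for_status()
        with local.open("wb") as out:
            for chunk in resp.iter_content(chunk_size=16384):
                out.write(chunk)

    os.utime(local, (remote_mtime, remote_mtime))  # mirror the server timestamp locally
    return True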
-
+
  def purgeSystems(self):
  """
  Purges systems from the System table that do not have any stations claiming to be in them.
  Keeps table from becoming too large because of fleet carriers moving to unpopulated systems.
  """
  self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
-
+
  Session = self.tdb.Session
  with Session.begin() as session:
  subq = select(SA.Station.system_id).where(SA.Station.system_id == SA.System.system_id)
  stmt = delete(SA.System).where(~exists(subq))
  session.execute(stmt)
-
+
  self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
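
purgeSystems issues a single DELETE guarded by a correlated NOT EXISTS, so no System rows are ever loaded into Python. The same pattern against a throwaway in-memory SQLite database (toy models, not the project's SA classes):

from sqlalchemy import Column, ForeignKey, Integer, create_engine, delete, exists, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class System(Base):
    __tablename__ = "system"
    system_id = Column(Integer, primary_key=True)

class Station(Base):
    __tablename__ = "station"
    station_id = Column(Integer, primary_key=True)
    system_id = Column(Integer, ForeignKey("system.system_id"))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([System(system_id=1), System(system_id=2), Station(station_id=10, system_id=1)])
    session.commit()

    # DELETE FROM system WHERE NOT EXISTS (SELECT 1 FROM station WHERE station.system_id = system.system_id)
    subq = select(Station.system_id).where(Station.system_id == System.system_id)
    session.execute(delete(System).where(~exists(subq)))
    session.commit()

    print(session.scalars(select(System.system_id)).all())  # [1]: system 2 had no stations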
-
+
  def importListings(self, listings_file):
  """
  Updates the market data (StationItem) using `listings_file`.
-
+
  Rules:
  - If a row doesn't exist in DB → insert (copy CSV exactly).
  - If it exists → update only when CSV.modified > DB.modified.
@@ -207,40 +205,40 @@ class ImportPlugin(plugins.ImportPluginBase):
  end_bulk_mode,
  get_upsert_fn,
  )
-
+
  listings_path = Path(self.dataPath, listings_file).absolute()
- from_live = int(listings_path != Path(self.dataPath, self.listingsPath).absolute())
-
+ from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
+
  self.tdenv.NOTE("Checking listings")
  total = _count_listing_entries(self.tdenv, listings_path)
  if not total:
  self.tdenv.NOTE("No listings")
  return
-
+
  self.tdenv.NOTE(
  "Processing market data from {}: Start time = {}. Live = {}",
- listings_file, self.now(), bool(from_live)
+ listings_file, self.now(), from_live
  )
-
+
  Session = self.tdb.Session
-
+
  # Prefetch item/station IDs for early filtering
  with Session.begin() as session:
  item_lookup = _make_item_id_lookup(self.tdenv, session)
  station_lookup = _make_station_id_lookup(self.tdenv, session)
-
+
  is_debug = self.tdenv.debug > 0
  self.tdenv.DEBUG0("Processing entries...")
-
+
  with pbar.Progress(total, 40, prefix="Processing", style=pbar.LongRunningCountBar) as prog, \
- listings_path.open("r", encoding="utf-8", errors="ignore") as fh, \
- Session() as session:
-
+ listings_path.open("r", encoding="utf-8", errors="ignore") as fh, \
+ Session() as session:
+
  token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
  try:
  commit_batch = get_import_batch_size(session, profile="eddblink")
  execute_batch = commit_batch or 10000 # cap statement size even if single final commit
-
+
  # Upsert: keys + guarded fields (including from_live), guarded by 'modified'
  table = SA.StationItem.__table__
  key_cols = ("station_id", "item_id")
@@ -257,29 +255,29 @@ class ImportPlugin(plugins.ImportPluginBase):
  modified_col="modified",
  always_update=(), # IMPORTANT: no unconditional updates
  )
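
get_upsert_fn is imported alongside the bulk-mode helpers and its internals are not part of this diff; the behaviour described here (insert missing rows, update existing ones only when the incoming modified is newer, nothing updated unconditionally) maps onto SQLite's INSERT ... ON CONFLICT DO UPDATE with a WHERE guard. A minimal sketch of that underlying statement on a toy table:

from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, MetaData, Table, create_engine, select
from sqlalchemy.dialects.sqlite import insert

metadata = MetaData()
station_item = Table(
    "station_item", metadata,
    Column("station_id", Integer, primary_key=True),
    Column("item_id", Integer, primary_key=True),
    Column("demand_price", Integer),
    Column("modified", DateTime),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

def upsert(conn, rows):
    """Insert rows; on (station_id, item_id) conflict update only if the incoming modified is newer."""
    stmt = insert(station_item)
    stmt = stmt.on_conflict_do_update(
        index_elements=["station_id", "item_id"],
        set_={"demand_price": stmt.excluded.demand_price, "modified": stmt.excluded.modified},
        where=stmt.excluded.modified > station_item.c.modified,
    )
    conn.execute(stmt, rows)

with engine.begin() as conn:
    upsert(conn, [{"station_id": 1, "item_id": 7, "demand_price": 100, "modified": datetime(2024, 1, 1)}])
    upsert(conn, [{"station_id": 1, "item_id": 7, "demand_price": 90, "modified": datetime(2023, 1, 1)}])   # older: ignored
    upsert(conn, [{"station_id": 1, "item_id": 7, "demand_price": 120, "modified": datetime(2024, 6, 1)}])  # newer: wins
    print(conn.execute(select(station_item.c.demand_price)).scalar_one())  # 120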
-
+
  batch_rows = []
  since_commit = 0
-
+
  for listing in csv.DictReader(fh):
  prog.increment(1)
  try:
  station_id = int(listing["station_id"])
  if station_id not in station_lookup:
  continue
-
+
  item_id = int(listing["commodity_id"])
  if item_id not in item_lookup:
  continue # skip rare items (not in Item table)
-
+
  listing_time = int(listing["collected_at"])
  dt_listing_time = datetime.datetime.utcfromtimestamp(listing_time)
-
+
  row = {
- "station_id": station_id,
- "item_id": item_id,
- "modified": dt_listing_time, # guard column
- "from_live": from_live, # copied exactly when updating/inserting
+ "station_id": station_id,
+ "item_id": item_id,
+ "modified": dt_listing_time, # guard column
+ "from_live": int(from_live), # copied exactly when updating/inserting
  "demand_price": int(listing["sell_price"]),
  "demand_units": int(listing["demand"]),
  "demand_level": int(listing.get("demand_bracket") or "-1"),
@@ -289,44 +287,43 @@ class ImportPlugin(plugins.ImportPluginBase):
  }
  batch_rows.append(row)
  since_commit += 1
-
+
  if len(batch_rows) >= execute_batch:
  upsert(batch_rows)
  batch_rows.clear()
-
+
  if commit_batch and since_commit >= commit_batch:
  session.commit()
  since_commit = 0
-
+
  except Exception as e: # pylint: disable=broad-exception-caught
  self.tdenv.WARN("Bad listing row (skipped): {} error: {}", listing, e)
  continue
-
+
  if batch_rows:
  upsert(batch_rows)
  batch_rows.clear()
-
+
  session.commit()
-
+
  finally:
  end_bulk_mode(session, token)
-
- with pbar.Progress(1, 40, prefix="Saving"):
- pass
-
+
+ # with pbar.Progress(1, 40, prefix="Saving"):
+ # pass
+
  if self.getOption("optimize"):
  with pbar.Progress(1, 40, prefix="Optimizing"):
  if self.tdb.engine.dialect.name == "sqlite":
  with Session.begin() as session:
  session.execute(text("VACUUM"))
-
+
  self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
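
The loop above keeps two batch sizes apart: execute_batch caps how many rows go into a single upsert statement, while commit_batch (from get_import_batch_size) controls how often the transaction commits; when commit_batch is falsy everything lands in one final commit. Stripped of the ORM details, the cadence looks roughly like this (generic callables stand in for the real upsert and session):

from typing import Callable, Iterable, Optional

def bulk_load(
    rows: Iterable[dict],
    upsert: Callable[[list], None],
    commit: Callable[[], None],
    execute_batch: int = 10_000,          # max rows per statement
    commit_batch: Optional[int] = None,   # rows per commit; falsy means one final commit
) -> None:
    batch, since_commit = [], 0
    for row in rows:
        batch.append(row)
        since_commit += 1
        if len(batch) >= execute_batch:
            upsert(batch)
            batch.clear()
        if commit_batch and since_commit >= commit_batch:
            commit()
            since_commit = 0
    if batch:
        upsert(batch)
    commit()  # final commit covers whatever is left

if __name__ == "__main__":
    seen = []
    bulk_load(({"n": i} for i in range(25)), lambda b: seen.extend(b), lambda: None, execute_batch=10)
    print(len(seen))  # 25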
-
-
+
  def run(self):
  """
  EDDN/EDDB link importer.
-
+
  Refactored DB flow:
  - No dialect-specific logic in the plugin.
  - Preflight uses TradeDB.reloadCache() (which centralizes sanity via lifecycle.ensure_fresh_db).
@@ -340,10 +337,10 @@ class ImportPlugin(plugins.ImportPluginBase):
  from tradedangerous import cache
  # bulk-mode helpers for the incremental static import session
  from tradedangerous.db.utils import begin_bulk_mode, end_bulk_mode
-
+
  self.tdenv.ignoreUnknown = True
  self.tdb.dataPath.mkdir(parents=True, exist_ok=True)
-
+
  # Enable 'listings' by default unless other explicit options are present
  default = True
  for option in self.options:
@@ -351,10 +348,12 @@ class ImportPlugin(plugins.ImportPluginBase):
  default = False
  if default:
  self.options["listings"] = True
-
- # -----------------------------
- # Optional CLEAN: prepare inputs
- # -----------------------------
+
+ # Check if database already exists and enable `clean` if not.
+ from tradedangerous.db.lifecycle import is_empty
+ if is_empty(self.tdb.engine):
+ self.options["clean"] = True
+
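
is_empty is imported from tradedangerous.db.lifecycle; the diff only shows it being called with the engine, not how it decides. A plausible shape for such a predicate (an illustrative guess, not the project's implementation):

from sqlalchemy import column, create_engine, inspect, select, table

def looks_empty(engine) -> bool:
    """Heuristic: no tables at all, or the key table has no rows yet."""
    insp = inspect(engine)
    names = insp.get_table_names()
    if not names:
        return True
    if "System" in names:  # table name assumed for illustration
        with engine.connect() as conn:
            first = conn.execute(select(column("system_id")).select_from(table("System")).limit(1)).first()
            return first is None
    return False

if __name__ == "__main__":
    print(looks_empty(create_engine("sqlite://")))  # True: a brand-new in-memory DB has no tables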
  if self.getOption("clean"):
  # Remove CSVs so downloads become the new source of truth
  for name in [
@@ -369,49 +368,39 @@ class ImportPlugin(plugins.ImportPluginBase):
  os.remove(str(f))
  except FileNotFoundError:
  pass
-
- # Remove .prices (will be regenerated later)
+
+ # Remove .prices (DEPRECATED)
  try:
  os.remove(str(self.tdb.dataPath / "TradeDangerous.prices"))
  except FileNotFoundError:
  pass
-
- # Stash RareItem.csv so a full rebuild doesn't hit FK issues
- self._ri_path = self.tdb.dataPath / "RareItem.csv"
- self._rib_path = self._ri_path.with_suffix(".tmp")
- if self._ri_path.exists():
- if self._rib_path.exists():
- self._rib_path.unlink()
- self._ri_path.rename(self._rib_path)
-
- # Full update after downloads
+
  self.options["all"] = True
  self.options["force"] = True
-
- # --------------------------------
- # Option cascade (unchanged logic)
- # --------------------------------
+
+
+ # Select which options will be updated
  if self.getOption("listings"):
  self.options["item"] = True
  self.options["station"] = True
-
+
  if self.getOption("shipvend"):
  self.options["ship"] = True
  self.options["station"] = True
-
+
  if self.getOption("upvend"):
  self.options["upgrade"] = True
  self.options["station"] = True
-
+
  if self.getOption("item"):
  self.options["station"] = True
-
+
  if self.getOption("rare"):
  self.options["station"] = True
-
+
  if self.getOption("station"):
  self.options["system"] = True
-
+
  if self.getOption("all"):
  self.options["item"] = True
  self.options["rare"] = True
@@ -422,205 +411,71 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.options["upgrade"] = True
  self.options["upvend"] = True
  self.options["listings"] = True
-
+
  if self.getOption("solo"):
  self.options["listings"] = False
  self.options["skipvend"] = True
-
+
  if self.getOption("skipvend"):
  self.options["shipvend"] = False
  self.options["upvend"] = False
-
- # ---------------------------------------------
- # Downloads — track which static CSVs changed
- # ---------------------------------------------
- changed = {
- "System": False,
- "Station": False,
- "Category": False,
- "Item": False,
- "RareItem": False,
- "Ship": False,
- "ShipVendor": False,
- "Upgrade": False,
- "UpgradeVendor": False,
- "FDevShipyard": False,
- "FDevOutfitting": False,
- }
-
- # EDCD mirrors
+
+ # Download required files and update tables.
+ buildCache = False
  if self.getOption("upgrade"):
  if self.downloadFile(self.upgradesPath) or self.getOption("force"):
  transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
- changed["Upgrade"] = True
- changed["FDevOutfitting"] = True
-
+ buildCache = True
+
  if self.getOption("ship"):
  if self.downloadFile(self.shipPath) or self.getOption("force"):
  transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
- changed["Ship"] = True
- changed["FDevShipyard"] = True
-
- # Core static tables
+ buildCache = True
+
  if self.getOption("rare"):
  if self.downloadFile(self.rareItemPath) or self.getOption("force"):
- changed["RareItem"] = True
-
+ buildCache = True
+
  if self.getOption("shipvend"):
  if self.downloadFile(self.shipVendorPath) or self.getOption("force"):
- changed["ShipVendor"] = True
-
+ buildCache = True
+
  if self.getOption("upvend"):
  if self.downloadFile(self.upgradeVendorPath) or self.getOption("force"):
- changed["UpgradeVendor"] = True
-
+ buildCache = True
+
  if self.getOption("system"):
  if self.downloadFile(self.sysPath) or self.getOption("force"):
- changed["System"] = True
-
+ buildCache = True
+
  if self.getOption("station"):
  if self.downloadFile(self.stationsPath) or self.getOption("force"):
- changed["Station"] = True
-
+ buildCache = True
+
  if self.getOption("item"):
  if self.downloadFile(self.commoditiesPath) or self.getOption("force"):
  self.downloadFile(self.categoriesPath)
- changed["Item"] = True
- changed["Category"] = True
-
- # -------------------------------------------------------------
- # Preflight sanity (user-visible): make the pause explicit
- # -------------------------------------------------------------
- ri_path = getattr(self, "_ri_path", self.tdb.dataPath / "RareItem.csv")
- rib_path = getattr(self, "_rib_path", ri_path.with_suffix(".tmp"))
- rareitem_stashed = False
- self.tdenv.NOTE("Preflight: verifying database (this can take a while on first run)...")
- t0 = time.monotonic()
- try:
- if ri_path.exists():
- if not rib_path.exists() and not self.getOption("clean"):
- ri_path.rename(rib_path)
- rareitem_stashed = True
-
- # This may no-op or may call buildCache() internally
- self.tdb.reloadCache()
- finally:
- if rib_path.exists() and (self.getOption("clean") or rareitem_stashed):
- if ri_path.exists():
- ri_path.unlink()
- rib_path.rename(ri_path)
- t1 = time.monotonic()
- self.tdenv.NOTE("Preflight complete in {:.1f}s.", (t1 - t0))
-
- # -----------------------------------------------------
- # Rebuild or Incremental Import?
- # -----------------------------------------------------
- if self.getOption("clean"):
- self.tdenv.NOTE("Performing full rebuild...")
+ buildCache = True
+
+ # Remake the .db files with the updated info.
+ if buildCache:
  self.tdb.close()
- cache.buildCache(self.tdb, self.tdenv)
+ self.tdb.reloadCache()
  self.tdb.close()
- self.tdenv.NOTE("Full rebuild complete.")
- else:
- # Incremental import of only changed tables (no schema drop)
- IMPORT_ORDER = [
- "System",
- "Station",
- "Category",
- "Item",
- "RareItem",
- "Ship",
- "ShipVendor",
- "Upgrade",
- "UpgradeVendor",
- "FDevShipyard",
- "FDevOutfitting",
- ]
-
- any_changed = any(changed.values())
- if any_changed:
- self.tdenv.NOTE("Incremental import starting ({} tables changed).", sum(1 for v in changed.values() if v))
- with self.tdb.Session() as session:
- token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
- try:
- for table_name in IMPORT_ORDER:
- if not changed.get(table_name):
- continue
- import_path = (self.tdb.dataPath / f"{table_name}.csv").resolve()
- try:
- # Determine a cheap per-table total (header-aware) for display only.
- try:
- total = max(file_line_count(import_path) - 1, 0)
- except Exception:
- total = 0
-
- prefix = f"Processing {table_name}"
- # Mirror listings-style progress: single-line if TTY, periodic otherwise.
- with pbar.Progress(total or 1, 40, prefix=prefix, style=pbar.LongRunningCountBar) as prog:
-
- def _cb(stats=None, **kwargs):
- """
- Liberal progress callback used by cache.processImportFile.
- Accepts either:
- - int → increment by that many rows
- - dict with keys inc/rows/count → increment by that value
- - anything else → default increment of 1
- """
- inc = 1
- if isinstance(stats, int):
- inc = max(int(stats), 1)
- elif isinstance(stats, dict):
- for k in ("inc", "rows", "count"):
- if k in stats:
- try:
- inc = max(int(stats[k]), 1)
- break
- except Exception:
- pass
- prog.increment(inc)
-
- cache.processImportFile(
- self.tdenv,
- session,
- import_path,
- table_name,
- line_callback=_cb,
- call_args={"table": table_name, "total": total},
- )
-
- session.commit()
- self.tdenv.DEBUG0("Incremental import OK: {} ({})", table_name, import_path)
-
- except FileNotFoundError:
- self.tdenv.NOTE("{} missing; skipped incremental import ({})", table_name, import_path)
- except StopIteration:
- self.tdenv.NOTE("{} exists but is empty; skipped incremental import ({})", table_name, import_path)
- except Exception as e:
- self.tdenv.WARN("Incremental import failed for {}: {} ({})", table_name, e, import_path)
- session.rollback()
- self.tdenv.NOTE("Escalating to full rebuild due to import failure.")
- self.tdb.close()
- cache.buildCache(self.tdb, self.tdenv)
- self.tdb.close()
- break
- finally:
- end_bulk_mode(session, token)
- self.tdenv.NOTE("Incremental import finished.")
-
-
+
  if self.getOption("purge"):
  self.purgeSystems()
-
+
  # Listings import (prices)
  if self.getOption("listings"):
  if self.downloadFile(self.listingsPath) or self.getOption("force"):
  self.importListings(self.listingsPath)
  if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
  self.importListings(self.liveListingsPath)
-
- if self.getOption("listings"):
- self.tdenv.NOTE("Regenerating .prices file.")
- cache.regeneratePricesFile(self.tdb, self.tdenv)
-
+
+ # if self.getOption("listings"):
+ # self.tdenv.NOTE("Regenerating .prices file.")
+ # cache.regeneratePricesFile(self.tdb, self.tdenv)
+
  self.tdenv.NOTE("Import completed.")
  return False
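
The hunk above replaces the per-table change tracking, the RareItem.csv stashing, and the incremental-import path with a single flag: any changed static CSV sets buildCache, and one reloadCache() pass rebuilds everything at the end. Reduced to a sketch (names are placeholders, not the plugin's API):

from typing import Callable, Iterable

def refresh(sources: Iterable[str], download: Callable[[str], bool], rebuild: Callable[[], None]) -> bool:
    """Download every requested source; rebuild the cache once if anything changed."""
    build_cache = False
    for src in sources:
        if download(src):   # True when a newer copy was fetched (or a forced refresh)
            build_cache = True
    if build_cache:
        rebuild()           # one rebuild at the end, however many files changed
    return build_cache

if __name__ == "__main__":
    fetched = {"System.csv": True, "Station.csv": False}
    print(refresh(fetched, lambda name: fetched[name], lambda: print("rebuilding cache")))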