tradedangerous 12.0.4-py3-none-any.whl → 12.0.6-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

This version of tradedangerous was flagged as a potentially problematic release.

Files changed (30)
  1. tradedangerous/cache.py +350 -224
  2. tradedangerous/commands/buildcache_cmd.py +7 -7
  3. tradedangerous/commands/buy_cmd.py +4 -4
  4. tradedangerous/commands/export_cmd.py +11 -11
  5. tradedangerous/commands/import_cmd.py +12 -12
  6. tradedangerous/commands/market_cmd.py +17 -17
  7. tradedangerous/commands/olddata_cmd.py +18 -18
  8. tradedangerous/commands/rares_cmd.py +30 -30
  9. tradedangerous/commands/run_cmd.py +21 -21
  10. tradedangerous/commands/sell_cmd.py +5 -5
  11. tradedangerous/corrections.py +1 -1
  12. tradedangerous/csvexport.py +20 -20
  13. tradedangerous/db/adapter.py +9 -9
  14. tradedangerous/db/config.py +4 -4
  15. tradedangerous/db/engine.py +12 -12
  16. tradedangerous/db/lifecycle.py +28 -28
  17. tradedangerous/db/orm_models.py +42 -42
  18. tradedangerous/db/paths.py +3 -3
  19. tradedangerous/plugins/eddblink_plug.py +106 -251
  20. tradedangerous/plugins/spansh_plug.py +253 -253
  21. tradedangerous/prices.py +21 -21
  22. tradedangerous/tradedb.py +85 -85
  23. tradedangerous/tradeenv.py +2 -2
  24. tradedangerous/version.py +1 -1
  25. {tradedangerous-12.0.4.dist-info → tradedangerous-12.0.6.dist-info}/METADATA +1 -1
  26. {tradedangerous-12.0.4.dist-info → tradedangerous-12.0.6.dist-info}/RECORD +30 -30
  27. {tradedangerous-12.0.4.dist-info → tradedangerous-12.0.6.dist-info}/WHEEL +0 -0
  28. {tradedangerous-12.0.4.dist-info → tradedangerous-12.0.6.dist-info}/entry_points.txt +0 -0
  29. {tradedangerous-12.0.4.dist-info → tradedangerous-12.0.6.dist-info}/licenses/LICENSE +0 -0
  30. {tradedangerous-12.0.4.dist-info → tradedangerous-12.0.6.dist-info}/top_level.txt +0 -0
tradedangerous/cache.py CHANGED
@@ -16,10 +16,8 @@
  # ships, etc
  # data/TradeDangerous.prices contains a description of the price
  # database that is intended to be easily editable and commitable to
- # a source repository.
+ # a source repository. -- DEPRECATED [eyeonus]
  #
- # TODO: Split prices into per-system or per-station files so that
- # we can tell how old data for a specific system is.
  
  from __future__ import annotations
  
@@ -33,7 +31,7 @@ import typing
  
  
  from functools import partial as partial_fn
- from sqlalchemy import func, Integer, Float, DateTime
+ from sqlalchemy import func, Integer, Float, DateTime, tuple_
  from sqlalchemy import inspect as sa_inspect
  from sqlalchemy.orm import Session
  from sqlalchemy.types import DateTime as SA_DateTime
@@ -420,7 +418,7 @@ def processPrices(
  ) -> tuple[ProcessedStationIds, ProcessedItems, ZeroItems, int, int, int, int]:
      """
      Populate the database with prices by reading the given file.
- 
+ 
      :param tdenv: The environment we're working in
      :param priceFile: File to read
      :param session: Active SQLAlchemy session
@@ -428,14 +426,14 @@ def processPrices(
                  records for missing data. For partial updates,
                  set False.
      """
- 
+ 
      DEBUG0, DEBUG1 = tdenv.DEBUG0, tdenv.DEBUG1
      DEBUG0("Processing prices file: {}", priceFile)
- 
+ 
      ignoreUnknown = tdenv.ignoreUnknown
      quiet = tdenv.quiet
      merging = tdenv.mergeImport
- 
+ 
      # build lookup indexes from DB
      systemByName = getSystemByNameIndex(session)
      stationByName = getStationByNameIndex(session)
@@ -450,12 +448,12 @@ def processPrices(
          for stn, alt in corrections.stations.items()
          if isinstance(alt, str)
      }
- 
+ 
      itemByName = getItemByNameIndex(session)
- 
+ 
      defaultUnits = -1 if not defaultZero else 0
      defaultLevel = -1 if not defaultZero else 0
- 
+ 
      stationID = None
      facility = None
      processedStations = {}
@@ -464,9 +462,9 @@ def processPrices(
      stationItemDates = {}
      DELETED = corrections.DELETED
      items, zeros = [], []
- 
+ 
      lineNo, localAdd = 0, 0
- 
+ 
      if not ignoreUnknown:
          def ignoreOrWarn(error: Exception) -> None:
              raise error
@@ -479,22 +477,22 @@ def processPrices(
          nonlocal facility, stationID
          nonlocal processedStations, processedItems, localAdd
          nonlocal stationItemDates
- 
+ 
          # ## Change current station
          stationItemDates = {}
          systemNameIn, stationNameIn = matches.group(1, 2)
          systemName, stationName = systemNameIn.upper(), stationNameIn.upper()
          corrected = False
          facility = f'{systemName}/{stationName}'
- 
+ 
          stationID = DELETED
          newID = stationByName.get(facility, -1)
          DEBUG0("Selected station: {}, ID={}", facility, newID)
- 
+ 
          if newID is DELETED:
              DEBUG1("DELETED Station: {}", facility)
              return
- 
+ 
          if newID < 0:
              if utils.checkForOcrDerp(tdenv, systemName, stationName):
                  return
@@ -506,36 +504,36 @@ def processPrices(
              if altName:
                  DEBUG1("SYSTEM '{}' renamed '{}'", systemName, altName)
                  systemName, facility = altName, "/".join((altName, stationName))
- 
+ 
              systemID = systemByName.get(systemName, -1)
              if systemID < 0:
                  ignoreOrWarn(
                      UnknownSystemError(priceFile, lineNo, facility)
                  )
                  return
- 
+ 
              altStation = stnCorrections.get(facility)
              if altStation:
                  if altStation is DELETED:
                      DEBUG1("DELETED Station: {}", facility)
                      return
- 
+ 
                  DEBUG1("Station '{}' renamed '{}'", facility, altStation)
                  stationName = altStation.upper()
                  facility = f'{systemName}/{stationName}'
- 
+ 
                  newID = stationByName.get(facility, -1)
                  if newID is DELETED:
                      DEBUG1("Renamed station DELETED: {}", facility)
                      return
- 
+ 
              if newID < 0:
                  if not ignoreUnknown:
                      ignoreOrWarn(
                          UnknownStationError(priceFile, lineNo, facility)
                      )
                      return
- 
+ 
              name = utils.titleFixup(stationName)
              # ORM insert: placeholder station
              station = SA.Station(
@@ -550,25 +548,25 @@ def processPrices(
              session.add(station)
              session.flush()  # assign station_id
              newID = station.station_id
- 
+ 
              stationByName[facility] = newID
              tdenv.NOTE(
                  "Added local station placeholder for {} (#{})", facility, newID
              )
              localAdd += 1
- 
+ 
          elif newID in processedStations:
              if not corrected:
                  raise MultipleStationEntriesError(
                      priceFile, lineNo, facility,
                      processedStations[newID]
                  )
- 
+ 
          stationID = newID
          processedSystems.add(systemName)
          processedStations[stationID] = lineNo
          processedItems = {}
- 
+ 
          # ORM query: load existing item → modified map
          rows = (
              session.query(SA.StationItem.item_id, SA.StationItem.modified)
@@ -576,41 +574,41 @@ def processPrices(
              .all()
          )
          stationItemDates = dict(rows)
- 
+ 
          addItem, addZero = items.append, zeros.append
          getItemID = itemByName.get
          newItems, updtItems, ignItems = 0, 0, 0  # <-- put this back
  
- 
+ 
      def processItemLine(matches):
          nonlocal newItems, updtItems, ignItems
          itemName, modified = matches.group('item', 'time')
          itemName = itemName.upper()
- 
+ 
          # Look up the item ID.
          itemID = getItemID(itemName, -1)
          if itemID < 0:
              oldName = itemName
              itemName = corrections.correctItem(itemName)
- 
+ 
              # Silently skip DELETED items
              if itemName == corrections.DELETED:
                  DEBUG1("DELETED {}", oldName)
                  return
- 
+ 
              # Retry with corrected name
              itemName = itemName.upper()
              itemID = getItemID(itemName, -1)
- 
+ 
              if itemID < 0:
                  ignoreOrWarn(
                      UnknownItemError(priceFile, lineNo, itemName)
                  )
                  return
- 
+ 
              DEBUG1("Renamed {} -> {}", oldName, itemName)
  
- 
+ 
          lastModified = stationItemDates.get(itemID, None)
          if lastModified and merging:
              if modified and modified != 'now' and modified <= lastModified:
@@ -736,9 +734,9 @@ def processPricesFile(
      """
      Process a .prices file and import data into the DB via ORM.
      """
- 
+ 
      tdenv.DEBUG0("Processing Prices file '{}'", pricesPath)
- 
+ 
      with (pricesFh or pricesPath.open("r", encoding="utf-8")) as fh:
          (
              stations,
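
Note on the hunk above: processPricesFile accepts either an already-open handle (pricesFh) or a path, via "with (pricesFh or pricesPath.open(...)) as fh". A minimal sketch of that pattern, with illustrative names (read_prices is not part of the package):

    from pathlib import Path
    from typing import IO, Optional

    def read_prices(prices_path: Path, prices_fh: Optional[IO[str]] = None) -> int:
        # Prefer a caller-supplied handle; otherwise open the path ourselves.
        # Either way, the with-block closes the handle on exit.
        with (prices_fh or prices_path.open("r", encoding="utf-8")) as fh:
            return sum(1 for _ in fh)  # stand-in for the real line parser

One side effect of the idiom: when the caller supplies the handle, the with-block still closes it, so callers must not reuse it afterwards.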
@@ -749,32 +747,34 @@ def processPricesFile(
              ignItems,
              numSys,
          ) = processPrices(tdenv, fh, session, defaultZero)
- 
+ 
      if not tdenv.mergeImport:
          # Delete all StationItems for these stations
          session.query(SA.StationItem).filter(
              SA.StationItem.station_id.in_([sid for (sid,) in stations])
          ).delete(synchronize_session=False)
- 
+ 
+     # Remove zeroed pairs
+     removedItems = 0
      if zeros:
          session.query(SA.StationItem).filter(
              tuple_(SA.StationItem.station_id, SA.StationItem.item_id).in_(zeros)
          ).delete(synchronize_session=False)
-         removedItems = len(zeros)
- 
+         removedItems = len(zeros)
+ 
+     # Upsert items
      if items:
-         for item in items:
-             (
-                 station_id,
-                 item_id,
-                 modified,
-                 demand_price,
-                 demand_units,
-                 demand_level,
-                 supply_price,
-                 supply_units,
-                 supply_level,
-             ) = item
+         for (
+             station_id,
+             item_id,
+             modified,
+             demand_price,
+             demand_units,
+             demand_level,
+             supply_price,
+             supply_units,
+             supply_level,
+         ) in items:
              obj = SA.StationItem(
                  station_id=station_id,
                  item_id=item_id,
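
The tuple_ imported at the top of the file (see the hunk at line 33) is used in this hunk to bulk-delete rows by composite key. A sketch of the two operations, assuming a minimal StationItem model with a composite (station_id, item_id) primary key; apply_market_rows and the model are illustrative stand-ins, not package API:

    from sqlalchemy import tuple_
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, Session

    class Base(DeclarativeBase):
        pass

    class StationItem(Base):
        # Stand-in for SA.StationItem: composite primary key.
        __tablename__ = "StationItem"
        station_id: Mapped[int] = mapped_column(primary_key=True)
        item_id: Mapped[int] = mapped_column(primary_key=True)
        demand_price: Mapped[int] = mapped_column(default=0)

    def apply_market_rows(session: Session, zeros: list[tuple[int, int]], items: list[dict]) -> None:
        # Zeroed (station_id, item_id) pairs go out in one bulk DELETE; tuple_()
        # builds a composite value so .in_() can match both key columns at once.
        if zeros:
            session.query(StationItem).filter(
                tuple_(StationItem.station_id, StationItem.item_id).in_(zeros)
            ).delete(synchronize_session=False)
        # Remaining rows are upserted one at a time: merge() selects by
        # primary key, then inserts or updates accordingly.
        for row in items:
            session.merge(StationItem(**row))

Composite IN requires backend support for row values (SQLite and MySQL/MariaDB both accept it), and merge() trades bulk speed for simplicity since it round-trips per row.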
@@ -787,12 +787,12 @@ def processPricesFile(
                  supply_level=supply_level,
              )
              session.merge(obj)
- 
+ 
      tdenv.DEBUG0("Marking populated stations as having a market")
      session.query(SA.Station).filter(
          SA.Station.station_id.in_([sid for (sid,) in stations])
      ).update({SA.Station.market: "Y"}, synchronize_session=False)
- 
+ 
      changes = " and ".join(
          f"{v} {k}"
          for k, v in {
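
The summary string begun at the end of the hunk above uses a filter-join-fallback idiom: keep only the non-zero counters, join them with "and", and fall back to "0" when nothing changed. Rendered standalone (describe_changes is illustrative):

    def describe_changes(new: int, updated: int, removed: int) -> str:
        counts = {"new": new, "updated": updated, "removed": removed}
        return " and ".join(f"{v} {k}" for k, v in counts.items() if v) or "0"

    print(describe_changes(3, 0, 1))  # -> "3 new and 1 removed"
    print(describe_changes(0, 0, 0))  # -> "0"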
@@ -802,7 +802,7 @@ def processPricesFile(
          }.items()
          if v
      ) or "0"
- 
+ 
      tdenv.NOTE(
          "Import complete: "
          "{:s} items "
@@ -812,12 +812,13 @@ def processPricesFile(
          len(stations),
          numSys,
      )
- 
+ 
      if ignItems:
          tdenv.NOTE("Ignored {} items with old data", ignItems)
  
  
  
+ 
  ######################################################################
  
  
@@ -877,146 +878,270 @@ def processImportFile(
  ):
      """
      Import a CSV file into the given table.
- 
-     - RareItem.csv:
-         Skips FK marker columns:
-           * !name@System.system_id
-           * name@Station.station_id
-           * name@Category.category_id
-         Looks up system_id (transient), station_id, category_id via cached helpers.
-         NOTE: system_id is NOT a RareItem column and is not passed to the model.
- 
-     - System.csv:
-         Skips 'name@Added.added_id' from active columns; resolves Added by name,
-         defaulting to "EDSM" when blank.
- 
-     - All tables:
-         Uses parse_ts() for datetimes.
-         Enforces unq: unique headers.
-         Commits per tradedangerous.db.utils.get_import_batch_size(session).
+ 
+     Applies header parsing, uniqueness checks, foreign key lookups,
+     in-row deprecation correction (warnings only at -vv via DEBUG1), and upserts via SQLAlchemy ORM.
+     Commits in batches for large datasets.
      """
- 
+ 
      tdenv.DEBUG0("Processing import file '{}' for table '{}'", str(importPath), tableName)
- 
+ 
      call_args = call_args or {}
      if line_callback:
          line_callback = partial_fn(line_callback, **call_args)
- 
-     uniquePfx = "unq:"
-     uniqueLen = len(uniquePfx)
- 
-     # Backend-aware batch policy (SQLite=None→single commit; MariaDB defaults to 50k; env override respected)
-     max_transaction_items = get_import_batch_size(session, profile="csv")  # from tradedangerous.db.utils
-     transaction_items = 0
- 
+ 
+     # --- batch size config from environment or fallback ---
+     env_batch = os.environ.get("TD_LISTINGS_BATCH")
+     if env_batch:
+         try:
+             max_transaction_items = int(env_batch)
+         except ValueError:
+             tdenv.WARN("Invalid TD_LISTINGS_BATCH value %r, falling back to defaults.", env_batch)
+             max_transaction_items = None
+     else:
+         max_transaction_items = None
+ 
+     if max_transaction_items is None:
+         if session.bind.dialect.name in ("mysql", "mariadb"):
+             max_transaction_items = 50 * 1024
+         else:
+             max_transaction_items = 250 * 1024
+ 
+     transaction_items = 0  # track how many rows inserted before committing
+ 
      with importPath.open("r", encoding="utf-8") as importFile:
          csvin = csv.reader(importFile, delimiter=",", quotechar="'", doublequote=True)
- 
-         # header
+ 
+         # Read header row
          columnDefs = next(csvin)
          columnCount = len(columnDefs)
- 
-         activeColumns: list[str] = []        # headers that map directly to ORM fields
-         kept_indices: list[int] = []         # original header indices that we KEEP (aligns values)
-         uniqueIndexes: list[int] = []        # indexes into activeColumns (post-skip)
-         fk_col_indices: dict[str, int] = {}  # special FK headers their original indices
- 
-         # --- preprocess header ---
+ 
+         # --- Process headers: extract column names, track indices ---
+         activeColumns: list[str] = []        # Final columns we'll use (after "unq:" stripping)
+         kept_indices: list[int] = []         # Indices into CSV rows we keep (aligned to activeColumns)
+         uniqueIndexes: list[int] = []        # Indexes (into activeColumns) of unique keys
+         fk_col_indices: dict[str, int] = {}  # Special handling for FK resolution
+ 
+         uniquePfx = "unq:"
+         uniqueLen = len(uniquePfx)
+ 
+         # map of header (without "unq:") -> original CSV index, for correction by name
+         header_index: dict[str, int] = {}
+ 
          for cIndex, cName in enumerate(columnDefs):
              colName, _, srcKey = cName.partition("@")
- 
-             # --- System.csv ---
+             baseName = colName[uniqueLen:] if colName.startswith(uniquePfx) else colName
+             header_index[baseName] = cIndex
+ 
+             # Special-case: System-added
              if tableName == "System":
                  if cName == "name":
-                     srcKey = ""  # plain field
+                     srcKey = ""
                  elif cName == "name@Added.added_id":
-                     # We'll resolve Added by name separately; skip from active columns.
                      fk_col_indices["added"] = cIndex
-                     continue  # do NOT keep this header/value
- 
-             # --- RareItem.csv (skip FK headers; remember positions) ---
+                     continue
+ 
+             # Foreign key columns for RareItem
              if tableName == "RareItem":
                  if cName == "!name@System.system_id":
                      fk_col_indices["system"] = cIndex
-                     continue  # do NOT keep this header/value
+                     continue
                  if cName == "name@Station.station_id":
                      fk_col_indices["station"] = cIndex
-                     continue  # do NOT keep this header/value
+                     continue
                  if cName == "name@Category.category_id":
                      fk_col_indices["category"] = cIndex
-                     continue  # do NOT keep this header/value
- 
-             # unique index marker (e.g., "unq:name")
+                     continue
+ 
+             # Handle unique constraint tracking
              if colName.startswith(uniquePfx):
                  uniqueIndexes.append(len(activeColumns))
-                 colName = colName[uniqueLen:]
- 
-             # keep normal columns and remember their source index
+                 colName = baseName
+ 
              activeColumns.append(colName)
              kept_indices.append(cIndex)
- 
-         # optional deprecation checker
-         deprecationFn = getattr(sys.modules[__name__], "deprecationCheck" + tableName, None)
- 
+ 
          importCount = 0
-         uniqueIndex = {}
- 
+         uniqueIndex: dict[str, int] = {}
+ 
+         # helpers for correction + visibility-gated warning
+         DELETED = corrections.DELETED
+ 
+         def _warn(line_no: int, msg: str) -> None:
+             # Gate deprecation chatter to -vv (DEBUG1)
+             tdenv.DEBUG1("{}:{} WARNING {}", importPath, line_no, msg)
+ 
+         def _apply_row_corrections(table_name: str, row: list[str], line_no: int) -> bool:
+             """
+             Returns True if the row should be skipped (deleted in tolerant mode), False otherwise.
+             Mutates 'row' in place with corrected values.
+             """
+             try:
+                 if table_name == "System":
+                     idx = header_index.get("name")
+                     if idx is not None:
+                         orig = row[idx]
+                         corr = corrections.correctSystem(orig)
+                         if corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'System "{orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "System", orig)
+                         if corr != orig:
+                             _warn(line_no, f'System "{orig}" is deprecated and should be replaced with "{corr}".')
+                             row[idx] = corr
+ 
+                 elif table_name == "Station":
+                     s_idx = header_index.get("system")
+                     n_idx = header_index.get("name")
+                     if s_idx is not None and n_idx is not None:
+                         s_orig = row[s_idx]
+                         s_corr = corrections.correctSystem(s_orig)
+                         if s_corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'System "{s_orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "System", s_orig)
+                         if s_corr != s_orig:
+                             _warn(line_no, f'System "{s_orig}" is deprecated and should be replaced with "{s_corr}".')
+                             row[s_idx] = s_corr
+                         n_orig = row[n_idx]
+                         n_corr = corrections.correctStation(s_corr, n_orig)
+                         if n_corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'Station "{n_orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "Station", n_orig)
+                         if n_corr != n_orig:
+                             _warn(line_no, f'Station "{n_orig}" is deprecated and should be replaced with "{n_corr}".')
+                             row[n_idx] = n_corr
+ 
+                 elif table_name == "Category":
+                     idx = header_index.get("name")
+                     if idx is not None:
+                         orig = row[idx]
+                         corr = corrections.correctCategory(orig)
+                         if corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'Category "{orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "Category", orig)
+                         if corr != orig:
+                             _warn(line_no, f'Category "{orig}" is deprecated and should be replaced with "{corr}".')
+                             row[idx] = corr
+ 
+                 elif table_name == "Item":
+                     cat_idx = header_index.get("category")
+                     name_idx = header_index.get("name")
+                     if cat_idx is not None:
+                         c_orig = row[cat_idx]
+                         c_corr = corrections.correctCategory(c_orig)
+                         if c_corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'Category "{c_orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "Category", c_orig)
+                         if c_corr != c_orig:
+                             _warn(line_no, f'Category "{c_orig}" is deprecated and should be replaced with "{c_corr}".')
+                             row[cat_idx] = c_corr
+                     if name_idx is not None:
+                         i_orig = row[name_idx]
+                         i_corr = corrections.correctItem(i_orig)
+                         if i_corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'Item "{i_orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "Item", i_orig)
+                         if i_corr != i_orig:
+                             _warn(line_no, f'Item "{i_orig}" is deprecated and should be replaced with "{i_corr}".')
+                             row[name_idx] = i_corr
+ 
+                 # RareItem: we only correct category (FK lookup uses names) to improve hit rate.
+                 elif table_name == "RareItem":
+                     cat_idx = header_index.get("category")
+                     if cat_idx is not None:
+                         c_orig = row[cat_idx]
+                         c_corr = corrections.correctCategory(c_orig)
+                         if c_corr is DELETED:
+                             if tdenv.ignoreUnknown:
+                                 _warn(line_no, f'Category "{c_orig}" is marked as DELETED and should not be used.')
+                                 return True
+                             raise DeletedKeyError(importPath, line_no, "Category", c_orig)
+                         if c_corr != c_orig:
+                             _warn(line_no, f'Category "{c_orig}" is deprecated and should be replaced with "{c_corr}".')
+                             row[cat_idx] = c_corr
+ 
+             except BuildCacheBaseException:
+                 # strict mode path bubbles up; caller will handle
+                 raise
+             return False  # do not skip
+ 
+         # --- Read data lines ---
          for linein in csvin:
              if line_callback:
                  line_callback()
              if not linein:
                  continue
+ 
              lineNo = csvin.line_num
- 
+ 
              if len(linein) != columnCount:
-                 tdenv.NOTE(
-                     "Wrong number of columns ({}:{}): {}",
-                     importPath,
-                     lineNo,
-                     ", ".join(linein),
-                 )
+                 tdenv.NOTE("Wrong number of columns ({}:{}): {}", importPath, lineNo, ", ".join(linein))
                  continue
- 
+ 
              tdenv.DEBUG1(" Values: {}", ", ".join(linein))
- 
-             # deprecation checks
-             if deprecationFn:
-                 try:
-                     deprecationFn(importPath, lineNo, linein)
-                 except DeletedKeyError as e:
-                     if not tdenv.ignoreUnknown:
-                         raise e
-                     e.category = "WARNING"
-                     tdenv.NOTE("{}", e)
+ 
+             # --- Apply corrections BEFORE uniqueness; may skip if deleted in tolerant mode
+             try:
+                 if _apply_row_corrections(tableName, linein, lineNo):
                      continue
-                 except DeprecatedKeyError as e:
-                     if not tdenv.ignoreUnknown:
-                         raise e
-                     e.category = "WARNING"
-                     tdenv.NOTE("{}", e)
-                     # Do NOT skip — correction is available
- 
- 
-             # Build values aligned to activeColumns (skip the FK columns we excluded)
+             except DeletedKeyError:
+                 if not tdenv.ignoreUnknown:
+                     # strict: fail hard
+                     raise
+                 # tolerant: already warned in _apply_row_corrections; skip row
+                 continue
+ 
+             # Extract and clean values to use (from corrected line)
              activeValues = [linein[i] for i in kept_indices]
- 
-             # unique index enforcement over activeColumns
-             if uniqueIndexes:
-                 keyValues = [str(activeValues[i]).upper() for i in uniqueIndexes]
-                 key = ":!:".join(keyValues)
-                 prevLineNo = uniqueIndex.get(key, 0)
-                 if prevLineNo:
-                     key_disp = "/".join(keyValues)
-                     raise DuplicateKeyError(importPath, lineNo, "entry", key_disp, prevLineNo)
-                 uniqueIndex[key] = lineNo
- 
+ 
+             # --- Uniqueness check (after correction) ---
+             try:
+                 if uniqueIndexes:
+                     keyValues = [str(activeValues[i]).upper() for i in uniqueIndexes]
+                     key = ":!:".join(keyValues)
+                     prevLineNo = uniqueIndex.get(key, 0)
+                     if prevLineNo:
+                         key_disp = "/".join(keyValues)
+                         if tdenv.ignoreUnknown:
+                             e = DuplicateKeyError(importPath, lineNo, "entry", key_disp, prevLineNo)
+                             e.category = "WARNING"
+                             tdenv.NOTE("{}", e)
+                             continue
+                         raise DuplicateKeyError(importPath, lineNo, "entry", key_disp, prevLineNo)
+                     uniqueIndex[key] = lineNo
+             except Exception as e:
+                 # Keep processing the file, don't tear down the loop
+                 tdenv.WARN(
+                     "*** INTERNAL ERROR: {err}\n"
+                     "CSV File: {file}:{line}\n"
+                     "Table: {table}\n"
+                     "Params: {params}\n".format(
+                         err=str(e),
+                         file=str(importPath),
+                         line=lineNo,
+                         table=tableName,
+                         params=linein,
+                     )
+                 )
+                 session.rollback()
+                 continue
+ 
              try:
-                 # Base rowdict from non-FK columns only
                  rowdict = dict(zip(activeColumns, activeValues))
- 
-                 # --- RareItem foreign key lookups ---
+ 
+                 # Foreign key lookups — RareItem
                  if tableName == "RareItem":
-                     # Resolve system (transient; only for station lookup)
                      sys_id = None
                      if "system" in fk_col_indices:
                          sys_name = linein[fk_col_indices["system"]]
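
The hunk above replaces the get_import_batch_size() helper with an inline batching policy. Condensed into a standalone function (resolve_batch_size is illustrative; the TD_LISTINGS_BATCH variable and the 50K/250K row defaults are as in the diff):

    import os
    from sqlalchemy.orm import Session

    def resolve_batch_size(session: Session) -> int:
        # An explicit TD_LISTINGS_BATCH value wins; a malformed value falls
        # through to the dialect defaults instead of aborting the import.
        env_batch = os.environ.get("TD_LISTINGS_BATCH")
        if env_batch:
            try:
                return int(env_batch)
            except ValueError:
                pass
        # MariaDB/MySQL gets smaller transactions (50K rows) than SQLite (250K).
        if session.bind.dialect.name in ("mysql", "mariadb"):
            return 50 * 1024
        return 250 * 1024

During the import loop each merged row bumps a counter, and the session commits (then begins a fresh transaction) whenever the counter reaches this limit.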
@@ -1024,8 +1149,7 @@ def processImportFile(
                          sys_id = _get_system_id(session, sys_name)
                      except ValueError:
                          tdenv.WARN("Unknown System '{}' in {}", sys_name, importPath)
- 
-                 # Station (requires system context)
+ 
                  if "station" in fk_col_indices:
                      stn_name = linein[fk_col_indices["station"]]
                      if sys_id is not None:
1035
1159
  tdenv.WARN("Unknown Station '{}' in {}", stn_name, importPath)
1036
1160
  else:
1037
1161
  tdenv.WARN("Station lookup skipped (no system_id) for '{}'", stn_name)
1038
-
1039
- # Category
1162
+
1040
1163
  if "category" in fk_col_indices:
1041
1164
  cat_name = linein[fk_col_indices["category"]]
1042
1165
  try:
1043
1166
  rowdict["category_id"] = _get_category_id(session, cat_name)
1044
1167
  except ValueError:
1045
1168
  tdenv.WARN("Unknown Category '{}' in {}", cat_name, importPath)
1046
-
1047
- # --- System foreign key lookup (Added), default "EDSM" if blank ---
1169
+
1170
+ # Foreign key lookups System.added
1048
1171
  if tableName == "System" and "added" in fk_col_indices:
1049
1172
  added_val = linein[fk_col_indices["added"]] or "EDSM"
1050
1173
  try:
@@ -1052,25 +1175,22 @@ def processImportFile(
1052
1175
  except ValueError:
1053
1176
  rowdict["added_id"] = None
1054
1177
  tdenv.WARN("Unknown Added value '{}' in {}", added_val, importPath)
1055
-
1056
- # --- type coercion ---
1178
+
1179
+ # --- Type coercion for common types ---
1057
1180
  for key, val in list(rowdict.items()):
1058
1181
  if val in ("", None):
1059
1182
  rowdict[key] = None
1060
1183
  continue
1061
- # ints
1062
1184
  if key.endswith("_id") or key.endswith("ID") or key in ("cost", "max_allocation"):
1063
1185
  try:
1064
1186
  rowdict[key] = int(val)
1065
1187
  except ValueError:
1066
1188
  rowdict[key] = None
1067
- # floats
1068
1189
  elif key in ("pos_x", "pos_y", "pos_z", "ls_from_star"):
1069
1190
  try:
1070
1191
  rowdict[key] = float(val)
1071
1192
  except ValueError:
1072
1193
  rowdict[key] = None
1073
- # datetimes
1074
1194
  elif "time" in key or key == "modified":
1075
1195
  parsed = parse_ts(val)
1076
1196
  if parsed:
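
The next hunk remaps the CSV header "class" to "class_" before constructing the model, because class is a Python keyword and cannot be an attribute name or constructor keyword. A minimal SQLAlchemy 2.0-style sketch of the convention (a stand-in, not the real model from tradedangerous/db/orm_models.py):

    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Upgrade(Base):
        __tablename__ = "Upgrade"
        upgrade_id: Mapped[int] = mapped_column(primary_key=True)
        class_: Mapped[int] = mapped_column("class")  # attribute class_, column "class"

    row = Upgrade(upgrade_id=1, class_=5)  # the rowdict key must be "class_", never "class"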
@@ -1084,32 +1204,31 @@ def processImportFile(
                              val,
                          )
                          rowdict[key] = None
-                     # strings (incl. TriState flags) left as-is
- 
-                 # reserved word remaps
+ 
+                 # Special handling for SQL reserved word `class`
                  if tableName == "Upgrade" and "class" in rowdict:
                      rowdict["class_"] = rowdict.pop("class")
                  if tableName == "FDevOutfitting" and "class" in rowdict:
                      rowdict["class_"] = rowdict.pop("class")
- 
-                 # ensure we never pass system_id to RareItem (not a column)
                  if tableName == "RareItem" and "system_id" in rowdict:
                      rowdict.pop("system_id", None)
- 
+ 
+                 # ORM insert/merge
                  Model = getattr(SA, tableName)
                  obj = Model(**rowdict)
                  session.merge(obj)
                  importCount += 1
- 
-                 # batched commit (only if enabled for this backend)
+ 
+                 # Batch commit
                  if max_transaction_items:
                      transaction_items += 1
                      if transaction_items >= max_transaction_items:
                          session.commit()
                          session.begin()
                          transaction_items = 0
- 
+ 
              except Exception as e:
+                 # Log all import errors — but keep going
                  tdenv.WARN(
                      "*** INTERNAL ERROR: {err}\n"
                      "CSV File: {file}:{line}\n"
@@ -1123,40 +1242,43 @@ def processImportFile(
                      )
                  )
                  session.rollback()
- 
+ 
+     # Final commit after file done
      session.commit()
      tdenv.DEBUG0("{count} {table}s imported", count=importCount, table=tableName)
  
  
+ 
+ 
  def buildCache(tdb, tdenv):
      """
      Rebuilds the database from source files.
- 
+ 
      TD's data is either "stable" - information that rarely changes like Ship
      details, star systems etc - and "volatile" - pricing information, etc.
- 
+ 
      The stable data starts out in data/TradeDangerous.sql while other data
      is stored in custom-formatted text files, e.g. ./TradeDangerous.prices.
- 
+ 
      We load both sets of data into a database, after which we can
      avoid the text-processing overhead by simply checking if the text files
      are newer than the database.
      """
- 
+ 
      tdenv.NOTE(
-         "Rebuilding cache file: this may take a few moments.",
+         "(Re)building database: this may take a few moments.",
          stderr=True,
      )
- 
+ 
      dbPath = tdb.dbPath
      sqlPath = tdb.sqlPath
-     pricesPath = tdb.pricesPath
+     # pricesPath = tdb.pricesPath
      engine = tdb.engine
- 
+ 
      # --- Step 1: reset schema BEFORE opening a session/transaction ---
      # Single unified call; no dialect branching here.
      lifecycle.reset_db(engine, db_path=dbPath)
- 
+ 
      # --- Step 2: open a new session for rebuild work ---
      with tdb.Session() as session:
          # Import standard tables on a plain session with progress
@@ -1196,46 +1318,50 @@ def buildCache(tdb, tdenv):
                      importName,
                  )
                  prog.increment(1)
- 
+ 
          with prog.sub_task(description="Save DB"):
              session.commit()
- 
-         # --- Step 3: parse the prices file (still plain session) ---
-         if pricesPath.exists():
-             with Progress(max_value=None, width=25, prefix="Processing prices file"):
-                 processPricesFile(tdenv, session, pricesPath)
-         else:
-             tdenv.NOTE(
-                 f'Missing "{pricesPath}" file - no price data.',
-                 stderr=True,
-             )
- 
+ 
+         # # --- Step 3: parse the prices file (still plain session) ---
+         # if pricesPath.exists():
+         #     with Progress(max_value=None, width=25, prefix="Processing prices file"):
+         #         processPricesFile(tdenv, session, pricesPath)
+         # else:
+         #     tdenv.NOTE(
+         #         f'Missing "{pricesPath}" file - no price data.',
+         #         stderr=True,
+         #     )
+ 
      tdb.close()
-     tdenv.DEBUG0("Finished")
+     tdenv.NOTE(
+         "Database build completed.",
+         stderr=True,
+     )
  
  
  ######################################################################
  
  
  def regeneratePricesFile(tdb, tdenv):
-     """
-     Regenerate the .prices file from the current DB contents.
-     Uses the ORM session rather than raw sqlite.
-     """
-     tdenv.DEBUG0("Regenerating .prices file")
- 
-     with tdb.Session() as session:
-         with tdb.pricesPath.open("w", encoding="utf-8") as pricesFile:
-             prices.dumpPrices(
-                 session,
-                 prices.Element.full,
-                 file=pricesFile,
-                 debug=tdenv.debug,
-             )
- 
-     # Only touch the DB file on SQLite — MariaDB has no dbPath
-     if tdb.engine.dialect.name == "sqlite" and tdb.dbPath and os.path.exists(tdb.dbPath):
-         os.utime(tdb.dbPath)
+     return
+     # """
+     # Regenerate the .prices file from the current DB contents.
+     # Uses the ORM session rather than raw sqlite.
+     # """
+     # tdenv.DEBUG0("Regenerating .prices file")
+     #
+     # with tdb.Session() as session:
+     #     with tdb.pricesPath.open("w", encoding="utf-8") as pricesFile:
+     #         prices.dumpPrices(
+     #             session,
+     #             prices.Element.full,
+     #             file=pricesFile,
+     #             debug=tdenv.debug,
+     #         )
+     #
+     # # Only touch the DB file on SQLite — MariaDB has no dbPath
+     # if tdb.engine.dialect.name == "sqlite" and tdb.dbPath and os.path.exists(tdb.dbPath):
+     #     os.utime(tdb.dbPath)
  
  ######################################################################
  
@@ -1246,15 +1372,15 @@ def importDataFromFile(tdb, tdenv, path, pricesFh=None, reset=False):
      that is when a new station is encountered, delete any
      existing records for that station in the database.
      """
- 
+ 
      if not pricesFh and not path.exists():
          raise TradeException(f"No such file: {path}")
- 
+ 
      if reset:
          tdenv.DEBUG0("Resetting price data")
          with tdb.Session.begin() as session:
              session.query(SA.StationItem).delete()
- 
+ 
      tdenv.DEBUG0(f"Importing data from {path}")
      processPricesFile(
          tdenv,
1262
1388
  pricesPath=path,
1263
1389
  pricesFh=pricesFh,
1264
1390
  )
1265
-
1266
- # If everything worked, regenerate the canonical prices file if this wasn’t the main one
1267
- if path != tdb.pricesPath:
1268
- regeneratePricesFile(tdb, tdenv)
1391
+
1392
+ # # If everything worked, regenerate the canonical prices file if this wasn’t the main one
1393
+ # if path != tdb.pricesPath:
1394
+ # regeneratePricesFile(tdb, tdenv)
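
Taken together, the buildCache and importDataFromFile changes leave the rebuild sequence as: reset the schema, import the static CSV tables in one session, commit, close — with both the .prices parsing pass and prices-file regeneration disabled. In outline (import_tables and the processImportFile argument order are assumptions for illustration):

    def rebuild(tdb, tdenv) -> None:
        # Step 1: reset the schema before any session or transaction opens.
        lifecycle.reset_db(tdb.engine, db_path=tdb.dbPath)
        # Step 2: bulk-import the CSV tables inside a single session.
        with tdb.Session() as session:
            for table_name, csv_path in import_tables:  # hypothetical (table, file) pairs
                processImportFile(tdenv, session, csv_path, table_name)
            session.commit()
        tdb.close()  # the .prices pass that used to follow is commented out in 12.0.6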