tradedangerous-12.0.5-py3-none-any.whl → tradedangerous-12.0.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tradedangerous/cache.py +135 -133
- tradedangerous/commands/buildcache_cmd.py +7 -7
- tradedangerous/commands/buy_cmd.py +4 -4
- tradedangerous/commands/export_cmd.py +11 -11
- tradedangerous/commands/import_cmd.py +12 -12
- tradedangerous/commands/market_cmd.py +17 -17
- tradedangerous/commands/olddata_cmd.py +18 -18
- tradedangerous/commands/rares_cmd.py +30 -30
- tradedangerous/commands/run_cmd.py +21 -21
- tradedangerous/commands/sell_cmd.py +5 -5
- tradedangerous/corrections.py +1 -1
- tradedangerous/csvexport.py +20 -20
- tradedangerous/db/adapter.py +9 -9
- tradedangerous/db/config.py +4 -4
- tradedangerous/db/engine.py +12 -12
- tradedangerous/db/lifecycle.py +28 -28
- tradedangerous/db/orm_models.py +42 -42
- tradedangerous/db/paths.py +3 -3
- tradedangerous/plugins/eddblink_plug.py +108 -253
- tradedangerous/plugins/spansh_plug.py +254 -254
- tradedangerous/prices.py +21 -21
- tradedangerous/tradedb.py +85 -85
- tradedangerous/tradeenv.py +2 -2
- tradedangerous/version.py +1 -1
- {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/METADATA +1 -1
- {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/RECORD +30 -30
- {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/WHEEL +0 -0
- {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/entry_points.txt +0 -0
- {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/licenses/LICENSE +0 -0
- {tradedangerous-12.0.5.dist-info → tradedangerous-12.0.7.dist-info}/top_level.txt +0 -0
tradedangerous/cache.py
CHANGED
@@ -16,10 +16,8 @@
 # ships, etc
 # data/TradeDangerous.prices contains a description of the price
 # database that is intended to be easily editable and commitable to
-# a source repository.
+# a source repository. -- DEPRECATED [eyeonus]
 #
-# TODO: Split prices into per-system or per-station files so that
-# we can tell how old data for a specific system is.
 
 from __future__ import annotations
 
@@ -420,7 +418,7 @@ def processPrices(
 ) -> tuple[ProcessedStationIds, ProcessedItems, ZeroItems, int, int, int, int]:
     """
     Populate the database with prices by reading the given file.
-
+
     :param tdenv: The environment we're working in
     :param priceFile: File to read
     :param session: Active SQLAlchemy session
@@ -428,14 +426,14 @@ def processPrices(
                        records for missing data. For partial updates,
                        set False.
     """
-
+
     DEBUG0, DEBUG1 = tdenv.DEBUG0, tdenv.DEBUG1
     DEBUG0("Processing prices file: {}", priceFile)
-
+
     ignoreUnknown = tdenv.ignoreUnknown
     quiet = tdenv.quiet
     merging = tdenv.mergeImport
-
+
     # build lookup indexes from DB
     systemByName = getSystemByNameIndex(session)
     stationByName = getStationByNameIndex(session)
@@ -450,12 +448,12 @@ def processPrices(
         for stn, alt in corrections.stations.items()
         if isinstance(alt, str)
     }
-
+
     itemByName = getItemByNameIndex(session)
-
+
     defaultUnits = -1 if not defaultZero else 0
     defaultLevel = -1 if not defaultZero else 0
-
+
     stationID = None
     facility = None
     processedStations = {}
@@ -464,9 +462,9 @@ def processPrices(
     stationItemDates = {}
     DELETED = corrections.DELETED
     items, zeros = [], []
-
+
     lineNo, localAdd = 0, 0
-
+
     if not ignoreUnknown:
         def ignoreOrWarn(error: Exception) -> None:
             raise error
@@ -479,22 +477,22 @@ def processPrices(
         nonlocal facility, stationID
         nonlocal processedStations, processedItems, localAdd
        nonlocal stationItemDates
-
+
        # ## Change current station
        stationItemDates = {}
        systemNameIn, stationNameIn = matches.group(1, 2)
        systemName, stationName = systemNameIn.upper(), stationNameIn.upper()
        corrected = False
        facility = f'{systemName}/{stationName}'
-
+
        stationID = DELETED
        newID = stationByName.get(facility, -1)
        DEBUG0("Selected station: {}, ID={}", facility, newID)
-
+
        if newID is DELETED:
            DEBUG1("DELETED Station: {}", facility)
            return
-
+
        if newID < 0:
            if utils.checkForOcrDerp(tdenv, systemName, stationName):
                return
@@ -506,36 +504,36 @@ def processPrices(
            if altName:
                DEBUG1("SYSTEM '{}' renamed '{}'", systemName, altName)
                systemName, facility = altName, "/".join((altName, stationName))
-
+
            systemID = systemByName.get(systemName, -1)
            if systemID < 0:
                ignoreOrWarn(
                    UnknownSystemError(priceFile, lineNo, facility)
                )
                return
-
+
            altStation = stnCorrections.get(facility)
            if altStation:
                if altStation is DELETED:
                    DEBUG1("DELETED Station: {}", facility)
                    return
-
+
                DEBUG1("Station '{}' renamed '{}'", facility, altStation)
                stationName = altStation.upper()
                facility = f'{systemName}/{stationName}'
-
+
                newID = stationByName.get(facility, -1)
                if newID is DELETED:
                    DEBUG1("Renamed station DELETED: {}", facility)
                    return
-
+
            if newID < 0:
                if not ignoreUnknown:
                    ignoreOrWarn(
                        UnknownStationError(priceFile, lineNo, facility)
                    )
                    return
-
+
                name = utils.titleFixup(stationName)
                # ORM insert: placeholder station
                station = SA.Station(
@@ -550,25 +548,25 @@ def processPrices(
                session.add(station)
                session.flush()  # assign station_id
                newID = station.station_id
-
+
                stationByName[facility] = newID
                tdenv.NOTE(
                    "Added local station placeholder for {} (#{})", facility, newID
                )
                localAdd += 1
-
+
        elif newID in processedStations:
            if not corrected:
                raise MultipleStationEntriesError(
                    priceFile, lineNo, facility,
                    processedStations[newID]
                )
-
+
        stationID = newID
        processedSystems.add(systemName)
        processedStations[stationID] = lineNo
        processedItems = {}
-
+
        # ORM query: load existing item → modified map
        rows = (
            session.query(SA.StationItem.item_id, SA.StationItem.modified)
@@ -576,41 +574,41 @@ def processPrices(
            .all()
        )
        stationItemDates = dict(rows)
-
+
    addItem, addZero = items.append, zeros.append
    getItemID = itemByName.get
    newItems, updtItems, ignItems = 0, 0, 0  # <-- put this back
 
-
+
    def processItemLine(matches):
        nonlocal newItems, updtItems, ignItems
        itemName, modified = matches.group('item', 'time')
        itemName = itemName.upper()
-
+
        # Look up the item ID.
        itemID = getItemID(itemName, -1)
        if itemID < 0:
            oldName = itemName
            itemName = corrections.correctItem(itemName)
-
+
            # Silently skip DELETED items
            if itemName == corrections.DELETED:
                DEBUG1("DELETED {}", oldName)
                return
-
+
            # Retry with corrected name
            itemName = itemName.upper()
            itemID = getItemID(itemName, -1)
-
+
            if itemID < 0:
                ignoreOrWarn(
                    UnknownItemError(priceFile, lineNo, itemName)
                )
                return
-
+
            DEBUG1("Renamed {} -> {}", oldName, itemName)
 
-
+
        lastModified = stationItemDates.get(itemID, None)
        if lastModified and merging:
            if modified and modified != 'now' and modified <= lastModified:
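
The placeholder-station hunk above keeps the existing pattern of calling session.flush() so the database assigns station_id before the surrounding transaction commits. A minimal sketch of that pattern, using an illustrative model rather than TradeDangerous's real Station schema:

from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Station(Base):
    __tablename__ = "station"
    station_id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    name: Mapped[str] = mapped_column(String(64))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    station = Station(name="SOL/EXAMPLE PORT")
    session.add(station)
    session.flush()              # emits the INSERT; the primary key is populated here
    new_id = station.station_id  # usable for in-memory indexes before commit
    session.commit()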
@@ -736,9 +734,9 @@ def processPricesFile(
     """
     Process a .prices file and import data into the DB via ORM.
     """
-
+
     tdenv.DEBUG0("Processing Prices file '{}'", pricesPath)
-
+
     with (pricesFh or pricesPath.open("r", encoding="utf-8")) as fh:
         (
             stations,
@@ -749,13 +747,13 @@ def processPricesFile(
             ignItems,
             numSys,
         ) = processPrices(tdenv, fh, session, defaultZero)
-
+
     if not tdenv.mergeImport:
         # Delete all StationItems for these stations
         session.query(SA.StationItem).filter(
             SA.StationItem.station_id.in_([sid for (sid,) in stations])
         ).delete(synchronize_session=False)
-
+
     # Remove zeroed pairs
     removedItems = 0
     if zeros:
@@ -763,7 +761,7 @@ def processPricesFile(
             tuple_(SA.StationItem.station_id, SA.StationItem.item_id).in_(zeros)
         ).delete(synchronize_session=False)
         removedItems = len(zeros)
-
+
     # Upsert items
     if items:
         for (
@@ -789,12 +787,12 @@ def processPricesFile(
                 supply_level=supply_level,
             )
             session.merge(obj)
-
+
     tdenv.DEBUG0("Marking populated stations as having a market")
     session.query(SA.Station).filter(
         SA.Station.station_id.in_([sid for (sid,) in stations])
     ).update({SA.Station.market: "Y"}, synchronize_session=False)
-
+
     changes = " and ".join(
         f"{v} {k}"
         for k, v in {
@@ -804,7 +802,7 @@ def processPricesFile(
         }.items()
         if v
     ) or "0"
-
+
     tdenv.NOTE(
         "Import complete: "
         "{:s} items "
@@ -814,7 +812,7 @@ def processPricesFile(
         len(stations),
         numSys,
     )
-
+
     if ignItems:
         tdenv.NOTE("Ignored {} items with old data", ignItems)
 
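
The zeroed-pair cleanup above filters on a composite key with SQLAlchemy's tuple_(...).in_(...). A self-contained sketch of that construct (illustrative schema, not TradeDangerous's real one; composite-IN support varies by database dialect):

from sqlalchemy import Integer, create_engine, tuple_
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class StationItem(Base):
    __tablename__ = "station_item"
    station_id: Mapped[int] = mapped_column(Integer, primary_key=True)
    item_id: Mapped[int] = mapped_column(Integer, primary_key=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([StationItem(station_id=1, item_id=1),
                     StationItem(station_id=1, item_id=2)])
    session.flush()
    zeros = [(1, 2)]  # (station_id, item_id) pairs flagged for removal
    session.query(StationItem).filter(
        tuple_(StationItem.station_id, StationItem.item_id).in_(zeros)
    ).delete(synchronize_session=False)
    session.commit()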
@@ -880,18 +878,18 @@ def processImportFile(
 ):
     """
     Import a CSV file into the given table.
-
+
     Applies header parsing, uniqueness checks, foreign key lookups,
     in-row deprecation correction (warnings only at -vv via DEBUG1), and upserts via SQLAlchemy ORM.
     Commits in batches for large datasets.
     """
-
+
     tdenv.DEBUG0("Processing import file '{}' for table '{}'", str(importPath), tableName)
-
+
     call_args = call_args or {}
     if line_callback:
         line_callback = partial_fn(line_callback, **call_args)
-
+
     # --- batch size config from environment or fallback ---
     env_batch = os.environ.get("TD_LISTINGS_BATCH")
     if env_batch:
@@ -902,39 +900,39 @@ def processImportFile(
             max_transaction_items = None
     else:
         max_transaction_items = None
-
+
     if max_transaction_items is None:
         if session.bind.dialect.name in ("mysql", "mariadb"):
             max_transaction_items = 50 * 1024
         else:
             max_transaction_items = 250 * 1024
-
+
     transaction_items = 0  # track how many rows inserted before committing
-
+
     with importPath.open("r", encoding="utf-8") as importFile:
         csvin = csv.reader(importFile, delimiter=",", quotechar="'", doublequote=True)
-
+
         # Read header row
         columnDefs = next(csvin)
         columnCount = len(columnDefs)
-
+
         # --- Process headers: extract column names, track indices ---
         activeColumns: list[str] = []  # Final columns we'll use (after "unq:" stripping)
         kept_indices: list[int] = []  # Indices into CSV rows we keep (aligned to activeColumns)
         uniqueIndexes: list[int] = []  # Indexes (into activeColumns) of unique keys
         fk_col_indices: dict[str, int] = {}  # Special handling for FK resolution
-
+
         uniquePfx = "unq:"
         uniqueLen = len(uniquePfx)
-
+
         # map of header (without "unq:") -> original CSV index, for correction by name
         header_index: dict[str, int] = {}
-
+
         for cIndex, cName in enumerate(columnDefs):
             colName, _, srcKey = cName.partition("@")
             baseName = colName[uniqueLen:] if colName.startswith(uniquePfx) else colName
             header_index[baseName] = cIndex
-
+
             # Special-case: System-added
             if tableName == "System":
                 if cName == "name":
@@ -942,7 +940,7 @@ def processImportFile(
             elif cName == "name@Added.added_id":
                 fk_col_indices["added"] = cIndex
                 continue
-
+
             # Foreign key columns for RareItem
             if tableName == "RareItem":
                 if cName == "!name@System.system_id":
@@ -954,25 +952,25 @@ def processImportFile(
                 if cName == "name@Category.category_id":
                     fk_col_indices["category"] = cIndex
                     continue
-
+
             # Handle unique constraint tracking
             if colName.startswith(uniquePfx):
                 uniqueIndexes.append(len(activeColumns))
                 colName = baseName
-
+
             activeColumns.append(colName)
             kept_indices.append(cIndex)
-
+
         importCount = 0
         uniqueIndex: dict[str, int] = {}
-
+
         # helpers for correction + visibility-gated warning
         DELETED = corrections.DELETED
-
+
         def _warn(line_no: int, msg: str) -> None:
             # Gate deprecation chatter to -vv (DEBUG1)
             tdenv.DEBUG1("{}:{} WARNING {}", importPath, line_no, msg)
-
+
         def _apply_row_corrections(table_name: str, row: list[str], line_no: int) -> bool:
             """
             Returns True if the row should be skipped (deleted in tolerant mode), False otherwise.
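
The header-processing hunks above implement a small column-naming convention: a "unq:" prefix marks a column that participates in the uniqueness check, and a "name@Table.column" suffix marks a value that must be resolved to a foreign key. A minimal sketch of that parsing rule, with a hypothetical parse_header() helper and illustrative header names:

def parse_header(columnDefs: list[str]) -> tuple[list[str], list[int], dict[str, str]]:
    uniquePfx = "unq:"
    active, unique_idx, fk_targets = [], [], {}
    for cName in columnDefs:
        colName, _, srcKey = cName.partition("@")     # e.g. "name@System.system_id"
        if srcKey:                                     # FK column: resolve to an id later
            fk_targets[colName.lstrip("!")] = srcKey
            continue
        if colName.startswith(uniquePfx):              # unique-key column
            unique_idx.append(len(active))
            colName = colName[len(uniquePfx):]
        active.append(colName)
    return active, unique_idx, fk_targets

# parse_header(["unq:name", "pos_x", "name@Added.added_id"])
# -> (["name", "pos_x"], [0], {"name": "Added.added_id"})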
@@ -992,7 +990,7 @@ def processImportFile(
                     if corr != orig:
                         _warn(line_no, f'System "{orig}" is deprecated and should be replaced with "{corr}".')
                         row[idx] = corr
-
+
             elif table_name == "Station":
                 s_idx = header_index.get("system")
                 n_idx = header_index.get("name")
@@ -1017,7 +1015,7 @@ def processImportFile(
                     if n_corr != n_orig:
                         _warn(line_no, f'Station "{n_orig}" is deprecated and should be replaced with "{n_corr}".')
                         row[n_idx] = n_corr
-
+
             elif table_name == "Category":
                 idx = header_index.get("name")
                 if idx is not None:
@@ -1031,7 +1029,7 @@ def processImportFile(
                     if corr != orig:
                         _warn(line_no, f'Category "{orig}" is deprecated and should be replaced with "{corr}".')
                         row[idx] = corr
-
+
             elif table_name == "Item":
                 cat_idx = header_index.get("category")
                 name_idx = header_index.get("name")
@@ -1057,7 +1055,7 @@ def processImportFile(
                     if i_corr != i_orig:
                         _warn(line_no, f'Item "{i_orig}" is deprecated and should be replaced with "{i_corr}".')
                         row[name_idx] = i_corr
-
+
             # RareItem: we only correct category (FK lookup uses names) to improve hit rate.
             elif table_name == "RareItem":
                 cat_idx = header_index.get("category")
@@ -1072,27 +1070,27 @@ def processImportFile(
                     if c_corr != c_orig:
                         _warn(line_no, f'Category "{c_orig}" is deprecated and should be replaced with "{c_corr}".')
                         row[cat_idx] = c_corr
-
+
             except BuildCacheBaseException:
                 # strict mode path bubbles up; caller will handle
                 raise
             return False  # do not skip
-
+
         # --- Read data lines ---
         for linein in csvin:
             if line_callback:
                 line_callback()
             if not linein:
                 continue
-
+
             lineNo = csvin.line_num
-
+
             if len(linein) != columnCount:
                 tdenv.NOTE("Wrong number of columns ({}:{}): {}", importPath, lineNo, ", ".join(linein))
                 continue
-
+
             tdenv.DEBUG1("  Values: {}", ", ".join(linein))
-
+
             # --- Apply corrections BEFORE uniqueness; may skip if deleted in tolerant mode
             try:
                 if _apply_row_corrections(tableName, linein, lineNo):
@@ -1103,10 +1101,10 @@ def processImportFile(
                     raise
                 # tolerant: already warned in _apply_row_corrections; skip row
                 continue
-
+
             # Extract and clean values to use (from corrected line)
             activeValues = [linein[i] for i in kept_indices]
-
+
             # --- Uniqueness check (after correction) ---
             try:
                 if uniqueIndexes:
@@ -1138,10 +1136,10 @@ def processImportFile(
                 )
                 session.rollback()
                 continue
-
+
             try:
                 rowdict = dict(zip(activeColumns, activeValues))
-
+
                 # Foreign key lookups — RareItem
                 if tableName == "RareItem":
                     sys_id = None
@@ -1151,7 +1149,7 @@ def processImportFile(
                         sys_id = _get_system_id(session, sys_name)
                     except ValueError:
                         tdenv.WARN("Unknown System '{}' in {}", sys_name, importPath)
-
+
                     if "station" in fk_col_indices:
                         stn_name = linein[fk_col_indices["station"]]
                         if sys_id is not None:
@@ -1161,14 +1159,14 @@ def processImportFile(
                             tdenv.WARN("Unknown Station '{}' in {}", stn_name, importPath)
                         else:
                             tdenv.WARN("Station lookup skipped (no system_id) for '{}'", stn_name)
-
+
                     if "category" in fk_col_indices:
                         cat_name = linein[fk_col_indices["category"]]
                         try:
                             rowdict["category_id"] = _get_category_id(session, cat_name)
                         except ValueError:
                             tdenv.WARN("Unknown Category '{}' in {}", cat_name, importPath)
-
+
                 # Foreign key lookups — System.added
                 if tableName == "System" and "added" in fk_col_indices:
                     added_val = linein[fk_col_indices["added"]] or "EDSM"
@@ -1177,7 +1175,7 @@ def processImportFile(
                 except ValueError:
                     rowdict["added_id"] = None
                     tdenv.WARN("Unknown Added value '{}' in {}", added_val, importPath)
-
+
                 # --- Type coercion for common types ---
                 for key, val in list(rowdict.items()):
                     if val in ("", None):
@@ -1206,7 +1204,7 @@ def processImportFile(
                             val,
                         )
                         rowdict[key] = None
-
+
                 # Special handling for SQL reserved word `class`
                 if tableName == "Upgrade" and "class" in rowdict:
                     rowdict["class_"] = rowdict.pop("class")
@@ -1214,13 +1212,13 @@ def processImportFile(
                     rowdict["class_"] = rowdict.pop("class")
                 if tableName == "RareItem" and "system_id" in rowdict:
                     rowdict.pop("system_id", None)
-
+
                 # ORM insert/merge
                 Model = getattr(SA, tableName)
                 obj = Model(**rowdict)
                 session.merge(obj)
                 importCount += 1
-
+
                 # Batch commit
                 if max_transaction_items:
                     transaction_items += 1
@@ -1228,7 +1226,7 @@ def processImportFile(
                     session.commit()
                     session.begin()
                     transaction_items = 0
-
+
             except Exception as e:
                 # Log all import errors — but keep going
                 tdenv.WARN(
@@ -1244,7 +1242,7 @@ def processImportFile(
                     )
                 )
                 session.rollback()
-
+
     # Final commit after file done
     session.commit()
     tdenv.DEBUG0("{count} {table}s imported", count=importCount, table=tableName)
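
The batching above sizes its transactions from the TD_LISTINGS_BATCH environment variable, falling back to dialect-dependent defaults (50 * 1024 rows for MySQL/MariaDB, 250 * 1024 otherwise) and committing mid-import once the threshold is reached. A minimal sketch of the same pattern, with a generic rows iterable standing in for the parsed CSV:

import os

def batched_merge(session, rows, dialect_name: str) -> None:
    # Environment override first, then the dialect-dependent fallback.
    env_batch = os.environ.get("TD_LISTINGS_BATCH")
    max_items = int(env_batch) if env_batch and env_batch.isdigit() else None
    if max_items is None:
        max_items = 50 * 1024 if dialect_name in ("mysql", "mariadb") else 250 * 1024

    pending = 0
    for obj in rows:
        session.merge(obj)       # upsert-style write
        pending += 1
        if pending >= max_items:
            session.commit()     # bound transaction size on huge imports
            pending = 0
    session.commit()             # flush the final partial batch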
@@ -1255,32 +1253,32 @@ def processImportFile(
 def buildCache(tdb, tdenv):
     """
     Rebuilds the database from source files.
-
+
     TD's data is either "stable" - information that rarely changes like Ship
     details, star systems etc - and "volatile" - pricing information, etc.
-
+
     The stable data starts out in data/TradeDangerous.sql while other data
     is stored in custom-formatted text files, e.g. ./TradeDangerous.prices.
-
+
     We load both sets of data into a database, after which we can
     avoid the text-processing overhead by simply checking if the text files
     are newer than the database.
     """
-
+
     tdenv.NOTE(
-        "
+        "(Re)building database: this may take a few moments.",
         stderr=True,
     )
-
+
     dbPath = tdb.dbPath
     sqlPath = tdb.sqlPath
-    pricesPath = tdb.pricesPath
+    # pricesPath = tdb.pricesPath
     engine = tdb.engine
-
+
     # --- Step 1: reset schema BEFORE opening a session/transaction ---
     # Single unified call; no dialect branching here.
     lifecycle.reset_db(engine, db_path=dbPath)
-
+
     # --- Step 2: open a new session for rebuild work ---
     with tdb.Session() as session:
         # Import standard tables on a plain session with progress
@@ -1320,46 +1318,50 @@ def buildCache(tdb, tdenv):
                     importName,
                 )
                 prog.increment(1)
-
+
             with prog.sub_task(description="Save DB"):
                 session.commit()
-
-        # --- Step 3: parse the prices file (still plain session) ---
-        if pricesPath.exists():
-            with Progress(max_value=None, width=25, prefix="Processing prices file"):
-                processPricesFile(tdenv, session, pricesPath)
-        else:
-            tdenv.NOTE(
-                f'Missing "{pricesPath}" file - no price data.',
-                stderr=True,
-            )
-
+
+        # # --- Step 3: parse the prices file (still plain session) ---
+        # if pricesPath.exists():
+        #     with Progress(max_value=None, width=25, prefix="Processing prices file"):
+        #         processPricesFile(tdenv, session, pricesPath)
+        # else:
+        #     tdenv.NOTE(
+        #         f'Missing "{pricesPath}" file - no price data.',
+        #         stderr=True,
+        #     )
+
     tdb.close()
-    tdenv.
+    tdenv.NOTE(
+        "Database build completed.",
+        stderr=True,
+    )
 
 
 ######################################################################
 
 
 def regeneratePricesFile(tdb, tdenv):
-    """
-    Regenerate the .prices file from the current DB contents.
-    Uses the ORM session rather than raw sqlite.
-    """
-    tdenv.DEBUG0("Regenerating .prices file")
-
-    with tdb.Session() as session:
-        with tdb.pricesPath.open("w", encoding="utf-8") as pricesFile:
-            prices.dumpPrices(
-                session,
-                prices.Element.full,
-                file=pricesFile,
-                debug=tdenv.debug,
-            )
-
-    # Only touch the DB file on SQLite — MariaDB has no dbPath
-    if tdb.engine.dialect.name == "sqlite" and tdb.dbPath and os.path.exists(tdb.dbPath):
-        os.utime(tdb.dbPath)
+    return
+    # """
+    # Regenerate the .prices file from the current DB contents.
+    # Uses the ORM session rather than raw sqlite.
+    # """
+    # tdenv.DEBUG0("Regenerating .prices file")
+    #
+    # with tdb.Session() as session:
+    #     with tdb.pricesPath.open("w", encoding="utf-8") as pricesFile:
+    #         prices.dumpPrices(
+    #             session,
+    #             prices.Element.full,
+    #             file=pricesFile,
+    #             debug=tdenv.debug,
+    #         )
+    #
+    # # Only touch the DB file on SQLite — MariaDB has no dbPath
+    # if tdb.engine.dialect.name == "sqlite" and tdb.dbPath and os.path.exists(tdb.dbPath):
+    #     os.utime(tdb.dbPath)
 
 ######################################################################
 
@@ -1370,15 +1372,15 @@ def importDataFromFile(tdb, tdenv, path, pricesFh=None, reset=False):
     that is when a new station is encountered, delete any
     existing records for that station in the database.
     """
-
+
     if not pricesFh and not path.exists():
         raise TradeException(f"No such file: {path}")
-
+
     if reset:
         tdenv.DEBUG0("Resetting price data")
         with tdb.Session.begin() as session:
             session.query(SA.StationItem).delete()
-
+
     tdenv.DEBUG0(f"Importing data from {path}")
     processPricesFile(
         tdenv,
@@ -1386,7 +1388,7 @@ def importDataFromFile(tdb, tdenv, path, pricesFh=None, reset=False):
         pricesPath=path,
         pricesFh=pricesFh,
     )
-
-    # If everything worked, regenerate the canonical prices file if this wasn’t the main one
-    if path != tdb.pricesPath:
-        regeneratePricesFile(tdb, tdenv)
+
+    # # If everything worked, regenerate the canonical prices file if this wasn’t the main one
+    # if path != tdb.pricesPath:
+    #     regeneratePricesFile(tdb, tdenv)