tradedangerous-11.5.2-py3-none-any.whl → tradedangerous-12.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry notes that this version of tradedangerous might be problematic.
- tradedangerous/cache.py +567 -395
- tradedangerous/cli.py +2 -2
- tradedangerous/commands/TEMPLATE.py +25 -26
- tradedangerous/commands/__init__.py +8 -16
- tradedangerous/commands/buildcache_cmd.py +40 -10
- tradedangerous/commands/buy_cmd.py +57 -46
- tradedangerous/commands/commandenv.py +0 -2
- tradedangerous/commands/export_cmd.py +78 -50
- tradedangerous/commands/import_cmd.py +70 -34
- tradedangerous/commands/market_cmd.py +52 -19
- tradedangerous/commands/olddata_cmd.py +120 -107
- tradedangerous/commands/rares_cmd.py +122 -110
- tradedangerous/commands/run_cmd.py +118 -66
- tradedangerous/commands/sell_cmd.py +52 -45
- tradedangerous/commands/shipvendor_cmd.py +49 -234
- tradedangerous/commands/station_cmd.py +55 -485
- tradedangerous/commands/update_cmd.py +56 -420
- tradedangerous/csvexport.py +173 -162
- tradedangerous/gui.py +2 -2
- tradedangerous/plugins/eddblink_plug.py +389 -252
- tradedangerous/plugins/spansh_plug.py +2488 -821
- tradedangerous/prices.py +124 -142
- tradedangerous/templates/TradeDangerous.sql +6 -6
- tradedangerous/tradecalc.py +1227 -1109
- tradedangerous/tradedb.py +533 -384
- tradedangerous/tradeenv.py +12 -1
- tradedangerous/version.py +1 -1
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/METADATA +17 -4
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/RECORD +33 -39
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/WHEEL +1 -1
- tradedangerous/commands/update_gui.py +0 -721
- tradedangerous/jsonprices.py +0 -254
- tradedangerous/plugins/edapi_plug.py +0 -1071
- tradedangerous/plugins/journal_plug.py +0 -537
- tradedangerous/plugins/netlog_plug.py +0 -316
- tradedangerous/templates/database_changes.json +0 -6
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/entry_points.txt +0 -0
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info/licenses}/LICENSE +0 -0
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/top_level.txt +0 -0
tradedangerous/cache.py
CHANGED
@@ -1,7 +1,8 @@
 # --------------------------------------------------------------------
 # Copyright (C) Oliver 'kfsone' Smith 2014 <oliver@kfs.org>:
 # Copyright (C) Bernd 'Gazelle' Gollesch 2016, 2017
-# Copyright (C)
+# Copyright (C) Stefan 'Tromador' Morrell 2025
+# Copyright (C) Jonathan 'eyeonus' Jones 2018-2025
 #
 # You are free to use, redistribute, or even print and eat a copy of
 # this software so long as you include this copyright notice.
@@ -23,14 +24,24 @@
 from __future__ import annotations
 
 from pathlib import Path
+from datetime import datetime, date
 import csv
 import os
 import re
-import sqlite3
 import sys
 import typing
 
+
 from functools import partial as partial_fn
+from sqlalchemy import func, Integer, Float, DateTime
+from sqlalchemy import inspect as sa_inspect
+from sqlalchemy.orm import Session
+from sqlalchemy.types import DateTime as SA_DateTime
+from tradedangerous.db import make_engine_from_config, get_session_factory
+from tradedangerous.db import orm_models as SA
+from tradedangerous.db import lifecycle
+from tradedangerous.db.utils import parse_ts, get_import_batch_size
+
 from .fs import file_line_count
 from .tradeexcept import TradeException
 from tradedangerous.misc.progress import Progress, CountingBar
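The new import block swaps the old sqlite3 dependency for SQLAlchemy plus a tradedangerous.db package. For orientation, here is a minimal, self-contained sketch of the engine-and-session plumbing that make_engine_from_config and get_session_factory presumably wrap; their exact signatures are not shown in this diff, and the sqlite URL below is illustrative only.

    # Generic SQLAlchemy setup sketch; URL and names are illustrative,
    # not TradeDangerous configuration.
    from sqlalchemy import create_engine, select
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///TradeDangerous.db")
    Session = sessionmaker(bind=engine)  # a session factory, comparable to what get_session_factory presumably returns

    with Session() as session:           # Session objects are context managers
        session.execute(select(1))       # trivial round-trip to verify connectivity
        session.commit()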
@@ -38,6 +49,7 @@ from . import corrections, utils
 from . import prices
 
 
+
 # For mypy/pylint type checking
 if typing.TYPE_CHECKING:
     from typing import Any, Callable, Optional, TextIO  # noqa
@@ -233,6 +245,52 @@ class SupplyError(BuildCacheBaseException):
 ######################################################################
 # Helpers
 
+# --- tiny FK lookup caches (per import run) ---
+_fk_cache_system = {}
+_fk_cache_station = {}
+_fk_cache_category = {}
+_fk_cache_added = {}
+
+def _get_system_id(session, system_name):
+    if system_name in _fk_cache_system:
+        return _fk_cache_system[system_name]
+    rid = session.query(SA.System.system_id).filter(SA.System.name == system_name).scalar()
+    if rid is None:
+        raise ValueError(f"Unknown System name: {system_name}")
+    _fk_cache_system[system_name] = rid
+    return rid
+
+def _get_station_id(session, system_id, station_name):
+    key = (system_id, station_name)
+    if key in _fk_cache_station:
+        return _fk_cache_station[key]
+    rid = (
+        session.query(SA.Station.station_id)
+        .filter(SA.Station.system_id == system_id, SA.Station.name == station_name)
+        .scalar()
+    )
+    if rid is None:
+        raise ValueError(f"Unknown Station '{station_name}' in system_id={system_id}")
+    _fk_cache_station[key] = rid
+    return rid
+
+def _get_category_id(session, cat_name):
+    if cat_name in _fk_cache_category:
+        return _fk_cache_category[cat_name]
+    rid = session.query(SA.Category.category_id).filter(SA.Category.name == cat_name).scalar()
+    if rid is None:
+        raise ValueError(f"Unknown Category name: {cat_name}")
+    _fk_cache_category[cat_name] = rid
+    return rid
+
+def _get_added_id(session, added_name):
+    if added_name in _fk_cache_added:
+        return _fk_cache_added[added_name]
+    rid = session.query(SA.Added.added_id).filter(SA.Added.name == added_name).scalar()
+    if rid is None:
+        raise ValueError(f"Unknown Added name: {added_name}")
+    _fk_cache_added[added_name] = rid
+    return rid
 
 # supply/demand levels are one of '?' for unknown, 'L', 'M' or 'H'
 # for low, medium, or high. We turn these into integer values for
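The four cached helpers above trade a little memory for round-trips: each distinct name costs at most one SELECT per import run, and repeat lookups are dictionary hits. A self-contained sketch of the same memoised-scalar-lookup pattern, using a toy model rather than the real TradeDangerous schema:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class System(Base):               # toy stand-in for SA.System
        __tablename__ = "System"
        system_id = Column(Integer, primary_key=True)
        name = Column(String, unique=True)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    _cache: dict[str, int] = {}

    def get_system_id(session: Session, name: str) -> int:
        if name in _cache:
            return _cache[name]       # cache hit: no SQL issued
        rid = session.query(System.system_id).filter(System.name == name).scalar()
        if rid is None:
            raise ValueError(f"Unknown System name: {name}")
        _cache[name] = rid
        return rid

    with Session(engine) as session:
        session.add(System(name="Sol"))
        session.commit()
        assert get_system_id(session, "Sol") == get_system_id(session, "Sol")  # second call is a dict hit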
@@ -299,33 +357,38 @@ def parseSupply(pricesFile: Path, lineNo: int, category: str, reading: str) -> t
 ######################################################################
 
 
-def getSystemByNameIndex(
-    """
-…
-    return {
+def getSystemByNameIndex(session: Session) -> dict[str, int]:
+    """Build system index by uppercase name → system_id."""
+    rows = (
+        session.query(SA.System.system_id, func.upper(SA.System.name))
+        .all()
+    )
+    return {name: ID for (ID, name) in rows}
+
+
+def getStationByNameIndex(session: Session) -> dict[str, int]:
+    """Build station index in STAR/Station notation → station_id."""
+    rows = (
+        session.query(
+            SA.Station.station_id,
+            (SA.System.name + "/" + SA.Station.name)
+        )
+        .join(SA.System, SA.Station.system_id == SA.System.system_id)
+        .all()
+    )
+    # normalise case like original
+    return {name.upper(): ID for (ID, name) in rows}
 
 
-def getStationByNameIndex(cur: sqlite3.Cursor) -> dict[str, int]:
-    """ Build station index in STAR/Station notation """
-    cur.execute("""
-        SELECT station_id,
-               system.name || '/' || station.name
-          FROM System
-         INNER JOIN Station
-         USING (system_id)
-    """)
-    return { name.upper(): ID for (ID, name) in cur }
 
+def getItemByNameIndex(session: Session) -> dict[str, int]:
+    """Generate item name index (uppercase item name → item_id)."""
+    rows = (
+        session.query(SA.Item.item_id, func.upper(SA.Item.name))
+        .all()
+    )
+    return {name: itemID for (itemID, name) in rows}
 
-def getItemByNameIndex(cur: sqlite3.Cursor) -> dict[str, int]:
-    """
-    Generate item name index.
-    """
-    cur.execute("SELECT item_id, UPPER(name) FROM item")
-    return { name: itemID for (itemID, name) in cur }
 
 
 # The return type of process prices is complicated, should probably have been a type
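Two ORM behaviours make these one-for-one replacements of the old SQL: string Column concatenation with + compiles to SQL concatenation (rendered as || on SQLite), and Query.all() yields row tuples that unpack exactly like the old cursor iteration. A toy demonstration under those assumptions (illustrative table, not the real schema):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Pair(Base):
        __tablename__ = "Pair"
        id = Column(Integer, primary_key=True)
        left = Column(String)
        right = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as s:
        s.add(Pair(left="Sol", right="Abraham Lincoln"))
        s.commit()
        rows = s.query(Pair.id, (Pair.left + "/" + Pair.right)).all()
        index = {name.upper(): ID for (ID, name) in rows}  # same shape as the old cursor loop
        print(index)  # {'SOL/ABRAHAM LINCOLN': 1}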
@@ -349,28 +412,33 @@ if typing.TYPE_CHECKING:
     ZeroItems = list[tuple[int, int]]  # stationID, itemID
 
 
-def processPrices(
+def processPrices(
+    tdenv: TradeEnv,
+    priceFile: Path,
+    session: Session,
+    defaultZero: bool
+) -> tuple[ProcessedStationIds, ProcessedItems, ZeroItems, int, int, int, int]:
     """
-…
+    Populate the database with prices by reading the given file.
+
+    :param tdenv: The environment we're working in
+    :param priceFile: File to read
+    :param session: Active SQLAlchemy session
+    :param defaultZero: Whether to create default zero-availability/-demand
+                        records for missing data. For partial updates,
+                        set False.
     """
-
+
     DEBUG0, DEBUG1 = tdenv.DEBUG0, tdenv.DEBUG1
     DEBUG0("Processing prices file: {}", priceFile)
-
-    cur = db.cursor()
+
     ignoreUnknown = tdenv.ignoreUnknown
     quiet = tdenv.quiet
     merging = tdenv.mergeImport
-
-…
+
+    # build lookup indexes from DB
+    systemByName = getSystemByNameIndex(session)
+    stationByName = getStationByNameIndex(session)
     stationByName.update(
         (sys, ID)
         for sys, ID in corrections.stations.items()
@@ -382,12 +450,12 @@ def processPrices(tdenv: TradeEnv, priceFile: Path, db: sqlite3.Connection, defa
         for stn, alt in corrections.stations.items()
         if isinstance(alt, str)
     }
-
-    itemByName = getItemByNameIndex(
-
+
+    itemByName = getItemByNameIndex(session)
+
     defaultUnits = -1 if not defaultZero else 0
     defaultLevel = -1 if not defaultZero else 0
-
+
     stationID = None
     facility = None
     processedStations = {}
@@ -396,35 +464,37 @@
     stationItemDates = {}
     DELETED = corrections.DELETED
     items, zeros = [], []
-
+
     lineNo, localAdd = 0, 0
+
     if not ignoreUnknown:
         def ignoreOrWarn(error: Exception) -> None:
             raise error
-
     elif not quiet:
-        ignoreOrWarn
+        def ignoreOrWarn(error: Exception) -> None:
+            # Ensure exceptions are stringified before passing to WARN
+            tdenv.WARN(str(error))
 
     def changeStation(matches: re.Match) -> None:
         nonlocal facility, stationID
         nonlocal processedStations, processedItems, localAdd
         nonlocal stationItemDates
-
+
         # ## Change current station
         stationItemDates = {}
         systemNameIn, stationNameIn = matches.group(1, 2)
         systemName, stationName = systemNameIn.upper(), stationNameIn.upper()
         corrected = False
         facility = f'{systemName}/{stationName}'
-
-        # Make sure it's valid.
+
         stationID = DELETED
-        newID = stationByName.get(facility, -1)
+        newID = stationByName.get(facility, -1)
         DEBUG0("Selected station: {}, ID={}", facility, newID)
+
         if newID is DELETED:
             DEBUG1("DELETED Station: {}", facility)
             return
-
+
         if newID < 0:
             if utils.checkForOcrDerp(tdenv, systemName, stationName):
                 return
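The rewritten ignoreOrWarn keeps the original design: the error policy is chosen once, up front, by binding a different local function per configuration, so the hot parsing loop never re-tests the flags. A self-contained sketch of that idiom; make_error_policy is a hypothetical name, not part of cache.py:

    def make_error_policy(strict: bool, quiet: bool):
        # The branch runs once; callers just invoke the returned function.
        if strict:
            def ignore_or_warn(error: Exception) -> None:
                raise error
        elif not quiet:
            def ignore_or_warn(error: Exception) -> None:
                print("WARN:", str(error))  # stringify before logging
        else:
            def ignore_or_warn(error: Exception) -> None:
                pass                        # silently ignore
        return ignore_or_warn

    policy = make_error_policy(strict=False, quiet=False)
    policy(ValueError("unknown station"))   # prints a warning instead of raising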
@@ -436,81 +506,81 @@
             if altName:
                 DEBUG1("SYSTEM '{}' renamed '{}'", systemName, altName)
                 systemName, facility = altName, "/".join((altName, stationName))
-
-            systemID = systemByName.get(systemName, -1)
+
+            systemID = systemByName.get(systemName, -1)
             if systemID < 0:
                 ignoreOrWarn(
                     UnknownSystemError(priceFile, lineNo, facility)
                 )
                 return
-
+
             altStation = stnCorrections.get(facility)
             if altStation:
                 if altStation is DELETED:
                     DEBUG1("DELETED Station: {}", facility)
                     return
-
+
                 DEBUG1("Station '{}' renamed '{}'", facility, altStation)
                 stationName = altStation.upper()
                 facility = f'{systemName}/{stationName}'
-
+
             newID = stationByName.get(facility, -1)
             if newID is DELETED:
                 DEBUG1("Renamed station DELETED: {}", facility)
                 return
-
+
             if newID < 0:
                 if not ignoreUnknown:
-                    DEBUG0(f'Key value: "{list(stationByName.keys())[list(stationByName.values()).index(128893178)]}"')
                     ignoreOrWarn(
                         UnknownStationError(priceFile, lineNo, facility)
                     )
                     return
+
                 name = utils.titleFixup(stationName)
-…
-                """, [systemID, name])
-                newID = inscur.lastrowid
+                # ORM insert: placeholder station
+                station = SA.Station(
+                    system_id=systemID,
+                    name=name,
+                    ls_from_star=0,
+                    blackmarket='?',
+                    max_pad_size='?',
+                    market='?',
+                    shipyard='?',
+                )
+                session.add(station)
+                session.flush()  # assign station_id
+                newID = station.station_id
+
                 stationByName[facility] = newID
                 tdenv.NOTE(
                     "Added local station placeholder for {} (#{})", facility, newID
                 )
                 localAdd += 1
+
         elif newID in processedStations:
-            # Check for duplicates
             if not corrected:
                 raise MultipleStationEntriesError(
                     priceFile, lineNo, facility,
                     processedStations[newID]
                 )
-
+
         stationID = newID
         processedSystems.add(systemName)
         processedStations[stationID] = lineNo
         processedItems = {}
+
+        # ORM query: load existing item → modified map
+        rows = (
+            session.query(SA.StationItem.item_id, SA.StationItem.modified)
+            .filter(SA.StationItem.station_id == stationID)
+            .all()
+        )
+        stationItemDates = dict(rows)
 
-        cur = db.execute("""
-            SELECT item_id, modified
-              FROM StationItem
-             WHERE station_id = ?
-        """, [stationID])
-        stationItemDates = dict(cur)
-
     addItem, addZero = items.append, zeros.append
     getItemID = itemByName.get
-    newItems, updtItems, ignItems = 0, 0, 0
+    newItems, updtItems, ignItems = 0, 0, 0  # <-- put this back
+
 
     def processItemLine(matches):
         nonlocal newItems, updtItems, ignItems
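The placeholder-station block above replaces cursor.lastrowid with session.flush(), which emits the INSERT inside the open transaction and populates the autogenerated primary key without committing. A minimal sketch of the idiom with a toy model:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Station(Base):              # toy stand-in for SA.Station
        __tablename__ = "Station"
        station_id = Column(Integer, primary_key=True)  # autoincrement on SQLite
        name = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        station = Station(name="PLACEHOLDER")
        session.add(station)
        session.flush()               # INSERT is emitted; transaction stays open
        print(station.station_id)     # primary key is now populated (1)
        session.commit()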
@@ -649,90 +719,98 @@
 ######################################################################
 
 
-def processPricesFile(
+def processPricesFile(
+    tdenv: "TradeEnv",
+    session: Session,
+    pricesPath: Path,
+    pricesFh: Optional[typing.TextIO] = None,
+    defaultZero: bool = False,
+) -> None:
+    """
+    Process a .prices file and import data into the DB via ORM.
+    """
+
     tdenv.DEBUG0("Processing Prices file '{}'", pricesPath)
-
-    with (pricesFh or pricesPath.open(
-…
+
+    with (pricesFh or pricesPath.open("r", encoding="utf-8")) as fh:
+        (
+            stations,
+            items,
+            zeros,
+            newItems,
+            updtItems,
+            ignItems,
+            numSys,
+        ) = processPrices(tdenv, fh, session, defaultZero)
+
     if not tdenv.mergeImport:
-…
+        # Delete all StationItems for these stations
+        session.query(SA.StationItem).filter(
+            SA.StationItem.station_id.in_([sid for (sid,) in stations])
+        ).delete(synchronize_session=False)
+
     if zeros:
-…
-            AND item_id = ?
-        """, zeros)
+        session.query(SA.StationItem).filter(
+            tuple_(SA.StationItem.station_id, SA.StationItem.item_id).in_(zeros)
+        ).delete(synchronize_session=False)
     removedItems = len(zeros)
-
+
     if items:
         for item in items:
-…
-            # ?, ?, ?
-            # )
-            # """, items)
-
+            (
+                station_id,
+                item_id,
+                modified,
+                demand_price,
+                demand_units,
+                demand_level,
+                supply_price,
+                supply_units,
+                supply_level,
+            ) = item
+            obj = SA.StationItem(
+                station_id=station_id,
+                item_id=item_id,
+                modified=modified or None,
+                demand_price=demand_price,
+                demand_units=demand_units,
+                demand_level=demand_level,
+                supply_price=supply_price,
+                supply_units=supply_units,
+                supply_level=supply_level,
+            )
+            session.merge(obj)
+
     tdenv.DEBUG0("Marking populated stations as having a market")
-…
-        "updated": updtItems,
-        "removed": removedItems,
-    }.items() if v) or "0"
-
+    session.query(SA.Station).filter(
+        SA.Station.station_id.in_([sid for (sid,) in stations])
+    ).update({SA.Station.market: "Y"}, synchronize_session=False)
+
+    changes = " and ".join(
+        f"{v} {k}"
+        for k, v in {
+            "new": newItems,
+            "updated": updtItems,
+            "removed": removedItems,
+        }.items()
+        if v
+    ) or "0"
+
     tdenv.NOTE(
         "Import complete: "
-…
+        "{:s} items "
+        "over {:n} stations "
+        "in {:n} systems",
+        changes,
+        len(stations),
+        numSys,
     )
-
+
     if ignItems:
         tdenv.NOTE("Ignored {} items with old data", ignItems)
 
 
+
 ######################################################################
 
 
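The zeros branch deletes composite keys with SQLAlchemy's tuple_ construct, which, as an aside, does not appear among the imports added earlier in this diff, so it is presumably imported elsewhere in the file. A self-contained sketch of the construct; note that row-value IN needs backend support (for example SQLite 3.15+):

    from sqlalchemy import Column, Integer, create_engine, tuple_
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class StationItem(Base):          # toy stand-in for SA.StationItem
        __tablename__ = "StationItem"
        station_id = Column(Integer, primary_key=True)
        item_id = Column(Integer, primary_key=True)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([StationItem(station_id=1, item_id=1),
                         StationItem(station_id=1, item_id=2)])
        session.commit()
        zeros = [(1, 2)]              # (station_id, item_id) pairs to delete
        session.query(StationItem).filter(
            tuple_(StationItem.station_id, StationItem.item_id).in_(zeros)
        ).delete(synchronize_session=False)
        session.commit()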
@@ -780,303 +858,397 @@ def deprecationCheckItem(importPath, lineNo, line):
     )
 
 
-…
+# --- main importer ---
+def processImportFile(
+    tdenv,
+    session,
+    importPath,
+    tableName,
+    *,
+    line_callback: Optional[Callable] = None,
+    call_args: Optional[dict] = None,
+):
+    """
+    Import a CSV file into the given table.
+
+    - RareItem.csv:
+        Skips FK marker columns:
+          * !name@System.system_id
+          * name@Station.station_id
+          * name@Category.category_id
+        Looks up system_id (transient), station_id, category_id via cached helpers.
+        NOTE: system_id is NOT a RareItem column and is not passed to the model.
+
+    - System.csv:
+        Skips 'name@Added.added_id' from active columns; resolves Added by name,
+        defaulting to "EDSM" when blank.
+
+    - All tables:
+        Uses parse_ts() for datetimes.
+        Enforces unq: unique headers.
+        Commits per tradedangerous.db.utils.get_import_batch_size(session).
+    """
+
+    tdenv.DEBUG0("Processing import file '{}' for table '{}'", str(importPath), tableName)
+
     call_args = call_args or {}
     if line_callback:
         line_callback = partial_fn(line_callback, **call_args)
-
-    fkeySelectStr = (
-        "("
-        " SELECT {newValue}"
-        " FROM {table}"
-        " WHERE {stmt}"
-        ")"
-    )
+
     uniquePfx = "unq:"
     uniqueLen = len(uniquePfx)
-…
+
+    # Backend-aware batch policy (SQLite=None→single commit; MariaDB defaults to 50k; env override respected)
+    max_transaction_items = get_import_batch_size(session, profile="csv")  # from tradedangerous.db.utils
+    transaction_items = 0
+
+    with importPath.open("r", encoding="utf-8") as importFile:
+        csvin = csv.reader(importFile, delimiter=",", quotechar="'", doublequote=True)
+
+        # header
         columnDefs = next(csvin)
         columnCount = len(columnDefs)
-
-        #
-…
-        for
-        colName, _, srcKey = cName.partition(
-…
+
+        activeColumns: list[str] = []   # headers that map directly to ORM fields
+        kept_indices: list[int] = []    # original header indices that we KEEP (aligns values)
+        uniqueIndexes: list[int] = []   # indexes into activeColumns (post-skip)
+        fk_col_indices: dict[str, int] = {}  # special FK headers → their original indices
+
+        # --- preprocess header ---
+        for cIndex, cName in enumerate(columnDefs):
+            colName, _, srcKey = cName.partition("@")
+
+            # --- System.csv ---
+            if tableName == "System":
+                if cName == "name":
+                    srcKey = ""  # plain field
+                elif cName == "name@Added.added_id":
+                    # We'll resolve Added by name separately; skip from active columns.
+                    fk_col_indices["added"] = cIndex
+                    continue  # do NOT keep this header/value
+
+            # --- RareItem.csv (skip FK headers; remember positions) ---
+            if tableName == "RareItem":
+                if cName == "!name@System.system_id":
+                    fk_col_indices["system"] = cIndex
+                    continue  # do NOT keep this header/value
+                if cName == "name@Station.station_id":
+                    fk_col_indices["station"] = cIndex
+                    continue  # do NOT keep this header/value
+                if cName == "name@Category.category_id":
+                    fk_col_indices["category"] = cIndex
+                    continue  # do NOT keep this header/value
+
+            # unique index marker (e.g., "unq:name")
             if colName.startswith(uniquePfx):
-                uniqueIndexes.append(
+                uniqueIndexes.append(len(activeColumns))
                 colName = colName[uniqueLen:]
-…
-                # this column is only used to resolve an FK
-                assert srcKey
-                colName = colName[len(ignorePfx):]
-                joinHelper.append((colName, queryTab, queryCol))
-                continue
-
-            # foreign key, we need to make a select
-            joinTable = [ queryTab ]
-            joinStmt = []
-            for nextCol, nextTab, nextJoin in joinHelper:
-                joinTable.append(
-                    "INNER JOIN {} USING({})".format(nextTab, nextJoin)
-                )
-                joinStmt.append(
-                    "{}.{} = ?".format(nextTab, nextCol)
-                )
-            joinHelper = []
-            joinStmt.append("{}.{} = ?".format(queryTab, colName))
-            bindColumns.append(queryCol)
-            bindValues.append(
-                fkeySelectStr.format(
-                    newValue = srcKey,
-                    table = " ".join(joinTable),
-                    stmt = " AND ".join(joinStmt),
-                )
-            )
-        # now we can make the sql statement
-        sql_stmt = """
-            INSERT OR REPLACE INTO {table} ({columns}) VALUES({values})
-        """.format(
-            table=tableName,
-            columns=','.join(bindColumns),
-            values=','.join(bindValues)
-        )
-        tdenv.DEBUG0("SQL-Statement: {}", sql_stmt)
-
-        # Check if there is a deprecation check for this table.
-        deprecationFn = getattr(
-            sys.modules[__name__],
-            "deprecationCheck" + tableName,
-            None
-        )
-
-        # import the data
+
+            # keep normal columns and remember their source index
+            activeColumns.append(colName)
+            kept_indices.append(cIndex)
+
+        # optional deprecation checker
+        deprecationFn = getattr(sys.modules[__name__], "deprecationCheck" + tableName, None)
+
         importCount = 0
         uniqueIndex = {}
-
+
         for linein in csvin:
            if line_callback:
                 line_callback()
            if not linein:
                 continue
            lineNo = csvin.line_num
-…
+
+            if len(linein) != columnCount:
+                tdenv.NOTE(
+                    "Wrong number of columns ({}:{}): {}",
+                    importPath,
+                    lineNo,
+                    ", ".join(linein),
+                )
+                continue
+
+            tdenv.DEBUG1("  Values: {}", ", ".join(linein))
+
+            # deprecation checks
+            if deprecationFn:
+                try:
+                    deprecationFn(importPath, lineNo, linein)
+                except (DeprecatedKeyError, DeletedKeyError) as e:
+                    if not tdenv.ignoreUnknown:
+                        raise e
+                    e.category = "WARNING"
+                    tdenv.NOTE("{}", e)
+                    continue
+
+            # Build values aligned to activeColumns (skip the FK columns we excluded)
+            activeValues = [linein[i] for i in kept_indices]
+
+            # unique index enforcement over activeColumns
+            if uniqueIndexes:
+                keyValues = [str(activeValues[i]).upper() for i in uniqueIndexes]
+                key = ":!:".join(keyValues)
+                prevLineNo = uniqueIndex.get(key, 0)
+                if prevLineNo:
+                    key_disp = "/".join(keyValues)
+                    raise DuplicateKeyError(importPath, lineNo, "entry", key_disp, prevLineNo)
+                uniqueIndex[key] = lineNo
+
+            try:
+                # Base rowdict from non-FK columns only
+                rowdict = dict(zip(activeColumns, activeValues))
+
+                # --- RareItem foreign key lookups ---
+                if tableName == "RareItem":
+                    # Resolve system (transient; only for station lookup)
+                    sys_id = None
+                    if "system" in fk_col_indices:
+                        sys_name = linein[fk_col_indices["system"]]
+                        try:
+                            sys_id = _get_system_id(session, sys_name)
+                        except ValueError:
+                            tdenv.WARN("Unknown System '{}' in {}", sys_name, importPath)
+
+                    # Station (requires system context)
+                    if "station" in fk_col_indices:
+                        stn_name = linein[fk_col_indices["station"]]
+                        if sys_id is not None:
+                            try:
+                                rowdict["station_id"] = _get_station_id(session, sys_id, stn_name)
+                            except ValueError:
+                                tdenv.WARN("Unknown Station '{}' in {}", stn_name, importPath)
+                        else:
+                            tdenv.WARN("Station lookup skipped (no system_id) for '{}'", stn_name)
+
+                    # Category
+                    if "category" in fk_col_indices:
+                        cat_name = linein[fk_col_indices["category"]]
+                        try:
+                            rowdict["category_id"] = _get_category_id(session, cat_name)
+                        except ValueError:
+                            tdenv.WARN("Unknown Category '{}' in {}", cat_name, importPath)
+
+                # --- System foreign key lookup (Added), default "EDSM" if blank ---
+                if tableName == "System" and "added" in fk_col_indices:
+                    added_val = linein[fk_col_indices["added"]] or "EDSM"
                     try:
-…
-                    except
-…
+                        rowdict["added_id"] = _get_added_id(session, added_val)
+                    except ValueError:
+                        rowdict["added_id"] = None
+                        tdenv.WARN("Unknown Added value '{}' in {}", added_val, importPath)
+
+                # --- type coercion ---
+                for key, val in list(rowdict.items()):
+                    if val in ("", None):
+                        rowdict[key] = None
                         continue
-…
+                    # ints
+                    if key.endswith("_id") or key.endswith("ID") or key in ("cost", "max_allocation"):
+                        try:
+                            rowdict[key] = int(val)
+                        except ValueError:
+                            rowdict[key] = None
+                    # floats
+                    elif key in ("pos_x", "pos_y", "pos_z", "ls_from_star"):
+                        try:
+                            rowdict[key] = float(val)
+                        except ValueError:
+                            rowdict[key] = None
+                    # datetimes
+                    elif "time" in key or key == "modified":
+                        parsed = parse_ts(val)
+                        if parsed:
+                            rowdict[key] = parsed
+                        else:
+                            tdenv.WARN(
+                                "Unparsable datetime in {} line {} col {}: {}",
+                                importPath,
+                                lineNo,
+                                key,
+                                val,
+                            )
+                            rowdict[key] = None
+                    # strings (incl. TriState flags) left as-is
+
+                # reserved word remaps
+                if tableName == "Upgrade" and "class" in rowdict:
+                    rowdict["class_"] = rowdict.pop("class")
+                if tableName == "FDevOutfitting" and "class" in rowdict:
+                    rowdict["class_"] = rowdict.pop("class")
+
+                # ensure we never pass system_id to RareItem (not a column)
+                if tableName == "RareItem" and "system_id" in rowdict:
+                    rowdict.pop("system_id", None)
+
+                Model = getattr(SA, tableName)
+                obj = Model(**rowdict)
+                session.merge(obj)
+                importCount += 1
+
+                # batched commit (only if enabled for this backend)
+                if max_transaction_items:
+                    transaction_items += 1
+                    if transaction_items >= max_transaction_items:
+                        session.commit()
+                        session.begin()
+                        transaction_items = 0
+
+            except Exception as e:
+                tdenv.WARN(
+                    "*** INTERNAL ERROR: {err}\n"
+                    "CSV File: {file}:{line}\n"
+                    "Table: {table}\n"
+                    "Params: {params}\n".format(
+                        err=str(e),
+                        file=str(importPath),
+                        line=lineNo,
+                        table=tableName,
+                        params=rowdict if "rowdict" in locals() else linein,
                     )
-                pass
-            else:
-                tdenv.NOTE(
-                    "Wrong number of columns ({}:{}): {}",
-                    importPath,
-                    lineNo,
-                    ', '.join(linein)
                 )
-
-    tdenv.DEBUG0("{count} {table}s imported",
-        count = importCount,
-        table = tableName)
+                session.rollback()
 
-
+    session.commit()
+    tdenv.DEBUG0("{count} {table}s imported", count=importCount, table=tableName)
 
 
 def buildCache(tdb, tdenv):
     """
-    Rebuilds the
-…
+    Rebuilds the database from source files.
+
     TD's data is either "stable" - information that rarely changes like Ship
     details, star systems etc - and "volatile" - pricing information, etc.
-
+
     The stable data starts out in data/TradeDangerous.sql while other data
     is stored in custom-formatted text files, e.g. ./TradeDangerous.prices.
-
-    We load both sets of data into
+
+    We load both sets of data into a database, after which we can
     avoid the text-processing overhead by simply checking if the text files
     are newer than the database.
     """
-
+
     tdenv.NOTE(
         "Rebuilding cache file: this may take a few moments.",
         stderr=True,
     )
-
+
     dbPath = tdb.dbPath
     sqlPath = tdb.sqlPath
     pricesPath = tdb.pricesPath
-…
-    )
-
+    engine = tdb.engine
+
+    # --- Step 1: reset schema BEFORE opening a session/transaction ---
+    # Single unified call; no dialect branching here.
+    lifecycle.reset_db(engine, db_path=dbPath)
+
+    # --- Step 2: open a new session for rebuild work ---
+    with tdb.Session() as session:
+        # Import standard tables on a plain session with progress
+        with Progress(
+            max_value=len(tdb.importTables) + 1,
+            prefix="Importing",
+            width=25,
+            style=CountingBar,
+        ) as prog:
+            for importName, importTable in tdb.importTables:
+                import_path = Path(importName)
+                import_lines = file_line_count(import_path, missing_ok=True)
+                with prog.sub_task(
+                    max_value=import_lines, description=importTable
+                ) as child:
+                    prog.increment(value=1)
+                    call_args = {"task": child, "advance": 1}
+                    try:
+                        processImportFile(
+                            tdenv,
+                            session,
+                            import_path,
+                            importTable,
+                            line_callback=prog.update_task,
+                            call_args=call_args,
+                        )
+                        # safety commit after each file
+                        session.commit()
+                    except FileNotFoundError:
+                        tdenv.DEBUG0(
+                            "WARNING: processImportFile found no {} file", importName
+                        )
+                    except StopIteration:
+                        tdenv.NOTE(
+                            "{} exists but is empty. "
+                            "Remove it or add the column definition line.",
+                            importName,
+                        )
+                        prog.increment(1)
+
+            with prog.sub_task(description="Save DB"):
+                session.commit()
+
+        # --- Step 3: parse the prices file (still plain session) ---
+        if pricesPath.exists():
+            with Progress(max_value=None, width=25, prefix="Processing prices file"):
+                processPricesFile(tdenv, session, pricesPath)
+        else:
+            tdenv.NOTE(
+                f'Missing "{pricesPath}" file - no price data.',
+                stderr=True,
+            )
+
     tdb.close()
-
-    tdenv.DEBUG0("Swapping out db files")
-
-    if dbPath.exists():
-        if backupPath.exists():
-            backupPath.unlink()
-        dbPath.rename(backupPath)
-    tempPath.rename(dbPath)
-
     tdenv.DEBUG0("Finished")
 
+
 ######################################################################
 
 
 def regeneratePricesFile(tdb, tdenv):
+    """
+    Regenerate the .prices file from the current DB contents.
+    Uses the ORM session rather than raw sqlite.
+    """
     tdenv.DEBUG0("Regenerating .prices file")
-
-    with tdb.
-…
+
+    with tdb.Session() as session:
+        with tdb.pricesPath.open("w", encoding="utf-8") as pricesFile:
+            prices.dumpPrices(
+                session,
                 prices.Element.full,
-                file
-                debug
-…
+                file=pricesFile,
+                debug=tdenv.debug,
+            )
+
+    # Only touch the DB file on SQLite — MariaDB has no dbPath
+    if tdb.engine.dialect.name == "sqlite" and tdb.dbPath and os.path.exists(tdb.dbPath):
+        os.utime(tdb.dbPath)
 
 ######################################################################
 
 
-def importDataFromFile(tdb, tdenv, path, pricesFh
+def importDataFromFile(tdb, tdenv, path, pricesFh=None, reset=False):
     """
-…
+    Import price data from a file on a per-station basis,
+    that is when a new station is encountered, delete any
+    existing records for that station in the database.
     """
-
+
     if not pricesFh and not path.exists():
-        raise TradeException("No such file: {}"
-…
-        ))
-
+        raise TradeException(f"No such file: {path}")
+
     if reset:
         tdenv.DEBUG0("Resetting price data")
-        with tdb.
-…
+        with tdb.Session.begin() as session:
+            session.query(SA.StationItem).delete()
+
+    tdenv.DEBUG0(f"Importing data from {path}")
+    processPricesFile(
+        tdenv,
+        db=tdb.getDB(),  # still used for the incremental parsing logic
+        pricesPath=path,
+        pricesFh=pricesFh,
     )
-
-    # If everything worked,
+
+    # If everything worked, regenerate the canonical prices file if this wasn’t the main one
     if path != tdb.pricesPath:
-        regeneratePricesFile(tdb, tdenv)
+        regeneratePricesFile(tdb, tdenv)