tradedangerous 12.7.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py.typed +1 -0
- trade.py +49 -0
- tradedangerous/__init__.py +43 -0
- tradedangerous/cache.py +1381 -0
- tradedangerous/cli.py +136 -0
- tradedangerous/commands/TEMPLATE.py +74 -0
- tradedangerous/commands/__init__.py +244 -0
- tradedangerous/commands/buildcache_cmd.py +102 -0
- tradedangerous/commands/buy_cmd.py +427 -0
- tradedangerous/commands/commandenv.py +372 -0
- tradedangerous/commands/exceptions.py +94 -0
- tradedangerous/commands/export_cmd.py +150 -0
- tradedangerous/commands/import_cmd.py +222 -0
- tradedangerous/commands/local_cmd.py +243 -0
- tradedangerous/commands/market_cmd.py +207 -0
- tradedangerous/commands/nav_cmd.py +252 -0
- tradedangerous/commands/olddata_cmd.py +270 -0
- tradedangerous/commands/parsing.py +221 -0
- tradedangerous/commands/rares_cmd.py +298 -0
- tradedangerous/commands/run_cmd.py +1521 -0
- tradedangerous/commands/sell_cmd.py +262 -0
- tradedangerous/commands/shipvendor_cmd.py +60 -0
- tradedangerous/commands/station_cmd.py +68 -0
- tradedangerous/commands/trade_cmd.py +181 -0
- tradedangerous/commands/update_cmd.py +67 -0
- tradedangerous/corrections.py +55 -0
- tradedangerous/csvexport.py +234 -0
- tradedangerous/db/__init__.py +27 -0
- tradedangerous/db/adapter.py +192 -0
- tradedangerous/db/config.py +107 -0
- tradedangerous/db/engine.py +259 -0
- tradedangerous/db/lifecycle.py +332 -0
- tradedangerous/db/locks.py +208 -0
- tradedangerous/db/orm_models.py +500 -0
- tradedangerous/db/paths.py +113 -0
- tradedangerous/db/utils.py +661 -0
- tradedangerous/edscupdate.py +565 -0
- tradedangerous/edsmupdate.py +474 -0
- tradedangerous/formatting.py +210 -0
- tradedangerous/fs.py +156 -0
- tradedangerous/gui.py +1146 -0
- tradedangerous/mapping.py +133 -0
- tradedangerous/mfd/__init__.py +103 -0
- tradedangerous/mfd/saitek/__init__.py +3 -0
- tradedangerous/mfd/saitek/directoutput.py +678 -0
- tradedangerous/mfd/saitek/x52pro.py +195 -0
- tradedangerous/misc/checkpricebounds.py +287 -0
- tradedangerous/misc/clipboard.py +49 -0
- tradedangerous/misc/coord64.py +83 -0
- tradedangerous/misc/csvdialect.py +57 -0
- tradedangerous/misc/derp-sentinel.py +35 -0
- tradedangerous/misc/diff-system-csvs.py +159 -0
- tradedangerous/misc/eddb.py +81 -0
- tradedangerous/misc/eddn.py +349 -0
- tradedangerous/misc/edsc.py +437 -0
- tradedangerous/misc/edsm.py +121 -0
- tradedangerous/misc/importeddbstats.py +54 -0
- tradedangerous/misc/prices-json-exp.py +179 -0
- tradedangerous/misc/progress.py +194 -0
- tradedangerous/plugins/__init__.py +249 -0
- tradedangerous/plugins/edcd_plug.py +371 -0
- tradedangerous/plugins/eddblink_plug.py +861 -0
- tradedangerous/plugins/edmc_batch_plug.py +133 -0
- tradedangerous/plugins/spansh_plug.py +2647 -0
- tradedangerous/prices.py +211 -0
- tradedangerous/submit-distances.py +422 -0
- tradedangerous/templates/Added.csv +37 -0
- tradedangerous/templates/Category.csv +17 -0
- tradedangerous/templates/RareItem.csv +143 -0
- tradedangerous/templates/TradeDangerous.sql +338 -0
- tradedangerous/tools.py +40 -0
- tradedangerous/tradecalc.py +1302 -0
- tradedangerous/tradedb.py +2320 -0
- tradedangerous/tradeenv.py +313 -0
- tradedangerous/tradeenv.pyi +109 -0
- tradedangerous/tradeexcept.py +131 -0
- tradedangerous/tradeorm.py +183 -0
- tradedangerous/transfers.py +192 -0
- tradedangerous/utils.py +243 -0
- tradedangerous/version.py +16 -0
- tradedangerous-12.7.6.dist-info/METADATA +106 -0
- tradedangerous-12.7.6.dist-info/RECORD +87 -0
- tradedangerous-12.7.6.dist-info/WHEEL +5 -0
- tradedangerous-12.7.6.dist-info/entry_points.txt +3 -0
- tradedangerous-12.7.6.dist-info/licenses/LICENSE +373 -0
- tradedangerous-12.7.6.dist-info/top_level.txt +2 -0
- tradegui.py +24 -0
tradedangerous/cache.py
ADDED
@@ -0,0 +1,1381 @@
# --------------------------------------------------------------------
# Copyright (C) Oliver 'kfsone' Smith 2014 <oliver@kfs.org>:
# Copyright (C) Bernd 'Gazelle' Gollesch 2016, 2017
# Copyright (C) Stefan 'Tromador' Morrell 2025
# Copyright (C) Jonathan 'eyeonus' Jones 2018-2025
#
# You are free to use, redistribute, or even print and eat a copy of
# this software so long as you include this copyright notice.
# I guarantee there is at least one bug neither of us knew about.
# --------------------------------------------------------------------
# TradeDangerous :: Modules :: Cache loader
#
# TD works primarily from an SQLite3 database, but the data in that
# is sourced from text files.
# data/TradeDangerous.sql contains the less volatile data - systems,
# ships, etc
# data/TradeDangerous.prices contains a description of the price
# database that is intended to be easily editable and commitable to
# a source repository. -- DEPRECATED [eyeonus]
#

from __future__ import annotations

from pathlib import Path
import csv
import os
import re
import typing


from functools import partial as partial_fn
from sqlalchemy import func, tuple_
from sqlalchemy.orm import Session
from tradedangerous.db import orm_models as SA
from tradedangerous.db import lifecycle
from tradedangerous.db.utils import parse_ts

from .fs import file_line_count
from .tradeexcept import TradeException
from tradedangerous.misc.progress import Progress, CountingBar
from . import corrections, utils



# For mypy/pylint type checking
if typing.TYPE_CHECKING:
    from collections.abc import Callable
    from typing import Any, Optional, TextIO
    
    from .tradedb import TradeDB
    from .tradeenv import TradeEnv


######################################################################
# Regular expression patterns. Here be draegons.
# If you add new patterns:
# - use fragments and re.VERBOSE (see itemPriceRe)
# - use named captures (?P<name> ...)
# - include comments

## Match the '@ SYSTEM/Station' line
systemStationRe = re.compile(r'^\@\s*(.*)/(.*)')

## Price Line matching

# first part of any prices line is the item name and paying/asking price
itemPriceFrag = r"""
    # match item name, allowing spaces in the name
    (?P<item> .*?)
\s+
    # price station is buying the item for
    (?P<sell> \d+)
\s+
    # price station is selling item for
    (?P<buy> \d+)
"""

# time formats per https://www.sqlite.org/lang_datefunc.html
# YYYY-MM-DD HH:MM:SS
# YYYY-MM-DDTHH:MM:SS
# HH:MM:SS
# 'now'
timeFrag = r'(?P<time>(\d{4}-\d{2}-\d{2}[T ])?\d{2}:\d{2}:\d{2}|now)'

# <name> <sell> <buy> [ <demand> <supply> [ <time> | now ] ]
qtyLevelFrag = r"""
    unk             # You can just write 'unknown'
|   \?              # alias for unknown
|   n/a             # alias for 0L0
|   -               # alias for 0L0
|   \d+[\?LMH]      # Or <number><level> where level is L(ow), M(ed) or H(igh)
|   0               # alias for n/a
|   bug
"""
newItemPriceRe = re.compile(r"""
^
    {base_f}
    (
    \s+
        # demand units and level
        (?P<demand> {qtylvl_f})
    \s+
        # supply units and level
        (?P<supply> {qtylvl_f})
        # time is optional
        (?:
    \s+
            {time_f}
        )?
    )?
\s*
$
""".format(base_f = itemPriceFrag, qtylvl_f = qtyLevelFrag, time_f = timeFrag),
    re.IGNORECASE + re.VERBOSE)

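# Illustrative examples (added for this review; not in the package): the
# shapes of .prices lines that newItemPriceRe accepts. Values are invented.
#
#   >>> m = newItemPriceRe.match("Gold 9790 9888 1500M 5000H 2024-01-01 12:00:00")
#   >>> m.group('item', 'sell', 'buy', 'demand', 'supply', 'time')
#   ('Gold', '9790', '9888', '1500M', '5000H', '2024-01-01 12:00:00')
#   >>> newItemPriceRe.match("Gold 9790 9888").group('demand') is None
#   True
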
######################################################################
# Exception classes


class BuildCacheBaseException(TradeException):
    """
    Baseclass for BuildCache exceptions
    Attributes:
        fileName   Name of the file being processed
        lineNo     Line the error occurred on
        error      Description of the error
    """
    
    def __init__(self, fromFile: Path, lineNo: int, error: str | None = None) -> None:
        self.fileName = fromFile.name
        self.lineNo = lineNo
        self.category = "ERROR"
        self.error = error or "UNKNOWN ERROR"
    
    def __str__(self) -> str:
        return f'{self.fileName}:{self.lineNo} {self.category} {self.error}'


class UnknownSystemError(BuildCacheBaseException):
    """
    Raised when the file contains an unknown star name.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, key: str) -> None:
        super().__init__(fromFile, lineNo, f'Unrecognized SYSTEM: "{key}"')


class UnknownStationError(BuildCacheBaseException):
    """
    Raised when the file contains an unknown star/station name.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, key: str) -> None:
        super().__init__(fromFile, lineNo, f'Unrecognized STAR/Station: "{key}"')


class UnknownItemError(BuildCacheBaseException):
    """
    Raised in the case of an item name that we don't know.
    Attributes:
        itemName   Key we tried to look up.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, itemName: str) -> None:
        super().__init__(fromFile, lineNo, f'Unrecognized item name: "{itemName}"')


class DuplicateKeyError(BuildCacheBaseException):
    """
    Raised when an item is being redefined.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, keyType: str, keyValue: str, prevLineNo: int) -> None:
        super().__init__(fromFile, lineNo,
                f'Second occurrence of {keyType} "{keyValue}", previous entry at line {prevLineNo}.')


class DeletedKeyError(BuildCacheBaseException):
    """
    Raised when a key value in a .csv file is marked as DELETED in the
    corrections file.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, keyType: str, keyValue: str) -> None:
        super().__init__(
            fromFile, lineNo,
            f'{keyType} "{keyValue}" is marked as DELETED and should not be used.'
        )


class DeprecatedKeyError(BuildCacheBaseException):
    """
    Raised when a key value in a .csv file has a correction; the old
    name should not appear in the .csv file.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, keyType: str, keyValue: str, newValue: str) -> None:
        super().__init__(
            fromFile, lineNo,
            f'{keyType} "{keyValue}" is deprecated and should be replaced with "{newValue}".'
        )


class MultipleStationEntriesError(DuplicateKeyError):
    """ Raised when a station appears multiple times in the same file. """
    
    def __init__(self, fromFile: Path, lineNo: int, facility: str, prevLineNo: int) -> None:
        super().__init__(fromFile, lineNo, 'station', facility, prevLineNo)


class MultipleItemEntriesError(DuplicateKeyError):
    """ Raised when one item appears multiple times in the same station. """
    
    def __init__(self, fromFile: Path, lineNo: int, item: str, prevLineNo: int) -> None:
        super().__init__(fromFile, lineNo, 'item', item, prevLineNo)


class InvalidLineError(BuildCacheBaseException):
    """
    Raised when an invalid line is read.
    Attributes:
        problem   The problem that occurred
        text      Offending text
    """
    
    def __init__(self, fromFile: Path, lineNo: int, problem: str, text: str) -> None:
        super().__init__(fromFile, lineNo, f'{problem},\ngot: "{text.strip()}".')


class SupplyError(BuildCacheBaseException):
    """
    Raised when a supply field is incorrectly formatted.
    """
    
    def __init__(self, fromFile: Path, lineNo: int, category: str, problem: str, value: Any) -> None:
        super().__init__(fromFile, lineNo, f'Invalid {category} supply value: {problem}. Got: {value}')

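# Example (illustrative, added for review): how these exceptions render via
# BuildCacheBaseException.__str__. File name, line and item are invented.
#
#   str(UnknownItemError(Path("x.prices"), 42, "GOLDD"))
#   -> 'x.prices:42 ERROR Unrecognized item name: "GOLDD"'
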
######################################################################
# Helpers

# --- tiny FK lookup caches (per import run) ---
_fk_cache_system = {}
_fk_cache_station = {}
_fk_cache_category = {}
_fk_cache_added = {}

def _get_system_id(session, system_name):
    if system_name in _fk_cache_system:
        return _fk_cache_system[system_name]
    rid = session.query(SA.System.system_id).filter(SA.System.name == system_name).scalar()
    if rid is None:
        raise ValueError(f"Unknown System name: {system_name}")
    _fk_cache_system[system_name] = rid
    return rid

def _get_station_id(session, system_id, station_name):
    key = (system_id, station_name)
    if key in _fk_cache_station:
        return _fk_cache_station[key]
    rid = (
        session.query(SA.Station.station_id)
        .filter(SA.Station.system_id == system_id, SA.Station.name == station_name)
        .scalar()
    )
    if rid is None:
        raise ValueError(f"Unknown Station '{station_name}' in system_id={system_id}")
    _fk_cache_station[key] = rid
    return rid

def _get_category_id(session, cat_name):
    if cat_name in _fk_cache_category:
        return _fk_cache_category[cat_name]
    rid = session.query(SA.Category.category_id).filter(SA.Category.name == cat_name).scalar()
    if rid is None:
        raise ValueError(f"Unknown Category name: {cat_name}")
    _fk_cache_category[cat_name] = rid
    return rid

def _get_added_id(session, added_name):
    if added_name in _fk_cache_added:
        return _fk_cache_added[added_name]
    rid = session.query(SA.Added.added_id).filter(SA.Added.name == added_name).scalar()
    if rid is None:
        raise ValueError(f"Unknown Added name: {added_name}")
    _fk_cache_added[added_name] = rid
    return rid

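# Illustrative note (added for review): within a single import run each name
# costs at most one SELECT; repeat lookups are dict hits. "SOL" is an
# invented example value.
#
#   sys_id = _get_system_id(session, "SOL")   # first call queries the DB
#   sys_id = _get_system_id(session, "SOL")   # second call hits _fk_cache_system
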
# supply/demand levels are one of '?' for unknown, 'L', 'M' or 'H'
# for low, medium, or high. We turn these into integer values for
# ordering convenience, and we include both upper and lower-case
# so we don't have to sweat ordering.
#
SUPPLY_LEVEL_VALUES = {
    '?': -1,
    'L': 1, 'l': 1,
    'M': 2, 'm': 2,
    'H': 3, 'h': 3,
}


def parseSupply(pricesFile: Path, lineNo: int, category: str, reading: str) -> tuple[int, int]:
    """ Parse a supply specifier, which is expected to be in the form
        <number><?, L, M, or H>, and return the units as an integer plus a
        numeric level value suitable for ordering, such that
        ? = -1, L/l = 1, M/m = 2, H/h = 3 """
    
    # supply_level <- digit+ level;
    # digit        <- [0-9];
    # level        <- Unknown / Low / Medium / High;
    # Unknown      <- '?';
    # Low          <- 'L';
    # Medium       <- 'M';
    # High         <- 'H';
    if reading == '?':
        return -1, -1
    if reading == '-':
        return 0, 0
    
    # extract the left most digits into unit and the last character into the level reading.
    units, level = reading[0:-1], reading[-1]
    
    # Extract the right most character as the "level" and look up its numeric value.
    levelNo = SUPPLY_LEVEL_VALUES.get(level)
    if levelNo is None:
        raise SupplyError(
            pricesFile, lineNo, category, reading,
            f'Unrecognized level suffix: "{level}": expected one of "L", "M", "H" or "?"'
        )
    
    # Expecting a numeric value in units, e.g. 123? -> (units=123, level=?)
    try:
        unitsNo = int(units)
        if unitsNo < 0:
            # Use the same code-path as if the units fail to parse.
            raise ValueError('negative unit count')
    except ValueError:
        raise SupplyError(
            pricesFile, lineNo, category, reading,
            f'Unrecognized units/level value: "{level}": expected "-", "?", or a number followed by a level (L, M, H or ?).'
        ) from None  # don't forward the exception itself
    
    # Normalize the units and level when there are no units.
    if unitsNo == 0:
        return 0, 0
    
    return unitsNo, levelNo

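# Illustrative behaviour (added for review; values invented):
#
#   parseSupply(path, 1, 'supply', '1500M')  ->  (1500, 2)   # 1500 units, Medium
#   parseSupply(path, 1, 'supply', '?')      ->  (-1, -1)    # unknown
#   parseSupply(path, 1, 'supply', '-')      ->  (0, 0)      # none
#   parseSupply(path, 1, 'supply', '12x')    raises SupplyError
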
######################################################################
# Code
######################################################################


def getSystemByNameIndex(session: Session) -> dict[str, int]:
    """Build system index by uppercase name → system_id."""
    rows = (
        session.query(SA.System.system_id, func.upper(SA.System.name))
        .all()
    )
    return {name: ID for (ID, name) in rows}


def getStationByNameIndex(session: Session) -> dict[str, int]:
    """Build station index in STAR/Station notation → station_id."""
    rows = (
        session.query(
            SA.Station.station_id,
            (SA.System.name + "/" + SA.Station.name)
        )
        .join(SA.System, SA.Station.system_id == SA.System.system_id)
        .all()
    )
    # normalise case like original
    return {name.upper(): ID for (ID, name) in rows}


def getItemByNameIndex(session: Session) -> dict[str, int]:
    """Generate item name index (uppercase item name → item_id)."""
    rows = (
        session.query(SA.Item.item_id, func.upper(SA.Item.name))
        .all()
    )
    return {name: itemID for (itemID, name) in rows}

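# Shapes of the lookup indexes above (illustrative, invented values):
#   getSystemByNameIndex(session)  -> {"SOL": 1, "ALIOTH": 2, ...}
#   getStationByNameIndex(session) -> {"SOL/ABRAHAM LINCOLN": 10, ...}
#   getItemByNameIndex(session)    -> {"GOLD": 42, ...}
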
# The return type of process prices is complicated, should probably have been a type
# in its own right. I'm going to define some aliases to try and persuade IDEs to be
# more helpful about what it is trying to return.
if typing.TYPE_CHECKING:
    # A list of the IDs of stations that were modified so they can be updated
    ProcessedStationIds = tuple[tuple[int]]
    ProcessedItem = tuple[
        int,                          # station ID
        int,                          # item ID
        Optional[int | float | str],  # modified
        int,                          # demandCR
        int,                          # demandUnits
        int,                          # demandLevel
        int,                          # supplyCr
        int,                          # supplyUnits
        int,                          # supplyLevel
    ]
    ProcessedItems = list[ProcessedItem]
    ZeroItems = list[tuple[int, int]]  # stationID, itemID


def processPrices(
    tdenv: TradeEnv,
    priceFile: Path,
    session: Session,
    defaultZero: bool
) -> tuple[ProcessedStationIds, ProcessedItems, ZeroItems, int, int, int, int]:
    """
    Populate the database with prices by reading the given file.
    
    :param tdenv:       The environment we're working in
    :param priceFile:   File to read
    :param session:     Active SQLAlchemy session
    :param defaultZero: Whether to create default zero-availability/-demand
                        records for missing data. For partial updates,
                        set False.
    """
    
    DEBUG0, DEBUG1 = tdenv.DEBUG0, tdenv.DEBUG1
    DEBUG0("Processing prices file: {}", priceFile)
    
    ignoreUnknown = tdenv.ignoreUnknown
    quiet = tdenv.quiet
    merging = tdenv.mergeImport
    
    # build lookup indexes from DB
    systemByName = getSystemByNameIndex(session)
    stationByName = getStationByNameIndex(session)
    stationByName.update(
        (sys, ID)
        for sys, ID in corrections.stations.items()
        if isinstance(ID, int)
    )
    sysCorrections = corrections.systems
    stnCorrections = {
        stn: alt
        for stn, alt in corrections.stations.items()
        if isinstance(alt, str)
    }
    
    itemByName = getItemByNameIndex(session)
    
    defaultUnits = -1 if not defaultZero else 0
    defaultLevel = -1 if not defaultZero else 0
    
    stationID = None
    facility = None
    processedStations = {}
    processedSystems = set()
    processedItems = {}
    stationItemDates = {}
    DELETED = corrections.DELETED
    items, zeros = [], []
    
    lineNo, localAdd = 0, 0
    
    if not ignoreUnknown:
        def ignoreOrWarn(error: Exception) -> None:
            raise error
    elif not quiet:
        def ignoreOrWarn(error: Exception) -> None:
            # Ensure exceptions are stringified before passing to WARN
            tdenv.WARN(str(error))
    else:
        def ignoreOrWarn(error: Exception) -> None:
            # quiet + ignoreUnknown: drop the error silently.
            # (Defined so later calls can't hit an unbound name.)
            pass
    
    def changeStation(matches: re.Match) -> None:
        nonlocal facility, stationID
        nonlocal processedItems, localAdd
        nonlocal stationItemDates
        
        # Change current station
        stationItemDates = {}
        systemNameIn, stationNameIn = matches.group(1, 2)
        systemName, stationName = systemNameIn.upper(), stationNameIn.upper()
        corrected = False
        facility = f'{systemName}/{stationName}'
        
        stationID = DELETED
        newID = stationByName.get(facility, -1)
        DEBUG0("Selected station: {}, ID={}", facility, newID)
        
        if newID is DELETED:
            DEBUG1("DELETED Station: {}", facility)
            return
        
        if newID < 0:
            if utils.checkForOcrDerp(tdenv, systemName, stationName):
                return
            corrected = True
            altName = sysCorrections.get(systemName)
            if altName is DELETED:
                DEBUG1("DELETED System: {}", facility)
                return
            if altName:
                DEBUG1("SYSTEM '{}' renamed '{}'", systemName, altName)
                systemName, facility = altName, "/".join((altName, stationName))
            
            systemID = systemByName.get(systemName, -1)
            if systemID < 0:
                ignoreOrWarn(
                    UnknownSystemError(priceFile, lineNo, facility)
                )
                return
            
            altStation = stnCorrections.get(facility)
            if altStation:
                if altStation is DELETED:
                    DEBUG1("DELETED Station: {}", facility)
                    return
                
                DEBUG1("Station '{}' renamed '{}'", facility, altStation)
                stationName = altStation.upper()
                facility = f'{systemName}/{stationName}'
                
                newID = stationByName.get(facility, -1)
                if newID is DELETED:
                    DEBUG1("Renamed station DELETED: {}", facility)
                    return
            
            if newID < 0:
                if not ignoreUnknown:
                    ignoreOrWarn(
                        UnknownStationError(priceFile, lineNo, facility)
                    )
                    return
                
                name = utils.titleFixup(stationName)
                # ORM insert: placeholder station
                station = SA.Station(
                    system_id=systemID,
                    name=name,
                    ls_from_star=0,
                    blackmarket='?',
                    max_pad_size='?',
                    market='?',
                    shipyard='?',
                )
                session.add(station)
                session.flush()  # assign station_id
                newID = station.station_id
                
                stationByName[facility] = newID
                tdenv.NOTE(
                    "Added local station placeholder for {} (#{})", facility, newID
                )
                localAdd += 1
        
        elif newID in processedStations:
            if not corrected:
                raise MultipleStationEntriesError(
                    priceFile, lineNo, facility,
                    processedStations[newID]
                )
        
        stationID = newID
        processedSystems.add(systemName)
        processedStations[stationID] = lineNo
        processedItems = {}
        
        # ORM query: load existing item → modified map
        rows = (
            session.query(SA.StationItem.item_id, SA.StationItem.modified)
            .filter(SA.StationItem.station_id == stationID)
            .all()
        )
        stationItemDates = dict(rows)
    
    addItem, addZero = items.append, zeros.append
    getItemID = itemByName.get
    newItems, updtItems, ignItems = 0, 0, 0  # <-- put this back
    
    def processItemLine(matches):
        nonlocal newItems, updtItems, ignItems
        itemName, modified = matches.group('item', 'time')
        itemName = itemName.upper()
        
        # Look up the item ID.
        itemID = getItemID(itemName, -1)
        if itemID < 0:
            oldName = itemName
            itemName = corrections.correctItem(itemName)
            
            # Silently skip DELETED items
            if itemName == corrections.DELETED:
                DEBUG1("DELETED {}", oldName)
                return
            
            # Retry with corrected name
            itemName = itemName.upper()
            itemID = getItemID(itemName, -1)
            
            if itemID < 0:
                ignoreOrWarn(
                    UnknownItemError(priceFile, lineNo, itemName)
                )
                return
            
            DEBUG1("Renamed {} -> {}", oldName, itemName)
        
        lastModified = stationItemDates.get(itemID, None)
        if lastModified and merging:
            if modified and modified != 'now' and modified <= lastModified:
                DEBUG1("Ignoring {} @ {}: {} <= {}".format(
                    itemName, facility,
                    modified, lastModified,
                ))
                if modified < lastModified:
                    ignItems += 1
                return
        
        # Check for duplicate items within the station.
        if itemID in processedItems:
            ignoreOrWarn(
                MultipleItemEntriesError(
                    priceFile, lineNo,
                    f'{itemName}',
                    processedItems[itemID]
                )
            )
            return
        
        demandCr, supplyCr = matches.group('sell', 'buy')
        demandCr, supplyCr = int(demandCr), int(supplyCr)
        demandString, supplyString = matches.group('demand', 'supply')
        
        if demandCr == 0 and supplyCr == 0:
            if lastModified:
                addZero((stationID, itemID))
        else:
            if lastModified:
                updtItems += 1
            else:
                newItems += 1
            if demandString:
                demandUnits, demandLevel = parseSupply(
                    priceFile, lineNo, 'demand', demandString
                )
            else:
                demandUnits, demandLevel = defaultUnits, defaultLevel
            
            if demandString and supplyString:
                supplyUnits, supplyLevel = parseSupply(
                    priceFile, lineNo, 'supply', supplyString
                )
            else:
                supplyUnits, supplyLevel = defaultUnits, defaultLevel
            
            if modified == 'now':
                modified = None  # Use CURRENT_FILESTAMP
            
            addItem((
                stationID, itemID, modified,
                demandCr, demandUnits, demandLevel,
                supplyCr, supplyUnits, supplyLevel,
            ))
        
        processedItems[itemID] = lineNo
    
    space_cleanup = re.compile(r'\s{2,}').sub
    for line in priceFile:
        lineNo += 1
        
        text = line.split('#', 1)[0]             # Discard comments
        text = space_cleanup(' ', text).strip()  # Remove leading/trailing whitespace, reduce multi-spaces
        if not text:
            continue
        
        ########################################
        ### "@ STAR/Station" lines.
        if text.startswith('@'):
            matches = systemStationRe.match(text)
            if not matches:
                raise InvalidLineError(priceFile, lineNo, "Unrecognized '@' line", text)
            changeStation(matches)
            continue
        
        if not stationID:
            # Need a station to process any other type of line.
            raise InvalidLineError(priceFile, lineNo, "Expecting '@ SYSTEM / Station' line", text)
        if stationID == DELETED:
            # Ignore all values from a deleted station/system.
            continue
        
        ########################################
        ### "+ Category" lines
        if text.startswith('+'):
            # we now ignore these.
            continue
        
        ########################################
        ### "Item sell buy ..." lines.
        matches = newItemPriceRe.match(text)
        if not matches:
            raise InvalidLineError(priceFile, lineNo, "Unrecognized line/syntax", text)
        
        processItemLine(matches)
    
    numSys = len(processedSystems)
    
    if localAdd > 0:
        tdenv.NOTE(
            "Placeholder stations are added to the local DB only "
            "(not the .CSV).\n"
            "Use 'trade.py export --table Station' "
            "if you /need/ to persist them."
        )
    
    stations = tuple((ID,) for ID in processedStations)
    return stations, items, zeros, newItems, updtItems, ignItems, numSys

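# A minimal .prices fragment the parser above accepts (illustrative; the
# station and values are invented):
#
#   @ SOL/Abraham Lincoln
#      + Metals
#      Gold     9790   9888   1500M   5000H   2024-01-01 12:00:00
#      Silver   4900   5000   ?       -
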
######################################################################


def processPricesFile(
    tdenv: "TradeEnv",
    session: Session,
    pricesPath: Path,
    pricesFh: Optional[TextIO] = None,
    defaultZero: bool = False,
) -> None:
    """
    Process a .prices file and import data into the DB via ORM.
    """
    
    tdenv.DEBUG0("Processing Prices file '{}'", pricesPath)
    
    with (pricesFh or pricesPath.open("r", encoding="utf-8")) as fh:
        (
            stations,
            items,
            zeros,
            newItems,
            updtItems,
            ignItems,
            numSys,
        ) = processPrices(tdenv, fh, session, defaultZero)
    
    if not tdenv.mergeImport:
        # Delete all StationItems for these stations
        session.query(SA.StationItem).filter(
            SA.StationItem.station_id.in_([sid for (sid,) in stations])
        ).delete(synchronize_session=False)
    
    # Remove zeroed pairs
    removedItems = 0
    if zeros:
        session.query(SA.StationItem).filter(
            tuple_(SA.StationItem.station_id, SA.StationItem.item_id).in_(zeros)
        ).delete(synchronize_session=False)
        removedItems = len(zeros)
    
    # Upsert items
    if items:
        for (
            station_id,
            item_id,
            modified,
            demand_price,
            demand_units,
            demand_level,
            supply_price,
            supply_units,
            supply_level,
        ) in items:
            obj = SA.StationItem(
                station_id=station_id,
                item_id=item_id,
                modified=modified or None,
                demand_price=demand_price,
                demand_units=demand_units,
                demand_level=demand_level,
                supply_price=supply_price,
                supply_units=supply_units,
                supply_level=supply_level,
            )
            session.merge(obj)
    
    tdenv.DEBUG0("Marking populated stations as having a market")
    session.query(SA.Station).filter(
        SA.Station.station_id.in_([sid for (sid,) in stations])
    ).update({SA.Station.market: "Y"}, synchronize_session=False)
    
    changes = " and ".join(
        f"{v} {k}"
        for k, v in {
            "new": newItems,
            "updated": updtItems,
            "removed": removedItems,
        }.items()
        if v
    ) or "0"
    
    tdenv.NOTE(
        "Import complete: "
        "{:s} items "
        "over {:n} stations "
        "in {:n} systems",
        changes,
        len(stations),
        numSys,
    )
    
    if ignItems:
        tdenv.NOTE("Ignored {} items with old data", ignItems)

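# Usage sketch (illustrative; assumes a TradeDB `tdb` and TradeEnv `tdenv`
# as used elsewhere in this module; the caller owns the commit):
#
#   with tdb.Session() as session:
#       processPricesFile(tdenv, session, Path("TradeDangerous.prices"))
#       session.commit()
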
######################################################################


def depCheck(importPath, lineNo, depType, key, correctKey):
    if correctKey == key:
        return
    if correctKey == corrections.DELETED:
        raise DeletedKeyError(importPath, lineNo, depType, key)
    raise DeprecatedKeyError(importPath, lineNo, depType, key, correctKey)


def deprecationCheckSystem(importPath, lineNo, line):
    depCheck(
        importPath, lineNo, 'System',
        line[0], corrections.correctSystem(line[0]),
    )


def deprecationCheckStation(importPath, lineNo, line):
    depCheck(
        importPath, lineNo, 'System',
        line[0], corrections.correctSystem(line[0]),
    )
    depCheck(
        importPath, lineNo, 'Station',
        line[1], corrections.correctStation(line[0], line[1]),
    )


def deprecationCheckCategory(importPath, lineNo, line):
    depCheck(
        importPath, lineNo, 'Category',
        line[0], corrections.correctCategory(line[0]),
    )


def deprecationCheckItem(importPath, lineNo, line):
    depCheck(
        importPath, lineNo, 'Category',
        line[0], corrections.correctCategory(line[0]),
    )
    depCheck(
        importPath, lineNo, 'Item',
        line[1], corrections.correctItem(line[1]),
    )

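# Example behaviour of depCheck (illustrative; 'OLD'/'NEW' are invented keys):
#
#   depCheck(path, 7, 'System', 'SOL', 'SOL')                # match: no-op
#   depCheck(path, 7, 'System', 'OLD', corrections.DELETED)  # raises DeletedKeyError
#   depCheck(path, 7, 'System', 'OLD', 'NEW')                # raises DeprecatedKeyError
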
# --- main importer ---
def processImportFile(
    tdenv,
    session,
    importPath,
    tableName,
    *,
    line_callback: Optional[Callable] = None,
    call_args: Optional[dict] = None,
):
    """
    Import a CSV file into the given table.
    
    Applies header parsing, uniqueness checks, foreign key lookups,
    in-row deprecation correction (warnings only at -vv via DEBUG1),
    and upserts via SQLAlchemy ORM.
    Commits in batches for large datasets.
    """
    
    tdenv.DEBUG0("Processing import file '{}' for table '{}'", str(importPath), tableName)
    
    call_args = call_args or {}
    if line_callback:
        line_callback = partial_fn(line_callback, **call_args)
    
    # --- batch size config from environment or fallback ---
    env_batch = os.environ.get("TD_LISTINGS_BATCH")
    if env_batch:
        try:
            max_transaction_items = int(env_batch)
        except ValueError:
            tdenv.WARN("Invalid TD_LISTINGS_BATCH value '{}', falling back to defaults.", env_batch)
            max_transaction_items = None
    else:
        max_transaction_items = None
    
    if max_transaction_items is None:
        if session.bind.dialect.name in ("mysql", "mariadb"):
            max_transaction_items = 50 * 1024
        else:
            max_transaction_items = 250 * 1024
    
    transaction_items = 0  # track how many rows inserted before committing
    
    with importPath.open("r", encoding="utf-8") as importFile:
        csvin = csv.reader(importFile, delimiter=",", quotechar="'", doublequote=True)
        
        # Read header row
        columnDefs = next(csvin)
        columnCount = len(columnDefs)
        
        # --- Process headers: extract column names, track indices ---
        activeColumns: list[str] = []        # Final columns we'll use (after "unq:" stripping)
        kept_indices: list[int] = []         # Indices into CSV rows we keep (aligned to activeColumns)
        uniqueIndexes: list[int] = []        # Indexes (into activeColumns) of unique keys
        fk_col_indices: dict[str, int] = {}  # Special handling for FK resolution
        
        uniquePfx = "unq:"
        uniqueLen = len(uniquePfx)
        
        # map of header (without "unq:") -> original CSV index, for correction by name
        header_index: dict[str, int] = {}
        
        for cIndex, cName in enumerate(columnDefs):
            colName, _, srcKey = cName.partition("@")
            baseName = colName[uniqueLen:] if colName.startswith(uniquePfx) else colName
            header_index[baseName] = cIndex
            
            # Special-case: System-added
            if tableName == "System":
                if cName == "name":
                    srcKey = ""
                elif cName == "name@Added.added_id":
                    fk_col_indices["added"] = cIndex
                    continue
            
            # Foreign key columns for RareItem
            if tableName == "RareItem":
                if cName == "!name@System.system_id":
                    fk_col_indices["system"] = cIndex
                    continue
                if cName == "name@Station.station_id":
                    fk_col_indices["station"] = cIndex
                    continue
                if cName == "name@Category.category_id":
                    fk_col_indices["category"] = cIndex
                    continue
            
            # Handle unique constraint tracking
            if colName.startswith(uniquePfx):
                uniqueIndexes.append(len(activeColumns))
                colName = baseName
            
            activeColumns.append(colName)
            kept_indices.append(cIndex)
        
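        # Header convention (illustrative reminder, added for review; not in
        # the original source): "unq:" marks unique-key columns and
        # "@Table.column" marks FK source columns, e.g. a System.csv header
        # along the lines of:
        #   unq:name,pos_x,pos_y,pos_z,name@Added.added_id,modified
        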
        importCount = 0
        uniqueIndex: dict[str, int] = {}
        
        # helpers for correction + visibility-gated warning
        DELETED = corrections.DELETED
        
        def _warn(line_no: int, msg: str) -> None:
            # Gate deprecation chatter to -vv (DEBUG1)
            tdenv.DEBUG1("{}:{} WARNING {}", importPath, line_no, msg)
        
        def _apply_row_corrections(table_name: str, row: list[str], line_no: int) -> bool:
            """
            Returns True if the row should be skipped (deleted in tolerant mode), False otherwise.
            Mutates 'row' in place with corrected values.
            """
            if table_name == "System":
                idx = header_index.get("name")
                if idx is not None:
                    orig = row[idx]
                    corr = corrections.correctSystem(orig)
                    if corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'System "{orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "System", orig)
                    if corr != orig:
                        _warn(line_no, f'System "{orig}" is deprecated and should be replaced with "{corr}".')
                        row[idx] = corr
            
            elif table_name == "Station":
                s_idx = header_index.get("system")
                n_idx = header_index.get("name")
                if s_idx is not None and n_idx is not None:
                    s_orig = row[s_idx]
                    s_corr = corrections.correctSystem(s_orig)
                    if s_corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'System "{s_orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "System", s_orig)
                    if s_corr != s_orig:
                        _warn(line_no, f'System "{s_orig}" is deprecated and should be replaced with "{s_corr}".')
                        row[s_idx] = s_corr
                    n_orig = row[n_idx]
                    n_corr = corrections.correctStation(s_corr, n_orig)
                    if n_corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'Station "{n_orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "Station", n_orig)
                    if n_corr != n_orig:
                        _warn(line_no, f'Station "{n_orig}" is deprecated and should be replaced with "{n_corr}".')
                        row[n_idx] = n_corr
            
            elif table_name == "Category":
                idx = header_index.get("name")
                if idx is not None:
                    orig = row[idx]
                    corr = corrections.correctCategory(orig)
                    if corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'Category "{orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "Category", orig)
                    if corr != orig:
                        _warn(line_no, f'Category "{orig}" is deprecated and should be replaced with "{corr}".')
                        row[idx] = corr
            
            elif table_name == "Item":
                cat_idx = header_index.get("category")
                name_idx = header_index.get("name")
                if cat_idx is not None:
                    c_orig = row[cat_idx]
                    c_corr = corrections.correctCategory(c_orig)
                    if c_corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'Category "{c_orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "Category", c_orig)
                    if c_corr != c_orig:
                        _warn(line_no, f'Category "{c_orig}" is deprecated and should be replaced with "{c_corr}".')
                        row[cat_idx] = c_corr
                if name_idx is not None:
                    i_orig = row[name_idx]
                    i_corr = corrections.correctItem(i_orig)
                    if i_corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'Item "{i_orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "Item", i_orig)
                    if i_corr != i_orig:
                        _warn(line_no, f'Item "{i_orig}" is deprecated and should be replaced with "{i_corr}".')
                        row[name_idx] = i_corr
            
            # RareItem: we only correct category (FK lookup uses names) to improve hit rate.
            elif table_name == "RareItem":
                cat_idx = header_index.get("category")
                if cat_idx is not None:
                    c_orig = row[cat_idx]
                    c_corr = corrections.correctCategory(c_orig)
                    if c_corr is DELETED:
                        if tdenv.ignoreUnknown:
                            _warn(line_no, f'Category "{c_orig}" is marked as DELETED and should not be used.')
                            return True
                        raise DeletedKeyError(importPath, line_no, "Category", c_orig)
                    if c_corr != c_orig:
                        _warn(line_no, f'Category "{c_orig}" is deprecated and should be replaced with "{c_corr}".')
                        row[cat_idx] = c_corr
            
            return False  # do not skip
        
        # --- Read data lines ---
        for linein in csvin:
            if line_callback:
                line_callback()
            if not linein:
                continue
            
            lineNo = csvin.line_num
            
            if len(linein) != columnCount:
                tdenv.NOTE("Wrong number of columns ({}:{}): {}", importPath, lineNo, ", ".join(linein))
                continue
            
            tdenv.DEBUG1(" Values: {}", ", ".join(linein))
            
            # --- Apply corrections BEFORE uniqueness; may skip if deleted in tolerant mode
            try:
                if _apply_row_corrections(tableName, linein, lineNo):
                    continue
            except DeletedKeyError:
                if not tdenv.ignoreUnknown:
                    raise  # strict: fail hard, re-raising the original fault with its traceback intact
                # tolerant: already warned in _apply_row_corrections; skip row
                continue
            
            # Extract and clean values to use (from corrected line)
            activeValues = [linein[i] for i in kept_indices]
            
            # --- Uniqueness check (after correction) ---
            try:
                if uniqueIndexes:
                    keyValues = [str(activeValues[i]).upper() for i in uniqueIndexes]
                    key = ":!:".join(keyValues)
                    prevLineNo = uniqueIndex.get(key, 0)
                    if prevLineNo:
                        key_disp = "/".join(keyValues)
                        if tdenv.ignoreUnknown:
                            e = DuplicateKeyError(importPath, lineNo, "entry", key_disp, prevLineNo)
                            e.category = "WARNING"
                            tdenv.NOTE("{}", e)
                            continue
                        raise DuplicateKeyError(importPath, lineNo, "entry", key_disp, prevLineNo)
                    uniqueIndex[key] = lineNo
            except Exception as e:
                # Keep processing the file, don't tear down the loop
                tdenv.WARN(
                    "*** INTERNAL ERROR: {err}\n"
                    "CSV File: {file}:{line}\n"
                    "Table: {table}\n"
                    "Params: {params}\n".format(
                        err=str(e),
                        file=str(importPath),
                        line=lineNo,
                        table=tableName,
                        params=linein,
                    )
                )
                session.rollback()
                continue
            
            try:
                rowdict = dict(zip(activeColumns, activeValues))
                
                # Foreign key lookups — RareItem
                if tableName == "RareItem":
                    sys_id = None
                    if "system" in fk_col_indices:
                        sys_name = linein[fk_col_indices["system"]]
                        try:
                            sys_id = _get_system_id(session, sys_name)
                        except ValueError:
                            tdenv.WARN("Unknown System '{}' in {}", sys_name, importPath)
                    
                    if "station" in fk_col_indices:
                        stn_name = linein[fk_col_indices["station"]]
                        if sys_id is not None:
                            try:
                                rowdict["station_id"] = _get_station_id(session, sys_id, stn_name)
                            except ValueError:
                                tdenv.WARN("Unknown Station '{}' in {}", stn_name, importPath)
                        else:
                            tdenv.WARN("Station lookup skipped (no system_id) for '{}'", stn_name)
                    
                    if "category" in fk_col_indices:
                        cat_name = linein[fk_col_indices["category"]]
                        try:
                            rowdict["category_id"] = _get_category_id(session, cat_name)
                        except ValueError:
                            tdenv.WARN("Unknown Category '{}' in {}", cat_name, importPath)
                
                # Foreign key lookups — System.added
                if tableName == "System" and "added" in fk_col_indices:
                    added_val = linein[fk_col_indices["added"]] or "EDSM"
                    try:
                        rowdict["added_id"] = _get_added_id(session, added_val)
                    except ValueError:
                        rowdict["added_id"] = None
                        tdenv.WARN("Unknown Added value '{}' in {}", added_val, importPath)
                
                # --- Type coercion for common types ---
                for key, val in list(rowdict.items()):
                    if val in ("", None):
                        rowdict[key] = None
                        continue
                    if key.endswith("_id") or key.endswith("ID") or key in ("cost", "max_allocation"):
                        try:
                            rowdict[key] = int(val)
                        except ValueError:
                            rowdict[key] = None
                    elif key in ("pos_x", "pos_y", "pos_z", "ls_from_star"):
                        try:
                            rowdict[key] = float(val)
                        except ValueError:
                            rowdict[key] = None
                    elif "time" in key or key == "modified":
                        parsed = parse_ts(val)
                        if parsed:
                            rowdict[key] = parsed
                        else:
                            tdenv.WARN(
                                "Unparsable datetime in {} line {} col {}: {}",
                                importPath,
                                lineNo,
                                key,
                                val,
                            )
                            rowdict[key] = None
                
                # Special handling for SQL reserved word `class`
                if tableName == "Upgrade" and "class" in rowdict:
                    rowdict["class_"] = rowdict.pop("class")
                if tableName == "FDevOutfitting" and "class" in rowdict:
                    rowdict["class_"] = rowdict.pop("class")
                if tableName == "RareItem" and "system_id" in rowdict:
                    rowdict.pop("system_id", None)
                
                # ORM insert/merge
                Model = getattr(SA, tableName)
                obj = Model(**rowdict)
                session.merge(obj)
                importCount += 1
                
                # Batch commit
                if max_transaction_items:
                    transaction_items += 1
                    if transaction_items >= max_transaction_items:
                        session.commit()
                        session.begin()
                        transaction_items = 0
            
            except Exception as e:
                # Log all import errors — but keep going
                tdenv.WARN(
                    "*** INTERNAL ERROR: {err}\n"
                    "CSV File: {file}:{line}\n"
                    "Table: {table}\n"
                    "Params: {params}\n".format(
                        err=str(e),
                        file=str(importPath),
                        line=lineNo,
                        table=tableName,
                        params=rowdict if "rowdict" in locals() else linein,
                    )
                )
                session.rollback()
    
    # Final commit after file done
    session.commit()
    tdenv.DEBUG0("{count} {table}s imported", count=importCount, table=tableName)

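# Usage sketch (illustrative; file name and batch size invented):
#
#   os.environ["TD_LISTINGS_BATCH"] = "4096"   # optional commit-batch override
#   with tdb.Session() as session:
#       processImportFile(tdenv, session, Path("data/System.csv"), "System")
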
def buildCache(tdb: TradeDB, tdenv: TradeEnv):
    """
    Rebuilds the database from source files.
    
    TD's data is either "stable" - information that rarely changes like Ship
    details, star systems etc - and "volatile" - pricing information, etc.
    
    The stable data starts out in data/TradeDangerous.sql while other data
    is stored in custom-formatted text files, e.g. ./TradeDangerous.prices.
    
    We load both sets of data into a database, after which we can
    avoid the text-processing overhead by simply checking if the text files
    are newer than the database.
    """
    
    tdenv.NOTE(
        "(Re)building database: this may take a few moments.",
        stderr=True,
    )
    
    dbPath, engine = tdb.dbPath, tdb.engine
    
    # --- Step 1: reset schema BEFORE opening a session/transaction ---
    # Single unified call; no dialect branching here.
    lifecycle.reset_db(engine, db_path=dbPath)
    
    # --- Step 2: open a new session for rebuild work ---
    with tdb.Session() as session:
        # Import standard tables on a plain session with progress
        with Progress(
            max_value=len(tdb.importTables) + 1,
            prefix="Importing",
            width=25,
            style=CountingBar,
        ) as prog:
            for importName, importTable in tdb.importTables:
                import_path = Path(importName)
                import_lines = file_line_count(import_path, missing_ok=True)
                with prog.sub_task(
                    max_value=import_lines, description=importTable
                ) as child:
                    prog.increment(value=1)
                    call_args = {"task": child, "advance": 1}
                    try:
                        processImportFile(
                            tdenv,
                            session,
                            import_path,
                            importTable,
                            line_callback=prog.update_task,
                            call_args=call_args,
                        )
                        # safety commit after each file
                        session.commit()
                    except FileNotFoundError:
                        tdenv.DEBUG0(
                            "WARNING: processImportFile found no {} file", importName
                        )
                    except StopIteration:
                        tdenv.NOTE(
                            "{} exists but is empty. "
                            "Remove it or add the column definition line.",
                            importName,
                        )
                    prog.increment(1)
            
            with prog.sub_task(description="Save DB"):
                session.commit()
    
    # # --- Step 3: parse the prices file (still plain session) ---
    # if pricesPath.exists():
    #     with Progress(max_value=None, width=25, prefix="Processing prices file"):
    #         processPricesFile(tdenv, session, pricesPath)
    # else:
    #     tdenv.NOTE(
    #         f'Missing "{pricesPath}" file - no price data.',
    #         stderr=True,
    #     )
    
    tdb.close()
    tdenv.NOTE(
        "Database build completed.",
        stderr=True,
    )

######################################################################


def regeneratePricesFile(tdb: TradeDB, tdenv: TradeEnv) -> None:
    return
    # """
    # Regenerate the .prices file from the current DB contents.
    # Uses the ORM session rather than raw sqlite.
    # """
    # tdenv.DEBUG0("Regenerating .prices file")
    #
    # with tdb.Session() as session:
    #     with tdb.pricesPath.open("w", encoding="utf-8") as pricesFile:
    #         prices.dumpPrices(
    #             session,
    #             prices.Element.full,
    #             file=pricesFile,
    #             debug=tdenv.debug,
    #         )
    #
    # # Only touch the DB file on SQLite — MariaDB has no dbPath
    # if tdb.engine.dialect.name == "sqlite" and tdb.dbPath and os.path.exists(tdb.dbPath):
    #     os.utime(tdb.dbPath)

######################################################################


def importDataFromFile(tdb, tdenv, path, pricesFh=None, reset=False):
    """
    Import price data from a file on a per-station basis,
    that is when a new station is encountered, delete any
    existing records for that station in the database.
    """
    
    if not pricesFh and not path.exists():
        raise TradeException(f"No such file: {path}")
    
    if reset:
        tdenv.DEBUG0("Resetting price data")
        with tdb.Session.begin() as session:
            session.query(SA.StationItem).delete()
    
    tdenv.DEBUG0(f"Importing data from {path}")
    # processPricesFile takes an ORM session (see its signature above), so
    # open one here rather than passing a raw DB handle.
    with tdb.Session() as session:
        processPricesFile(
            tdenv,
            session,
            pricesPath=path,
            pricesFh=pricesFh,
        )
        session.commit()
    
    # # If everything worked, regenerate the canonical prices file if this wasn't the main one
    # if path != tdb.pricesPath:
    #     regeneratePricesFile(tdb, tdenv)
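
# Usage sketch (illustrative): a full re-import of a prices dump, wiping
# existing price rows first.
#
#   importDataFromFile(tdb, tdenv, Path("import.prices"), reset=True)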