tradedangerous-12.7.6-py3-none-any.whl
- py.typed +1 -0
- trade.py +49 -0
- tradedangerous/__init__.py +43 -0
- tradedangerous/cache.py +1381 -0
- tradedangerous/cli.py +136 -0
- tradedangerous/commands/TEMPLATE.py +74 -0
- tradedangerous/commands/__init__.py +244 -0
- tradedangerous/commands/buildcache_cmd.py +102 -0
- tradedangerous/commands/buy_cmd.py +427 -0
- tradedangerous/commands/commandenv.py +372 -0
- tradedangerous/commands/exceptions.py +94 -0
- tradedangerous/commands/export_cmd.py +150 -0
- tradedangerous/commands/import_cmd.py +222 -0
- tradedangerous/commands/local_cmd.py +243 -0
- tradedangerous/commands/market_cmd.py +207 -0
- tradedangerous/commands/nav_cmd.py +252 -0
- tradedangerous/commands/olddata_cmd.py +270 -0
- tradedangerous/commands/parsing.py +221 -0
- tradedangerous/commands/rares_cmd.py +298 -0
- tradedangerous/commands/run_cmd.py +1521 -0
- tradedangerous/commands/sell_cmd.py +262 -0
- tradedangerous/commands/shipvendor_cmd.py +60 -0
- tradedangerous/commands/station_cmd.py +68 -0
- tradedangerous/commands/trade_cmd.py +181 -0
- tradedangerous/commands/update_cmd.py +67 -0
- tradedangerous/corrections.py +55 -0
- tradedangerous/csvexport.py +234 -0
- tradedangerous/db/__init__.py +27 -0
- tradedangerous/db/adapter.py +192 -0
- tradedangerous/db/config.py +107 -0
- tradedangerous/db/engine.py +259 -0
- tradedangerous/db/lifecycle.py +332 -0
- tradedangerous/db/locks.py +208 -0
- tradedangerous/db/orm_models.py +500 -0
- tradedangerous/db/paths.py +113 -0
- tradedangerous/db/utils.py +661 -0
- tradedangerous/edscupdate.py +565 -0
- tradedangerous/edsmupdate.py +474 -0
- tradedangerous/formatting.py +210 -0
- tradedangerous/fs.py +156 -0
- tradedangerous/gui.py +1146 -0
- tradedangerous/mapping.py +133 -0
- tradedangerous/mfd/__init__.py +103 -0
- tradedangerous/mfd/saitek/__init__.py +3 -0
- tradedangerous/mfd/saitek/directoutput.py +678 -0
- tradedangerous/mfd/saitek/x52pro.py +195 -0
- tradedangerous/misc/checkpricebounds.py +287 -0
- tradedangerous/misc/clipboard.py +49 -0
- tradedangerous/misc/coord64.py +83 -0
- tradedangerous/misc/csvdialect.py +57 -0
- tradedangerous/misc/derp-sentinel.py +35 -0
- tradedangerous/misc/diff-system-csvs.py +159 -0
- tradedangerous/misc/eddb.py +81 -0
- tradedangerous/misc/eddn.py +349 -0
- tradedangerous/misc/edsc.py +437 -0
- tradedangerous/misc/edsm.py +121 -0
- tradedangerous/misc/importeddbstats.py +54 -0
- tradedangerous/misc/prices-json-exp.py +179 -0
- tradedangerous/misc/progress.py +194 -0
- tradedangerous/plugins/__init__.py +249 -0
- tradedangerous/plugins/edcd_plug.py +371 -0
- tradedangerous/plugins/eddblink_plug.py +861 -0
- tradedangerous/plugins/edmc_batch_plug.py +133 -0
- tradedangerous/plugins/spansh_plug.py +2647 -0
- tradedangerous/prices.py +211 -0
- tradedangerous/submit-distances.py +422 -0
- tradedangerous/templates/Added.csv +37 -0
- tradedangerous/templates/Category.csv +17 -0
- tradedangerous/templates/RareItem.csv +143 -0
- tradedangerous/templates/TradeDangerous.sql +338 -0
- tradedangerous/tools.py +40 -0
- tradedangerous/tradecalc.py +1302 -0
- tradedangerous/tradedb.py +2320 -0
- tradedangerous/tradeenv.py +313 -0
- tradedangerous/tradeenv.pyi +109 -0
- tradedangerous/tradeexcept.py +131 -0
- tradedangerous/tradeorm.py +183 -0
- tradedangerous/transfers.py +192 -0
- tradedangerous/utils.py +243 -0
- tradedangerous/version.py +16 -0
- tradedangerous-12.7.6.dist-info/METADATA +106 -0
- tradedangerous-12.7.6.dist-info/RECORD +87 -0
- tradedangerous-12.7.6.dist-info/WHEEL +5 -0
- tradedangerous-12.7.6.dist-info/entry_points.txt +3 -0
- tradedangerous-12.7.6.dist-info/licenses/LICENSE +373 -0
- tradedangerous-12.7.6.dist-info/top_level.txt +2 -0
- tradegui.py +24 -0
@@ -0,0 +1,861 @@
"""
Import plugin that uses data files from
https://elite.tromador.com/ to update the Database.
"""
from __future__ import annotations

from contextlib import contextmanager
from email.utils import parsedate_to_datetime
from pathlib import Path
import csv
import datetime
import os
import requests
import time
import typing

from sqlalchemy.orm import Session
from sqlalchemy import func, delete, select, exists, text

from tradedangerous import plugins, transfers, TradeException
from tradedangerous.db import orm_models as SA, lifecycle
from tradedangerous.db.utils import (
    begin_bulk_mode, end_bulk_mode,
    get_import_batch_size, get_upsert_fn,
)
from tradedangerous.fs import file_line_count
from tradedangerous.misc import progress as pbar
from tradedangerous.plugins import PluginException

if typing.TYPE_CHECKING:
    from tradedangerous.tradeenv import TradeEnv


# Constants
BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"


class DecodingError(PluginException):
    pass


@contextmanager
def bench(label: str, tdenv: TradeEnv):
    started = time.time()
    with pbar.Progress(0, 40, label=label, style=pbar.ElapsedBar):
        yield
    tdenv.NOTE("{} done ({:.3f}s)", label, time.time() - started)

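# A minimal usage sketch for the bench() context manager above (hedged: any
# TradeEnv instance works for `tdenv`; `rebuild_index` is a hypothetical
# stand-in for a long-running call). It nests the timed block inside an
# elapsed-time progress bar, then NOTEs the duration:
#
#     with bench("Rebuilding index", tdenv):
#         rebuild_index()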
def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
    """ Calculates the number of entries in a listing file by counting the lines. """
    if not listings.exists():
        tdenv.NOTE("File not found, aborting: {}", listings)
        return 0
    
    tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
    count = file_line_count(listings)
    if count <= 1:
        if count == 1:
            tdenv.DEBUG0("Listing count of 1 suggests nothing but a header")
        else:
            tdenv.DEBUG0("Listings file is empty, nothing to do.")
        return 0
    
    return count + 1  # kfsone: Doesn't the header already make this + 1?


def _make_item_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
    """Helper: retrieve the list of commodities in the database."""
    tdenv.DEBUG0("Getting list of commodities...")
    rows = session.query(SA.Item.item_id).all()
    return frozenset(r[0] for r in rows)


def _make_station_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
    """Helper: retrieve the list of station IDs in the database."""
    tdenv.DEBUG0("Getting list of stations...")
    rows = session.query(SA.Station.station_id).all()
    return frozenset(r[0] for r in rows)


def _collect_station_modified_times(tdenv: TradeEnv, session: Session) -> dict[int, int]:
    """Helper: build a mapping of last-modified time for all stations, keyed by id (epoch seconds)."""
    tdenv.DEBUG0("Getting last-update times for stations...")
    rows = (
        session.query(
            SA.StationItem.station_id,
            func.min(SA.StationItem.modified),
        )
        .group_by(SA.StationItem.station_id)
        .all()
    )
    return {
        station_id: int(modified.timestamp()) if modified else 0
        for station_id, modified in rows
    }

class ImportPlugin(plugins.ImportPluginBase):
    """
    Import plugin that uses data files from
    https://elite.tromador.com/ to update the Database.
    """
    pluginOptions = {
        'item': "Update Items using latest file from server. (Implies '-O system,station')",
        'rare': "Update RareItems using latest file from server. (Implies '-O system,station')",
        'ship': "Update Ships using latest file from server.",
        'upgrade': "Update Upgrades using latest file from server.",
        'system': "Update Systems using latest file from server.",
        'station': "Update Stations using latest file from server. (Implies '-O system')",
        'shipvend': "Update ShipVendors using latest file from server. (Implies '-O system,station,ship')",
        'upvend': "Update UpgradeVendors using latest file from server. (Implies '-O system,station,upgrade')",
        'listings': "Update market data using latest listings.csv dump. (Implies '-O item,system,station')",
        'all': "Update everything with latest dumpfiles. (Regenerates all tables.)",
        'clean': "Erase entire database and rebuild from empty. (Regenerates all tables.)",
        'skipvend': "Don't regenerate ShipVendors or UpgradeVendors. (Supersedes '-O all', '-O clean'.)",
        'force': "Force regeneration of selected items even if the source file has not been updated since the previous run. "
                 "(Useful for updating Vendor tables if they were skipped during a '-O clean' run.)",
        'purge': "Remove any empty systems that previously had fleet carriers.",
        'optimize': "Optimize ('vacuum') database after processing.",
        'solo': "Don't download crowd-sourced market data. "
                "(Implies '-O skipvend', supersedes '-O all', '-O clean', '-O listings'.)",
        '7days': "Ignore data more than 7 days old during import, and expire old records after import.",
        'units': "Treat the supply/demand price of listing entries with 0 units as 0. "
                 "This stops things like Tritium showing up where it's not available but someone was "
                 "able to sell it.",
        'bootstrap': "Helper to 'do the right thing' and get you some data.",
    }
    
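    # How the options compose on the command line (grounded in the help text
    # above; 'trade' is the entry point this package installs):
    #
    #   trade import -P eddblink -O clean,skipvend   # full rebuild, skip vendor tables
    #   trade import -P eddblink -O 7days,skipvend   # incremental market refresh
    #
    # Options are comma-separated after -O, and implied options (e.g. 'station'
    # implying 'system') are switched on automatically in run() below.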
    def __init__(self, tdb, tdenv):
        super().__init__(tdb, tdenv)
        
        self.dataPath = os.environ.get('TD_EDDB') or self.tdenv.tmpDir
        self.categoriesPath = Path("Category.csv")
        self.commoditiesPath = Path("Item.csv")
        self.rareItemPath = Path("RareItem.csv")
        self.shipPath = Path("Ship.csv")
        self.urlShipyard = "https://raw.githubusercontent.com/EDCD/FDevIDs/master/shipyard.csv"
        self.FDevShipyardPath = self.tdb.dataPath / Path("FDevShipyard.csv")
        self.shipVendorPath = Path("ShipVendor.csv")
        self.stationsPath = Path("Station.csv")
        self.sysPath = Path("System.csv")
        self.upgradesPath = Path("Upgrade.csv")
        self.urlOutfitting = "https://raw.githubusercontent.com/EDCD/FDevIDs/master/outfitting.csv"
        self.FDevOutfittingPath = self.tdb.dataPath / Path("FDevOutfitting.csv")
        self.upgradeVendorPath = Path("UpgradeVendor.csv")
        self.listingsPath = Path("listings.csv")
        self.liveListingsPath = Path("listings-live.csv")
        self.pricesPath = Path("listings.prices")
    
    def now(self):
        return datetime.datetime.now().strftime('%H:%M:%S')
    
    def _eddblink_state_path(self) -> Path:
        """
        Single sidecar state file stored in TD_DATA (tdb.dataPath).
        This is the authoritative record of "downloaded from server" identity.
        """
        return (self.tdb.dataPath / "eddblink_state.json").resolve()
    
    def _load_eddblink_state(self) -> dict:
        import json
        
        state_path = self._eddblink_state_path()
        if not state_path.exists():
            return {"version": 1, "files": {}}
        
        try:
            with state_path.open("r", encoding="utf-8") as fh:
                state = json.load(fh)
            if not isinstance(state, dict):
                return {"version": 1, "files": {}}
            state.setdefault("version", 1)
            files = state.setdefault("files", {})
            if not isinstance(files, dict):
                state["files"] = {}
            return state
        except Exception:
            # Corrupt/partial JSON shouldn't brick the importer; treat as "no state"
            return {"version": 1, "files": {}}
    
    def _save_eddblink_state(self, state: dict) -> None:
        import json
        
        state_path = self._eddblink_state_path()
        state_path.parent.mkdir(parents=True, exist_ok=True)
        
        tmp_path = state_path.with_name(state_path.name + ".tmp")
        with tmp_path.open("w", encoding="utf-8") as fh:
            json.dump(state, fh, indent=2, sort_keys=True)
            fh.write("\n")
        tmp_path.replace(state_path)
    
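    # For reference, the eddblink_state.json written by the methods above looks
    # roughly like this (values are illustrative, not real server data; the
    # keys match the new_entry dict built in downloadFile() below):
    #
    #   {
    #     "version": 1,
    #     "files": {
    #       "Category.csv": {
    #         "url": "https://elite.tromador.com/files/Category.csv",
    #         "local_path": "/home/cmdr/.tradedangerous/data/Category.csv",
    #         "etag": "\"5f2b-60c1\"",
    #         "last_modified": "Tue, 01 Jan 2030 00:00:00 GMT",
    #         "content_length": "1234",
    #         "downloaded_at": "2030-01-01T00:00:05+00:00",
    #         "size": 1234,
    #         "sha256": "…"
    #       }
    #     }
    #   }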
    def _file_sha256(self, path: Path) -> str:
        import hashlib
        
        h = hashlib.sha256()
        with path.open("rb") as fh:
            for chunk in iter(lambda: fh.read(1024 * 1024), b""):
                h.update(chunk)
        return h.hexdigest()
    
    def _sanity_check_category_root(self) -> None:
        """
        Category is foundational. If it's wrong, the DB is not trustworthy.
        Minimal check: Category.category_id == 1 must be 'Metals' (case-insensitive).
        """
        rebuild_cmd = "trade import -P eddblink -O clean,skipvend"
        
        Session = self.tdb.Session
        try:
            with Session() as session:
                row = session.execute(
                    select(SA.Category.category_id, SA.Category.name)
                    .where(SA.Category.category_id == 1)
                ).first()
        except Exception as e:
            raise PluginException(
                "Category table check failed (missing schema or broken DB).\n"
                "This DB is not usable; rebuild your local database with:\n"
                f"  {rebuild_cmd}"
            ) from e
        
        if not row:
            raise PluginException(
                "Category table is missing/empty.\n"
                "This DB is not usable; rebuild your local database with:\n"
                f"  {rebuild_cmd}"
            )
        
        cid, name = row
        got = (str(name) if name is not None else "").strip()
        if got.lower() != "metals":
            raise PluginException(
                "Category table is corrupt: category_id=1 expected 'Metals'.\n"
                f"Got: {got!r}\n"
                "This DB is not trustworthy; rebuild your local database with:\n"
                f"  {rebuild_cmd}"
            )
    
    def downloadFile(self, path):
        """
        Fetch the latest dumpfile from the website based on server identity,
        not local mtime.
        
        Proof-of-sync is stored in TD_DATA/eddblink_state.json.
        If there's no state entry for a file, it is considered out-of-sync
        (e.g. template-copied files) and will be downloaded.
        """
        if path not in (self.liveListingsPath, self.listingsPath):
            localPath = Path(self.tdb.dataPath, path)
        else:
            localPath = Path(self.dataPath, path)
        
        url = BASE_URL + str(path)
        key = str(path)
        
        self.tdenv.NOTE("Checking for update to '{}'.", path)
        
        state = self._load_eddblink_state()
        files_state = state.setdefault("files", {})
        entry = files_state.get(key)
        
        # Local integrity check against recorded state (detect template clobber / manual edits).
        in_sync_locally = False
        if entry and localPath.exists():
            try:
                st = localPath.stat()
                if int(entry.get("size", -1)) == int(st.st_size):
                    want_sha = entry.get("sha256")
                    if want_sha:
                        got_sha = self._file_sha256(localPath)
                        if got_sha == want_sha:
                            in_sync_locally = True
                    else:
                        in_sync_locally = True
            except Exception:
                in_sync_locally = False
        
        # HEAD request for remote identity (ETag/Last-Modified)
        headers = {"User-Agent": "Trade-Dangerous", "Accept-Encoding": "identity"}
        try:
            response = requests.head(url, headers=headers, timeout=70)
        except Exception as e:  # pylint: disable=broad-exception-caught
            self.tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", url, str(e))
            return False
        
        if not getattr(response, "ok", False):
            self.tdenv.WARN("Problem with download:\n URL: {}\n HTTP: {}", url, getattr(response, "status_code", "?"))
            return False
        
        remote_etag = response.headers.get("etag")
        remote_last_modified = response.headers.get("last-modified")
        remote_length = response.headers.get("content-length")
        
        dump_mod_time = None
        if remote_last_modified:
            try:
                dump_mod_time = parsedate_to_datetime(remote_last_modified).timestamp()
            except Exception:
                dump_mod_time = None
        
        # If we have a prior server-proven state AND the local file matches that state,
        # we can skip downloading when the remote identity matches.
        if entry and in_sync_locally:
            # Prefer ETag when available; else fall back to Last-Modified.
            if remote_etag and entry.get("etag") == remote_etag:
                self.tdenv.DEBUG0("'{}': Remote ETag matches state; no download.", path)
                return False
            if (not remote_etag) and remote_last_modified and entry.get("last_modified") == remote_last_modified:
                self.tdenv.DEBUG0("'{}': Remote Last-Modified matches state; no download.", path)
                return False
        
        # If state is missing, or local doesn't match recorded state, or remote identity differs -> download.
        self.tdenv.NOTE("Downloading file '{}'.", path)
        transfers.download(self.tdenv, url, localPath, chunkSize=16384, length=remote_length)
        
        # Change timestamps on the file to match the server (human convenience only)
        if dump_mod_time is not None:
            try:
                os.utime(localPath, (dump_mod_time, dump_mod_time))
            except Exception:
                pass
        
        # Update sync state (stored in TD_DATA regardless of localPath location)
        try:
            st = localPath.stat()
            new_entry = {
                "url": url,
                "local_path": str(localPath.resolve()),
                "etag": remote_etag,
                "last_modified": remote_last_modified,
                "content_length": remote_length,
                "downloaded_at": datetime.datetime.now(datetime.timezone.utc).isoformat(),
                "size": int(st.st_size),
            }
            
            # Hash only the small "truth-critical" files (cheap + detects template clobber cleanly).
            if key in ("Category.csv", "RareItem.csv", "Item.csv"):
                new_entry["sha256"] = self._file_sha256(localPath)
            
            files_state[key] = new_entry
            self._save_eddblink_state(state)
        except Exception:
            # State failures must not make downloads fail.
            pass
        
        return True
    
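    # The skip/download decision above, summarized (a reading aid, not code
    # that ships with the plugin; HEAD failures also return False without
    # downloading):
    #
    #   state entry?  local matches state?  remote ETag/Last-Modified match?  -> action
    #   no            -                     -                                 -> download
    #   yes           no                    -                                 -> download
    #   yes           yes                   yes                               -> skip (return False)
    #   yes           yes                   no                                -> download
    #
    # downloadFile() returns True only when a fresh copy was actually fetched,
    # which is what run() uses to decide which tables need refreshing.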
    def purgeSystems(self):
        """
        Purges systems from the System table that do not have any stations claiming to be in them.
        Keeps the table from becoming too large because of fleet carriers moving to unpopulated systems.
        """
        self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
        
        Session = self.tdb.Session
        with Session.begin() as session:
            subq = select(SA.Station.system_id).where(SA.Station.system_id == SA.System.system_id)
            stmt = delete(SA.System).where(~exists(subq))
            session.execute(stmt)
        
        self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
    
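    # The ORM statement above is equivalent to this correlated SQL (shown for
    # clarity; exact quoting varies by backend):
    #
    #   DELETE FROM System
    #   WHERE NOT EXISTS (
    #       SELECT 1 FROM Station WHERE Station.system_id = System.system_id
    #   );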
    def importListings(self, listings_file):
        """
        Updates the market data (StationItem) using `listings_file`.
        
        Rules:
        - If a row doesn't exist in DB → insert (copy CSV exactly).
        - If it exists → update only when CSV.modified > DB.modified.
        - If CSV.modified <= DB.modified → do nothing (no field changes).
        """
        listings_path = Path(self.dataPath, listings_file).absolute()
        from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
        
        self.tdenv.NOTE("Checking listings")
        total = _count_listing_entries(self.tdenv, listings_path)
        if not total:
            self.tdenv.NOTE("No listings")
            return
        
        self.tdenv.NOTE(
            "Processing market data from {}: Start time = {}, Live = {}",
            listings_file, self.now(), from_live
        )
        
        Session = self.tdb.Session
        
        # Prefetch item/station IDs for early filtering
        with Session.begin() as session:
            item_lookup = _make_item_id_lookup(self.tdenv, session)
            station_lookup = _make_station_id_lookup(self.tdenv, session)
        
        self.tdenv.DEBUG0("Processing entries...")
        
        with pbar.Progress(total, 40, label="Processing", style=pbar.LongRunningCountBar) as prog, \
             listings_path.open("r", encoding="utf-8", errors="ignore") as fh, \
             Session() as session:
            
            token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
            try:
                commit_batch = get_import_batch_size(session, profile="eddblink")
                execute_batch = commit_batch or 10000  # cap statement size even if single final commit
                
                # Upsert: keys + guarded fields (including from_live), guarded by 'modified'
                table = SA.StationItem.__table__
                key_cols = ("station_id", "item_id")
                update_cols = (
                    "demand_price", "demand_units", "demand_level",
                    "supply_price", "supply_units", "supply_level",
                    "from_live",
                )
                upsert = get_upsert_fn(
                    session,
                    table,
                    key_cols=key_cols,
                    update_cols=update_cols,
                    modified_col="modified",
                    always_update=(),  # IMPORTANT: no unconditional updates
                )
                
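                # On SQLite, the guarded upsert that get_upsert_fn() builds is
                # expected to look roughly like this (a sketch, not the exact
                # generated SQL; other backends use their own upsert forms):
                #
                #   INSERT INTO StationItem (station_id, item_id, modified, from_live, ...)
                #   VALUES (:station_id, :item_id, :modified, :from_live, ...)
                #   ON CONFLICT (station_id, item_id) DO UPDATE SET
                #       demand_price = excluded.demand_price, ...,
                #       modified     = excluded.modified
                #   WHERE excluded.modified > StationItem.modified;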
                batch_rows = []
                since_commit = 0
                
                # optimize away millions of lookups
                increment = prog.increment
                
                def bump_progress():
                    increment(1)
                
                from_timestamp = datetime.datetime.fromtimestamp
                utc = datetime.timezone.utc
                from_live_val = int(from_live)
                week_in_seconds = 7 * 24 * 60 * 60
                time_cutoff = 0 if not self.getOption("7days") else time.time() - week_in_seconds
                squelch_zero_units = self.getOption("units")
                
                # Columns:
                #
                # id, station_id, commodity_id, supply, supply_bracket, buy_price, sell_price, demand, demand_bracket, collected_at
                # 0   1           2             3       4               5          6           7       8               9
                reader = iter(csv.reader(fh))
                headers = next(reader)
                expect_headers = [
                    "id", "station_id", "commodity_id",
                    "supply", "supply_bracket", "buy_price",
                    "sell_price", "demand", "demand_bracket",
                    "collected_at"
                ]
                if headers[:10] != expect_headers:
                    raise TradeException(
                        f"incompatible csv field organization in {listings_path}. "
                        f"expected {expect_headers}; got {headers}"
                    )
                
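                # Worked example (illustrative values): a CSV line such as
                #
                #   4711,128666762,42,0,0,0,1245,3500,2,1893456000
                #
                # passes the cheap price check below (sell_price 1245 != 0)
                # and, if the station and item IDs are known, becomes:
                #
                #   {"station_id": 128666762, "item_id": 42,
                #    "modified": datetime(2030, ..., tzinfo=utc), "from_live": 0,
                #    "supply_units": 0, "supply_level": 0, "supply_price": 0,
                #    "demand_price": 1245, "demand_units": 3500, "demand_level": 2}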
                for listing in reader:
                    bump_progress()
                    try:
                        if squelch_zero_units:
                            if listing[3] == "0":
                                listing[3] = listing[4] = listing[5] = "0"
                            if listing[7] == "0":
                                listing[6] = listing[7] = listing[8] = "0"
                        
                        # Do the cheapest skip-check first
                        if listing[5] == "0" and listing[6] == "0":
                            continue
                        
                        # Cheap numeric condition
                        listing_time = int(listing[9])
                        if listing_time < time_cutoff:
                            continue
                        
                        station_id = int(listing[1])
                        if station_id not in station_lookup:
                            continue
                        
                        item_id = int(listing[2])
                        if item_id not in item_lookup:
                            continue  # skip rare items (not in Item table)
                        
                        dt_listing_time = from_timestamp(listing_time, utc)
                        
                        row = {
                            "station_id": station_id,
                            "item_id": item_id,
                            "modified": dt_listing_time,   # guard column
                            "from_live": from_live_val,    # copied exactly when updating/inserting
                            "supply_units": int(listing[3]),
                            "supply_level": int(listing[4]),
                            "supply_price": int(listing[5]),
                            "demand_price": int(listing[6]),
                            "demand_units": int(listing[7]),
                            "demand_level": int(listing[8]),
                        }
                        batch_rows += [row]
                        since_commit += 1
                        
                        if len(batch_rows) >= execute_batch:
                            upsert(batch_rows)
                            batch_rows[:] = []  # in-place clear without lookup
                        
                        if commit_batch and since_commit >= commit_batch:
                            session.commit()
                            since_commit = 0
                    
                    except Exception as e:  # pylint: disable=broad-exception-caught
                        self.tdenv.WARN("Bad listing row (skipped): {} error: {}", listing, e)
                        continue
                
                if batch_rows:
                    upsert(batch_rows)
                    batch_rows[:] = []  # in-place clear
                
                session.commit()
            
            finally:
                end_bulk_mode(session, token)
        
        # with pbar.Progress(1, 40, prefix="Saving"):
        #     pass
        
        if self.getOption("7days"):
            # This is a gimmick for first-time pruning: instead of trying to delete
            # years of old data, do it a piece at a time. It gives the progress bar
            # some movement.
            expirations = [360, 330, 300, 270, 240, 210, 180, 150, 120, 90, 60, 30, 21, 14, 7]
            with pbar.Progress(len(expirations) + 1, 40, 1, label="Expiring", style=pbar.LongRunningCountBar) as prog, \
                 Session.begin() as session:
                for expiration in expirations:
                    session.execute(text(f"DELETE FROM StationItem WHERE modified < datetime('now', '-{expiration} days')"))
                    prog.increment(1)
        
        if self.getOption("optimize"):
            with pbar.Progress(0, 40, label="Optimizing", style=pbar.ElapsedBar) as prog:
                if self.tdb.engine.dialect.name == "sqlite":
                    with Session.begin() as session:
                        session.execute(text("VACUUM"))
        
        self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
    
    def _refresh_dump_tables(self, table_jobs: list[tuple[str, Path]]) -> None:
        """Upsert-refresh (table_name, csv_path) jobs into the live ORM database,
        with a proper row-count progress bar.
        
        Note: RareItem is rebuilt (wiped then re-imported) whenever it is refreshed.
        This avoids UNIQUE(name) collisions caused by historical PK drift / template-era imports.
        """
        if not table_jobs:
            return
        
        # Local import to avoid plugin import-order headaches.
        from tradedangerous import cache as td_cache
        
        Session = self.tdb.Session
        with Session() as session:
            with pbar.Progress(
                max_value=len(table_jobs) + 1,
                prefix="Upserting",
                width=25,
                style=pbar.CountingBar,
            ) as prog:
                for table_name, import_path in table_jobs:
                    import_lines = file_line_count(import_path, missing_ok=True)
                    with prog.sub_task(
                        max_value=import_lines,
                        description=table_name,
                    ) as child:
                        prog.increment(value=1)
                        call_args = {"task": child, "advance": 1}
                        try:
                            # RareItem: rebuild contents on refresh to avoid uq_rareitem_name collisions
                            # when the existing DB has the same names under different rare_id values.
                            if table_name == "RareItem":
                                session.execute(delete(SA.RareItem))
                            
                            td_cache.processImportFile(
                                self.tdenv,
                                session,
                                import_path,
                                table_name,
                                line_callback=prog.update_task,
                                call_args=call_args,
                            )
                            session.commit()
                        except FileNotFoundError:
                            self.tdenv.WARN("Missing import file for {}: {}", table_name, import_path)
                        except StopIteration:
                            self.tdenv.NOTE(
                                "{} exists but is empty. Remove it or add the column definition line.",
                                import_path,
                            )
                
                prog.increment(1)
    
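    # A typical `table_jobs` argument, as assembled by run() below (paths are
    # illustrative; the ordering is the dependency order run() uses):
    #
    #   [("System",   Path(".../data/System.csv")),
    #    ("Station",  Path(".../data/Station.csv")),
    #    ("Category", Path(".../data/Category.csv")),
    #    ("Item",     Path(".../data/Item.csv"))]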
    def run(self):
        """
        EDDN/EDDB link importer.
        
        Refactored DB flow:
        - No dialect-specific logic in the plugin.
        - Preflight uses TradeDB.reloadCache() (which centralizes sanity via lifecycle.ensure_fresh_db).
        - For '-O clean' → do a single full rebuild with the RareItem dance.
        - Otherwise, if static CSVs changed → upsert-refresh only those tables (no drop/recreate).
        - Listings import unchanged.
        """
        self.tdenv.ignoreUnknown = True
        self.tdb.dataPath.mkdir(parents=True, exist_ok=True)
        
        # Enable 'listings' by default unless other explicit options are present
        default = True
        for option in self.options:
            if option not in ('force', 'skipvend', 'purge', '7days', 'units'):
                default = False
        if default:
            self.options["listings"] = True
        
        if self.getOption("bootstrap"):
            self.tdenv.NOTE("[bold][blue]bootstrap: Greetings, Commander!")
            self.tdenv.NOTE(
                "[yellow]This first-time import might take several minutes or longer; "
                "it ensures your database is up to date with the current EDDBLink System, Station, and Item tables "
                "as well as trade listings for the last 7 days.")
            self.tdenv.NOTE(
                "[yellow]You can run this same command later to import updates - which should be much faster - "
                "or `trade import -P eddblink -O 7days,skipvend`.")
            self.tdenv.NOTE(
                "[yellow]To contribute your own discoveries to market data, consider running the "
                "Elite Dangerous Market Connector while playing.")
            for child in ["system", "station", "item", "listings", "skipvend", "7days"]:
                self.options[child] = True
        
        # Check if the database already exists and enable `clean` if not.
        if lifecycle.is_empty(self.tdb.engine):
            self.options["clean"] = True
        
        if self.getOption("clean"):
            # Remove CSVs so downloads become the new source of truth
            for name in [
                "Category", "Item", "RareItem",
                "Ship", "ShipVendor",
                "Station", "System",
                "Upgrade", "UpgradeVendor",
                "FDevShipyard", "FDevOutfitting",
            ]:
                f = self.tdb.dataPath / f"{name}.csv"
                try:
                    os.remove(str(f))
                except FileNotFoundError:
                    pass
            
            # Remove eddblink sync-state (sidecar) so templates never "win"
            try:
                os.remove(str(self._eddblink_state_path()))
            except FileNotFoundError:
                pass
            
            # Remove .prices (DEPRECATED)
            try:
                os.remove(str(self.tdb.dataPath / "TradeDangerous.prices"))
            except FileNotFoundError:
                pass
            
            self.options["all"] = True
            self.options["force"] = True
        else:
            # Category is foundational; if it's wrong, this DB is not trustworthy.
            # Hard-fail and force a rebuild rather than attempting to "refresh" it.
            self._sanity_check_category_root()
        
        # Select which options will be updated
        if self.getOption("listings"):
            self.options["item"] = True
            self.options["station"] = True
        
        if self.getOption("shipvend"):
            self.options["ship"] = True
            self.options["station"] = True
        
        if self.getOption("upvend"):
            self.options["upgrade"] = True
            self.options["station"] = True
        
        if self.getOption("item"):
            self.options["station"] = True
        
        if self.getOption("rare"):
            self.options["station"] = True
        
        if self.getOption("station"):
            self.options["system"] = True
        
        if self.getOption("all"):
            self.options["item"] = True
            self.options["rare"] = True
            self.options["ship"] = True
            self.options["shipvend"] = True
            self.options["station"] = True
            self.options["system"] = True
            self.options["upgrade"] = True
            self.options["upvend"] = True
            self.options["listings"] = True
        
        if self.getOption("solo"):
            self.options["listings"] = False
            self.options["skipvend"] = True
        
        if self.getOption("skipvend"):
            self.options["shipvend"] = False
            self.options["upvend"] = False
        
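        # Worked example of the cascade above: a bare '-O listings' run first
        # switches on 'item' and 'station', and 'station' in turn switches on
        # 'system', so the importer fetches Item.csv, Category.csv, Station.csv,
        # and System.csv before touching listings.csv.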
        # Download required files and decide which tables need upsert-refresh.
        force = self.getOption("force")
        
        upgrade_changed = False
        ship_changed = False
        rare_changed = False
        shipvend_changed = False
        upvend_changed = False
        system_changed = False
        station_changed = False
        category_changed = False
        item_changed = False
        
        # FDev bridge CSVs are treated as "changed" when we re-download them.
        fdev_shipyard_changed = False
        fdev_outfitting_changed = False
        
        if self.getOption("upgrade"):
            upgrade_changed = self.downloadFile(self.upgradesPath) or force
            if upgrade_changed:
                transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
                fdev_outfitting_changed = True
        
        if self.getOption("ship"):
            ship_changed = self.downloadFile(self.shipPath) or force
            if ship_changed:
                transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
                fdev_shipyard_changed = True
        
        if self.getOption("rare"):
            rare_changed = self.downloadFile(self.rareItemPath) or force
        
        if self.getOption("shipvend"):
            shipvend_changed = self.downloadFile(self.shipVendorPath) or force
        
        if self.getOption("upvend"):
            upvend_changed = self.downloadFile(self.upgradeVendorPath) or force
        
        if self.getOption("system"):
            system_changed = self.downloadFile(self.sysPath) or force
        
        if self.getOption("station"):
            station_changed = self.downloadFile(self.stationsPath) or force
        
        if self.getOption("item"):
            item_changed = self.downloadFile(self.commoditiesPath) or force
            # Category can change independently; always check when the item option is active.
            category_changed = self.downloadFile(self.categoriesPath) or force
        
        # If any of the non-listings tables changed, ensure the DB is fresh and then upsert-refresh.
        build_cache = any([
            upgrade_changed, ship_changed, rare_changed,
            shipvend_changed, upvend_changed,
            system_changed, station_changed,
            category_changed, item_changed,
            fdev_shipyard_changed, fdev_outfitting_changed,
        ])
        
        if build_cache:
            if self.getOption("clean"):
                # "clean" must mean clean for all backends:
                # - sqlite  → rotate/recreate the DB file
                # - mariadb → drop+recreate tables (NOT the database)
                self.tdenv.NOTE("NOTE: --clean requested; resetting database schema.")
                self.tdb.close()
                lifecycle.reset_db(
                    self.tdb.engine,
                    db_path=self.tdb.dbPath,
                    sql_path=self.tdb.sqlPath,
                )
            else:
                # Ensure schema exists and is sane (may rebuild on first run).
                self.tdb.close()
                self.tdb.reloadCache()
            
            if self.tdb.engine.dialect.name == "sqlite":
                # kfsone: see https://sqlite.org/pragma.html#pragma_optimize
                self.tdb.Session().execute(text("PRAGMA optimize=0x10002"))
            
            # Upsert-refresh tables in dependency order.
            jobs: list[tuple[str, Path]] = []
            
            if system_changed:
                jobs.append(("System", (self.tdb.dataPath / self.sysPath).resolve()))
            
            if station_changed:
                jobs.append(("Station", (self.tdb.dataPath / self.stationsPath).resolve()))
            
            if category_changed or item_changed:
                jobs.append(("Category", (self.tdb.dataPath / self.categoriesPath).resolve()))
                jobs.append(("Item", (self.tdb.dataPath / self.commoditiesPath).resolve()))
            
            if ship_changed:
                jobs.append(("Ship", (self.tdb.dataPath / self.shipPath).resolve()))
            if fdev_shipyard_changed:
                jobs.append(("FDevShipyard", self.FDevShipyardPath.resolve()))
            
            if upgrade_changed:
                jobs.append(("Upgrade", (self.tdb.dataPath / self.upgradesPath).resolve()))
            if fdev_outfitting_changed:
                jobs.append(("FDevOutfitting", self.FDevOutfittingPath.resolve()))
            
            if shipvend_changed:
                jobs.append(("ShipVendor", (self.tdb.dataPath / self.shipVendorPath).resolve()))
            
            if upvend_changed:
                jobs.append(("UpgradeVendor", (self.tdb.dataPath / self.upgradeVendorPath).resolve()))
            
            if rare_changed:
                jobs.append(("RareItem", (self.tdb.dataPath / self.rareItemPath).resolve()))
            
            self._refresh_dump_tables(jobs)
            self.tdb.close()
        
        if self.getOption("purge"):
            self.purgeSystems()
        
        # Listings import (prices)
        if self.getOption("listings"):
            if self.downloadFile(self.listingsPath) or force:
                self.importListings(self.listingsPath)
            if self.downloadFile(self.liveListingsPath) or force:
                self.importListings(self.liveListingsPath)
        
        if self.tdb.engine.dialect.name == "sqlite":
            with self.tdb.Session.begin() as session:
                if self.getOption("optimize"):
                    with bench("Vacuum and optimize", self.tdenv):
                        session.execute(text("VACUUM"))
                        # This is a very aggressive analyze/optimize pass
                        session.execute(text("ANALYZE"))
                else:
                    with bench("DB Tuning", self.tdenv):
                        session.execute(text("PRAGMA optimize"))
                        self.tdenv.INFO("Use --opt=optimize periodically for better query performance")
        
        self.tdenv.NOTE("Import completed.")
        
        return False
    
    def finish(self):
        """ Override the base class 'finish' method. """
        # We expect to return 'False' from run, so if this is called, something went horribly wrong;
        # if this gets reached, someone added a bad return to run().
        self.tdenv.WARN("Internal error: plugin's finish() method was reached")
        return False
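
# Putting it together: typical invocations of this plugin (option strings come
# from pluginOptions above; '-P eddblink' selects the plugin, and the CLI shape
# follows the rebuild command embedded in the sanity check):
#
#   trade import -P eddblink -O bootstrap        # first run: tables plus 7 days of listings
#   trade import -P eddblink                     # no table options -> defaults to '-O listings'
#   trade import -P eddblink -O clean,skipvend   # full rebuild, skipping vendor tables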