tradedangerous 11.5.3-py3-none-any.whl → 12.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tradedangerous might be problematic.
- tradedangerous/cache.py +567 -395
- tradedangerous/cli.py +2 -2
- tradedangerous/commands/TEMPLATE.py +25 -26
- tradedangerous/commands/__init__.py +8 -16
- tradedangerous/commands/buildcache_cmd.py +40 -10
- tradedangerous/commands/buy_cmd.py +57 -46
- tradedangerous/commands/commandenv.py +0 -2
- tradedangerous/commands/export_cmd.py +78 -50
- tradedangerous/commands/import_cmd.py +67 -31
- tradedangerous/commands/market_cmd.py +52 -19
- tradedangerous/commands/olddata_cmd.py +120 -107
- tradedangerous/commands/rares_cmd.py +122 -110
- tradedangerous/commands/run_cmd.py +118 -66
- tradedangerous/commands/sell_cmd.py +52 -45
- tradedangerous/commands/shipvendor_cmd.py +49 -234
- tradedangerous/commands/station_cmd.py +55 -485
- tradedangerous/commands/update_cmd.py +56 -420
- tradedangerous/csvexport.py +173 -162
- tradedangerous/db/__init__.py +27 -0
- tradedangerous/db/adapter.py +191 -0
- tradedangerous/db/config.py +95 -0
- tradedangerous/db/engine.py +246 -0
- tradedangerous/db/lifecycle.py +332 -0
- tradedangerous/db/locks.py +208 -0
- tradedangerous/db/orm_models.py +455 -0
- tradedangerous/db/paths.py +112 -0
- tradedangerous/db/utils.py +661 -0
- tradedangerous/gui.py +2 -2
- tradedangerous/plugins/eddblink_plug.py +387 -251
- tradedangerous/plugins/spansh_plug.py +2488 -821
- tradedangerous/prices.py +124 -142
- tradedangerous/templates/TradeDangerous.sql +6 -6
- tradedangerous/tradecalc.py +1227 -1109
- tradedangerous/tradedb.py +533 -384
- tradedangerous/tradeenv.py +12 -1
- tradedangerous/version.py +1 -1
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/METADATA +11 -7
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/RECORD +42 -38
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/WHEEL +1 -1
- tradedangerous/commands/update_gui.py +0 -721
- tradedangerous/jsonprices.py +0 -254
- tradedangerous/plugins/edapi_plug.py +0 -1071
- tradedangerous/plugins/journal_plug.py +0 -537
- tradedangerous/plugins/netlog_plug.py +0 -316
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/entry_points.txt +0 -0
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info/licenses}/LICENSE +0 -0
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/top_level.txt +0 -0
tradedangerous/plugins/eddblink_plug.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 """
 Import plugin that uses data files from
 https://elite.tromador.com/ to update the Database.
 """
-
+

 from email.utils import parsedate_to_datetime
 from pathlib import Path
-from .. …
+from ..fs import file_line_count
 from .. import plugins, cache, transfers
 from ..misc import progress as pbar
 from ..plugins import PluginException
@@ -15,13 +17,15 @@ import csv
 import datetime
 import os
 import requests
-import sqlite3
 import typing

+from sqlalchemy.orm import Session
+from sqlalchemy import func, delete, select, exists, text
+from ..db import orm_models as SA, lifecycle

 if typing.TYPE_CHECKING:
     from typing import Optional
-    from .. …
+    from ..tradeenv import TradeEnv

 # Constants
 BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"
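The plugin now talks to the database exclusively through SQLAlchemy sessions supplied by the new `tradedangerous.db` package (via `self.tdb.Session`). For readers unfamiliar with the idiom, the sketch below shows the transactional pattern the new imports enable; the engine URL is illustrative only, since the plugin never builds its own engine.

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

# Illustrative only: the plugin reuses self.tdb.Session rather than creating an engine.
engine = create_engine("sqlite:///data/TradeDangerous.db")
Session = sessionmaker(engine)

# Session.begin() opens a transaction-scoped session: it commits automatically
# when the block exits cleanly and rolls back if an exception escapes.
with Session.begin() as session:
    station_count = session.execute(text("SELECT COUNT(*) FROM Station")).scalar_one()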
@@ -36,7 +40,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
     if not listings.exists():
         tdenv.NOTE("File not found, aborting: {}", listings)
         return 0
-
+
     tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
     count = file_line_count(listings)
     if count <= 1:
@@ -45,26 +49,39 @@
     else:
         tdenv.DEBUG0("Listings file is empty, nothing to do.")
         return 0
-
+
     return count + 1  # kfsone: Doesn't the header already make this + 1?


-def _make_item_id_lookup(tdenv: TradeEnv, …
-    """
+def _make_item_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
+    """Helper: retrieve the list of commodities in database."""
     tdenv.DEBUG0("Getting list of commodities...")
-    …
+    rows = session.query(SA.Item.item_id).all()
+    return frozenset(r[0] for r in rows)


-def _make_station_id_lookup(tdenv: TradeEnv, …
-    """
+def _make_station_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
+    """Helper: retrieve the list of station IDs in database."""
     tdenv.DEBUG0("Getting list of stations...")
-    …
+    rows = session.query(SA.Station.station_id).all()
+    return frozenset(r[0] for r in rows)


-def _collect_station_modified_times(tdenv: TradeEnv, …
-    """
+def _collect_station_modified_times(tdenv: TradeEnv, session: Session) -> dict[int, int]:
+    """Helper: build a list of the last modified time for all stations by id (epoch seconds)."""
     tdenv.DEBUG0("Getting last-update times for stations...")
-    …
+    rows = (
+        session.query(
+            SA.StationItem.station_id,
+            func.min(SA.StationItem.modified),
+        )
+        .group_by(SA.StationItem.station_id)
+        .all()
+    )
+    return {
+        station_id: int(modified.timestamp()) if modified else 0
+        for station_id, modified in rows
+    }


 class ImportPlugin(plugins.ImportPluginBase):
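These helpers still use the legacy `session.query(...)` API. The same per-station aggregation can also be written with a 2.0-style `select()`; the sketch below is an equivalent formulation (not part of the plugin) that may help when reading the new ORM models in `tradedangerous/db/orm_models.py`.

from sqlalchemy import func, select
from tradedangerous.db import orm_models as SA

def collect_station_modified_times(session) -> dict[int, int]:
    # Oldest 'modified' timestamp per station, as epoch seconds (0 when NULL).
    stmt = (
        select(SA.StationItem.station_id, func.min(SA.StationItem.modified))
        .group_by(SA.StationItem.station_id)
    )
    return {
        station_id: int(modified.timestamp()) if modified else 0
        for station_id, modified in session.execute(stmt)
    }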
@@ -91,10 +108,10 @@ class ImportPlugin(plugins.ImportPluginBase):
         'optimize': "Optimize ('vacuum') database after processing.",
         'solo': "Don't download crowd-sourced market data. (Implies '-O skipvend', supercedes '-O all', '-O clean', '-O listings'.)",
     }
-
+
     def __init__(self, tdb, tdenv):
         super().__init__(tdb, tdenv)
-
+
         self.dataPath = os.environ.get('TD_EDDB') or self.tdenv.tmpDir
         self.categoriesPath = Path("Category.csv")
         self.commoditiesPath = Path("Item.csv")
@@ -112,10 +129,10 @@
         self.listingsPath = Path("listings.csv")
         self.liveListingsPath = Path("listings-live.csv")
         self.pricesPath = Path("listings.prices")
-
+
     def now(self):
         return datetime.datetime.now()
-
+
     def downloadFile(self, path):
         """
         Fetch the latest dumpfile from the website if newer than local copy.
@@ -124,9 +141,9 @@
             localPath = Path(self.tdb.dataPath, path)
         else:
             localPath = Path(self.dataPath, path)
-
-        url …
-        …
+
+        url = BASE_URL + str(path)
+
         self.tdenv.NOTE("Checking for update to '{}'.", path)
         # Use an HTTP Request header to obtain the Last-Modified and Content-Length headers.
         # Also, tell the server to give us the un-compressed length of the file by saying
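downloadFile() decides whether to fetch at all by comparing the server's Last-Modified header against the local file's mtime (the same timestamp it later writes back with os.utime). Below is a stripped-down sketch of that check, assuming a plain requests.head() call; the hunk that follows does not show which request method the plugin actually uses.

from email.utils import parsedate_to_datetime
from pathlib import Path
import requests

def remote_is_newer(url: str, local: Path) -> bool:
    # HEAD request: only the Last-Modified header is needed, not the body.
    response = requests.head(url, timeout=90)
    last_modified = response.headers.get("last-modified")
    if not last_modified:
        return True  # no header: assume a download is needed
    dump_mod_time = parsedate_to_datetime(last_modified).timestamp()
    return (not local.exists()) or local.stat().st_mtime < dump_mod_time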
@@ -137,277 +154,264 @@
         except Exception as e:  # pylint: disable=broad-exception-caught
             self.tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", url, str(e))
             return False
-
+
         last_modified = response.headers.get("last-modified")
         dump_mod_time = parsedate_to_datetime(last_modified).timestamp()
-
+
         if Path.exists(localPath):
             local_mod_time = localPath.stat().st_mtime
             if local_mod_time >= dump_mod_time:
                 self.tdenv.DEBUG0("'{}': Dump is not more recent than Local.", path)
                 return False
-
+
         # The server doesn't know the gzip'd length, and we won't see the gzip'd data,
         # so we want the actual text-only length. Capture it here so we can tell the
         # transfer mechanism how big the file is going to be.
         length = response.headers.get("content-length")
-
+
         self.tdenv.NOTE("Downloading file '{}'.", path)
         transfers.download(self.tdenv, url, localPath, chunkSize=16384, length=length)
-
+
         # Change the timestamps on the file so they match the website
         os.utime(localPath, (dump_mod_time, dump_mod_time))
-
+
         return True
-
+
     def purgeSystems(self):
         """
         Purges systems from the System table that do not have any stations claiming to be in them.
         Keeps table from becoming too large because of fleet carriers moving to unpopulated systems.
         """
-        db = self.tdb.getDB()
         self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
-        … (7 deleted lines not shown)
+
+        Session = self.tdb.Session
+        with Session.begin() as session:
+            subq = select(SA.Station.system_id).where(SA.Station.system_id == SA.System.system_id)
+            stmt = delete(SA.System).where(~exists(subq))
+            session.execute(stmt)
+
         self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
-
+
     def importListings(self, listings_file):
         """
-        Updates the market data ( …
-        …
+        Updates the market data (StationItem) using `listings_file`.
+
+        Rules:
+        - If a row doesn't exist in DB → insert (copy CSV exactly).
+        - If it exists → update only when CSV.modified > DB.modified.
+        - If CSV.modified <= DB.modified → do nothing (no field changes).
         """
+        from tradedangerous.db.utils import (
+            get_import_batch_size,
+            begin_bulk_mode,
+            end_bulk_mode,
+            get_upsert_fn,
+        )
+
         listings_path = Path(self.dataPath, listings_file).absolute()
-        from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
-        …
+        from_live = int(listings_path != Path(self.dataPath, self.listingsPath).absolute())
+
         self.tdenv.NOTE("Checking listings")
         total = _count_listing_entries(self.tdenv, listings_path)
        if not total:
             self.tdenv.NOTE("No listings")
             return
-        …
-        self.tdenv.NOTE( …
-        … (9 deleted lines not shown)
-        )
-        …
-            ?, ?, datetime(?, 'unixepoch'), ?,
-            ?, ?, ?,
-            ?, ?, ?
-        )
-        """
-        …
-        # Fetch all the items IDS
-        item_lookup = _make_item_id_lookup(self.tdenv, db.cursor())
-        station_lookup = _make_station_id_lookup(self.tdenv, db.cursor())
-        last_station_update_times = _collect_station_modified_times(self.tdenv, db.cursor())
-        …
-        cur_station = None
+
+        self.tdenv.NOTE(
+            "Processing market data from {}: Start time = {}. Live = {}",
+            listings_file, self.now(), bool(from_live)
+        )
+
+        Session = self.tdb.Session
+
+        # Prefetch item/station IDs for early filtering
+        with Session.begin() as session:
+            item_lookup = _make_item_id_lookup(self.tdenv, session)
+            station_lookup = _make_station_id_lookup(self.tdenv, session)
+
         is_debug = self.tdenv.debug > 0
         self.tdenv.DEBUG0("Processing entries...")
-        … (35 deleted lines not shown)
-                    transaction_items += 1
-                    skip_station = True
-                    continue
-
-                # Unless the import file data is newer, nothing else needs to be done for this station,
-                # so the rest of the listings for this station can be skipped.
-                if listing_time <= last_modified:
-                    skip_station = True
+
+        with pbar.Progress(total, 40, prefix="Processing", style=pbar.LongRunningCountBar) as prog, \
+             listings_path.open("r", encoding="utf-8", errors="ignore") as fh, \
+             Session() as session:
+
+            token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
+            try:
+                commit_batch = get_import_batch_size(session, profile="eddblink")
+                execute_batch = commit_batch or 10000  # cap statement size even if single final commit
+
+                # Upsert: keys + guarded fields (including from_live), guarded by 'modified'
+                table = SA.StationItem.__table__
+                key_cols = ("station_id", "item_id")
+                update_cols = (
+                    "demand_price", "demand_units", "demand_level",
+                    "supply_price", "supply_units", "supply_level",
+                    "from_live",
+                )
+                upsert = get_upsert_fn(
+                    session,
+                    table,
+                    key_cols=key_cols,
+                    update_cols=update_cols,
+                    modified_col="modified",
+                    always_update=(),  # IMPORTANT: no unconditional updates
+                )
+
+                batch_rows = []
+                since_commit = 0
+
+                for listing in csv.DictReader(fh):
+                    prog.increment(1)
+                    try:
+                        station_id = int(listing["station_id"])
+                        if station_id not in station_lookup:
                             continue
-        … (2 deleted lines not shown)
-                if …
-        … (32 deleted lines not shown)
+
+                        item_id = int(listing["commodity_id"])
+                        if item_id not in item_lookup:
+                            continue  # skip rare items (not in Item table)
+
+                        listing_time = int(listing["collected_at"])
+                        dt_listing_time = datetime.datetime.utcfromtimestamp(listing_time)
+
+                        row = {
+                            "station_id": station_id,
+                            "item_id": item_id,
+                            "modified": dt_listing_time,  # guard column
+                            "from_live": from_live,  # copied exactly when updating/inserting
+                            "demand_price": int(listing["sell_price"]),
+                            "demand_units": int(listing["demand"]),
+                            "demand_level": int(listing.get("demand_bracket") or "-1"),
+                            "supply_price": int(listing["buy_price"]),
+                            "supply_units": int(listing["supply"]),
+                            "supply_level": int(listing.get("supply_bracket") or "-1"),
+                        }
+                        batch_rows.append(row)
+                        since_commit += 1
+
+                        if len(batch_rows) >= execute_batch:
+                            upsert(batch_rows)
+                            batch_rows.clear()
+
+                        if commit_batch and since_commit >= commit_batch:
+                            session.commit()
+                            since_commit = 0
+
+                    except Exception as e:  # pylint: disable=broad-exception-caught
+                        self.tdenv.WARN("Bad listing row (skipped): {} error: {}", listing, e)
+                        continue
+
+                if batch_rows:
+                    upsert(batch_rows)
+                    batch_rows.clear()
+
+                session.commit()
+
+            finally:
+                end_bulk_mode(session, token)
+
         with pbar.Progress(1, 40, prefix="Saving"):
-            … (3 deleted lines not shown)
+            pass
+
         if self.getOption("optimize"):
             with pbar.Progress(1, 40, prefix="Optimizing"):
-                … (4 deleted lines not shown)
+                if self.tdb.engine.dialect.name == "sqlite":
+                    with Session.begin() as session:
+                        session.execute(text("VACUUM"))
+
         self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
-        …
+
+
     def run(self):
+        """
+        EDDN/EDDB link importer.
+
+        Refactored DB flow:
+        - No dialect-specific logic in the plugin.
+        - Preflight uses TradeDB.reloadCache() (which centralizes sanity via lifecycle.ensure_fresh_db).
+        - For '--clean' → do a single full rebuild with the RareItem dance.
+        - Otherwise, if static CSVs changed → incrementally import only those tables (no drop/recreate).
+        - Listings import and .prices regeneration unchanged.
+        """
+        import os
+        import time
+        from pathlib import Path
+        from tradedangerous import cache
+        # bulk-mode helpers for the incremental static import session
+        from tradedangerous.db.utils import begin_bulk_mode, end_bulk_mode
+
         self.tdenv.ignoreUnknown = True
-        … (3 deleted lines not shown)
-            Path(str(self.dataPath)).mkdir()
-        except FileExistsError:
-            pass
-        …
-        # Run 'listings' by default:
-        # If no options, or if only 'force', and/or 'skipvend',
-        # have been passed, enable 'listings'.
+        self.tdb.dataPath.mkdir(parents=True, exist_ok=True)
+
+        # Enable 'listings' by default unless other explicit options are present
         default = True
         for option in self.options:
             if option not in ('force', 'skipvend', 'purge'):
                 default = False
         if default:
             self.options["listings"] = True
-        …
-        # …
-        # …
-        …
-            self.options["clean"] = True
-        …
+
+        # -----------------------------
+        # Optional CLEAN: prepare inputs
+        # -----------------------------
         if self.getOption("clean"):
-            # …
-            # Can be done at anytime with the "clean" option.
+            # Remove CSVs so downloads become the new source of truth
             for name in [
-                "Category",
-                " …
-                " …
-                " …
-                " …
-                "Station",
-                "System",
-                "Upgrade",
-                "UpgradeVendor",
-                "FDevShipyard",
-                "FDevOutfitting",
+                "Category", "Item", "RareItem",
+                "Ship", "ShipVendor",
+                "Station", "System",
+                "Upgrade", "UpgradeVendor",
+                "FDevShipyard", "FDevOutfitting",
             ]:
-                …
+                f = self.tdb.dataPath / f"{name}.csv"
                 try:
-                    os.remove(str( …
+                    os.remove(str(f))
                 except FileNotFoundError:
                     pass
-            … (2 deleted lines not shown)
-                os.remove(str(self.tdb.dataPath) + "/TradeDangerous.db")
-            except FileNotFoundError:
-                pass
+
+            # Remove .prices (will be regenerated later)
             try:
-                os.remove(str(self.tdb.dataPath …
+                os.remove(str(self.tdb.dataPath / "TradeDangerous.prices"))
             except FileNotFoundError:
                 pass
-            …
-            # …
-            … (9 deleted lines not shown)
-            self.tdb.close()
-            …
-            self.tdb.reloadCache()
-            self.tdb.close()
-            …
-            # Now it's safe to move RareItems back.
-            if ri_path.exists():
-                ri_path.unlink()
-            if rib_path.exists():
-                rib_path.rename(ri_path)
-            …
+
+            # Stash RareItem.csv so a full rebuild doesn't hit FK issues
+            self._ri_path = self.tdb.dataPath / "RareItem.csv"
+            self._rib_path = self._ri_path.with_suffix(".tmp")
+            if self._ri_path.exists():
+                if self._rib_path.exists():
+                    self._rib_path.unlink()
+                self._ri_path.rename(self._rib_path)
+
+            # Full update after downloads
             self.options["all"] = True
             self.options["force"] = True
-        …
-        # …
+
+        # --------------------------------
+        # Option cascade (unchanged logic)
+        # --------------------------------
         if self.getOption("listings"):
             self.options["item"] = True
             self.options["station"] = True
-
+
         if self.getOption("shipvend"):
             self.options["ship"] = True
             self.options["station"] = True
-
+
         if self.getOption("upvend"):
             self.options["upgrade"] = True
             self.options["station"] = True
-
+
         if self.getOption("item"):
             self.options["station"] = True
-
+
         if self.getOption("rare"):
             self.options["station"] = True
-
+
         if self.getOption("station"):
             self.options["system"] = True
-
+
         if self.getOption("all"):
             self.options["item"] = True
             self.options["rare"] = True
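get_upsert_fn() lives in the new tradedangerous/db/utils.py and is not part of this diff. On the SQLite backend, a "modified"-guarded upsert of the shape used by importListings() above is typically built with the dialect's insert().on_conflict_do_update() plus a WHERE guard on the excluded row; the following is a hedged sketch of one plausible implementation, not the package's actual code.

from sqlalchemy.dialects.sqlite import insert as sqlite_insert

def make_station_item_upsert(session, table):
    """Return an upsert(rows) callable guarded by the 'modified' column (sketch only)."""
    update_cols = (
        "demand_price", "demand_units", "demand_level",
        "supply_price", "supply_units", "supply_level",
        "from_live", "modified",
    )

    def upsert(rows: list[dict]) -> None:
        stmt = sqlite_insert(table).values(rows)
        stmt = stmt.on_conflict_do_update(
            index_elements=["station_id", "item_id"],
            set_={col: stmt.excluded[col] for col in update_cols},
            # Only overwrite when the incoming row is strictly newer.
            where=stmt.excluded.modified > table.c.modified,
        )
        session.execute(stmt)

    return upsert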
@@ -418,73 +422,205 @@ class ImportPlugin(plugins.ImportPluginBase):
             self.options["upgrade"] = True
             self.options["upvend"] = True
             self.options["listings"] = True
-
+
         if self.getOption("solo"):
             self.options["listings"] = False
             self.options["skipvend"] = True
-
+
         if self.getOption("skipvend"):
             self.options["shipvend"] = False
             self.options["upvend"] = False
-        …
-        # …
-        …
+
+        # ---------------------------------------------
+        # Downloads — track which static CSVs changed
+        # ---------------------------------------------
+        changed = {
+            "System": False,
+            "Station": False,
+            "Category": False,
+            "Item": False,
+            "RareItem": False,
+            "Ship": False,
+            "ShipVendor": False,
+            "Upgrade": False,
+            "UpgradeVendor": False,
+            "FDevShipyard": False,
+            "FDevOutfitting": False,
+        }
+
+        # EDCD mirrors
         if self.getOption("upgrade"):
             if self.downloadFile(self.upgradesPath) or self.getOption("force"):
                 transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
-                … (2 deleted lines not shown)
+                changed["Upgrade"] = True
+                changed["FDevOutfitting"] = True
+
         if self.getOption("ship"):
             if self.downloadFile(self.shipPath) or self.getOption("force"):
                 transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
-                … (2 deleted lines not shown)
+                changed["Ship"] = True
+                changed["FDevShipyard"] = True
+
+        # Core static tables
         if self.getOption("rare"):
             if self.downloadFile(self.rareItemPath) or self.getOption("force"):
-                … (2 deleted lines not shown)
+                changed["RareItem"] = True
+
         if self.getOption("shipvend"):
             if self.downloadFile(self.shipVendorPath) or self.getOption("force"):
-                … (2 deleted lines not shown)
+                changed["ShipVendor"] = True
+
         if self.getOption("upvend"):
             if self.downloadFile(self.upgradeVendorPath) or self.getOption("force"):
-                … (2 deleted lines not shown)
+                changed["UpgradeVendor"] = True
+
         if self.getOption("system"):
             if self.downloadFile(self.sysPath) or self.getOption("force"):
-                … (2 deleted lines not shown)
+                changed["System"] = True
+
         if self.getOption("station"):
             if self.downloadFile(self.stationsPath) or self.getOption("force"):
-                … (2 deleted lines not shown)
+                changed["Station"] = True
+
         if self.getOption("item"):
             if self.downloadFile(self.commoditiesPath) or self.getOption("force"):
                 self.downloadFile(self.categoriesPath)
-                … (5 deleted lines not shown)
+                changed["Item"] = True
+                changed["Category"] = True
+
+        # -------------------------------------------------------------
+        # Preflight sanity (user-visible): make the pause explicit
+        # -------------------------------------------------------------
+        ri_path = getattr(self, "_ri_path", self.tdb.dataPath / "RareItem.csv")
+        rib_path = getattr(self, "_rib_path", ri_path.with_suffix(".tmp"))
+        rareitem_stashed = False
+        self.tdenv.NOTE("Preflight: verifying database (this can take a while on first run)...")
+        t0 = time.monotonic()
+        try:
+            if ri_path.exists():
+                if not rib_path.exists() and not self.getOption("clean"):
+                    ri_path.rename(rib_path)
+                    rareitem_stashed = True
+
+            # This may no-op or may call buildCache() internally
             self.tdb.reloadCache()
+        finally:
+            if rib_path.exists() and (self.getOption("clean") or rareitem_stashed):
+                if ri_path.exists():
+                    ri_path.unlink()
+                rib_path.rename(ri_path)
+        t1 = time.monotonic()
+        self.tdenv.NOTE("Preflight complete in {:.1f}s.", (t1 - t0))
+
+        # -----------------------------------------------------
+        # Rebuild or Incremental Import?
+        # -----------------------------------------------------
+        if self.getOption("clean"):
+            self.tdenv.NOTE("Performing full rebuild...")
+            self.tdb.close()
+            cache.buildCache(self.tdb, self.tdenv)
             self.tdb.close()
-            …
+            self.tdenv.NOTE("Full rebuild complete.")
+        else:
+            # Incremental import of only changed tables (no schema drop)
+            IMPORT_ORDER = [
+                "System",
+                "Station",
+                "Category",
+                "Item",
+                "RareItem",
+                "Ship",
+                "ShipVendor",
+                "Upgrade",
+                "UpgradeVendor",
+                "FDevShipyard",
+                "FDevOutfitting",
+            ]
+
+            any_changed = any(changed.values())
+            if any_changed:
+                self.tdenv.NOTE("Incremental import starting ({} tables changed).", sum(1 for v in changed.values() if v))
+                with self.tdb.Session() as session:
+                    token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
+                    try:
+                        for table_name in IMPORT_ORDER:
+                            if not changed.get(table_name):
+                                continue
+                            import_path = (self.tdb.dataPath / f"{table_name}.csv").resolve()
+                            try:
+                                # Determine a cheap per-table total (header-aware) for display only.
+                                try:
+                                    total = max(file_line_count(import_path) - 1, 0)
+                                except Exception:
+                                    total = 0
+
+                                prefix = f"Processing {table_name}"
+                                # Mirror listings-style progress: single-line if TTY, periodic otherwise.
+                                with pbar.Progress(total or 1, 40, prefix=prefix, style=pbar.LongRunningCountBar) as prog:
+
+                                    def _cb(stats=None, **kwargs):
+                                        """
+                                        Liberal progress callback used by cache.processImportFile.
+                                        Accepts either:
+                                        - int → increment by that many rows
+                                        - dict with keys inc/rows/count → increment by that value
+                                        - anything else → default increment of 1
+                                        """
+                                        inc = 1
+                                        if isinstance(stats, int):
+                                            inc = max(int(stats), 1)
+                                        elif isinstance(stats, dict):
+                                            for k in ("inc", "rows", "count"):
+                                                if k in stats:
+                                                    try:
+                                                        inc = max(int(stats[k]), 1)
+                                                        break
+                                                    except Exception:
+                                                        pass
+                                        prog.increment(inc)
+
+                                    cache.processImportFile(
+                                        self.tdenv,
+                                        session,
+                                        import_path,
+                                        table_name,
+                                        line_callback=_cb,
+                                        call_args={"table": table_name, "total": total},
+                                    )
+
+                                session.commit()
+                                self.tdenv.DEBUG0("Incremental import OK: {} ({})", table_name, import_path)
+
+                            except FileNotFoundError:
+                                self.tdenv.NOTE("{} missing; skipped incremental import ({})", table_name, import_path)
+                            except StopIteration:
+                                self.tdenv.NOTE("{} exists but is empty; skipped incremental import ({})", table_name, import_path)
+                            except Exception as e:
+                                self.tdenv.WARN("Incremental import failed for {}: {} ({})", table_name, e, import_path)
+                                session.rollback()
+                                self.tdenv.NOTE("Escalating to full rebuild due to import failure.")
+                                self.tdb.close()
+                                cache.buildCache(self.tdb, self.tdenv)
+                                self.tdb.close()
+                                break
+                    finally:
+                        end_bulk_mode(session, token)
+                self.tdenv.NOTE("Incremental import finished.")
+
+
         if self.getOption("purge"):
             self.purgeSystems()
-        … (2 deleted lines not shown)
+
+        # Listings import (prices)
         if self.getOption("listings"):
             if self.downloadFile(self.listingsPath) or self.getOption("force"):
                 self.importListings(self.listingsPath)
             if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
                 self.importListings(self.liveListingsPath)
-        …
+
         if self.getOption("listings"):
             self.tdenv.NOTE("Regenerating .prices file.")
             cache.regeneratePricesFile(self.tdb, self.tdenv)
-        …
+
         self.tdenv.NOTE("Import completed.")
-        …
-        # TD doesn't need to do anything, tell it to just quit.
         return False
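The option cascade in run() (listings → item + station, item → station, station → system, and so on) resolves transitively, so an invocation such as '-O listings' also pulls in the item, station, and system tables. The small helper below restates just the implications visible in this diff as a pure function; it is an illustration for reasoning about which CSVs a run will touch, not part of the plugin.

def resolve_options(requested: set[str]) -> set[str]:
    # Positive implications only, as shown in the diff above; 'solo' and
    # 'skipvend' (which switch options off) are not modelled here.
    implied = {
        "listings": {"item", "station"},
        "shipvend": {"ship", "station"},
        "upvend": {"upgrade", "station"},
        "item": {"station"},
        "rare": {"station"},
        "station": {"system"},
        "all": {"item", "rare", "upgrade", "upvend", "listings"},
    }
    resolved = set(requested)
    changed = True
    while changed:
        changed = False
        for opt in tuple(resolved):
            missing = implied.get(opt, set()) - resolved
            if missing:
                resolved |= missing
                changed = True
    return resolved

# e.g. resolve_options({"listings"}) -> {"listings", "item", "station", "system"}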