tradedangerous 11.5.2-py3-none-any.whl → 12.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: the registry has flagged this release as potentially problematic.
- tradedangerous/cache.py +567 -395
- tradedangerous/cli.py +2 -2
- tradedangerous/commands/TEMPLATE.py +25 -26
- tradedangerous/commands/__init__.py +8 -16
- tradedangerous/commands/buildcache_cmd.py +40 -10
- tradedangerous/commands/buy_cmd.py +57 -46
- tradedangerous/commands/commandenv.py +0 -2
- tradedangerous/commands/export_cmd.py +78 -50
- tradedangerous/commands/import_cmd.py +70 -34
- tradedangerous/commands/market_cmd.py +52 -19
- tradedangerous/commands/olddata_cmd.py +120 -107
- tradedangerous/commands/rares_cmd.py +122 -110
- tradedangerous/commands/run_cmd.py +118 -66
- tradedangerous/commands/sell_cmd.py +52 -45
- tradedangerous/commands/shipvendor_cmd.py +49 -234
- tradedangerous/commands/station_cmd.py +55 -485
- tradedangerous/commands/update_cmd.py +56 -420
- tradedangerous/csvexport.py +173 -162
- tradedangerous/gui.py +2 -2
- tradedangerous/plugins/eddblink_plug.py +389 -252
- tradedangerous/plugins/spansh_plug.py +2488 -821
- tradedangerous/prices.py +124 -142
- tradedangerous/templates/TradeDangerous.sql +6 -6
- tradedangerous/tradecalc.py +1227 -1109
- tradedangerous/tradedb.py +533 -384
- tradedangerous/tradeenv.py +12 -1
- tradedangerous/version.py +1 -1
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/METADATA +17 -4
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/RECORD +33 -39
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/WHEEL +1 -1
- tradedangerous/commands/update_gui.py +0 -721
- tradedangerous/jsonprices.py +0 -254
- tradedangerous/plugins/edapi_plug.py +0 -1071
- tradedangerous/plugins/journal_plug.py +0 -537
- tradedangerous/plugins/netlog_plug.py +0 -316
- tradedangerous/templates/database_changes.json +0 -6
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/entry_points.txt +0 -0
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info/licenses}/LICENSE +0 -0
- {tradedangerous-11.5.2.dist-info → tradedangerous-12.0.0.dist-info}/top_level.txt +0 -0
tradedangerous/plugins/eddblink_plug.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 """
 Import plugin that uses data files from
 https://elite.tromador.com/ to update the Database.
 """
-
+

 from email.utils import parsedate_to_datetime
 from pathlib import Path
-from ..
+from ..fs import file_line_count
 from .. import plugins, cache, transfers
 from ..misc import progress as pbar
 from ..plugins import PluginException
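The `from __future__ import annotations` added at the top is what allows the rewritten helpers further down to annotate returns as `frozenset[int]` and `dict[int, int]`: under PEP 563 annotations are stored as strings rather than evaluated at import time, so the module still imports on Pythons where built-in generics are not subscriptable (pre-3.9). A minimal illustration, independent of the plugin:

    from __future__ import annotations

    # Without the future import, Python 3.8 raises
    # "TypeError: 'type' object is not subscriptable" at definition time;
    # with it, the annotation stays an unevaluated string.
    def station_ids() -> frozenset[int]:
        return frozenset((1, 2, 3))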
@@ -15,13 +17,15 @@ import csv
 import datetime
 import os
 import requests
-import sqlite3
 import typing

+from sqlalchemy.orm import Session
+from sqlalchemy import func, delete, select, exists, text
+from ..db import orm_models as SA, lifecycle

 if typing.TYPE_CHECKING:
     from typing import Optional
-    from ..
+    from ..tradeenv import TradeEnv

 # Constants
 BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"
@@ -36,7 +40,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
     if not listings.exists():
         tdenv.NOTE("File not found, aborting: {}", listings)
         return 0
-
+
     tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
     count = file_line_count(listings)
     if count <= 1:
@@ -45,31 +49,45 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
     else:
         tdenv.DEBUG0("Listings file is empty, nothing to do.")
         return 0
-
+
     return count + 1  # kfsone: Doesn't the header already make this + 1?


-def _make_item_id_lookup(tdenv: TradeEnv,
-    """
+def _make_item_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
+    """Helper: retrieve the list of commodities in database."""
     tdenv.DEBUG0("Getting list of commodities...")
-
+    rows = session.query(SA.Item.item_id).all()
+    return frozenset(r[0] for r in rows)


-def _make_station_id_lookup(tdenv: TradeEnv,
-    """
+def _make_station_id_lookup(tdenv: TradeEnv, session: Session) -> frozenset[int]:
+    """Helper: retrieve the list of station IDs in database."""
     tdenv.DEBUG0("Getting list of stations...")
-
+    rows = session.query(SA.Station.station_id).all()
+    return frozenset(r[0] for r in rows)


-def _collect_station_modified_times(tdenv: TradeEnv,
-    """
+def _collect_station_modified_times(tdenv: TradeEnv, session: Session) -> dict[int, int]:
+    """Helper: build a list of the last modified time for all stations by id (epoch seconds)."""
     tdenv.DEBUG0("Getting last-update times for stations...")
-
+    rows = (
+        session.query(
+            SA.StationItem.station_id,
+            func.min(SA.StationItem.modified),
+        )
+        .group_by(SA.StationItem.station_id)
+        .all()
+    )
+    return {
+        station_id: int(modified.timestamp()) if modified else 0
+        for station_id, modified in rows
+    }


 class ImportPlugin(plugins.ImportPluginBase):
     """
-
+    Import plugin that uses data files from
+    https://elite.tromador.com/ to update the Database.
     """
     pluginOptions = {
         'item': "Update Items using latest file from server. (Implies '-O system,station')",
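All three lookup helpers now take an open SQLAlchemy `Session` in place of the old sqlite3 cursor and return plain Python collections, so a caller can prefetch everything in one short transaction and do per-row filtering in memory. A sketch of how they compose; `tdb` is assumed to be an initialized TradeDB exposing the `Session` factory this release adds, `tdenv` its TradeEnv, and `cutoff` a caller-chosen epoch threshold:

    # Prefetch once, then filter without any further SQL (sketch, not shipped code).
    with tdb.Session.begin() as session:  # commits (or rolls back) on exit
        item_ids = _make_item_id_lookup(tdenv, session)                # frozenset[int]
        station_ids = _make_station_id_lookup(tdenv, session)          # frozenset[int]
        last_update = _collect_station_modified_times(tdenv, session)  # {station_id: epoch}

    # Membership tests and age checks are now O(1) set/dict lookups:
    stale = [sid for sid in station_ids if last_update.get(sid, 0) < cutoff]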
@@ -90,10 +108,10 @@ class ImportPlugin(plugins.ImportPluginBase):
         'optimize': "Optimize ('vacuum') database after processing.",
         'solo': "Don't download crowd-sourced market data. (Implies '-O skipvend', supercedes '-O all', '-O clean', '-O listings'.)",
     }
-
+
     def __init__(self, tdb, tdenv):
         super().__init__(tdb, tdenv)
-
+
         self.dataPath = os.environ.get('TD_EDDB') or self.tdenv.tmpDir
         self.categoriesPath = Path("Category.csv")
         self.commoditiesPath = Path("Item.csv")
@@ -111,10 +129,10 @@ class ImportPlugin(plugins.ImportPluginBase):
         self.listingsPath = Path("listings.csv")
         self.liveListingsPath = Path("listings-live.csv")
         self.pricesPath = Path("listings.prices")
-
+
     def now(self):
         return datetime.datetime.now()
-
+
     def downloadFile(self, path):
         """
         Fetch the latest dumpfile from the website if newer than local copy.
@@ -123,9 +141,9 @@ class ImportPlugin(plugins.ImportPluginBase):
             localPath = Path(self.tdb.dataPath, path)
         else:
             localPath = Path(self.dataPath, path)
-
-        url
-
+
+        url = BASE_URL + str(path)
+
         self.tdenv.NOTE("Checking for update to '{}'.", path)
         # Use an HTTP Request header to obtain the Last-Modified and Content-Length headers.
         # Also, tell the server to give us the un-compressed length of the file by saying
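`downloadFile` keeps the same freshness protocol as before: read the server's `Last-Modified` response header, compare it with the local file's mtime, and, after a successful download, stamp the file with `os.utime` so the next run's comparison still holds. A self-contained sketch of just the staleness test; `is_stale` is a hypothetical helper, and a plain HEAD request is assumed where the plugin uses its own request setup and accept-encoding handling:

    import requests
    from email.utils import parsedate_to_datetime
    from pathlib import Path

    def is_stale(url: str, local: Path) -> bool:
        # True when there is no local copy, or the server copy is newer.
        resp = requests.head(url, timeout=30)
        resp.raise_for_status()
        remote_mtime = parsedate_to_datetime(resp.headers["last-modified"]).timestamp()
        return (not local.exists()) or local.stat().st_mtime < remote_mtime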
@@ -136,277 +154,264 @@ class ImportPlugin(plugins.ImportPluginBase):
         except Exception as e:  # pylint: disable=broad-exception-caught
             self.tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", url, str(e))
             return False
-
+
         last_modified = response.headers.get("last-modified")
         dump_mod_time = parsedate_to_datetime(last_modified).timestamp()
-
+
         if Path.exists(localPath):
             local_mod_time = localPath.stat().st_mtime
             if local_mod_time >= dump_mod_time:
                 self.tdenv.DEBUG0("'{}': Dump is not more recent than Local.", path)
                 return False
-
+
         # The server doesn't know the gzip'd length, and we won't see the gzip'd data,
         # so we want the actual text-only length. Capture it here so we can tell the
         # transfer mechanism how big the file is going to be.
         length = response.headers.get("content-length")
-
+
         self.tdenv.NOTE("Downloading file '{}'.", path)
         transfers.download(self.tdenv, url, localPath, chunkSize=16384, length=length)
-
+
         # Change the timestamps on the file so they match the website
         os.utime(localPath, (dump_mod_time, dump_mod_time))
-
+
         return True
-
+
     def purgeSystems(self):
         """
         Purges systems from the System table that do not have any stations claiming to be in them.
         Keeps table from becoming too large because of fleet carriers moving to unpopulated systems.
         """
-        db = self.tdb.getDB()
         self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
-
-
-
-
-
-
-
+
+        Session = self.tdb.Session
+        with Session.begin() as session:
+            subq = select(SA.Station.system_id).where(SA.Station.system_id == SA.System.system_id)
+            stmt = delete(SA.System).where(~exists(subq))
+            session.execute(stmt)
+
         self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
-
+
     def importListings(self, listings_file):
         """
-        Updates the market data (
-
+        Updates the market data (StationItem) using `listings_file`.
+
+        Rules:
+          - If a row doesn't exist in DB → insert (copy CSV exactly).
+          - If it exists → update only when CSV.modified > DB.modified.
+          - If CSV.modified <= DB.modified → do nothing (no field changes).
         """
+        from tradedangerous.db.utils import (
+            get_import_batch_size,
+            begin_bulk_mode,
+            end_bulk_mode,
+            get_upsert_fn,
+        )
+
         listings_path = Path(self.dataPath, listings_file).absolute()
-        from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
-
+        from_live = int(listings_path != Path(self.dataPath, self.listingsPath).absolute())
+
         self.tdenv.NOTE("Checking listings")
         total = _count_listing_entries(self.tdenv, listings_path)
         if not total:
             self.tdenv.NOTE("No listings")
             return
-
-        self.tdenv.NOTE(
-
-
-
-
-
-
-
-
-        )
-
-            ?, ?, datetime(?, 'unixepoch'), ?,
-            ?, ?, ?,
-            ?, ?, ?
-        )
-        """
-
-        # Fetch all the items IDS
-        item_lookup = _make_item_id_lookup(self.tdenv, db.cursor())
-        station_lookup = _make_station_id_lookup(self.tdenv, db.cursor())
-        last_station_update_times = _collect_station_modified_times(self.tdenv, db.cursor())
-
-        cur_station = None
+
+        self.tdenv.NOTE(
+            "Processing market data from {}: Start time = {}. Live = {}",
+            listings_file, self.now(), bool(from_live)
+        )
+
+        Session = self.tdb.Session
+
+        # Prefetch item/station IDs for early filtering
+        with Session.begin() as session:
+            item_lookup = _make_item_id_lookup(self.tdenv, session)
+            station_lookup = _make_station_id_lookup(self.tdenv, session)
+
         is_debug = self.tdenv.debug > 0
         self.tdenv.DEBUG0("Processing entries...")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    transaction_items += 1
-                    skip_station = True
-                    continue
-
-                # Unless the import file data is newer, nothing else needs to be done for this station,
-                # so the rest of the listings for this station can be skipped.
-                if listing_time <= last_modified:
-                    skip_station = True
+
+        with pbar.Progress(total, 40, prefix="Processing", style=pbar.LongRunningCountBar) as prog, \
+             listings_path.open("r", encoding="utf-8", errors="ignore") as fh, \
+             Session() as session:
+
+            token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
+            try:
+                commit_batch = get_import_batch_size(session, profile="eddblink")
+                execute_batch = commit_batch or 10000  # cap statement size even if single final commit
+
+                # Upsert: keys + guarded fields (including from_live), guarded by 'modified'
+                table = SA.StationItem.__table__
+                key_cols = ("station_id", "item_id")
+                update_cols = (
+                    "demand_price", "demand_units", "demand_level",
+                    "supply_price", "supply_units", "supply_level",
+                    "from_live",
+                )
+                upsert = get_upsert_fn(
+                    session,
+                    table,
+                    key_cols=key_cols,
+                    update_cols=update_cols,
+                    modified_col="modified",
+                    always_update=(),  # IMPORTANT: no unconditional updates
+                )
+
+                batch_rows = []
+                since_commit = 0
+
+                for listing in csv.DictReader(fh):
+                    prog.increment(1)
+                    try:
+                        station_id = int(listing["station_id"])
+                        if station_id not in station_lookup:
                             continue
-
-
-                if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+                        item_id = int(listing["commodity_id"])
+                        if item_id not in item_lookup:
+                            continue  # skip rare items (not in Item table)
+
+                        listing_time = int(listing["collected_at"])
+                        dt_listing_time = datetime.datetime.utcfromtimestamp(listing_time)
+
+                        row = {
+                            "station_id": station_id,
+                            "item_id": item_id,
+                            "modified": dt_listing_time,  # guard column
+                            "from_live": from_live,  # copied exactly when updating/inserting
+                            "demand_price": int(listing["sell_price"]),
+                            "demand_units": int(listing["demand"]),
+                            "demand_level": int(listing.get("demand_bracket") or "-1"),
+                            "supply_price": int(listing["buy_price"]),
+                            "supply_units": int(listing["supply"]),
+                            "supply_level": int(listing.get("supply_bracket") or "-1"),
+                        }
+                        batch_rows.append(row)
+                        since_commit += 1
+
+                        if len(batch_rows) >= execute_batch:
+                            upsert(batch_rows)
+                            batch_rows.clear()
+
+                        if commit_batch and since_commit >= commit_batch:
+                            session.commit()
+                            since_commit = 0
+
+                    except Exception as e:  # pylint: disable=broad-exception-caught
+                        self.tdenv.WARN("Bad listing row (skipped): {} error: {}", listing, e)
+                        continue
+
+                if batch_rows:
+                    upsert(batch_rows)
+                    batch_rows.clear()
+
+                session.commit()
+
+            finally:
+                end_bulk_mode(session, token)
+
         with pbar.Progress(1, 40, prefix="Saving"):
-
-
-
+            pass
+
         if self.getOption("optimize"):
             with pbar.Progress(1, 40, prefix="Optimizing"):
-
-
-
-
+                if self.tdb.engine.dialect.name == "sqlite":
+                    with Session.begin() as session:
+                        session.execute(text("VACUUM"))
+
         self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
-
+
+
     def run(self):
+        """
+        EDDN/EDDB link importer.
+
+        Refactored DB flow:
+          - No dialect-specific logic in the plugin.
+          - Preflight uses TradeDB.reloadCache() (which centralizes sanity via lifecycle.ensure_fresh_db).
+          - For '--clean' → do a single full rebuild with the RareItem dance.
+          - Otherwise, if static CSVs changed → incrementally import only those tables (no drop/recreate).
+          - Listings import and .prices regeneration unchanged.
+        """
+        import os
+        import time
+        from pathlib import Path
+        from tradedangerous import cache
+        # bulk-mode helpers for the incremental static import session
+        from tradedangerous.db.utils import begin_bulk_mode, end_bulk_mode
+
         self.tdenv.ignoreUnknown = True
-
-
-
-            Path(str(self.dataPath)).mkdir()
-        except FileExistsError:
-            pass
-
-        # Run 'listings' by default:
-        # If no options, or if only 'force', and/or 'skipvend',
-        # have been passed, enable 'listings'.
+        self.tdb.dataPath.mkdir(parents=True, exist_ok=True)
+
+        # Enable 'listings' by default unless other explicit options are present
         default = True
         for option in self.options:
             if option not in ('force', 'skipvend', 'purge'):
                 default = False
         if default:
             self.options["listings"] = True
-
-        #
-        #
-
-            self.options["clean"] = True
-
+
+        # -----------------------------
+        # Optional CLEAN: prepare inputs
+        # -----------------------------
         if self.getOption("clean"):
-            #
-            # Can be done at anytime with the "clean" option.
+            # Remove CSVs so downloads become the new source of truth
             for name in [
-                "Category",
-                "
-                "
-                "
-                "
-                "Station",
-                "System",
-                "Upgrade",
-                "UpgradeVendor",
-                "FDevShipyard",
-                "FDevOutfitting",
+                "Category", "Item", "RareItem",
+                "Ship", "ShipVendor",
+                "Station", "System",
+                "Upgrade", "UpgradeVendor",
+                "FDevShipyard", "FDevOutfitting",
             ]:
-
+                f = self.tdb.dataPath / f"{name}.csv"
                 try:
-                    os.remove(str(
+                    os.remove(str(f))
                 except FileNotFoundError:
                     pass
-
-
-                os.remove(str(self.tdb.dataPath) + "/TradeDangerous.db")
-            except FileNotFoundError:
-                pass
+
+            # Remove .prices (will be regenerated later)
             try:
-                os.remove(str(self.tdb.dataPath
+                os.remove(str(self.tdb.dataPath / "TradeDangerous.prices"))
             except FileNotFoundError:
                 pass
-
-            #
-
-
-
-
-
-
-
-
-
-            self.tdb.close()
-
-            self.tdb.reloadCache()
-            self.tdb.close()
-
-            # Now it's safe to move RareItems back.
-            if ri_path.exists():
-                ri_path.unlink()
-            if rib_path.exists():
-                rib_path.rename(ri_path)
-
+
+            # Stash RareItem.csv so a full rebuild doesn't hit FK issues
+            self._ri_path = self.tdb.dataPath / "RareItem.csv"
+            self._rib_path = self._ri_path.with_suffix(".tmp")
+            if self._ri_path.exists():
+                if self._rib_path.exists():
+                    self._rib_path.unlink()
+                self._ri_path.rename(self._rib_path)
+
+            # Full update after downloads
             self.options["all"] = True
             self.options["force"] = True
-
-        #
+
+        # --------------------------------
+        # Option cascade (unchanged logic)
+        # --------------------------------
         if self.getOption("listings"):
             self.options["item"] = True
             self.options["station"] = True
-
+
         if self.getOption("shipvend"):
             self.options["ship"] = True
             self.options["station"] = True
-
+
         if self.getOption("upvend"):
             self.options["upgrade"] = True
             self.options["station"] = True
-
+
         if self.getOption("item"):
             self.options["station"] = True
-
+
        if self.getOption("rare"):
             self.options["station"] = True
-
+
         if self.getOption("station"):
             self.options["system"] = True
-
+
         if self.getOption("all"):
             self.options["item"] = True
             self.options["rare"] = True
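`get_upsert_fn` itself lives in `tradedangerous.db.utils` and is not part of this diff, but the docstring's rules (insert when missing, update only when the incoming `modified` is strictly newer, otherwise leave the row untouched) map directly onto a dialect-level upsert. A hypothetical sketch of such a factory for the SQLite backend, using SQLAlchemy's `on_conflict_do_update` with a `WHERE` guard; the shipped helper presumably also covers other dialects and the `always_update` knob:

    from sqlalchemy.dialects.sqlite import insert as sqlite_insert

    def make_guarded_upsert(session, table, key_cols, update_cols, modified_col):
        # Sketch only: emit one INSERT ... ON CONFLICT per batch of row-dicts.
        def upsert(rows):
            stmt = sqlite_insert(table).values(rows)
            stmt = stmt.on_conflict_do_update(
                index_elements=list(key_cols),
                set_={c: stmt.excluded[c] for c in (*update_cols, modified_col)},
                # The guard: only overwrite when the incoming row is strictly newer.
                where=stmt.excluded[modified_col] > table.c[modified_col],
            )
            session.execute(stmt)
        return upsert

The `WHERE` guard is what makes re-importing an older listings file a no-op: conflicting rows whose stored `modified` is already newer are left exactly as they were.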
@@ -417,73 +422,205 @@ class ImportPlugin(plugins.ImportPluginBase):
         self.options["upgrade"] = True
         self.options["upvend"] = True
         self.options["listings"] = True
-
+
         if self.getOption("solo"):
             self.options["listings"] = False
             self.options["skipvend"] = True
-
+
         if self.getOption("skipvend"):
             self.options["shipvend"] = False
             self.options["upvend"] = False
-
-        #
-
+
+        # ---------------------------------------------
+        # Downloads — track which static CSVs changed
+        # ---------------------------------------------
+        changed = {
+            "System": False,
+            "Station": False,
+            "Category": False,
+            "Item": False,
+            "RareItem": False,
+            "Ship": False,
+            "ShipVendor": False,
+            "Upgrade": False,
+            "UpgradeVendor": False,
+            "FDevShipyard": False,
+            "FDevOutfitting": False,
+        }
+
+        # EDCD mirrors
         if self.getOption("upgrade"):
             if self.downloadFile(self.upgradesPath) or self.getOption("force"):
                 transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
-
-
+                changed["Upgrade"] = True
+                changed["FDevOutfitting"] = True
+
         if self.getOption("ship"):
             if self.downloadFile(self.shipPath) or self.getOption("force"):
                 transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
-
-
+                changed["Ship"] = True
+                changed["FDevShipyard"] = True
+
+        # Core static tables
         if self.getOption("rare"):
             if self.downloadFile(self.rareItemPath) or self.getOption("force"):
-
-
+                changed["RareItem"] = True
+
         if self.getOption("shipvend"):
             if self.downloadFile(self.shipVendorPath) or self.getOption("force"):
-
-
+                changed["ShipVendor"] = True
+
         if self.getOption("upvend"):
             if self.downloadFile(self.upgradeVendorPath) or self.getOption("force"):
-
-
+                changed["UpgradeVendor"] = True
+
         if self.getOption("system"):
             if self.downloadFile(self.sysPath) or self.getOption("force"):
-
-
+                changed["System"] = True
+
         if self.getOption("station"):
             if self.downloadFile(self.stationsPath) or self.getOption("force"):
-
-
+                changed["Station"] = True
+
         if self.getOption("item"):
             if self.downloadFile(self.commoditiesPath) or self.getOption("force"):
                 self.downloadFile(self.categoriesPath)
-
-
-
-
-
+                changed["Item"] = True
+                changed["Category"] = True
+
+        # -------------------------------------------------------------
+        # Preflight sanity (user-visible): make the pause explicit
+        # -------------------------------------------------------------
+        ri_path = getattr(self, "_ri_path", self.tdb.dataPath / "RareItem.csv")
+        rib_path = getattr(self, "_rib_path", ri_path.with_suffix(".tmp"))
+        rareitem_stashed = False
+        self.tdenv.NOTE("Preflight: verifying database (this can take a while on first run)...")
+        t0 = time.monotonic()
+        try:
+            if ri_path.exists():
+                if not rib_path.exists() and not self.getOption("clean"):
+                    ri_path.rename(rib_path)
+                    rareitem_stashed = True
+
+            # This may no-op or may call buildCache() internally
             self.tdb.reloadCache()
+        finally:
+            if rib_path.exists() and (self.getOption("clean") or rareitem_stashed):
+                if ri_path.exists():
+                    ri_path.unlink()
+                rib_path.rename(ri_path)
+        t1 = time.monotonic()
+        self.tdenv.NOTE("Preflight complete in {:.1f}s.", (t1 - t0))
+
+        # -----------------------------------------------------
+        # Rebuild or Incremental Import?
+        # -----------------------------------------------------
+        if self.getOption("clean"):
+            self.tdenv.NOTE("Performing full rebuild...")
+            self.tdb.close()
+            cache.buildCache(self.tdb, self.tdenv)
             self.tdb.close()
-
+            self.tdenv.NOTE("Full rebuild complete.")
+        else:
+            # Incremental import of only changed tables (no schema drop)
+            IMPORT_ORDER = [
+                "System",
+                "Station",
+                "Category",
+                "Item",
+                "RareItem",
+                "Ship",
+                "ShipVendor",
+                "Upgrade",
+                "UpgradeVendor",
+                "FDevShipyard",
+                "FDevOutfitting",
+            ]
+
+            any_changed = any(changed.values())
+            if any_changed:
+                self.tdenv.NOTE("Incremental import starting ({} tables changed).", sum(1 for v in changed.values() if v))
+                with self.tdb.Session() as session:
+                    token = begin_bulk_mode(session, profile="eddblink", phase="incremental")
+                    try:
+                        for table_name in IMPORT_ORDER:
+                            if not changed.get(table_name):
+                                continue
+                            import_path = (self.tdb.dataPath / f"{table_name}.csv").resolve()
+                            try:
+                                # Determine a cheap per-table total (header-aware) for display only.
+                                try:
+                                    total = max(file_line_count(import_path) - 1, 0)
+                                except Exception:
+                                    total = 0
+
+                                prefix = f"Processing {table_name}"
+                                # Mirror listings-style progress: single-line if TTY, periodic otherwise.
+                                with pbar.Progress(total or 1, 40, prefix=prefix, style=pbar.LongRunningCountBar) as prog:
+
+                                    def _cb(stats=None, **kwargs):
+                                        """
+                                        Liberal progress callback used by cache.processImportFile.
+                                        Accepts either:
+                                          - int → increment by that many rows
+                                          - dict with keys inc/rows/count → increment by that value
+                                          - anything else → default increment of 1
+                                        """
+                                        inc = 1
+                                        if isinstance(stats, int):
+                                            inc = max(int(stats), 1)
+                                        elif isinstance(stats, dict):
+                                            for k in ("inc", "rows", "count"):
+                                                if k in stats:
+                                                    try:
+                                                        inc = max(int(stats[k]), 1)
+                                                        break
+                                                    except Exception:
+                                                        pass
+                                        prog.increment(inc)
+
+                                    cache.processImportFile(
+                                        self.tdenv,
+                                        session,
+                                        import_path,
+                                        table_name,
+                                        line_callback=_cb,
+                                        call_args={"table": table_name, "total": total},
+                                    )
+
+                                session.commit()
+                                self.tdenv.DEBUG0("Incremental import OK: {} ({})", table_name, import_path)
+
+                            except FileNotFoundError:
+                                self.tdenv.NOTE("{} missing; skipped incremental import ({})", table_name, import_path)
+                            except StopIteration:
+                                self.tdenv.NOTE("{} exists but is empty; skipped incremental import ({})", table_name, import_path)
+                            except Exception as e:
+                                self.tdenv.WARN("Incremental import failed for {}: {} ({})", table_name, e, import_path)
+                                session.rollback()
+                                self.tdenv.NOTE("Escalating to full rebuild due to import failure.")
+                                self.tdb.close()
+                                cache.buildCache(self.tdb, self.tdenv)
+                                self.tdb.close()
+                                break
+                    finally:
+                        end_bulk_mode(session, token)
+                self.tdenv.NOTE("Incremental import finished.")
+
+
         if self.getOption("purge"):
             self.purgeSystems()
-
-
+
+        # Listings import (prices)
         if self.getOption("listings"):
             if self.downloadFile(self.listingsPath) or self.getOption("force"):
                 self.importListings(self.listingsPath)
             if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
                 self.importListings(self.liveListingsPath)
-
+
         if self.getOption("listings"):
             self.tdenv.NOTE("Regenerating .prices file.")
             cache.regeneratePricesFile(self.tdb, self.tdenv)
-
+
         self.tdenv.NOTE("Import completed.")
-
-        # TD doesn't need to do anything, tell it to just quit.
         return False