tradedangerous 11.2.0__py3-none-any.whl → 11.3.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.


@@ -4,19 +4,18 @@ https://elite.tromador.com/ to update the Database.
 """
 from __future__ import annotations
 
+from email.utils import parsedate_to_datetime
 from pathlib import Path
+from .. fs import file_line_count
 from .. import plugins, cache, transfers
 from ..misc import progress as pbar
 from ..plugins import PluginException
 
-import certifi
 import csv
 import datetime
-from email.utils import parsedate_to_datetime
 import os
 import requests
 import sqlite3
-import ssl
 import typing
 
 
@@ -26,41 +25,12 @@ if typing.TYPE_CHECKING:
 
 # Constants
 BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"
-CONTEXT=ssl.create_default_context(cafile=certifi.where())
 
 
 class DecodingError(PluginException):
     pass
 
 
-def _file_line_count(from_file: Path, bufsize: int = 128 * 1024) -> int:
-    """ counts the number of newline characters in a given file. """
-    # Pre-allocate a buffer so we aren't putting pressure on the garbage collector.
-    buf = bytearray(bufsize)
-    
-    # Capture it's counting method, so we don't have to keep looking that up on
-    # large files.
-    counter = buf.count
-    
-    total = 0
-    with from_file.open("rb") as fh:
-        # Capture the 'readinto' method to avoid lookups.
-        reader = fh.readinto
-        
-        # read into the buffer and capture the number of bytes fetched,
-        # which will be 'size' until the last read from the file.
-        read = reader(buf)
-        while read == bufsize:  # nominal case for large files
-            total += counter(b'\n')
-            read = reader(buf)
-        
-        # when 0 <= read < bufsize we're on the last page of the
-        # file, so we need to take a slice of the buffer, which creates
-        # a new object and thus we also have to lookup count. it's trivial
-        # but if you have to do it 10,000x it's definitely not a rounding error.
-        return total + buf[:read].count(b'\n')
-
-
 def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
     """ Calculates the number of entries in a listing file by counting the lines. """
     if not listings.exists():
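
The buffered line counter was not deleted so much as promoted: the new "from .. fs import file_line_count" import above shows it now lives in the shared fs module. For reference, a standalone sketch of the same technique, with names mirroring the removed helper:

    from pathlib import Path
    
    def file_line_count(from_file: Path, bufsize: int = 128 * 1024) -> int:
        """Count newlines using one reusable buffer and readinto()."""
        buf = bytearray(bufsize)  # single allocation, reused for every read
        counter = buf.count       # bind the hot-loop attribute lookup once
        total = 0
        with from_file.open("rb") as fh:
            reader = fh.readinto
            read = reader(buf)
            while read == bufsize:  # nominal case: buffer completely full
                total += counter(b'\n')
                read = reader(buf)
            # final partial read: slice so stale bytes left over from the
            # previous pass through the buffer aren't counted again
            return total + buf[:read].count(b'\n')

Binding buf.count and fh.readinto to locals avoids repeated attribute lookups in the hot loop, which the original comments note is measurable when a listings file runs to millions of lines.
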
@@ -68,7 +38,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
         return 0
     
     tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
-    count = _file_line_count(listings)
+    count = file_line_count(listings)
     if count <= 1:
         if count == 1:
             tdenv.DEBUG0("Listing count of 1 suggests nothing but a header")
@@ -101,7 +71,6 @@ class ImportPlugin(plugins.ImportPluginBase):
     """
    Plugin that downloads data from eddb.
     """
-    
     pluginOptions = {
         'item': "Update Items using latest file from server. (Implies '-O system,station')",
         'rare': "Update RareItems using latest file from server. (Implies '-O system,station')",
@@ -118,6 +87,7 @@ class ImportPlugin(plugins.ImportPluginBase):
         'force': "Force regeneration of selected items even if source file not updated since previous run. "
                  "(Useful for updating Vendor tables if they were skipped during a '-O clean' run.)",
         'purge': "Remove any empty systems that previously had fleet carriers.",
+        'optimize': "Optimize ('vacuum') database after processing.",
         'solo': "Don't download crowd-sourced market data. (Implies '-O skipvend', supercedes '-O all', '-O clean', '-O listings'.)",
     }
 
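
Worth noting alongside the new 'optimize' flag: the unconditional VACUUM that previously ran at the end of every listings import (removed further down) is now opt-in. Assuming this is the eddblink plugin and the '-P'/'-O' invocation syntax that the option descriptions above reference, requesting it would look something like: trade import -P eddblink -O listings,optimize.
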
@@ -197,22 +167,10 @@ class ImportPlugin(plugins.ImportPluginBase):
         db = self.tdb.getDB()
         self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
         
-        db.execute("PRAGMA foreign_keys = OFF")
-        
-        self.tdenv.DEBUG0("Saving systems with stations.... " + str(self.now()) + "\t\t\t\t", end="\r")
-        db.execute("DROP TABLE IF EXISTS System_copy")
-        db.execute("""CREATE TABLE System_copy AS SELECT * FROM System
-                      WHERE system_id IN (SELECT system_id FROM Station)
-                   """)
-        
-        self.tdenv.DEBUG0("Erasing table and reinserting kept systems.... " + str(self.now()) + "\t\t\t\t", end="\r")
-        db.execute("DELETE FROM System")
-        db.execute("INSERT INTO System SELECT * FROM System_copy")
-        
-        self.tdenv.DEBUG0("Removing copy.... " + str(self.now()) + "\t\t\t\t", end="\r")
-        db.execute("PRAGMA foreign_keys = ON")
-        db.execute("DROP TABLE IF EXISTS System_copy")
-        
+        db.execute("""
+            DELETE FROM System
+            WHERE NOT EXISTS(SELECT 1 FROM Station WHERE Station.system_id = System.system_id)
+        """)
         db.commit()
         
         self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
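
The old purge implementation disabled foreign keys, snapshotted the keepers into a System_copy table, wiped System, and reinserted; the rewrite is a single correlated anti-join DELETE, leaving any dependent rows to the ON DELETE CASCADE clauses visible in the schema hunks below. A minimal self-contained sketch of the pattern, using a reduced two-table schema:

    import sqlite3
    
    db = sqlite3.connect(":memory:")
    db.executescript("""
        CREATE TABLE System  (system_id INTEGER PRIMARY KEY);
        CREATE TABLE Station (station_id INTEGER PRIMARY KEY,
                              system_id  INTEGER REFERENCES System(system_id));
        INSERT INTO System  VALUES (1), (2), (3);
        INSERT INTO Station VALUES (10, 1), (11, 3);
    """)
    # The EXISTS test is evaluated per System row; system 2 has no stations.
    db.execute("""
        DELETE FROM System
        WHERE NOT EXISTS (SELECT 1 FROM Station
                          WHERE Station.system_id = System.system_id)
    """)
    db.commit()
    print([row[0] for row in db.execute("SELECT system_id FROM System")])  # [1, 3]

With an index on Station(system_id), SQLite can typically satisfy the correlated EXISTS probe from the index alone.
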
@@ -224,12 +182,16 @@ class ImportPlugin(plugins.ImportPluginBase):
         """
         listings_path = Path(self.dataPath, listings_file).absolute()
         from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
-        self.tdenv.NOTE("Processing market data from {}: Start time = {}. Live = {}", listings_file, self.now(), from_live)
         
+        self.tdenv.NOTE("Checking listings")
         total = _count_listing_entries(self.tdenv, listings_path)
         if not total:
+            self.tdenv.NOTE("No listings")
             return
         
+        self.tdenv.NOTE("Processing market data from {}: Start time = {}. Live = {}", listings_file, self.now(), from_live)
+        
+        db = self.tdb.getDB()
         stmt_unliven_station = """UPDATE StationItem SET from_live = 0 WHERE station_id = ?"""
         stmt_flush_station = """DELETE from StationItem WHERE station_id = ?"""
         stmt_add_listing = """
@@ -246,20 +208,25 @@ class ImportPlugin(plugins.ImportPluginBase):
         """
         
         # Fetch all the items IDS
-        db = self.tdb.getDB()
         item_lookup = _make_item_id_lookup(self.tdenv, db.cursor())
         station_lookup = _make_station_id_lookup(self.tdenv, db.cursor())
         last_station_update_times = _collect_station_modified_times(self.tdenv, db.cursor())
         
         cur_station = None
+        is_debug = self.tdenv.debug > 0
         self.tdenv.DEBUG0("Processing entries...")
-        with listings_path.open("r", encoding="utf-8", errors="ignore") as fh:
-            prog = pbar.Progress(total, 50)
-            
-            cursor: Optional[sqlite3.Cursor] = db.cursor()
+        
+        # Try to find a balance between doing too many commits where we fail
+        # to get any benefits from constructing transactions, and blowing up
+        # the WAL and memory usage by making massive transactions.
+        max_transaction_items, transaction_items = 32 * 1024, 0
+        with pbar.Progress(total, 40, prefix="Processing", style=pbar.LongRunningCountBar) as prog,\
+                listings_path.open("r", encoding="utf-8", errors="ignore") as fh:
+            cursor = db.cursor()
+            cursor.execute("BEGIN TRANSACTION")
             
             for listing in csv.DictReader(fh):
-                prog.increment(1, postfix = lambda value, total: f" {(value / total * 100):.0f}% {value} / {total}")
+                prog.increment(1)
                
                station_id = int(listing['station_id'])
                if station_id not in station_lookup:
@@ -269,16 +236,21 @@ class ImportPlugin(plugins.ImportPluginBase):
                
                if station_id != cur_station:
                    # commit anything from the previous station, get a new cursor
-                    db.commit()
-                    cur_station, skip_station, cursor = station_id, False, db.cursor()
+                    if transaction_items >= max_transaction_items:
+                        cursor.execute("COMMIT")
+                        transaction_items = 0
+                        cursor.execute("BEGIN TRANSACTION")
+                    cur_station, skip_station = station_id, False
                
                # Check if listing already exists in DB and needs updated.
                last_modified: int = int(last_station_update_times.get(station_id, 0))
                if last_modified:
                    # When the listings.csv data matches the database, update to make from_live == 0.
                    if listing_time == last_modified and not from_live:
-                        self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live' (old={last_modified}, listing={listing_time}).")
+                        if is_debug:
+                            self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live' (old={last_modified}, listing={listing_time}).")
                        cursor.execute(stmt_unliven_station, (cur_station,))
+                        transaction_items += 1
                        skip_station = True
                        continue
                
@@ -289,8 +261,10 @@ class ImportPlugin(plugins.ImportPluginBase):
                        continue
                    
                    # The data from the import file is newer, so we need to delete the old data for this station.
-                    self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station} (old={last_modified}, listing={listing_time}).")
+                    if is_debug:
+                        self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station} (old={last_modified}, listing={listing_time}).")
                    cursor.execute(stmt_flush_station, (cur_station,))
+                    transaction_items += 1
                    last_station_update_times[station_id] = listing_time
                
                # station skip lasts until we change station id.
@@ -310,20 +284,24 @@ class ImportPlugin(plugins.ImportPluginBase):
                supply_units = int(listing['supply'])
                supply_level = int(listing.get('supply_bracket') or '-1')
                
-                self.tdenv.DEBUG1(f"Inserting new listing data for {station_id}.")
+                if is_debug:
+                    self.tdenv.DEBUG1(f"Inserting new listing data for {station_id}.")
                cursor.execute(stmt_add_listing, (
                    station_id, item_id, listing_time, from_live,
                    demand_price, demand_units, demand_level,
                    supply_price, supply_units, supply_level,
                ))
-        
-        prog.clear()
-        
-        # Do a final commit to be sure
-        db.commit()
-        
-        self.tdenv.NOTE("Optimizing database...")
-        db.execute("VACUUM")
+                transaction_items += 1
+        
+        # These will take a little while, which has four steps, so we'll make it a counter.
+        with pbar.Progress(1, 40, prefix="Saving"):
+            # Do a final commit to be sure
+            cursor.execute("COMMIT")
+        
+        if self.getOption("optimize"):
+            with pbar.Progress(1, 40, prefix="Optimizing"):
+                db.execute("VACUUM")
+        
        self.tdb.close()
        
        self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
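
The import loop now manages transactions explicitly, committing after roughly 32K statements instead of once per station: large enough to amortise per-commit overhead, small enough to cap WAL and memory growth. Note the real code only tests the threshold at a station boundary, so one station's rows never straddle a commit. The shape of the pattern, reduced to a standalone sketch (illustrative table and column count):

    import sqlite3
    
    def import_rows(db_path: str, rows, batch_size: int = 32 * 1024) -> None:
        # isolation_level=None puts the sqlite3 module in autocommit mode,
        # so the explicit BEGIN/COMMIT pairs below are the only transactions.
        db = sqlite3.connect(db_path, isolation_level=None)
        cursor = db.cursor()
        cursor.execute("BEGIN TRANSACTION")
        pending = 0
        for row in rows:
            # illustrative 3-column table; the real StationItem is wider
            cursor.execute("INSERT OR REPLACE INTO StationItem VALUES (?, ?, ?)", row)
            pending += 1
            if pending >= batch_size:  # flush: caps WAL growth and memory use
                cursor.execute("COMMIT")
                cursor.execute("BEGIN TRANSACTION")
                pending = 0
        cursor.execute("COMMIT")  # commit the final partial batch
        db.close()
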
@@ -349,7 +327,7 @@ class ImportPlugin(plugins.ImportPluginBase):
        
        # We can probably safely assume that the plugin
        # has never been run if the db file doesn't exist.
-        if not (self.tdb.dataPath / Path("TradeDangerous.db")).exists():
+        if not self.tdb.dbPath.exists():
            self.options["clean"] = True
        
        if self.getOption("clean"):
@@ -392,8 +370,11 @@ class ImportPlugin(plugins.ImportPluginBase):
            if rib_path.exists():
                rib_path.unlink()
            ri_path.rename(rib_path)
-        
+        
+        self.tdb.close()
+        
        self.tdb.reloadCache()
+        self.tdb.close()
        
        # Now it's safe to move RareItems back.
        if ri_path.exists():
@@ -451,7 +432,7 @@ class ImportPlugin(plugins.ImportPluginBase):
        if self.downloadFile(self.upgradesPath) or self.getOption("force"):
            transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
            buildCache = True
-        
+        
        if self.getOption("ship"):
            if self.downloadFile(self.shipPath) or self.getOption("force"):
                transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
@@ -486,16 +467,18 @@ class ImportPlugin(plugins.ImportPluginBase):
        if buildCache:
            self.tdb.close()
            self.tdb.reloadCache()
-        
+            self.tdb.close()
+        
        if self.getOption("purge"):
            self.purgeSystems()
+            self.tdb.close()
        
        if self.getOption("listings"):
            if self.downloadFile(self.listingsPath) or self.getOption("force"):
                self.importListings(self.listingsPath)
            if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
                self.importListings(self.liveListingsPath)
-        
+        
        if self.getOption("listings"):
            self.tdenv.NOTE("Regenerating .prices file.")
            cache.regeneratePricesFile(self.tdb, self.tdenv)
(The remaining hunks are from the package's bundled SQL schema.)

@@ -92,10 +92,9 @@ CREATE TABLE Station
    UNIQUE (station_id),
 
    FOREIGN KEY (system_id) REFERENCES System(system_id)
-        ON UPDATE CASCADE
        ON DELETE CASCADE
- );
-CREATE INDEX idx_station_by_system ON Station (system_id, station_id);
+ ) WITHOUT ROWID;
+CREATE INDEX idx_station_by_system ON Station (system_id);
 CREATE INDEX idx_station_by_name ON Station (name);
 
 
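
Making Station a WITHOUT ROWID table stores its rows in a B-tree clustered on the declared PRIMARY KEY rather than on a hidden rowid, so primary-key lookups take a single probe; secondary indexes on such tables implicitly carry the primary-key columns, which is why idx_station_by_system can drop its explicit station_id column and still cover the stations-by-system lookup. A quick way to confirm, against a reduced version of the new schema:

    import sqlite3
    
    db = sqlite3.connect(":memory:")
    db.executescript("""
        CREATE TABLE Station (
            station_id INTEGER PRIMARY KEY,
            system_id  INTEGER NOT NULL,
            name       TEXT
        ) WITHOUT ROWID;
        CREATE INDEX idx_station_by_system ON Station (system_id);
    """)
    plan = db.execute(
        "EXPLAIN QUERY PLAN SELECT station_id FROM Station WHERE system_id = ?", (1,)
    ).fetchall()
    print(plan)  # expect: SEARCH ... USING COVERING INDEX idx_station_by_system
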
@@ -226,11 +225,45 @@ CREATE TABLE StationItem
       ON UPDATE CASCADE ON DELETE CASCADE,
   FOREIGN KEY (item_id) REFERENCES Item(item_id)
       ON UPDATE CASCADE ON DELETE CASCADE
- );
+ ) WITHOUT ROWID;
 CREATE INDEX si_mod_stn_itm ON StationItem(modified, station_id, item_id);
 CREATE INDEX si_itm_dmdpr ON StationItem(item_id, demand_price) WHERE demand_price > 0;
 CREATE INDEX si_itm_suppr ON StationItem(item_id, supply_price) WHERE supply_price > 0;
 
+-- Not used yet
+CREATE TABLE IF NOT EXISTS StationDemand
+(
+    station_id INTEGER NOT NULL,
+    item_id INTEGER NOT NULL,
+    price INTEGER NOT NULL,
+    units INTEGER NOT NULL,
+    level INTEGER NOT NULL,
+    modified INTEGER NOT NULL,
+    from_live INTEGER NOT NULL DEFAULT 0,
+    CONSTRAINT pk_StationDemand PRIMARY KEY (station_id, item_id),
+    CONSTRAINT fk_StationDemand_station_id_Station FOREIGN KEY (station_id) REFERENCES Station (station_id) ON DELETE CASCADE,
+    CONSTRAINT fk_StationDemand_item_id_Item FOREIGN KEY (item_id) REFERENCES Item (item_id) ON DELETE CASCADE
+) WITHOUT ROWID;
+DELETE FROM StationDemand;
+CREATE INDEX idx_StationDemand_item ON StationDemand (item_id);
+
+-- Not used yet
+CREATE TABLE IF NOT EXISTS StationSupply
+(
+    station_id INTEGER NOT NULL,
+    item_id INTEGER NOT NULL,
+    price INTEGER NOT NULL,
+    units INTEGER NOT NULL,
+    level INTEGER NOT NULL,
+    modified INTEGER NOT NULL,
+    from_live INTEGER NOT NULL DEFAULT 0,
+    CONSTRAINT pk_StationSupply PRIMARY KEY (station_id, item_id),
+    CONSTRAINT fk_StationSupply_station_id_Station FOREIGN KEY (station_id) REFERENCES Station (station_id) ON DELETE CASCADE,
+    CONSTRAINT fk_StationSupply_item_id_Item FOREIGN KEY (item_id) REFERENCES Item (item_id) ON DELETE CASCADE
+) WITHOUT ROWID;
+DELETE FROM StationSupply;
+CREATE INDEX idx_StationSupply_item ON StationSupply (item_id);
+
 CREATE VIEW StationBuying AS
 SELECT  station_id,
         item_id,