tradedangerous 10.16.14-py3-none-any.whl → 10.16.16-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tradedangerous might be problematic.
- tradedangerous/plugins/eddblink_plug.py +42 -26
- tradedangerous/version.py +1 -1
- {tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/METADATA +1 -1
- {tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/RECORD +8 -8
- {tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/LICENSE +0 -0
- {tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/WHEEL +0 -0
- {tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/entry_points.txt +0 -0
- {tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/top_level.txt +0 -0
tradedangerous/plugins/eddblink_plug.py CHANGED
@@ -40,6 +40,34 @@ class DecodingError(PluginException):
     pass
 
 
+def file_line_count(from_file: Path, bufsize: int = 128 * 1024) -> int:
+    """ counts the number of newline characters in a given file. """
+    # Pre-allocate a buffer so we're not putting pressure on the garbage collector,
+    # capture it's counting method so we don't have to keep looking that up on
+    # large files.
+    buf = bytearray(bufsize)
+    counter = buf.count
+
+    total = 0
+
+    with from_file.open("rb") as fh:
+        # Capture the 'readinto' method to avoid lookups.
+        reader = fh.readinto
+
+        # read into the buffer and capture the number of bytes fetched,
+        # which will be 'size' until the last read from the file.
+        read = reader(buf)
+        while read == bufsize:  # nominal case for large files
+            total += counter(b'\n')
+            read = reader(buf)
+
+        # when 0 <= read < bufsize we're on the last page of the
+        # file, so we need to take a slice of the buffer, which creates
+        # a new object and thus we also have to lookup count. it's trivial
+        # but if you have to do it 10,000x it's definitly not a rounding error.
+        return total + buf[:read].count(b'\n')
+
+
 class ImportPlugin(plugins.ImportPluginBase):
     """
     Plugin that downloads data from eddb.
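The new `file_line_count` helper reads into one pre-allocated `bytearray` via `readinto` and counts newlines in place, so the progress total for a large listings file can be computed without allocating a fresh `bytes` object per read. A minimal standalone sketch of the same technique (the `count_newlines` name, the temporary file, and the sample contents are illustrative, not part of the plugin):

```python
from pathlib import Path
import tempfile

def count_newlines(path: Path, bufsize: int = 128 * 1024) -> int:
    """Count b'\\n' occurrences by reusing a single pre-allocated buffer."""
    buf = bytearray(bufsize)
    total = 0
    with path.open("rb") as fh:
        read = fh.readinto(buf)      # fills at most bufsize bytes
        while read == bufsize:       # full buffer: count in place, no new objects
            total += buf.count(b"\n")
            read = fh.readinto(buf)
        # short final read: only inspect the bytes actually filled
        return total + buf[:read].count(b"\n")

# tiny demo file: three newline-terminated rows
with tempfile.NamedTemporaryFile("w", suffix=".csv", delete=False) as tmp:
    tmp.write("id,price\n1,10\n2,20\n")
sample = Path(tmp.name)
print(count_newlines(sample))  # -> 3
sample.unlink()
```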
@@ -123,14 +151,7 @@ class ImportPlugin(plugins.ImportPluginBase):
             for result in results:
                 yield result
 
-
-    def blocks(f, size = 65536):
-        while True:
-            b = f.read(size)
-            if not b:
-                break
-            yield b
-
+
     def downloadFile(self, path):
         """
         Fetch the latest dumpfile from the website if newer than local copy.
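For contrast, the removed `blocks` generator returned a fresh `bytes` object from every `f.read(size)` call, so counting newlines through it allocated one new object per chunk; that per-chunk allocation is what the `readinto`-based helper above avoids. A standalone reconstruction of the older pattern (the `count_newlines_chunked` wrapper is illustrative; the counting call site is not part of this diff):

```python
from pathlib import Path

def blocks(f, size=65536):
    # Yield successive chunks; each f.read() allocates a new bytes object.
    while True:
        b = f.read(size)
        if not b:
            break
        yield b

def count_newlines_chunked(path: Path) -> int:
    with path.open("rb") as fh:
        return sum(chunk.count(b"\n") for chunk in blocks(fh))
```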
@@ -219,18 +240,15 @@ class ImportPlugin(plugins.ImportPluginBase):
         from_live = 0 if listings_file == self.listingsPath else 1
 
         self.tdenv.DEBUG0(f"Getting total number of entries in {listings_file}...")
-
-
+        listings_path = Path(self.dataPath, listings_file)
+        total += file_line_count(listings_path)
 
-        liveList = []
         liveStmt = """UPDATE StationItem
                       SET from_live = 0
                       WHERE station_id = ?"""
 
-        delList = []
         delStmt = "DELETE from StationItem WHERE station_id = ?"
 
-        listingList = []
         listingStmt = """INSERT OR IGNORE INTO StationItem
                          (station_id, item_id, modified,
                           demand_price, demand_units, demand_level,
@@ -238,10 +256,7 @@ class ImportPlugin(plugins.ImportPluginBase):
                          VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? )"""
 
         self.tdenv.DEBUG0("Getting list of commodities...")
-        items = []
-        it_result = self.execute("SELECT item_id FROM Item ORDER BY item_id").fetchall()
-        for item in it_result:
-            items.append(item[0])
+        items = [cols[0] for cols in self.execute("SELECT item_id FROM Item ORDER BY item_id")]
 
         self.tdenv.DEBUG0("Getting list of stations...")
         stationList = {
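The commodity list is now built with a comprehension over the cursor rather than `fetchall()` followed by an append loop. A self-contained illustration of the equivalence using an in-memory SQLite database (the table contents here are made up for the demo):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE Item (item_id INTEGER PRIMARY KEY)")
conn.executemany("INSERT INTO Item (item_id) VALUES (?)", [(3,), (1,), (2,)])

# old style: materialise every row, then unpack in a loop
items_old = []
for row in conn.execute("SELECT item_id FROM Item ORDER BY item_id").fetchall():
    items_old.append(row[0])

# new style: the cursor is iterable, so unpack rows as they stream
items_new = [cols[0] for cols in conn.execute("SELECT item_id FROM Item ORDER BY item_id")]

assert items_old == items_new == [1, 2, 3]
```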
@@ -249,15 +264,17 @@ class ImportPlugin(plugins.ImportPluginBase):
             for (stationID,) in self.execute("SELECT station_id FROM Station")
         }
 
+        stationItems = dict(self.execute('SELECT station_id, UNIXEPOCH(modified) FROM StationItem').fetchall())
+
         self.tdenv.DEBUG0("Processing entries...")
-        with open(
+        with listings_file.open("r", encoding="utf-8", errors="ignore") as fh:
             prog = pbar.Progress(total, 50)
             listings = csv.DictReader(fh)
 
             cur_station = -1
 
             for listing in listings:
-                if prog.increment(1, postfix = lambda value,
+                if prog.increment(1, postfix = lambda value, total: f" {(value / total * 100):.0f}% {value} / {total}"):
                     # Do a commit and close the DB every 2%.
                     # This ensures the listings are put in the DB and the WAL is cleared.
                     self.commit()
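The biggest change in this hunk is the `stationItems` preload: one query maps every station to its `modified` timestamp as a Unix epoch, so the import loop can replace a per-listing `SELECT modified ...` plus `strptime` with a dictionary lookup (see the next hunk). A rough sketch of the idea against an in-memory database (rows and ids are invented; `UNIXEPOCH()` requires SQLite 3.38 or newer):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE StationItem (station_id INTEGER, item_id INTEGER, modified TEXT)")
conn.executemany(
    "INSERT INTO StationItem VALUES (?, ?, ?)",
    [(128, 1, "2024-01-02 03:04:05"), (256, 7, "2024-02-03 04:05:06")],
)

# One pass builds a station_id -> unix-epoch map; duplicate station_ids
# simply collapse to the last row seen, which is enough for a skip check.
station_items = dict(
    conn.execute("SELECT station_id, UNIXEPOCH(modified) FROM StationItem").fetchall()
)

collected_at = 1704164645  # stand-in for int(listing['collected_at'])
if station_items.get(128) and collected_at <= station_items[128]:
    print("listing is not newer than the stored row; the station can be skipped")
```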
@@ -272,23 +289,22 @@ class ImportPlugin(plugins.ImportPluginBase):
                     skipStation = False
 
                     # Check if listing already exists in DB and needs updated.
-
-                    result = self.execute("SELECT modified FROM StationItem WHERE station_id = ?", (station_id,)).fetchone()
-                    if result:
-                        updated = timegm(datetime.datetime.strptime(result[0].split('.')[0], '%Y-%m-%d %H:%M:%S').timetuple())
+                    if stationItems.get(station_id):
                         # When the listings.csv data matches the database, update to make from_live == 0.
-                        if int(listing['collected_at']) ==
+                        if int(listing['collected_at']) == stationItems.get(station_id) and not from_live:
                             self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live'.")
                             self.execute(liveStmt, (cur_station,))
                         # Unless the import file data is newer, nothing else needs to be done for this station,
                         # so the rest of the listings for this station can be skipped.
-                        if int(listing['collected_at']) <=
+                        if int(listing['collected_at']) <= stationItems.get(station_id):
                             skipStation = True
                             continue
 
                         # The data from the import file is newer, so we need to delete the old data for this station.
                         self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station}.")
                         self.execute(delStmt, (cur_station,))
+                        # We've deleted all the items from this station, so remove it.
+                        del stationItems[station_id]
 
 
                 if skipStation:
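One subtlety in the rewritten loop: after the old rows for a station are deleted, the preloaded map is pruned with `del stationItems[station_id]`, so later listings for that station are treated as new instead of being compared against a timestamp that no longer exists in the database. In plain dictionary terms (standalone sketch with invented ids and epochs):

```python
station_items = {128: 1704164645, 256: 1706843106}

station_id = 128
collected_at = 1704200000  # newer than the stored 1704164645

if station_items.get(station_id) and collected_at > station_items[station_id]:
    # ... the DB rows for this station would be deleted here ...
    del station_items[station_id]  # keep the in-memory cache consistent

assert station_id not in station_items
```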
@@ -317,7 +333,7 @@ class ImportPlugin(plugins.ImportPluginBase):
             self.tdb.close()
 
             while prog.value < prog.maxValue:
-                prog.increment(1, postfix = lambda value,
+                prog.increment(1, postfix = lambda value, total: " " + str(round(value / total * 100)) + "%")
             prog.clear()
 
         self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
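Both progress calls now pass a two-argument `postfix` callback that renders a percentage string. The formatting is plain Python and can be checked in isolation (the values below are arbitrary):

```python
# mirrors the two callbacks added in this diff
postfix_counts = lambda value, total: f" {(value / total * 100):.0f}% {value} / {total}"
postfix_percent = lambda value, total: " " + str(round(value / total * 100)) + "%"

print(postfix_counts(3517, 14068))   # ' 25% 3517 / 14068'
print(postfix_percent(3517, 14068))  # ' 25%'
```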
tradedangerous/version.py CHANGED

{tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tradedangerous
-Version: 10.16.14
+Version: 10.16.16
 Summary: Trade-Dangerous is a set of powerful trading tools for Elite Dangerous, organized around one of the most powerful trade run optimizers available.
 Home-page: https://github.com/eyeonus/Trade-Dangerous
 Author: eyeonus

{tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/RECORD CHANGED
@@ -21,7 +21,7 @@ tradedangerous/tradeexcept.py,sha256=aZ-Y31MbkjF7lmAzBAbaMsPPE7FEEfuf4gaX2GvriDk
 tradedangerous/tradegui.py,sha256=JbGFnsWupgesk6hrcUgKSdD9NNDyo0U9gh6m3DccAwU,782
 tradedangerous/transfers.py,sha256=NmXXk2aF88YkAvYqc9Syt_aO6d2jJjC-OxoRFoOyQH4,9923
 tradedangerous/utils.py,sha256=PUPvAEqUyxYGqqQa0b_yfLAvq8YVUxK6HfdS-CxM-Lo,5186
-tradedangerous/version.py,sha256=
+tradedangerous/version.py,sha256=vROLJSJzupfaXUWuLCNyZhDbAbuFvSrk3T-AY9MMAEc,648
 tradedangerous/commands/TEMPLATE.py,sha256=7oXL124aqxGHwnb0h9yRylUiwc6M5QrRrGVrubwI1gg,2124
 tradedangerous/commands/__init__.py,sha256=6B0WuqkFBOll5Hj67yKDAnhmyr5ZAnHc6nzUNEUh384,9640
 tradedangerous/commands/buildcache_cmd.py,sha256=oJvP06fA8svnHrfrpWkHKR16cba8GIhHdMOyZqds18Y,2332
@@ -62,7 +62,7 @@ tradedangerous/misc/progress.py,sha256=-_V7E51sIYUtSxeeA1cphBEW4A_OBuH1guEDxUjik
 tradedangerous/plugins/__init__.py,sha256=zCEVbTem1CAM1cOV9r96H3ikjqza3dd-XoaubE5_xkc,7868
 tradedangerous/plugins/edapi_plug.py,sha256=IQxfDGM9IqwuJbDZRL8RFIyGMWzd0YmeJVWUVPDA3Ik,42275
 tradedangerous/plugins/edcd_plug.py,sha256=ZPtRzLhcQZEiwEo3AoPyk3Uy4UmRLM6gv2Qi1s7K_Vs,14469
-tradedangerous/plugins/eddblink_plug.py,sha256=
+tradedangerous/plugins/eddblink_plug.py,sha256=3BOX6McE6huXq0AJevbN7m0rZXc_Mns4YnGlaH6nDP4,21964
 tradedangerous/plugins/edmc_batch_plug.py,sha256=3Ptr-SZqaZFR8ViIIrp9Ak7rvfU3zl11AZYBhIceN7s,4224
 tradedangerous/plugins/journal_plug.py,sha256=K1oIeI7E3mb04fvYLXyoAh7fOTyM9NBelibTI88MIDQ,23696
 tradedangerous/plugins/netlog_plug.py,sha256=Gw_HSZWpN17D--OIYEM3Vo8y9SvDOv9UwAUfY24kz28,13460
@@ -71,9 +71,9 @@ tradedangerous/templates/Added.csv,sha256=8o54civQCcS9y7_DBo0GX196XWRbbREQqKDYTK
 tradedangerous/templates/Category.csv,sha256=8xwUDcBZE25T6x6dZGlRUMTCqeDLt3a9LXU5h6hRHV8,250
 tradedangerous/templates/RareItem.csv,sha256=F1RhRnTD82PiwrVUO-ai2ErGH2PTqNnQaDw5mcgljXs,10483
 tradedangerous/templates/TradeDangerous.sql,sha256=1EiJ7cNJQKvdW4X-LQAHw3Y1POc0roKf25LJJy6jGlo,8135
-tradedangerous-10.16.
-tradedangerous-10.16.
-tradedangerous-10.16.
-tradedangerous-10.16.
-tradedangerous-10.16.
-tradedangerous-10.16.
+tradedangerous-10.16.16.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+tradedangerous-10.16.16.dist-info/METADATA,sha256=ZBKc1-gMkTbqzQl9YVoevR_fqnEiTxK6nIIQ8joxx94,4442
+tradedangerous-10.16.16.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+tradedangerous-10.16.16.dist-info/entry_points.txt,sha256=pSwa-q0ob443uiKux7xFKYQl8uen66iDTnjdrQhNLx8,92
+tradedangerous-10.16.16.dist-info/top_level.txt,sha256=bF29i-oEltmNICgElEKxNsg83oahJvxg3a7YrxZi9Rk,15
+tradedangerous-10.16.16.dist-info/RECORD,,
{tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/LICENSE: File without changes

{tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/WHEEL: File without changes

{tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/entry_points.txt: File without changes

{tradedangerous-10.16.14.dist-info → tradedangerous-10.16.16.dist-info}/top_level.txt: File without changes