tradedangerous 10.16.12__py3-none-any.whl → 10.16.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tradedangerous might be problematic; consult the registry's advisory page for more details.

@@ -91,6 +91,7 @@ class ImportPlugin(plugins.ImportPluginBase):
91
91
  def execute(self, sql_cmd, args = None):
92
92
  cur = self.tdb.getDB().cursor()
93
93
 
94
+ self.tdenv.DEBUG2(f"SQL-Statement:\n'{sql_cmd},{args}'")
94
95
  success = False
95
96
  result = None
96
97
  while not success:
@@ -109,24 +110,6 @@ class ImportPlugin(plugins.ImportPluginBase):
109
110
  time.sleep(1)
110
111
  return result
111
112
 
112
- def executemany(self, sql_cmd, args):
113
- cur = self.tdb.getDB().cursor()
114
-
115
- success = False
116
- result = None
117
- while not success:
118
- try:
119
- result = cur.executemany(sql_cmd, args)
120
- success = True
121
- except sqlite3.OperationalError as e:
122
- if "locked" not in str(e):
123
- success = True
124
- raise sqlite3.OperationalError(e)
125
- else:
126
- print("(execute) Database is locked, waiting for access.", end = "\r")
127
- time.sleep(1)
128
- return result
129
-
130
113
  @staticmethod
131
114
  def fetchIter(cursor, arraysize = 1000):
132
115
  """
@@ -235,13 +218,7 @@ class ImportPlugin(plugins.ImportPluginBase):
235
218
 
236
219
  from_live = 0 if listings_file == self.listingsPath else 1
237
220
 
238
- # Used to check if the listings file is using the fdev_id as a temporary
239
- # item_id, but the item is in the DB with a permanent item_id.
240
- fdev2item = dict()
241
- result = self.execute("SELECT fdev_id,item_id FROM Item ORDER BY fdev_id").fetchall()
242
- for item in result:
243
- fdev2item[item[0]] = item[1]
244
-
221
+ self.tdenv.DEBUG0(f"Getting total number of entries in {listings_file}...")
245
222
  with open(str(self.dataPath / listings_file), "r", encoding = "utf-8", errors = 'ignore') as f:
246
223
  total += (sum(bl.count("\n") for bl in self.blocks(f)))
247
224
 
@@ -260,16 +237,19 @@ class ImportPlugin(plugins.ImportPluginBase):
260
237
  supply_price, supply_units, supply_level, from_live)
261
238
  VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? )"""
262
239
 
240
+ self.tdenv.DEBUG0("Getting list of commodities...")
263
241
  items = []
264
242
  it_result = self.execute("SELECT item_id FROM Item ORDER BY item_id").fetchall()
265
243
  for item in it_result:
266
244
  items.append(item[0])
267
245
 
246
+ self.tdenv.DEBUG0("Getting list of stations...")
268
247
  stationList = {
269
248
  stationID
270
249
  for (stationID,) in self.execute("SELECT station_id FROM Station")
271
250
  }
272
251
 
252
+ self.tdenv.DEBUG0("Processing entries...")
273
253
  with open(str(self.dataPath / listings_file), "r") as fh:
274
254
  prog = pbar.Progress(total, 50)
275
255
  listings = csv.DictReader(fh)
@@ -277,7 +257,11 @@ class ImportPlugin(plugins.ImportPluginBase):
277
257
  cur_station = -1
278
258
 
279
259
  for listing in listings:
280
- prog.increment(1, postfix = lambda value, goal: " " + str(round(value / total * 100)) + "%")
260
+ if prog.increment(1, postfix = lambda value, goal: f" {(value / total * 100):.0f}% {value} / {total}"):
261
+ # Do a commit and close the DB every 2%.
262
+ # This ensures the listings are put in the DB and the WAL is cleared.
263
+ self.commit()
264
+ self.tdb.close()
281
265
 
282
266
  station_id = int(listing['station_id'])
283
267
  if station_id not in stationList:
@@ -294,7 +278,8 @@ class ImportPlugin(plugins.ImportPluginBase):
294
278
  updated = timegm(datetime.datetime.strptime(result[0].split('.')[0], '%Y-%m-%d %H:%M:%S').timetuple())
295
279
  # When the listings.csv data matches the database, update to make from_live == 0.
296
280
  if int(listing['collected_at']) == updated and not from_live:
297
- liveList.append((cur_station,))
281
+ self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live'.")
282
+ self.execute(liveStmt, (cur_station,))
298
283
  # Unless the import file data is newer, nothing else needs to be done for this station,
299
284
  # so the rest of the listings for this station can be skipped.
300
285
  if int(listing['collected_at']) <= updated:
@@ -302,7 +287,9 @@ class ImportPlugin(plugins.ImportPluginBase):
302
287
  continue
303
288
 
304
289
  # The data from the import file is newer, so we need to delete the old data for this station.
305
- delList.append((cur_station,))
290
+ self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station}.")
291
+ self.execute(delStmt, (cur_station,))
292
+
306
293
 
307
294
  if skipStation:
308
295
  continue
@@ -320,24 +307,18 @@ class ImportPlugin(plugins.ImportPluginBase):
320
307
  supply_units = int(listing['supply'])
321
308
  supply_level = int(listing['supply_bracket']) if listing['supply_bracket'] != '' else -1
322
309
 
323
- listingList.append((station_id, item_id, modified,
310
+ self.tdenv.DEBUG1(f"Inserting new listing data for {station_id}.")
311
+ self.execute(listingStmt, (station_id, item_id, modified,
324
312
  demand_price, demand_units, demand_level,
325
313
  supply_price, supply_units, supply_level, from_live))
326
314
 
315
+ # Do a final commit to be sure
316
+ self.commit()
317
+ self.tdb.close()
318
+
327
319
  while prog.value < prog.maxValue:
328
320
  prog.increment(1, postfix = lambda value, goal: " " + str(round(value / total * 100)) + "%")
329
321
  prog.clear()
330
-
331
- self.tdenv.NOTE("Import file processing complete, updating database. {}", self.now())
332
- if liveList:
333
- self.tdenv.NOTE("Marking data now in the EDDB listings.csv as no longer 'live'. {}", self.now())
334
- self.executemany(liveStmt, liveList)
335
- if delList:
336
- self.tdenv.NOTE("Deleting old listing data. {}", self.now())
337
- self.executemany(delStmt, delList)
338
- if listingList:
339
- self.tdenv.NOTE("Inserting new listing data. {}", self.now())
340
- self.executemany(listingStmt, listingList)
341
322
 
342
323
  self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
343
324
 
@@ -416,19 +397,6 @@ class ImportPlugin(plugins.ImportPluginBase):
416
397
  self.options["all"] = True
417
398
  self.options["force"] = True
418
399
 
419
- self.tdenv.ignoreUnknown = True
420
-
421
- success = False
422
- while not success:
423
- try:
424
- self.tdenv.DEBUG0("Loading Database. {}", self.now())
425
- self.tdb.load(maxSystemLinkLy = self.tdenv.maxSystemLinkLy)
426
- success = True
427
- except sqlite3.OperationalError:
428
- print("Database is locked, waiting for access.", end = "\r")
429
- time.sleep(1)
430
- self.tdenv.DEBUG0("Database loaded.")
431
-
432
400
  # Select which options will be updated
433
401
  if self.getOption("listings"):
434
402
  self.options["item"] = True
@@ -510,7 +478,9 @@ class ImportPlugin(plugins.ImportPluginBase):
510
478
  # Remake the .db files with the updated info.
511
479
  if buildCache:
512
480
  self.tdb.close()
513
- cache.buildCache(self.tdb, self.tdenv)
481
+ self.tdb.reloadCache()
482
+
483
+ self.tdenv.ignoreUnknown = True
514
484
 
515
485
  if self.getOption("purge"):
516
486
  self.purgeSystems()
@@ -522,9 +492,6 @@ class ImportPlugin(plugins.ImportPluginBase):
522
492
  if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
523
493
  self.importListings(self.liveListingsPath)
524
494
 
525
- # self.commit()
526
- self.tdb.close()
527
-
528
495
  if self.getOption("listings"):
529
496
  self.tdenv.NOTE("Regenerating .prices file.")
530
497
  cache.regeneratePricesFile(self.tdb, self.tdenv)
@@ -90,7 +90,6 @@ class ImportPlugin(plugins.ImportPluginBase):
90
90
  if rib_path.exists():
91
91
  rib_path.rename(ri_path)
92
92
 
93
- # self.known_space = self.load_known_space()
94
93
  self.known_systems = self.load_known_systems()
95
94
  self.known_stations = self.load_known_stations()
96
95
  self.known_commodities = self.load_known_commodities()
@@ -99,34 +98,13 @@ class ImportPlugin(plugins.ImportPluginBase):
99
98
  return self.tdenv.uprint(*args, **kwargs)
100
99
 
101
100
  def run(self):
102
- # fs.ensurefolder(self.tdenv.tmpDir)
103
- # filePath = self.tdenv.tmpDir / Path("spansh.prices")
104
101
  if not self.tdenv.detail:
105
102
  self.print('This will take at least several minutes...')
106
103
  self.print('You can increase verbosity (-v) to get a sense of progress')
107
104
  with Timing() as timing:
108
- # with open(filePath, 'w') as f, Timing() as timing:
109
- # self.print(f'Writing prices to {filePath}')
110
- # f.write('# Generated from spansh galaxy data\n')
111
- # f.write(f'# Source: {self.file or self.url}\n')
112
- # f.write('#\n')
113
- # f.write((
114
- # '# {name:50s} {sell:>7s} {buy:>7s} '
115
- # '{demand:>11s} {supply:>11s} {ts}\n'
116
- # ).format(
117
- # name='Item Name',
118
- # sell='SellCr',
119
- # buy='BuyCr',
120
- # demand='Demand',
121
- # supply='Supply',
122
- # ts='Timestamp',
123
- # ))
124
105
  system_count = 0
125
106
  total_station_count = 0
126
107
  total_commodity_count = 0
127
- # self.need_commit = False
128
- # self.update_cache = False
129
- # seen_stations = set()
130
108
  for system, stations in self.data_stream():
131
109
  self.ensure_system(system)
132
110
  station_count = 0
@@ -135,16 +113,9 @@ class ImportPlugin(plugins.ImportPluginBase):
135
113
  fq_station_name = f'@{system.name.upper()}/{station.name}'
136
114
  if self.maxage and (datetime.now() - station.modified) > timedelta(days=self.maxage):
137
115
  if self.tdenv.detail:
138
- self.print(f' | {fq_station_name:50s} | Skipping station due to age: {datetime.now() - station.modified}, ts: {station.modified}')
116
+ self.print(f' | {fq_station_name:50s} | Skipping station due to age: {(datetime.now() - station.modified) / timedelta (days=1):.2f} days old')
139
117
  continue
140
- # if (system.name.upper(), station.name.upper()) in seen_stations:
141
- # if self.tdenv.detail:
142
- # self.print(f' | {fq_station_name:50s} | Skipping duplicate station record')
143
- # continue
144
- # seen_stations.add((system.name.upper(), station.name.upper()))
145
118
  self.ensure_station(system, station)
146
- # f.write('\n')
147
- # f.write(f'@ {system.name.upper()}/{station.name}\n')
148
119
 
149
120
  items = []
150
121
  for commodity in commodities:
@@ -176,23 +147,6 @@ class ImportPlugin(plugins.ImportPluginBase):
176
147
  commodity_count += 1
177
148
  self.execute('COMMIT')
178
149
 
179
- # categories = self.categorise_commodities(commodities)
180
- # for category_name, category_commodities in categories.items():
181
- # f.write(f' + {category_name}\n')
182
- # for commodity in category_commodities:
183
- # commodity = self.ensure_commodity(commodity)
184
- # f.write((
185
- # ' {name:50s} {sell:7d} {buy:7d} '
186
- # '{demand:10d}? {supply:10d}? {modified}\n'
187
- # ).format(
188
- # name=commodity.name,
189
- # sell=commodity.sell,
190
- # buy=commodity.buy,
191
- # demand=commodity.demand,
192
- # supply=commodity.supply,
193
- # modified=commodity.modified,
194
- # ))
195
- # commodity_count += 1
196
150
  if commodity_count:
197
151
  station_count += 1
198
152
  if station_count:
@@ -204,16 +158,6 @@ class ImportPlugin(plugins.ImportPluginBase):
204
158
  f'{system_count:6d} | {system.name.upper():50s} | '
205
159
  f'{station_count:3d} st {commodity_count:6d} co'
206
160
  )
207
- # self.execute('COMMIT')
208
- # if self.need_commit:
209
- # self.execute('COMMIT')
210
- # self.need_commit = False
211
- # self.update_cache = True
212
-
213
- # Need to make sure cached tables are updated, if changes were made
214
- # if self.update_cache:
215
- # for table in [ "Item", "Station", "System" ]:
216
- # _, path = csvexport.exportTableToFile( self.tdb, self.tdenv, table )
217
161
 
218
162
  self.execute('COMMIT')
219
163
  self.tdb.close()
@@ -230,13 +174,7 @@ class ImportPlugin(plugins.ImportPluginBase):
230
174
  self.print('Exporting to cache...')
231
175
  cache.regeneratePricesFile(self.tdb, self.tdenv)
232
176
  self.print(f'Cache export completed in {timedelta(seconds=int(timing.elapsed))!s}')
233
-
234
- # if not self.listener:
235
- # with Timing() as timing:
236
- # self.print('Importing to database...')
237
- # self.tdenv.mergeImport = True
238
- # cache.importDataFromFile(self.tdb, self.tdenv, filePath)
239
- # self.print(f'Database import completed in {timedelta(seconds=int(timing.elapsed))!s}')
177
+
240
178
  return False
241
179
 
242
180
  def data_stream(self):
@@ -253,11 +191,6 @@ class ImportPlugin(plugins.ImportPluginBase):
253
191
  elif self.file:
254
192
  self.print(f'Reading prices from local file: {self.file}')
255
193
  stream = open(self.file, 'r', encoding='utf8')
256
- # else:
257
- # url = self.url or SOURCE_URL
258
- # self.print(f'Reading prices from remote URL: {url}')
259
- # req = requests.get(url, stream=True)
260
- # stream = req.iter_lines(decode_unicode=True)
261
194
  return ingest_stream(stream)
262
195
 
263
196
  def categorise_commodities(self, commodities):
@@ -280,20 +213,6 @@ class ImportPlugin(plugins.ImportPluginBase):
280
213
  attempts -= 1
281
214
  self.print(f'Retrying query \'{query}\': {ex!s}')
282
215
  time.sleep(1)
283
-
284
- # def load_known_space(self):
285
- # cache = {}
286
- # result = self.execute(
287
- # '''
288
- # SELECT System.name, Station.name FROM System
289
- # LEFT JOIN Station USING (system_id)
290
- # '''
291
- # ).fetchall()
292
- # for system, station in result:
293
- # cache.setdefault(system.upper(), set())
294
- # if station is not None:
295
- # cache[system.upper()].add(station.upper())
296
- # return cache
297
216
 
298
217
  def load_known_systems(self):
299
218
  try:
@@ -382,19 +301,6 @@ class ImportPlugin(plugins.ImportPluginBase):
382
301
 
383
302
  def ensure_commodity(self, commodity):
384
303
  if commodity.id in self.known_commodities:
385
- # if self.known_commodities[commodity.id] != commodity.name:
386
- # if self.tdenv.detail >= 3:
387
- # self.print(f' | - {commodity.name:45s} | Replace with pre-existing "{self.known_commodities[commodity.id]}"')
388
- # return Commodity(
389
- # id=commodity.id,
390
- # name=self.known_commodities[commodity.id],
391
- # category=commodity.category,
392
- # demand=commodity.demand,
393
- # supply=commodity.supply,
394
- # sell=commodity.sell,
395
- # buy=commodity.buy,
396
- # modified=commodity.modified,
397
- # )
398
304
  return commodity
399
305
  self.execute(
400
306
  '''
tradedangerous/tradedb.py CHANGED
@@ -438,16 +438,15 @@ class Station(object):
438
438
 
439
439
 
440
440
  class Ship(namedtuple('Ship', (
441
- 'ID', 'dbname', 'cost', 'fdevID', 'stations'
441
+ 'ID', 'dbname', 'cost', 'stations'
442
442
  ))):
443
443
  """
444
444
  Ship description.
445
445
 
446
446
  Attributes:
447
- ID -- The database ID
447
+ ID -- FDevID as provided by the companion API.
448
448
  dbname -- The name as present in the database
449
449
  cost -- How many credits to buy
450
- fdevID -- FDevID as provided by the companion API.
451
450
  stations -- List of Stations ship is sold at.
452
451
  """
453
452
 
@@ -1837,7 +1836,7 @@ class TradeDB(object):
1837
1836
  CAUTION: Will orphan previously loaded objects.
1838
1837
  """
1839
1838
  stmt = """
1840
- SELECT ship_id, name, cost, fdev_id
1839
+ SELECT ship_id, name, cost
1841
1840
  FROM Ship
1842
1841
  """
1843
1842
  self.shipByID = {
tradedangerous/version.py CHANGED
@@ -12,5 +12,5 @@
12
12
  """just keeper of current version"""
13
13
 
14
14
  # TODO: remember to update tests when version changes
15
- __version__ = '10.16.12'
15
+ __version__ = '10.16.14'
16
16
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: tradedangerous
3
- Version: 10.16.12
3
+ Version: 10.16.14
4
4
  Summary: Trade-Dangerous is a set of powerful trading tools for Elite Dangerous, organized around one of the most powerful trade run optimizers available.
5
5
  Home-page: https://github.com/eyeonus/Trade-Dangerous
6
6
  Author: eyeonus
@@ -15,13 +15,13 @@ tradedangerous/submit-distances.py,sha256=lEHtCLUzR2QNRrgPD-PkDIzHYYHOODZVTRxDsA
15
15
  tradedangerous/tools.py,sha256=pp-4WtA12SVaaQHFJFOMTF7EDFRCU2mQeOhC4xoXmEk,1331
16
16
  tradedangerous/trade.py,sha256=vBEJZR3Bybesw9FMelcHOTRA7KqKeH-4_wqbJ4VMB_E,1779
17
17
  tradedangerous/tradecalc.py,sha256=GyuYZNuyfn0y0eoPbxDEI7dxTJtHmRXAKFMpiR0Z1iU,42004
18
- tradedangerous/tradedb.py,sha256=6JzoSAiW1WJjkWxeGMJkN63GHTBp9RA0dkfRviuEHeM,73515
18
+ tradedangerous/tradedb.py,sha256=RD8oOwoGqjVRThGPrWVXhf_Hklp4SqQ08awh-cYbDdY,73457
19
19
  tradedangerous/tradeenv.py,sha256=8uByR4m-xzZxNSBLQnDHS4uJMPohf3mP5R_rsXnhmds,7887
20
20
  tradedangerous/tradeexcept.py,sha256=aZ-Y31MbkjF7lmAzBAbaMsPPE7FEEfuf4gaX2GvriDk,368
21
21
  tradedangerous/tradegui.py,sha256=JbGFnsWupgesk6hrcUgKSdD9NNDyo0U9gh6m3DccAwU,782
22
22
  tradedangerous/transfers.py,sha256=NmXXk2aF88YkAvYqc9Syt_aO6d2jJjC-OxoRFoOyQH4,9923
23
23
  tradedangerous/utils.py,sha256=PUPvAEqUyxYGqqQa0b_yfLAvq8YVUxK6HfdS-CxM-Lo,5186
24
- tradedangerous/version.py,sha256=Vx8jvW69Uth2ThVn-QdqSmgGU532csejhFasrFT13nE,648
24
+ tradedangerous/version.py,sha256=roMp1D3OOt2HhAHHANGah0MJB-QcbbxZ-h7gHE0RKf0,648
25
25
  tradedangerous/commands/TEMPLATE.py,sha256=7oXL124aqxGHwnb0h9yRylUiwc6M5QrRrGVrubwI1gg,2124
26
26
  tradedangerous/commands/__init__.py,sha256=6B0WuqkFBOll5Hj67yKDAnhmyr5ZAnHc6nzUNEUh384,9640
27
27
  tradedangerous/commands/buildcache_cmd.py,sha256=oJvP06fA8svnHrfrpWkHKR16cba8GIhHdMOyZqds18Y,2332
@@ -62,18 +62,18 @@ tradedangerous/misc/progress.py,sha256=-_V7E51sIYUtSxeeA1cphBEW4A_OBuH1guEDxUjik
62
62
  tradedangerous/plugins/__init__.py,sha256=zCEVbTem1CAM1cOV9r96H3ikjqza3dd-XoaubE5_xkc,7868
63
63
  tradedangerous/plugins/edapi_plug.py,sha256=IQxfDGM9IqwuJbDZRL8RFIyGMWzd0YmeJVWUVPDA3Ik,42275
64
64
  tradedangerous/plugins/edcd_plug.py,sha256=ZPtRzLhcQZEiwEo3AoPyk3Uy4UmRLM6gv2Qi1s7K_Vs,14469
65
- tradedangerous/plugins/eddblink_plug.py,sha256=BCA87Y8dZdzWUae2dc46hqISy6GN3uDwnwagXcCZhmI,22362
65
+ tradedangerous/plugins/eddblink_plug.py,sha256=KpRNjQugXWyA9oFuGzcB2Z0GFqzQRtANig2ksB9NheA,21192
66
66
  tradedangerous/plugins/edmc_batch_plug.py,sha256=3Ptr-SZqaZFR8ViIIrp9Ak7rvfU3zl11AZYBhIceN7s,4224
67
67
  tradedangerous/plugins/journal_plug.py,sha256=K1oIeI7E3mb04fvYLXyoAh7fOTyM9NBelibTI88MIDQ,23696
68
68
  tradedangerous/plugins/netlog_plug.py,sha256=Gw_HSZWpN17D--OIYEM3Vo8y9SvDOv9UwAUfY24kz28,13460
69
- tradedangerous/plugins/spansh_plug.py,sha256=WOpTm9C4oXPfFeEaSQtcu82OqX1uAuNGNy8wVY4iw_I,21792
69
+ tradedangerous/plugins/spansh_plug.py,sha256=RXzLWHyVvRxmlzQEz4rhaZdtcEt1k3mdykjTphI9jdc,17088
70
70
  tradedangerous/templates/Added.csv,sha256=8o54civQCcS9y7_DBo0GX196XWRbbREQqKDYTKibsgQ,649
71
71
  tradedangerous/templates/Category.csv,sha256=8xwUDcBZE25T6x6dZGlRUMTCqeDLt3a9LXU5h6hRHV8,250
72
72
  tradedangerous/templates/RareItem.csv,sha256=F1RhRnTD82PiwrVUO-ai2ErGH2PTqNnQaDw5mcgljXs,10483
73
73
  tradedangerous/templates/TradeDangerous.sql,sha256=1EiJ7cNJQKvdW4X-LQAHw3Y1POc0roKf25LJJy6jGlo,8135
74
- tradedangerous-10.16.12.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
75
- tradedangerous-10.16.12.dist-info/METADATA,sha256=Hl6Op6z_O5_VrFO59KIsDHSzkklQwcePOEa_DJTdzOQ,4442
76
- tradedangerous-10.16.12.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
77
- tradedangerous-10.16.12.dist-info/entry_points.txt,sha256=pSwa-q0ob443uiKux7xFKYQl8uen66iDTnjdrQhNLx8,92
78
- tradedangerous-10.16.12.dist-info/top_level.txt,sha256=bF29i-oEltmNICgElEKxNsg83oahJvxg3a7YrxZi9Rk,15
79
- tradedangerous-10.16.12.dist-info/RECORD,,
74
+ tradedangerous-10.16.14.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
75
+ tradedangerous-10.16.14.dist-info/METADATA,sha256=r0hvIz_n-XPZpEJDyBB0VFb82_Qrz_2-4_3TLFccQco,4442
76
+ tradedangerous-10.16.14.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
77
+ tradedangerous-10.16.14.dist-info/entry_points.txt,sha256=pSwa-q0ob443uiKux7xFKYQl8uen66iDTnjdrQhNLx8,92
78
+ tradedangerous-10.16.14.dist-info/top_level.txt,sha256=bF29i-oEltmNICgElEKxNsg83oahJvxg3a7YrxZi9Rk,15
79
+ tradedangerous-10.16.14.dist-info/RECORD,,