tradedangerous 11.4.0__py3-none-any.whl → 11.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tradedangerous might be problematic.

tradedangerous/cache.py CHANGED
@@ -1005,7 +1005,7 @@ def buildCache(tdb, tdenv):
  importName
  )
  prog.increment(1)
-
+
  with prog.sub_task(description="Save DB"):
  tempDB.commit()
 
tradedangerous/commands/buy_cmd.py CHANGED
@@ -9,7 +9,7 @@ from .parsing import (
  NoPlanetSwitch, OdysseyArgument, PadSizeArgument, ParseArgument, PlanetaryArgument,
  )
 
-
+ # TODO: Add UPGRADE_MODE
  ITEM_MODE = "Item"
  SHIP_MODE = "Ship"
 
tradedangerous/commands/commandenv.py CHANGED
@@ -70,6 +70,13 @@ class CommandEnv(TradeEnv):
  the properties we have are valid.
  """
  self.tdb = tdb
+ db_change = pathlib.Path(self.tdb.templatePath, 'database_changes.json')
+ if pathlib.Path.exists(db_change):
+ import ijson
+ with open(db_change) as file:
+ for change in ijson.items(file, 'item'):
+ self.tdb.getDB().execute(change)
+ db_change.unlink()
 
  self.checkMFD()
  self.checkFromToNear()
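The new block above acts as a run-once migration hook: CommandEnv reads a JSON array of SQL statements from database_changes.json in the template directory (the file added later in this diff), executes each statement against the open database, and then deletes the file. A minimal standalone sketch of the same idea, using the standard-library json module instead of ijson (the function name and the explicit commit are illustrative, not part of the package):

    import json
    import pathlib
    import sqlite3

    def apply_database_changes(db: sqlite3.Connection, template_dir: pathlib.Path) -> None:
        """Apply one-off schema changes listed in database_changes.json, then delete the file."""
        change_file = template_dir / "database_changes.json"
        if not change_file.exists():
            return
        with change_file.open(encoding="utf8") as fh:
            statements = json.load(fh)   # a JSON array of SQL strings
        for statement in statements:
            db.execute(statement)        # e.g. "DROP TABLE Upgrade;" followed by its replacement CREATE TABLE
        db.commit()
        change_file.unlink()             # remove the file so the migration is not applied twice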
tradedangerous/commands/exceptions.py CHANGED
@@ -7,7 +7,7 @@ from ..tradeexcept import TradeException
  class UsageError(TradeException):
  def __init__(self, title, usage):
  self.title, self.usage = title, usage
-
+
  def __str__(self):
  return self.title + "\n\n" + self.usage
 
@@ -20,7 +20,7 @@ class CommandLineError(TradeException):
  """
  def __init__(self, errorStr, usage=None):
  self.errorStr, self.usage = errorStr, usage
-
+
  def __str__(self):
  if self.usage:
  return "ERROR: {}\n\n{}".format(self.errorStr, self.usage)
@@ -36,7 +36,7 @@ class NoDataError(TradeException):
  """
  def __init__(self, errorStr):
  self.errorStr = errorStr
-
+
  def __str__(self):
  return f"""Error: {self.errorStr}
  Possible causes:
tradedangerous/commands/parsing.py CHANGED
@@ -28,7 +28,7 @@ class CreditParser(int):
  'k' for thousands, 'm' for millions and 'b' for billions.
  """
  suffixes = {'k': 10**3, 'm': 10**6, 'b': 10**9}
-
+
  def __new__(cls, val, **kwargs):
  if isinstance(val, str):
  if val[-1] in CreditParser.suffixes:
tradedangerous/commands/shipvendor_cmd.py CHANGED
@@ -208,7 +208,7 @@ def run(results, cmdenv, tdb):
  for ship in ships.values():
  if action(tdb, cmdenv, station, ship):
  dataToExport = True
-
+
  cmdenv.DEBUG0("dataToExport = {}", dataToExport)
 
  maybeExportToCSV(tdb, cmdenv)
tradedangerous/fs.py CHANGED
@@ -35,7 +35,7 @@ def copy_if_newer(src, dst):
  dstPath = pathify(dst)
  if dstPath.exists() and dstPath.stat().st_mtime >= srcPath.stat().st_mtime:
  return srcPath
-
+
  shcopy(str(srcPath), str(dstPath))
  return dstPath
 
@@ -105,29 +105,29 @@ def file_line_count(from_file: PathLike, buf_size: int = 128 * 1024, *, missing_
  """ counts the number of newline characters in a given file. """
  if not isinstance(from_file, Path):
  from_file = Path(from_file)
-
+
  if missing_ok and not from_file.exists():
  return 0
-
+
  # Pre-allocate a buffer so that we aren't putting pressure on the garbage collector.
  buf = bytearray(buf_size)
-
+
  # Capture it's counting method, so we don't have to keep looking that up on
  # large files.
  counter = buf.count
-
+
  total = 0
  with from_file.open("rb") as fh:
  # Capture the 'readinto' method to avoid lookups.
  reader = fh.readinto
-
+
  # read into the buffer and capture the number of bytes fetched,
  # which will be 'size' until the last read from the file.
  read = reader(buf)
  while read == buf_size: # nominal case for large files
  total += counter(b'\n')
  read = reader(buf)
-
+
  # when 0 <= read < buf_size we're on the last page of the
  # file, so we need to take a slice of the buffer, which creates
  # a new object, thus we also have to lookup count. it's trivial
tradedangerous/jsonprices.py CHANGED
@@ -118,7 +118,7 @@ def load_prices_json(
  blackMarket = '?'
  except KeyError:
  blackMarket = '?'
-
+
  try:
  maxPadSize = stnData['mps'].upper()
  if maxPadSize not in ['S', 'M', 'L']:
tradedangerous/misc/progress.py CHANGED
@@ -73,10 +73,10 @@ class Progress:
  self.show = bool(show)
  if not show:
  return
-
+
  if style is None:
  style = DefaultBar
-
+
  self.max_value = 0 if max_value is None else max(max_value, start)
  self.value = start
  self.prefix = prefix or ""
@@ -90,23 +90,23 @@ class Progress:
  # Hide it once it's finished, update it for us, 4x a second
  transient=True, auto_refresh=True, refresh_per_second=5
  )
-
+
  # Now we add an actual task to track progress on.
  self.task = self.progress.add_task("Working...", total=max_value, start=True)
  if self.value:
  self.progress.update(self.task, advance=self.value)
-
+
  # And show the task tracker.
  self.progress.start()
-
+
  def __enter__(self):
  """ Context manager.
-
+
  Example use:
-
+
  import time
  import tradedangerous.progress
-
+
  # Progress(max_value=100, width=32, style=progress.CountingBar)
  with progress.Progress(100, 32, style=progress.CountingBar) as prog:
  for i in range(100):
@@ -114,10 +114,10 @@ class Progress:
  time.sleep(3)
  """
  return self
-
+
  def __exit__(self, *args, **kwargs):
  self.clear()
-
+
  def increment(self, value: Optional[float] = None, description: Optional[str] = None, *, progress: Optional[float] = None) -> None:
  """
  Increase the progress of the bar by a given amount.
@@ -139,30 +139,30 @@ class Progress:
  elif value:
  self.value += value # Update our internal count
  bump = True
-
+
  if self.value >= self.max_value: # Did we go past the end? Increase the end.
  self.max_value += value * 2
  self.progress.update(self.task, description=self.prefix, total=self.max_value)
  bump = True
-
+
  if bump and self.max_value > 0:
  self.progress.update(self.task, description=self.prefix, completed=self.value)
-
+
  def clear(self) -> None:
  """ Remove the current progress bar, if any. """
  # These two shouldn't happen separately, but incase someone tinkers, test each
  # separately and shut them down.
  if not self.show:
  return
-
+
  if self.task:
  self.progress.remove_task(self.task)
  self.task = None
-
+
  if self.progress:
  self.progress.stop()
  self.progress = None
-
+
  @contextmanager
  def sub_task(self, description: str, max_value: Optional[int] = None, width: int = 25):
  if not self.show:
@@ -173,7 +173,7 @@ class Progress:
  yield task
  finally:
  self.progress.remove_task(task)
-
+
  def update_task(self, task: TaskID, advance: Union[float, int], description: Optional[str] = None):
  if self.show:
  self.progress.update(task, advance=advance, description=description)
tradedangerous/plugins/eddblink_plug.py CHANGED
@@ -45,7 +45,7 @@ def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
  else:
  tdenv.DEBUG0("Listings file is empty, nothing to do.")
  return 0
-
+
  return count + 1 # kfsone: Doesn't the header already make this + 1?
 
 
@@ -94,7 +94,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  def __init__(self, tdb, tdenv):
  super().__init__(tdb, tdenv)
 
- self.dataPath = Path(os.environ.get('TD_EDDB')) if os.environ.get('TD_EDDB') else self.tdb.dataPath / Path("eddb")
+ self.dataPath = os.environ.get('TD_EDDB') or self.tdenv.tmpDir
  self.categoriesPath = Path("Category.csv")
  self.commoditiesPath = Path("Item.csv")
  self.rareItemPath = Path("RareItem.csv")
@@ -166,13 +166,13 @@ class ImportPlugin(plugins.ImportPluginBase):
  """
  db = self.tdb.getDB()
  self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
-
+
  db.execute("""
  DELETE FROM System
  WHERE NOT EXISTS(SELECT 1 FROM Station WHERE Station.system_id = System.system_id)
  """)
  db.commit()
-
+
  self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
 
  def importListings(self, listings_file):
@@ -188,9 +188,9 @@ class ImportPlugin(plugins.ImportPluginBase):
  if not total:
  self.tdenv.NOTE("No listings")
  return
-
+
  self.tdenv.NOTE("Processing market data from {}: Start time = {}. Live = {}", listings_file, self.now(), from_live)
-
+
  db = self.tdb.getDB()
  stmt_unliven_station = """UPDATE StationItem SET from_live = 0 WHERE station_id = ?"""
  stmt_flush_station = """DELETE from StationItem WHERE station_id = ?"""
@@ -253,7 +253,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  transaction_items += 1
  skip_station = True
  continue
-
+
  # Unless the import file data is newer, nothing else needs to be done for this station,
  # so the rest of the listings for this station can be skipped.
  if listing_time <= last_modified:
@@ -276,7 +276,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  # listings.csv includes rare items, which we are ignoring.
  if item_id not in item_lookup:
  continue
-
+
  demand_price = int(listing['sell_price'])
  demand_units = int(listing['demand'])
  demand_level = int(listing.get('demand_bracket') or '-1')
@@ -292,16 +292,16 @@ class ImportPlugin(plugins.ImportPluginBase):
  supply_price, supply_units, supply_level,
  ))
  transaction_items += 1
-
+
  # These will take a little while, which has four steps, so we'll make it a counter.
  with pbar.Progress(1, 40, prefix="Saving"):
  # Do a final commit to be sure
  cursor.execute("COMMIT")
-
+
  if self.getOption("optimize"):
  with pbar.Progress(1, 40, prefix="Optimizing"):
  db.execute("VACUUM")
-
+
  self.tdb.close()
 
  self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
@@ -370,9 +370,9 @@ class ImportPlugin(plugins.ImportPluginBase):
  if rib_path.exists():
  rib_path.unlink()
  ri_path.rename(rib_path)
-
+
  self.tdb.close()
-
+
  self.tdb.reloadCache()
  self.tdb.close()
 
@@ -432,7 +432,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  if self.downloadFile(self.upgradesPath) or self.getOption("force"):
  transfers.download(self.tdenv, self.urlOutfitting, self.FDevOutfittingPath)
  buildCache = True
-
+
  if self.getOption("ship"):
  if self.downloadFile(self.shipPath) or self.getOption("force"):
  transfers.download(self.tdenv, self.urlShipyard, self.FDevShipyardPath)
@@ -468,7 +468,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.tdb.close()
  self.tdb.reloadCache()
  self.tdb.close()
-
+
  if self.getOption("purge"):
  self.purgeSystems()
  self.tdb.close()
@@ -478,7 +478,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.importListings(self.listingsPath)
  if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
  self.importListings(self.liveListingsPath)
-
+
  if self.getOption("listings"):
  self.tdenv.NOTE("Regenerating .prices file.")
  cache.regeneratePricesFile(self.tdb, self.tdenv)
tradedangerous/plugins/spansh_plug.py CHANGED
@@ -39,10 +39,9 @@ STATION_TYPE_MAP = {
  'Mega ship': [13, False],
  'Asteroid base': [14, False],
  'Drake-Class Carrier': [24, False], # fleet carriers
- 'Settlement': [25, True], # odyssey settlements
+ 'Settlement': [25, True], # odyssey settlements
  }
 
-
  if dataclass:
  # Dataclass with slots is considerably cheaper and faster than namedtuple
  # but is only reliably introduced in 3.10+
@@ -72,8 +71,22 @@ if dataclass:
  planetary: str # should be Optional[bool]
  type: int # station type
  modified: float
-
-
+
+ @dataclass(slots=True)
+ class Ship:
+ id: int
+ name: str
+ modified: float
+
+ @dataclass(slots=True)
+ class Module:
+ id: int
+ name: str
+ cls: int
+ rating: str
+ ship: str
+ modified: float
+
  @dataclass(slots=True)
  class Commodity:
  id: int
@@ -90,6 +103,8 @@ else:
  Station = namedtuple('Station',
  'id,system_id,name,distance,max_pad_size,'
  'market,black_market,shipyard,outfitting,rearm,refuel,repair,planetary,type,modified')
+ Ship = namedtuple('Ship', 'id,name,modified')
+ Module = namedtuple('Module', 'id,name,cls,rating,ship,modified')
  Commodity = namedtuple('Commodity', 'id,name,category,demand,supply,sell,buy,modified')
 
 
@@ -110,7 +125,7 @@ class Timing:
 
  @property
  def elapsed(self) -> Optional[float]:
- """ If the timing has finish, calculates the elapsed time. """
+ """ If the timing has finished, calculates the elapsed time. """
  if self.start_ts is None:
  return None
  return (self.end_ts or time.perf_counter()) - self.start_ts
@@ -175,7 +190,7 @@ class Progresser:
  self.progress.update(task, advance=advance, description=description)
 
 
- def get_timings(started: float, system_count: int, total_station_count: int, *, min_count: int = 100) -> str:
+ def get_timings(started: float, system_count: int, total_station_count: int, *, min_count: int = 100) -> tuple[float, str]:
  """ describes how long it is taking to process each system and station """
  elapsed = time.time() - started
  timings = "sys="
@@ -198,7 +213,7 @@ class ImportPlugin(plugins.ImportPluginBase):
  """
 
  pluginOptions = {
- 'url': f'URL to download galaxy data from (defaults to {SOURCE_URL})',
+ 'url': f'URL to download galaxy data from (defaults to {SOURCE_URL})',
  'file': 'Local filename to import galaxy data from; use "-" to load from stdin',
  'maxage': 'Skip all entries older than specified age in days, ex.: maxage=1.5',
  }
@@ -231,6 +246,8 @@ class ImportPlugin(plugins.ImportPluginBase):
 
  self.known_systems = self.load_known_systems()
  self.known_stations = self.load_known_stations()
+ self.known_ships = self.load_known_ships()
+ self.known_modules = self.load_known_modules()
  self.known_commodities = self.load_known_commodities()
 
  def print(self, *args, **kwargs) -> None:
@@ -253,14 +270,15 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.commit_limit = self.commit_rate
  self.need_commit = False
 
- def run(self) -> bool:
+ def run(self):
  if not self.tdenv.detail:
  self.print('This will take at least several minutes...')
  self.print('You can increase verbosity (-v) to get a sense of progress')
 
  theme = self.tdenv.theme
  BOLD, CLOSE, DIM, ITALIC = theme.bold, theme.CLOSE, theme.dim, theme.italic # pylint: disable=invalid-name
-
+ # TODO: don't download file if local copy is not older
+ # see eddblink_plug.download_file()
  if not self.file:
  url = self.url or SOURCE_URL
  self.print(f'Downloading prices from remote URL: {url}')
@@ -268,11 +286,28 @@ class ImportPlugin(plugins.ImportPluginBase):
  transfers.download(self.tdenv, url, self.file)
  self.print(f'Download complete, saved to local file: "{self.file}"')
 
-
  sys_desc = f"Importing {ITALIC}spansh{CLOSE} data"
- with Timing() as timing, Progresser(self.tdenv, sys_desc, total=len(self.known_systems)) as progress:
+
+ # TODO: find a better way to get the total number of systems
+ # A bad way to do it:
+ total_systems = 0
+ if self.tdenv.detail:
+ print('Counting total number of systems...')
+ with open(self.file, 'r', encoding='utf8') as stream:
+ for system_data in ijson.items(stream, 'item', use_float=True):
+ total_systems += 1
+ if (not total_systems % 250) and self.tdenv.detail:
+ print(f'Total systems: {total_systems}', end='\r')
+
+ if self.tdenv.detail:
+ print(f'Total systems: {total_systems}')
+
+ with Timing() as timing, Progresser(self.tdenv, sys_desc, total=total_systems) as progress:
+ # with Timing() as timing, Progresser(self.tdenv, sys_desc, total=len(self.known_stations)) as progress:
  system_count = 0
  total_station_count = 0
+ total_ship_count = 0
+ total_module_count = 0
  total_commodity_count = 0
 
  age_cutoff = timedelta(days=self.maxage) if self.maxage else None
@@ -285,14 +320,16 @@ class ImportPlugin(plugins.ImportPluginBase):
  elapsed, averages = get_timings(started, system_count, total_station_count)
  label = f"{ITALIC}#{system_count:<5d}{CLOSE} {BOLD}{upper_sys:30s}{CLOSE} {DIM}({elapsed:.2f}s, avgs: {averages}){CLOSE}"
  stations = list(station_iter)
- with progress.task(label, total=len(stations)) as sys_task:
+ with progress.task(label, total=len(stations)) as sta_task:
  if system.id not in self.known_systems:
  self.ensure_system(system, upper_sys)
 
  station_count = 0
+ ship_count = 0
+ module_count = 0
  commodity_count = 0
 
- for station, commodities in stations:
+ for station, ships, modules, commodities in stations:
  fq_station_name = f'@{upper_sys}/{station.name}'
 
  station_info = self.known_stations.get(station.id)
@@ -303,8 +340,73 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.execute("UPDATE Station SET system_id = ? WHERE station_id = ?", station.system_id, station.id, commitable=True)
  self.known_stations[station.id] = (station.name, station.system_id, station.modified)
 
- items = []
- db_times = dict(self.execute("SELECT item_id, modified FROM StationItem WHERE station_id = ?", station.id))
+ # Ships
+ ship_entries = []
+ db_ship_times = dict(self.execute("SELECT ship_id, modified FROM ShipVendor WHERE station_id = ?", station.id))
+
+ for ship in ships:
+ if ship.id not in self.known_ships:
+ ship = self.ensure_ship(ship)
+
+ # We're concerned with the ship age, not the station age,
+ # as they each have their own 'modified' times.
+ if age_cutoff and (now - ship.modified) > age_cutoff:
+ if self.tdenv.detail:
+ self.print(f' | {fq_station_name:50s} | Skipping shipyard due to age: {now - ship.modified}, ts: {ship.modified}')
+ break
+ db_modified = db_ship_times.get(ship.id)
+ modified = parse_ts(db_modified) if db_modified else None
+ if modified and ship.modified <= modified:
+ # All ships in a station will have the same modified time,
+ # so no need to check the rest if the first is older.
+ if self.tdenv.detail > 2:
+ self.print(f' | {fq_station_name:50s} | Skipping older shipyard data')
+ break
+
+ ship_entries.append((ship.id, station.id, ship.modified))
+ if ship_entries:
+ self.executemany("""INSERT OR REPLACE INTO ShipVendor (
+ ship_id, station_id, modified
+ ) VALUES (
+ ?, ?, IFNULL(?, CURRENT_TIMESTAMP)
+ )""", ship_entries, commitable=True)
+ ship_count += len(ship_entries)
+
+ # Upgrades
+ module_entries = []
+ db_module_times = dict(self.execute("SELECT upgrade_id, modified FROM UpgradeVendor WHERE station_id = ?", station.id))
+
+ for module in modules:
+ if module.id not in self.known_modules:
+ module = self.ensure_module(module)
+
+ # We're concerned with the outfitting age, not the station age,
+ # as they each have their own 'modified' times.
+ if age_cutoff and (now - module.modified) > age_cutoff:
+ if self.tdenv.detail:
+ self.print(f' | {fq_station_name:50s} | Skipping outfitting due to age: {now - station.modified}, ts: {station.modified}')
+ break
+ db_modified = db_module_times.get(module.id)
+ modified = parse_ts(db_modified) if db_modified else None
+ if modified and module.modified <= modified:
+ # All modules in a station will have the same modified time,
+ # so no need to check the rest if the fist is older.
+ if self.tdenv.detail > 2:
+ self.print(f' | {fq_station_name:50s} | Skipping older outfitting data')
+ break
+
+ module_entries.append((module.id, station.id, module.modified))
+ if module_entries:
+ self.executemany("""INSERT OR REPLACE INTO UpgradeVendor (
+ upgrade_id, station_id, modified
+ ) VALUES (
+ ?, ?, IFNULL(?, CURRENT_TIMESTAMP)
+ )""", module_entries, commitable=True)
+ module_count += len(module_entries)
+
+ # Items
+ commodity_entries = []
+ db_commodity_times = dict(self.execute("SELECT item_id, modified FROM StationItem WHERE station_id = ?", station.id))
 
  for commodity in commodities:
  if commodity.id not in self.known_commodities:
@@ -314,21 +416,21 @@ class ImportPlugin(plugins.ImportPluginBase):
  # as they each have their own 'modified' times.
  if age_cutoff and (now - commodity.modified) > age_cutoff:
  if self.tdenv.detail:
- self.print(f' | {fq_station_name:50s} | Skipping station due to age: {now - station.modified}, ts: {station.modified}')
+ self.print(f' | {fq_station_name:50s} | Skipping market due to age: {now - station.modified}, ts: {station.modified}')
  break
 
- db_modified = db_times.get(commodity.id)
+ db_modified = db_commodity_times.get(commodity.id)
  modified = parse_ts(db_modified) if db_modified else None
  if modified and commodity.modified <= modified:
  # All commodities in a station will have the same modified time,
  # so no need to check the rest if the fist is older.
  if self.tdenv.detail > 2:
- self.print(f' | {fq_station_name:50s} | Skipping older commodity data')
+ self.print(f' | {fq_station_name:50s} | Skipping older market data')
  break
- items.append((station.id, commodity.id, commodity.modified,
- commodity.sell, commodity.demand, -1,
- commodity.buy, commodity.supply, -1, 0))
- if items:
+ commodity_entries.append((station.id, commodity.id, commodity.modified,
+ commodity.sell, commodity.demand, -1,
+ commodity.buy, commodity.supply, -1, 0))
+ if commodity_entries:
  self.executemany("""INSERT OR REPLACE INTO StationItem (
  station_id, item_id, modified,
  demand_price, demand_units, demand_level,
@@ -337,68 +439,66 @@ class ImportPlugin(plugins.ImportPluginBase):
  ?, ?, IFNULL(?, CURRENT_TIMESTAMP),
  ?, ?, ?,
  ?, ?, ?, ?
- )""", items, commitable=True)
- commodity_count += len(items)
+ )""", commodity_entries, commitable=True)
+ commodity_count += len(commodity_entries)
  # Good time to save data and try to keep the transaction small
  self.commit()
 
- if commodity_count:
+ if commodity_count or ship_count or module_count:
  station_count += 1
- progress.bump(sys_task)
-
- if station_count:
- system_count += 1
- total_station_count += station_count
- total_commodity_count += commodity_count
- if self.tdenv.detail:
- self.print(
- f'{system_count:6d} | {upper_sys:50s} | '
- f'{station_count:3d} st {commodity_count:6d} co'
- )
- self.commit()
-
- if system_count % 25 == 1:
- avg_stations = total_station_count / (system_count or 1)
- progress.update(f"{sys_desc}{DIM} ({total_station_count}:station:, {avg_stations:.1f}per:glowing_star:){CLOSE}")
+ progress.bump(sta_task)
+
+ system_count += 1
+ if station_count:
+ total_station_count += station_count
+ total_ship_count += ship_count
+ total_module_count += module_count
+ total_commodity_count += commodity_count
+ if self.tdenv.detail:
+ self.print(
+ f'{system_count:6d} | {upper_sys:50s} | '
+ f'{station_count:3d} st {commodity_count:5d} co '
+ f'{ship_count:4d} sh {module_count:4d} mo'
+ )
+ self.commit()
+
+ if not system_count % 25:
+ avg_stations = total_station_count / (system_count or 1)
+ progress.update(f"{sys_desc}{DIM} ({total_station_count}:station:, {system_count}:glowing_star:, {avg_stations:.1f}:station:/:glowing_star:){CLOSE}")
 
  self.commit()
  self.tdb.close()
-
- # Need to make sure cached tables are updated
- for table in ("Item", "Station", "System", "StationItem"):
- # _, path =
- csvexport.exportTableToFile(self.tdb, self.tdenv, table)
-
 
  self.print(
  f'{timedelta(seconds=int(timing.elapsed))!s} Done '
- f'{total_station_count} st {total_commodity_count} co'
+ f'{total_station_count} st {total_commodity_count} co '
+ f'{total_ship_count} sh {total_module_count} mo'
  )
 
  with Timing() as timing:
+ # Need to make sure cached tables are updated
  self.print('Exporting to cache...')
+ for table in ("Item", "Station", "System", "StationItem", "Ship", "ShipVendor", "Upgrade", "UpgradeVendor"):
+ self.print(f'Exporting {table}.csv ', end='\r')
+ csvexport.exportTableToFile(self.tdb, self.tdenv, table)
+ self.print('Exporting TradeDangerous.prices', end='\r')
  cache.regeneratePricesFile(self.tdb, self.tdenv)
  self.print(f'Cache export completed in {timedelta(seconds=int(timing.elapsed))!s}')
 
  return False
 
  def data_stream(self):
+ stream = None
  if self.file == '-':
- self.print('Reading prices from stdin')
+ self.print('Reading data from stdin')
  stream = sys.stdin
  elif self.file:
- self.print(f'Reading prices from local file: "{self.file}"')
+ self.print(f'Reading data from local file: "{self.file}"')
  stream = open(self.file, 'r', encoding='utf8')
- return ingest_stream(stream)
-
- def categorise_commodities(self, commodities):
- categories = {}
- for commodity in commodities:
- categories.setdefault(commodity.category, []).append(commodity)
- return categories
+ return self.ingest_stream(stream)
 
  def execute(self, query: str, *params, commitable: bool = False) -> sqlite3.Cursor:
- """ helper method that performs retriable queries and marks the transaction as needing to commit
- if the query is commitable."""
+ """ helper method that performs retriable queries and marks the transaction
+ as needing to commit if the query is commitable."""
  if commitable:
  self.need_commit = True
  attempts = 5
@@ -443,8 +543,8 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.tdenv.DEBUG0(f"load_known_systems query raised {e}")
  return {}
 
- def load_known_stations(self) -> dict[int, tuple[str, int]]:
- """ Returns a dictionary of {station_id -> (station_name, system_id)} for all current stations in the database. """
+ def load_known_stations(self) -> dict[int, tuple[str, int, float]]:
+ """ Returns a dictionary of {station_id -> (station_name, system_id, modified)} for all current stations in the database. """
  try:
  return {cols[0]: (cols[1], cols[2], parse_ts(cols[3])) for cols in self.cursor.execute('SELECT station_id, name, system_id, modified FROM Station')}
  except Exception as e: # pylint: disable=broad-except
@@ -452,6 +552,24 @@ class ImportPlugin(plugins.ImportPluginBase):
  self.tdenv.DEBUG0(f"load_known_stations query raised {e}")
  return {}
 
+ def load_known_ships(self):
+ """ Returns a dictionary of {ship_id -> name} for all current ships in the database. """
+ try:
+ return dict(self.cursor.execute('SELECT ship_id, name FROM Ship'))
+ except Exception as e: # pylint: disable=broad-except
+ self.print("[purple]:thinking_face:Assuming no ship data yet")
+ self.tdenv.DEBUG0(f"load_known_ships query raised {e}")
+ return {}
+
+ def load_known_modules(self):
+ """ Returns a dictionary of {upgrade_id -> name} for all current modules in the database. """
+ try:
+ return dict(self.cursor.execute('SELECT upgrade_id, name FROM Upgrade'))
+ except Exception as e: # pylint: disable=broad-except
+ self.print("[purple]:thinking_face:Assuming no module data yet")
+ self.tdenv.DEBUG0(f"load_known_modules query raised {e}")
+ return {}
+
  def load_known_commodities(self):
  """ Returns a dictionary of {fdev_id -> name} for all current commodities in the database. """
  try:
@@ -516,9 +634,38 @@ class ImportPlugin(plugins.ImportPluginBase):
  )
  note = "Updated" if self.known_stations.get(station.id) else "Added"
  if self.tdenv.detail > 1:
- self.print(f' | {station.name:50s} | {note} station')
+ system_name = self.known_systems[station.system_id]
+ upper_sys = system_name.upper()
+ fq_station_name = f'@{upper_sys}/{station.name}'
+ self.print(f' | {fq_station_name:50s} | {note} station')
  self.known_stations[station.id] = (station.name, station.system_id, station.modified)
 
+ def ensure_ship(self, ship: Ship):
+ """ Adds a record for a ship, and registers the ship in the known_ships dict. """
+ self.execute(
+ '''
+ INSERT INTO Ship (ship_id, name) VALUES (?, ?)
+ ''',
+ ship.id, ship.name,
+ commitable=True,
+ )
+ self.known_ships[ship.id] = ship.name
+
+ return ship
+
+ def ensure_module(self, module: Module):
+ """ Adds a record for a module, and registers the module in the known_modules dict. """
+ self.execute(
+ '''
+ INSERT INTO Upgrade (upgrade_id, name, class, rating, ship) VALUES (?, ?, ?, ?, ?)
+ ''',
+ module.id, module.name, module.cls, module.rating, module.ship,
+ commitable=True,
+ )
+ self.known_modules[module.id] = module.name
+
+ return module
+
  def ensure_commodity(self, commodity: Commodity):
  """ Adds a record for a commodity and registers the commodity in the known_commodities dict. """
  self.execute(
@@ -566,23 +713,28 @@ class ImportPlugin(plugins.ImportPluginBase):
  def bool_yn(self, value: Optional[bool]) -> str:
  """ translates a ternary (none, true, false) into the ?/Y/N representation """
  return '?' if value is None else ('Y' if value else 'N')
-
-
- def ingest_stream(stream):
- """Ingest a spansh-style galaxy dump, yielding system-level data."""
- for system_data in ijson.items(stream, 'item', use_float=True):
- coords = system_data.get('coords', {})
- yield (
- System(
- id=system_data.get('id64'),
- name=system_data.get('name', 'Unnamed').strip(),
- pos_x=coords.get('x', 999999),
- pos_y=coords.get('y', 999999),
- pos_z=coords.get('z', 999999),
- modified=parse_ts(system_data.get('date')),
- ),
- ingest_stations(system_data),
- )
+
+ def ingest_stream(self, stream):
+ """Ingest a spansh-style galaxy dump, yielding system-level data."""
+ for system_data in ijson.items(stream, 'item', use_float=True):
+ if "Shinrarta Dezhra" in system_data.get('name') and self.tdenv.debug:
+ with open(Path(self.tdenv.tmpDir, "shin_dez.json"), 'w') as file:
+ # file.write(system_data)
+ import json
+ json.dump(system_data, file, indent=4)
+
+ coords = system_data.get('coords', {})
+ yield (
+ System(
+ id=system_data.get('id64'),
+ name=system_data.get('name', 'Unnamed').strip(),
+ pos_x=coords.get('x', 999999),
+ pos_y=coords.get('y', 999999),
+ pos_z=coords.get('z', 999999),
+ modified=parse_ts(system_data.get('date')),
+ ),
+ ingest_stations(system_data),
+ )
 
 
  def ingest_stations(system_data):
@@ -592,10 +744,16 @@ def ingest_stations(system_data):
  for target in targets:
  for station_data in target.get('stations', ()):
  services = set(station_data.get('services', ()))
- if 'Market' not in services:
- continue
- market = station_data.get('market', {})
- if not market.get('commodities'):
+ shipyard = None
+ if 'Shipyard' in services:
+ shipyard = station_data.get('shipyard', {})
+ outfitting = None
+ if 'Outfitting' in services:
+ outfitting = station_data.get('outfitting', {})
+ market = None
+ if 'Market' in services:
+ market = station_data.get('market', {})
+ if not shipyard and not outfitting and not market:
  continue
  landing_pads = station_data.get('landingPads', {})
  max_pad_size = '?'
@@ -613,7 +771,7 @@ def ingest_stations(system_data):
  name=station_data.get('name', 'Unnamed').strip(),
  distance=station_data.get('distanceToArrival', 999999),
  max_pad_size=max_pad_size,
- market=True,
+ market='Market' in services,
  black_market='Black Market' in services,
  shipyard='Shipyard' in services,
  outfitting='Outfitting' in services,
@@ -624,12 +782,40 @@ def ingest_stations(system_data):
  type=station_type[0] if station_type else 0,
  modified=parse_ts(station_data.get('updateTime')),
  ),
+ ingest_shipyard(shipyard),
+ ingest_outfitting(outfitting),
  ingest_market(market),
  )
 
+ def ingest_shipyard(shipyard):
+ """Ingest station-level market data, yielding commodities."""
+ if not shipyard or not shipyard.get('ships'):
+ return None
+ for ship in shipyard['ships']:
+ yield Ship(
+ id=ship.get('shipId'),
+ name=ship.get('name'),
+ modified=parse_ts(shipyard.get('updateTime'))
+ )
+
+ def ingest_outfitting(outfitting):
+ """Ingest station-level market data, yielding commodities."""
+ if not outfitting or not outfitting.get('modules'):
+ return None
+ for module in outfitting['modules']:
+ yield Module(
+ id=module.get('moduleId'),
+ name=module.get('name'),
+ cls=module.get('class'),
+ rating=module.get('rating'),
+ ship=module.get('ship'),
+ modified=parse_ts(outfitting.get('updateTime'))
+ )
 
  def ingest_market(market):
  """Ingest station-level market data, yielding commodities."""
+ if not market or not market.get('commodities'):
+ return None
  for commodity in market['commodities']:
  yield Commodity(
  id=commodity.get('commodityId'),
@@ -642,7 +828,6 @@ def ingest_market(market):
  modified=parse_ts(market.get('updateTime'))
  )
 
-
  def parse_ts(ts):
  if ts is None:
  return None
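For orientation, the keys read by ingest_stations, ingest_shipyard, ingest_outfitting and ingest_market imply a per-station record in the spansh galaxy dump shaped roughly as follows. This is only a sketch inferred from the fields accessed above; the values are made up, and fields this plugin does not read are omitted:

    # Illustrative only: shape inferred from the .get() calls in ingest_stations()
    # and the three ingest_* helpers above; all values are invented.
    station_data = {
        "name": "Example Dock",
        "distanceToArrival": 325.5,
        "updateTime": "2024-05-01 12:00:00+00",
        "services": ["Market", "Shipyard", "Outfitting"],
        "landingPads": {},  # pad details elided; used to derive max_pad_size
        "shipyard": {
            "updateTime": "2024-05-01 12:00:00+00",
            "ships": [{"shipId": 101, "name": "Sidewinder"}],
        },
        "outfitting": {
            "updateTime": "2024-05-01 12:00:00+00",
            "modules": [{"moduleId": 201, "name": "Pulse Laser", "class": 1, "rating": "F", "ship": None}],
        },
        "market": {
            "updateTime": "2024-05-01 12:00:00+00",
            "commodities": [{"commodityId": 301, "name": "Hydrogen Fuel"}],  # price/stock fields elided
        },
    }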
tradedangerous/submit-distances.py CHANGED
@@ -45,7 +45,7 @@ sys.stderr.write("*** WARNING: submit-distances.py is deprecated; if you rely on
  class UsageError(Exception):
  def __init__(self, argv, error):
  self.argv, self.error = argv, error
-
+
  def __str__(self):
  return error + "\n" + argv.format_usage()
 
tradedangerous/templates/TradeDangerous.sql CHANGED
@@ -102,7 +102,7 @@ CREATE TABLE Ship
  (
  ship_id INTEGER PRIMARY KEY,
  name VARCHAR(40) COLLATE nocase,
- cost INTEGER NOT NULL,
+ cost INTEGER,
 
  UNIQUE (ship_id)
  );
@@ -130,8 +130,9 @@ CREATE TABLE Upgrade
  (
  upgrade_id INTEGER PRIMARY KEY,
  name VARCHAR(40) COLLATE nocase,
- weight NUMBER NOT NULL,
- cost NUMBER NOT NULL,
+ class NUMBER NOT NULL,
+ rating CHAR(1) NOT NULL,
+ ship VARCHAR(40) COLLATE nocase,
 
  UNIQUE (upgrade_id)
  );
@@ -141,7 +142,6 @@ CREATE TABLE UpgradeVendor
  (
  upgrade_id INTEGER NOT NULL,
  station_id INTEGER NOT NULL,
- cost INTEGER,
  modified DATETIME NOT NULL,
 
  PRIMARY KEY (upgrade_id, station_id),
tradedangerous/templates/database_changes.json ADDED
@@ -0,0 +1,6 @@
+ [
+ "DROP TABLE Ship;",
+ "CREATE TABLE Ship (ship_id INTEGER PRIMARY KEY, name VARCHAR(40) COLLATE nocase, cost INTEGER, UNIQUE (ship_id));",
+ "DROP TABLE Upgrade;",
+ "CREATE TABLE Upgrade(upgrade_id INTEGER PRIMARY KEY, name VARCHAR(40) COLLATE nocase, class NUMBER NOT NULL, rating CHAR(1) NOT NULL, ship VARCHAR(40) COLLATE nocase, UNIQUE (upgrade_id));"
+ ]
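Note: these statements mirror the revised Ship and Upgrade definitions in TradeDangerous.sql above. The new CommandEnv block earlier in this diff executes them once against an existing database and then deletes this file, so already-installed databases pick up the new Ship and Upgrade schema without waiting for a full rebuild.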
tradedangerous/tradecalc.py CHANGED
@@ -1096,7 +1096,7 @@ class TradeCalc:
  bestToDest[dstID] = (
  dstStation, route, trade, dest.via, dest.distLy, score
  )
-
+
  if connections == 0:
  raise NoHopsError(
  "No destinations could be reached within the constraints."
tradedangerous/tradeenv.py CHANGED
@@ -271,7 +271,7 @@ class TradeEnv(Utf8SafeConsoleIOMixin):
  return noteFn
 
  return None
-
+
  def remove_file(self, *args) -> bool:
  """ Unlinks a file, as long as it exists, and logs the action at level 1. """
  path = Path(*args)
@@ -280,7 +280,7 @@ class TradeEnv(Utf8SafeConsoleIOMixin):
  path.unlink()
  self.DEBUG1(":cross_mark: deleted {}", path)
  return True
-
+
  def rename_file(self, *, old: os.PathLike, new: os.PathLike) -> bool:
  """
  If 'new' exists, deletes it, and then attempts to rename old -> new. If new is not specified,
@@ -293,14 +293,14 @@ class TradeEnv(Utf8SafeConsoleIOMixin):
  # Promote new to a guaranteed Path and remove it if it's present.
  new = Path(new)
  self.remove_file(new)
-
+
  # Promote new to a guaranteed Path and confirm it exists.
  old = Path(old)
  if not old.exists():
  return False
-
+
  # Perform the rename and log it at level 1.
  old.rename(new)
  self.DEBUG1(":recycle: moved {} to {}", old, new)
-
+
  return True
tradedangerous/transfers.py CHANGED
@@ -69,10 +69,10 @@ def download(
  :param shebang: function to call on the first line
  """
  tdenv.NOTE("Requesting {}".format(url))
-
+
  if isinstance(length, str):
  length = int(length)
-
+
  # If the caller provided an existing session stream, use that the fetch the request.
  req = (session or requests).get(url, headers=headers or None, stream=True, timeout=timeout)
  req.raise_for_status()
@@ -90,7 +90,7 @@ def download(
  raise TradeException(
  "Remote server gave an empty response. Please try again later."
  )
-
+
  # if the file is being compressed by the server, the headers tell us the
  # length of the compressed data, but in our loop below we will be receiving
  # the uncompressed data, which should be larger, which will cause our
@@ -125,7 +125,7 @@ def download(
  if prog:
  prog.increment(len(data))
  tdenv.DEBUG0("End of data")
-
+
  if not tdenv.quiet:
  elapsed = (time.time() - started) or 1
  tdenv.NOTE(
@@ -169,7 +169,7 @@ def get_json_data(url, *, timeout: int = 90):
  totalLength = int(totalLength)
  filename = get_filename_from_url(url)
  progBar = pbar.Progress(totalLength, 25, prefix=filename)
-
+
  jsData = bytes()
  for data in req.iter_content():
  jsData += data
tradedangerous/version.py CHANGED
@@ -12,5 +12,5 @@
  """just keeper of current version"""
 
  # TODO: remember to update tests when version changes
- __version__ = '11.4.0'
+ __version__ = '11.5.0'
 
tradedangerous-11.4.0.dist-info/METADATA → tradedangerous-11.5.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: tradedangerous
- Version: 11.4.0
+ Version: 11.5.0
  Summary: Trade-Dangerous is a set of powerful trading tools for Elite Dangerous, organized around one of the most powerful trade run optimizers available.
  Home-page: https://github.com/eyeonus/Trade-Dangerous
  Author: eyeonus
tradedangerous-11.4.0.dist-info/RECORD → tradedangerous-11.5.0.dist-info/RECORD CHANGED
@@ -1,44 +1,44 @@
  trade.py,sha256=Skqy0bC47g1KujuYrVwa3T8AfJ9NoLLOZcVjjxJ2daQ,1769
  tradegui.py,sha256=q2HdIdoyeLUpeF2X0hVIGn7sU6T4zOzq1HN0zGvZdyE,788
  tradedangerous/__init__.py,sha256=bwsbE_GyCNsuyGDKnfXAg0RD-ewsWHliySJ5QfCK7h8,1166
- tradedangerous/cache.py,sha256=0Pyvh5XYrNzC7Pj7KzDIioI-Xhydx7JGUXZGcrO1org,37225
+ tradedangerous/cache.py,sha256=BM3jxnezff0fd8WhIbnVn9dDuULtLMKld0oiY8h7nZU,37233
  tradedangerous/cli.py,sha256=dLekZ3MTbn9XcSGtE532qZF3iSnsb5G-ddQyErwTv9o,4559
  tradedangerous/corrections.py,sha256=_WLgo1IBWoskrrPFeshRwCOeJ2BeJb_x4tDQ0JdAo-s,1340
  tradedangerous/csvexport.py,sha256=_19bGVnCGJPzvx_8DzLGOs4rqx8Asn7eCVdXKdKu92E,8653
  tradedangerous/edscupdate.py,sha256=To7hL2FuR4A7A4gJfvow_jGl6mlD7aP-Drv1rEkwy-I,17310
  tradedangerous/edsmupdate.py,sha256=9IANIASA8ZKnZsLPBRAh7IArDRsa0jriL2UwjqKJ1fI,14928
  tradedangerous/formatting.py,sha256=R8GX2Zya1xazLOOBIWlLqUEjIESZJO41UlvhZDbyY4Q,6908
- tradedangerous/fs.py,sha256=41yUdriBSv-rinGEmxjBTKyKH4-RA2bIO4sKCMIwvUA,3861
+ tradedangerous/fs.py,sha256=Uqc0wEbORJektD22MQgjqerxbJbc3mUee9MlXna5Wx0,3897
  tradedangerous/gui.py,sha256=DFsF5zATr-lyJShL6t5kPKvcLLJYkICurzBz0WBa-oQ,43676
- tradedangerous/jsonprices.py,sha256=GJ07fbZSNX5U_6lczWTSk6mWwme_weJbXgE8zcZBnpY,7225
+ tradedangerous/jsonprices.py,sha256=vNj3Pz2NN39FgpHMo_TRH6MI3GQaFGIa2idg2HYYu2A,7229
  tradedangerous/mapping.py,sha256=Bf2G8LzP1Bat5k3hFs9XyeI1z3tfUpfeh4nuLP2QJuQ,4122
  tradedangerous/prices.py,sha256=JqiDVrtvvPd5pqE3HdwOHOuFgdAbOR-pt0GLD3ZIXM8,7425
- tradedangerous/submit-distances.py,sha256=xL7fwdbVrb05-zWNH-9nyYBDtV4bfUfP7OBx3NMBc34,11749
+ tradedangerous/submit-distances.py,sha256=m1qAuqH_yq8euMSWSqkmkTNM4a136Vl5WrGmTk15LiM,11745
  tradedangerous/tools.py,sha256=pp-4WtA12SVaaQHFJFOMTF7EDFRCU2mQeOhC4xoXmEk,1331
- tradedangerous/tradecalc.py,sha256=GlJZ9UwkoQUoayqUGVwlefhQvs-1SxYB5Y6Vkr31nSg,42753
+ tradedangerous/tradecalc.py,sha256=ZuTxp_9gMj4wDGalp1k_VfoVupwF8Oh9FOYML3vN-oM,42749
  tradedangerous/tradedb.py,sha256=mitKkS4MczivDK_K7A-IC94hkObUmGWFhwIrh_ts9qw,72282
- tradedangerous/tradeenv.py,sha256=o956HN7-7uzIcNi9vI4zh-L1z5jwBioVARyhhiAlsRQ,11885
+ tradedangerous/tradeenv.py,sha256=aIeK291Ene98kaEEu1LvlJ4KT4EvPfIXlT4IfZnpVc0,11917
  tradedangerous/tradeexcept.py,sha256=aZ-Y31MbkjF7lmAzBAbaMsPPE7FEEfuf4gaX2GvriDk,368
- tradedangerous/transfers.py,sha256=s0fRWDZCsL-VYm5E4ceTMKcUelcvzcOUelsfg6TjYI8,6000
+ tradedangerous/transfers.py,sha256=t_0Sjr4FI9lhIPwyPOXdR8d97bE611ujbKK4ioWShm0,6024
  tradedangerous/utils.py,sha256=PUPvAEqUyxYGqqQa0b_yfLAvq8YVUxK6HfdS-CxM-Lo,5186
- tradedangerous/version.py,sha256=mKbYHoSAHiOL_sv28H0wQfisFwP0y4WYtsR_RnDyBf0,646
+ tradedangerous/version.py,sha256=NMsHLdL8hA7iAFucNiKOx8yn15j7n8falWI0neiBfeI,646
  tradedangerous/commands/TEMPLATE.py,sha256=MOE69xsZPHPIMBQ-LXicfsOlCZdy-2gPX_nlnwYYil8,2026
  tradedangerous/commands/__init__.py,sha256=3gz2cnXNZNkV1gtZh0dOnCRxBkQHbeIyysRe3bM2WEE,9516
  tradedangerous/commands/buildcache_cmd.py,sha256=jhNSqHX_xX43SiSUMFiKtWpB9v4oeZ0sqfNq6DCrjUs,2181
- tradedangerous/commands/buy_cmd.py,sha256=GZa4wx5NKsZLKx1vtO1a47hSL8la5fatOY3dCazQSSo,13923
- tradedangerous/commands/commandenv.py,sha256=lzPbxhrgx4PJL_x8pRdYu22rqtL4U6kNQ5ExXTPoAao,9462
- tradedangerous/commands/exceptions.py,sha256=xJib2n0YRSgrs8WhZX5IeVHM-XakS3YwfjlF8_cNx4E,3476
+ tradedangerous/commands/buy_cmd.py,sha256=qgOBTzCvcYgLOcdkx7BpqF-rSs6GFjOL_f3Gzutmx1I,13947
+ tradedangerous/commands/commandenv.py,sha256=kw1FDkxDvJMaui92gWYHv8bNWsxxn5xHXCjmHGFeGUM,9794
+ tradedangerous/commands/exceptions.py,sha256=sisGoBvOpt25TYWWjp-s-k_--O_nZe5Zd9HhAW3SElA,3487
  tradedangerous/commands/export_cmd.py,sha256=VfxwrNU_TzrSw71KrmtmXKYCBkpDGr5KRLGPXOBewnI,4405
  tradedangerous/commands/import_cmd.py,sha256=PKMrO1DhOGkqAn_q9vZBxhogpFL5MHp2JyYb4qiBdq0,5633
  tradedangerous/commands/local_cmd.py,sha256=tf7YMGX-vaVGNO2lvQF9EvQEN3Wj7DE9-NTSVrtaZx0,8392
  tradedangerous/commands/market_cmd.py,sha256=Ig16zDuksywiiF3Exps6UuM-ZhqgbXqkW6Lu2s9xQf0,5411
  tradedangerous/commands/nav_cmd.py,sha256=v245L1MxiUliITUgvWeeB4cL4UdkNO8n0CiP6ztrV54,8460
  tradedangerous/commands/olddata_cmd.py,sha256=6rpPRRs4kLhV9c0sogmctVAjta7v0L0Deu999spXY2s,7888
- tradedangerous/commands/parsing.py,sha256=_AoC41kXFWlHMHADZY8QqVmeyRNT6_VapoFhj7sSflY,6595
+ tradedangerous/commands/parsing.py,sha256=IFYAVrEnlweocM08czG39oqRGuCgI7O5eN1-3NXQezs,6599
  tradedangerous/commands/rares_cmd.py,sha256=L_QoW2zIZTU9Jpavt_K2fZyu8T153nUSuVqIiz4uksQ,9207
  tradedangerous/commands/run_cmd.py,sha256=HtvxKfD2ef_fUCDdUzBv9yRwd2gBOqIq4hzUAOLgcyU,47650
  tradedangerous/commands/sell_cmd.py,sha256=yF0Bsq_gAycG8FQI0-JWmk_DvNnAgyno2wvj-PyrqBs,8269
- tradedangerous/commands/shipvendor_cmd.py,sha256=IA85tHmZK-dBAUM5Q2ds5cBSvRzDyhILnXxjNbq9Lsg,6980
+ tradedangerous/commands/shipvendor_cmd.py,sha256=opszTmFMVDkker2ON-iAql82HzVmQ_5MDklb3MNUmJI,6984
  tradedangerous/commands/station_cmd.py,sha256=gnWOT-Z8DfOeA4GNjUwj57eNxuNGh5mPZ-D0E_BeoTw,16232
  tradedangerous/commands/trade_cmd.py,sha256=o4Xdel5asGY8i6qES95fLHdyfyjKdC5CTOQrZIeu0i4,3016
  tradedangerous/commands/update_cmd.py,sha256=2UWR6aDbAbvtKFtfmehVwFUBQfJM5KThR71HoC6wVnM,14564
@@ -58,22 +58,23 @@ tradedangerous/misc/edsc.py,sha256=SN_da9qZ7H8ozibyhoFVB8nAvwBDPF3Z_PMlt70J2Ro,1
  tradedangerous/misc/edsm.py,sha256=equDwO1SY3QTsoJIb3LjiHes4C8Dejaap_TMpYlCm8o,2910
  tradedangerous/misc/importeddbstats.py,sha256=iLAcrFzdwiMm_MnUuaHcT_xGgZF8pfEEd1qljhhWJTU,1787
  tradedangerous/misc/prices-json-exp.py,sha256=Fpm62ugP35ZBqnRs6ekYfS3GoDFYmvLD3b3SFJfaMOI,4944
- tradedangerous/misc/progress.py,sha256=pWBHPA8Xc5uzjufAwCn7mkTrz7UbwSpY46o2BgEi7HU,7058
+ tradedangerous/misc/progress.py,sha256=QwWgbbhAuMB7LG0o-jy7WxN1uhLHtN0FwKkV314ulrA,7170
  tradedangerous/plugins/__init__.py,sha256=TL-OIptlqNENKhoFqkFeBJn_vSw8L0pVaDJgjhaTj7A,7860
  tradedangerous/plugins/edapi_plug.py,sha256=5nqBYmjUceAt-KTfiBn7IEl443R1SsGLDmfVXgbcyms,42262
  tradedangerous/plugins/edcd_plug.py,sha256=JuDtuEM_mN9Sz2H09-qYizM-9N3cuNjgvQy7Y-wHwKw,14412
- tradedangerous/plugins/eddblink_plug.py,sha256=V58hRCfjeZ7t3aAIgdzbS5VdO5l39A-9_QKbY7KNJ48,21209
+ tradedangerous/plugins/eddblink_plug.py,sha256=dyVaQhpElq7ceLD0bJhVeuiZoMu8Jdo3Y2hwcAcgx-4,21305
  tradedangerous/plugins/edmc_batch_plug.py,sha256=rrP_lFFxWsba8DPEo0WF2EdCiMoRC7tCT8z62MIvtIo,4173
  tradedangerous/plugins/journal_plug.py,sha256=5HMyoxQ7z42qj7NiL8rDxSyTN9gKikoQjyWzJLD-SYQ,23746
  tradedangerous/plugins/netlog_plug.py,sha256=yUl47l9xt3kGj9oSiY_FZaDGdnQj63oa9MBtSeIy1Zo,13469
- tradedangerous/plugins/spansh_plug.py,sha256=FiIS9cN2_8VKDrAj8yvkdy1NIni2kEc0ECqhgrvML4E,27048
+ tradedangerous/plugins/spansh_plug.py,sha256=y7-yedZdGbKpliHQucz48SlggzPfVJOV_MjX5QkSdA4,36952
  tradedangerous/templates/Added.csv,sha256=8o54civQCcS9y7_DBo0GX196XWRbbREQqKDYTKibsgQ,649
  tradedangerous/templates/Category.csv,sha256=8xwUDcBZE25T6x6dZGlRUMTCqeDLt3a9LXU5h6hRHV8,250
  tradedangerous/templates/RareItem.csv,sha256=F1RhRnTD82PiwrVUO-ai2ErGH2PTqNnQaDw5mcgljXs,10483
- tradedangerous/templates/TradeDangerous.sql,sha256=S9TH1bp0dfJLv90T6lTQ-TD8w8YGEZRMyUn8J1W4G3k,9330
- tradedangerous-11.4.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
- tradedangerous-11.4.0.dist-info/METADATA,sha256=1DbAmZFII4oH9A3B3IE2daweZD3Eyz0QyLTI4UMl1Rc,4435
- tradedangerous-11.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- tradedangerous-11.4.0.dist-info/entry_points.txt,sha256=lrA7U9JHOcNuam2WEK4Hmc3vQ3mrJfsbJCE74qd9au8,62
- tradedangerous-11.4.0.dist-info/top_level.txt,sha256=JEoOVAhg5GfXZ4kHpNontu0RVzek_7P9_jp93f3Pqn8,16
- tradedangerous-11.4.0.dist-info/RECORD,,
+ tradedangerous/templates/TradeDangerous.sql,sha256=XJ0T09OiS9UdYKJXXAG-rCe6T_ua4MU-icb2lD7iEz0,9342
+ tradedangerous/templates/database_changes.json,sha256=-KszgXhUUhwI7FRPDxnkV3bv9paXGpTgnUZxZN7i5OA,371
+ tradedangerous-11.5.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+ tradedangerous-11.5.0.dist-info/METADATA,sha256=qRJG-26PAjw7DiDR3HhJaGC0sj0bzvLGpuO1B5q8MLk,4435
+ tradedangerous-11.5.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ tradedangerous-11.5.0.dist-info/entry_points.txt,sha256=lrA7U9JHOcNuam2WEK4Hmc3vQ3mrJfsbJCE74qd9au8,62
+ tradedangerous-11.5.0.dist-info/top_level.txt,sha256=JEoOVAhg5GfXZ4kHpNontu0RVzek_7P9_jp93f3Pqn8,16
+ tradedangerous-11.5.0.dist-info/RECORD,,