tradedangerous 10.15.2.tar.gz → 10.16.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tradedangerous might be problematic.
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/PKG-INFO +1 -1
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/setup.py +1 -1
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/cache.py +7 -5
- tradedangerous-10.16.1/tradedangerous/plugins/eddblink_plug.py +526 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/spansh_plug.py +205 -119
- tradedangerous-10.16.1/tradedangerous/templates/Category.csv +17 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/tradedb.py +1 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/version.py +1 -1
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/PKG-INFO +1 -1
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/SOURCES.txt +1 -2
- tradedangerous-10.15.2/tests/test_trade_import_eddblink.py +0 -58
- tradedangerous-10.15.2/tradedangerous/plugins/eddblink_plug.py +0 -1133
- tradedangerous-10.15.2/tradedangerous/templates/DefaultShipIndex.json +0 -6330
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/LICENSE +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/README.md +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/pyproject.toml +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/setup.cfg +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_bootstrap_commands.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_bootstrap_plugins.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_cache.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_commands.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_fs.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_peek.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_tools.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_trade.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_trade_run.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tests/test_utils.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/__init__.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/cli.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/TEMPLATE.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/__init__.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/buildcache_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/buy_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/commandenv.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/exceptions.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/export_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/import_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/local_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/market_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/nav_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/olddata_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/parsing.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/rares_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/run_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/sell_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/shipvendor_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/station_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/trade_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/update_cmd.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/commands/update_gui.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/corrections.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/csvexport.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/edscupdate.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/edsmupdate.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/formatting.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/fs.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/gui.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/jsonprices.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/mapping.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/mfd/__init__.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/mfd/saitek/__init__.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/mfd/saitek/directoutput.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/mfd/saitek/x52pro.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/checkpricebounds.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/clipboard.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/coord64.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/derp-sentinel.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/diff-system-csvs.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/eddb.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/eddn.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/edsc.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/edsm.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/importeddbstats.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/prices-json-exp.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/misc/progress.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/__init__.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/edapi_plug.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/edcd_plug.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/edmc_batch_plug.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/journal_plug.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/plugins/netlog_plug.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/prices.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/submit-distances.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/templates/Added.csv +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/templates/RareItem.csv +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/templates/TradeDangerous.sql +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/tools.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/trade.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/tradecalc.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/tradeenv.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/tradeexcept.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/tradegui.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/transfers.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/utils.py +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/dependency_links.txt +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/entry_points.txt +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/not-zip-safe +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/requires.txt +0 -0
- {tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous.egg-info/top_level.txt +0 -0
{tradedangerous-10.15.2 → tradedangerous-10.16.1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tradedangerous
-Version: 10.
+Version: 10.16.1
 Summary: Trade-Dangerous is a set of powerful trading tools for Elite Dangerous, organized around one of the most powerful trade run optimizers available.
 Home-page: https://github.com/eyeonus/Trade-Dangerous
 Author: eyeonus
{tradedangerous-10.15.2 → tradedangerous-10.16.1}/setup.py
@@ -54,7 +54,7 @@ setup(name = package,
     ],
     license = "MPL",
     test_suite = "tests",
-    package_data = {"tradedangerous": ["templates/TradeDangerous.sql", "templates/Added.csv", "templates/
+    package_data = {"tradedangerous": ["templates/TradeDangerous.sql", "templates/Added.csv", "templates/Category.csv", "templates/RareItem.csv"]},
     entry_points = {
         "console_scripts": [
             "trade=tradedangerous.trade:main",
{tradedangerous-10.15.2 → tradedangerous-10.16.1}/tradedangerous/cache.py
@@ -407,6 +407,7 @@ def processPrices(tdenv, priceFile, db, defaultZero):
 
         if newID < 0:
             if not ignoreUnknown:
+                DEBUG0(f'Key value: "{list(stationByName.keys())[list(stationByName.values()).index(128893178)]}"')
                 ignoreOrWarn(
                     UnknownStationError(priceFile, lineNo, facility)
                 )
@@ -552,14 +553,15 @@ def processPrices(tdenv, priceFile, db, defaultZero):
     for line in priceFile:
         lineNo += 1
         text, _, comment = line.partition('#')
-        text =
+        text = text.strip()
+        # text = space_cleanup(text, ' ').strip()
         if not text:
            continue
 
-        #
-
-
-
+        # replace whitespace with single spaces
+        if text.find(" "):
+            # http://stackoverflow.com/questions/2077897
+            text = ' '.join(text.split())
 
        ########################################
        # ## "@ STAR/Station" lines.
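For context on the lines added above: `' '.join(text.split())` collapses any run of spaces or tabs into single spaces before the price line is parsed. A minimal standalone illustration of the idiom (the sample string below is illustrative, not taken from cache.py):

    # Collapse runs of whitespace into single spaces, as the new cache.py lines do.
    raw = "   64     Gold\t   drop   "
    cleaned = ' '.join(raw.split())
    print(cleaned)   # -> "64 Gold drop"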
tradedangerous-10.16.1/tradedangerous/plugins/eddblink_plug.py (new file)
@@ -0,0 +1,526 @@
+# ----------------------------------------------------------------
+# Import plugin that uses data files from EDDB.io and (optionally)
+# a EDDBlink_listener server to update the Database.
+# ----------------------------------------------------------------
+import certifi
+import codecs
+import csv
+import datetime
+import json
+import os
+import platform
+import sqlite3
+import ssl
+import time
+
+from urllib import request
+from calendar import timegm
+from pathlib import Path
+from importlib import reload
+
+from .. import plugins, cache, csvexport, tradedb, tradeenv, transfers
+from ..misc import progress as pbar
+from ..plugins import PluginException
+from shutil import copyfile
+
+# Constants
+BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"
+CONTEXT=ssl.create_default_context(cafile=certifi.where())
+
+
+def request_url(url, headers=None):
+    data = None
+    if headers:
+        data = bytes(json.dumps(headers), encoding="utf-8")
+
+    return request.urlopen(request.Request(url, data=data), context=CONTEXT)
+
+
+class DecodingError(PluginException):
+    pass
+
+
+class ImportPlugin(plugins.ImportPluginBase):
+    """
+    Plugin that downloads data from eddb.
+    """
+
+    pluginOptions = {
+        'item': "Update Items using latest file from server. (Implies '-O system,station')",
+        'rare': "Update RareItems using latest file from server. (Implies '-O system,station')",
+        'ship': "Update Ships using latest file from server.",
+        'upgrade': "Update Upgrades using latest file from server.",
+        'system': "Update Systems using latest file from server.",
+        'station': "Update Stations using latest file from server. (Implies '-O system')",
+        'shipvend': "Update ShipVendors using latest file from server. (Implies '-O system,station,ship')",
+        'upvend': "Update UpgradeVendors using latest file from server. (Implies '-O system,station,upgrade')",
+        'listings': "Update market data using latest listings.csv dump. (Implies '-O item,system,station')",
+        'all': "Update everything with latest dumpfiles. (Regenerates all tables)",
+        'clean': "Erase entire database and rebuild from empty. (Regenerates all tables.)",
+        'skipvend': "Don't regenerate ShipVendors or UpgradeVendors. (Supercedes '-O all', '-O clean'.)",
+        'force': "Force regeneration of selected items even if source file not updated since previous run. "
+                 "(Useful for updating Vendor tables if they were skipped during a '-O clean' run.)",
+        'purge': "Remove any empty systems that previously had fleet carriers.",
+        'solo': "Don't download crowd-sourced market data. (Implies '-O skipvend', supercedes '-O all', '-O clean', '-O listings'.)"
+    }
+
+    def __init__(self, tdb, tdenv):
+        super().__init__(tdb, tdenv)
+
+        self.dataPath = Path(os.environ.get('TD_EDDB')) if os.environ.get('TD_EDDB') else self.tdb.dataPath / Path("eddb")
+        self.categoriesPath = Path("Category.csv")
+        self.commoditiesPath = Path("Item.csv")
+        self.rareItemPath = Path("RareItem.csv")
+        self.shipPath = Path("Ship.csv")
+        self.shipVendorPath = Path("ShipVendor.csv")
+        self.stationsPath = Path("Station.csv")
+        self.sysPath = Path("System.csv")
+        self.upgradesPath = Path("Upgrade.csv")
+        self.upgradeVendorPath = Path("UpgradeVendor.csv")
+        self.listingsPath = Path("listings.csv")
+        self.liveListingsPath = Path("listings-live.csv")
+        self.pricesPath = Path("listings.prices")
+
+    def now(self):
+        return datetime.datetime.now()
+
+    def execute(self, sql_cmd, args = None):
+        cur = self.tdb.getDB().cursor()
+
+        success = False
+        result = None
+        while not success:
+            try:
+                if args:
+                    result = cur.execute(sql_cmd, args)
+                else:
+                    result = cur.execute(sql_cmd)
+                success = True
+            except sqlite3.OperationalError as e:
+                if "locked" not in str(e):
+                    success = True
+                    raise sqlite3.OperationalError(e)
+                else:
+                    print("(execute) Database is locked, waiting for access.", end = "\r")
+                    time.sleep(1)
+        return result
+
+    def executemany(self, sql_cmd, args):
+        cur = self.tdb.getDB().cursor()
+
+        success = False
+        result = None
+        while not success:
+            try:
+                result = cur.executemany(sql_cmd, args)
+                success = True
+            except sqlite3.OperationalError as e:
+                if "locked" not in str(e):
+                    success = True
+                    raise sqlite3.OperationalError(e)
+                else:
+                    print("(execute) Database is locked, waiting for access.", end = "\r")
+                    time.sleep(1)
+        return result
+
+    @staticmethod
+    def fetchIter(cursor, arraysize = 1000):
+        """
+        An iterator that uses fetchmany to keep memory usage down
+        and speed up the time to retrieve the results dramatically.
+        """
+        while True:
+            results = cursor.fetchmany(arraysize)
+            if not results:
+                break
+            for result in results:
+                yield result
+
+    @staticmethod
+    def blocks(f, size = 65536):
+        while True:
+            b = f.read(size)
+            if not b:
+                break
+            yield b
+
+    def downloadFile(self, path):
+        """
+        Fetch the latest dumpfile from the website if newer than local copy.
+        """
+
+        def openURL(url):
+            return request_url(url, headers = {'User-Agent': 'Trade-Dangerous'})
+
+        if path != self.liveListingsPath and path != self.listingsPath:
+            localPath = self.tdb.dataPath / path
+        else:
+            localPath = self.dataPath / path
+
+        url = BASE_URL + str(path)
+
+        self.tdenv.NOTE("Checking for update to '{}'.", path)
+        try:
+            response = openURL(url)
+        except Exception as e:
+            tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", BASE_URL + str(path), str(e))
+            return False
+
+        url_time = response.getheader("Last-Modified")
+        dumpModded = datetime.strptime(url_time, "%a, %d %b %Y %H:%M:%S %Z").timestamp()
+
+        if Path.exists(localPath):
+            localModded = localPath.stat().st_mtime
+            if localModded >= dumpModded:
+                self.tdenv.DEBUG0("'{}': Dump is not more recent than Local.", path)
+                return False
+
+        self.tdenv.NOTE("Downloading file '{}'.", path)
+        transfers.download(self.tdenv, url, localPath)
+        return True
+
+    def purgeSystems(self):
+        """
+        Purges systems from the System table that do not have any stations claiming to be in them.
+        Keeps table from becoming too large because of fleet carriers moving to unpopulated systems.
+        """
+
+        self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
+
+        self.execute("PRAGMA foreign_keys = OFF")
+
+        print("Saving systems with stations.... " + str(self.now()) + "\t\t\t\t", end="\r")
+        self.execute("DROP TABLE IF EXISTS System_copy")
+        self.execute("""CREATE TABLE System_copy AS SELECT * FROM System
+                        WHERE system_id IN (SELECT system_id FROM Station)
+                        """)
+
+        print("Erasing table and reinserting kept systems.... " + str(self.now()) + "\t\t\t\t", end="\r")
+        self.execute("DELETE FROM System")
+        self.execute("INSERT INTO System SELECT * FROM System_copy")
+
+        print("Removing copy.... " + str(self.now()) + "\t\t\t\t", end="\r")
+        self.execute("PRAGMA foreign_keys = ON")
+        self.execute("DROP TABLE IF EXISTS System_copy")
+
+        self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
+
+    def commit(self):
+        success = False
+        while not success:
+            try:
+                self.tdb.getDB().commit()
+                success = True
+            except sqlite3.OperationalError:
+                print("(commit) Database is locked, waiting for access.", end = "\r")
+                time.sleep(1)
+
+    def importListings(self, listings_file):
+        """
+        Updates the market data (AKA the StationItem table) using listings.csv
+        Writes directly to database.
+        """
+
+        self.tdenv.NOTE("Processing market data from {}: Start time = {}", listings_file, self.now())
+        if not (self.dataPath / listings_file).exists():
+            self.tdenv.NOTE("File not found, aborting: {}", (self.dataPath / listings_file))
+            return
+
+        total = 1
+
+        from_live = 0 if listings_file == self.listingsPath else 1
+
+        # Used to check if the listings file is using the fdev_id as a temporary
+        # item_id, but the item is in the DB with a permanent item_id.
+        fdev2item = dict()
+        result = self.execute("SELECT fdev_id,item_id FROM Item ORDER BY fdev_id").fetchall()
+        for item in result:
+            fdev2item[item[0]] = item[1]
+
+        with open(str(self.dataPath / listings_file), "r", encoding = "utf-8", errors = 'ignore') as f:
+            total += (sum(bl.count("\n") for bl in self.blocks(f)))
+
+        liveList = []
+        liveStmt = """UPDATE StationItem
+                    SET from_live = 0
+                    WHERE station_id = ?"""
+
+        delList = []
+        delStmt = "DELETE from StationItem WHERE station_id = ?"
+
+        listingList = []
+        listingStmt = """INSERT OR IGNORE INTO StationItem
+                        (station_id, item_id, modified,
+                         demand_price, demand_units, demand_level,
+                         supply_price, supply_units, supply_level, from_live)
+                        VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? )"""
+
+        items = []
+        it_result = self.execute("SELECT item_id FROM Item ORDER BY item_id").fetchall()
+        for item in it_result:
+            items.append(item[0])
+
+        stationList = {
+            stationID
+            for (stationID,) in self.execute("SELECT station_id FROM Station")
+        }
+
+        with open(str(self.dataPath / listings_file), "r") as fh:
+            prog = pbar.Progress(total, 50)
+            listings = csv.DictReader(fh)
+
+            cur_station = -1
+
+            for listing in listings:
+                prog.increment(1, postfix = lambda value, goal: " " + str(round(value / total * 100)) + "%")
+
+                station_id = int(listing['station_id'])
+                if station_id not in stationList:
+                    continue
+
+                if station_id != cur_station:
+                    cur_station = station_id
+                    skipStation = False
+
+                    # Check if listing already exists in DB and needs updated.
+                    # Only need to check the date for the first item at a specific station.
+                    result = self.execute("SELECT modified FROM StationItem WHERE station_id = ?", (station_id,)).fetchone()
+                    if result:
+                        updated = timegm(datetime.datetime.strptime(result[0].split('.')[0], '%Y-%m-%d %H:%M:%S').timetuple())
+                        # When the listings.csv data matches the database, update to make from_live == 0.
+                        if int(listing['collected_at']) == updated and not from_live:
+                            liveList.append((cur_station,))
+                        # Unless the import file data is newer, nothing else needs to be done for this station,
+                        # so the rest of the listings for this station can be skipped.
+                        if int(listing['collected_at']) <= updated:
+                            skipStation = True
+                            continue
+
+                        # The data from the import file is newer, so we need to delete the old data for this station.
+                        delList.append((cur_station,))
+
+                if skipStation:
+                    continue
+
+                # Since this station is not being skipped, get the data and prepare for insertion into the DB.
+                item_id = int(listing['commodity_id'])
+                # listings.csv includes rare items, which we are ignoring.
+                if item_id not in items:
+                    continue
+                modified = datetime.datetime.utcfromtimestamp(int(listing['collected_at'])).strftime('%Y-%m-%d %H:%M:%S')
+                demand_price = int(listing['sell_price'])
+                demand_units = int(listing['demand'])
+                demand_level = int(listing['demand_bracket']) if listing['demand_bracket'] != '' else -1
+                supply_price = int(listing['buy_price'])
+                supply_units = int(listing['supply'])
+                supply_level = int(listing['supply_bracket']) if listing['supply_bracket'] != '' else -1
+
+                listingList.append((station_id, item_id, modified,
+                                    demand_price, demand_units, demand_level,
+                                    supply_price, supply_units, supply_level, from_live))
+
+            while prog.value < prog.maxValue:
+                prog.increment(1, postfix = lambda value, goal: " " + str(round(value / total * 100)) + "%")
+            prog.clear()
+
+        self.tdenv.NOTE("Import file processing complete, updating database. {}", self.now())
+        if liveList:
+            self.tdenv.NOTE("Marking data now in the EDDB listings.csv as no longer 'live'. {}", self.now())
+            self.executemany(liveStmt, liveList)
+        if delList:
+            self.tdenv.NOTE("Deleting old listing data. {}", self.now())
+            self.executemany(delStmt, delList)
+        if listingList:
+            self.tdenv.NOTE("Inserting new listing data. {}", self.now())
+            self.executemany(listingStmt, listingList)
+
+        self.updated['Listings'] = True
+        self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
+
+    def run(self):
+        # Create the /eddb folder for downloading the source files if it doesn't exist.
+        try:
+            Path(str(self.dataPath)).mkdir()
+        except FileExistsError:
+            pass
+
+        # Run 'listings' by default:
+        # If no options, or if only 'force', and/or 'skipvend',
+        # have been passed, enable 'listings'.
+        default = True
+        for option in self.options:
+            # if not option in ('force', 'fallback', 'skipvend', 'progbar'):
+            if not option in ('force', 'skipvend'):
+                default = False
+        if default:
+            self.options["listings"] = True
+
+        # We can probably safely assume that the plugin has never been run if
+        # the prices file doesn't exist, since the plugin always generates it.
+        if not (self.tdb.dataPath / Path("TradeDangerous.prices")).exists():
+            self.options["clean"] = True
+
+        if self.getOption("clean"):
+            # Rebuild the tables from scratch. Must be done on first run of plugin.
+            # Can be done at anytime with the "clean" option.
+            for name in [
+                "Category",
+                "Item",
+                "Ship",
+                "ShipVendor",
+                "Station",
+                "System",
+                "Upgrade",
+                "UpgradeVendor",
+            ]:
+                file = self.tdb.dataPath / Path(name + ".csv")
+                try:
+                    os.remove(str(file))
+                except FileNotFoundError:
+                    pass
+
+            try:
+                os.remove(str(self.tdb.dataPath) + "/TradeDangerous.db")
+            except FileNotFoundError:
+                pass
+            try:
+                os.remove(str(self.tdb.dataPath) + "/TradeDangerous.prices")
+            except FileNotFoundError:
+                pass
+
+            # Because this is a clean run, we need to temporarily rename the RareItem.csv,
+            # otherwise TD will crash trying to insert the rare items to the database,
+            # because there's nothing in the Station table it tries to pull from.
+            ri_path = self.tdb.dataPath / Path("RareItem.csv")
+            rib_path = ri_path.with_suffix(".tmp")
+            if ri_path.exists():
+                if rib_path.exists():
+                    rib_path.unlink()
+                ri_path.rename(rib_path)
+
+            self.tdb.reloadCache()
+
+            # Now it's safe to move RareItems back.
+            if ri_path.exists():
+                ri_path.unlink()
+            if rib_path.exists():
+                rib_path.rename(ri_path)
+
+            self.options["all"] = True
+            self.options["force"] = True
+
+        self.tdenv.ignoreUnknown = True
+
+        success = False
+        while not success:
+            try:
+                self.tdenv.DEBUG0("Loading Database. {}", self.now())
+                self.tdb.load(maxSystemLinkLy = self.tdenv.maxSystemLinkLy)
+                success = True
+            except sqlite3.OperationalError:
+                print("Database is locked, waiting for access.", end = "\r")
+                time.sleep(1)
+        self.tdenv.DEBUG0("Database loaded.")
+
+        # Select which options will be updated
+        if self.getOption("listings"):
+            self.options["item"] = True
+            self.options["station"] = True
+
+        if self.getOption("shipvend"):
+            self.options["ship"] = True
+            self.options["station"] = True
+
+        if self.getOption("upvend"):
+            self.options["upgrade"] = True
+            self.options["station"] = True
+
+        if self.getOption['item']:
+            self.options['station'] = True
+
+        if self.getOption['rare']:
+            self.options['station'] = True
+
+        if self.getOption("station"):
+            self.options["system"] = True
+
+        if self.getOption("all"):
+            self.options["item"] = True
+            self.options['rare'] = True
+            self.options["ship"] = True
+            self.options["shipvend"] = True
+            self.options["station"] = True
+            self.options["system"] = True
+            self.options["upgrade"] = True
+            self.options["upvend"] = True
+            self.options["listings"] = True
+
+        if self.getOption("solo"):
+            self.options["listings"] = False
+            self.options["skipvend"] = True
+
+        if self.getOption("skipvend"):
+            self.options["shipvend"] = False
+            self.options["upvend"] = False
+
+        # Download required files and update tables.
+        buildCache = False
+        if self.getOption("purge"):
+            self.purgeSystems()
+            self.commit()
+
+        if self.getOption("upgrade"):
+            if self.downloadFile(self.upgradesPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("ship"):
+            if self.downloadFile(self.shipsPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("rare"):
+            if self.downloadFile(self.rareItemPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("shipvend"):
+            if self.downloadFile(self.shipVendorPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("upvend"):
+            if self.downloadFile(self.upgradeVendorPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("system"):
+            if self.downloadFile(self.sysPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("station"):
+            if self.downloadFile(self.stationsPath) or self.getOption("force"):
+                buildCache = True
+
+        if self.getOption("item"):
+            if self.downloadFile(self.commoditiesPath) or self.getOption("force"):
+                self.downloadFile(self.categoriesPath)
+                buildCache = True
+
+        # Remake the .db files with the updated info.
+        if buildCache:
+            cache.buildCache(self.tdb, self.tdenv)
+
+        if self.getOption("listings"):
+            if self.downloadFile(self.listingsPath) or self.getOption("force"):
+                self.importListings(self.listingsPath)
+            if self.downloadFile(self.liveListingsPath) or self.getOption("force"):
+                self.importListings(self.liveListingsPath)
+
+        self.commit()
+
+        self.tdb.close()
+
+        if self.updated['Listings']:
+            self.tdenv.NOTE("Regenerating .prices file.")
+            cache.regeneratePricesFile(self.tdb, self.tdenv)
+
+        self.tdenv.NOTE("Import completed.")
+
+        # TD doesn't need to do anything, tell it to just quit.
+        return False
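For orientation, the reinstated plugin is normally driven through TD's `trade import` command with the eddblink plugin selected and options passed via '-O', as the option help strings above indicate. A rough Python sketch of the equivalent wiring; the TradeEnv/TradeDB constructor details are assumptions rather than taken from this diff, while ImportPlugin(tdb, tdenv), .options and .run() match the code shown above:

    # Hedged sketch: drive the new eddblink ImportPlugin directly from Python.
    from tradedangerous import tradedb, tradeenv
    from tradedangerous.plugins.eddblink_plug import ImportPlugin

    tdenv = tradeenv.TradeEnv()          # default environment/logging (assumed default constructor)
    tdb = tradedb.TradeDB(tdenv)         # opens the local TradeDangerous database (assumed signature)
    plug = ImportPlugin(tdb, tdenv)
    plug.options["listings"] = True      # same effect as the '-O listings' option
    plug.run()                           # returns False so TD exits after the import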