tradedangerous 11.0.3.tar.gz → 11.0.5.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tradedangerous might be problematic.
- {tradedangerous-11.0.3/tradedangerous.egg-info → tradedangerous-11.0.5}/PKG-INFO +1 -1
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/setup.py +3 -3
- {tradedangerous-11.0.3/tradedangerous → tradedangerous-11.0.5}/trade.py +5 -12
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/cache.py +9 -9
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/cli.py +8 -7
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/csvexport.py +1 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/formatting.py +7 -6
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/eddblink_plug.py +152 -158
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/spansh_plug.py +34 -34
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/version.py +1 -1
- {tradedangerous-11.0.3 → tradedangerous-11.0.5/tradedangerous.egg-info}/PKG-INFO +1 -1
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous.egg-info/SOURCES.txt +2 -2
- tradedangerous-11.0.5/tradedangerous.egg-info/entry_points.txt +3 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous.egg-info/top_level.txt +1 -0
- tradedangerous-11.0.3/tradedangerous.egg-info/entry_points.txt +0 -3
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/LICENSE +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/README.md +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/pyproject.toml +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/setup.cfg +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_bootstrap_commands.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_bootstrap_plugins.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_cache.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_commands.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_fs.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_peek.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_tools.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_trade.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_trade_run.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tests/test_utils.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/__init__.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/TEMPLATE.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/__init__.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/buildcache_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/buy_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/commandenv.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/exceptions.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/export_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/import_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/local_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/market_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/nav_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/olddata_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/parsing.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/rares_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/run_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/sell_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/shipvendor_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/station_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/trade_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/update_cmd.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/commands/update_gui.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/corrections.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/edscupdate.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/edsmupdate.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/fs.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/gui.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/jsonprices.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/mapping.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/mfd/__init__.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/mfd/saitek/__init__.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/mfd/saitek/directoutput.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/mfd/saitek/x52pro.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/checkpricebounds.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/clipboard.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/coord64.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/derp-sentinel.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/diff-system-csvs.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/eddb.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/eddn.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/edsc.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/edsm.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/importeddbstats.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/prices-json-exp.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/misc/progress.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/__init__.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/edapi_plug.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/edcd_plug.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/edmc_batch_plug.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/journal_plug.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/netlog_plug.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/prices.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/submit-distances.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/templates/Added.csv +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/templates/Category.csv +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/templates/RareItem.csv +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/templates/TradeDangerous.sql +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/tools.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/tradecalc.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/tradedb.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/tradeenv.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/tradeexcept.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/transfers.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/utils.py +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous.egg-info/dependency_links.txt +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous.egg-info/not-zip-safe +0 -0
- {tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous.egg-info/requires.txt +0 -0
- {tradedangerous-11.0.3/tradedangerous → tradedangerous-11.0.5}/tradegui.py +0 -0
{tradedangerous-11.0.3/tradedangerous.egg-info → tradedangerous-11.0.5}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tradedangerous
-Version: 11.0.3
+Version: 11.0.5
 Summary: Trade-Dangerous is a set of powerful trading tools for Elite Dangerous, organized around one of the most powerful trade run optimizers available.
 Home-page: https://github.com/eyeonus/Trade-Dangerous
 Author: eyeonus
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/setup.py
@@ -30,7 +30,7 @@ setup(name = package,
       install_requires = ["requests", "appJar", "ijson", "rich"],
       setup_requires = ["pytest-runner"],
       tests_require = ["pytest"],
-      packages = ['tradedangerous', 'tradedangerous.commands', 'tradedangerous.mfd', 'tradedangerous.mfd.saitek', 'tradedangerous.misc', 'tradedangerous.plugins'],
+      packages = ['.', 'tradedangerous', 'tradedangerous.commands', 'tradedangerous.mfd', 'tradedangerous.mfd.saitek', 'tradedangerous.misc', 'tradedangerous.plugins'],
       url = "https://github.com/eyeonus/Trade-Dangerous",
       project_urls = {
           "Bug Tracker": "https://github.com/eyeonus/Trade-Dangerous/issues",
@@ -57,8 +57,8 @@ setup(name = package,
       package_data = {"tradedangerous": ["templates/TradeDangerous.sql", "templates/Added.csv", "templates/Category.csv", "templates/RareItem.csv"]},
       entry_points = {
           "console_scripts": [
-              "trade=
-              "tradegui=
+              "trade=trade:main",
+              "tradegui=tradegui:main"
           ]
       },
       zip_safe = False
{tradedangerous-11.0.3/tradedangerous → tradedangerous-11.0.5}/trade.py
@@ -33,19 +33,12 @@
 # DEVELOPERS: If you are a programmer who wants TD to do something
 # cool, please see the TradeDB and TradeCalc modules. TD is designed
 # to empower other programmers to do cool stuff.
-from __future__ import annotations
-
 from tradedangerous import cli
 
-
-
-
-def main(argv: list[tuple] = None) -> None:
-    """ Entry point for the TradeDangerous command-line app. """
-    if argv is None:
-        argv = sys.argv
-    cli.main(argv)
-
+def main(argv = None):
+    import sys
+    cli.main(sys.argv)
 
 if __name__ == "__main__":
-
+    import sys
+    cli.main(sys.argv)
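For orientation: the setup.py change above points the console scripts at top-level trade:main and tradegui:main, and the relocated trade.py now simply defers to the package CLI. A minimal sketch of the resulting entry point, assuming the sdist installs trade.py alongside the tradedangerous package:

import sys

from tradedangerous import cli

def main(argv=None):
    # 11.0.5 ignores the argv argument and always dispatches sys.argv,
    # exactly as the hunk above shows.
    cli.main(sys.argv)

if __name__ == "__main__":
    main()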
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/cache.py
@@ -206,7 +206,7 @@ class MultipleItemEntriesError(DuplicateKeyError):
         super().__init__(fromFile, lineNo, 'item', item, prevLineNo)
 
 
-class
+class InvalidLineError(BuildCacheBaseException):
     """
     Raised when an invalid line is read.
     Attributes:
@@ -258,7 +258,7 @@ def parseSupply(pricesFile: Path, lineNo: int, category: str, reading: str) -> t
     # High <- 'H';
     if reading == '?':
         return -1, -1
-
+    if reading == '-':
         return 0, 0
 
     # extract the left most digits into unit and the last character into the level reading.
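As an aside, the restored '-' branch sits alongside the existing '?' branch; a simplified, self-contained illustration of the unit/level split described by the surrounding comments (this is not the actual cache.py implementation, and the level letters L/M/H are an assumption):

def parse_reading(reading: str) -> tuple[int, int]:
    levels = {'L': 1, 'M': 2, 'H': 3}
    if reading == '?':      # unknown units and level
        return -1, -1
    if reading == '-':      # explicitly none
        return 0, 0
    # left-most digits become the unit count, the last character the level letter
    units, level = reading[:-1], reading[-1]
    return int(units), levels.get(level.upper(), -1)

assert parse_reading('?') == (-1, -1)
assert parse_reading('-') == (0, 0)
assert parse_reading('1500M') == (1500, 2)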
@@ -604,13 +604,13 @@ def processPrices(tdenv: TradeEnv, priceFile: Path, db: sqlite3.Connection, defa
         if text.startswith('@'):
             matches = systemStationRe.match(text)
             if not matches:
-                raise
+                raise InvalidLineError(priceFile, lineNo, "Unrecognized '@' line", text)
             changeStation(matches)
             continue
 
         if not stationID:
             # Need a station to process any other type of line.
-            raise
+            raise InvalidLineError(priceFile, lineNo, "Expecting '@ SYSTEM / Station' line", text)
         if stationID == DELETED:
             # Ignore all values from a deleted station/system.
             continue
@@ -625,7 +625,7 @@ def processPrices(tdenv: TradeEnv, priceFile: Path, db: sqlite3.Connection, defa
         # ## "Item sell buy ..." lines.
         matches = newItemPriceRe.match(text)
         if not matches:
-            raise
+            raise InvalidLineError(priceFile, lineNo, "Unrecognized line/syntax", text)
 
         processItemLine(matches)
 
@@ -639,7 +639,7 @@ def processPrices(tdenv: TradeEnv, priceFile: Path, db: sqlite3.Connection, defa
             "if you /need/ to persist them."
         )
 
-    stations = tuple((ID,) for ID in processedStations
+    stations = tuple((ID,) for ID in processedStations)
     return stations, items, zeros, newItems, updtItems, ignItems, numSys
 
 
@@ -649,9 +649,9 @@ def processPrices(tdenv: TradeEnv, priceFile: Path, db: sqlite3.Connection, defa
 def processPricesFile(tdenv: TradeEnv, db: sqlite3.Connection, pricesPath: Path, pricesFh: Optional[TextIO] = None, defaultZero: bool = False) -> None:
     tdenv.DEBUG0("Processing Prices file '{}'", pricesPath)
 
-    with pricesFh or pricesPath.open('r', encoding='utf-8') as
+    with (pricesFh or pricesPath.open('r', encoding='utf-8')) as fh:
         stations, items, zeros, newItems, updtItems, ignItems, numSys = processPrices(
-            tdenv,
+            tdenv, fh, db, defaultZero
         )
 
     if not tdenv.mergeImport:
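The parenthesised form above makes the intent explicit: whichever of the caller-supplied handle or the freshly opened file is truthy becomes the context manager bound to fh. A tiny stand-alone sketch of the same idiom (function and file names are made up):

from io import StringIO
from pathlib import Path

def first_line(path: Path, fh=None) -> str:
    # Use the caller's handle if given, otherwise open the file; either way
    # the with-block closes it on exit.
    with (fh or path.open('r', encoding='utf-8')) as f:
        return f.readline()

print(first_line(Path('unused.prices'), fh=StringIO("@ SOL / Abraham Lincoln\n")))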
@@ -907,7 +907,7 @@ def processImportFile(tdenv, db, importPath, tableName):
             try:
                 db.execute(sql_stmt, linein)
                 importCount += 1
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-exception-caught
                 tdenv.WARN(
                     "*** INTERNAL ERROR: {err}\n"
                     "CSV File: {file}:{line}\n"
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/cli.py
@@ -32,34 +32,35 @@
 # to empower other programmers to do cool stuff.
 
 import os
+import sys
 import traceback
 
 from . import commands
+from . import tradeexcept
 from .commands import exceptions
 from .plugins import PluginException
 
-
 from . import tradedb
 
+if "CPROF" in os.environ:
+    import cProfile
+
 
 def main(argv = None):
-    import sys
     if not argv:
         argv = sys.argv
-    if sys.hexversion <
+    if sys.hexversion < 0x03070000:
         raise SystemExit(
-            "Sorry: TradeDangerous requires Python 3.
+            "Sorry: TradeDangerous requires Python 3.7 or higher.\n"
             "For assistance, see:\n"
             "\tBug Tracker: https://github.com/eyeonus/Trade-Dangerous/issues\n"
             "\tDocumentation: https://github.com/eyeonus/Trade-Dangerous/wiki\n"
            "\tEDForum Thread: https://forums.frontier.co.uk/showthread.php/441509\n"
        )
-    from . import tradeexcept
 
     try:
         try:
             if "CPROF" in os.environ:
-                import cProfile
                 cProfile.run("trade(argv)")
             else:
                 trade(argv)
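For reference, sys.hexversion packs (major, minor, micro, releaselevel, serial) into one integer of the form 0xMMmmppRS, so the literal 0x03070000 in the new check corresponds to Python 3.7.0. A quick sanity check:

import sys

REQUIRED = 0x03070000   # 3.7.0
print(hex(sys.hexversion), sys.version_info)
assert sys.hexversion >= REQUIRED, "TradeDangerous requires Python 3.7 or higher."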
@@ -79,7 +80,7 @@ def main(argv = None):
         print()
         print(traceback.format_exc())
         print(
-            "Please report this bug (http://
+            "Please report this bug (http://github.com/eyeonus/Trade-Dangerous/issues). You may be "
             "able to work around it by using the '-q' parameter. Windows "
             "users may be able to use 'chcp.com 65001' to tell the console "
             "you want to support UTF-8 characters."
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/formatting.py
@@ -8,7 +8,9 @@ import itertools
 import typing
 
 if typing.TYPE_CHECKING:
-    from typing import Any,
+    from typing import Any, Optional
+    from collections.abc import Callable
+
 
 class ColumnFormat:
     """
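The new imports sit under typing.TYPE_CHECKING, so they are only evaluated by static type checkers and cost nothing at runtime. A minimal sketch of the pattern (the function here is illustrative, not from formatting.py):

from __future__ import annotations
import typing

if typing.TYPE_CHECKING:
    from collections.abc import Callable

def apply_once(fn: Callable[[int], int], value: int) -> int:
    # The annotation is never evaluated at runtime thanks to
    # "from __future__ import annotations", so the guarded import suffices.
    return fn(value)

print(apply_once(lambda x: x + 1, 41))   # 42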
@@ -197,8 +199,7 @@ if __name__ == '__main__':
     print("Simple usage:")
     present()
 
-    print()
-    print("Adding age ColumnFormat:")
-
-
-    present()
+    #print()
+    #print("Adding age ColumnFormat:")
+    #rowFmt.append(after='Name', col=ColumnFormat("Age", '>', 3, pre='|', post='|', key=lambda row: row['age']))
+    #present()
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/eddblink_plug.py
@@ -1,7 +1,8 @@
-
-
-
-
+"""
+Import plugin that uses data files from
+https://elite.tromador.com/ to update the Database.
+"""
+from __future__ import annotations
 import certifi
 import csv
 import datetime
@@ -10,42 +11,48 @@ import os
 import sqlite3
 import ssl
 import time
+import typing
 
 from urllib import request
-from calendar import timegm
 from pathlib import Path
 
 from .. import plugins, cache, transfers
 from ..misc import progress as pbar
 from ..plugins import PluginException
 
+
+if typing.TYPE_CHECKING:
+    from typing import Optional
+    from .. tradeenv import TradeEnv
+
+
 # Constants
 BASE_URL = os.environ.get('TD_SERVER') or "https://elite.tromador.com/files/"
 CONTEXT=ssl.create_default_context(cafile=certifi.where())
 
 
-def
+def _request_url(url, headers=None):
     data = None
     if headers:
         data = bytes(json.dumps(headers), encoding="utf-8")
 
-    return request.urlopen(request.Request(url, data=data), context=CONTEXT)
+    return request.urlopen(request.Request(url, data=data), context=CONTEXT, timeout=90)
 
 
 class DecodingError(PluginException):
     pass
 
 
-def
+def _file_line_count(from_file: Path, bufsize: int = 128 * 1024) -> int:
     """ counts the number of newline characters in a given file. """
-    # Pre-allocate a buffer so we'
-    # capture it's counting method so we don't have to keep looking that up on
-    # large files.
+    # Pre-allocate a buffer so we aren't putting pressure on the garbage collector.
     buf = bytearray(bufsize)
+
+    # Capture it's counting method, so we don't have to keep looking that up on
+    # large files.
     counter = buf.count
-
+
     total = 0
-
     with from_file.open("rb") as fh:
         # Capture the 'readinto' method to avoid lookups.
         reader = fh.readinto
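The renamed _request_url() helper now also passes timeout=90, so a stalled download raises instead of hanging indefinitely. A self-contained sketch of the same call pattern (URL shown only as an example):

import json
import ssl
from urllib import request

import certifi

CONTEXT = ssl.create_default_context(cafile=certifi.where())

def request_url(url, headers=None):
    data = None
    if headers:
        # the plugin sends its headers as a JSON body, as above
        data = bytes(json.dumps(headers), encoding="utf-8")
    return request.urlopen(request.Request(url, data=data), context=CONTEXT, timeout=90)

# with request_url("https://elite.tromador.com/files/listings.csv") as resp:   # network access assumed
#     print(resp.status, resp.headers.get("Last-Modified"))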
@@ -60,10 +67,46 @@ def file_line_count(from_file: Path, bufsize: int = 128 * 1024) -> int:
             # when 0 <= read < bufsize we're on the last page of the
             # file, so we need to take a slice of the buffer, which creates
             # a new object and thus we also have to lookup count. it's trivial
-            # but if you have to do it 10,000x it's
+            # but if you have to do it 10,000x it's definitely not a rounding error.
             return total + buf[:read].count(b'\n')
 
 
+def _count_listing_entries(tdenv: TradeEnv, listings: Path) -> int:
+    """ Calculates the number of entries in a listing file by counting the lines. """
+    if not listings.exists():
+        tdenv.NOTE("File not found, aborting: {}", listings)
+        return 0
+
+    tdenv.DEBUG0(f"Getting total number of entries in {listings}...")
+    count = _file_line_count(listings)
+    if count <= 1:
+        if count == 1:
+            tdenv.DEBUG0("Listing count of 1 suggests nothing but a header")
+        else:
+            tdenv.DEBUG0("Listings file is empty, nothing to do.")
+        return 0
+
+    return count + 1  # kfsone: Doesn't the header already make this + 1?
+
+
+def _make_item_id_lookup(tdenv: TradeEnv, db: sqlite3.Cursor) -> frozenset[int]:
+    """ helper: retrieve the list of commodities in database. """
+    tdenv.DEBUG0("Getting list of commodities...")
+    return frozenset(cols[0] for cols in db.execute("SELECT item_id FROM Item"))
+
+
+def _make_station_id_lookup(tdenv: TradeEnv, db: sqlite3.Cursor) -> frozenset[int]:
+    """ helper: retrieve the list of station IDs in database. """
+    tdenv.DEBUG0("Getting list of stations...")
+    return frozenset(cols[0] for cols in db.execute("SELECT station_id FROM Station"))
+
+
+def _collect_station_modified_times(tdenv: TradeEnv, db: sqlite3.Cursor) -> dict[int, int]:
+    """ helper: build a list of the last modified time for all stations by id. """
+    tdenv.DEBUG0("Getting last-update times for stations...")
+    return dict(db.execute("SELECT station_id, strftime('%s', MIN(modified)) FROM StationItem GROUP BY station_id"))
+
+
 class ImportPlugin(plugins.ImportPluginBase):
     """
     Plugin that downloads data from eddb.
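Since the middle of the counting loop falls between the two hunks, here is a self-contained reconstruction of the renamed _file_line_count() helper for readers who want to try it; the loop body is inferred from the comments and the tail shown above:

from pathlib import Path

def file_line_count(from_file: Path, bufsize: int = 128 * 1024) -> int:
    """ Count newline characters by reading fixed-size chunks into one reusable buffer. """
    buf = bytearray(bufsize)
    counter = buf.count          # cache the bound method
    total = 0
    with from_file.open("rb") as fh:
        reader = fh.readinto     # cache the bound method
        while True:
            read = reader(buf)
            if read == bufsize:
                total += counter(b'\n')
            else:
                # short read: last page of the file, count only the filled slice
                return total + buf[:read].count(b'\n')

# print(file_line_count(Path("listings.csv")))   # path assumed to exist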
@@ -113,61 +156,25 @@ class ImportPlugin(plugins.ImportPluginBase):
     def now(self):
         return datetime.datetime.now()
 
-    def execute(self, sql_cmd, args = None):
-        cur = self.tdb.getDB().cursor()
-
-        self.tdenv.DEBUG2(f"SQL-Statement:\n'{sql_cmd},{args}'")
-        success = False
-        result = None
-        while not success:
-            try:
-                if args:
-                    result = cur.execute(sql_cmd, args)
-                else:
-                    result = cur.execute(sql_cmd)
-                success = True
-            except sqlite3.OperationalError as e:
-                if "locked" not in str(e):
-                    success = True
-                    raise sqlite3.OperationalError(e)
-                else:
-                    print("(execute) Database is locked, waiting for access.", end = "\r")
-                    time.sleep(1)
-        return result
-
-    @staticmethod
-    def fetchIter(cursor, arraysize = 1000):
-        """
-        An iterator that uses fetchmany to keep memory usage down
-        and speed up the time to retrieve the results dramatically.
-        """
-        while True:
-            results = cursor.fetchmany(arraysize)
-            if not results:
-                break
-            for result in results:
-                yield result
-
-
     def downloadFile(self, path):
         """
         Fetch the latest dumpfile from the website if newer than local copy.
         """
 
         def openURL(url):
-            return
+            return _request_url(url, headers = {'User-Agent': 'Trade-Dangerous'})
 
-        if path
-            localPath = self.tdb.dataPath
+        if path not in (self.liveListingsPath, self.listingsPath):
+            localPath = Path(self.tdb.dataPath, path)
         else:
-            localPath = self.dataPath
+            localPath = Path(self.dataPath, path)
 
         url = BASE_URL + str(path)
 
         self.tdenv.NOTE("Checking for update to '{}'.", path)
         try:
             response = openURL(url)
-        except Exception as e:
+        except Exception as e:  # pylint: disable=broad-exception-caught
             self.tdenv.WARN("Problem with download:\n URL: {}\n Error: {}", BASE_URL + str(path), str(e))
             return False
 
@@ -190,148 +197,137 @@ class ImportPlugin(plugins.ImportPluginBase):
         Purges systems from the System table that do not have any stations claiming to be in them.
         Keeps table from becoming too large because of fleet carriers moving to unpopulated systems.
         """
-
+        db = self.tdb.getDB()
         self.tdenv.NOTE("Purging Systems with no stations: Start time = {}", self.now())
-
-
-
-
-
-
+
+        db.execute("PRAGMA foreign_keys = OFF")
+
+        self.tdenv.DEBUG0("Saving systems with stations.... " + str(self.now()) + "\t\t\t\t", end="\r")
+        db.execute("DROP TABLE IF EXISTS System_copy")
+        db.execute("""CREATE TABLE System_copy AS SELECT * FROM System
             WHERE system_id IN (SELECT system_id FROM Station)
         """)
-
-
-
-
-
-
-
-
-
+
+        self.tdenv.DEBUG0("Erasing table and reinserting kept systems.... " + str(self.now()) + "\t\t\t\t", end="\r")
+        db.execute("DELETE FROM System")
+        db.execute("INSERT INTO System SELECT * FROM System_copy")
+
+        self.tdenv.DEBUG0("Removing copy.... " + str(self.now()) + "\t\t\t\t", end="\r")
+        db.execute("PRAGMA foreign_keys = ON")
+        db.execute("DROP TABLE IF EXISTS System_copy")
+
+        db.commit()
+
         self.tdenv.NOTE("Finished purging Systems. End time = {}", self.now())
 
-    def commit(self):
-        success = False
-        while not success:
-            try:
-                self.tdb.getDB().commit()
-                success = True
-            except sqlite3.OperationalError:
-                print("(commit) Database is locked, waiting for access.", end = "\r")
-                time.sleep(1)
-
     def importListings(self, listings_file):
         """
-        Updates the market data (AKA the StationItem table) using
+        Updates the market data (AKA the StationItem table) using listings_file
         Writes directly to database.
         """
+        listings_path = Path(self.dataPath, listings_file).absolute()
+        from_live = listings_path != Path(self.dataPath, self.listingsPath).absolute()
+        self.tdenv.NOTE("Processing market data from {}: Start time = {}. Live = {}", listings_file, self.now(), from_live)
 
-        self.tdenv
-        if not
-            self.tdenv.NOTE("File not found, aborting: {}", (self.dataPath / listings_file))
+        total = _count_listing_entries(self.tdenv, listings_path)
+        if not total:
             return
+
+        stmt_unliven_station = """UPDATE StationItem SET from_live = 0 WHERE station_id = ?"""
+        stmt_flush_station = """DELETE from StationItem WHERE station_id = ?"""
+        stmt_add_listing = """
+            INSERT OR IGNORE INTO StationItem (
+                station_id, item_id, modified, from_live,
+                demand_price, demand_units, demand_level,
+                supply_price, supply_units, supply_level
+            )
+            VALUES (
+                ?, ?, datetime(?, 'unixepoch'), ?,
+                ?, ?, ?,
+                ?, ?, ?
+            )
+        """
 
-
-
-
-
-        self.tdenv.
-        listings_path = Path(self.dataPath, listings_file)
-        total += file_line_count(listings_path)
-
-        liveStmt = """UPDATE StationItem
-                      SET from_live = 0
-                      WHERE station_id = ?"""
-
-        delStmt = "DELETE from StationItem WHERE station_id = ?"
-
-        listingStmt = """INSERT OR IGNORE INTO StationItem
-                         (station_id, item_id, modified,
-                          demand_price, demand_units, demand_level,
-                          supply_price, supply_units, supply_level, from_live)
-                         VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? )"""
-
-        self.tdenv.DEBUG0("Getting list of commodities...")
-        items = [cols[0] for cols in self.execute("SELECT item_id FROM Item ORDER BY item_id")]
-
-        self.tdenv.DEBUG0("Getting list of stations...")
-        stationList = {
-            stationID
-            for (stationID,) in self.execute("SELECT station_id FROM Station")
-        }
-
-        stationItems = dict(self.execute('SELECT station_id, UNIXEPOCH(modified) FROM StationItem').fetchall())
+        # Fetch all the items IDS
+        db = self.tdb.getDB()
+        item_lookup = _make_item_id_lookup(self.tdenv, db.cursor())
+        station_lookup = _make_station_id_lookup(self.tdenv, db.cursor())
+        last_station_update_times = _collect_station_modified_times(self.tdenv, db.cursor())
 
+        cur_station = None
         self.tdenv.DEBUG0("Processing entries...")
         with listings_path.open("r", encoding="utf-8", errors="ignore") as fh:
             prog = pbar.Progress(total, 50)
-
-
-            cur_station = -1
+
+            cursor: Optional[sqlite3.Cursor] = db.cursor()
 
-            for listing in
-
-                # Do a commit and close the DB every 2%.
-                # This ensures the listings are put in the DB and the WAL is cleared.
-                self.commit()
-                self.tdb.close()
+            for listing in csv.DictReader(fh):
+                prog.increment(1, postfix = lambda value, total: f" {(value / total * 100):.0f}% {value} / {total}")
 
                 station_id = int(listing['station_id'])
-                if station_id not in
+                if station_id not in station_lookup:
                     continue
 
+                listing_time = int(listing['collected_at'])
+
                 if station_id != cur_station:
-
-
+                    # commit anything from the previous station, get a new cursor
+                    db.commit()
+                    cur_station, skip_station, cursor = station_id, False, db.cursor()
 
                 # Check if listing already exists in DB and needs updated.
-
+                last_modified: int = int(last_station_update_times.get(station_id, 0))
+                if last_modified:
                     # When the listings.csv data matches the database, update to make from_live == 0.
-                    if
-                        self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live'.")
-
+                    if listing_time == last_modified and not from_live:
+                        self.tdenv.DEBUG1(f"Marking {cur_station} as no longer 'live' (old={last_modified}, listing={listing_time}).")
+                        cursor.execute(stmt_unliven_station, (cur_station,))
+                        skip_station = True
+                        continue
+
                     # Unless the import file data is newer, nothing else needs to be done for this station,
                     # so the rest of the listings for this station can be skipped.
-                    if
-
+                    if listing_time <= last_modified:
+                        skip_station = True
                         continue
 
                     # The data from the import file is newer, so we need to delete the old data for this station.
-                    self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station}.")
-
-
-                    del stationItems[station_id]
-
+                    self.tdenv.DEBUG1(f"Deleting old listing data for {cur_station} (old={last_modified}, listing={listing_time}).")
+                    cursor.execute(stmt_flush_station, (cur_station,))
+                    last_station_update_times[station_id] = listing_time
 
-
+                # station skip lasts until we change station id.
+                if skip_station:
                     continue
 
                 # Since this station is not being skipped, get the data and prepare for insertion into the DB.
                 item_id = int(listing['commodity_id'])
                 # listings.csv includes rare items, which we are ignoring.
-                if item_id not in
+                if item_id not in item_lookup:
                     continue
-
+
                 demand_price = int(listing['sell_price'])
                 demand_units = int(listing['demand'])
-                demand_level = int(listing
+                demand_level = int(listing.get('demand_bracket') or '-1')
                 supply_price = int(listing['buy_price'])
                 supply_units = int(listing['supply'])
-                supply_level = int(listing
+                supply_level = int(listing.get('supply_bracket') or '-1')
 
                 self.tdenv.DEBUG1(f"Inserting new listing data for {station_id}.")
-
-
-
+                cursor.execute(stmt_add_listing, (
+                    station_id, item_id, listing_time, from_live,
+                    demand_price, demand_units, demand_level,
+                    supply_price, supply_units, supply_level,
+                ))
 
-
-
-
-
-
-
-
+        prog.clear()
+
+        # Do a final commit to be sure
+        db.commit()
+
+        self.tdenv.NOTE("Optimizing database...")
+        db.execute("VACUUM")
+        self.tdb.close()
 
         self.tdenv.NOTE("Finished processing market data. End time = {}", self.now())
 
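One detail worth calling out from the rewritten importListings(): the new stmt_add_listing converts the integer collected_at timestamp to a stored datetime at insert time via SQLite's datetime(?, 'unixepoch'). A minimal demonstration (table and values simplified, not the real schema):

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE StationItem (station_id INTEGER, item_id INTEGER, modified TEXT)")
db.execute(
    "INSERT OR IGNORE INTO StationItem (station_id, item_id, modified) "
    "VALUES (?, ?, datetime(?, 'unixepoch'))",
    (128049552, 42, 1700000000),          # made-up IDs, UNIX timestamp
)
print(db.execute("SELECT modified FROM StationItem").fetchone())   # ('2023-11-14 22:13:20',)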
@@ -347,14 +343,13 @@ class ImportPlugin(plugins.ImportPluginBase):
         # have been passed, enable 'listings'.
         default = True
         for option in self.options:
-
-            if option not in ('force', 'skipvend'):
+            if option not in ('force', 'skipvend', 'purge'):
                 default = False
         if default:
             self.options["listings"] = True
 
-        # We can probably safely assume that the plugin
-        # the
+        # We can probably safely assume that the plugin
+        # has never been run if the db file doesn't exist.
         if not (self.tdb.dataPath / Path("TradeDangerous.db")).exists():
             self.options["clean"] = True
@@ -497,7 +492,6 @@ class ImportPlugin(plugins.ImportPluginBase):
 
         if self.getOption("purge"):
             self.purgeSystems()
-            # self.commit()
 
         if self.getOption("listings"):
             if self.downloadFile(self.listingsPath) or self.getOption("force"):
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous/plugins/spansh_plug.py
@@ -1,26 +1,29 @@
+""" Plugin for importing data from spansh """
 from __future__ import annotations
 
+from collections import namedtuple
 from contextlib import contextmanager
 from datetime import datetime, timedelta
 from pathlib import Path
+from rich.progress import Progress
+
+from .. import plugins, cache, transfers, csvexport, corrections
 
+import sqlite3
 import sys
 import time
 import typing
-
+
+import ijson
+
 if sys.version_info.major == 3 and sys.version_info.minor >= 10:
     from dataclasses import dataclass
 else:
     dataclass = False  # pylint: disable=invalid-name
 
-from rich.progress import Progress
-import ijson
-import sqlite3
-
-from .. import plugins, cache, transfers, csvexport, corrections
-
 if typing.TYPE_CHECKING:
-    from typing import Any,
+    from typing import Any, Optional
+    from collections.abc import Iterable
     from .. tradeenv import TradeEnv
 
 SOURCE_URL = 'https://downloads.spansh.co.uk/galaxy_stations.json'
@@ -237,11 +240,11 @@ class ImportPlugin(plugins.ImportPluginBase):
     def commit(self, *, force: bool = False) -> None:
         """ Perform a commit if required, but try not to do a crazy amount of committing. """
         if not force and not self.need_commit:
-            return
+            return
 
         if not force and self.commit_limit > 0:
             self.commit_limit -= 1
-            return
+            return
 
         db = self.tdb.getDB()
         db.commit()
@@ -291,19 +294,14 @@ class ImportPlugin(plugins.ImportPluginBase):
 
         for station, commodities in stations:
             fq_station_name = f'@{upper_sys}/{station.name}'
-            if age_cutoff and (now - station.modified) > age_cutoff:
-                if self.tdenv.detail:
-                    self.print(f' | {fq_station_name:50s} | Skipping station due to age: {now - station.modified}, ts: {station.modified}')
-                progress.bump(sys_task)
-                continue
 
             station_info = self.known_stations.get(station.id)
-            if not station_info:
+            if not station_info or station.modified > station_info[2]:
                 self.ensure_station(station)
             elif station_info[1] != station.system_id:
                 self.print(f' | {station.name:50s} | Megaship station moved, updating system')
                 self.execute("UPDATE Station SET system_id = ? WHERE station_id = ?", station.system_id, station.id, commitable=True)
-                self.known_stations[station.id] = (station.name, station.system_id)
+                self.known_stations[station.id] = (station.name, station.system_id, station.modified)
 
             items = []
             db_times = dict(self.execute("SELECT item_id, modified FROM StationItem WHERE station_id = ?", station.id))
@@ -312,12 +310,19 @@ class ImportPlugin(plugins.ImportPluginBase):
             if commodity.id not in self.known_commodities:
                 commodity = self.ensure_commodity(commodity)
 
+            # We're concerned with the market age, not the station age,
+            # as they each have their own 'modified' times.
+            if age_cutoff and (now - commodity.modified) > age_cutoff:
+                if self.tdenv.detail:
+                    self.print(f' | {fq_station_name:50s} | Skipping station due to age: {now - station.modified}, ts: {station.modified}')
+                break
+
             db_modified = db_times.get(commodity.id)
             modified = parse_ts(db_modified) if db_modified else None
             if modified and commodity.modified <= modified:
                 # All commodities in a station will have the same modified time,
                 # so no need to check the rest if the fist is older.
-                if self.tdenv.detail:
+                if self.tdenv.detail > 2:
                     self.print(f' | {fq_station_name:50s} | Skipping older commodity data')
                     break
             items.append((station.id, commodity.id, commodity.modified,
@@ -334,8 +339,8 @@ class ImportPlugin(plugins.ImportPluginBase):
                     ?, ?, ?, ?
                 )""", items, commitable=True)
                 commodity_count += len(items)
-
-
+            # Good time to save data and try to keep the transaction small
+            self.commit()
 
             if commodity_count:
                 station_count += 1
@@ -357,12 +362,6 @@ class ImportPlugin(plugins.ImportPluginBase):
             progress.update(f"{sys_desc}{DIM} ({total_station_count}:station:, {avg_stations:.1f}per:glowing_star:){CLOSE}")
 
         self.commit()
-
-        # Need to make sure cached tables are updated, if changes were made
-        # if self.update_cache:
-        #     for table in [ "Item", "Station", "System" ]:
-        #         _, path = csvexport.exportTableToFile( self.tdb, self.tdenv, table )
-
         self.tdb.close()
 
         # Need to make sure cached tables are updated
@@ -392,7 +391,7 @@ class ImportPlugin(plugins.ImportPluginBase):
             categories.setdefault(commodity.category, []).append(commodity)
         return categories
 
-    def execute(self, query: str, *params, commitable: bool = False) ->
+    def execute(self, query: str, *params, commitable: bool = False) -> sqlite3.Cursor:
         """ helper method that performs retriable queries and marks the transaction as needing to commit
         if the query is commitable."""
         if commitable:
@@ -404,14 +403,14 @@ class ImportPlugin(plugins.ImportPluginBase):
             except sqlite3.OperationalError as ex:
                 if "no transaction is active" in str(ex):
                     self.print(f"no transaction for {query}")
-
+                    raise
                 if not attempts:
                     raise
                 attempts -= 1
                 self.print(f'Retrying query \'{query}\': {ex!s}')
                 time.sleep(1)
 
-    def executemany(self, query: str, data: Iterable[Any], *, commitable: bool = False) ->
+    def executemany(self, query: str, data: Iterable[Any], *, commitable: bool = False) -> sqlite3.Cursor:
         """ helper method that performs retriable queries and marks the transaction as needing to commit
         if the query is commitable."""
         if commitable:
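The execute()/executemany() helpers retry transient sqlite3.OperationalError failures (typically a locked database) a few times, but now re-raise immediately when no transaction is active. A stand-alone sketch of that retry shape (simplified, outside the plugin class):

import sqlite3
import time

def execute_with_retry(db: sqlite3.Connection, query: str, *params, attempts: int = 3) -> sqlite3.Cursor:
    while True:
        try:
            return db.execute(query, params)
        except sqlite3.OperationalError as ex:
            if "no transaction is active" in str(ex):
                raise                      # nothing to retry, surface it
            if not attempts:
                raise
            attempts -= 1
            print(f"Retrying query {query!r}: {ex!s}")
            time.sleep(1)

db = sqlite3.connect(":memory:")
execute_with_retry(db, "CREATE TABLE t (x INTEGER)")
execute_with_retry(db, "INSERT INTO t VALUES (?)", 1)
print(execute_with_retry(db, "SELECT x FROM t").fetchall())   # [(1,)]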
@@ -423,7 +422,7 @@ class ImportPlugin(plugins.ImportPluginBase):
             except sqlite3.OperationalError as ex:
                 if "no transaction is active" in str(ex):
                     self.print(f"no transaction for {query}")
-
+                    raise
                 if not attempts:
                     raise
                 attempts -= 1
@@ -442,7 +441,7 @@ class ImportPlugin(plugins.ImportPluginBase):
     def load_known_stations(self) -> dict[int, tuple[str, int]]:
         """ Returns a dictionary of {station_id -> (station_name, system_id)} for all current stations in the database. """
         try:
-            return {cols[0]: (cols[1], cols[2]) for cols in self.cursor.execute('SELECT station_id, name, system_id FROM Station')}
+            return {cols[0]: (cols[1], cols[2], parse_ts(cols[3])) for cols in self.cursor.execute('SELECT station_id, name, system_id, modified FROM Station')}
         except Exception as e:  # pylint: disable=broad-except
             self.print("[purple]:thinking_face:Assuming no station data yet")
             self.tdenv.DEBUG0(f"load_known_stations query raised {e}")
@@ -474,7 +473,7 @@ class ImportPlugin(plugins.ImportPluginBase):
         """ Adds a record for a station, and registers the station in the known_stations dict. """
         self.execute(
             '''
-            INSERT INTO Station (
+            INSERT OR REPLACE INTO Station (
                 system_id, station_id, name,
                 ls_from_star, max_pad_size,
                 market, blackmarket, shipyard, outfitting,
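Switching ensure_station() to INSERT OR REPLACE turns the statement into an upsert keyed on the station's primary key, which is what lets a station with a newer modified time overwrite its old row. A tiny demonstration with a simplified table (not the real Station schema):

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE Station (station_id INTEGER PRIMARY KEY, name TEXT, modified TEXT)")
db.execute("INSERT OR REPLACE INTO Station VALUES (?, ?, ?)", (1, "Jameson Memorial", "2024-01-01 00:00:00"))
db.execute("INSERT OR REPLACE INTO Station VALUES (?, ?, ?)", (1, "Jameson Memorial", "2024-06-01 12:00:00"))
print(db.execute("SELECT * FROM Station").fetchall())   # [(1, 'Jameson Memorial', '2024-06-01 12:00:00')]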
@@ -510,9 +509,10 @@ class ImportPlugin(plugins.ImportPluginBase):
             station.type,
             commitable=True,
         )
+        note = "Updated" if self.known_stations.get(station.id) else "Added"
         if self.tdenv.detail > 1:
-            self.print(f' | {station.name:50s} |
-        self.known_stations[station.id] = (station.name, station.system_id)
+            self.print(f' | {station.name:50s} | {note} station')
+        self.known_stations[station.id] = (station.name, station.system_id, station.modified)
 
     def ensure_commodity(self, commodity: Commodity):
         """ Adds a record for a commodity and registers the commodity in the known_commodities dict. """
{tradedangerous-11.0.3 → tradedangerous-11.0.5/tradedangerous.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tradedangerous
-Version: 11.0.3
+Version: 11.0.5
 Summary: Trade-Dangerous is a set of powerful trading tools for Elite Dangerous, organized around one of the most powerful trade run optimizers available.
 Home-page: https://github.com/eyeonus/Trade-Dangerous
 Author: eyeonus
{tradedangerous-11.0.3 → tradedangerous-11.0.5}/tradedangerous.egg-info/SOURCES.txt
@@ -3,6 +3,8 @@ README.md
 pyproject.toml
 setup.cfg
 setup.py
+trade.py
+tradegui.py
 tests/test_bootstrap_commands.py
 tests/test_bootstrap_plugins.py
 tests/test_cache.py
@@ -28,12 +30,10 @@ tradedangerous/mapping.py
 tradedangerous/prices.py
 tradedangerous/submit-distances.py
 tradedangerous/tools.py
-tradedangerous/trade.py
 tradedangerous/tradecalc.py
 tradedangerous/tradedb.py
 tradedangerous/tradeenv.py
 tradedangerous/tradeexcept.py
-tradedangerous/tradegui.py
 tradedangerous/transfers.py
 tradedangerous/utils.py
 tradedangerous/version.py