tradedangerous-11.5.3-py3-none-any.whl → tradedangerous-12.0.1-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of tradedangerous has been flagged as possibly problematic.
- tradedangerous/cache.py +567 -395
- tradedangerous/cli.py +2 -2
- tradedangerous/commands/TEMPLATE.py +25 -26
- tradedangerous/commands/__init__.py +8 -16
- tradedangerous/commands/buildcache_cmd.py +40 -10
- tradedangerous/commands/buy_cmd.py +57 -46
- tradedangerous/commands/commandenv.py +0 -2
- tradedangerous/commands/export_cmd.py +78 -50
- tradedangerous/commands/import_cmd.py +67 -31
- tradedangerous/commands/market_cmd.py +52 -19
- tradedangerous/commands/olddata_cmd.py +120 -107
- tradedangerous/commands/rares_cmd.py +122 -110
- tradedangerous/commands/run_cmd.py +118 -66
- tradedangerous/commands/sell_cmd.py +52 -45
- tradedangerous/commands/shipvendor_cmd.py +49 -234
- tradedangerous/commands/station_cmd.py +55 -485
- tradedangerous/commands/update_cmd.py +56 -420
- tradedangerous/csvexport.py +173 -162
- tradedangerous/db/__init__.py +27 -0
- tradedangerous/db/adapter.py +191 -0
- tradedangerous/db/config.py +95 -0
- tradedangerous/db/engine.py +246 -0
- tradedangerous/db/lifecycle.py +332 -0
- tradedangerous/db/locks.py +208 -0
- tradedangerous/db/orm_models.py +455 -0
- tradedangerous/db/paths.py +112 -0
- tradedangerous/db/utils.py +661 -0
- tradedangerous/gui.py +2 -2
- tradedangerous/plugins/eddblink_plug.py +387 -251
- tradedangerous/plugins/spansh_plug.py +2488 -821
- tradedangerous/prices.py +124 -142
- tradedangerous/templates/TradeDangerous.sql +6 -6
- tradedangerous/tradecalc.py +1227 -1109
- tradedangerous/tradedb.py +533 -384
- tradedangerous/tradeenv.py +12 -1
- tradedangerous/version.py +1 -1
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/METADATA +11 -7
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/RECORD +42 -38
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/WHEEL +1 -1
- tradedangerous/commands/update_gui.py +0 -721
- tradedangerous/jsonprices.py +0 -254
- tradedangerous/plugins/edapi_plug.py +0 -1071
- tradedangerous/plugins/journal_plug.py +0 -537
- tradedangerous/plugins/netlog_plug.py +0 -316
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/entry_points.txt +0 -0
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info/licenses}/LICENSE +0 -0
- {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/top_level.txt +0 -0
tradedangerous/csvexport.py
CHANGED
@@ -1,31 +1,31 @@
 from pathlib import Path
+from sqlalchemy import inspect, text
 from .tradeexcept import TradeException
 
+from .db import utils as db_utils
+
+
 import csv
 import os
-import sqlite3
 
 ######################################################################
 # TradeDangerous :: Modules :: CSV Exporter
 #
-# Generate
+# Generate CSV files for database tables.
 #
-#
-#
-#
-#
-#
-# * One column primary keys will be handled by the database engine
+# Assumptions:
+# * Each table has at most one UNIQUE index.
+# * Referenced tables also have a UNIQUE index.
+# * Only single-column foreign keys are supported.
+# * Single-column primary keys are inferred automatically by SQLAlchemy.
 #
-
-# CAUTION: If the database structure gets changed this script might
-# need some corrections.
+# CAUTION: If the schema changes this module may require updates.
 ######################################################################
 
 ######################################################################
 # Default values
 
-#
+# For some tables the first two columns will be reversed
 reverseList = []
 
 ######################################################################
@@ -38,183 +38,194 @@ def search_keyList(items, val):
         return row
     return None
 
-
-
-
+
+def getUniqueIndex(session, tableName):
+    """Return all unique columns via SQLAlchemy inspector."""
+    inspector = inspect(session.get_bind())
     unqIndex = []
-    for
-    if
-
-    unqCursor = conn.cursor()
-    for unqRow in unqCursor.execute("PRAGMA index_info('%s')" % idxRow['name']):
-        unqIndex.append(unqRow['name'])
+    for idx in inspector.get_indexes(tableName):
+        if idx.get("unique"):
+            unqIndex.extend(idx.get("column_names", []))
     return unqIndex
 
-
-
+
+def getFKeyList(session, tableName):
+    """Return all single-column foreign keys via SQLAlchemy inspector."""
+    inspector = inspect(session.get_bind())
     keyList = []
-
-
-
-    if
-
-
-
-
-
-    if keyRow['seq'] == 1:
-        # if there is a second column, remove it from the list
-        keyList.remove( keyList[keyCount] )
-        keyCount -= 1
-
+    for fk in inspector.get_foreign_keys(tableName):
+        cols = fk.get("constrained_columns", [])
+        referred = fk.get("referred_columns", [])
+        if len(cols) == 1 and len(referred) == 1:
+            keyList.append({
+                "table": fk.get("referred_table"),
+                "from": cols[0],
+                "to": referred[0],
+            })
     return keyList
 
-
+
+def buildFKeyStmt(session, tableName, key):
     """
-
-
-
+    Resolve the FK constraint against the UNIQUE index of the
+    referenced table.
+
+    Multicolumn UNIQUEs are allowed, but only the last column
+    may be treated as a single-column join target.
     """
-    unqIndex = getUniqueIndex(
-    keyList
+    unqIndex = getUniqueIndex(session, key["table"])
+    keyList = getFKeyList(session, key["table"])
     keyStmt = []
+
     for colName in unqIndex:
-        #
+        # If this unique column is itself a foreign key, recurse
        keyKey = search_keyList(keyList, colName)
        if keyKey:
-
-            for row in newStmt:
-                keyStmt.append(row)
+            keyStmt.extend(buildFKeyStmt(session, key["table"], keyKey))
        else:
            keyStmt.append({
-
-
-
-
+                "table": tableName,
+                "column": colName,
+                "joinTable": key["table"],
+                "joinColumn": key["to"],
            })
-
+
    return keyStmt
 
+
 ######################################################################
 # Code
 ######################################################################
 
-def exportTableToFile(
+def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
     """
-
-
+    Generate the CSV file for tableName in csvPath.
+    Returns (lineCount, exportPath).
+
+    Behaviour:
+    - Prefix unique columns with "unq:".
+    - Foreign keys are exported as "<col>@<joinTable>.<uniqueCol>".
+    - Datetime-like values for 'modified' columns are exported as
+      "YYYY-MM-DD HH:MM:SS" (no microseconds).
+
+    Compatible with either:
+    * a SQLAlchemy Session
+    * a TradeDB wrapper exposing .engine
     """
-
-
-
-    if
-
-
-
-
-
-
-
+    from sqlalchemy.orm import Session
+
+    # --- Resolve a SQLAlchemy session ---
+    if hasattr(tdb_or_session, "engine"):
+        # Likely a TradeDB instance
+        engine = tdb_or_session.engine
+        session = Session(engine)
+    elif hasattr(tdb_or_session, "get_bind"):
+        # Already a Session
+        session = tdb_or_session
+    else:
+        raise TradeException(
+            f"Unsupported DB object passed to exportTableToFile: {type(tdb_or_session)}"
+        )
+
+    csvPath = csvPath or Path(tdenv.csvDir)
+    if not Path(csvPath).is_dir():
+        raise TradeException(f"Save location '{csvPath}' not found.")
+
     uniquePfx = "unq:"
-
-
-
-
-
-
-
-
+    exportPath = (Path(csvPath) / Path(tableName)).with_suffix(".csv")
+    tdenv.DEBUG0(f"Export Table '{tableName}' to '{exportPath}'")
+
+    def _fmt_ts(val):
+        if hasattr(val, "strftime"):
+            try:
+                return val.strftime("%Y-%m-%d %H:%M:%S")
+            except Exception:
+                pass
+        if isinstance(val, str):
+            s = val
+            if len(s) >= 19 and s[10] == "T":
+                s = s[:10] + " " + s[11:]
+            if len(s) >= 19 and s[4] == "-" and s[7] == "-" and s[10] == " " and s[13] == ":" and s[16] == ":":
+                return s[:19]
+        return val
+
     lineCount = 0
-    with exportPath.open("w", encoding=
-        exportOut = csv.writer(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        raise TradeException("No columns to export for table '{}'.".format(tableName))
-
-    # reverse the first two columns for some tables
-    if tableName in reverseList:
-        columnList[0], columnList[1] = columnList[1], columnList[0]
-
-    # initialize helper lists
-    csvHead = []
+    with exportPath.open("w", encoding="utf-8", newline="\n") as exportFile:
+        exportOut = csv.writer(
+            exportFile,
+            delimiter=",",
+            quotechar="'",
+            doublequote=True,
+            quoting=csv.QUOTE_NONNUMERIC,
+            lineterminator="\n",
+        )
+
+        bind = session.get_bind()
+        inspector = inspect(bind)
+
+        try:
+            unique_cols = db_utils.get_unique_columns(session, tableName)
+            fk_list = db_utils.get_foreign_keys(session, tableName)
+        except Exception as e:
+            raise TradeException(f"Failed to introspect table '{tableName}': {e!r}")
+
+        csvHead = []
         stmtColumn = []
-        stmtTable
-        stmtOrder
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # column is part of an unique index
-            csvPfx = uniquePfx + csvPfx
-        csvHead += [ "{}{}@{}.{}".format(csvPfx, keyRow['column'], keyRow['joinTable'], keyRow['joinColumn']) ]
-        stmtColumn += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ]
-        if col['notnull']:
-            stmtTable += [ 'INNER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ]
-        else:
-            stmtTable += [ 'LEFT OUTER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ]
-        stmtOrder += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ]
+        stmtTable = [tableName]
+        stmtOrder = []
+        is_modified_col = []
+
+        for col in inspector.get_columns(tableName):
+            col_name = col["name"]
+            fk = next((fk for fk in fk_list if fk["from"] == col_name), None)
+            if fk:
+                joinTable = fk["table"]
+                joinColumn = fk["to"]
+                join_unique_cols = db_utils.get_unique_columns(session, joinTable)
+                if not join_unique_cols:
+                    raise TradeException(
+                        f"No unique column found in referenced table '{joinTable}'"
+                    )
+                export_col = join_unique_cols[0]
+                csvPfx = uniquePfx if col_name in unique_cols else ""
+                csvHead.append(f"{csvPfx}{col_name}@{joinTable}.{export_col}")
+                stmtColumn.append(f"{joinTable}.{export_col}")
+                is_modified_col.append(export_col == "modified")
+                nullable = bool(col.get("nullable", True))
+                join_type = "LEFT OUTER JOIN" if nullable else "INNER JOIN"
+                stmtTable.append(
+                    f"{join_type} {joinTable} ON {tableName}.{col_name} = {joinTable}.{joinColumn}"
+                )
+                stmtOrder.append(f"{joinTable}.{export_col}")
             else:
-
-
-
-                csvHead += [ uniquePfx + col['name'] ]
-                stmtOrder += [ "{}.{}".format(tableName, col['name']) ]
+                if col_name in unique_cols:
+                    csvHead.append(uniquePfx + col_name)
+                    stmtOrder.append(f"{tableName}.{col_name}")
                 else:
-                    csvHead
-                    stmtColumn
-
-
-        sqlStmt = "SELECT {
-        if
-            sqlStmt += " ORDER BY {
-        tdenv.DEBUG1("SQL:
-
-
-
-
-        for line in cur.execute(sqlStmt):
+                    csvHead.append(col_name)
+                    stmtColumn.append(f"{tableName}.{col_name}")
+                    is_modified_col.append(col_name == "modified")
+
+        sqlStmt = f"SELECT {','.join(stmtColumn)} FROM {' '.join(stmtTable)}"
+        if stmtOrder:
+            sqlStmt += f" ORDER BY {','.join(stmtOrder)}"
+        tdenv.DEBUG1(f"SQL: {sqlStmt}")
+
+        exportFile.write(f"{','.join(csvHead)}\n")
+
+        for row in session.execute(text(sqlStmt)):
             lineCount += 1
-
-
-
-
-
-
-
+            row_out = [
+                _fmt_ts(val) if is_modified_col[i] else val
+                for i, val in enumerate(row)
+            ]
+            tdenv.DEBUG2(f"{lineCount}: {row_out}")
+            exportOut.writerow(row_out)
+
+    tdenv.DEBUG1(f"{lineCount} {tableName}s exported")
+
+    # Close session if we created it
+    if hasattr(tdb_or_session, "engine"):
+        session.close()
+
     return lineCount, exportPath
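The new signature means the exporter can now be driven standalone: the first argument may be either a TradeDB object exposing .engine or a plain SQLAlchemy Session, and tdenv only needs csvDir plus the DEBUG0/DEBUG1/DEBUG2 hooks. A minimal sketch under those assumptions; the _Env class is a hypothetical stand-in for tradeenv.TradeEnv, the database path is assumed, and System is one of the stock tables:

```python
from pathlib import Path

from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from tradedangerous import csvexport

class _Env:
    # Hypothetical stand-in for tradeenv.TradeEnv: exportTableToFile only
    # touches csvDir and the DEBUG0/DEBUG1/DEBUG2 logging hooks.
    csvDir = "data"
    def DEBUG0(self, msg): print(msg)
    DEBUG1 = DEBUG2 = DEBUG0

Path("data").mkdir(exist_ok=True)  # exportTableToFile requires the dir to exist
engine = create_engine("sqlite:///data/TradeDangerous.db")  # assumed DB location

with Session(engine) as session:
    # A Session has no .engine attribute, so it takes the get_bind() branch;
    # a TradeDB instance would take the .engine branch instead.
    lines, path = csvexport.exportTableToFile(session, _Env(), "System")
    print(f"{lines} rows -> {path}")
```

Passing the Session explicitly is what lets the module drop `import sqlite3`: all introspection now goes through the SQLAlchemy inspector and db.utils.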
tradedangerous/db/__init__.py
ADDED
@@ -0,0 +1,27 @@
+"""TradeDangerous SQLAlchemy bootstrap package (Stage 3A, Part 1).
+
+Side-effect free on import. Provides a minimal, cross-platform API
+for config loading, path resolution, and engine/session bootstrap.
+
+Usage:
+    from tradedangerous.db import (
+        load_config, resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path,
+        make_engine_from_config, get_session_factory, healthcheck,
+    )
+"""
+from .config import load_config
+from .paths import resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path
+from .engine import make_engine_from_config, get_session_factory, healthcheck
+from .lifecycle import ensure_fresh_db
+
+__all__ = [
+    "load_config",
+    "resolve_data_dir",
+    "resolve_tmp_dir",
+    "ensure_dir",
+    "resolve_db_config_path",
+    "make_engine_from_config",
+    "get_session_factory",
+    "healthcheck",
+    "ensure_fresh_db",
+]
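Because the package is side-effect free on import, actually opening a database is an explicit two-call affair. A sketch of the intended bootstrap flow, assuming a db_config.ini exists where resolve_db_config_path() looks for it; the call shapes for make_engine_from_config and get_session_factory are the ones adapter.py (below) uses:

```python
from tradedangerous.db import (
    resolve_db_config_path,
    make_engine_from_config,
    get_session_factory,
)

# Resolve the config file the same way the package does (env/CWD).
cfg_path = str(resolve_db_config_path())

# No engine exists until this call, honouring "side-effect free on import".
engine = make_engine_from_config(cfg_path)
SessionFactory = get_session_factory(engine)

with SessionFactory() as session:
    # Read-only work against the configured backend goes here.
    pass
```

healthcheck and ensure_fresh_db are exported alongside these but their signatures are not shown in this diff, so they are omitted from the sketch.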
tradedangerous/db/adapter.py
ADDED
@@ -0,0 +1,191 @@
+# tradedangerous/db/adapter.py
+from __future__ import annotations
+
+from contextlib import contextmanager
+from typing import Dict, Generator, Iterable, Optional, Tuple
+
+from sqlalchemy import select, func
+from sqlalchemy.engine import Engine
+from sqlalchemy.orm import Session
+
+# Local engine + ORM (authoritative)
+from .engine import make_engine_from_config, get_session_factory  # uses env/CWD-resolved db_config.ini by default
+from .orm_models import System, Station, Item, StationItem  # canonical models
+from .paths import resolve_db_config_path
+
+# ---- Public factory ---------------------------------------------------------
+
+def get_adapter_if_enabled(cfg_path: Optional[str] = None) -> "TradeDBReadAdapter | None":
+    """
+    Return an adapter when [database] backend != 'sqlite', else None.
+    - No engine/session created at import: construction is lazy.
+    - This is called by tradedb.py (thin gate).
+    """
+    import configparser, os
+    if cfg_path is None:
+        cfg_path = str(resolve_db_config_path())
+    cfg = configparser.ConfigParser()
+    if not os.path.exists(cfg_path):
+        return None
+    with open(cfg_path, "r", encoding="utf-8") as fh:
+        cfg.read_file(fh)
+    backend = (cfg.get("database", "backend", fallback="sqlite") or "sqlite").strip().lower()
+    if backend == "sqlite":
+        return None
+
+    # Engine is created lazily via the property below to honour "no side-effects at import".
+    return TradeDBReadAdapter(cfg_path)
+
+# ---- Adapter (read-only) ----------------------------------------------------
+
+class TradeDBReadAdapter:
+    """
+    Very small, read-only façade over SQLAlchemy for legacy TradeDB reads:
+    - systems() list
+    - lookup system by name (case-insensitive)
+    - station by (system_id, station_name) (case-insensitive)
+    - average selling/buying prices (used by trade_cmd at detail>1)
+    """
+    def __init__(self, cfg_path: str):
+        self._cfg_path = cfg_path
+        self._engine: Optional[Engine] = None
+        self._Session = None  # sessionmaker
+
+    # Lazy engine/session factory (no import-time work)
+    @property
+    def Session(self):
+        if self._Session is None:
+            engine = make_engine_from_config(self._cfg_path)
+            self._engine = engine
+            self._Session = get_session_factory(engine)
+        return self._Session
+
+    @contextmanager
+    def session(self) -> Generator[Session, None, None]:
+        Session = self.Session
+        with Session() as s:
+            yield s
+
+    # ---- Reads mapped to ORM ------------------------------------------------
+
+    def list_system_rows(self) -> Iterable[Tuple[int, str, float, float, float, Optional[int]]]:
+        """
+        Shape matches legacy _loadSystems SELECT:
+        (system_id, name, pos_x, pos_y, pos_z, added_id)
+        """
+        with self.session() as s:
+            rows = s.execute(
+                select(
+                    System.system_id,
+                    System.name,
+                    System.pos_x,
+                    System.pos_y,
+                    System.pos_z,
+                    System.added_id,
+                )
+            )
+            for r in rows:
+                yield (r.system_id, r.name, r.pos_x, r.pos_y, r.pos_z, r.added_id)
+
+    def system_by_name(self, name_ci: str) -> Optional[Tuple[int, str, float, float, float, Optional[int]]]:
+        """
+        Case-insensitive name match for System.
+        """
+        with self.session() as s:
+            row = s.execute(
+                select(
+                    System.system_id, System.name, System.pos_x, System.pos_y, System.pos_z, System.added_id
+                ).where(func.upper(System.name) == func.upper(func.trim(func.cast(name_ci, System.name.type))))
+            ).first()
+            if not row:
+                return None
+            return (row.system_id, row.name, row.pos_x, row.pos_y, row.pos_z, row.added_id)
+
+    def station_by_system_and_name(
+        self, system_id: int, station_name_ci: str
+    ) -> Optional[Tuple[int, int, str, int, str, str, str, str, str, str, str, str, str, int]]:
+        """
+        Return the single Station row by system + name (CI).
+        Shape matches legacy _loadStations row consumed by Station(...):
+        (station_id, system_id, name,
+         ls_from_star, market, blackmarket, shipyard,
+         max_pad_size, outfitting, rearm, refuel, repair, planetary, type_id)
+        """
+        with self.session() as s:
+            r = s.execute(
+                select(
+                    Station.station_id,
+                    Station.system_id,
+                    Station.name,
+                    Station.ls_from_star,
+                    Station.market,
+                    Station.blackmarket,
+                    Station.shipyard,
+                    Station.max_pad_size,
+                    Station.outfitting,
+                    Station.rearm,
+                    Station.refuel,
+                    Station.repair,
+                    Station.planetary,
+                    Station.type_id,
+                ).where(
+                    Station.system_id == system_id,
+                    func.upper(Station.name) == func.upper(func.trim(func.cast(station_name_ci, Station.name.type))),
+                )
+            ).first()
+            if not r:
+                return None
+            return (
+                r.station_id,
+                r.system_id,
+                r.name,
+                r.ls_from_star,
+                r.market,
+                r.blackmarket,
+                r.shipyard,
+                r.max_pad_size,
+                r.outfitting,
+                r.rearm,
+                r.refuel,
+                r.repair,
+                r.planetary,
+                r.type_id,
+            )
+
+    def average_selling(self) -> Dict[int, int]:
+        """
+        {item_id: avg_supply_price>0}
+        Mirrors the legacy SQL used in TradeDB.getAverageSelling.
+        """
+        with self.session() as s:
+            rows = s.execute(
+                select(
+                    Item.item_id,
+                    func.IFNULL(func.avg(StationItem.supply_price), 0),
+                )
+                .select_from(Item.__table__.outerjoin(
+                    StationItem, (Item.item_id == StationItem.item_id) & (StationItem.supply_price > 0)
+                ))
+                .where(StationItem.supply_price > 0)
+                .group_by(Item.item_id)
+            )
+            return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}
+
+    def average_buying(self) -> Dict[int, int]:
+        """
+        {item_id: avg_demand_price>0}
+        Mirrors the legacy SQL used in TradeDB.getAverageBuying.
+        """
+        with self.session() as s:
+            rows = s.execute(
+                select(
+                    Item.item_id,
+                    func.IFNULL(func.avg(StationItem.demand_price), 0),
+                )
+                .select_from(Item.__table__.outerjoin(
+                    StationItem, (Item.item_id == StationItem.item_id) & (StationItem.demand_price > 0)
+                ))
+                .where(StationItem.demand_price > 0)
+                .group_by(Item.item_id)
+            )
+            return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}