tradedangerous 11.5.3__py3-none-any.whl → 12.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tradedangerous might be problematic; review the changes below for details.

Files changed (47)
  1. tradedangerous/cache.py +567 -395
  2. tradedangerous/cli.py +2 -2
  3. tradedangerous/commands/TEMPLATE.py +25 -26
  4. tradedangerous/commands/__init__.py +8 -16
  5. tradedangerous/commands/buildcache_cmd.py +40 -10
  6. tradedangerous/commands/buy_cmd.py +57 -46
  7. tradedangerous/commands/commandenv.py +0 -2
  8. tradedangerous/commands/export_cmd.py +78 -50
  9. tradedangerous/commands/import_cmd.py +67 -31
  10. tradedangerous/commands/market_cmd.py +52 -19
  11. tradedangerous/commands/olddata_cmd.py +120 -107
  12. tradedangerous/commands/rares_cmd.py +122 -110
  13. tradedangerous/commands/run_cmd.py +118 -66
  14. tradedangerous/commands/sell_cmd.py +52 -45
  15. tradedangerous/commands/shipvendor_cmd.py +49 -234
  16. tradedangerous/commands/station_cmd.py +55 -485
  17. tradedangerous/commands/update_cmd.py +56 -420
  18. tradedangerous/csvexport.py +173 -162
  19. tradedangerous/db/__init__.py +27 -0
  20. tradedangerous/db/adapter.py +191 -0
  21. tradedangerous/db/config.py +95 -0
  22. tradedangerous/db/engine.py +246 -0
  23. tradedangerous/db/lifecycle.py +332 -0
  24. tradedangerous/db/locks.py +208 -0
  25. tradedangerous/db/orm_models.py +455 -0
  26. tradedangerous/db/paths.py +112 -0
  27. tradedangerous/db/utils.py +661 -0
  28. tradedangerous/gui.py +2 -2
  29. tradedangerous/plugins/eddblink_plug.py +387 -251
  30. tradedangerous/plugins/spansh_plug.py +2488 -821
  31. tradedangerous/prices.py +124 -142
  32. tradedangerous/templates/TradeDangerous.sql +6 -6
  33. tradedangerous/tradecalc.py +1227 -1109
  34. tradedangerous/tradedb.py +533 -384
  35. tradedangerous/tradeenv.py +12 -1
  36. tradedangerous/version.py +1 -1
  37. {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/METADATA +11 -7
  38. {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/RECORD +42 -38
  39. {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/WHEEL +1 -1
  40. tradedangerous/commands/update_gui.py +0 -721
  41. tradedangerous/jsonprices.py +0 -254
  42. tradedangerous/plugins/edapi_plug.py +0 -1071
  43. tradedangerous/plugins/journal_plug.py +0 -537
  44. tradedangerous/plugins/netlog_plug.py +0 -316
  45. {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/entry_points.txt +0 -0
  46. {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info/licenses}/LICENSE +0 -0
  47. {tradedangerous-11.5.3.dist-info → tradedangerous-12.0.1.dist-info}/top_level.txt +0 -0
@@ -1,31 +1,31 @@
1
1
  from pathlib import Path
2
+ from sqlalchemy import inspect, text
2
3
  from .tradeexcept import TradeException
3
4
 
5
+ from .db import utils as db_utils
6
+
7
+
4
8
  import csv
5
9
  import os
6
- import sqlite3
7
10
 
8
11
  ######################################################################
9
12
  # TradeDangerous :: Modules :: CSV Exporter
10
13
  #
11
- # Generate a CSV files for a table of the database.
14
+ # Generate CSV files for database tables.
12
15
  #
13
- # Note: This routine makes some assumptions about the structure
14
- # of the database:
15
- # * The table should only have one UNIQUE index
16
- # * The referenced table must have one UNIQUE index
17
- # * The FK columns must have the same name in both tables
18
- # * One column primary keys will be handled by the database engine
16
+ # Assumptions:
17
+ # * Each table has at most one UNIQUE index.
18
+ # * Referenced tables also have a UNIQUE index.
19
+ # * Only single-column foreign keys are supported.
20
+ # * Single-column primary keys are inferred automatically by SQLAlchemy.
19
21
  #
20
- ######################################################################
21
- # CAUTION: If the database structure gets changed this script might
22
- # need some corrections.
22
+ # CAUTION: If the schema changes this module may require updates.
23
23
  ######################################################################
24
24
 
25
25
  ######################################################################
26
26
  # Default values
27
27
 
28
- # for some tables the first two columns will be reversed
28
+ # For some tables the first two columns will be reversed
29
29
  reverseList = []
30
30
 
31
31
  ######################################################################
@@ -38,183 +38,194 @@ def search_keyList(items, val):
38
38
  return row
39
39
  return None
40
40
 
41
- def getUniqueIndex(conn, tableName):
42
- """ return all unique columns """
43
- idxCursor = conn.cursor()
41
+
42
+ def getUniqueIndex(session, tableName):
43
+ """Return all unique columns via SQLAlchemy inspector."""
44
+ inspector = inspect(session.get_bind())
44
45
  unqIndex = []
45
- for idxRow in idxCursor.execute("PRAGMA index_list('%s')" % tableName):
46
- if idxRow['unique']:
47
- # it's a unique index
48
- unqCursor = conn.cursor()
49
- for unqRow in unqCursor.execute("PRAGMA index_info('%s')" % idxRow['name']):
50
- unqIndex.append(unqRow['name'])
46
+ for idx in inspector.get_indexes(tableName):
47
+ if idx.get("unique"):
48
+ unqIndex.extend(idx.get("column_names", []))
51
49
  return unqIndex
52
50
 
53
- def getFKeyList(conn, tableName):
54
- """ get all single column FKs """
51
+
52
+ def getFKeyList(session, tableName):
53
+ """Return all single-column foreign keys via SQLAlchemy inspector."""
54
+ inspector = inspect(session.get_bind())
55
55
  keyList = []
56
- keyCount = -1
57
- keyCursor = conn.cursor()
58
- for keyRow in keyCursor.execute("PRAGMA foreign_key_list('%s')" % tableName):
59
- if keyRow['seq'] == 0:
60
- keyCount += 1
61
- keyList.append( {'table': keyRow['table'],
62
- 'from': keyRow['from'],
63
- 'to': keyRow['to']}
64
- )
65
- if keyRow['seq'] == 1:
66
- # if there is a second column, remove it from the list
67
- keyList.remove( keyList[keyCount] )
68
- keyCount -= 1
69
-
56
+ for fk in inspector.get_foreign_keys(tableName):
57
+ cols = fk.get("constrained_columns", [])
58
+ referred = fk.get("referred_columns", [])
59
+ if len(cols) == 1 and len(referred) == 1:
60
+ keyList.append({
61
+ "table": fk.get("referred_table"),
62
+ "from": cols[0],
63
+ "to": referred[0],
64
+ })
70
65
  return keyList
71
66
 
72
- def buildFKeyStmt(conn, tableName, key):
67
+
68
+ def buildFKeyStmt(session, tableName, key):
73
69
  """
74
- resolve the FK constrain with the UNIQUE index
75
- multicolumn UNIQUEs are allowed as long as the last one
76
- will be a single column one
70
+ Resolve the FK constraint against the UNIQUE index of the
71
+ referenced table.
72
+
73
+ Multicolumn UNIQUEs are allowed, but only the last column
74
+ may be treated as a single-column join target.
77
75
  """
78
- unqIndex = getUniqueIndex(conn, key['table'])
79
- keyList = getFKeyList(conn, key['table'])
76
+ unqIndex = getUniqueIndex(session, key["table"])
77
+ keyList = getFKeyList(session, key["table"])
80
78
  keyStmt = []
79
+
81
80
  for colName in unqIndex:
82
- # check if the column is a foreign key
81
+ # If this unique column is itself a foreign key, recurse
83
82
  keyKey = search_keyList(keyList, colName)
84
83
  if keyKey:
85
- newStmt = buildFKeyStmt(conn, key['table'], keyKey)
86
- for row in newStmt:
87
- keyStmt.append(row)
84
+ keyStmt.extend(buildFKeyStmt(session, key["table"], keyKey))
88
85
  else:
89
86
  keyStmt.append({
90
- 'table': tableName,
91
- 'column': colName,
92
- 'joinTable': key['table'],
93
- 'joinColumn': key['to']
87
+ "table": tableName,
88
+ "column": colName,
89
+ "joinTable": key["table"],
90
+ "joinColumn": key["to"],
94
91
  })
95
-
92
+
96
93
  return keyStmt
97
94
 
95
+
98
96
  ######################################################################
99
97
  # Code
100
98
  ######################################################################
101
99
 
102
- def exportTableToFile(tdb, tdenv, tableName, csvPath=None):
100
+ def exportTableToFile(tdb_or_session, tdenv, tableName, csvPath=None):
103
101
  """
104
- Generate the csv file for tableName in csvPath
105
- returns lineCount, exportPath
102
+ Generate the CSV file for tableName in csvPath.
103
+ Returns (lineCount, exportPath).
104
+
105
+ Behaviour:
106
+ - Prefix unique columns with "unq:".
107
+ - Foreign keys are exported as "<col>@<joinTable>.<uniqueCol>".
108
+ - Datetime-like values for 'modified' columns are exported as
109
+ "YYYY-MM-DD HH:MM:SS" (no microseconds).
110
+
111
+ Compatible with either:
112
+ * a SQLAlchemy Session
113
+ * a TradeDB wrapper exposing .engine
106
114
  """
107
-
108
- # path for csv file
109
- csvPath = csvPath or tdb.csvPath
110
- if not csvPath.is_dir():
111
- raise TradeException("Save location '{}' not found.".format(str(csvPath)))
112
-
113
- # connect to the database
114
- conn = tdb.getDB()
115
- conn.row_factory = sqlite3.Row
116
-
117
- # prefix for unique/ignore columns
115
+ from sqlalchemy.orm import Session
116
+
117
+ # --- Resolve a SQLAlchemy session ---
118
+ if hasattr(tdb_or_session, "engine"):
119
+ # Likely a TradeDB instance
120
+ engine = tdb_or_session.engine
121
+ session = Session(engine)
122
+ elif hasattr(tdb_or_session, "get_bind"):
123
+ # Already a Session
124
+ session = tdb_or_session
125
+ else:
126
+ raise TradeException(
127
+ f"Unsupported DB object passed to exportTableToFile: {type(tdb_or_session)}"
128
+ )
129
+
130
+ csvPath = csvPath or Path(tdenv.csvDir)
131
+ if not Path(csvPath).is_dir():
132
+ raise TradeException(f"Save location '{csvPath}' not found.")
133
+
118
134
  uniquePfx = "unq:"
119
- ignorePfx = "!"
120
-
121
- # create CSV files
122
- exportPath = (csvPath / Path(tableName)).with_suffix(".csv")
123
- tdenv.DEBUG0("Export Table '{table}' to '{file}'".format(
124
- table=tableName, file=str(exportPath)
125
- ))
126
-
135
+ exportPath = (Path(csvPath) / Path(tableName)).with_suffix(".csv")
136
+ tdenv.DEBUG0(f"Export Table '{tableName}' to '{exportPath}'")
137
+
138
+ def _fmt_ts(val):
139
+ if hasattr(val, "strftime"):
140
+ try:
141
+ return val.strftime("%Y-%m-%d %H:%M:%S")
142
+ except Exception:
143
+ pass
144
+ if isinstance(val, str):
145
+ s = val
146
+ if len(s) >= 19 and s[10] == "T":
147
+ s = s[:10] + " " + s[11:]
148
+ if len(s) >= 19 and s[4] == "-" and s[7] == "-" and s[10] == " " and s[13] == ":" and s[16] == ":":
149
+ return s[:19]
150
+ return val
151
+
127
152
  lineCount = 0
128
- with exportPath.open("w", encoding='utf-8', newline="\n") as exportFile:
129
- exportOut = csv.writer(exportFile, delimiter=",", quotechar="'", doublequote=True, quoting=csv.QUOTE_NONNUMERIC, lineterminator="\n")
130
-
131
- cur = conn.cursor()
132
-
133
- # check for single PRIMARY KEY
134
- pkCount = 0
135
- for columnRow in cur.execute("PRAGMA table_info('%s')" % tableName):
136
- # count the columns of the primary key
137
- if columnRow['pk'] > 0:
138
- pkCount += 1
139
-
140
- # build column list
141
- columnList = []
142
- for columnRow in cur.execute("PRAGMA table_info('%s')" % tableName):
143
- # if there is only one PK column, ignore it
144
- # if columnRow['pk'] > 0 and pkCount == 1: continue
145
- columnList.append(columnRow)
146
-
147
- if len(columnList) == 0:
148
- raise TradeException("No columns to export for table '{}'.".format(tableName))
149
-
150
- # reverse the first two columns for some tables
151
- if tableName in reverseList:
152
- columnList[0], columnList[1] = columnList[1], columnList[0]
153
-
154
- # initialize helper lists
155
- csvHead = []
153
+ with exportPath.open("w", encoding="utf-8", newline="\n") as exportFile:
154
+ exportOut = csv.writer(
155
+ exportFile,
156
+ delimiter=",",
157
+ quotechar="'",
158
+ doublequote=True,
159
+ quoting=csv.QUOTE_NONNUMERIC,
160
+ lineterminator="\n",
161
+ )
162
+
163
+ bind = session.get_bind()
164
+ inspector = inspect(bind)
165
+
166
+ try:
167
+ unique_cols = db_utils.get_unique_columns(session, tableName)
168
+ fk_list = db_utils.get_foreign_keys(session, tableName)
169
+ except Exception as e:
170
+ raise TradeException(f"Failed to introspect table '{tableName}': {e!r}")
171
+
172
+ csvHead = []
156
173
  stmtColumn = []
157
- stmtTable = [ tableName ]
158
- stmtOrder = []
159
- unqIndex = getUniqueIndex(conn, tableName)
160
- keyList = getFKeyList(conn, tableName)
161
-
162
- tdenv.DEBUG1('UNIQUE: ' + ", ".join(unqIndex))
163
-
164
- # iterate over all columns of the table
165
- for col in columnList:
166
- # check if the column is a foreign key
167
- key = search_keyList(keyList, col['name'])
168
- if key:
169
- # make the join statement
170
- keyStmt = buildFKeyStmt(conn, tableName, key)
171
- for keyRow in keyStmt:
172
- tdenv.DEBUG1('FK-Stmt: {}'.format(list(keyRow)))
173
- # is the join for the same table
174
- if keyRow['table'] == tableName:
175
- csvPfx = ''
176
- joinStmt = 'USING({})'.format(keyRow['joinColumn'])
177
- else:
178
- # this column must be ignored by the importer, it's only
179
- # used to resolve the FK relation
180
- csvPfx = ignorePfx
181
- joinStmt = 'ON {}.{} = {}.{}'.format(keyRow['table'], keyRow['joinColumn'], keyRow['joinTable'], keyRow['joinColumn'])
182
- if col['name'] in unqIndex:
183
- # column is part of an unique index
184
- csvPfx = uniquePfx + csvPfx
185
- csvHead += [ "{}{}@{}.{}".format(csvPfx, keyRow['column'], keyRow['joinTable'], keyRow['joinColumn']) ]
186
- stmtColumn += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ]
187
- if col['notnull']:
188
- stmtTable += [ 'INNER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ]
189
- else:
190
- stmtTable += [ 'LEFT OUTER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ]
191
- stmtOrder += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ]
174
+ stmtTable = [tableName]
175
+ stmtOrder = []
176
+ is_modified_col = []
177
+
178
+ for col in inspector.get_columns(tableName):
179
+ col_name = col["name"]
180
+ fk = next((fk for fk in fk_list if fk["from"] == col_name), None)
181
+ if fk:
182
+ joinTable = fk["table"]
183
+ joinColumn = fk["to"]
184
+ join_unique_cols = db_utils.get_unique_columns(session, joinTable)
185
+ if not join_unique_cols:
186
+ raise TradeException(
187
+ f"No unique column found in referenced table '{joinTable}'"
188
+ )
189
+ export_col = join_unique_cols[0]
190
+ csvPfx = uniquePfx if col_name in unique_cols else ""
191
+ csvHead.append(f"{csvPfx}{col_name}@{joinTable}.{export_col}")
192
+ stmtColumn.append(f"{joinTable}.{export_col}")
193
+ is_modified_col.append(export_col == "modified")
194
+ nullable = bool(col.get("nullable", True))
195
+ join_type = "LEFT OUTER JOIN" if nullable else "INNER JOIN"
196
+ stmtTable.append(
197
+ f"{join_type} {joinTable} ON {tableName}.{col_name} = {joinTable}.{joinColumn}"
198
+ )
199
+ stmtOrder.append(f"{joinTable}.{export_col}")
192
200
  else:
193
- # ordinary column
194
- if col['name'] in unqIndex:
195
- # column is part of an unique index
196
- csvHead += [ uniquePfx + col['name'] ]
197
- stmtOrder += [ "{}.{}".format(tableName, col['name']) ]
201
+ if col_name in unique_cols:
202
+ csvHead.append(uniquePfx + col_name)
203
+ stmtOrder.append(f"{tableName}.{col_name}")
198
204
  else:
199
- csvHead += [ col['name'] ]
200
- stmtColumn += [ "{}.{}".format(tableName, col['name']) ]
201
-
202
- # build the SQL statement
203
- sqlStmt = "SELECT {} FROM {}".format(",".join(stmtColumn), " ".join(stmtTable))
204
- if len(stmtOrder) > 0:
205
- sqlStmt += " ORDER BY {}".format(",".join(stmtOrder))
206
- tdenv.DEBUG1("SQL: %s" % sqlStmt)
207
-
208
- # finally generate the csv file
209
- # write header line without quotes
210
- exportFile.write("{}\n".format(",".join(csvHead)))
211
- for line in cur.execute(sqlStmt):
205
+ csvHead.append(col_name)
206
+ stmtColumn.append(f"{tableName}.{col_name}")
207
+ is_modified_col.append(col_name == "modified")
208
+
209
+ sqlStmt = f"SELECT {','.join(stmtColumn)} FROM {' '.join(stmtTable)}"
210
+ if stmtOrder:
211
+ sqlStmt += f" ORDER BY {','.join(stmtOrder)}"
212
+ tdenv.DEBUG1(f"SQL: {sqlStmt}")
213
+
214
+ exportFile.write(f"{','.join(csvHead)}\n")
215
+
216
+ for row in session.execute(text(sqlStmt)):
212
217
  lineCount += 1
213
- tdenv.DEBUG2("{count}: {values}".format(count=lineCount, values=list(line)))
214
- exportOut.writerow(list(line))
215
- tdenv.DEBUG1("{count} {table}s exported".format(count=lineCount, table=tableName))
216
-
217
- # Update the DB file so we don't regenerate it.
218
- os.utime(str(tdb.dbPath))
219
-
218
+ row_out = [
219
+ _fmt_ts(val) if is_modified_col[i] else val
220
+ for i, val in enumerate(row)
221
+ ]
222
+ tdenv.DEBUG2(f"{lineCount}: {row_out}")
223
+ exportOut.writerow(row_out)
224
+
225
+ tdenv.DEBUG1(f"{lineCount} {tableName}s exported")
226
+
227
+ # Close session if we created it
228
+ if hasattr(tdb_or_session, "engine"):
229
+ session.close()
230
+
220
231
  return lineCount, exportPath
@@ -0,0 +1,27 @@
1
+ """TradeDangerous SQLAlchemy bootstrap package (Stage 3A, Part 1).
2
+
3
+ Side-effect free on import. Provides a minimal, cross-platform API
4
+ for config loading, path resolution, and engine/session bootstrap.
5
+
6
+ Usage:
7
+ from tradedangerous.db import (
8
+ load_config, resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path,
9
+ make_engine_from_config, get_session_factory, healthcheck,
10
+ )
11
+ """
12
+ from .config import load_config
13
+ from .paths import resolve_data_dir, resolve_tmp_dir, ensure_dir, resolve_db_config_path
14
+ from .engine import make_engine_from_config, get_session_factory, healthcheck
15
+ from .lifecycle import ensure_fresh_db
16
+
17
+ __all__ = [
18
+ "load_config",
19
+ "resolve_data_dir",
20
+ "resolve_tmp_dir",
21
+ "ensure_dir",
22
+ "resolve_db_config_path",
23
+ "make_engine_from_config",
24
+ "get_session_factory",
25
+ "healthcheck",
26
+ "ensure_fresh_db",
27
+ ]
@@ -0,0 +1,191 @@
1
+ # tradedangerous/db/adapter.py
2
+ from __future__ import annotations
3
+
4
+ from contextlib import contextmanager
5
+ from typing import Dict, Generator, Iterable, Optional, Tuple
6
+
7
+ from sqlalchemy import select, func
8
+ from sqlalchemy.engine import Engine
9
+ from sqlalchemy.orm import Session
10
+
11
+ # Local engine + ORM (authoritative)
12
+ from .engine import make_engine_from_config, get_session_factory # uses env/CWD-resolved db_config.ini by default
13
+ from .orm_models import System, Station, Item, StationItem # canonical models
14
+ from .paths import resolve_db_config_path
15
+
16
+ # ---- Public factory ---------------------------------------------------------
17
+
18
+ def get_adapter_if_enabled(cfg_path: Optional[str] = None) -> "TradeDBReadAdapter | None":
19
+ """
20
+ Return an adapter when [database] backend != 'sqlite', else None.
21
+ - No engine/session created at import: construction is lazy.
22
+ - This is called by tradedb.py (thin gate).
23
+ """
24
+ import configparser, os
25
+ if cfg_path is None:
26
+ cfg_path = str(resolve_db_config_path())
27
+ cfg = configparser.ConfigParser()
28
+ if not os.path.exists(cfg_path):
29
+ return None
30
+ with open(cfg_path, "r", encoding="utf-8") as fh:
31
+ cfg.read_file(fh)
32
+ backend = (cfg.get("database", "backend", fallback="sqlite") or "sqlite").strip().lower()
33
+ if backend == "sqlite":
34
+ return None
35
+
36
+ # Engine is created lazily via the property below to honour "no side-effects at import".
37
+ return TradeDBReadAdapter(cfg_path)
38
+
39
+ # ---- Adapter (read-only) ----------------------------------------------------
40
+
41
+ class TradeDBReadAdapter:
42
+ """
43
+ Very small, read-only façade over SQLAlchemy for legacy TradeDB reads:
44
+ - systems() list
45
+ - lookup system by name (case-insensitive)
46
+ - station by (system_id, station_name) (case-insensitive)
47
+ - average selling/buying prices (used by trade_cmd at detail>1)
48
+ """
49
+ def __init__(self, cfg_path: str):
50
+ self._cfg_path = cfg_path
51
+ self._engine: Optional[Engine] = None
52
+ self._Session = None # sessionmaker
53
+
54
+ # Lazy engine/session factory (no import-time work)
55
+ @property
56
+ def Session(self):
57
+ if self._Session is None:
58
+ engine = make_engine_from_config(self._cfg_path)
59
+ self._engine = engine
60
+ self._Session = get_session_factory(engine)
61
+ return self._Session
62
+
63
+ @contextmanager
64
+ def session(self) -> Generator[Session, None, None]:
65
+ Session = self.Session
66
+ with Session() as s:
67
+ yield s
68
+
69
+ # ---- Reads mapped to ORM ------------------------------------------------
70
+
71
+ def list_system_rows(self) -> Iterable[Tuple[int, str, float, float, float, Optional[int]]]:
72
+ """
73
+ Shape matches legacy _loadSystems SELECT:
74
+ (system_id, name, pos_x, pos_y, pos_z, added_id)
75
+ """
76
+ with self.session() as s:
77
+ rows = s.execute(
78
+ select(
79
+ System.system_id,
80
+ System.name,
81
+ System.pos_x,
82
+ System.pos_y,
83
+ System.pos_z,
84
+ System.added_id,
85
+ )
86
+ )
87
+ for r in rows:
88
+ yield (r.system_id, r.name, r.pos_x, r.pos_y, r.pos_z, r.added_id)
89
+
90
+ def system_by_name(self, name_ci: str) -> Optional[Tuple[int, str, float, float, float, Optional[int]]]:
91
+ """
92
+ Case-insensitive name match for System.
93
+ """
94
+ with self.session() as s:
95
+ row = s.execute(
96
+ select(
97
+ System.system_id, System.name, System.pos_x, System.pos_y, System.pos_z, System.added_id
98
+ ).where(func.upper(System.name) == func.upper(func.trim(func.cast(name_ci, System.name.type))))
99
+ ).first()
100
+ if not row:
101
+ return None
102
+ return (row.system_id, row.name, row.pos_x, row.pos_y, row.pos_z, row.added_id)
103
+
104
+ def station_by_system_and_name(
105
+ self, system_id: int, station_name_ci: str
106
+ ) -> Optional[Tuple[int, int, str, int, str, str, str, str, str, str, str, str, str, int]]:
107
+ """
108
+ Return the single Station row by system + name (CI).
109
+ Shape matches legacy _loadStations row consumed by Station(...):
110
+ (station_id, system_id, name,
111
+ ls_from_star, market, blackmarket, shipyard,
112
+ max_pad_size, outfitting, rearm, refuel, repair, planetary, type_id)
113
+ """
114
+ with self.session() as s:
115
+ r = s.execute(
116
+ select(
117
+ Station.station_id,
118
+ Station.system_id,
119
+ Station.name,
120
+ Station.ls_from_star,
121
+ Station.market,
122
+ Station.blackmarket,
123
+ Station.shipyard,
124
+ Station.max_pad_size,
125
+ Station.outfitting,
126
+ Station.rearm,
127
+ Station.refuel,
128
+ Station.repair,
129
+ Station.planetary,
130
+ Station.type_id,
131
+ ).where(
132
+ Station.system_id == system_id,
133
+ func.upper(Station.name) == func.upper(func.trim(func.cast(station_name_ci, Station.name.type))),
134
+ )
135
+ ).first()
136
+ if not r:
137
+ return None
138
+ return (
139
+ r.station_id,
140
+ r.system_id,
141
+ r.name,
142
+ r.ls_from_star,
143
+ r.market,
144
+ r.blackmarket,
145
+ r.shipyard,
146
+ r.max_pad_size,
147
+ r.outfitting,
148
+ r.rearm,
149
+ r.refuel,
150
+ r.repair,
151
+ r.planetary,
152
+ r.type_id,
153
+ )
154
+
155
+ def average_selling(self) -> Dict[int, int]:
156
+ """
157
+ {item_id: avg_supply_price>0}
158
+ Mirrors the legacy SQL used in TradeDB.getAverageSelling.
159
+ """
160
+ with self.session() as s:
161
+ rows = s.execute(
162
+ select(
163
+ Item.item_id,
164
+ func.IFNULL(func.avg(StationItem.supply_price), 0),
165
+ )
166
+ .select_from(Item.__table__.outerjoin(
167
+ StationItem, (Item.item_id == StationItem.item_id) & (StationItem.supply_price > 0)
168
+ ))
169
+ .where(StationItem.supply_price > 0)
170
+ .group_by(Item.item_id)
171
+ )
172
+ return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}
173
+
174
+ def average_buying(self) -> Dict[int, int]:
175
+ """
176
+ {item_id: avg_demand_price>0}
177
+ Mirrors the legacy SQL used in TradeDB.getAverageBuying.
178
+ """
179
+ with self.session() as s:
180
+ rows = s.execute(
181
+ select(
182
+ Item.item_id,
183
+ func.IFNULL(func.avg(StationItem.demand_price), 0),
184
+ )
185
+ .select_from(Item.__table__.outerjoin(
186
+ StationItem, (Item.item_id == StationItem.item_id) & (StationItem.demand_price > 0)
187
+ ))
188
+ .where(StationItem.demand_price > 0)
189
+ .group_by(Item.item_id)
190
+ )
191
+ return {int(item_id): int(avg_cr) for (item_id, avg_cr) in rows}