deeptrade-quant 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. deeptrade/__init__.py +8 -0
  2. deeptrade/channels_builtin/__init__.py +0 -0
  3. deeptrade/channels_builtin/stdout/__init__.py +0 -0
  4. deeptrade/channels_builtin/stdout/deeptrade_plugin.yaml +25 -0
  5. deeptrade/channels_builtin/stdout/migrations/20260429_001_init.sql +13 -0
  6. deeptrade/channels_builtin/stdout/stdout_channel/__init__.py +0 -0
  7. deeptrade/channels_builtin/stdout/stdout_channel/channel.py +180 -0
  8. deeptrade/cli.py +214 -0
  9. deeptrade/cli_config.py +396 -0
  10. deeptrade/cli_data.py +33 -0
  11. deeptrade/cli_plugin.py +176 -0
  12. deeptrade/core/__init__.py +8 -0
  13. deeptrade/core/config.py +344 -0
  14. deeptrade/core/config_migrations.py +138 -0
  15. deeptrade/core/db.py +176 -0
  16. deeptrade/core/llm_client.py +591 -0
  17. deeptrade/core/llm_manager.py +174 -0
  18. deeptrade/core/logging_config.py +61 -0
  19. deeptrade/core/migrations/__init__.py +0 -0
  20. deeptrade/core/migrations/core/20260427_001_init.sql +121 -0
  21. deeptrade/core/migrations/core/20260501_002_drop_llm_calls_stage.sql +10 -0
  22. deeptrade/core/migrations/core/__init__.py +0 -0
  23. deeptrade/core/notifier.py +302 -0
  24. deeptrade/core/paths.py +49 -0
  25. deeptrade/core/plugin_manager.py +616 -0
  26. deeptrade/core/run_status.py +29 -0
  27. deeptrade/core/secrets.py +152 -0
  28. deeptrade/core/tushare_client.py +824 -0
  29. deeptrade/plugins_api/__init__.py +44 -0
  30. deeptrade/plugins_api/base.py +66 -0
  31. deeptrade/plugins_api/channel.py +42 -0
  32. deeptrade/plugins_api/events.py +61 -0
  33. deeptrade/plugins_api/llm.py +46 -0
  34. deeptrade/plugins_api/metadata.py +84 -0
  35. deeptrade/plugins_api/notify.py +67 -0
  36. deeptrade/strategies_builtin/__init__.py +0 -0
  37. deeptrade/strategies_builtin/limit_up_board/__init__.py +0 -0
  38. deeptrade/strategies_builtin/limit_up_board/deeptrade_plugin.yaml +101 -0
  39. deeptrade/strategies_builtin/limit_up_board/limit_up_board/__init__.py +0 -0
  40. deeptrade/strategies_builtin/limit_up_board/limit_up_board/calendar.py +65 -0
  41. deeptrade/strategies_builtin/limit_up_board/limit_up_board/cli.py +269 -0
  42. deeptrade/strategies_builtin/limit_up_board/limit_up_board/config.py +76 -0
  43. deeptrade/strategies_builtin/limit_up_board/limit_up_board/data.py +1191 -0
  44. deeptrade/strategies_builtin/limit_up_board/limit_up_board/pipeline.py +869 -0
  45. deeptrade/strategies_builtin/limit_up_board/limit_up_board/plugin.py +30 -0
  46. deeptrade/strategies_builtin/limit_up_board/limit_up_board/profiles.py +85 -0
  47. deeptrade/strategies_builtin/limit_up_board/limit_up_board/prompts.py +485 -0
  48. deeptrade/strategies_builtin/limit_up_board/limit_up_board/render.py +890 -0
  49. deeptrade/strategies_builtin/limit_up_board/limit_up_board/runner.py +1087 -0
  50. deeptrade/strategies_builtin/limit_up_board/limit_up_board/runtime.py +172 -0
  51. deeptrade/strategies_builtin/limit_up_board/limit_up_board/schemas.py +178 -0
  52. deeptrade/strategies_builtin/limit_up_board/migrations/20260430_001_init.sql +150 -0
  53. deeptrade/strategies_builtin/limit_up_board/migrations/20260501_002_lub_stage_results_llm_provider.sql +8 -0
  54. deeptrade/strategies_builtin/limit_up_board/migrations/20260508_001_lub_lhb_tables.sql +36 -0
  55. deeptrade/strategies_builtin/limit_up_board/migrations/20260508_002_lub_cyq_perf.sql +18 -0
  56. deeptrade/strategies_builtin/limit_up_board/migrations/20260508_003_lub_lhb_pk_fix.sql +46 -0
  57. deeptrade/strategies_builtin/limit_up_board/migrations/20260508_004_lub_lhb_drop_pk.sql +53 -0
  58. deeptrade/strategies_builtin/limit_up_board/migrations/20260508_005_lub_config.sql +17 -0
  59. deeptrade/strategies_builtin/volume_anomaly/__init__.py +0 -0
  60. deeptrade/strategies_builtin/volume_anomaly/deeptrade_plugin.yaml +59 -0
  61. deeptrade/strategies_builtin/volume_anomaly/migrations/20260430_001_init.sql +94 -0
  62. deeptrade/strategies_builtin/volume_anomaly/migrations/20260601_001_realized_returns.sql +44 -0
  63. deeptrade/strategies_builtin/volume_anomaly/migrations/20260601_002_dimension_scores.sql +13 -0
  64. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/__init__.py +0 -0
  65. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/calendar.py +52 -0
  66. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/cli.py +247 -0
  67. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/data.py +2154 -0
  68. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/pipeline.py +327 -0
  69. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/plugin.py +22 -0
  70. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/profiles.py +49 -0
  71. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/prompts.py +187 -0
  72. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/prompts_examples.py +84 -0
  73. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/render.py +906 -0
  74. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/runner.py +772 -0
  75. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/runtime.py +90 -0
  76. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/schemas.py +97 -0
  77. deeptrade/strategies_builtin/volume_anomaly/volume_anomaly/stats.py +174 -0
  78. deeptrade/theme.py +48 -0
  79. deeptrade_quant-0.0.2.dist-info/METADATA +166 -0
  80. deeptrade_quant-0.0.2.dist-info/RECORD +83 -0
  81. deeptrade_quant-0.0.2.dist-info/WHEEL +4 -0
  82. deeptrade_quant-0.0.2.dist-info/entry_points.txt +2 -0
  83. deeptrade_quant-0.0.2.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,49 @@
1
+ """Resolve well-known paths for DeepTrade local artifacts.
2
+
3
+ DESIGN §5.1 user directory layout.
4
+
5
+ Override the root via the DEEPTRADE_HOME env var (used by tests for isolation).
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import os
11
+ from pathlib import Path
12
+
13
+
14
def home_dir() -> Path:
    """Return the root directory for all DeepTrade local artifacts.

    Honors the ``DEEPTRADE_HOME`` environment variable (used by tests for
    isolation); otherwise defaults to ``~/.deeptrade``.
    """
    configured = os.environ.get("DEEPTRADE_HOME")
    return (
        Path(configured).expanduser().resolve()
        if configured
        else Path.home() / ".deeptrade"
    )
20
+
21
+
22
def db_path() -> Path:
    """Return the location of the main DuckDB database file.

    Honors the ``DEEPTRADE_DB_PATH`` environment variable; otherwise the
    file lives directly under the DeepTrade home directory.
    """
    configured = os.environ.get("DEEPTRADE_DB_PATH")
    if configured:
        return Path(configured).expanduser().resolve()
    return home_dir() / "deeptrade.duckdb"
28
+
29
+
30
def logs_dir() -> Path:
    """Directory holding DeepTrade log files."""
    return home_dir().joinpath("logs")
32
+
33
+
34
def reports_dir() -> Path:
    """Directory holding generated reports."""
    return home_dir().joinpath("reports")
36
+
37
+
38
def plugins_dir() -> Path:
    """Directory where installed plugin copies live."""
    return home_dir().joinpath("plugins", "installed")
40
+
41
+
42
def plugins_cache_dir() -> Path:
    """Directory used as the plugin download/staging cache."""
    return home_dir().joinpath("plugins", "cache")
44
+
45
+
46
def ensure_layout() -> None:
    """Create the standard ~/.deeptrade subtree if missing. Idempotent."""
    required = (
        home_dir(),
        logs_dir(),
        reports_dir(),
        plugins_dir(),
        plugins_cache_dir(),
    )
    for directory in required:
        directory.mkdir(parents=True, exist_ok=True)
@@ -0,0 +1,616 @@
1
+ """Plugin install / validate / uninstall / upgrade.
2
+
3
+ DESIGN §8.3 + S1 (migrations are the sole DDL source) + S2 (install never
4
+ touches the network) + M3 (llm_tools=true is rejected).
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import hashlib
10
+ import importlib.util
11
+ import logging
12
+ import shutil
13
+ import sys
14
+ import textwrap
15
+ from collections.abc import Sequence
16
+ from dataclasses import dataclass
17
+ from pathlib import Path
18
+ from typing import Any
19
+
20
+ import yaml
21
+
22
+ from deeptrade.core import paths
23
+ from deeptrade.core.db import Database
24
+ from deeptrade.plugins_api.base import Plugin
25
+ from deeptrade.plugins_api.metadata import MigrationSpec, PluginMetadata
26
+
27
logger = logging.getLogger(__name__)

# Plugin API version this framework build speaks. Installs/upgrades whose
# metadata declares a different api_version are rejected (see install()).
CURRENT_API_VERSION = "1"

# Reserved framework-level command names. A plugin_id colliding with any of
# these would shadow framework dispatch and is rejected at install time.
RESERVED_PLUGIN_IDS: frozenset[str] = frozenset({"init", "config", "plugin", "data"})
34
+
35
+
36
+ # ---------------------------------------------------------------------------
37
+ # Errors
38
+ # ---------------------------------------------------------------------------
39
+
40
+
41
class PluginError(Exception):
    """Root of the plugin-manager exception hierarchy."""
43
+
44
+
45
class PluginInstallError(PluginError):
    """Raised when the install/upgrade pipeline fails at any stage."""
47
+
48
+
49
class PluginNotFoundError(PluginError):
    """Raised when a plugin_id does not match any installed plugin."""
51
+
52
+
53
+ # ---------------------------------------------------------------------------
54
+ # Records (lightweight DTOs over the DB rows)
55
+ # ---------------------------------------------------------------------------
56
+
57
+
58
@dataclass
class InstalledPlugin:
    """In-memory view of one row of the ``plugins`` registry table."""

    # identity + manifest basics
    plugin_id: str
    name: str
    version: str
    type: str
    api_version: str
    entrypoint: str
    # where the installed copy lives on disk
    install_path: str
    enabled: bool
    # the fully parsed manifest
    metadata: PluginMetadata
69
+
70
+
71
+ # ---------------------------------------------------------------------------
72
+ # Loading helpers
73
+ # ---------------------------------------------------------------------------
74
+
75
+
76
def _load_metadata_yaml(yaml_path: Path) -> PluginMetadata:
    """Parse and validate a plugin's ``deeptrade_plugin.yaml`` manifest."""
    if not yaml_path.is_file():
        raise PluginInstallError(f"metadata file not found: {yaml_path}")
    parsed = yaml.safe_load(yaml_path.read_text(encoding="utf-8"))
    if not isinstance(parsed, dict):
        raise PluginInstallError(f"metadata must be a YAML mapping: {yaml_path}")
    try:
        return PluginMetadata.model_validate(parsed)
    except Exception as exc:  # noqa: BLE001
        raise PluginInstallError(f"invalid metadata in {yaml_path}: {exc}") from exc
86
+
87
+
88
def _verify_migration_checksum(plugin_root: Path, mig: MigrationSpec) -> str:
    """Load ``mig.file`` under *plugin_root*, verify its ``sha256:<hex>``
    digest against ``mig.checksum`` and return the SQL text."""
    candidate = plugin_root / mig.file
    if not candidate.is_file():
        raise PluginInstallError(f"migration file missing: {mig.file}")
    body = candidate.read_text(encoding="utf-8")
    digest = f"sha256:{hashlib.sha256(body.encode('utf-8')).hexdigest()}"
    if digest != mig.checksum:
        raise PluginInstallError(
            f"checksum mismatch for {mig.file}: expected {mig.checksum}, got {digest}"
        )
    return body
101
+
102
+
103
def _load_entrypoint(
    install_path: Path,
    entrypoint: str,
    metadata: PluginMetadata | None = None,
) -> Plugin:
    """Import ``module.path:Class`` from *install_path* and return an instance.

    The plugin root is pushed onto ``sys.path`` temporarily and the module is
    pulled in with ``importlib.import_module`` so that intra-plugin relative
    imports (``from .calendar import TradeCalendar``) resolve — with
    ``spec_from_file_location`` the parent package would never be initialized
    and those imports would fail.

    When *metadata* is given it is attached to the instance so the plugin can
    read ``self.metadata`` at runtime.
    """
    module_path, _, class_name = entrypoint.partition(":")
    if not module_path or not class_name:
        raise PluginInstallError(f"bad entrypoint: {entrypoint}")

    root = str(install_path)
    top_pkg = module_path.split(".", 1)[0]

    # Confirm the module actually lives under install_path BEFORE any
    # sys.path surgery.
    if not (install_path / top_pkg).is_dir():
        # Fall back to a single-file leaf module (rare, but supported).
        if not (install_path / (module_path.replace(".", "/") + ".py")).is_file():
            raise PluginInstallError(f"cannot locate module {module_path!r} under {install_path}")

    # Evict stale cached copies of the plugin's top package so the import
    # below resolves against install_path rather than an older install.
    stale = [m for m in sys.modules if m == top_pkg or m.startswith(top_pkg + ".")]
    for name in stale:
        sys.modules.pop(name, None)

    sys.path.insert(0, root)
    try:
        module = importlib.import_module(module_path)
    except Exception as exc:
        raise PluginInstallError(f"cannot import {module_path}: {exc}") from exc
    finally:
        # Never leave install_path on sys.path; the imported module objects
        # stay cached in sys.modules and remain usable by reference.
        if root in sys.path:
            sys.path.remove(root)

    if not hasattr(module, class_name):
        raise PluginInstallError(f"{module_path} has no class {class_name}")
    instance = getattr(module, class_name)()
    if metadata is not None:
        instance.metadata = metadata
    return instance  # type: ignore[no-any-return]
155
+
156
+
157
def _build_validate_ctx(db: Database, meta: PluginMetadata) -> Any:
    """Assemble the framework's minimal ``PluginContext`` for ``validate_static``.

    Every plugin type (strategy / channel / future) receives the same narrow
    context shape: db + config + plugin_id. Richer services (TushareClient,
    LLMManager / LLMClient, ...) are constructed by plugins themselves inside
    their own ``dispatch`` from these primitives.
    """
    from deeptrade.core.config import ConfigService
    from deeptrade.plugins_api.base import PluginContext

    return PluginContext(config=ConfigService(db), db=db, plugin_id=meta.plugin_id)
169
+
170
+
171
+ # ---------------------------------------------------------------------------
172
+ # Manager
173
+ # ---------------------------------------------------------------------------
174
+
175
+
176
class PluginManager:
    """Lifecycle manager for plugins: install / list / enable / disable /
    uninstall / upgrade.

    Registry state lives in the core DB tables ``plugins``, ``plugin_tables``
    and ``plugin_schema_migrations``; installed code is copied under
    ``<install_root>/<plugin_id>/<version>/``.
    """

    def __init__(self, db: Database, install_root: Path | None = None) -> None:
        # install_root defaults to ~/.deeptrade/plugins/installed (paths.plugins_dir).
        self._db = db
        self._install_root = install_root or paths.plugins_dir()
        self._install_root.mkdir(parents=True, exist_ok=True)

    # --- install -----------------------------------------------------

    def install(self, source_path: Path) -> InstalledPlugin:
        """Install a plugin from a local directory. Network never touched."""
        source_path = source_path.resolve()
        if not source_path.is_dir():
            raise PluginInstallError(f"source path is not a directory: {source_path}")

        meta = _load_metadata_yaml(source_path / "deeptrade_plugin.yaml")

        if meta.plugin_id in RESERVED_PLUGIN_IDS:
            raise PluginInstallError(
                f"plugin_id {meta.plugin_id!r} is reserved by the framework "
                f"(reserved: {sorted(RESERVED_PLUGIN_IDS)})"
            )

        if meta.api_version != CURRENT_API_VERSION:
            raise PluginInstallError(
                f"plugin api_version {meta.api_version} != framework {CURRENT_API_VERSION}"
            )

        # M3 hard-constraint enforcement (Pydantic Literal[False] also catches it)
        if meta.permissions.llm_tools is not False:
            raise PluginInstallError("permissions.llm_tools=true is forbidden")

        # Uniqueness
        existing = self._fetch_one_plugin(meta.plugin_id)
        if existing is not None:
            raise PluginInstallError(
                f"plugin_id {meta.plugin_id!r} already installed at {existing.install_path}; "
                f"use `plugin upgrade` for version change"
            )

        # Verify migration checksums BEFORE copy / DB writes
        mig_sql: list[tuple[MigrationSpec, str]] = []
        for mig in meta.migrations:
            sql_text = _verify_migration_checksum(source_path, mig)
            mig_sql.append((mig, sql_text))

        # Copy to ~/.deeptrade/plugins/installed/<plugin_id>/<version>/
        target = self._install_root / meta.plugin_id / meta.version
        if target.exists():
            shutil.rmtree(target)
        shutil.copytree(source_path, target)

        # Apply migrations + write registries inside ONE transaction.
        try:
            with self._db.transaction():
                applied = self._apply_migrations(meta.plugin_id, mig_sql)
                self._record_plugin(meta, target)
                self._record_tables(meta)
                self._record_migrations(meta.plugin_id, applied)

                # Verify each declared table actually exists post-migration
                # (raising here aborts the surrounding transaction).
                missing = self._missing_declared_tables(meta)
                if missing:
                    raise PluginInstallError(
                        f"declared tables not created by migrations: {sorted(missing)}"
                    )
        except Exception:
            # rollback: also remove the copied directory
            if target.exists():
                shutil.rmtree(target, ignore_errors=True)
            raise

        # B2.2 — Static self-check (no network) MUST gate install acceptance.
        # Failure → roll back DB rows + remove install copy + raise.
        try:
            instance = _load_entrypoint(target, meta.entrypoint, meta)
            if hasattr(instance, "validate_static"):
                instance.validate_static(_build_validate_ctx(self._db, meta))
        except Exception as e:
            # Roll back: drop the just-installed plugin tables + delete registry rows + remove copy
            self._rollback_install(meta, target)
            raise PluginInstallError(
                f"validate_static / entrypoint load failed for {meta.plugin_id}: {e}"
            ) from e

        return self._compose_record(meta, target, enabled=True)

    def _rollback_install(self, meta: PluginMetadata, target: Path) -> None:
        """Undo a partially-completed install. Idempotent."""
        with self._db.transaction():
            # Drop owned tables (best-effort)
            for t in meta.tables:
                if t.purge_on_uninstall:
                    try:
                        self._db.execute(f"DROP TABLE IF EXISTS {t.name}")  # noqa: S608
                    except Exception:  # noqa: BLE001
                        pass
            self._db.execute("DELETE FROM plugin_tables WHERE plugin_id = ?", (meta.plugin_id,))
            self._db.execute(
                "DELETE FROM plugin_schema_migrations WHERE plugin_id = ?", (meta.plugin_id,)
            )
            self._db.execute("DELETE FROM plugins WHERE plugin_id = ?", (meta.plugin_id,))
        if target.exists():
            shutil.rmtree(target, ignore_errors=True)

    # --- list / info / disable / enable / uninstall / upgrade --------

    def list_all(self) -> list[InstalledPlugin]:
        """List all installed plugins (renamed from `list` to avoid shadowing the builtin)."""
        rows = self._db.fetchall(
            "SELECT plugin_id, name, version, type, api_version, entrypoint, "
            "install_path, enabled, metadata_yaml FROM plugins ORDER BY plugin_id"
        )
        return [self._row_to_record(r) for r in rows]

    def info(self, plugin_id: str) -> InstalledPlugin:
        """Return the record for *plugin_id*; raise PluginNotFoundError if absent."""
        rec = self._fetch_one_plugin(plugin_id)
        if rec is None:
            raise PluginNotFoundError(plugin_id)
        return rec

    def disable(self, plugin_id: str) -> None:
        """Mark *plugin_id* disabled; raise PluginNotFoundError if absent."""
        if self._fetch_one_plugin(plugin_id) is None:
            raise PluginNotFoundError(plugin_id)
        self._db.execute("UPDATE plugins SET enabled = FALSE WHERE plugin_id = ?", (plugin_id,))

    def enable(self, plugin_id: str) -> None:
        """Re-enable *plugin_id*, refusing if its on-disk copy is gone."""
        rec = self._fetch_one_plugin(plugin_id)
        if rec is None:
            raise PluginNotFoundError(plugin_id)
        # F-L1 — guard against enabling a plugin whose install_path was
        # removed (e.g. by an earlier uninstall without --purge that wiped
        # the on-disk copy). Re-enabling such a record would later crash
        # the runner with a confusing ImportError.
        if not Path(rec.install_path).exists():
            raise PluginInstallError(
                f"plugin {plugin_id!r} install_path missing ({rec.install_path}); "
                f"reinstall before enabling"
            )
        self._db.execute("UPDATE plugins SET enabled = TRUE WHERE plugin_id = ?", (plugin_id,))

    def uninstall(self, plugin_id: str, *, purge: bool = False) -> dict[str, Any]:
        """Uninstall *plugin_id*.

        With ``purge=True`` the owned tables flagged ``purge_on_uninstall``
        are dropped and all registry rows deleted; without it the plugin is
        only disabled (registry rows are kept). The on-disk install copy is
        removed in both cases.
        """
        rec = self._fetch_one_plugin(plugin_id)
        if rec is None:
            raise PluginNotFoundError(plugin_id)

        dropped: list[str] = []
        if purge:
            tables = self._db.fetchall(
                "SELECT table_name, purge_on_uninstall FROM plugin_tables WHERE plugin_id = ?",
                (plugin_id,),
            )
            with self._db.transaction():
                for tname, purge_flag in tables:
                    if purge_flag:
                        self._db.execute(f"DROP TABLE IF EXISTS {tname}")  # noqa: S608 — name validated by Pydantic regex
                        dropped.append(tname)
                self._db.execute("DELETE FROM plugin_tables WHERE plugin_id = ?", (plugin_id,))
                self._db.execute(
                    "DELETE FROM plugin_schema_migrations WHERE plugin_id = ?", (plugin_id,)
                )
                self._db.execute("DELETE FROM plugins WHERE plugin_id = ?", (plugin_id,))
        else:
            # default: just disable + remove the install copy
            self._db.execute("UPDATE plugins SET enabled = FALSE WHERE plugin_id = ?", (plugin_id,))

        # remove the on-disk install copy (idempotent)
        install_path = Path(rec.install_path)
        if install_path.exists():
            shutil.rmtree(install_path, ignore_errors=True)

        return {"purged_tables": dropped, "purge": purge}

    def upgrade(self, source_path: Path) -> InstalledPlugin:
        """Upgrade an existing plugin: apply only NEW migrations (S5)."""
        source_path = source_path.resolve()
        meta = _load_metadata_yaml(source_path / "deeptrade_plugin.yaml")
        existing = self._fetch_one_plugin(meta.plugin_id)
        if existing is None:
            raise PluginNotFoundError(meta.plugin_id)

        if meta.api_version != CURRENT_API_VERSION:
            raise PluginInstallError(
                f"plugin api_version {meta.api_version} != framework {CURRENT_API_VERSION}"
            )
        if meta.permissions.llm_tools is not False:
            raise PluginInstallError("permissions.llm_tools=true is forbidden")

        # Decide which migrations are new
        applied_versions = {
            row[0]
            for row in self._db.fetchall(
                "SELECT version FROM plugin_schema_migrations WHERE plugin_id = ?",
                (meta.plugin_id,),
            )
        }
        new_migrations: list[tuple[MigrationSpec, str]] = []
        for mig in meta.migrations:
            if mig.version in applied_versions:
                continue
            sql_text = _verify_migration_checksum(source_path, mig)
            new_migrations.append((mig, sql_text))

        # Copy new version
        target = self._install_root / meta.plugin_id / meta.version
        if target.exists():
            shutil.rmtree(target)
        shutil.copytree(source_path, target)

        # F-M5 — keep a backup of the previous install_path so we can roll back on failure
        prev_install_path = Path(existing.install_path)
        prev_metadata_yaml = self._db.fetchone(
            "SELECT metadata_yaml FROM plugins WHERE plugin_id = ?", (meta.plugin_id,)
        )

        try:
            with self._db.transaction():
                if new_migrations:
                    self._apply_migrations(meta.plugin_id, new_migrations)
                    self._record_migrations(meta.plugin_id, new_migrations)
                # update the plugins row
                self._db.execute(
                    "UPDATE plugins SET name=?, version=?, type=?, api_version=?, entrypoint=?, "
                    "install_path=?, metadata_yaml=?, updated_at=CURRENT_TIMESTAMP "
                    "WHERE plugin_id=?",
                    (
                        meta.name,
                        meta.version,
                        meta.type,
                        meta.api_version,
                        meta.entrypoint,
                        str(target),
                        yaml.safe_dump(meta.model_dump(mode="json"), allow_unicode=True),
                        meta.plugin_id,
                    ),
                )
                # add any newly-declared tables to plugin_tables (idempotent)
                self._record_tables(meta)

                # F-M5 — same post-install validation as install():
                # missing-tables check + entrypoint import + validate_static
                missing = self._missing_declared_tables(meta)
                if missing:
                    raise PluginInstallError(
                        f"declared tables not created by migrations: {sorted(missing)}"
                    )
        except Exception:
            if target.exists():
                shutil.rmtree(target, ignore_errors=True)
            raise

        # entrypoint + validate_static — outside the transaction (may load network-free
        # plugin code). Failure → roll back the plugins row to the previous version.
        try:
            instance = _load_entrypoint(target, meta.entrypoint, meta)
            if hasattr(instance, "validate_static"):
                instance.validate_static(_build_validate_ctx(self._db, meta))
        except Exception as e:
            # Roll back the plugins row to the prior version (install_path,
            # metadata_yaml, version, entrypoint). Do NOT touch migrations: the
            # new schema is already applied, and old metadata referenced an
            # earlier subset; rolling back schema would be more dangerous than
            # leaving forward-compatible columns/tables.
            # NOTE(review): this rollback UPDATE does not restore the `type`
            # column — if an upgrade ever changes a plugin's type, a failed
            # validation leaves the NEW type in place. Confirm intended.
            if prev_metadata_yaml is not None:
                prev_meta = PluginMetadata.model_validate(yaml.safe_load(prev_metadata_yaml[0]))
                self._db.execute(
                    "UPDATE plugins SET name=?, version=?, api_version=?, entrypoint=?, "
                    "install_path=?, metadata_yaml=?, updated_at=CURRENT_TIMESTAMP "
                    "WHERE plugin_id=?",
                    (
                        prev_meta.name,
                        prev_meta.version,
                        prev_meta.api_version,
                        prev_meta.entrypoint,
                        str(prev_install_path),
                        prev_metadata_yaml[0],
                        meta.plugin_id,
                    ),
                )
            if target.exists():
                shutil.rmtree(target, ignore_errors=True)
            raise PluginInstallError(
                f"upgrade validation failed for {meta.plugin_id}: {e}; rolled back to prior version"
            ) from e

        return self._compose_record(meta, target, enabled=existing.enabled)

    # --- internal helpers --------------------------------------------

    def _apply_migrations(
        self, plugin_id: str, migs: Sequence[tuple[MigrationSpec, str]]
    ) -> list[tuple[MigrationSpec, str]]:
        """Run each SQL inside the calling transaction. Caller wraps in transaction."""
        for _mig, sql in migs:
            # split on ';' is not safe for some DDL but DuckDB supports executing
            # multi-statement strings via ``execute`` with ``;``-separated bodies.
            for stmt in self._iter_statements(sql):
                if stmt.strip():
                    self._db.execute(stmt)
        return list(migs)

    @staticmethod
    def _iter_statements(sql: str) -> list[str]:
        """Split SQL on top-level semicolons. Handles -- comments and quoted strings.

        Only ``--`` line comments are recognized; ``/* */`` block comments are
        not special-cased by this scanner.
        """
        stmts: list[str] = []
        buf: list[str] = []
        in_single = False
        in_double = False
        i = 0
        n = len(sql)
        while i < n:
            ch = sql[i]
            # line comment
            if not in_single and not in_double and ch == "-" and i + 1 < n and sql[i + 1] == "-":
                # consume to end of line
                eol = sql.find("\n", i)
                if eol == -1:
                    eol = n
                # don't include comment text in buffer
                i = eol
                continue
            if ch == "'" and not in_double:
                in_single = not in_single
            elif ch == '"' and not in_single:
                in_double = not in_double
            if ch == ";" and not in_single and not in_double:
                stmts.append("".join(buf))
                buf = []
            else:
                buf.append(ch)
            i += 1
        if buf:
            tail = "".join(buf).strip()
            if tail:
                stmts.append(tail)
        return [s.strip() for s in stmts if s.strip()]

    def _record_plugin(self, meta: PluginMetadata, install_path: Path) -> None:
        """Insert the registry row for a freshly-installed plugin (enabled=TRUE)."""
        self._db.execute(
            "INSERT INTO plugins(plugin_id, name, version, type, api_version, entrypoint, "
            "install_path, enabled, metadata_yaml) VALUES (?, ?, ?, ?, ?, ?, ?, TRUE, ?)",
            (
                meta.plugin_id,
                meta.name,
                meta.version,
                meta.type,
                meta.api_version,
                meta.entrypoint,
                str(install_path),
                yaml.safe_dump(meta.model_dump(mode="json"), allow_unicode=True),
            ),
        )

    def _record_tables(self, meta: PluginMetadata) -> None:
        """Register the plugin's declared tables in ``plugin_tables``."""
        for t in meta.tables:
            # idempotent: delete then insert
            self._db.execute(
                "DELETE FROM plugin_tables WHERE plugin_id = ? AND table_name = ?",
                (meta.plugin_id, t.name),
            )
            self._db.execute(
                "INSERT INTO plugin_tables(plugin_id, table_name, description, "
                "purge_on_uninstall) VALUES (?, ?, ?, ?)",
                (meta.plugin_id, t.name, t.description, t.purge_on_uninstall),
            )

    def _record_migrations(self, plugin_id: str, migs: Sequence[tuple[MigrationSpec, str]]) -> None:
        """Record applied migration versions + checksums for upgrade bookkeeping."""
        for mig, _ in migs:
            self._db.execute(
                "INSERT INTO plugin_schema_migrations(plugin_id, version, checksum) "
                "VALUES (?, ?, ?)",
                (plugin_id, mig.version, mig.checksum),
            )

    def _missing_declared_tables(self, meta: PluginMetadata) -> set[str]:
        """Return declared table names that do not exist in the ``main`` schema."""
        existing = {
            r[0]
            for r in self._db.fetchall(
                "SELECT table_name FROM information_schema.tables WHERE table_schema='main'"
            )
        }
        declared = {t.name for t in meta.tables}
        return declared - existing

    def _fetch_one_plugin(self, plugin_id: str) -> InstalledPlugin | None:
        """Load one registry row as an InstalledPlugin, or None if absent."""
        row = self._db.fetchone(
            "SELECT plugin_id, name, version, type, api_version, entrypoint, "
            "install_path, enabled, metadata_yaml FROM plugins WHERE plugin_id = ?",
            (plugin_id,),
        )
        if row is None:
            return None
        return self._row_to_record(row)

    def _row_to_record(self, row: Sequence[Any]) -> InstalledPlugin:
        """Convert a raw ``plugins`` row (column order as SELECTed above) to a DTO."""
        meta_dict = yaml.safe_load(row[8])
        meta = PluginMetadata.model_validate(meta_dict)
        return InstalledPlugin(
            plugin_id=row[0],
            name=row[1],
            version=row[2],
            type=row[3],
            api_version=row[4],
            entrypoint=row[5],
            install_path=row[6],
            enabled=bool(row[7]),
            metadata=meta,
        )

    def _compose_record(
        self, meta: PluginMetadata, install_path: Path, *, enabled: bool
    ) -> InstalledPlugin:
        """Build an InstalledPlugin directly from metadata (no DB round-trip)."""
        return InstalledPlugin(
            plugin_id=meta.plugin_id,
            name=meta.name,
            version=meta.version,
            type=meta.type,
            api_version=meta.api_version,
            entrypoint=meta.entrypoint,
            install_path=str(install_path),
            enabled=enabled,
            metadata=meta,
        )
598
+
599
+
600
def summarize_for_install(meta: PluginMetadata, source_path: Path) -> str:
    """Render the install confirmation pre-flight summary (CLI only)."""
    tushare = meta.permissions.tushare_apis
    required = ", ".join(tushare.required) or "(none)"
    optional = ", ".join(tushare.optional) or "(none)"
    migrations = ", ".join(m.version for m in meta.migrations)
    tables = ", ".join(t.name for t in meta.tables)
    return textwrap.dedent(
        f"""
        plugin_id : {meta.plugin_id}
        name : {meta.name}
        version : {meta.version}
        type : {meta.type}
        entrypoint : {meta.entrypoint}
        source : {source_path}
        required : {required}
        optional : {optional}
        migrations : {migrations}
        tables ({len(meta.tables)}): {tables}
        """
    ).strip()
@@ -0,0 +1,29 @@
1
+ """Run status enum + Pydantic-layer validation.
2
+
3
+ DESIGN §13.1 status values + S3 fix: validation moved out of the DDL because
4
+ DuckDB doesn't ALTER CHECK constraints in-place.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from enum import StrEnum
10
+
11
+
12
+ class RunStatus(StrEnum):
13
+ """Allowed values for a plugin run's terminal status.
14
+
15
+ v0.5+: each plugin owns its own ``<prefix>_runs.status`` column (e.g.
16
+ ``lub_runs`` / ``va_runs``); the framework no longer keeps a unified
17
+ ``strategy_runs`` table.
18
+ """
19
+
20
+ RUNNING = "running"
21
+ SUCCESS = "success"
22
+ FAILED = "failed"
23
+ PARTIAL_FAILED = "partial_failed"
24
+ CANCELLED = "cancelled"
25
+
26
+
27
def validate_status(value: str) -> RunStatus:
    """Coerce *value* to a RunStatus; ValueError is raised for unknown values."""
    return RunStatus(value)