microlens-submit 0.12.2__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microlens_submit/__init__.py +7 -157
- microlens_submit/cli/__init__.py +5 -0
- microlens_submit/cli/__main__.py +6 -0
- microlens_submit/cli/commands/__init__.py +1 -0
- microlens_submit/cli/commands/dossier.py +139 -0
- microlens_submit/cli/commands/export.py +177 -0
- microlens_submit/cli/commands/init.py +172 -0
- microlens_submit/cli/commands/solutions.py +722 -0
- microlens_submit/cli/commands/validation.py +241 -0
- microlens_submit/cli/main.py +120 -0
- microlens_submit/dossier/__init__.py +51 -0
- microlens_submit/dossier/dashboard.py +499 -0
- microlens_submit/dossier/event_page.py +369 -0
- microlens_submit/dossier/full_report.py +330 -0
- microlens_submit/dossier/solution_page.py +533 -0
- microlens_submit/dossier/utils.py +111 -0
- microlens_submit/error_messages.py +283 -0
- microlens_submit/models/__init__.py +28 -0
- microlens_submit/models/event.py +406 -0
- microlens_submit/models/solution.py +569 -0
- microlens_submit/models/submission.py +569 -0
- microlens_submit/tier_validation.py +208 -0
- microlens_submit/utils.py +373 -0
- microlens_submit/validate_parameters.py +478 -180
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.0.dist-info}/METADATA +42 -27
- microlens_submit-0.16.0.dist-info/RECORD +32 -0
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.0.dist-info}/WHEEL +1 -1
- microlens_submit/api.py +0 -1257
- microlens_submit/cli.py +0 -1803
- microlens_submit/dossier.py +0 -1443
- microlens_submit-0.12.2.dist-info/RECORD +0 -13
- {microlens_submit-0.12.2.dist-info/licenses → microlens_submit-0.16.0.dist-info}/LICENSE +0 -0
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.0.dist-info}/entry_points.txt +0 -0
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,722 @@
|
|
|
1
|
+
"""Solution management commands for microlens-submit CLI."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Dict, List, Optional, Tuple
|
|
7
|
+
|
|
8
|
+
import typer
|
|
9
|
+
from rich.console import Console
|
|
10
|
+
from rich.panel import Panel
|
|
11
|
+
|
|
12
|
+
from microlens_submit.error_messages import enhance_validation_messages, format_cli_error_with_suggestions
|
|
13
|
+
from microlens_submit.utils import import_solutions_from_csv, load
|
|
14
|
+
|
|
15
|
+
console = Console()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def _parse_pairs(pairs: Optional[List[str]]) -> Optional[Dict]:
    """Turn a list of ``key=value`` CLI strings into a dictionary.

    Each value is decoded as JSON when possible (so ``x=1`` yields an int
    and ``x=[1,2]`` a list); anything that is not valid JSON is kept as the
    raw string. Returns ``None`` when no pairs were supplied so callers can
    distinguish "option not given" from "empty mapping".

    Raises:
        typer.BadParameter: If an entry does not contain ``=``.
    """
    if not pairs:
        return None
    parsed: Dict = {}
    for entry in pairs:
        if "=" not in entry:
            raise typer.BadParameter(f"Invalid format: {entry}")
        # Split only on the first '=' so values may themselves contain '='.
        name, raw = entry.split("=", 1)
        try:
            decoded = json.loads(raw)
        except json.JSONDecodeError:
            decoded = raw
        parsed[name] = decoded
    return parsed
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _params_file_callback(ctx: typer.Context, value: Optional[Path]) -> Optional[Path]:
    """Typer callback enforcing that --param and --params-file are exclusive.

    Exactly one parameter source must be supplied: giving both, or giving
    neither (outside resilient parsing, which Typer/Click uses e.g. during
    shell completion), raises ``typer.BadParameter``.

    Args:
        ctx: Typer context; ``ctx.params["param"]`` holds any --param values
            already parsed.
        value: The --params-file path, if provided.

    Returns:
        The validated --params-file path (possibly ``None``).
    """
    supplied_params = ctx.params.get("param")
    both_given = value is not None and supplied_params
    if both_given:
        raise typer.BadParameter("Cannot use --param with --params-file")
    neither_given = value is None and not supplied_params
    if neither_given and not ctx.resilient_parsing:
        raise typer.BadParameter("Provide either --param or --params-file")
    return value
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _parse_structured_params_file(params_file: Path) -> Tuple[Dict, Dict]:
    """Parse a parameter file into ``(parameters, uncertainties)`` dicts.

    Two layouts are supported:

    * Structured: a mapping with top-level ``"parameters"`` and/or
      ``"uncertainties"`` keys; a missing key defaults to ``{}``.
    * Simple: any other document, treated entirely as parameters with no
      uncertainties.

    The parser is chosen by file extension: ``.yaml``/``.yml`` files are
    read with PyYAML (``safe_load``), everything else as JSON.

    Args:
        params_file: Path to the JSON or YAML parameter file.

    Returns:
        Tuple of (parameters, uncertainties) dictionaries.
    """
    with params_file.open("r", encoding="utf-8") as fh:
        if params_file.suffix.lower() in (".yaml", ".yml"):
            # Import lazily so JSON-only users do not need PyYAML installed.
            import yaml

            data = yaml.safe_load(fh)
        else:
            data = json.load(fh)

    if isinstance(data, dict) and ("parameters" in data or "uncertainties" in data):
        # Structured format: explicit sections, each optional.
        parameters = data.get("parameters", {})
        uncertainties = data.get("uncertainties", {})
    else:
        # Simple format - all keys are parameters
        parameters = data
        uncertainties = {}

    return parameters, uncertainties
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def add_solution(
    event_id: str,
    model_type: str = typer.Argument(
        ...,
        metavar="{1S1L|1S2L|2S1L|2S2L|1S3L|2S3L|other}",
        help="Type of model used for the solution (e.g., 1S1L, 1S2L)",
    ),
    param: Optional[List[str]] = typer.Option(
        None,
        help="Model parameters as key=value [BASIC]",
    ),
    log_likelihood: Optional[float] = typer.Option(
        None,
        help="Log likelihood [BASIC]",
    ),
    n_data_points: Optional[int] = typer.Option(
        None,
        "--n-data-points",
        help="Number of data points used in this solution [BASIC]",
    ),
    project_path: Path = typer.Argument(
        Path("."),
        help="Project directory [BASIC]",
    ),
    # ADVANCED OPTIONS
    params_file: Optional[Path] = typer.Option(
        None,
        "--params-file",
        help=("Path to JSON or YAML file with model parameters " "and uncertainties [ADVANCED]"),
        callback=_params_file_callback,
    ),
    bands: Optional[List[str]] = typer.Option(
        None,
        "--bands",
        help=("Photometric bands used (e.g., 0,1,2). " "Required if using band-specific flux parameters [ADVANCED]"),
    ),
    higher_order_effect: Optional[List[str]] = typer.Option(
        None,
        "--higher-order-effect",
        help=(
            "Higher-order effects: parallax, finite-source, lens-orbital-motion, "
            "xallarap, gaussian-process, stellar-rotation, fitted-limb-darkening "
            "[ADVANCED]"
        ),
    ),
    t_ref: Optional[float] = typer.Option(
        None,
        "--t-ref",
        help=(
            "Reference time for time-dependent effects (Julian Date). "
            "Required for parallax, xallarap, etc. [ADVANCED]"
        ),
    ),
    used_astrometry: bool = typer.Option(
        False,
        help="Set if astrometry data was used in the fit [ADVANCED]",
    ),
    used_postage_stamps: bool = typer.Option(
        False,
        help=("Set if postage stamp images were used in the analysis [ADVANCED]"),
    ),
    limb_darkening_model: Optional[str] = typer.Option(
        None,
        help=(
            "Fixed limb darkening model name (e.g., 'claret'). "
            "Use --higher-order-effect fitted-limb-darkening for fitted coefficients "
            "[ADVANCED]"
        ),
    ),
    limb_darkening_coeff: Optional[List[str]] = typer.Option(
        None,
        "--limb-darkening-coeff",
        help=(
            "Limb darkening coefficients as key=value. " "Use with fitted-limb-darkening higher-order effect [ADVANCED]"
        ),
    ),
    parameter_uncertainty: Optional[List[str]] = typer.Option(
        None,
        "--param-uncertainty",
        help=(
            "Parameter uncertainties as key=value. "
            "Can be single value (symmetric) or [lower,upper] (asymmetric) [ADVANCED]"
        ),
    ),
    physical_param: Optional[List[str]] = typer.Option(
        None,
        "--physical-param",
        help=("Physical parameters (M_L, D_L, M_planet, a, etc.) " "derived from model parameters [ADVANCED]"),
    ),
    relative_probability: Optional[float] = typer.Option(
        None,
        "--relative-probability",
        help=("Relative probability of this solution (0-1). " "Used for model comparison [ADVANCED]"),
    ),
    cpu_hours: Optional[float] = typer.Option(
        None,
        "--cpu-hours",
        help="CPU hours used for this solution. " "Automatically captured if not specified [ADVANCED]",
    ),
    wall_time_hours: Optional[float] = typer.Option(
        None,
        "--wall-time-hours",
        help="Wall time hours used for this solution. " "Automatically captured if not specified [ADVANCED]",
    ),
    lightcurve_plot_path: Optional[Path] = typer.Option(
        None,
        "--lightcurve-plot-path",
        help="Path to lightcurve plot file " "(relative to project directory) [ADVANCED]",
    ),
    lens_plane_plot_path: Optional[Path] = typer.Option(
        None,
        "--lens-plane-plot-path",
        help="Path to lens plane plot file " "(relative to project directory) [ADVANCED]",
    ),
    alias: Optional[str] = typer.Option(
        None,
        "--alias",
        help=("Set or update the human-readable alias for this solution " "(must be unique within the event)"),
    ),
    notes: Optional[str] = typer.Option(
        None,
        help=("Notes for the solution (supports Markdown formatting)"),
    ),
    notes_file: Optional[Path] = typer.Option(
        None,
        "--notes-file",
        help=("Path to a Markdown file for solution notes " "(mutually exclusive with --notes)"),
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Show what would be created without saving. " "Useful for testing parameter parsing [ADVANCED]",
    ),
) -> None:
    """Add a new solution entry for a microlensing event.

    Basic usage: microlens-submit add-solution EVENT123 1S1L --param t0=2459123.5
    --param u0=0.1 --param tE=20.0 --log-likelihood -1234.56 --n-data-points 1250

    Use --help to see all options including higher-order effects, uncertainties,
    and metadata.

    Parameters come either from repeated --param key=value options or from a
    structured --params-file (mutually exclusive; enforced by
    _params_file_callback). With --dry-run the parsed input, the serialized
    solution, and validation results are printed but nothing is written to
    disk.
    """
    sub = load(str(project_path))
    evt = sub.get_event(event_id)
    params: Dict = {}
    uncertainties: Dict = {}
    if params_file is not None:
        # File source: may carry both parameters and uncertainties.
        params, uncertainties = _parse_structured_params_file(params_file)
    else:
        # CLI source: each --param is key=value, value JSON-decoded when possible.
        for p in param or []:
            if "=" not in p:
                raise typer.BadParameter(f"Invalid parameter format: {p}")
            key, value = p.split("=", 1)
            try:
                params[key] = json.loads(value)
            except json.JSONDecodeError:
                params[key] = value
    allowed_model_types = [
        "1S1L",
        "1S2L",
        "2S1L",
        "2S2L",
        "1S3L",
        "2S3L",
        "other",
    ]
    if model_type not in allowed_model_types:
        # Surface suggestions (e.g. close matches) alongside the raw error.
        error_msg = f"model_type must be one of {allowed_model_types}"
        enhanced_error = format_cli_error_with_suggestions(error_msg, {"model_type": model_type})
        raise typer.BadParameter(enhanced_error)
    # Accept comma-separated shorthand for multi-value options
    # (e.g. "--bands 0,1,2" instead of repeating --bands).
    if bands and len(bands) == 1 and "," in bands[0]:
        bands = bands[0].split(",")
    if higher_order_effect and len(higher_order_effect) == 1 and "," in higher_order_effect[0]:
        higher_order_effect = higher_order_effect[0].split(",")
    # Note: this mutates the in-memory submission even under --dry-run;
    # nothing is persisted in that case because we return before sub.save().
    sol = evt.add_solution(model_type=model_type, parameters=params, alias=alias)
    sol.bands = bands or []
    sol.higher_order_effects = higher_order_effect or []
    sol.t_ref = t_ref
    sol.used_astrometry = used_astrometry
    sol.used_postage_stamps = used_postage_stamps
    sol.limb_darkening_model = limb_darkening_model
    sol.limb_darkening_coeffs = _parse_pairs(limb_darkening_coeff)
    # CLI-provided uncertainties take precedence over any from --params-file.
    sol.parameter_uncertainties = _parse_pairs(parameter_uncertainty) or uncertainties
    sol.physical_parameters = _parse_pairs(physical_param)
    sol.log_likelihood = log_likelihood
    sol.relative_probability = relative_probability
    sol.n_data_points = n_data_points
    if cpu_hours is not None or wall_time_hours is not None:
        sol.set_compute_info(cpu_hours=cpu_hours, wall_time_hours=wall_time_hours)
    sol.lightcurve_plot_path = str(lightcurve_plot_path) if lightcurve_plot_path else None
    sol.lens_plane_plot_path = str(lens_plane_plot_path) if lens_plane_plot_path else None
    # Handle notes file logic: either point at a user-supplied file, or use
    # the canonical per-solution markdown path inside the project tree.
    canonical_notes_path = Path(project_path) / "events" / event_id / "solutions" / f"{sol.solution_id}.md"
    if notes_file is not None:
        sol.notes_path = str(notes_file)
    else:
        sol.notes_path = str(canonical_notes_path.relative_to(project_path))
    if dry_run:
        # Echo what was parsed and what the schema would contain, run
        # validation, and bail out without touching disk.
        parsed = {
            "event_id": event_id,
            "model_type": model_type,
            "parameters": params,
            "bands": bands,
            "higher_order_effects": higher_order_effect,
            "t_ref": t_ref,
            "used_astrometry": used_astrometry,
            "used_postage_stamps": used_postage_stamps,
            "limb_darkening_model": limb_darkening_model,
            "limb_darkening_coeffs": _parse_pairs(limb_darkening_coeff),
            "parameter_uncertainties": _parse_pairs(parameter_uncertainty),
            "physical_parameters": _parse_pairs(physical_param),
            "log_likelihood": log_likelihood,
            "relative_probability": relative_probability,
            "n_data_points": n_data_points,
            "cpu_hours": cpu_hours,
            "wall_time_hours": wall_time_hours,
            "lightcurve_plot_path": (str(lightcurve_plot_path) if lightcurve_plot_path else None),
            "lens_plane_plot_path": (str(lens_plane_plot_path) if lens_plane_plot_path else None),
            "alias": alias,
            "notes_path": sol.notes_path,
        }
        console.print(Panel("Parsed Input", style="cyan"))
        console.print(json.dumps(parsed, indent=2))
        console.print(Panel("Schema Output", style="cyan"))
        console.print(sol.model_dump_json(indent=2))
        validation_messages = sol.run_validation()
        if validation_messages:
            enhanced_messages = enhance_validation_messages(validation_messages, model_type, params)
            console.print(Panel("Validation Warnings", style="yellow"))
            for msg in enhanced_messages:
                console.print(f" • {msg}")
        else:
            console.print(Panel("Solution validated successfully!", style="green"))
        return
    # Only write files if not dry_run
    if notes_file is not None:
        # If a notes file is provided, do not overwrite it, just ensure path is set
        pass
    else:
        if notes is not None:
            # Explicit --notes text replaces the canonical notes file.
            canonical_notes_path.parent.mkdir(parents=True, exist_ok=True)
            canonical_notes_path.write_text(notes, encoding="utf-8")
        elif not canonical_notes_path.exists():
            # No notes given: create an empty placeholder, but never clobber
            # an existing notes file.
            canonical_notes_path.parent.mkdir(parents=True, exist_ok=True)
            canonical_notes_path.write_text("", encoding="utf-8")
    sub.save()
    # Validate after saving and report warnings (non-fatal) or success.
    validation_messages = sol.run_validation()
    if validation_messages:
        enhanced_messages = enhance_validation_messages(validation_messages, model_type, params)
        console.print(Panel("Validation Warnings", style="yellow"))
        for msg in enhanced_messages:
            console.print(f" • {msg}")
    else:
        console.print(
            f"✅ Solution {sol.solution_id} created successfully!",
            style="green",
        )
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def deactivate(
    solution_id: str,
    project_path: Path = typer.Argument(Path("."), help="Project directory"),
) -> None:
    """Mark a solution as inactive so it is excluded from exports.

    Scans every event in the project for the given solution ID. On a match
    the solution is deactivated and the project is saved immediately; if no
    event contains the ID, an error is printed and the command exits with
    status 1.
    """
    submission = load(str(project_path))
    # Find the first (only) solution object carrying this ID, if any.
    target = next(
        (evt.solutions[solution_id] for evt in submission.events.values() if solution_id in evt.solutions),
        None,
    )
    if target is None:
        console.print(f"Solution {solution_id} not found", style="bold red")
        raise typer.Exit(code=1)
    target.deactivate()
    submission.save()
    console.print(f"Deactivated {solution_id}")
|
|
341
|
+
|
|
342
|
+
|
|
343
|
+
def activate(
    solution_id: str,
    project_path: Path = typer.Argument(Path("."), help="Project directory"),
) -> None:
    """Re-enable a previously deactivated solution.

    Looks the solution up across every event in the project; when found it
    is activated and the project saved, otherwise the command exits with
    status 1.
    """
    submission = load(project_path)

    # Locate the (event_id, solution) pair that owns this solution ID.
    located = next(
        (
            (eid, event.solutions[solution_id])
            for eid, event in submission.events.items()
            if solution_id in event.solutions
        ),
        None,
    )

    if located is None:
        console.print(f"[red]Error: Solution {solution_id} not found[/red]")
        raise typer.Exit(1)

    event_id, solution = located
    solution.activate()
    submission.save()
    console.print(f"[green]✅ Activated solution {solution_id[:8]}... in event {event_id}[/green]")
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
def remove_solution(
    solution_id: str,
    force: bool = typer.Option(False, "--force", help="Force removal of saved solutions (use with caution)"),
    project_path: Path = typer.Argument(Path("."), help="Project directory"),
) -> None:
    """Completely remove a solution from the submission.

    Unlike deactivation, this deletes the solution record entirely. The
    model layer may refuse the removal with a ``ValueError`` (safety
    checks); pass ``--force`` to override them, or use ``deactivate`` to
    keep the record.
    """
    submission = load(project_path)

    # Work out which event owns this solution, if any.
    owning_event_id = None
    for eid, event in submission.events.items():
        if solution_id in event.solutions:
            owning_event_id = eid
            break

    if owning_event_id is None:
        console.print(f"[red]Error: Solution {solution_id} not found[/red]")
        raise typer.Exit(1)

    try:
        removed = submission.events[owning_event_id].remove_solution(solution_id, force=force)
        if not removed:
            console.print(f"[red]Error: Failed to remove solution {solution_id}[/red]")
            raise typer.Exit(1)
        submission.save()
        console.print(f"[green]✅ Solution {solution_id[:8]}... removed from event " f"{owning_event_id}[/green]")
    except ValueError as e:
        # The model layer vetoed the removal (e.g. a safety check fired).
        console.print(f"[red]Error: {e}[/red]")
        console.print(
            "[yellow]💡 Use --force to override safety checks, or use deactivate to " "keep the solution[/yellow]"
        )
        raise typer.Exit(1)
|
|
403
|
+
|
|
404
|
+
|
|
405
|
+
def edit_solution(
    solution_id: str,
    relative_probability: Optional[float] = typer.Option(
        None,
        "--relative-probability",
        help="Relative probability of this solution",
    ),
    log_likelihood: Optional[float] = typer.Option(
        None,
        help="Log likelihood [BASIC]",
    ),
    n_data_points: Optional[int] = typer.Option(
        None,
        "--n-data-points",
        help="Number of data points used in this solution",
    ),
    alias: Optional[str] = typer.Option(
        None,
        "--alias",
        help=("Set or update the human-readable alias for this solution " "(must be unique within the event)"),
    ),
    notes: Optional[str] = typer.Option(
        None,
        help=("Notes for the solution (supports Markdown formatting)"),
    ),
    notes_file: Optional[Path] = typer.Option(
        None,
        "--notes-file",
        help=("Path to a Markdown file for solution notes " "(mutually exclusive with --notes)"),
    ),
    append_notes: Optional[str] = typer.Option(
        None,
        "--append-notes",
        help=("Append text to existing notes (use --notes to replace instead)"),
    ),
    clear_notes: bool = typer.Option(False, help="Clear all notes"),
    clear_relative_probability: bool = typer.Option(False, help="Clear relative probability"),
    clear_log_likelihood: bool = typer.Option(False, help="Clear log likelihood"),
    clear_n_data_points: bool = typer.Option(False, help="Clear n_data_points"),
    clear_parameter_uncertainties: bool = typer.Option(False, help="Clear parameter uncertainties"),
    clear_physical_parameters: bool = typer.Option(False, help="Clear physical parameters"),
    cpu_hours: Optional[float] = typer.Option(None, help="CPU hours used"),
    wall_time_hours: Optional[float] = typer.Option(None, help="Wall time hours used"),
    param: Optional[List[str]] = typer.Option(
        None,
        help=("Model parameters as key=value (updates existing parameters)"),
    ),
    param_uncertainty: Optional[List[str]] = typer.Option(
        None,
        "--param-uncertainty",
        help=("Parameter uncertainties as key=value (updates existing uncertainties)"),
    ),
    higher_order_effect: Optional[List[str]] = typer.Option(
        None,
        "--higher-order-effect",
        help="Higher-order effects (replaces existing effects)",
    ),
    clear_higher_order_effects: bool = typer.Option(False, help="Clear all higher-order effects"),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Show what would be changed without saving",
    ),
    project_path: Path = typer.Argument(
        Path("."),
        help="Project directory [BASIC]",
    ),
) -> None:
    """Edit an existing solution's attributes, including file-based notes and alias.

    Every requested modification is compared against the current value and
    recorded in a human-readable change list; the project is saved only when
    at least one change was actually made. For each field, the corresponding
    ``clear_*`` flag takes precedence over a new value. Notes sources are
    mutually prioritized: --notes-file > --notes > --append-notes >
    --clear-notes. With --dry-run the change list is printed and nothing is
    written to disk.
    """
    sub = load(str(project_path))
    # Locate the solution and remember the owning event ID.
    target_solution = None
    target_event_id = None
    for event_id, event in sub.events.items():
        if solution_id in event.solutions:
            target_solution = event.solutions[solution_id]
            target_event_id = event_id
            break
    if target_solution is None:
        console.print(f"Solution {solution_id} not found", style="bold red")
        raise typer.Exit(code=1)
    # Human-readable log of every modification actually applied.
    changes: List[str] = []
    if alias is not None:
        if target_solution.alias != alias:
            changes.append(f"Update alias: {target_solution.alias} → {alias}")
            target_solution.alias = alias
    # Scalar fields: the clear flag wins over a supplied new value.
    if clear_relative_probability:
        if target_solution.relative_probability is not None:
            changes.append(f"Clear relative_probability: {target_solution.relative_probability}")
            target_solution.relative_probability = None
    elif relative_probability is not None:
        if target_solution.relative_probability != relative_probability:
            changes.append(
                f"Update relative_probability: " f"{target_solution.relative_probability} " f"→ {relative_probability}"
            )
            target_solution.relative_probability = relative_probability
    if clear_log_likelihood:
        if target_solution.log_likelihood is not None:
            changes.append(f"Clear log_likelihood: {target_solution.log_likelihood}")
            target_solution.log_likelihood = None
    elif log_likelihood is not None:
        if target_solution.log_likelihood != log_likelihood:
            changes.append(f"Update log_likelihood: {target_solution.log_likelihood} " f"→ {log_likelihood}")
            target_solution.log_likelihood = log_likelihood
    if clear_n_data_points:
        if target_solution.n_data_points is not None:
            changes.append(f"Clear n_data_points: {target_solution.n_data_points}")
            target_solution.n_data_points = None
    elif n_data_points is not None:
        if target_solution.n_data_points != n_data_points:
            changes.append(f"Update n_data_points: {target_solution.n_data_points} " f"→ {n_data_points}")
            target_solution.n_data_points = n_data_points
    # Notes file logic
    canonical_notes_path = (
        Path(project_path) / "events" / target_event_id / "solutions" / f"{target_solution.solution_id}.md"
    )
    if notes_file is not None:
        # Point at the user's own file; its content is left untouched.
        target_solution.notes_path = str(notes_file)
        changes.append(f"Set notes_path to {notes_file}")
    elif notes is not None:
        # Replace the canonical notes file wholesale.
        target_solution.notes_path = str(canonical_notes_path.relative_to(project_path))
        canonical_notes_path.parent.mkdir(parents=True, exist_ok=True)
        canonical_notes_path.write_text(notes, encoding="utf-8")
        changes.append(f"Updated notes in {canonical_notes_path}")
    elif append_notes is not None:
        # Append only works when a notes path is already set; otherwise this
        # branch is a silent no-op.
        if target_solution.notes_path:
            notes_file_path = Path(project_path) / target_solution.notes_path
            old_content = notes_file_path.read_text(encoding="utf-8") if notes_file_path.exists() else ""
            notes_file_path.parent.mkdir(parents=True, exist_ok=True)
            notes_file_path.write_text(old_content + "\n" + append_notes, encoding="utf-8")
            changes.append(f"Appended notes in {notes_file_path}")
    elif clear_notes:
        # Clearing truncates the file but keeps the notes_path association.
        if target_solution.notes_path:
            notes_file_path = Path(project_path) / target_solution.notes_path
            notes_file_path.parent.mkdir(parents=True, exist_ok=True)
            notes_file_path.write_text("", encoding="utf-8")
            changes.append(f"Cleared notes in {notes_file_path}")
    if clear_parameter_uncertainties:
        if target_solution.parameter_uncertainties:
            changes.append("Clear parameter_uncertainties")
            target_solution.parameter_uncertainties = None
    if clear_physical_parameters:
        if target_solution.physical_parameters:
            changes.append("Clear physical_parameters")
            target_solution.physical_parameters = None
    if cpu_hours is not None or wall_time_hours is not None:
        # Merge with existing compute info so an unspecified half is preserved.
        old_cpu = target_solution.compute_info.get("cpu_hours")
        old_wall = target_solution.compute_info.get("wall_time_hours")
        if cpu_hours is not None and old_cpu != cpu_hours:
            changes.append(f"Update cpu_hours: {old_cpu} → {cpu_hours}")
        if wall_time_hours is not None and old_wall != wall_time_hours:
            changes.append(f"Update wall_time_hours: {old_wall} → {wall_time_hours}")
        target_solution.set_compute_info(
            cpu_hours=cpu_hours if cpu_hours is not None else old_cpu,
            wall_time_hours=(wall_time_hours if wall_time_hours is not None else old_wall),
        )
    if param:
        # --param entries merge into (not replace) the existing parameter dict.
        for p in param:
            if "=" not in p:
                raise typer.BadParameter(f"Invalid parameter format: {p}")
            key, value = p.split("=", 1)
            try:
                new_value = json.loads(value)
            except json.JSONDecodeError:
                new_value = value
            old_value = target_solution.parameters.get(key)
            if old_value != new_value:
                changes.append(f"Update parameter {key}: {old_value} → {new_value}")
                target_solution.parameters[key] = new_value
    if param_uncertainty:
        # Same merge semantics for uncertainties; create the dict on demand.
        if target_solution.parameter_uncertainties is None:
            target_solution.parameter_uncertainties = {}
        for p in param_uncertainty:
            if "=" not in p:
                raise typer.BadParameter(f"Invalid uncertainty format: {p}")
            key, value = p.split("=", 1)
            try:
                new_value = json.loads(value)
            except json.JSONDecodeError:
                new_value = value
            old_value = target_solution.parameter_uncertainties.get(key)
            if old_value != new_value:
                changes.append(f"Update uncertainty {key}: {old_value} → {new_value}")
                target_solution.parameter_uncertainties[key] = new_value
    if clear_higher_order_effects:
        if target_solution.higher_order_effects:
            changes.append(f"Clear higher_order_effects: {target_solution.higher_order_effects}")
            target_solution.higher_order_effects = []
    elif higher_order_effect:
        # Unlike --param, --higher-order-effect replaces the whole list.
        if target_solution.higher_order_effects != higher_order_effect:
            changes.append(
                f"Update higher_order_effects: " f"{target_solution.higher_order_effects} " f"→ {higher_order_effect}"
            )
            target_solution.higher_order_effects = higher_order_effect
    if dry_run:
        # Report what would change and return without saving. Note that any
        # notes-file writes above have already happened at this point.
        if changes:
            console.print(Panel(f"Changes for {solution_id} (event {target_event_id})", style="cyan"))
            for change in changes:
                console.print(f" • {change}")
        else:
            console.print(Panel("No changes would be made", style="yellow"))
        return
    if changes:
        sub.save()
        console.print(Panel(f"Updated {solution_id} (event {target_event_id})", style="green"))
        for change in changes:
            console.print(f" • {change}")
    else:
        console.print(Panel("No changes made", style="yellow"))
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
def edit_notes(
    solution_id: str,
    project_path: Path = typer.Argument(Path("."), help="Project directory"),
) -> None:
    """Open the notes file for a solution in a text editor.

    Resolves the solution's ``notes_path`` relative to the project directory
    (creating an empty file if needed), then launches ``$EDITOR`` on it,
    falling back to ``nano`` and then ``vi``. Exits with status 1 when the
    solution, its notes path, or a usable editor cannot be found.
    """
    import shutil

    sub = load(str(project_path))
    for event in sub.events.values():
        if solution_id not in event.solutions:
            continue
        sol = event.solutions[solution_id]
        if not sol.notes_path:
            console.print(
                f"No notes file associated with solution {solution_id}",
                style="bold red",
            )
            raise typer.Exit(code=1)
        # Make sure the notes file exists before handing it to an editor.
        notes_file = Path(project_path) / sol.notes_path
        notes_file.parent.mkdir(parents=True, exist_ok=True)
        if not notes_file.exists():
            notes_file.write_text("", encoding="utf-8")
        editor = os.environ.get("EDITOR", None)
        if editor:
            # $EDITOR may legitimately contain arguments (e.g. "code -w"),
            # so it is run through the shell rather than exec'd directly.
            os.system(f'{editor} "{notes_file}"')
        else:
            # No $EDITOR set: fall back to common terminal editors.
            # shutil.which is portable, unlike shelling out to `command -v`.
            fallback = next((cand for cand in ["nano", "vi"] if shutil.which(cand)), None)
            if fallback is None:
                console.print(
                    f"Could not find an editor to open {notes_file}",
                    style="bold red",
                )
                raise typer.Exit(code=1)
            os.system(f'{fallback} "{notes_file}"')
        return
    console.print(f"Solution {solution_id} not found", style="bold red")
    raise typer.Exit(code=1)
|
|
652
|
+
|
|
653
|
+
|
|
654
|
+
def import_solutions(
    csv_file: Path = typer.Argument(..., help="Path to CSV file containing solutions"),
    parameter_map_file: Optional[Path] = typer.Option(
        None,
        "--parameter-map-file",
        help="YAML file mapping CSV columns to solution attributes",
    ),
    project_path: Path = typer.Option(Path("."), "--project-path", help="Project directory"),
    delimiter: Optional[str] = typer.Option(None, "--delimiter", help="CSV delimiter (auto-detected if not specified)"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be imported without making changes"),
    validate: bool = typer.Option(False, "--validate", help="Validate solution parameters during import"),
    on_duplicate: str = typer.Option(
        "error",
        "--on-duplicate",
        help="How to handle duplicate alias keys: error, override, or ignore",
    ),
) -> None:
    """Bulk-import solutions from a CSV file into the current project.

    Delegates the row processing to ``import_solutions_from_csv``, saves the
    project when anything was imported (and this is not a dry run), then
    prints an import summary including at most ten per-row errors.
    """
    if on_duplicate not in ("error", "override", "ignore"):
        typer.echo(f"❌ Invalid --on-duplicate option: {on_duplicate}")
        typer.echo(" Valid options: error, override, ignore")
        raise typer.Exit(1)

    try:
        submission = load(str(project_path))
    except Exception as exc:  # pragma: no cover - unexpected I/O errors
        typer.echo(f"❌ Failed to load submission: {exc}")
        raise typer.Exit(1)

    try:
        stats = import_solutions_from_csv(
            submission=submission,
            csv_file=csv_file,
            parameter_map_file=parameter_map_file,
            delimiter=delimiter,
            dry_run=dry_run,
            validate=validate,
            on_duplicate=on_duplicate,
            project_path=project_path,
        )
    except Exception as exc:  # pragma: no cover - unexpected parse errors
        typer.echo(f"❌ Failed to import solutions: {exc}")
        raise typer.Exit(1)

    # Persist only when this is a real run and something actually imported.
    if not dry_run and stats["successful_imports"] > 0:
        try:
            submission.save()
        except Exception as exc:  # pragma: no cover - disk failures
            typer.echo(f"❌ Failed to save submission: {exc}")
            raise typer.Exit(1)

    typer.echo("\n📊 Import Summary:")
    for label, key in (
        ("Total rows processed", "total_rows"),
        ("Successful imports", "successful_imports"),
        ("Skipped rows", "skipped_rows"),
        ("Validation errors", "validation_errors"),
        ("Duplicates handled", "duplicate_handled"),
    ):
        typer.echo(f" {label}: {stats[key]}")

    problems = stats["errors"]
    if problems:
        typer.echo("\n⚠️ Errors encountered:")
        for problem in problems[:10]:
            typer.echo(f" {problem}")
        if len(problems) > 10:
            typer.echo(f" ... and {len(problems) - 10} more errors")

    if dry_run:
        typer.echo("\n🔍 Dry run completed - no changes made")
    else:
        typer.echo("\n✅ Import completed successfully")
|