microlens-submit 0.12.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microlens_submit/__init__.py +163 -0
- microlens_submit/api.py +1274 -0
- microlens_submit/assets/github-desktop_logo.png +0 -0
- microlens_submit/assets/rges-pit_logo.png +0 -0
- microlens_submit/cli.py +1803 -0
- microlens_submit/dossier.py +1443 -0
- microlens_submit/validate_parameters.py +639 -0
- microlens_submit-0.12.1.dist-info/METADATA +159 -0
- microlens_submit-0.12.1.dist-info/RECORD +13 -0
- microlens_submit-0.12.1.dist-info/WHEEL +5 -0
- microlens_submit-0.12.1.dist-info/entry_points.txt +2 -0
- microlens_submit-0.12.1.dist-info/licenses/LICENSE +21 -0
- microlens_submit-0.12.1.dist-info/top_level.txt +1 -0
microlens_submit/api.py
ADDED
|
@@ -0,0 +1,1274 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
"""Core API for microlens-submit.
|
|
4
|
+
|
|
5
|
+
This module provides the core data models and API for managing microlensing
|
|
6
|
+
challenge submissions. The main classes are:
|
|
7
|
+
|
|
8
|
+
- :class:`Submission`: Top-level container for a submission project
|
|
9
|
+
- :class:`Event`: Container for solutions to a single microlensing event
|
|
10
|
+
- :class:`Solution`: Individual model fit with parameters and metadata
|
|
11
|
+
|
|
12
|
+
The :class:`Submission` class provides methods for validation, export, and
|
|
13
|
+
persistence. The :func:`load` function is the main entry point for loading
|
|
14
|
+
or creating submission projects.
|
|
15
|
+
|
|
16
|
+
Example:
|
|
17
|
+
>>> from microlens_submit import load
|
|
18
|
+
>>>
|
|
19
|
+
>>> # Load or create a submission project
|
|
20
|
+
>>> submission = load("./my_project")
|
|
21
|
+
>>>
|
|
22
|
+
>>> # Set submission metadata
|
|
23
|
+
>>> submission.team_name = "Team Alpha"
|
|
24
|
+
>>> submission.tier = "advanced"
|
|
25
|
+
>>> submission.repo_url = "https://github.com/team/repo"
|
|
26
|
+
>>>
|
|
27
|
+
>>> # Add an event and solution
|
|
28
|
+
>>> event = submission.get_event("EVENT001")
|
|
29
|
+
>>> solution = event.add_solution("1S1L", {"t0": 2459123.5, "u0": 0.1, "tE": 20.0})
|
|
30
|
+
>>> solution.log_likelihood = -1234.56
|
|
31
|
+
>>> solution.set_compute_info(cpu_hours=2.5, wall_time_hours=0.5)
|
|
32
|
+
>>>
|
|
33
|
+
>>> # Save the submission
|
|
34
|
+
>>> submission.save()
|
|
35
|
+
>>>
|
|
36
|
+
>>> # Export for submission
|
|
37
|
+
>>> submission.export("submission.zip")
|
|
38
|
+
"""
|
|
39
|
+
|
|
40
|
+
import logging
import math
import os
import subprocess
import sys
import uuid
import zipfile
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Literal, Optional

from pydantic import BaseModel, Field
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class Solution(BaseModel):
    """Container for an individual microlensing model fit.

    Stores the numeric parameters of a single fit together with metadata
    about how it was produced. Instances are normally created via
    :meth:`Event.add_solution` and persisted to disk when
    :meth:`Submission.save` is called.

    Attributes:
        solution_id: Unique identifier for the solution (auto-generated UUID).
        model_type: Lens/source configuration such as "1S1L" or "1S2L".
        bands: Photometric bands used in the fit (e.g., ["0", "1", "2"]).
        higher_order_effects: Physical effects modeled (e.g., ["parallax"]).
        t_ref: Reference time for time-dependent effects (Julian Date).
        parameters: Dictionary of model parameters used for the fit.
        is_active: Whether the solution is included in the final export.
        compute_info: Environment metadata populated by :meth:`set_compute_info`.
        posterior_path: Optional path to a file containing posterior samples.
        lightcurve_plot_path: Optional path to the lightcurve plot file.
        lens_plane_plot_path: Optional path to the lens plane plot file.
        notes_path: Path to the markdown notes file for this solution.
        used_astrometry: Whether astrometric information was used when fitting.
        used_postage_stamps: Whether postage stamp data was used.
        limb_darkening_model: Name of the limb darkening model employed.
        limb_darkening_coeffs: Mapping of limb darkening coefficients.
        parameter_uncertainties: Uncertainties for entries in ``parameters``.
        physical_parameters: Physical parameters derived from the model.
        log_likelihood: Log-likelihood value of the fit.
        relative_probability: Optional probability of this solution being the
            best model.
        n_data_points: Number of data points used in the fit.
        creation_timestamp: UTC (timezone-aware, ISO-8601) timestamp recorded
            when the solution was created.

    Example:
        >>> from microlens_submit import load
        >>> submission = load("./my_project")
        >>> event = submission.get_event("EVENT001")
        >>> solution = event.add_solution("1S1L", {
        ...     "t0": 2459123.5,  # Time of closest approach
        ...     "u0": 0.1,        # Impact parameter
        ...     "tE": 20.0,       # Einstein crossing time
        ... })
        >>> solution.log_likelihood = -1234.56
        >>> solution.set_compute_info(cpu_hours=2.5, wall_time_hours=0.5)
        >>> solution.set_notes("# My Solution Notes\n\nThis is a simple point lens fit.")
        >>> messages = solution.validate()
        >>> if messages:
        ...     print("Validation issues:", messages)

    Note:
        The notes file supports Markdown (headers, lists, code blocks,
        tables, links) and is rendered into the submission dossier.
        Always run :meth:`validate` before submission.
    """

    solution_id: str
    model_type: Literal["1S1L", "1S2L", "2S1L", "2S2L", "1S3L", "2S3L", "other"]
    bands: List[str] = Field(default_factory=list)
    higher_order_effects: List[
        Literal[
            "lens-orbital-motion",
            "parallax",
            "finite-source",
            "limb-darkening",
            "xallarap",
            "stellar-rotation",
            "fitted-limb-darkening",
            "gaussian-process",
            "other",
        ]
    ] = Field(default_factory=list)
    t_ref: Optional[float] = None
    parameters: dict
    is_active: bool = True
    compute_info: dict = Field(default_factory=dict)
    posterior_path: Optional[str] = None
    lightcurve_plot_path: Optional[str] = None
    lens_plane_plot_path: Optional[str] = None
    notes_path: Optional[str] = None
    used_astrometry: bool = False
    used_postage_stamps: bool = False
    limb_darkening_model: Optional[str] = None
    limb_darkening_coeffs: Optional[dict] = None
    parameter_uncertainties: Optional[dict] = None
    physical_parameters: Optional[dict] = None
    log_likelihood: Optional[float] = None
    relative_probability: Optional[float] = None
    n_data_points: Optional[int] = None
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a naive
    # datetime; use an explicitly timezone-aware UTC timestamp instead.
    creation_timestamp: str = Field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )

    def set_compute_info(
        self,
        cpu_hours: float | None = None,
        wall_time_hours: float | None = None,
    ) -> None:
        """Record compute metadata and capture environment details.

        Populates :attr:`compute_info` with the supplied timing figures, the
        list of installed Python packages (via ``pip freeze``), and the
        current Git state (commit, branch, dirty flag). Safe to call multiple
        times—previous values are overwritten.

        Args:
            cpu_hours: Total CPU time consumed by the model fit in hours.
            wall_time_hours: Real-world time consumed by the fit in hours.

        Example:
            >>> solution.set_compute_info(cpu_hours=2.5, wall_time_hours=0.5)

        Note:
            If Git is not available or the working directory is not a
            repository, ``git_info`` is set to None. If pip is not
            available, ``dependencies`` is set to an empty list.
        """
        if cpu_hours is not None:
            self.compute_info["cpu_hours"] = cpu_hours
        if wall_time_hours is not None:
            self.compute_info["wall_time_hours"] = wall_time_hours

        try:
            result = subprocess.run(
                [sys.executable, "-m", "pip", "freeze"],
                capture_output=True,
                text=True,
                check=True,
            )
            self.compute_info["dependencies"] = (
                result.stdout.strip().split("\n") if result.stdout else []
            )
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            # Best-effort capture: never fail the caller over missing pip.
            logging.warning("Could not capture pip environment: %s", e)
            self.compute_info["dependencies"] = []

        try:
            commit = subprocess.run(
                ["git", "rev-parse", "HEAD"],
                capture_output=True,
                text=True,
                check=True,
            ).stdout.strip()
            branch = subprocess.run(
                ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                capture_output=True,
                text=True,
                check=True,
            ).stdout.strip()
            status = subprocess.run(
                ["git", "status", "--porcelain"],
                capture_output=True,
                text=True,
                check=True,
            ).stdout.strip()
            self.compute_info["git_info"] = {
                "commit": commit,
                "branch": branch,
                # Any porcelain output means uncommitted changes exist.
                "is_dirty": bool(status),
            }
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            # Best-effort capture: not being in a git repo is not an error.
            logging.warning("Could not capture git info: %s", e)
            self.compute_info["git_info"] = None

    def deactivate(self) -> None:
        """Mark this solution as inactive.

        Inactive solutions are excluded from submission exports and dossier
        generation. Only the ``is_active`` flag changes; the solution data
        remains intact and can be re-enabled with :meth:`activate`.
        """
        self.is_active = False

    def activate(self) -> None:
        """Mark this solution as active.

        Active solutions are included in submission exports and dossier
        generation. This is the default state for new solutions.
        """
        self.is_active = True

    def validate(self) -> list[str]:
        """Validate this solution's parameters and configuration.

        Runs the centralized validation logic: parameter completeness for
        the model type (e.g. 1S1L requires t0, u0, tE; 1S2L additionally
        s, q, alpha), parameter types and physically meaningful ranges,
        parameter-uncertainty consistency, higher-order-effect requirements
        (e.g. parallax needs piEN, piEE and t_ref), band-specific flux
        parameters, and overall solution consistency (e.g.
        relative_probability in [0, 1]).

        Returns:
            list[str]: Human-readable validation messages. An empty list
                means all checks passed; messages may be warnings
                (non-critical) or errors that should be addressed before
                submission.

        Example:
            >>> messages = solution.validate()
            >>> for msg in messages:
            ...     print(f" - {msg}")

        Note:
            Always validate solutions before submission.
        """
        # Imported lazily to avoid a circular import at module load time.
        from .validate_parameters import (
            check_solution_completeness,
            validate_parameter_types,
            validate_solution_consistency,
            validate_parameter_uncertainties,
        )

        messages = []

        # Check solution completeness
        completeness_messages = check_solution_completeness(
            model_type=self.model_type,
            parameters=self.parameters,
            higher_order_effects=self.higher_order_effects,
            bands=self.bands,
            t_ref=self.t_ref,
        )
        messages.extend(completeness_messages)

        # Check parameter types
        type_messages = validate_parameter_types(
            parameters=self.parameters,
            model_type=self.model_type,
        )
        messages.extend(type_messages)

        # Check parameter uncertainties
        uncertainty_messages = validate_parameter_uncertainties(
            parameters=self.parameters,
            uncertainties=self.parameter_uncertainties,
        )
        messages.extend(uncertainty_messages)

        # Check solution consistency
        consistency_messages = validate_solution_consistency(
            model_type=self.model_type,
            parameters=self.parameters,
            relative_probability=self.relative_probability,
        )
        messages.extend(consistency_messages)

        return messages

    def _save(self, event_path: Path) -> None:
        """Write this solution to ``<event>/solutions/<solution_id>.json``.

        Args:
            event_path: Directory of the parent event within the project.

        Note:
            Internal method; called automatically by ``Event._save()`` when
            the submission is saved.
        """
        solutions_dir = event_path / "solutions"
        solutions_dir.mkdir(parents=True, exist_ok=True)
        out_path = solutions_dir / f"{self.solution_id}.json"
        with out_path.open("w", encoding="utf-8") as fh:
            fh.write(self.model_dump_json(indent=2))

    def get_notes(self, project_root: Optional[Path] = None) -> str:
        """Read notes from the notes file, if present.

        Args:
            project_root: Optional project root for resolving a relative
                ``notes_path``. If None, a relative path is resolved against
                the current working directory.

        Returns:
            str: Contents of the notes file, or "" if ``notes_path`` is
                unset or the file does not exist.
        """
        if not self.notes_path:
            return ""
        path = Path(self.notes_path)
        if not path.is_absolute() and project_root is not None:
            path = project_root / path
        if path.exists():
            return path.read_text(encoding="utf-8")
        return ""

    def set_notes(self, content: str, project_root: Optional[Path] = None) -> None:
        """Write notes to the notes file, creating it if needed.

        If ``notes_path`` is not set, a temporary file under
        ``tmp/<solution_id>.md`` is created and ``notes_path`` is pointed at
        it; on ``Submission.save()`` temporary notes files are moved to
        their canonical location.

        Args:
            content: Markdown content to write to the notes file.
            project_root: Optional project root for resolving a relative
                ``notes_path``. If None, the current working directory is
                used.

        Note:
            Markdown formatting is supported and rendered as HTML in the
            dossier, including syntax highlighting for code blocks.
        """
        if not self.notes_path:
            # No canonical location yet: stage under tmp/ until save().
            tmp_dir = Path(project_root or ".") / "tmp"
            tmp_dir.mkdir(parents=True, exist_ok=True)
            tmp_path = tmp_dir / f"{self.solution_id}.md"
            # Store the path relative to the project root so the project
            # directory stays relocatable.
            self.notes_path = str(tmp_path.relative_to(project_root or "."))
        path = Path(self.notes_path)
        if not path.is_absolute() and project_root is not None:
            path = project_root / path
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(content, encoding="utf-8")

    @property
    def notes(self) -> str:
        """Return the Markdown notes string from the notes file (read-only).

        Resolves a relative ``notes_path`` against the current working
        directory; for explicit control use :meth:`get_notes` with
        ``project_root``. Use :meth:`set_notes` to modify the notes.
        """
        return self.get_notes()

    def view_notes(self, render_html: bool = True, project_root: Optional[Path] = None) -> str:
        """Return the notes as Markdown or rendered HTML.

        Args:
            render_html: If True, return HTML rendered with the ``markdown``
                package (with "extra", "tables" and "fenced_code"
                extensions); if False, return the raw Markdown string.
            project_root: Optional project root for relative ``notes_path``
                resolution.

        Returns:
            str: Markdown or HTML string depending on ``render_html``.

        Note:
            The HTML form is convenient for display in Jupyter notebooks or
            other HTML contexts.
        """
        md = self.get_notes(project_root=project_root)
        if render_html:
            # Imported lazily so the package works without the optional
            # markdown dependency unless HTML rendering is requested.
            import markdown
            return markdown.markdown(md or "", extensions=["extra", "tables", "fenced_code"])
        return md
|
|
555
|
+
|
|
556
|
+
|
|
557
|
+
class Event(BaseModel):
|
|
558
|
+
"""A collection of solutions for a single microlensing event.
|
|
559
|
+
|
|
560
|
+
Events act as containers that group one or more :class:`Solution` objects
|
|
561
|
+
under a common ``event_id``. They are created on demand via
|
|
562
|
+
:meth:`Submission.get_event` and are written to disk when the parent
|
|
563
|
+
submission is saved.
|
|
564
|
+
|
|
565
|
+
Attributes:
|
|
566
|
+
event_id: Identifier used to reference the event within the project.
|
|
567
|
+
solutions: Mapping of solution IDs to :class:`Solution` instances.
|
|
568
|
+
submission: The parent :class:`Submission` or ``None`` if detached.
|
|
569
|
+
|
|
570
|
+
Example:
|
|
571
|
+
>>> from microlens_submit import load
|
|
572
|
+
>>>
|
|
573
|
+
>>> # Load a submission and get/create an event
|
|
574
|
+
>>> submission = load("./my_project")
|
|
575
|
+
>>> event = submission.get_event("EVENT001")
|
|
576
|
+
>>>
|
|
577
|
+
>>> # Add multiple solutions to the event
|
|
578
|
+
>>> solution1 = event.add_solution("1S1L", {
|
|
579
|
+
... "t0": 2459123.5, "u0": 0.1, "tE": 20.0
|
|
580
|
+
... })
|
|
581
|
+
>>> solution2 = event.add_solution("1S2L", {
|
|
582
|
+
... "t0": 2459123.5, "u0": 0.1, "tE": 20.0,
|
|
583
|
+
... "s": 1.2, "q": 0.5, "alpha": 45.0
|
|
584
|
+
... })
|
|
585
|
+
>>>
|
|
586
|
+
>>> # Get active solutions
|
|
587
|
+
>>> active_solutions = event.get_active_solutions()
|
|
588
|
+
>>> print(f"Event {event.event_id} has {len(active_solutions)} active solutions")
|
|
589
|
+
>>>
|
|
590
|
+
>>> # Deactivate a solution
|
|
591
|
+
>>> solution1.deactivate()
|
|
592
|
+
>>>
|
|
593
|
+
>>> # Save the submission (includes all events and solutions)
|
|
594
|
+
>>> submission.save()
|
|
595
|
+
|
|
596
|
+
Note:
|
|
597
|
+
Events are automatically created when you call submission.get_event()
|
|
598
|
+
with a new event_id. All solutions for an event are stored together
|
|
599
|
+
in the project directory structure.
|
|
600
|
+
"""
|
|
601
|
+
|
|
602
|
+
event_id: str
|
|
603
|
+
solutions: Dict[str, Solution] = Field(default_factory=dict)
|
|
604
|
+
submission: Optional["Submission"] = Field(default=None, exclude=True)
|
|
605
|
+
|
|
606
|
+
def add_solution(self, model_type: str, parameters: dict) -> Solution:
|
|
607
|
+
"""Create and attach a new solution to this event.
|
|
608
|
+
|
|
609
|
+
Parameters are stored as provided and the new solution is returned for
|
|
610
|
+
further modification. A unique solution_id is automatically generated.
|
|
611
|
+
|
|
612
|
+
Args:
|
|
613
|
+
model_type: Short label describing the model type (e.g., "1S1L", "1S2L").
|
|
614
|
+
parameters: Dictionary of model parameters for the fit.
|
|
615
|
+
|
|
616
|
+
Returns:
|
|
617
|
+
Solution: The newly created solution instance.
|
|
618
|
+
|
|
619
|
+
Example:
|
|
620
|
+
>>> event = submission.get_event("EVENT001")
|
|
621
|
+
>>>
|
|
622
|
+
>>> # Create a simple point lens solution
|
|
623
|
+
>>> solution = event.add_solution("1S1L", {
|
|
624
|
+
... "t0": 2459123.5, # Time of closest approach
|
|
625
|
+
... "u0": 0.1, # Impact parameter
|
|
626
|
+
... "tE": 20.0 # Einstein crossing time
|
|
627
|
+
... })
|
|
628
|
+
>>>
|
|
629
|
+
>>> # The solution is automatically added to the event
|
|
630
|
+
>>> print(f"Event now has {len(event.solutions)} solutions")
|
|
631
|
+
>>> print(f"Solution ID: {solution.solution_id}")
|
|
632
|
+
|
|
633
|
+
Note:
|
|
634
|
+
The solution is automatically marked as active and assigned a
|
|
635
|
+
unique UUID. You can modify the solution attributes after creation
|
|
636
|
+
and then save the submission to persist changes.
|
|
637
|
+
"""
|
|
638
|
+
solution_id = str(uuid.uuid4())
|
|
639
|
+
sol = Solution(
|
|
640
|
+
solution_id=solution_id, model_type=model_type, parameters=parameters
|
|
641
|
+
)
|
|
642
|
+
self.solutions[solution_id] = sol
|
|
643
|
+
return sol
|
|
644
|
+
|
|
645
|
+
def get_solution(self, solution_id: str) -> Solution:
|
|
646
|
+
"""Return a previously added solution.
|
|
647
|
+
|
|
648
|
+
Args:
|
|
649
|
+
solution_id: Identifier of the solution to retrieve.
|
|
650
|
+
|
|
651
|
+
Returns:
|
|
652
|
+
Solution: The corresponding solution.
|
|
653
|
+
|
|
654
|
+
Raises:
|
|
655
|
+
KeyError: If the solution_id is not found in this event.
|
|
656
|
+
|
|
657
|
+
Example:
|
|
658
|
+
>>> event = submission.get_event("EVENT001")
|
|
659
|
+
>>>
|
|
660
|
+
>>> # Get a specific solution
|
|
661
|
+
>>> solution = event.get_solution("solution_uuid_here")
|
|
662
|
+
>>> print(f"Model type: {solution.model_type}")
|
|
663
|
+
>>> print(f"Parameters: {solution.parameters}")
|
|
664
|
+
|
|
665
|
+
Note:
|
|
666
|
+
Use this method to retrieve existing solutions. If you need to
|
|
667
|
+
create a new solution, use add_solution() instead.
|
|
668
|
+
"""
|
|
669
|
+
return self.solutions[solution_id]
|
|
670
|
+
|
|
671
|
+
def get_active_solutions(self) -> list[Solution]:
|
|
672
|
+
"""Return all solutions currently marked as active.
|
|
673
|
+
|
|
674
|
+
Returns:
|
|
675
|
+
list[Solution]: List of all active solutions in this event.
|
|
676
|
+
|
|
677
|
+
Example:
|
|
678
|
+
>>> event = submission.get_event("EVENT001")
|
|
679
|
+
>>>
|
|
680
|
+
>>> # Get only active solutions
|
|
681
|
+
>>> active_solutions = event.get_active_solutions()
|
|
682
|
+
>>> print(f"Event has {len(active_solutions)} active solutions")
|
|
683
|
+
>>>
|
|
684
|
+
>>> # Only active solutions are included in exports
|
|
685
|
+
>>> for solution in active_solutions:
|
|
686
|
+
... print(f"- {solution.solution_id}: {solution.model_type}")
|
|
687
|
+
|
|
688
|
+
Note:
|
|
689
|
+
Only active solutions are included in submission exports and
|
|
690
|
+
dossier generation. Use deactivate() to exclude solutions from
|
|
691
|
+
the final submission.
|
|
692
|
+
"""
|
|
693
|
+
return [sol for sol in self.solutions.values() if sol.is_active]
|
|
694
|
+
|
|
695
|
+
def clear_solutions(self) -> None:
|
|
696
|
+
"""Deactivate every solution associated with this event.
|
|
697
|
+
|
|
698
|
+
This method marks all solutions in the event as inactive, effectively
|
|
699
|
+
removing them from submission exports and dossier generation.
|
|
700
|
+
|
|
701
|
+
Example:
|
|
702
|
+
>>> event = submission.get_event("EVENT001")
|
|
703
|
+
>>>
|
|
704
|
+
>>> # Deactivate all solutions in this event
|
|
705
|
+
>>> event.clear_solutions()
|
|
706
|
+
>>>
|
|
707
|
+
>>> # Now no solutions are active
|
|
708
|
+
>>> active_solutions = event.get_active_solutions()
|
|
709
|
+
>>> print(f"Active solutions: {len(active_solutions)}") # 0
|
|
710
|
+
|
|
711
|
+
Note:
|
|
712
|
+
This only deactivates solutions; they are not deleted. You can
|
|
713
|
+
reactivate individual solutions using solution.activate().
|
|
714
|
+
"""
|
|
715
|
+
for sol in self.solutions.values():
|
|
716
|
+
sol.is_active = False
|
|
717
|
+
|
|
718
|
+
@classmethod
def _from_dir(cls, event_dir: Path, submission: "Submission") -> "Event":
    """Reconstruct an event (and its solutions) from its on-disk directory."""
    meta = event_dir / "event.json"
    if meta.exists():
        event = cls.model_validate_json(meta.read_text(encoding="utf-8"))
    else:
        # No metadata file yet: the directory name doubles as the event ID.
        event = cls(event_id=event_dir.name)
    event.submission = submission
    sol_dir = event_dir / "solutions"
    if sol_dir.exists():
        for sol_file in sol_dir.glob("*.json"):
            solution = Solution.model_validate_json(sol_file.read_text(encoding="utf-8"))
            event.solutions[solution.solution_id] = solution
    return event
def _save(self) -> None:
    """Serialize this event and all of its solutions under the project directory.

    Raises:
        ValueError: If the event has no parent submission to resolve the
            project path from.
    """
    if self.submission is None:
        raise ValueError("Event is not attached to a submission")
    event_dir = Path(self.submission.project_path) / "events" / self.event_id
    event_dir.mkdir(parents=True, exist_ok=True)
    # The solutions and the back-reference to the submission are persisted
    # separately, so exclude them from the event's own JSON.
    payload = self.model_dump_json(exclude={"solutions", "submission"}, indent=2)
    (event_dir / "event.json").write_text(payload, encoding="utf-8")
    for solution in self.solutions.values():
        solution._save(event_dir)
class Submission(BaseModel):
    """Top-level object representing an on-disk submission project.

    A ``Submission`` manages a collection of :class:`Event` objects and handles
    serialization to the project directory. Users typically obtain an instance
    via :func:`load` and then interact with events and solutions before calling
    :meth:`save` or :meth:`export`.

    Attributes:
        project_path: Root directory where submission files are stored.
        team_name: Name of the participating team.
        tier: Challenge tier for the submission (e.g., "basic", "advanced").
        hardware_info: Optional dictionary describing the compute platform.
        events: Mapping of event IDs to :class:`Event` instances.
        repo_url: GitHub repository URL for the team codebase.

    Example:
        >>> from microlens_submit import load
        >>>
        >>> submission = load("./my_project")
        >>> submission.team_name = "Team Alpha"
        >>> submission.tier = "advanced"
        >>> submission.repo_url = "https://github.com/team/microlens-submit"
        >>>
        >>> event = submission.get_event("EVENT001")
        >>> solution = event.add_solution("1S1L", {"t0": 2459123.5, "u0": 0.1, "tE": 20.0})
        >>>
        >>> warnings = submission.validate()
        >>> if not warnings:
        ...     submission.save()
        ...     submission.export("submission.zip")

    Note:
        The submission project structure is automatically created when you
        first call load() with a new directory. All data is stored in JSON
        format with a clear directory structure for events and solutions.
    """

    # Runtime-only location of the project on disk; excluded from JSON output.
    project_path: str = Field(default="", exclude=True)
    team_name: str = ""
    tier: str = ""
    hardware_info: Optional[dict] = None
    events: Dict[str, Event] = Field(default_factory=dict)
    repo_url: Optional[str] = None

    def validate(self) -> list[str]:
        """Check the submission for missing or incomplete information.

        Performs lightweight validation and returns a list of warnings
        describing potential issues. It does not raise exceptions and can be
        used to provide user feedback prior to exporting.

        Returns:
            list[str]: Human-readable warning messages. An empty list
                indicates no issues were found.

        Example:
            >>> warnings = submission.validate()
            >>> for warning in warnings:
            ...     print(f" - {warning}")

        Note:
            Checks for a missing repo_url, events without active solutions,
            relative probabilities that do not normalize to 1.0, per-solution
            validation problems, and missing metadata (log_likelihood, plot
            paths, compute info). Always validate before exporting.
        """
        # Named `messages` (not `warnings`) to avoid shadowing the stdlib
        # `warnings` module name.
        messages: list[str] = []
        if not self.hardware_info:
            messages.append("Hardware info is missing")

        # Check for missing or invalid repo_url.
        if not self.repo_url or not isinstance(self.repo_url, str) or not self.repo_url.strip():
            messages.append("repo_url (GitHub repository URL) is missing from submission.json")
        elif "github.com" not in self.repo_url:
            messages.append(f"repo_url does not appear to be a valid GitHub URL: {self.repo_url}")

        for event in self.events.values():
            active = [sol for sol in event.solutions.values() if sol.is_active]
            if not active:
                messages.append(f"Event {event.event_id} has no active solutions")
                continue

            if len(active) > 1:
                # Multiple active solutions - their relative probabilities
                # should form a normalized distribution.
                total_prob = sum(sol.relative_probability or 0.0 for sol in active)
                # Allow small floating point errors.
                if total_prob > 0.0 and abs(total_prob - 1.0) > 1e-6:
                    messages.append(
                        f"Event {event.event_id}: Relative probabilities for active solutions sum to {total_prob:.3f}, "
                        f"should sum to 1.0. Solutions: {[sol.solution_id[:8] + '...' for sol in active]}"
                    )
            else:
                # Single active solution - probability should be 1.0 or None.
                sol = active[0]
                if sol.relative_probability is not None and abs(sol.relative_probability - 1.0) > 1e-6:
                    messages.append(
                        f"Event {event.event_id}: Single active solution has relative_probability {sol.relative_probability:.3f}, "
                        f"should be 1.0 or None"
                    )

            for sol in active:
                # Delegate parameter-level checks to the solution itself.
                for msg in sol.validate():
                    messages.append(f"Solution {sol.solution_id} in event {event.event_id}: {msg}")

                # Additional checks for missing metadata.
                if sol.log_likelihood is None:
                    messages.append(
                        f"Solution {sol.solution_id} in event {event.event_id} is missing log_likelihood"
                    )
                if sol.lightcurve_plot_path is None:
                    messages.append(
                        f"Solution {sol.solution_id} in event {event.event_id} is missing lightcurve_plot_path"
                    )
                if sol.lens_plane_plot_path is None:
                    messages.append(
                        f"Solution {sol.solution_id} in event {event.event_id} is missing lens_plane_plot_path"
                    )
                # Check for missing compute info.
                compute_info = sol.compute_info or {}
                if "cpu_hours" not in compute_info:
                    messages.append(
                        f"Solution {sol.solution_id} in event {event.event_id} is missing cpu_hours"
                    )
                if "wall_time_hours" not in compute_info:
                    messages.append(
                        f"Solution {sol.solution_id} in event {event.event_id} is missing wall_time_hours"
                    )

        return messages

    def get_event(self, event_id: str) -> Event:
        """Return the event with ``event_id``, creating it on demand.

        If the event does not yet exist in the submission it is created
        automatically and attached to the submission.

        Args:
            event_id: Identifier of the event.

        Returns:
            Event: The corresponding event object.

        Note:
            Events created here are persisted the next time
            submission.save() is called.
        """
        if event_id not in self.events:
            self.events[event_id] = Event(event_id=event_id, submission=self)
        return self.events[event_id]

    def autofill_nexus_info(self) -> None:
        """Populate :attr:`hardware_info` with Roman Nexus platform details.

        Reads a few well-known files from the Roman Science Platform
        environment to infer CPU model, available memory and the image
        identifier. Missing information is silently ignored, so this is
        safe to call on any platform.

        Note:
            Sources are the JUPYTER_IMAGE_SPEC environment variable,
            /proc/cpuinfo, and /proc/meminfo.
        """
        if self.hardware_info is None:
            self.hardware_info = {}

        try:
            image = os.environ.get("JUPYTER_IMAGE_SPEC")
            if image:
                self.hardware_info["nexus_image"] = image
        except Exception as exc:  # pragma: no cover - environment may not exist
            logging.debug("Failed to read JUPYTER_IMAGE_SPEC: %s", exc)

        try:
            with open("/proc/cpuinfo", "r", encoding="utf-8") as fh:
                for line in fh:
                    if line.lower().startswith("model name"):
                        self.hardware_info["cpu_details"] = line.split(":", 1)[1].strip()
                        break
        except OSError as exc:  # pragma: no cover
            logging.debug("Failed to read /proc/cpuinfo: %s", exc)

        try:
            with open("/proc/meminfo", "r", encoding="utf-8") as fh:
                for line in fh:
                    if line.startswith("MemTotal"):
                        mem_kb = int(line.split(":", 1)[1].strip().split()[0])
                        self.hardware_info["memory_gb"] = round(mem_kb / 1024**2, 2)
                        break
        except OSError as exc:  # pragma: no cover
            logging.debug("Failed to read /proc/meminfo: %s", exc)

    def save(self) -> None:
        """Persist the current state of the submission to ``project_path``.

        Writes submission.json plus every event and its solutions to disk.
        Any temporary notes files under tmp/ are first moved to their
        canonical locations at events/{event_id}/solutions/{solution_id}.md.

        Note:
            Always call save() after making changes to persist them; the
            project directory structure is created as needed.
        """
        project = Path(self.project_path)
        events_dir = project / "events"
        events_dir.mkdir(parents=True, exist_ok=True)
        # Move any notes files from tmp/ to their canonical location.
        for event in self.events.values():
            for sol in event.solutions.values():
                if sol.notes_path:
                    notes_path = Path(sol.notes_path)
                    if notes_path.parts and notes_path.parts[0] == "tmp":
                        canonical = Path("events") / event.event_id / "solutions" / f"{sol.solution_id}.md"
                        src = project / notes_path
                        dst = project / canonical
                        dst.parent.mkdir(parents=True, exist_ok=True)
                        if src.exists():
                            src.replace(dst)
                        sol.notes_path = str(canonical)
        with (project / "submission.json").open("w", encoding="utf-8") as fh:
            fh.write(self.model_dump_json(exclude={"events", "project_path"}, indent=2))
        for event in self.events.values():
            # Re-attach the back-reference in case the event was detached.
            event.submission = self
            event._save()

    def export(self, output_path: str) -> None:
        """Create a zip archive of all active solutions.

        The archive is created using ``zipfile.ZIP_DEFLATED`` compression to
        minimize file size. Only active solutions are included in the export.

        Args:
            output_path: Destination path for the zip archive.

        Raises:
            ValueError: If referenced files (plots, posterior data) don't exist.
            OSError: If unable to create the zip file.

        Note:
            The export includes submission.json, per-event event.json files,
            every active solution (with file references rewritten to
            archive-relative paths), notes files, and the referenced plot /
            posterior files. Relative probabilities are automatically
            calculated for solutions that don't have them set, using BIC
            weights when log_likelihood, n_data_points and parameters are
            all available, otherwise splitting the remainder equally.
        """
        project = Path(self.project_path)
        with zipfile.ZipFile(output_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
            submission_json = project / "submission.json"
            if submission_json.exists():
                zf.write(submission_json, arcname="submission.json")
            events_dir = project / "events"
            for event in self.events.values():
                event_dir = events_dir / event.event_id
                event_json = event_dir / "event.json"
                if event_json.exists():
                    zf.write(event_json, arcname=f"events/{event.event_id}/event.json")
                active_sols = [s for s in event.solutions.values() if s.is_active]

                # Determine relative probabilities for this event.
                rel_prob_map: dict[str, float] = {}
                if active_sols:
                    provided_sum = sum(
                        s.relative_probability or 0.0
                        for s in active_sols
                        if s.relative_probability is not None
                    )
                    need_calc = [s for s in active_sols if s.relative_probability is None]
                    if need_calc:
                        # BIC requires a log-likelihood, a positive number of
                        # data points and at least one parameter per solution.
                        can_calc = all(
                            s.log_likelihood is not None
                            and s.n_data_points is not None
                            and s.n_data_points > 0
                            and len(s.parameters) > 0
                            for s in need_calc
                        )
                        # Probability mass left after explicitly-set values.
                        remaining = max(1.0 - provided_sum, 0.0)
                        if can_calc:
                            bic_vals = {
                                s.solution_id: len(s.parameters) * math.log(s.n_data_points)
                                - 2 * s.log_likelihood
                                for s in need_calc
                            }
                            bic_min = min(bic_vals.values())
                            weights = {
                                sid: math.exp(-0.5 * (bic - bic_min))
                                for sid, bic in bic_vals.items()
                            }
                            wsum = sum(weights.values())
                            for sid, w in weights.items():
                                rel_prob_map[sid] = (
                                    remaining * w / wsum
                                    if wsum > 0
                                    else remaining / len(weights)
                                )
                            logging.warning(
                                "relative_probability calculated for event %s using BIC",
                                event.event_id,
                            )
                        else:
                            eq = remaining / len(need_calc) if need_calc else 0.0
                            for s in need_calc:
                                rel_prob_map[s.solution_id] = eq
                            logging.warning(
                                "relative_probability set equally for event %s due to missing data",
                                event.event_id,
                            )

                for sol in active_sols:
                    sol_path = event_dir / "solutions" / f"{sol.solution_id}.json"
                    if sol_path.exists():
                        arc = f"events/{event.event_id}/solutions/{sol.solution_id}.json"
                        export_sol = sol.model_copy()
                        # Rewrite file references on the exported copy so they
                        # point at the files' locations inside the archive.
                        # (Bug fix: previously a literal placeholder was
                        # written here instead of the actual file name.)
                        for attr in [
                            "posterior_path",
                            "lightcurve_plot_path",
                            "lens_plane_plot_path",
                        ]:
                            path = getattr(sol, attr)
                            if path is not None:
                                filename = Path(path).name
                                new_path = f"events/{event.event_id}/solutions/{sol.solution_id}/{filename}"
                                setattr(export_sol, attr, new_path)
                        if sol.notes_path:
                            notes_file = Path(self.project_path) / sol.notes_path
                            if notes_file.exists():
                                notes_filename = notes_file.name
                                notes_arc = f"events/{event.event_id}/solutions/{sol.solution_id}/{notes_filename}"
                                export_sol.notes_path = notes_arc
                                zf.write(notes_file, arcname=notes_arc)
                        if export_sol.relative_probability is None:
                            export_sol.relative_probability = rel_prob_map.get(sol.solution_id)
                        zf.writestr(arc, export_sol.model_dump_json(indent=2))
                        # Include any referenced external files.
                        sol_dir_arc = f"events/{event.event_id}/solutions/{sol.solution_id}"
                        for attr in [
                            "posterior_path",
                            "lightcurve_plot_path",
                            "lens_plane_plot_path",
                        ]:
                            path = getattr(sol, attr)
                            if path is not None:
                                file_path = Path(self.project_path) / path
                                if not file_path.exists():
                                    raise ValueError(
                                        f"Error: File specified by {attr} in solution {sol.solution_id} does not exist: {file_path}"
                                    )
                                zf.write(
                                    file_path,
                                    arcname=f"{sol_dir_arc}/{Path(path).name}",
                                )
def load(project_path: str) -> Submission:
    """Load an existing submission project or create a new one.

    The directory specified by ``project_path`` becomes the working directory
    for all subsequent operations. If the directory does not exist, a new
    project structure (submission.json plus an events/ directory) is created
    automatically.

    Args:
        project_path: Location of the submission project on disk.

    Returns:
        Submission: The loaded or newly created submission instance.

    Raises:
        OSError: If unable to create the project directory or read files.
        ValueError: If an existing submission.json is invalid.

    Example:
        >>> from microlens_submit import load
        >>> submission = load("./my_project")
        >>> submission.team_name = "Team Beta"
        >>> submission.save()

    Note:
        This is the main entry point for working with submission projects.
        All subsequent operations (adding events, solutions, etc.) work with
        the returned Submission instance.
    """
    project = Path(project_path)
    events_dir = project / "events"

    if not project.exists():
        # Fresh project: lay down the directory skeleton and a minimal
        # submission.json, then return the empty submission.
        events_dir.mkdir(parents=True, exist_ok=True)
        submission = Submission(project_path=str(project))
        payload = submission.model_dump_json(exclude={"events", "project_path"}, indent=2)
        (project / "submission.json").write_text(payload, encoding="utf-8")
        return submission

    manifest = project / "submission.json"
    if manifest.exists():
        submission = Submission.model_validate_json(manifest.read_text(encoding="utf-8"))
        submission.project_path = str(project)
    else:
        submission = Submission(project_path=str(project))

    if events_dir.exists():
        for entry in events_dir.iterdir():
            if entry.is_dir():
                loaded = Event._from_dir(entry, submission)
                submission.events[loaded.event_id] = loaded

    return submission
# Resolve forward references: Event and Submission reference each other
# (Event.submission / Submission.events), so both models are rebuilt once
# all classes in this module are defined.
Event.model_rebuild()
Submission.model_rebuild()