microlens-submit 0.12.2__py3-none-any.whl → 0.16.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microlens_submit/__init__.py +7 -157
- microlens_submit/cli/__init__.py +5 -0
- microlens_submit/cli/__main__.py +6 -0
- microlens_submit/cli/commands/__init__.py +1 -0
- microlens_submit/cli/commands/dossier.py +139 -0
- microlens_submit/cli/commands/export.py +177 -0
- microlens_submit/cli/commands/init.py +172 -0
- microlens_submit/cli/commands/solutions.py +722 -0
- microlens_submit/cli/commands/validation.py +241 -0
- microlens_submit/cli/main.py +120 -0
- microlens_submit/dossier/__init__.py +51 -0
- microlens_submit/dossier/dashboard.py +503 -0
- microlens_submit/dossier/event_page.py +370 -0
- microlens_submit/dossier/full_report.py +330 -0
- microlens_submit/dossier/solution_page.py +534 -0
- microlens_submit/dossier/utils.py +111 -0
- microlens_submit/error_messages.py +283 -0
- microlens_submit/models/__init__.py +28 -0
- microlens_submit/models/event.py +406 -0
- microlens_submit/models/solution.py +569 -0
- microlens_submit/models/submission.py +569 -0
- microlens_submit/tier_validation.py +208 -0
- microlens_submit/utils.py +373 -0
- microlens_submit/validate_parameters.py +478 -180
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.1.dist-info}/METADATA +52 -14
- microlens_submit-0.16.1.dist-info/RECORD +32 -0
- microlens_submit/api.py +0 -1257
- microlens_submit/cli.py +0 -1803
- microlens_submit/dossier.py +0 -1443
- microlens_submit-0.12.2.dist-info/RECORD +0 -13
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.1.dist-info}/WHEEL +0 -0
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.1.dist-info}/entry_points.txt +0 -0
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.1.dist-info}/licenses/LICENSE +0 -0
- {microlens_submit-0.12.2.dist-info → microlens_submit-0.16.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,569 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Submission model for microlens-submit.
|
|
3
|
+
|
|
4
|
+
This module contains the Submission class, which represents the top-level
|
|
5
|
+
container for a microlensing challenge submission project.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import math
|
|
11
|
+
import os
|
|
12
|
+
import zipfile
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Dict, List, Optional
|
|
15
|
+
|
|
16
|
+
from pydantic import BaseModel, Field
|
|
17
|
+
|
|
18
|
+
from .event import Event
|
|
19
|
+
from .solution import Solution
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class Submission(BaseModel):
    """Top-level object representing an on-disk submission project.

    A ``Submission`` manages a collection of :class:`Event` objects and handles
    serialization to the project directory. Users typically obtain an instance
    via :func:`load` and then interact with events and solutions before calling
    :meth:`save` or :meth:`export`.

    Attributes:
        project_path: Root directory where submission files are stored.
        team_name: Name of the participating team (required for validation).
        tier: Challenge tier for the submission (e.g., "basic", "advanced")
            (required for validation).
        hardware_info: Dictionary describing the compute platform (required
            for validation).
        events: Mapping of event IDs to :class:`Event` instances.
        repo_url: GitHub repository URL for the team codebase (required for
            validation).

    Example:
        >>> from microlens_submit import load
        >>>
        >>> # Load or create a submission project
        >>> submission = load("./my_project")
        >>>
        >>> # Set submission metadata
        >>> submission.team_name = "Team Alpha"
        >>> submission.tier = "advanced"
        >>> submission.repo_url = "https://github.com/team/microlens-submit"
        >>>
        >>> # Add events and solutions
        >>> event1 = submission.get_event("EVENT001")
        >>> solution1 = event1.add_solution("1S1L", {"t0": 2459123.5, "u0": 0.1, "tE": 20.0})
        >>>
        >>> event2 = submission.get_event("EVENT002")
        >>> params2 = {"t0": 2459156.2, "u0": 0.08, "tE": 35.7, "s": 0.95, "q": 0.0005, "alpha": 78.3}
        >>> solution2 = event2.add_solution("1S2L", params2)
        >>>
        >>> # Validate the submission
        >>> warnings = submission.run_validation()
        >>> if warnings:
        ...     print("Validation warnings:")
        ...     for warning in warnings:
        ...         print(f"  - {warning}")
        ... else:
        ...     print("✅ Submission is valid!")
        >>>
        >>> # Save the submission
        >>> submission.save()
        >>>
        >>> # Export for submission
        >>> submission.export("submission.zip")

    Note:
        The submission project structure is automatically created when you
        first call load() with a new directory. All data is stored in JSON
        format with a clear directory structure for events and solutions.
    """

    # Local filesystem root of the project; excluded from model serialization
    # so submission.json stays portable across machines.
    project_path: str = Field(default="", exclude=True)
    # Required for a complete submission, but defaulted to empty so a project
    # can be built incrementally and validated later via run_validation().
    team_name: str = ""
    tier: str = ""
    # Free-form description of the compute platform; autofill_nexus_info()
    # can populate it on Linux hosts.
    hardware_info: Optional[dict] = None
    # Event ID -> Event; populated lazily through get_event().
    events: Dict[str, Event] = Field(default_factory=dict)
    repo_url: Optional[str] = None
|
|
84
|
+
|
|
85
|
+
def run_validation_warnings(self) -> List[str]:
    """Validate the submission and return the messages as warnings.

    NOTE(review): despite the name, this method currently produces exactly
    the same message set as :meth:`run_validation`, including messages for
    missing required fields (team_name, tier, repo_url, hardware_info) —
    the two bodies are duplicated. Callers are expected to treat the
    results as non-blocking; confirm before consolidating the two methods.

    Side effect: if ``self.tier`` is not one of the known tiers, it is
    reset to the string ``"None"`` and a message is recorded.

    Returns:
        List[str]: Human-readable warning messages. Empty list indicates
            no warnings.
    """
    messages = []

    # Check metadata completeness (warnings only)
    if not self.team_name:
        messages.append("team_name is required")
    if not self.tier:
        messages.append("tier is required")
    if not self.repo_url:
        messages.append("repo_url is required (GitHub repository URL)")
    if not self.hardware_info:
        messages.append("Hardware info is missing")

    # Validate tier and event IDs
    if self.tier:
        try:
            from ..tier_validation import get_available_tiers, get_event_validation_error, validate_event_id

            # Check if tier is valid first
            available_tiers = get_available_tiers()
            if self.tier not in available_tiers:
                messages.append(
                    f"Invalid tier '{self.tier}' changed to 'None'. Available tiers: {available_tiers}."
                )
                # Automatically change to None tier
                self.tier = "None"

            # Only validate events if tier is not "None"
            if self.tier != "None":
                for event_id in self.events.keys():
                    if not validate_event_id(event_id, self.tier):
                        error_msg = get_event_validation_error(event_id, self.tier)
                        if error_msg:
                            messages.append(error_msg)
        except ImportError:
            # Tier validation module not available, skip validation
            pass
        except ValueError as e:
            # Invalid tier (fallback for other validation errors)
            messages.append(f"Invalid tier '{self.tier}': {e}")

    # Validate all events
    for event_id, event in self.events.items():
        event_messages = event.run_validation()
        for msg in event_messages:
            messages.append(f"Event {event_id}: {msg}")

    # Check for duplicate aliases across events
    alias_messages = self._validate_alias_uniqueness()
    messages.extend(alias_messages)

    return messages
|
|
147
|
+
|
|
148
|
+
def run_validation(self) -> List[str]:
    """Validate the entire submission for missing or incomplete information.

    Performs comprehensive validation of the submission: metadata
    completeness, tier/event-ID checks, per-event validation, and alias
    uniqueness. If ``self.tier`` is not a recognized tier it is reset to
    the string ``"None"`` (with a message recorded).

    Returns:
        List[str]: Human-readable validation messages; empty means all
            checks passed.

    Example:
        >>> submission = load("./my_project")
        >>> messages = submission.run_validation()
        >>> if messages:
        ...     print("Validation issues found:")
        ...     for msg in messages:
        ...         print(f"  - {msg}")
        ... else:
        ...     print("Submission is valid!")
    """
    issues: List[str] = []

    # Required top-level metadata.
    for value, complaint in (
        (self.team_name, "team_name is required"),
        (self.tier, "tier is required"),
        (self.repo_url, "repo_url is required (GitHub repository URL)"),
    ):
        if not value:
            issues.append(complaint)

    if not self.hardware_info:
        issues.append("Hardware info is missing")

    # Tier and per-event ID validation (optional module; skipped if absent).
    if self.tier:
        try:
            from ..tier_validation import get_available_tiers, get_event_validation_error, validate_event_id
        except ImportError:
            pass  # Tier validation module not available, skip validation
        else:
            try:
                allowed = get_available_tiers()
                if self.tier not in allowed:
                    issues.append(f"Invalid tier '{self.tier}' changed to 'None'. Available tiers: {allowed}.")
                    # Reset to the sentinel "None" tier automatically.
                    self.tier = "None"
                if self.tier != "None":
                    for eid in self.events:
                        if not validate_event_id(eid, self.tier):
                            detail = get_event_validation_error(eid, self.tier)
                            if detail:
                                issues.append(detail)
            except ValueError as exc:
                # Fallback for other tier-related validation errors.
                issues.append(f"Invalid tier '{self.tier}': {exc}")

    # Delegate to each event, prefixing messages with the event ID.
    for eid, event in self.events.items():
        issues.extend(f"Event {eid}: {m}" for m in event.run_validation())

    # Duplicate-alias check across all events.
    issues.extend(self._validate_alias_uniqueness())

    return issues
|
|
226
|
+
|
|
227
|
+
def get_event(self, event_id: str) -> Event:
    """Return the event for *event_id*, creating it on first access.

    The new :class:`Event` is registered in ``self.events`` and linked
    back to this submission.
    """
    try:
        return self.events[event_id]
    except KeyError:
        fresh = Event(event_id=event_id, submission=self)
        self.events[event_id] = fresh
        return fresh
|
|
231
|
+
|
|
232
|
+
def autofill_nexus_info(self) -> None:
    """Best-effort autofill of ``hardware_info`` from the local environment.

    Records the Jupyter image spec (from the ``JUPYTER_IMAGE_SPEC``
    environment variable) plus the CPU model and total memory parsed from
    ``/proc`` — so the CPU/memory fields are only populated on Linux
    hosts. Every probe failure is swallowed and logged at debug level;
    this method never raises.
    """
    if self.hardware_info is None:
        self.hardware_info = {}
    try:
        # Presumably set on the Nexus JupyterHub platform — absent elsewhere.
        image = os.environ.get("JUPYTER_IMAGE_SPEC")
        if image:
            self.hardware_info["nexus_image"] = image
    except Exception as exc:
        logging.debug("Failed to read JUPYTER_IMAGE_SPEC: %s", exc)
    try:
        with open("/proc/cpuinfo", "r", encoding="utf-8") as fh:
            for line in fh:
                # First "model name" entry is taken as representative of all cores.
                if line.lower().startswith("model name"):
                    self.hardware_info["cpu_details"] = line.split(":", 1)[1].strip()
                    break
    except OSError as exc:
        logging.debug("Failed to read /proc/cpuinfo: %s", exc)
    try:
        with open("/proc/meminfo", "r", encoding="utf-8") as fh:
            for line in fh:
                if line.startswith("MemTotal"):
                    # MemTotal is reported in kB; convert to GB (2 decimals).
                    mem_kb = int(line.split(":", 1)[1].strip().split()[0])
                    self.hardware_info["memory_gb"] = round(mem_kb / 1024**2, 2)
                    break
    except OSError as exc:
        logging.debug("Failed to read /proc/meminfo: %s", exc)
|
|
258
|
+
|
|
259
|
+
def _get_alias_lookup_path(self) -> Path:
    """Location of the project's alias lookup table (``aliases.json``)."""
    root = Path(self.project_path)
    return root / "aliases.json"
|
|
261
|
+
|
|
262
|
+
def _load_alias_lookup(self) -> Dict[str, str]:
    """Load ``aliases.json``, returning ``{}`` when absent or unreadable.

    A corrupt or unreadable file is logged as a warning and treated as
    empty rather than raising.
    """
    lookup_file = self._get_alias_lookup_path()
    if not lookup_file.exists():
        return {}
    try:
        return json.loads(lookup_file.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError) as exc:
        logging.warning("Failed to load alias lookup table: %s", exc)
        return {}
|
|
272
|
+
|
|
273
|
+
def _save_alias_lookup(self, alias_lookup: Dict[str, str]) -> None:
    """Write the alias lookup table to ``aliases.json``.

    Logs and re-raises any :class:`OSError` so callers can surface the
    failure.
    """
    target = self._get_alias_lookup_path()
    try:
        with target.open("w", encoding="utf-8") as handle:
            json.dump(alias_lookup, handle, indent=2, sort_keys=True)
    except OSError as exc:
        logging.error("Failed to save alias lookup table: %s", exc)
        raise
|
|
281
|
+
|
|
282
|
+
def _build_alias_lookup(self) -> Dict[str, str]:
    """Build the ``"<event_id> <alias>" -> solution_id`` lookup table.

    Solutions without an alias are skipped. As in a plain loop, a later
    duplicate key silently overwrites an earlier one.
    """
    return {
        f"{event_id} {sol.alias}": sol.solution_id
        for event_id, event in self.events.items()
        for sol in event.solutions.values()
        if sol.alias
    }
|
|
290
|
+
|
|
291
|
+
def _validate_alias_uniqueness(self) -> List[str]:
    """Return one error message per duplicated alias occurrence per event.

    Aliases only have to be unique *within* an event; the same alias in
    two different events is fine.
    """
    problems: List[str] = []
    for event_id, event in self.events.items():
        observed = set()
        for sol in event.solutions.values():
            alias = sol.alias
            if not alias:
                continue
            if alias in observed:
                problems.append(
                    f"Duplicate alias '{alias}' found in event '{event_id}'. "
                    f"Alias must be unique within each event."
                )
            observed.add(alias)
    return problems
|
|
304
|
+
|
|
305
|
+
def get_solution_by_alias(self, event_id: str, alias: str) -> Optional[Solution]:
    """Find the solution carrying *alias* within *event_id*.

    Returns None when the event does not exist or no solution in it has
    that alias.
    """
    event = self.events.get(event_id)
    if event is None:
        return None
    return next(
        (sol for sol in event.solutions.values() if sol.alias == alias),
        None,
    )
|
|
313
|
+
|
|
314
|
+
def get_solution_status(self) -> dict:
    """Summarize saved/unsaved solution counts for the whole submission.

    Returns a dict with overall ``saved``/``unsaved``/``total`` counts,
    any duplicate-alias error strings under ``duplicate_aliases``, and a
    per-event breakdown under ``events`` keyed by event ID. Within each
    event, individual solutions are keyed by a truncated solution ID
    (first 8 characters plus ``...``).
    """
    summary: dict = {
        "saved": 0,
        "unsaved": 0,
        "total": 0,
        "events": {},
        "duplicate_aliases": self._validate_alias_uniqueness(),
    }
    for event_id, event in self.events.items():
        per_event: dict = {
            "saved": 0,
            "unsaved": 0,
            "total": len(event.solutions),
            "solutions": {},
        }
        for sol_id, sol in event.solutions.items():
            per_event["solutions"][sol_id[:8] + "..."] = {
                "saved": sol.saved,
                "alias": sol.alias,
                "model_type": sol.model_type,
                "is_active": sol.is_active,
            }
            # Tally into the matching bucket at both levels.
            bucket = "saved" if sol.saved else "unsaved"
            per_event[bucket] += 1
            summary[bucket] += 1
            summary["total"] += 1
        summary["events"][event_id] = per_event
    return summary
|
|
348
|
+
|
|
349
|
+
def print_solution_status(self) -> None:
    """Pretty-print the :meth:`get_solution_status` summary to stdout.

    Console-only convenience: shows overall saved/unsaved counts, any
    alias conflicts, then one line per solution in each event with a
    saved/pending icon, truncated ID, model type, alias and active flag.
    """
    status = self.get_solution_status()
    print("📊 Solution Status Summary:")
    print(f"   Total solutions: {status['total']}")
    print(f"   Saved to disk: {status['saved']}")
    print(f"   Unsaved (in memory): {status['unsaved']}")
    if status["unsaved"] > 0:
        print("   💾 Call submission.save() to persist unsaved solutions")
    if status["duplicate_aliases"]:
        print("   ❌ Alias conflicts found:")
        for error in status["duplicate_aliases"]:
            print(f"      {error}")
        print("   💡 Resolve conflicts before saving")
    for event_id, event_status in status["events"].items():
        print(f"\n📁 Event {event_id}:")
        print(f"   Solutions: {event_status['saved']} saved, {event_status['unsaved']} unsaved")
        for sol_id, sol_status in event_status["solutions"].items():
            # ✅ = persisted to disk, ⏳ = in memory only.
            status_icon = "✅" if sol_status["saved"] else "⏳"
            alias_info = f" (alias: {sol_status['alias']})" if sol_status["alias"] else ""
            active_info = "" if sol_status["is_active"] else " [inactive]"
            print(f"   {status_icon} {sol_id} - {sol_status['model_type']}{alias_info}{active_info}")
|
|
370
|
+
|
|
371
|
+
def save(self, force: bool = False) -> None:
    """Persist the submission, its events and solutions to ``project_path``.

    Workflow:
      1. Run full validation and print the outcome (validation problems do
         NOT block the save — NOTE(review): ``force`` only changes the
         wording of the printed message, it does not gate any behavior;
         confirm that is intended).
      2. Abort with :class:`ValueError` if duplicate aliases exist.
      3. Relocate any temporary (``tmp/``) notes files to their canonical
         ``events/<id>/solutions/<sol_id>.md`` location.
      4. Write ``submission.json``, ``aliases.json``, and each event's
         files, then mark every solution as saved.

    Args:
        force: Cosmetic only at present — selects the "forced save"
            message when validation problems exist.

    Raises:
        ValueError: If alias uniqueness validation fails; nothing is
            written to disk in that case.
    """
    # Run comprehensive validation first
    validation_errors = self.run_validation()
    if validation_errors:
        # NOTE(review): these messages are printed before anything is
        # actually written; the save still proceeds below.
        print("⚠️ Save completed with validation warnings:")
        for error in validation_errors:
            print(f"   {error}")
        print("💡 Fix validation errors before exporting for submission")

        if not force:
            print("💾 Submission saved locally (incomplete - not ready for submission)")
        else:
            print("💾 Submission saved locally (forced save with validation errors)")
    else:
        print("✅ Submission saved successfully (ready for export)")

    # Check for alias conflicts (existing behavior): duplicate aliases are
    # the one condition that actually blocks the save.
    alias_errors = self._validate_alias_uniqueness()
    if alias_errors:
        print("❌ Save failed due to alias validation errors:")
        for error in alias_errors:
            print(f"   {error}")
        print("💡 Solutions with duplicate aliases remain in memory but are not saved")
        print("   Use different aliases or remove aliases to resolve conflicts")
        raise ValueError("Alias validation failed:\n" + "\n".join(alias_errors))

    # Count before writing so the success message can report how many
    # solutions were newly persisted.
    unsaved_count = sum(1 for event in self.events.values() for sol in event.solutions.values() if not sol.saved)
    project = Path(self.project_path)
    events_dir = project / "events"
    events_dir.mkdir(parents=True, exist_ok=True)
    # Move temporary notes files (created under tmp/) to their canonical
    # per-solution location and update each solution's notes_path.
    for event in self.events.values():
        for sol in event.solutions.values():
            if sol.notes_path:
                notes_path = Path(sol.notes_path)
                if notes_path.parts and notes_path.parts[0] == "tmp":
                    canonical = Path("events") / event.event_id / "solutions" / f"{sol.solution_id}.md"
                    src = project / notes_path
                    dst = project / canonical
                    dst.parent.mkdir(parents=True, exist_ok=True)
                    if src.exists():
                        src.replace(dst)
                    # Path is rewritten even if the source file was missing.
                    sol.notes_path = str(canonical)
    # Top-level metadata only; events/solutions are serialized separately.
    with (project / "submission.json").open("w", encoding="utf-8") as fh:
        fh.write(self.model_dump_json(exclude={"events", "project_path"}, indent=2))
    alias_lookup = self._build_alias_lookup()
    self._save_alias_lookup(alias_lookup)
    for event in self.events.values():
        # Re-link in case the event was created detached from this submission.
        event.submission = self
        event._save()
        for sol in event.solutions.values():
            sol.saved = True
    if unsaved_count > 0:
        print(f"✅ Successfully saved {unsaved_count} new solution(s) to disk")
    else:
        print("✅ Successfully saved submission to disk")
    saved_aliases = [
        f"{event_id} {sol.alias}"
        for event_id, event in self.events.items()
        for sol in event.solutions.values()
        if sol.alias and sol.saved
    ]
    if saved_aliases:
        print(f"📋 Saved aliases: {', '.join(saved_aliases)}")
|
|
434
|
+
|
|
435
|
+
def export(self, output_path: str) -> None:
    """Package the submission into a zip archive ready for upload.

    Export is strict: any validation message aborts the export. Only
    *active* solutions are included. For active solutions that have no
    ``relative_probability``, values are derived at export time — by BIC
    weighting when every such solution has a log-likelihood, a positive
    data-point count, and parameters, otherwise by splitting the
    remaining probability mass equally.

    Args:
        output_path: Destination path of the zip file to create.

    Raises:
        ValueError: If validation fails, or if a file referenced by a
            solution's ``posterior_path`` / ``lightcurve_plot_path`` /
            ``lens_plane_plot_path`` does not exist on disk.

    Note:
        Bug fix: the rewritten artifact paths stored inside each exported
        solution JSON previously hard-coded the literal ``(unknown)``
        instead of the real filename (which was computed but unused), so
        they never matched the archive member written below. They now use
        the actual filename, matching ``{sol_dir_arc}/{Path(path).name}``.
    """
    # Run comprehensive validation first - export is strict
    validation_errors = self.run_validation()
    if validation_errors:
        print("❌ Export failed due to validation errors:")
        for error in validation_errors:
            print(f"   {error}")
        print("💡 Fix validation errors before exporting for submission")
        print("💡 Use submission.save() to save incomplete work locally")
        raise ValueError("Validation failed:\n" + "\n".join(validation_errors))

    project = Path(self.project_path)
    with zipfile.ZipFile(output_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        submission_json = project / "submission.json"
        if submission_json.exists():
            zf.write(submission_json, arcname="submission.json")
        events_dir = project / "events"
        for event in self.events.values():
            event_dir = events_dir / event.event_id
            event_json = event_dir / "event.json"
            if event_json.exists():
                zf.write(event_json, arcname=f"events/{event.event_id}/event.json")
            # Only active solutions are exported.
            active_sols = [s for s in event.solutions.values() if s.is_active]
            rel_prob_map: Dict[str, float] = {}
            if active_sols:
                # Probability mass already claimed explicitly by solutions.
                provided_sum = sum(
                    s.relative_probability or 0.0 for s in active_sols if s.relative_probability is not None
                )
                need_calc = [s for s in active_sols if s.relative_probability is None]
                if need_calc:
                    # BIC weighting is only possible when every solution that
                    # lacks a probability has the needed fit statistics.
                    can_calc = True
                    for s in need_calc:
                        if (
                            s.log_likelihood is None
                            or s.n_data_points is None
                            or s.n_data_points <= 0
                            or len(s.parameters) == 0
                        ):
                            can_calc = False
                            break
                    # Mass left to distribute among the un-assigned solutions.
                    remaining = max(1.0 - provided_sum, 0.0)
                    if can_calc:
                        # BIC = k*ln(n) - 2*lnL; lower is better.
                        bic_vals = {
                            s.solution_id: len(s.parameters) * math.log(s.n_data_points) - 2 * s.log_likelihood
                            for s in need_calc
                        }
                        bic_min = min(bic_vals.values())
                        weights = {sid: math.exp(-0.5 * (bic - bic_min)) for sid, bic in bic_vals.items()}
                        wsum = sum(weights.values())
                        for sid, w in weights.items():
                            rel_prob_map[sid] = remaining * w / wsum if wsum > 0 else remaining / len(weights)
                        logging.warning(
                            "relative_probability calculated for event %s using BIC",
                            event.event_id,
                        )
                    else:
                        eq = remaining / len(need_calc) if need_calc else 0.0
                        for s in need_calc:
                            rel_prob_map[s.solution_id] = eq
                        logging.warning(
                            "relative_probability set equally for event %s due to missing data",
                            event.event_id,
                        )
            for sol in active_sols:
                sol_path = event_dir / "solutions" / f"{sol.solution_id}.json"
                if sol_path.exists():
                    arc = f"events/{event.event_id}/solutions/{sol.solution_id}.json"
                    # Work on a copy so the in-memory solution keeps its
                    # original on-disk paths.
                    export_sol = sol.model_copy()
                    for attr in [
                        "posterior_path",
                        "lightcurve_plot_path",
                        "lens_plane_plot_path",
                    ]:
                        path = getattr(sol, attr)
                        if path is not None:
                            filename = Path(path).name
                            # BUG FIX: use the actual filename so the stored
                            # path matches the archive member written below
                            # (was a hard-coded "(unknown)" placeholder).
                            new_path = f"events/{event.event_id}/solutions/{sol.solution_id}/{filename}"
                            setattr(export_sol, attr, new_path)
                    if sol.notes_path:
                        notes_file = Path(self.project_path) / sol.notes_path
                        if notes_file.exists():
                            notes_filename = notes_file.name
                            notes_arc = f"events/{event.event_id}/solutions/{sol.solution_id}/{notes_filename}"
                            export_sol.notes_path = notes_arc
                            zf.write(notes_file, arcname=notes_arc)
                    if export_sol.relative_probability is None:
                        export_sol.relative_probability = rel_prob_map.get(sol.solution_id)
                    zf.writestr(arc, export_sol.model_dump_json(indent=2))
                    sol_dir_arc = f"events/{event.event_id}/solutions/{sol.solution_id}"
                    for attr in [
                        "posterior_path",
                        "lightcurve_plot_path",
                        "lens_plane_plot_path",
                    ]:
                        path = getattr(sol, attr)
                        if path is not None:
                            file_path = Path(self.project_path) / path
                            if not file_path.exists():
                                raise ValueError(
                                    f"Error: File specified by {attr} in solution {sol.solution_id} "
                                    f"does not exist: {file_path}"
                                )
                            zf.write(
                                file_path,
                                arcname=f"{sol_dir_arc}/{Path(path).name}",
                            )
|
|
541
|
+
|
|
542
|
+
def remove_event(self, event_id: str, force: bool = False) -> bool:
    """Remove an event and all of its in-memory solutions.

    Args:
        event_id: ID of the event to remove.
        force: Allow removal even when the event has solutions that were
            already saved to disk.

    Returns:
        bool: True if the event was removed, False if it did not exist.

    Raises:
        ValueError: If the event has saved solutions and ``force`` is
            False.
    """
    event = self.events.get(event_id)
    if event is None:
        return False

    saved_solutions = [sol for sol in event.solutions.values() if sol.saved]
    if saved_solutions and not force:
        raise ValueError(
            f"Cannot remove event '{event_id}' with {len(saved_solutions)} saved solutions without force=True. "
            f"Use event.clear_solutions() to exclude all solutions from exports instead, or "
            f"call remove_event(event_id, force=True) to force removal."
        )

    # Delete temporary (tmp/) notes files belonging to never-saved solutions.
    for sol in event.solutions.values():
        if sol.saved or not sol.notes_path:
            continue
        notes_path = Path(sol.notes_path)
        if not (notes_path.parts and notes_path.parts[0] == "tmp"):
            continue
        full_path = Path(self.project_path) / notes_path
        try:
            if full_path.exists():
                full_path.unlink()
                print(f"🗑️ Removed temporary notes file: {notes_path}")
        except OSError as exc:
            print(f"⚠️ Warning: Could not remove temporary file {notes_path}: {exc}")

    del self.events[event_id]
    print(f"🗑️ Removed event '{event_id}' with {len(event.solutions)} solutions")
    return True
|
|
568
|
+
|
|
569
|
+
# ... (all methods from Submission class, unchanged, including docstrings)
|