citrascope 0.5.2__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- citrascope/citra_scope_daemon.py +22 -38
- citrascope/hardware/abstract_astro_hardware_adapter.py +64 -6
- citrascope/hardware/kstars_dbus_adapter.py +875 -30
- citrascope/hardware/kstars_scheduler_template.esl +30 -0
- citrascope/hardware/kstars_sequence_template.esq +16 -0
- citrascope/hardware/nina_adv_http_adapter.py +74 -59
- citrascope/hardware/nina_adv_http_survey_template.json +4 -4
- citrascope/settings/citrascope_settings.py +25 -4
- citrascope/tasks/runner.py +103 -0
- citrascope/tasks/scope/static_telescope_task.py +6 -1
- citrascope/web/app.py +82 -37
- citrascope/web/static/app.js +83 -0
- citrascope/web/static/config.js +244 -39
- citrascope/web/templates/dashboard.html +62 -27
- {citrascope-0.5.2.dist-info → citrascope-0.7.0.dist-info}/METADATA +19 -1
- {citrascope-0.5.2.dist-info → citrascope-0.7.0.dist-info}/RECORD +19 -17
- {citrascope-0.5.2.dist-info → citrascope-0.7.0.dist-info}/WHEEL +0 -0
- {citrascope-0.5.2.dist-info → citrascope-0.7.0.dist-info}/entry_points.txt +0 -0
- {citrascope-0.5.2.dist-info → citrascope-0.7.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,7 +1,12 @@
-import
+import json
 import logging
+import shutil
 import time
 from pathlib import Path
+from typing import Any, Dict, Optional
+
+import dbus
+from platformdirs import user_cache_dir, user_data_dir
 
 from citrascope.hardware.abstract_astro_hardware_adapter import (
     AbstractAstroHardwareAdapter,
@@ -11,7 +16,42 @@ from citrascope.hardware.abstract_astro_hardware_adapter import (
 
 
 class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
-    """
+    """
+    Adapter for controlling astronomical equipment through KStars via DBus.
+
+    DBus Interface Documentation (from introspection):
+
+    Mount Interface (org.kde.kstars.Ekos.Mount):
+        Methods:
+        - slew(double RA, double DEC) -> bool: Slew telescope to coordinates
+        - sync(double RA, double DEC) -> bool: Sync telescope at coordinates
+        - abort() -> bool: Abort current slew
+        - park() -> bool: Park telescope
+        - unpark() -> bool: Unpark telescope
+
+        Properties:
+        - equatorialCoords (ad): Current RA/Dec as list of doubles [RA, Dec]
+        - slewStatus (i): Current slew status (0=idle, others=slewing)
+        - status (i): Mount status enumeration
+        - canPark (b): Whether mount supports parking
+
+    Scheduler Interface (org.kde.kstars.Ekos.Scheduler):
+        Methods:
+        - loadScheduler(string fileURL) -> bool: Load ESL scheduler file
+        - setSequence(string sequenceFileURL): Set sequence file (ESQ)
+        - start(): Start scheduler execution
+        - stop(): Stop scheduler
+        - removeAllJobs(): Clear all jobs
+        - resetAllJobs(): Reset job states
+
+        Properties:
+        - status (i): Scheduler state enumeration
+        - currentJobName (s): Name of currently executing job
+        - jsonJobs (s): JSON representation of all jobs
+
+        Signals:
+        - newStatus(int status): Emitted when scheduler state changes
+    """
 
     def __init__(self, logger: logging.Logger, images_dir: Path, **kwargs):
         """
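To make the interface documentation above concrete, here is a minimal dbus-python sketch (not part of the package) that reaches the Mount interface the same way the adapter below does; it assumes KStars/Ekos is already running on the session bus under the default service name.

```python
import dbus

# Connect to the session bus and grab the Ekos Mount object, as the adapter does.
bus = dbus.SessionBus()
mount_obj = bus.get_object("org.kde.kstars", "/KStars/Ekos/Mount")

# Methods (slew, park, abort, ...) live on the Ekos Mount interface ...
mount = dbus.Interface(mount_obj, "org.kde.kstars.Ekos.Mount")
# ... while properties such as equatorialCoords go through the standard
# org.freedesktop.DBus.Properties interface.
props = dbus.Interface(mount_obj, "org.freedesktop.DBus.Properties")

coords = props.Get("org.kde.kstars.Ekos.Mount", "equatorialCoords")  # [RA in hours, Dec in degrees]
print(f"RA={float(coords[0]):.4f}h Dec={float(coords[1]):.4f} deg")
# mount.slew(ra_hours, dec_deg) would start a slew; the adapter wraps this in _do_point_telescope().
```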
@@ -20,17 +60,28 @@ class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
         Args:
             logger: Logger instance for logging messages
             images_dir: Path to the images directory
-            **kwargs: Configuration including bus_name
+            **kwargs: Configuration including bus_name, ccd_name, filter_wheel_name
         """
-        super().__init__(images_dir=images_dir)
+        super().__init__(images_dir=images_dir, **kwargs)
         self.logger: logging.Logger = logger
-        self.bus_name = kwargs.get("bus_name"
-        self.
-        self.
-        self.
-
-
-        self.
+        self.bus_name = kwargs.get("bus_name") or "org.kde.kstars"
+        self.ccd_name = kwargs.get("ccd_name") or "CCD Simulator"
+        self.filter_wheel_name = kwargs.get("filter_wheel_name") or ""
+        self.optical_train_name = kwargs.get("optical_train_name") or "Primary"
+
+        # Capture parameters
+        self.exposure_time = kwargs.get("exposure_time", 5.0)
+        self.frame_count = kwargs.get("frame_count", 1)
+        self.binning_x = kwargs.get("binning_x", 1)
+        self.binning_y = kwargs.get("binning_y", 1)
+        self.image_format = kwargs.get("image_format", "Mono")
+
+        self.bus: dbus.SessionBus | None = None
+        self.kstars: dbus.Interface | None = None
+        self.ekos: dbus.Interface | None = None
+        self.mount: dbus.Interface | None = None
+        self.camera: dbus.Interface | None = None
+        self.scheduler: dbus.Interface | None = None
 
     @classmethod
     def get_settings_schema(cls) -> list[SettingSchemaEntry]:
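For reference, a hypothetical construction call exercising the keyword arguments read in `__init__` above; the logger and images_dir values are placeholders, and the values mirror the settings schema added in the next hunk.

```python
import logging
from pathlib import Path

# Placeholder logger/path; kwargs correspond to the settings read in __init__.
adapter = KStarsDBusAdapter(
    logger=logging.getLogger("citrascope"),
    images_dir=Path("/tmp/citrascope-images"),
    bus_name="org.kde.kstars",
    ccd_name="CCD Simulator",
    filter_wheel_name="",          # empty -> single-filter behaviour
    optical_train_name="Primary",
    exposure_time=1.0,
    frame_count=1,
    binning_x=1,
    binning_y=1,
    image_format="Mono",
)
```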
@@ -43,20 +94,604 @@ class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
                 "friendly_name": "D-Bus Service Name",
                 "type": "str",
                 "default": "org.kde.kstars",
-                "description": "D-Bus service name for KStars",
-                "required":
+                "description": "D-Bus service name for KStars (default: org.kde.kstars)",
+                "required": False,
                 "placeholder": "org.kde.kstars",
-            }
+            },
+            {
+                "name": "ccd_name",
+                "friendly_name": "Camera/CCD Device Name",
+                "type": "str",
+                "default": "CCD Simulator",
+                "description": "Name of the camera device in your Ekos profile (check Ekos logs on connect for available devices)",
+                "required": False,
+                "placeholder": "CCD Simulator",
+            },
+            {
+                "name": "filter_wheel_name",
+                "friendly_name": "Filter Wheel Device Name",
+                "type": "str",
+                "default": "",
+                "description": "Name of the filter wheel device (leave empty if no filter wheel)",
+                "required": False,
+                "placeholder": "Filter Simulator",
+            },
+            {
+                "name": "optical_train_name",
+                "friendly_name": "Optical Train Name",
+                "type": "str",
+                "default": "Primary",
+                "description": "Name of the optical train in your Ekos profile (check Ekos logs on connect for available trains)",
+                "required": False,
+                "placeholder": "Primary",
+            },
+            {
+                "name": "exposure_time",
+                "friendly_name": "Exposure Time (seconds)",
+                "type": "float",
+                "default": 1.0,
+                "description": "Exposure duration in seconds for each frame",
+                "required": False,
+                "placeholder": "1.0",
+                "min": 0.001,
+                "max": 300.0,
+            },
+            {
+                "name": "frame_count",
+                "friendly_name": "Frame Count",
+                "type": "int",
+                "default": 1,
+                "description": "Number of frames to capture per observation",
+                "required": False,
+                "placeholder": "1",
+                "min": 1,
+                "max": 100,
+            },
+            {
+                "name": "binning_x",
+                "friendly_name": "Binning X",
+                "type": "int",
+                "default": 1,
+                "description": "Horizontal pixel binning (1=no binning, 2=2x2, etc.)",
+                "required": False,
+                "placeholder": "1",
+                "min": 1,
+                "max": 4,
+            },
+            {
+                "name": "binning_y",
+                "friendly_name": "Binning Y",
+                "type": "int",
+                "default": 1,
+                "description": "Vertical pixel binning (1=no binning, 2=2x2, etc.)",
+                "required": False,
+                "placeholder": "1",
+                "min": 1,
+                "max": 4,
+            },
+            {
+                "name": "image_format",
+                "friendly_name": "Image Format",
+                "type": "str",
+                "default": "Mono",
+                "description": "Camera image format (Mono for monochrome, RGGB/RGB for color cameras)",
+                "required": False,
+                "placeholder": "Mono",
+                "options": ["Mono", "RGGB", "RGB"],
+            },
         ]
 
     def _do_point_telescope(self, ra: float, dec: float):
-
+        """
+        Point the telescope to the specified RA/Dec coordinates.
+
+        Args:
+            ra: Right Ascension in degrees
+            dec: Declination in degrees
+
+        Raises:
+            RuntimeError: If mount is not connected or slew fails
+        """
+        if not self.mount:
+            raise RuntimeError("Mount interface not connected. Call connect() first.")
+
+        try:
+            # Convert RA from degrees to hours for KStars (KStars expects RA in hours)
+            ra_hours = ra / 15.0
+
+            self.logger.info(f"Slewing telescope to RA={ra_hours:.4f}h ({ra:.4f}°), Dec={dec:.4f}°")
+
+            # Call the slew method via DBus
+            success = self.mount.slew(ra_hours, dec)
+
+            if not success:
+                raise RuntimeError(f"Mount slew command failed for RA={ra_hours}h, Dec={dec}°")
+
+            self.logger.info("Slew command sent successfully")
+
+        except Exception as e:
+            self.logger.error(f"Failed to slew telescope: {e}")
+            raise RuntimeError(f"Telescope slew failed: {e}")
 
     def get_observation_strategy(self) -> ObservationStrategy:
         return ObservationStrategy.SEQUENCE_TO_CONTROLLER
 
-    def
-
+    def _load_template(self, template_name: str) -> str:
+        """Load a template file from the hardware directory."""
+        template_path = Path(__file__).parent / template_name
+        if not template_path.exists():
+            raise FileNotFoundError(f"Template not found: {template_path}")
+        return template_path.read_text()
+
+    def _create_sequence_file(self, task_id: str, satellite_data: dict, output_dir: Path) -> Path:
+        """
+        Create an ESQ sequence file from template.
+
+        Args:
+            task_id: Unique task identifier
+            satellite_data: Dictionary containing target information
+            output_dir: Base output directory for captures
+
+        Returns:
+            Path to the created sequence file
+        """
+        template = self._load_template("kstars_sequence_template.esq")
+
+        # Extract target info
+        target_name = satellite_data.get("name", "Unknown").replace(" ", "_")
+
+        # Generate job blocks based on filter configuration
+        jobs_xml = self._generate_job_blocks(output_dir)
+
+        # Replace placeholders
+        sequence_content = template.replace("{{JOBS}}", jobs_xml)
+        sequence_content = sequence_content.replace("{{OUTPUT_DIR}}", str(output_dir))
+        sequence_content = sequence_content.replace("{{TASK_ID}}", task_id)
+        sequence_content = sequence_content.replace("{{TARGET_NAME}}", target_name)
+        sequence_content = sequence_content.replace("{{CCD_NAME}}", self.ccd_name)
+        sequence_content = sequence_content.replace("{{FILTER_WHEEL_NAME}}", self.filter_wheel_name)
+        sequence_content = sequence_content.replace("{{OPTICAL_TRAIN}}", self.optical_train_name)
+
+        # Write to temporary file
+        temp_dir = Path(user_cache_dir("citrascope")) / "kstars"
+        temp_dir.mkdir(exist_ok=True, parents=True)
+        sequence_file = temp_dir / f"{task_id}_sequence.esq"
+        sequence_file.write_text(sequence_content)
+
+        self.logger.info(f"Created sequence file: {sequence_file}")
+        return sequence_file
+
+    def _generate_job_blocks(self, output_dir: Path) -> str:
+        """
+        Generate XML job blocks for each filter in filter_map.
+        If no filters discovered, generates single job with no filter.
+
+        Args:
+            output_dir: Base output directory for captures
+
+        Returns:
+            XML string containing one or more <Job> blocks
+        """
+        job_template = """ <Job>
+            <Exposure>{exposure}</Exposure>
+            <Format>{format}</Format>
+            <Encoding>FITS</Encoding>
+            <Binning>
+                <X>{binning_x}</X>
+                <Y>{binning_y}</Y>
+            </Binning>
+            <Frame>
+                <X>0</X>
+                <Y>0</Y>
+                <W>0</W>
+                <H>0</H>
+            </Frame>
+            <Temperature force='false'>0</Temperature>
+            <Filter>{filter_name}</Filter>
+            <Type>Light</Type>
+            <Count>{count}</Count>
+            <Delay>0</Delay>
+            <GuideDitherPerJob>0</GuideDitherPerJob>
+            <FITSDirectory>{output_dir}</FITSDirectory>
+            <PlaceholderFormat>%t_%F</PlaceholderFormat>
+            <PlaceholderSuffix>0</PlaceholderSuffix>
+            <UploadMode>0</UploadMode>
+            <Properties>
+            </Properties>
+            <Calibration>
+                <PreAction>
+                    <Type>1</Type>
+                </PreAction>
+                <FlatDuration dark='false'>
+                    <Type>Manual</Type>
+                </FlatDuration>
+            </Calibration>
+        </Job>
+        """
+
+        jobs = []
+
+        # Filter to only enabled filters
+        enabled_filters = {fid: fdata for fid, fdata in self.filter_map.items() if fdata.get("enabled", True)}
+
+        if enabled_filters:
+            # Multi-filter mode: create one job per enabled filter
+            self.logger.info(
+                f"Generating {len(enabled_filters)} jobs for enabled filters: "
+                f"{[f['name'] for f in enabled_filters.values()]}"
+            )
+            for filter_idx in sorted(enabled_filters.keys()):
+                filter_info = enabled_filters[filter_idx]
+                filter_name = filter_info["name"]
+
+                job_xml = job_template.format(
+                    exposure=self.exposure_time,
+                    format=self.image_format,
+                    binning_x=self.binning_x,
+                    binning_y=self.binning_y,
+                    filter_name=filter_name,
+                    count=self.frame_count,
+                    output_dir=str(output_dir),
+                )
+                jobs.append(job_xml)
+        else:
+            # Single-filter mode: use '--' for no filter
+            filter_name = "--" if not self.filter_wheel_name else "Luminance"
+            self.logger.info(f"Generating single job with filter: {filter_name}")
+
+            job_xml = job_template.format(
+                exposure=self.exposure_time,
+                format=self.image_format,
+                binning_x=self.binning_x,
+                binning_y=self.binning_y,
+                filter_name=filter_name,
+                count=self.frame_count,
+                output_dir=str(output_dir),
+            )
+            jobs.append(job_xml)
+
+        return "\n".join(jobs)
+
+    def _create_scheduler_job(self, task_id: str, satellite_data: dict, sequence_file: Path) -> Path:
+        """
+        Create an ESL scheduler job file from template.
+
+        Args:
+            task_id: Unique task identifier
+            satellite_data: Dictionary containing target coordinates
+            sequence_file: Path to the ESQ sequence file
+
+        Returns:
+            Path to the created scheduler job file
+        """
+        template = self._load_template("kstars_scheduler_template.esl")
+
+        # Extract target info
+        target_name = satellite_data.get("name", "Unknown")
+        ra_deg = satellite_data.get("ra", 0.0)  # RA in degrees
+        dec_deg = satellite_data.get("dec", 0.0)  # Dec in degrees
+
+        # Convert RA from degrees to hours for Ekos
+        ra_hours = ra_deg / 15.0
+
+        self.logger.info(f"Target: {target_name} at RA={ra_deg:.4f}° ({ra_hours:.4f}h), Dec={dec_deg:.4f}°")
+
+        # Replace placeholders
+        job_name = f"CitraScope: {target_name} (Task: {task_id})"
+        scheduler_content = template.replace("{{JOB_NAME}}", job_name)
+        scheduler_content = scheduler_content.replace("{{TARGET_RA}}", f"{ra_hours:.6f}")
+        scheduler_content = scheduler_content.replace("{{TARGET_DEC}}", f"{dec_deg:.6f}")
+        scheduler_content = scheduler_content.replace("{{SEQUENCE_FILE}}", str(sequence_file))
+        scheduler_content = scheduler_content.replace("{{MIN_ALTITUDE}}", "0")  # 0° minimum altitude for satellites
+
+        # Write to temporary file
+        temp_dir = Path(user_cache_dir("citrascope")) / "kstars"
+        temp_dir.mkdir(exist_ok=True, parents=True)
+        job_file = temp_dir / f"{task_id}_job.esl"
+        job_file.write_text(scheduler_content)
+
+        self.logger.info(f"Created scheduler job: {job_file}")
+        return job_file
+
+    def _wait_for_job_completion(
+        self, timeout: int = 300, task_id: str = "", output_dir: Optional[Path] = None
+    ) -> bool:
+        """
+        Poll the scheduler status until job completes or times out.
+        With Loop completion, we poll for images and stop when we have all expected images.
+        For multi-filter sequences, waits until images from all filters are captured.
+
+        Args:
+            timeout: Maximum time to wait in seconds
+            task_id: Task identifier for image detection
+            output_dir: Output directory for image detection
+
+        Returns:
+            True if job completed successfully, False otherwise
+        """
+        if not self.scheduler:
+            raise RuntimeError("Scheduler interface not connected")
+
+        assert self.bus is not None
+
+        # Calculate expected number of images based on enabled filters
+        enabled_filter_count = (
+            sum(1 for f in self.filter_map.values() if f.get("enabled", True)) if self.filter_map else 1
+        )
+        expected_total_images = enabled_filter_count * self.frame_count
+
+        self.logger.info(
+            f"Waiting for scheduler job completion (timeout: {timeout}s, "
+            f"expecting {expected_total_images} images across {enabled_filter_count} filters)..."
+        )
+        start_time = time.time()
+
+        # Get scheduler object for property access
+        scheduler_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Scheduler")
+        props = dbus.Interface(scheduler_obj, "org.freedesktop.DBus.Properties")
+
+        while time.time() - start_time < timeout:
+            try:
+                # Get scheduler status (0=Idle, 1=Running, 2=Paused, etc.)
+                status = int(props.Get("org.kde.kstars.Ekos.Scheduler", "status"))
+                current_job = props.Get("org.kde.kstars.Ekos.Scheduler", "currentJobName")
+
+                self.logger.debug(f"Scheduler status: {status}, Current job: {current_job}")
+
+                # Check for images if we're using Loop completion
+                if task_id and output_dir:
+                    images = self._retrieve_captured_images(task_id, output_dir)
+                    if len(images) >= expected_total_images:
+                        self.logger.info(
+                            f"Found {len(images)} images (expected {expected_total_images}), stopping scheduler"
+                        )
+                        self.scheduler.stop()
+                        time.sleep(1)  # Give it time to stop
+                        return True
+                    elif images:
+                        self.logger.debug(f"Found {len(images)}/{expected_total_images} images so far, continuing...")
+
+                # Status 0 = Idle, meaning job finished or not started
+                # If we were running and now idle, job completed
+                if status == 0 and current_job == "":
+                    self.logger.info("Scheduler job completed")
+                    return True
+
+                time.sleep(5)  # Poll every 5 seconds (slower since we're checking files)
+
+            except dbus.DBusException as e:
+                if "ServiceUnknown" in str(e) or "NoReply" in str(e):
+                    self.logger.error("KStars appears to have crashed or disconnected")
+                    return False
+                self.logger.warning(f"Error checking scheduler status: {e}")
+                time.sleep(2)
+            except Exception as e:
+                self.logger.warning(f"Error checking scheduler status: {e}")
+                time.sleep(2)
+
+        self.logger.error(f"Scheduler job did not complete within {timeout}s")
+        return False
+
+    def _retrieve_captured_images(self, task_id: str, output_dir: Path) -> list[str]:
+        """
+        Find and return paths to captured images for this task.
+
+        Args:
+            task_id: Unique task identifier
+            output_dir: Base output directory where images were saved
+
+        Returns:
+            List of absolute paths to captured FITS files
+        """
+        self.logger.debug(f"Looking for captured images in: {output_dir}")
+
+        # Check if base output directory exists
+        if not output_dir.exists():
+            self.logger.warning(f"Base output directory does not exist: {output_dir}")
+            # List parent directory to see what's there
+            parent = output_dir.parent
+            if parent.exists():
+                self.logger.debug(f"Parent directory contents: {list(parent.iterdir())}")
+            return []
+
+        # List what's in the base directory
+        self.logger.debug(f"Base directory contents: {list(output_dir.iterdir())}")
+
+        # Look for images in task-specific subdirectory
+        task_dir = output_dir / task_id
+
+        if not task_dir.exists():
+            self.logger.error(f"Task directory does not exist: {task_dir}")
+            self.logger.error(f"This likely indicates Ekos failed to create the capture directory")
+            self.logger.error(f"Expected directory structure: {output_dir}/{task_id}/")
+            raise RuntimeError(
+                f"Task-specific capture directory not found: {task_dir}. "
+                f"Ekos may have failed to start the capture sequence."
+            )
+
+        # Find all FITS files in task directory and subdirectories
+        fits_files = list(task_dir.rglob("*.fits")) + list(task_dir.rglob("*.fit"))
+
+        # Since files are in task-specific directory, we don't need to filter by filename
+        matching_files = [str(f.absolute()) for f in fits_files]
+
+        self.logger.info(f"Found {len(matching_files)} captured images for task {task_id}")
+        for img_path in matching_files:
+            self.logger.debug(f"  - {img_path}")
+
+        return matching_files
+
+    def perform_observation_sequence(self, task_id: str, satellite_data: dict) -> list[str]:
+        """
+        Execute a complete observation sequence using Ekos Scheduler.
+
+        Args:
+            task_id: Unique task identifier
+            satellite_data: Dictionary with keys: 'name', and either 'ra'/'dec' or TLE data
+
+        Returns:
+            List of paths to captured FITS files
+
+        Raises:
+            RuntimeError: If scheduler not connected or job execution fails
+        """
+        if not self.scheduler:
+            raise RuntimeError("Scheduler interface not connected. Call connect() first.")
+
+        # Calculate current position if not already provided
+        if "ra" not in satellite_data or "dec" not in satellite_data:
+            # For now, require RA/Dec to be provided by caller
+            # TODO: Add TLE propagation capability to adapter for full autonomy
+            raise ValueError("satellite_data must include 'ra' and 'dec' keys (in degrees)")
+
+        try:
+            # Setup output directory
+            output_dir = Path(user_data_dir("citrascope")) / "kstars_captures"
+            output_dir.mkdir(exist_ok=True, parents=True)
+
+            # Clear task-specific directory to prevent Ekos from thinking job is already done
+            task_output_dir = output_dir / task_id
+            if task_output_dir.exists():
+                shutil.rmtree(task_output_dir)
+                self.logger.info(f"Cleared existing output directory: {task_output_dir}")
+
+            # Create task directory for this observation
+            task_output_dir.mkdir(exist_ok=True, parents=True)
+            self.logger.info(f"Output directory: {task_output_dir}")
+
+            # Create sequence and scheduler job files (use task-specific directory)
+            sequence_file = self._create_sequence_file(task_id, satellite_data, task_output_dir)
+            job_file = self._create_scheduler_job(task_id, satellite_data, sequence_file)
+
+            # Ensure temp files are cleaned up even on failure
+            try:
+                self._execute_observation(task_id, output_dir, sequence_file, job_file)
+            finally:
+                # Cleanup temp files
+                self._cleanup_temp_files(sequence_file, job_file)
+
+            # Retrieve and return captured images
+            image_paths = self._retrieve_captured_images(task_id, output_dir)
+            if not image_paths:
+                raise RuntimeError(f"No images captured for task {task_id}")
+
+            self.logger.info(f"Observation sequence complete: {len(image_paths)} images captured")
+            return image_paths
+
+        except Exception as e:
+            self.logger.error(f"Failed to execute observation sequence: {e}")
+            raise
+
+    def _execute_observation(self, task_id: str, output_dir: Path, sequence_file: Path, job_file: Path):
+        """Execute the observation by loading scheduler job and waiting for completion.
+
+        Args:
+            task_id: Task identifier
+            output_dir: Base output directory
+            sequence_file: Path to ESQ sequence file
+            job_file: Path to ESL scheduler job file
+        """
+        assert self.scheduler is not None
+        assert self.bus is not None
+
+        # Load scheduler job via DBus
+        self.logger.info(f"Loading scheduler job: {job_file}")
+
+        # Verify files exist and have content
+        if not job_file.exists():
+            raise RuntimeError(f"Scheduler job file does not exist: {job_file}")
+        if not sequence_file.exists():
+            raise RuntimeError(f"Sequence file does not exist: {sequence_file}")
+
+        self.logger.debug(f"Job file size: {job_file.stat().st_size} bytes")
+        self.logger.debug(f"Sequence file size: {sequence_file.stat().st_size} bytes")
+
+        # Load the scheduler job
+        try:
+            # Clear any existing jobs first to prevent state conflicts
+            try:
+                self.scheduler.removeAllJobs()
+                self.logger.info("Cleared existing scheduler jobs")
+                time.sleep(0.5)  # Brief pause after clearing
+            except Exception as clear_error:
+                self.logger.warning(f"Could not clear jobs (might not exist): {clear_error}")
+
+            success = self.scheduler.loadScheduler(str(job_file))
+            self.logger.debug(f"loadScheduler() returned: {success}")
+        except Exception as dbus_error:
+            self.logger.error(f"DBus error calling loadScheduler: {dbus_error}")
+            raise RuntimeError(f"DBus error loading scheduler job: {dbus_error}")
+
+        if not success:
+            # Log file contents for debugging
+            self.logger.error(f"Scheduler rejected job file. Contents:")
+            self.logger.error(job_file.read_text()[:500])  # First 500 chars
+            raise RuntimeError(f"Ekos Scheduler rejected job file: {job_file}")
+
+        self.logger.info("Scheduler job loaded successfully")
+
+        # Verify what was loaded before starting
+        try:
+            scheduler_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Scheduler")
+            props = dbus.Interface(scheduler_obj, "org.freedesktop.DBus.Properties")
+            json_jobs = props.Get("org.kde.kstars.Ekos.Scheduler", "jsonJobs")
+            self.logger.info(f"Loaded jobs: {json_jobs}")
+
+            # Parse and validate the job looks correct
+            jobs = json.loads(str(json_jobs))
+            if jobs:
+                job = jobs[0]  # We only load one job at a time
+                self.logger.info(f"Loaded {len(jobs)} job(s):")
+                self.logger.info(f"  Name: {job.get('name', 'Unknown')}")
+                self.logger.info(f"  State: {job.get('state', 'Unknown')}")
+                self.logger.info(f"  RA: {job.get('targetRA', 'N/A')}h, Dec: {job.get('targetDEC', 'N/A')}°")
+                self.logger.info(f"  Altitude: {job.get('altitudeFormatted', 'N/A')}")
+                self.logger.info(f"  Repeats: {job.get('repeatsRemaining', 0)}/{job.get('repeatsRequired', 0)}")
+                self.logger.info(f"  Completed: {job.get('completedCount', 0)}")
+            else:
+                self.logger.warning("No jobs found in scheduler after loading!")
+        except Exception as e:
+            self.logger.warning(f"Could not validate loaded jobs: {e}")
+
+        # Start scheduler
+        self.logger.info("Starting scheduler execution...")
+        self.scheduler.start()
+
+        # Give it a moment to start
+        time.sleep(1)
+
+        # Check scheduler logs immediately after starting
+        try:
+            scheduler_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Scheduler")
+            props = dbus.Interface(scheduler_obj, "org.freedesktop.DBus.Properties")
+            log_lines = props.Get("org.kde.kstars.Ekos.Scheduler", "logText")
+            if log_lines:
+                self.logger.info("Scheduler logs after start:")
+                for line in log_lines[-10:]:  # Last 10 lines
+                    self.logger.info(f"  Ekos: {line}")
+        except Exception as e:
+            self.logger.debug(f"Could not read scheduler logs: {e}")
+
+        # Wait for completion (with Loop mode, this polls for images and stops when found)
+        if not self._wait_for_job_completion(timeout=300, task_id=task_id, output_dir=output_dir):
+            raise RuntimeError("Scheduler job did not complete in time")
+
+    def _cleanup_temp_files(self, sequence_file: Path, job_file: Path):
+        """Clean up temporary ESQ and ESL files.
+
+        Args:
+            sequence_file: Path to ESQ sequence file
+            job_file: Path to ESL scheduler job file
+        """
+        try:
+            if sequence_file.exists():
+                sequence_file.unlink()
+                self.logger.debug(f"Cleaned up sequence file: {sequence_file.name}")
+            if job_file.exists():
+                job_file.unlink()
+                self.logger.debug(f"Cleaned up job file: {job_file.name}")
+        except Exception as e:
+            self.logger.warning(f"Failed to cleanup temp files: {e}")
 
     def connect(self) -> bool:
         """
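A small worked example (with made-up filter names) of the stopping rule introduced in `_wait_for_job_completion` above: the scheduler is stopped once the number of FITS files on disk reaches enabled filters × frame_count.

```python
# Made-up filter_map for illustration; the computation mirrors the adapter code.
filter_map = {
    0: {"name": "Red", "enabled": True},
    1: {"name": "Green", "enabled": False},
    2: {"name": "Blue", "enabled": True},
}
frame_count = 3

enabled_filter_count = (
    sum(1 for f in filter_map.values() if f.get("enabled", True)) if filter_map else 1
)
expected_total_images = enabled_filter_count * frame_count
print(expected_total_images)  # 2 enabled filters x 3 frames = 6 images
```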
@@ -66,13 +701,6 @@ class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
             bool: True if connection successful, False otherwise
         """
         try:
-            # Import dbus here to make it an optional dependency
-            try:
-                import dbus
-            except ImportError:
-                self.logger.error("dbus-python is not installed. Install with: pip install dbus-python")
-                return False
-
             # Connect to the session bus
             self.logger.info("Connecting to DBus session bus...")
             self.bus = dbus.SessionBus()
@@ -131,6 +759,12 @@ class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
             except dbus.DBusException as e:
                 self.logger.warning(f"Scheduler interface not available: {e}")
 
+            # Validate devices and imaging train
+            self._validate_devices()
+
+            # Discover available filters (non-fatal if fails)
+            self.discover_filters()
+
             self.logger.info("Successfully connected to KStars via DBus")
             return True
 
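A hedged usage sketch of the new connect-time behaviour: after `connect()` returns, `_validate_devices()` and `discover_filters()` have already run, so filter support can be checked directly. The `adapter` instance here is assumed to be configured as in the earlier placeholder example.

```python
# Assumes `adapter` was constructed as in the earlier placeholder example.
if adapter.connect():
    if adapter.supports_filter_management():
        names = [f["name"] for f in adapter.filter_map.values()]
        print("Discovered filters:", names)
    else:
        print("No filter wheel configured; single-filter capture will be used")
```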
@@ -138,18 +772,161 @@ class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
             self.logger.error(f"Failed to connect to KStars via DBus: {e}")
             return False
 
+    def _validate_devices(self):
+        """Check what optical train/devices are configured in Ekos."""
+        try:
+            assert self.bus is not None
+            # Use Capture module (not Camera)
+            capture_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Capture")
+            props = dbus.Interface(capture_obj, "org.freedesktop.DBus.Properties")
+
+            optical_train = props.Get("org.kde.kstars.Ekos.Capture", "opticalTrain")
+            camera_name = props.Get("org.kde.kstars.Ekos.Capture", "camera")
+            filter_wheel = props.Get("org.kde.kstars.Ekos.Capture", "filterWheel")
+
+            self.logger.info(f"Ekos optical train: {optical_train}")
+            self.logger.info(f"Ekos camera device: {camera_name}")
+            self.logger.info(f"Ekos filter wheel: {filter_wheel}")
+
+        except Exception as e:
+            self.logger.warning(f"Could not read Ekos devices: {e}")
+            # Non-fatal - continue with defaults
+
+    def discover_filters(self):
+        """Discover available filters from Ekos filter wheel via INDI interface.
+
+        This is called during connect() to populate filter_map.
+        Uses INDI interface to query FILTER_NAME properties for each slot.
+        If no filter wheel is configured or discovery fails, filter_map remains empty
+        and adapter falls back to single-filter behavior.
+        """
+        try:
+            if not self.bus:
+                self.logger.debug("Cannot discover filters: DBus not connected")
+                return
+
+            self.logger.info("Attempting to discover filters...")
+
+            # Get filter wheel device name from Capture module
+            capture_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Capture")
+            capture_props = dbus.Interface(capture_obj, "org.freedesktop.DBus.Properties")
+
+            try:
+                filter_wheel_name = capture_props.Get("org.kde.kstars.Ekos.Capture", "filterWheel")
+                if not filter_wheel_name or filter_wheel_name == "--":
+                    self.logger.info("No filter wheel configured in Capture module")
+                    return
+                self.logger.info(f"Filter wheel detected: {filter_wheel_name}")
+            except Exception as e:
+                self.logger.debug(f"Could not get filter wheel name: {e}")
+                return
+
+            # Use INDI interface to query filter properties
+            indi_obj = self.bus.get_object(self.bus_name, "/KStars/INDI")
+            indi_iface = dbus.Interface(indi_obj, "org.kde.kstars.INDI")
+
+            # Get all properties for the filter wheel device
+            properties = indi_iface.getProperties(filter_wheel_name)
+
+            # Find FILTER_NAME properties (FILTER_SLOT_NAME_1, FILTER_SLOT_NAME_2, etc.)
+            filter_slots = []
+            for prop in properties:
+                if "FILTER_NAME.FILTER_SLOT_NAME_" in prop:
+                    slot_num = prop.split("_")[-1]
+                    try:
+                        filter_slots.append(int(slot_num))
+                    except ValueError:
+                        continue
+
+            if not filter_slots:
+                self.logger.warning(f"No FILTER_NAME properties found for {filter_wheel_name}")
+                return
+
+            # Query each filter slot name and merge with pre-populated filter_map
+            filter_slots.sort()
+            for slot_num in filter_slots:
+                try:
+                    filter_name = indi_iface.getText(filter_wheel_name, "FILTER_NAME", f"FILTER_SLOT_NAME_{slot_num}")
+                    # Use 0-based indexing for filter_map (slot 1 -> index 0)
+                    filter_idx = slot_num - 1
+
+                    # If filter already in map (from saved settings), preserve focus position and enabled state
+                    if filter_idx in self.filter_map:
+                        focus_position = self.filter_map[filter_idx].get("focus_position", 0)
+                        enabled = self.filter_map[filter_idx].get("enabled", True)
+                        self.logger.debug(
+                            f"Filter slot {slot_num} ({filter_name}): using saved focus position {focus_position}, enabled: {enabled}"
+                        )
+                    else:
+                        focus_position = 0
+                        enabled = True  # Default new filters to enabled
+                        self.logger.debug(
+                            f"Filter slot {slot_num} ({filter_name}): new filter, using default focus position"
+                        )
+
+                    self.filter_map[filter_idx] = {
+                        "name": filter_name,
+                        "focus_position": focus_position,
+                        "enabled": enabled,
+                    }
+                except Exception as e:
+                    self.logger.warning(f"Could not read filter slot {slot_num}: {e}")
+
+            if self.filter_map:
+                self.logger.info(
+                    f"Discovered {len(self.filter_map)} filters: {[f['name'] for f in self.filter_map.values()]}"
+                )
+            else:
+                self.logger.warning("No filters discovered from filter wheel")
+
+        except Exception as e:
+            self.logger.info(f"Filter discovery failed (non-fatal): {e}")
+            # Leave filter_map empty, use single-filter mode
+
+    def supports_autofocus(self) -> bool:
+        """Indicates that KStars adapter does not support autofocus yet.
+
+        Returns:
+            bool: False (autofocus not implemented).
+        """
+        return False
+
+    def supports_filter_management(self) -> bool:
+        """Indicates whether this adapter supports filter/focus management.
+
+        Returns:
+            bool: True if filters were discovered, False otherwise.
+        """
+        return bool(self.filter_map)
+
     def disconnect(self):
         raise NotImplementedError
 
     def is_telescope_connected(self) -> bool:
         """Check if telescope is connected and responsive."""
-
-
+        if not self.mount or not self.bus:
+            return False
+        try:
+            # Actually test the connection by reading a property
+            mount_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Mount")
+            props = dbus.Interface(mount_obj, "org.freedesktop.DBus.Properties")
+            props.Get("org.kde.kstars.Ekos.Mount", "status")
+            return True
+        except (dbus.DBusException, Exception):
+            return False
 
     def is_camera_connected(self) -> bool:
         """Check if camera is connected and responsive."""
-
-
+        if not self.camera or not self.bus:
+            return False
+        try:
+            # Actually test the connection by reading a property
+            capture_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Capture")
+            props = dbus.Interface(capture_obj, "org.freedesktop.DBus.Properties")
+            props.Get("org.kde.kstars.Ekos.Capture", "status")
+            return True
+        except (dbus.DBusException, Exception):
+            return False
 
     def list_devices(self) -> list[str]:
         raise NotImplementedError
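For illustration, the INDI query pattern that `discover_filters()` above relies on can be reproduced standalone roughly as follows; the wheel name "Filter Simulator" is a placeholder device, and the calls are the same ones the adapter makes.

```python
import dbus

bus = dbus.SessionBus()
indi_obj = bus.get_object("org.kde.kstars", "/KStars/INDI")
indi = dbus.Interface(indi_obj, "org.kde.kstars.INDI")

# Enumerate FILTER_NAME slots for a (placeholder) filter wheel device.
for prop in indi.getProperties("Filter Simulator"):
    if "FILTER_NAME.FILTER_SLOT_NAME_" in prop:
        slot = int(prop.split("_")[-1])
        name = indi.getText("Filter Simulator", "FILTER_NAME", f"FILTER_SLOT_NAME_{slot}")
        print(slot, name)
```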
@@ -158,10 +935,78 @@ class KStarsDBusAdapter(AbstractAstroHardwareAdapter):
         raise NotImplementedError
 
     def get_telescope_direction(self) -> tuple[float, float]:
-
+        """
+        Get the current telescope pointing direction.
+
+        Returns:
+            tuple[float, float]: Current (RA, Dec) in degrees
+
+        Raises:
+            RuntimeError: If mount is not connected or position query fails
+        """
+        if not self.mount:
+            raise RuntimeError("Mount interface not connected. Call connect() first.")
+
+        assert self.bus is not None
+
+        try:
+            # Get the mount object for property access
+            mount_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Mount")
+            props = dbus.Interface(mount_obj, "org.freedesktop.DBus.Properties")
+
+            # Get equatorial coordinates property (returns list [RA in hours, Dec in degrees])
+            coords = props.Get("org.kde.kstars.Ekos.Mount", "equatorialCoords")
+
+            if not coords or len(coords) < 2:
+                raise RuntimeError("Failed to retrieve valid coordinates from mount")
+
+            # coords[0] is RA in hours, coords[1] is Dec in degrees
+            ra_hours = float(coords[0])
+            dec_deg = float(coords[1])
+
+            # Convert RA from hours to degrees
+            ra_deg = ra_hours * 15.0
+
+            self.logger.debug(f"Current telescope position: RA={ra_deg:.4f}° ({ra_hours:.4f}h), Dec={dec_deg:.4f}°")
+
+            return (ra_deg, dec_deg)
+
+        except Exception as e:
+            self.logger.error(f"Failed to get telescope position: {e}")
+            raise RuntimeError(f"Failed to get telescope position: {e}")
 
     def telescope_is_moving(self) -> bool:
-
+        """
+        Check if the telescope is currently slewing.
+
+        Returns:
+            bool: True if telescope is slewing, False if idle or tracking
+
+        Raises:
+            RuntimeError: If mount is not connected or status query fails
+        """
+        if not self.mount:
+            raise RuntimeError("Mount interface not connected. Call connect() first.")
+
+        assert self.bus is not None
+
+        try:
+            # Get the mount object for property access
+            mount_obj = self.bus.get_object(self.bus_name, "/KStars/Ekos/Mount")
+            props = dbus.Interface(mount_obj, "org.freedesktop.DBus.Properties")
+
+            # Get slewStatus property (0 = idle, non-zero = slewing)
+            slew_status = props.Get("org.kde.kstars.Ekos.Mount", "slewStatus")
+
+            is_slewing = int(slew_status) != 0
+
+            self.logger.debug(f"Mount slew status: {slew_status} (is_slewing={is_slewing})")
+
+            return is_slewing
+
+        except Exception as e:
+            self.logger.error(f"Failed to get telescope slew status: {e}")
+            raise RuntimeError(f"Failed to get telescope slew status: {e}")
 
     def select_camera(self, device_name: str) -> bool:
         raise NotImplementedError
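A note on units in the new mount code: the DBus side reports and accepts RA in hours, while the adapter's public API uses degrees, so the conversion is a fixed factor of 15 in both directions.

```python
# Illustrative values only; 15 degrees of RA per hour of RA.
ra_hours = 5.5                                # as read from the equatorialCoords property
ra_deg = ra_hours * 15.0                      # 82.5 deg, what get_telescope_direction() returns
assert abs(ra_deg / 15.0 - ra_hours) < 1e-9   # _do_point_telescope() converts back to hours
```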