lsst-ctrl-bps-htcondor 29.2025.2100.tar.gz → 29.2025.2200.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {lsst_ctrl_bps_htcondor-29.2025.2100/python/lsst_ctrl_bps_htcondor.egg-info → lsst_ctrl_bps_htcondor-29.2025.2200}/PKG-INFO +1 -1
  2. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/doc/lsst.ctrl.bps.htcondor/userguide.rst +20 -2
  3. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/htcondor_service.py +125 -4
  4. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/lssthtc.py +1 -0
  5. lsst_ctrl_bps_htcondor-29.2025.2200/python/lsst/ctrl/bps/htcondor/version.py +2 -0
  6. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200/python/lsst_ctrl_bps_htcondor.egg-info}/PKG-INFO +1 -1
  7. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/tests/test_htcondor_service.py +129 -1
  8. lsst_ctrl_bps_htcondor-29.2025.2100/python/lsst/ctrl/bps/htcondor/version.py +0 -2
  9. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/COPYRIGHT +0 -0
  10. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/LICENSE +0 -0
  11. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/MANIFEST.in +0 -0
  12. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/README.rst +0 -0
  13. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/bsd_license.txt +0 -0
  14. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/doc/lsst.ctrl.bps.htcondor/CHANGES.rst +0 -0
  15. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/doc/lsst.ctrl.bps.htcondor/index.rst +0 -0
  16. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/gpl-v3.0.txt +0 -0
  17. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/pyproject.toml +0 -0
  18. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/__init__.py +0 -0
  19. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/etc/__init__.py +0 -0
  20. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/etc/htcondor_defaults.yaml +0 -0
  21. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/final_post.sh +0 -0
  22. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/handlers.py +0 -0
  23. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/htcondor_config.py +0 -0
  24. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst/ctrl/bps/htcondor/provisioner.py +0 -0
  25. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst_ctrl_bps_htcondor.egg-info/SOURCES.txt +0 -0
  26. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst_ctrl_bps_htcondor.egg-info/dependency_links.txt +0 -0
  27. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst_ctrl_bps_htcondor.egg-info/requires.txt +0 -0
  28. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst_ctrl_bps_htcondor.egg-info/top_level.txt +0 -0
  29. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/python/lsst_ctrl_bps_htcondor.egg-info/zip-safe +0 -0
  30. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/setup.cfg +0 -0
  31. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/tests/test_handlers.py +0 -0
  32. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/tests/test_lssthtc.py +0 -0
  33. {lsst_ctrl_bps_htcondor-29.2025.2100 → lsst_ctrl_bps_htcondor-29.2025.2200}/tests/test_provisioner.py +0 -0
File: PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lsst-ctrl-bps-htcondor
- Version: 29.2025.2100
+ Version: 29.2025.2200
  Summary: HTCondor plugin for lsst-ctrl-bps.
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
  License: BSD 3-Clause License
File: doc/lsst.ctrl.bps.htcondor/userguide.rst
@@ -148,11 +148,28 @@ See `bps submit`_.

  .. Describe any plugin specific aspects of a submission below if any.

- .. _htc-plugin-report:
+
+ .. _htc-plugin-status:

  Checking status
  ---------------

+ See `bps status`_.
+
+ The plugin can take either the HTCondor ID (as shown in ``bps report`` or
+ ``condor_q``) or the submit path.
+
+ For not-completed workflows, the speed of using the ID can depend on
+ whether on the same submit machine (i.e., local schedd) or not and how
+ busy the schedd machines are. For completed workflows, using the ID
+ may not work if the HTCondor logs have rolled over between the time of
+ completion and time of the status command.
+
+ .. _htc-plugin-report:
+
+ Printing a report
+ -----------------
+
  See `bps report`_.

  .. Describe any plugin specific aspects of checking a submission status below
@@ -555,7 +572,8 @@ complete your run.
  .. _HTCondor: https://htcondor.readthedocs.io/en/latest/
  .. _Slurm: https://slurm.schedmd.com/overview.html
  .. _bps cancel: https://pipelines.lsst.io/v/weekly/modules/lsst.ctrl.bps/quickstart.html#canceling-submitted-jobs
- .. _bps report: https://pipelines.lsst.io/v/weekly/modules/lsst.ctrl.bps/quickstart.html#checking-status
+ .. _bps status: https://pipelines.lsst.io/v/weekly/modules/lsst.ctrl.bps/quickstart.html#checking-status
+ .. _bps report: https://pipelines.lsst.io/v/weekly/modules/lsst.ctrl.bps/quickstart.html#printing-a-report
  .. _bps restart: https://pipelines.lsst.io/v/weekly/modules/lsst.ctrl.bps/quickstart.html#restarting-a-failed-run
  .. _bps submit: https://pipelines.lsst.io/v/weekly/modules/lsst.ctrl.bps/quickstart.html#submitting-a-run
  .. _ctrl_bps: https://github.com/lsst/ctrl_bps
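
As a rough sketch of what the new userguide text describes (illustrative only; the `service` object and the id/path values are assumptions, not values from the package), a run's status can be checked through the plugin's Python API with either form of identifier:

    from lsst.ctrl.bps.htcondor.htcondor_service import HTCondorService

    def print_run_status(service: HTCondorService, id_or_path: str) -> None:
        # `id_or_path` may be an HTCondor id (e.g. "12345") or the run's submit
        # directory; the plugin works out which form it was given.
        state, message = service.get_status(id_or_path)
        print(state.name, message or "")

Per the userguide change above, passing the submit path is the safer choice once a run has completed and its HTCondor logs may have rolled over.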
File: python/lsst/ctrl/bps/htcondor/htcondor_service.py
@@ -401,6 +401,54 @@ class HTCondorService(BaseWmsService):
          _LOG.debug("job_ids = %s", job_ids)
          return job_ids

+     def get_status(
+         self,
+         wms_workflow_id: str,
+         hist: float = 1,
+         is_global: bool = False,
+     ) -> tuple[WmsStates, str]:
+         """Return status of run based upon given constraints.
+
+         Parameters
+         ----------
+         wms_workflow_id : `str`
+             Limit to specific run based on id (queue id or path).
+         hist : `float`, optional
+             Limit history search to this many days. Defaults to 1.
+         is_global : `bool`, optional
+             If set, all job queues (and their histories) will be queried for
+             job information. Defaults to False which means that only the local
+             job queue will be queried.
+
+         Returns
+         -------
+         state : `lsst.ctrl.bps.WmsStates`
+             Status of single run from given information.
+         message : `str`
+             Extra message for status command to print. This could be pointers
+             to documentation or to WMS specific commands.
+         """
+         _LOG.debug("get_status: id=%s, hist=%s, is_global=%s", wms_workflow_id, hist, is_global)
+
+         id_type = _wms_id_type(wms_workflow_id)
+         _LOG.debug("id_type = %s", id_type.name)
+
+         if id_type == WmsIdType.LOCAL:
+             schedulers = _locate_schedds(locate_all=is_global)
+             _LOG.debug("schedulers = %s", schedulers)
+             state, message = _get_status_from_id(wms_workflow_id, hist, schedds=schedulers)
+         elif id_type == WmsIdType.GLOBAL:
+             schedulers = _locate_schedds(locate_all=True)
+             _LOG.debug("schedulers = %s", schedulers)
+             state, message = _get_status_from_id(wms_workflow_id, hist, schedds=schedulers)
+         elif id_type == WmsIdType.PATH:
+             state, message = _get_status_from_path(wms_workflow_id)
+         else:
+             state, message = WmsStates.UNKNOWN, "Invalid job id"
+         _LOG.debug("state: %s, %s", state, message)
+
+         return state, message
+
      def report(
          self,
          wms_workflow_id=None,
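
Purely as a hedged illustration of the two optional parameters documented in the docstring above (not code shipped in the package; the `service` instance and the queue id are assumed to exist), a caller could widen the search like this:

    from lsst.ctrl.bps.htcondor.htcondor_service import HTCondorService

    def summarize_run(service: HTCondorService, queue_id: str) -> str:
        # Query every discoverable schedd (is_global=True) and look two days
        # back in the HTCondor history (hist=2) for DAGs that already finished.
        state, message = service.get_status(queue_id, hist=2, is_global=True)
        return f"{queue_id}: {state.name}" + (f" ({message})" if message else "")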
@@ -1014,6 +1062,77 @@ def _handle_job_inputs(generic_workflow: GenericWorkflow, job_name: str, use_sha
      return htc_commands


+ def _get_status_from_id(
+     wms_workflow_id: str, hist: float, schedds: dict[str, htcondor.Schedd]
+ ) -> tuple[WmsStates, str]:
+     """Gather run information using workflow id.
+
+     Parameters
+     ----------
+     wms_workflow_id : `str`
+         Limit to specific run based on id.
+     hist : `float`
+         Limit history search to this many days.
+     schedds : `dict` [ `str`, `htcondor.Schedd` ]
+         HTCondor schedulers which to query for job information. If empty
+         dictionary, all queries will be run against the local scheduler only.
+
+     Returns
+     -------
+     state : `lsst.ctrl.bps.WmsStates`
+         Status for the corresponding run.
+     message : `str`
+         Message with extra error information.
+     """
+     _LOG.debug("_get_status_from_id: id=%s, hist=%s, schedds=%s", wms_workflow_id, hist, schedds)
+
+     message = ""
+
+     # Collect information about the job by querying HTCondor schedd and
+     # HTCondor history.
+     schedd_dag_info = _get_info_from_schedd(wms_workflow_id, hist, schedds)
+     if len(schedd_dag_info) == 1:
+         schedd_name = next(iter(schedd_dag_info))
+         dag_id = next(iter(schedd_dag_info[schedd_name]))
+         dag_ad = schedd_dag_info[schedd_name][dag_id]
+         state = _htc_status_to_wms_state(dag_ad)
+     else:
+         state = WmsStates.UNKNOWN
+         message = f"DAGMan job {wms_workflow_id} not found in queue or history. Check id or try path."
+     return state, message
+
+
+ def _get_status_from_path(wms_path: str | os.PathLike) -> tuple[WmsStates, str]:
+     """Gather run status from a given run directory.
+
+     Parameters
+     ----------
+     wms_path : `str` | `os.PathLike`
+         The directory containing the submit side files (e.g., HTCondor files).
+
+     Returns
+     -------
+     state : `lsst.ctrl.bps.WmsStates`
+         Status for the run.
+     message : `str`
+         Message to be printed.
+     """
+     wms_path = Path(wms_path).resolve()
+     message = ""
+     try:
+         wms_workflow_id, dag_ad = read_dag_log(wms_path)
+     except FileNotFoundError:
+         wms_workflow_id = MISSING_ID
+         message = f"DAGMan log not found in {wms_path}. Check path."
+
+     if wms_workflow_id == MISSING_ID:
+         state = WmsStates.UNKNOWN
+     else:
+         state = _htc_status_to_wms_state(dag_ad[wms_workflow_id])
+
+     return state, message
+
+
  def _report_from_path(wms_path):
      """Gather run information from a given run directory.

@@ -1139,11 +1258,11 @@ def _get_info_from_schedd(
      ----------
      wms_workflow_id : `str`
          Limit to specific run based on id.
-     hist : `int`
+     hist : `float`
          Limit history search to this many days.
-     schedds : `dict` [ `str`, `htcondor.Schedd` ], optional
-         HTCondor schedulers which to query for job information. If None
-         (default), all queries will be run against the local scheduler only.
+     schedds : `dict` [ `str`, `htcondor.Schedd` ]
+         HTCondor schedulers which to query for job information. If empty
+         dictionary, all queries will be run against the local scheduler only.

      Returns
      -------
@@ -1152,6 +1271,8 @@ def _get_info_from_schedd(
          Scheduler, local HTCondor job ids are mapped to their respective
          classads.
      """
+     _LOG.debug("_get_info_from_schedd: id=%s, hist=%s, schedds=%s", wms_workflow_id, hist, schedds)
+
      dag_constraint = 'regexp("dagman$", Cmd)'
      try:
          cluster_id = int(float(wms_workflow_id))
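
The helpers added above are private, but as a sketch of how their behaviours complement each other (assumed usage, not code shipped in the package), an id lookup can fall back to the submit-path lookup when the DAG has already aged out of the queue and history:

    from lsst.ctrl.bps import WmsStates
    from lsst.ctrl.bps.htcondor import htcondor_service

    def status_with_fallback(queue_id: str, submit_path: str) -> tuple[WmsStates, str]:
        # An empty schedds dict means the query runs against the local schedd only.
        state, message = htcondor_service._get_status_from_id(queue_id, 1, schedds={})
        if state == WmsStates.UNKNOWN:
            # Id not found (e.g., logs rolled over); fall back to the run directory.
            state, message = htcondor_service._get_status_from_path(submit_path)
        return state, message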
File: python/lsst/ctrl/bps/htcondor/lssthtc.py
@@ -1376,6 +1376,7 @@ def condor_search(constraint=None, hist=None, schedds=None):

      job_info = condor_q(constraint=constraint, schedds=schedds)
      if hist is not None:
+         _LOG.debug("Searching history going back %s days", hist)
          epoch = (datetime.now() - timedelta(days=hist)).timestamp()
          constraint += f" && (CompletionDate >= {epoch} || JobFinishedHookDone >= {epoch})"
          hist_info = condor_history(constraint, schedds=schedds)
File: python/lsst/ctrl/bps/htcondor/version.py (added in 29.2025.2200)
@@ -0,0 +1,2 @@
+ __all__ = ["__version__"]
+ __version__ = "29.2025.2200"
File: python/lsst_ctrl_bps_htcondor.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lsst-ctrl-bps-htcondor
- Version: 29.2025.2100
+ Version: 29.2025.2200
  Summary: HTCondor plugin for lsst-ctrl-bps.
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
  License: BSD 3-Clause License
File: tests/test_htcondor_service.py
@@ -94,8 +94,8 @@ class HTCondorServiceTestCase(unittest.TestCase):
          self.assertEqual(self.service.defaults_uri, HTC_DEFAULTS_URI)
          self.assertFalse(self.service.defaults_uri.isdir())

-     @unittest.mock.patch.object(htcondor.Collector, "locate", return_value=LOCATE_SUCCESS)
      @unittest.mock.patch.object(htcondor.SecMan, "ping", return_value=PING_SUCCESS)
+     @unittest.mock.patch.object(htcondor.Collector, "locate", return_value=LOCATE_SUCCESS)
      def testPingSuccess(self, mock_locate, mock_ping):
          status, message = self.service.ping(None)
          self.assertEqual(status, 0)
@@ -116,6 +116,71 @@ class HTCondorServiceTestCase(unittest.TestCase):
          self.assertEqual(status, 1)
          self.assertEqual(message, "Permission problem with Schedd service.")

+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._get_status_from_id")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._locate_schedds")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._wms_id_type")
+     def testGetStatusLocal(self, mock_type, mock_locate, mock_status):
+         mock_type.return_value = htcondor_service.WmsIdType.LOCAL
+         mock_locate.return_value = {}
+         mock_status.return_value = (WmsStates.RUNNING, "")
+
+         fake_id = "100"
+         state, message = self.service.get_status(fake_id)
+
+         mock_type.assert_called_once_with(fake_id)
+         mock_locate.assert_called_once_with(locate_all=False)
+         mock_status.assert_called_once_with(fake_id, 1, schedds={})
+
+         self.assertEqual(state, WmsStates.RUNNING)
+         self.assertEqual(message, "")
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._get_status_from_id")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._locate_schedds")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._wms_id_type")
+     def testGetStatusGlobal(self, mock_type, mock_locate, mock_status):
+         mock_type.return_value = htcondor_service.WmsIdType.GLOBAL
+         mock_locate.return_value = {}
+         fake_message = ""
+         mock_status.return_value = (WmsStates.RUNNING, fake_message)
+
+         fake_id = "100"
+         state, message = self.service.get_status(fake_id, 2)
+
+         mock_type.assert_called_once_with(fake_id)
+         mock_locate.assert_called_once_with(locate_all=True)
+         mock_status.assert_called_once_with(fake_id, 2, schedds={})
+
+         self.assertEqual(state, WmsStates.RUNNING)
+         self.assertEqual(message, fake_message)
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._get_status_from_path")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._wms_id_type")
+     def testGetStatusPath(self, mock_type, mock_status):
+         fake_message = "fake message"
+         mock_type.return_value = htcondor_service.WmsIdType.PATH
+         mock_status.return_value = (WmsStates.FAILED, fake_message)
+
+         fake_id = "/fake/path"
+         state, message = self.service.get_status(fake_id)
+
+         mock_type.assert_called_once_with(fake_id)
+         mock_status.assert_called_once_with(fake_id)
+
+         self.assertEqual(state, WmsStates.FAILED)
+         self.assertEqual(message, fake_message)
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._wms_id_type")
+     def testGetStatusUnknownType(self, mock_type):
+         mock_type.return_value = htcondor_service.WmsIdType.UNKNOWN
+
+         fake_id = "100.0"
+         state, message = self.service.get_status(fake_id)
+
+         mock_type.assert_called_once_with(fake_id)
+
+         self.assertEqual(state, WmsStates.UNKNOWN)
+         self.assertEqual(message, "Invalid job id")
+

  class GetExitCodeSummaryTestCase(unittest.TestCase):
      """Test the function responsible for creating exit code summary."""
@@ -1290,5 +1355,68 @@ class CreatePeriodicRemoveExprTestCase(unittest.TestCase):
          self.assertEqual(results, truth)


+ class GetStatusFromIdTestCase(unittest.TestCase):
+     """Test _get_status_from_id function."""
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._get_info_from_schedd")
+     def testNotFound(self, mock_get):
+         mock_get.return_value = {}
+
+         state, message = htcondor_service._get_status_from_id("100", 0, {})
+
+         mock_get.assert_called_once_with("100", 0, {})
+
+         self.assertEqual(state, WmsStates.UNKNOWN)
+         self.assertEqual(message, "DAGMan job 100 not found in queue or history. Check id or try path.")
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._htc_status_to_wms_state")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._get_info_from_schedd")
+     def testFound(self, mock_get, mock_conversion):
+         fake_id = "100.0"
+         dag_ads = {fake_id: {"JobStatus": lssthtc.JobStatus.RUNNING}}
+         mock_get.return_value = {"schedd1": dag_ads}
+         mock_conversion.return_value = WmsStates.RUNNING
+
+         state, message = htcondor_service._get_status_from_id(fake_id, 0, {})
+
+         mock_get.assert_called_once_with(fake_id, 0, {})
+         mock_conversion.assert_called_once_with(dag_ads[fake_id])
+
+         self.assertEqual(state, WmsStates.RUNNING)
+         self.assertEqual(message, "")
+
+
+ class GetStatusFromPathTestCase(unittest.TestCase):
+     """Test _get_status_from_path function."""
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service.read_dag_log")
+     def testNoDagLog(self, mock_read):
+         mock_read.side_effect = FileNotFoundError
+
+         fake_path = "/fake/path"
+         state, message = htcondor_service._get_status_from_path(fake_path)
+
+         mock_read.assert_called_once_with(Path(fake_path))
+
+         self.assertEqual(state, WmsStates.UNKNOWN)
+         self.assertEqual(message, f"DAGMan log not found in {fake_path}. Check path.")
+
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service._htc_status_to_wms_state")
+     @unittest.mock.patch("lsst.ctrl.bps.htcondor.htcondor_service.read_dag_log")
+     def testSuccess(self, mock_read, mock_conversion):
+         dag_ads = {"100.0": {"JobStatus": lssthtc.JobStatus.COMPLETED, "ExitBySignal": False, "ExitCode": 0}}
+         mock_read.return_value = ("100.0", dag_ads)
+         mock_conversion.return_value = WmsStates.SUCCEEDED
+
+         fake_path = "/fake/path"
+         state, message = htcondor_service._get_status_from_path(fake_path)
+
+         mock_read.assert_called_once_with(Path(fake_path))
+         mock_conversion.assert_called_once_with(dag_ads["100.0"])
+
+         self.assertEqual(state, WmsStates.SUCCEEDED)
+         self.assertEqual(message, "")
+
+
  if __name__ == "__main__":
      unittest.main()
File: python/lsst/ctrl/bps/htcondor/version.py (removed from 29.2025.2100)
@@ -1,2 +0,0 @@
- __all__ = ["__version__"]
- __version__ = "29.2025.2100"