lsst-ctrl-bps-htcondor 28.2025.800-py3-none-any.whl → 29.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lsst/ctrl/bps/htcondor/htcondor_service.py +81 -32
- lsst/ctrl/bps/htcondor/lssthtc.py +54 -45
- lsst/ctrl/bps/htcondor/version.py +1 -1
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/METADATA +3 -2
- lsst_ctrl_bps_htcondor-29.0.0.dist-info/RECORD +19 -0
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/WHEEL +1 -1
- lsst_ctrl_bps_htcondor-28.2025.800.dist-info/RECORD +0 -19
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/COPYRIGHT +0 -0
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/LICENSE +0 -0
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/bsd_license.txt +0 -0
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/gpl-v3.0.txt +0 -0
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/top_level.txt +0 -0
- {lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/zip-safe +0 -0
lsst/ctrl/bps/htcondor/htcondor_service.py

@@ -79,7 +79,7 @@ from .lssthtc import (
     read_dag_log,
     read_dag_status,
     read_node_status,
-    summary_from_dag,
+    summarize_dag,
     write_dag_info,
 )
 from .provisioner import Provisioner

@@ -154,7 +154,7 @@ class HTCondorService(BaseWmsService):
         if enable_provisioning:
             provisioner = Provisioner(config)
             provisioner.configure()
-            provisioner.prepare("
+            provisioner.prepare("provisioningJob.bash", prefix=out_prefix)
             provisioner.provision(workflow.dag)

         with time_this(

@@ -1317,9 +1317,9 @@ def _create_detailed_report_from_jobs(
         job_state_counts=dag_ad.get("state_counts", state_counts),
         exit_code_summary=_get_exit_code_summary(jobs),
     )
-
+    specific_info = WmsSpecificInfo()
     for job_id, job_ad in jobs.items():
-        if not is_service_job(job_id):
+        if not is_service_job(job_ad):
             try:
                 job_report = WmsJobReport(
                     wms_id=job_id,

@@ -1334,33 +1334,85 @@ def _create_detailed_report_from_jobs(
                 _LOG.error("Job missing key '%s': %s", str(ex), job_ad)
                 raise
         else:
-
-
-
-
-
-
-
-
-
-
-
-
-                )
-            else:
-                _LOG.warning(
-                    "Service job with id '%s' (label '%s'): no handler, no action taken", job_id, job_label
-                )
+            _LOG.debug(
+                "Found service job: id='%s', name='%s', label='%s', NodeStatus='%s', JobStatus='%s'",
+                job_id,
+                job_ad["DAGNodeName"],
+                job_ad.get("bps_job_label", "MISS"),
+                job_ad.get("NodeStatus", "MISS"),
+                job_ad.get("JobStatus", "MISS"),
+            )
+            _add_service_job_specific_info(job_ad, specific_info)
+
+    if specific_info:
+        report.specific_info = specific_info

     # Add the removed entry to restore the original content of the dictionary.
     # The ordering of keys will be change permanently though.
     jobs.update({wms_workflow_id: dag_ad})

+    # Workflow will exit with non-zero DAG_STATUS if problem with
+    # any of the wms jobs. So change FAILED to SUCCEEDED if all
+    # payload jobs SUCCEEDED.
+    if report.total_number_jobs == report.job_state_counts[WmsStates.SUCCEEDED]:
+        report.state = WmsStates.SUCCEEDED
+
     run_reports = {report.wms_id: report}
     _LOG.debug("_create_detailed_report: run_reports = %s", run_reports)
     return run_reports


+def _add_service_job_specific_info(job_ad: dict[str, Any], specific_info: WmsSpecificInfo) -> None:
+    """Generate report information for service job.
+
+    Parameters
+    ----------
+    job_ad : `dict` [`str`, `Any`]
+        Provisioning job information.
+    specific_info : `lsst.ctrl.bps.WmsSpecificInfo`
+        Where to add message.
+    """
+    status_details = ""
+    job_status = _htc_status_to_wms_state(job_ad)
+
+    # Service jobs in queue are deleted when DAG is done.
+    # To get accurate status, need to check other info.
+    if (
+        job_status == WmsStates.DELETED
+        and "Reason" in job_ad
+        and (
+            "Removed by DAGMan" in job_ad["Reason"]
+            or "removed because <OtherJobRemoveRequirements = DAGManJobId =?=" in job_ad["Reason"]
+            or "DAG is exiting and writing rescue file." in job_ad["Reason"]
+        )
+    ):
+        if "HoldReason" in job_ad:
+            # HoldReason exists even if released, so check.
+            if "job_released_time" in job_ad and job_ad["job_held_time"] < job_ad["job_released_time"]:
+                # If released, assume running until deleted.
+                job_status = WmsStates.SUCCEEDED
+                status_details = ""
+            else:
+                # If job held when deleted by DAGMan, still want to
+                # report hold reason
+                status_details = f"(Job was held for the following reason: {job_ad['HoldReason']})"
+
+        else:
+            job_status = WmsStates.SUCCEEDED
+    elif job_status == WmsStates.SUCCEEDED:
+        status_details = "(Note: Finished before workflow.)"
+    elif job_status == WmsStates.HELD:
+        status_details = f"({job_ad['HoldReason']})"
+
+    template = "Status of {job_name}: {status} {status_details}"
+    context = {
+        "job_name": job_ad["DAGNodeName"],
+        "status": job_status.name,
+        "status_details": status_details,
+    }
+    specific_info.add_message(template=template, context=context)
+
+
 def _summary_report(user, hist, pass_thru, schedds=None):
     """Gather run information to be used in generating summary reports.

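The hold/release handling in the new _add_service_job_specific_info is the subtle part of this change. Below is a minimal standalone sketch of that decision, assuming a WmsStates stand-in and a made-up job ad; the real code uses lsst.ctrl.bps.WmsStates, _htc_status_to_wms_state, and the full set of removal reasons shown in the diff.

from enum import Enum, auto


class WmsStates(Enum):  # minimal stand-in for lsst.ctrl.bps.WmsStates
    SUCCEEDED = auto()
    DELETED = auto()
    HELD = auto()


def service_job_status(job_ad: dict) -> tuple[WmsStates, str]:
    """Mirror the deleted-service-job adjustment from the diff."""
    status_details = ""
    job_status = WmsStates.DELETED  # pretend _htc_status_to_wms_state said DELETED
    if "Reason" in job_ad and "Removed by DAGMan" in job_ad["Reason"]:
        if "HoldReason" in job_ad:
            # HoldReason survives a release, so compare hold/release times.
            if "job_released_time" in job_ad and job_ad["job_held_time"] < job_ad["job_released_time"]:
                job_status = WmsStates.SUCCEEDED  # released: ran until DAGMan removed it
            else:
                status_details = f"(Job was held for the following reason: {job_ad['HoldReason']})"
        else:
            job_status = WmsStates.SUCCEEDED
    return job_status, status_details


# Held once, released, then removed when the workflow finished:
ad = {"Reason": "Removed by DAGMan", "HoldReason": "memory", "job_held_time": 10, "job_released_time": 20}
print(service_job_status(ad))  # (<WmsStates.SUCCEEDED: 1>, '')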
@@ -1509,7 +1561,7 @@ def _get_run_summary(job):
     """
     summary = job.get("bps_job_summary", job.get("bps_run_summary", None))
     if not summary:
-        summary, _ = summary_from_dag(job["Iwd"])
+        summary, _, _ = summarize_dag(job["Iwd"])
     if not summary:
         _LOG.warning("Could not get run summary for htcondor job: %s", job)
     _LOG.debug("_get_run_summary: summary=%s", summary)

@@ -1587,7 +1639,7 @@ def _get_state_counts_from_jobs(
     """
     state_counts = dict.fromkeys(WmsStates, 0)
     for job_id, job_ad in jobs.items():
-        if job_id != wms_workflow_id and not is_service_job(job_id):
+        if job_id != wms_workflow_id and not is_service_job(job_ad):
             state_counts[_htc_status_to_wms_state(job_ad)] += 1
     total_counted = sum(state_counts.values())

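A standalone sketch of what the is_service_job(job_ad) change does to state counting: the DAGMan job itself and any service job are skipped from the per-state tallies. Raw HTCondor JobStatus integers stand in here for the real _htc_status_to_wms_state mapping, and all ids and values are made up.

from collections import Counter

jobs = {
    "122.0": {"bps_job_type": "payload", "JobStatus": 4},  # the DAGMan workflow job
    "123.0": {"bps_job_type": "payload", "JobStatus": 4},  # a payload job
    "124.0": {"bps_job_type": "service", "JobStatus": 3},  # service job: excluded
}
wms_workflow_id = "122.0"

state_counts = Counter(
    job_ad["JobStatus"]
    for job_id, job_ad in jobs.items()
    if job_id != wms_workflow_id and job_ad.get("bps_job_type", "MISSING") != "service"
)
print(state_counts)  # Counter({4: 1}) -- only payload job 123.0 is tallied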
@@ -2143,13 +2195,13 @@ def _gather_site_values(config, compute_site):
     return site_values


-def is_service_job(job_id: str) -> bool:
+def is_service_job(job_ad: dict[str, Any]) -> bool:
     """Determine if a job is a service one.

     Parameters
     ----------
-    job_id : `str`
-        HTCondor job id.
+    job_ad : `dict` [`str`, Any]
+        Information about an HTCondor job.

     Returns
     -------
@@ -2159,10 +2211,7 @@ def is_service_job(job_id: str) -> bool:
     Notes
     -----
     At the moment, HTCondor does not provide a native way to distinguish
-    between payload and service jobs in the workflow.
-
-    :py:func:`read_node_status()` (service jobs are given ids with ClusterId=0
-    and ProcId=some integer). If it changes, this function needs to be
-    updated too.
+    between payload and service jobs in the workflow. This code depends
+    on read_node_status adding bps_job_type.
     """
-    return
+    return job_ad.get("bps_job_type", "MISSING") == "service"

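The new implementation keys off the bps_job_type attribute that read_node_status now injects into each job ad, instead of parsing magic job ids. A small usage sketch with hypothetical job ads:

def is_service_job(job_ad: dict) -> bool:
    """Same test as the new implementation in htcondor_service.py."""
    return job_ad.get("bps_job_type", "MISSING") == "service"


payload_ad = {"DAGNodeName": "pipetaskInit", "bps_job_type": "payload"}
service_ad = {"DAGNodeName": "provisioningJob", "bps_job_type": "service"}
legacy_ad = {"DAGNodeName": "someNode"}  # ad without the new attribute

print(is_service_job(payload_ad))  # False
print(is_service_job(service_ad))  # True
print(is_service_job(legacy_ad))   # False ("MISSING" default)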
lsst/ctrl/bps/htcondor/lssthtc.py

@@ -63,7 +63,8 @@ __all__ = [
     "read_dag_nodes_log",
     "read_dag_status",
     "read_node_status",
-    "summary_from_dag",
+    "summarize_dag",
+    "update_job_info",
     "update_job_info",
     "write_dag_info",
 ]

@@ -1245,7 +1246,7 @@ def update_job_info(job_info, other_info):
     return job_info


-def summary_from_dag(dir_name):
+def summarize_dag(dir_name: str) -> tuple[str, dict[str, str], dict[str, str]]:
     """Build bps_run_summary string from dag file.

     Parameters

@@ -1256,51 +1257,64 @@ def summary_from_dag(dir_name):
     Returns
     -------
     summary : `str`
-        Semi-colon separated list of job labels and counts
+        Semi-colon separated list of job labels and counts
         (Same format as saved in dag classad).
     job_name_to_label : `dict` [`str`, `str`]
         Mapping of job names to job labels.
+    job_name_to_type : `dict` [`str`, `str`]
+        Mapping of job names to job types
+        (e.g., payload, final, service).
     """
     # Later code depends upon insertion order
-    counts = defaultdict(int)
+    counts: defaultdict[str, int] = defaultdict(int)  # counts of payload jobs per label
     job_name_to_label = {}
+    job_name_to_type = {}
     try:
         dag = next(Path(dir_name).glob("*.dag"))
         with open(dag) as fh:
             for line in fh:
+                job_name = ""
                 if line.startswith("JOB"):
-                    m = re.match(r'JOB (\S+) "jobs/([^/]+)/', line)
+                    m = re.match(r'JOB (\S+) "?jobs/([^/]+)/', line)
                     if m:
+                        job_name = m.group(1)
                         label = m.group(2)
                         if label == "init":
                             label = "pipetaskInit"
-                        job_name_to_label[m.group(1)] = label
                         counts[label] += 1
                     else:  # Check if Pegasus submission
                         m = re.match(r"JOB (\S+) (\S+)", line)
                         if m:
+                            job_name = m.group(1)
                            label = pegasus_name_to_label(m.group(1))
-                            job_name_to_label[m.group(1)] = label
                            counts[label] += 1
                        else:
                            _LOG.warning("Parse DAG: unmatched job line: %s", line)
+                    job_type = "payload"
                 elif line.startswith("FINAL"):
                     m = re.match(r"FINAL (\S+) jobs/([^/]+)/", line)
                     if m:
+                        job_name = m.group(1)
                         label = m.group(2)
-
-
+                        counts[label] += 1  # final counts a payload job.
+                        job_type = "final"
                 elif line.startswith("SERVICE"):
                     m = re.match(r"SERVICE (\S+) jobs/([^/]+)/", line)
                     if m:
+                        job_name = m.group(1)
                         label = m.group(2)
-
+                        job_type = "service"
+
+                if job_name:
+                    job_name_to_label[job_name] = label
+                    job_name_to_type[job_name] = job_type
+
     except (OSError, PermissionError, StopIteration):
         pass

     summary = ";".join([f"{name}:{counts[name]}" for name in counts])
-    _LOG.debug("
-    return summary, job_name_to_label
+    _LOG.debug("summarize_dag: %s %s %s", summary, job_name_to_label, job_name_to_type)
+    return summary, job_name_to_label, job_name_to_type


 def pegasus_name_to_label(name):

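As a toy illustration of summarize_dag's new three-value return (summary string, name-to-label, name-to-type), the sketch below applies the same kind of regexes to a few hypothetical DAG lines, including the now-optional quote before jobs/ in JOB lines:

import re

# Three hypothetical DAG lines, one per branch the parser handles.
dag_lines = [
    'JOB wf_init "jobs/init/job.sub"',
    "FINAL wf_final jobs/finalJob/job.sub",
    "SERVICE wf_prov jobs/provisioningJob/job.sub",
]

counts: dict[str, int] = {}
job_name_to_label: dict[str, str] = {}
job_name_to_type: dict[str, str] = {}
for line in dag_lines:
    job_name = ""
    job_type = "payload"
    if line.startswith("JOB"):
        m = re.match(r'JOB (\S+) "?jobs/([^/]+)/', line)  # "?" accepts quoted paths now
        if m:
            job_name, label = m.group(1), m.group(2)
            if label == "init":
                label = "pipetaskInit"
            counts[label] = counts.get(label, 0) + 1
    elif line.startswith("FINAL"):
        m = re.match(r"FINAL (\S+) jobs/([^/]+)/", line)
        if m:
            job_name, label = m.group(1), m.group(2)
            counts[label] = counts.get(label, 0) + 1  # final counts as a payload job
            job_type = "final"
    elif line.startswith("SERVICE"):
        m = re.match(r"SERVICE (\S+) jobs/([^/]+)/", line)
        if m:
            job_name, label = m.group(1), m.group(2)
            job_type = "service"  # not counted in the summary
    if job_name:
        job_name_to_label[job_name] = label
        job_name_to_type[job_name] = job_type

print(";".join(f"{name}:{n}" for name, n in counts.items()))
# pipetaskInit:1;finalJob:1
print(job_name_to_type)
# {'wf_init': 'payload', 'wf_final': 'final', 'wf_prov': 'service'}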
@@ -1400,7 +1414,7 @@ def read_node_status(wms_path):
     file.
     """
     # Get jobid info from other places to fill in gaps in info from node_status
-    _, job_name_to_label = summary_from_dag(wms_path)
+    _, job_name_to_label, job_name_to_type = summarize_dag(wms_path)
     wms_workflow_id, loginfo = read_dag_log(wms_path)
     loginfo = read_dag_nodes_log(wms_path)
     _LOG.debug("loginfo = %s", loginfo)

@@ -1409,17 +1423,17 @@ def read_node_status(wms_path):
         if "LogNotes" in job_info:
             m = re.match(r"DAG Node: (\S+)", job_info["LogNotes"])
             if m:
-
-
+                job_name = m.group(1)
+                job_name_to_id[job_name] = job_id
+                job_info["DAGNodeName"] = job_name
+                job_info["bps_job_type"] = job_name_to_type[job_name]
+                job_info["bps_job_label"] = job_name_to_label[job_name]

+    jobs = loginfo
+    fake_id = -1.0  # For nodes that do not yet have a job id, give fake one
     try:
         node_status = next(Path(wms_path).glob("*.node_status"))
-    except StopIteration:
-        return loginfo

-    jobs = {}
-    fake_id = -1.0  # For nodes that do not yet have a job id, give fake one
-    try:
         with open(node_status) as fh:
             for ad in classad.parseAds(fh):
                 match ad["Type"]:

@@ -1438,22 +1452,19 @@ def read_node_status(wms_path):
                         # Make job info as if came from condor_q.
                         if job_name in job_name_to_id:
                             job_id = str(job_name_to_id[job_name])
+                            job = jobs[job_id]
                         else:
                             job_id = str(fake_id)
+                            job_name_to_id[job_name] = job_id
+                            job = dict(ad)
+                            jobs[job_id] = job
                             fake_id -= 1
-                        job = dict(ad)
                         job["ClusterId"] = int(float(job_id))
                         job["DAGManJobID"] = wms_workflow_id
                         job["DAGNodeName"] = job_name
                         job["bps_job_label"] = job_label
+                        job["bps_job_type"] = job_name_to_type[job_name]

-                        # Include information retrieved from the event log
-                        # if available.
-                        jobs[job_id] = job
-                        try:
-                            jobs[job_id] |= loginfo[job_id]
-                        except KeyError:
-                            pass
                     case "StatusEnd":
                         # Skip node status file "epilog".
                         pass

@@ -1463,24 +1474,22 @@ def read_node_status(wms_path):
                         ad["Type"],
                         wms_path,
                     )
-    except (OSError, PermissionError):
+    except (StopIteration, OSError, PermissionError):
         pass
-
-
-
-
-
-
-
-
-
-
-
-
-        }
-
-        job_info["bps_job_label"] = job_name_to_label[job_id_to_name[job_id]]
-        jobs[f"{job_info['ProcId']}.{job_info['ClusterId']}"] = job_info
+
+    # Check for missing jobs (e.g., submission failure or not submitted yet)
+    # Use dag info to create job placeholders
+    for name in set(job_name_to_label) - set(job_name_to_id):
+        job = {}
+        job["ClusterId"] = int(float(fake_id))
+        job["ProcId"] = 0
+        job["DAGManJobID"] = wms_workflow_id
+        job["DAGNodeName"] = name
+        job["bps_job_label"] = job_name_to_label[name]
+        job["bps_job_type"] = job_name_to_type[name]
+        job["NodeStatus"] = NodeStatus.NOT_READY
+        jobs[f"{job['ClusterId']}.{job['ProcId']}"] = job
+        fake_id -= 1

     return jobs

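A sketch of the placeholder logic added at the end of read_node_status: DAG nodes known from the .dag file but absent from the logs now get stub ads with fake negative ClusterIds, so reports can show not-yet-submitted work. All names and ids below are hypothetical, and the string "NOT_READY" stands in for the real NodeStatus.NOT_READY enum value.

job_name_to_label = {"wf_init": "pipetaskInit", "wf_step1": "step1"}
job_name_to_type = {"wf_init": "payload", "wf_step1": "payload"}
job_name_to_id = {"wf_init": "123.0"}  # wf_step1 never showed up in any log
wms_workflow_id = "122.0"

jobs: dict[str, dict] = {}
fake_id = -1.0
for name in set(job_name_to_label) - set(job_name_to_id):
    job = {
        "ClusterId": int(float(fake_id)),
        "ProcId": 0,
        "DAGManJobID": wms_workflow_id,
        "DAGNodeName": name,
        "bps_job_label": job_name_to_label[name],
        "bps_job_type": job_name_to_type[name],
        "NodeStatus": "NOT_READY",  # real code: NodeStatus.NOT_READY
    }
    jobs[f"{job['ClusterId']}.{job['ProcId']}"] = job
    fake_id -= 1

print(jobs)  # {'-1.0': {'ClusterId': -1, ..., 'DAGNodeName': 'wf_step1', ...}}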
lsst/ctrl/bps/htcondor/version.py

@@ -1,2 +1,2 @@
 __all__ = ["__version__"]
-__version__ = "28.2025.800"
+__version__ = "29.0.0"
{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: lsst-ctrl-bps-htcondor
-Version: 28.2025.800
+Version: 29.0.0
 Summary: HTCondor plugin for lsst-ctrl-bps.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: BSD 3-Clause License

@@ -26,6 +26,7 @@ Requires-Dist: lsst-utils
 Provides-Extra: test
 Requires-Dist: pytest>=3.2; extra == "test"
 Requires-Dist: pytest-openfiles>=0.5.0; extra == "test"
+Dynamic: license-file

 #################
 ctrl_bps_htcondor

lsst_ctrl_bps_htcondor-29.0.0.dist-info/RECORD
ADDED

@@ -0,0 +1,19 @@
+lsst/ctrl/bps/htcondor/__init__.py,sha256=1gTmOVLJILvBqgqHVECo8uqoX8e4fiTeH_dHBUXgDvY,1417
+lsst/ctrl/bps/htcondor/final_post.sh,sha256=chfaQV6Q7rGsK-8Hx58ch52m-PofvBanrl7VwCssHec,248
+lsst/ctrl/bps/htcondor/handlers.py,sha256=2gM3Ac00in4ob9ckcP331W1LSEjs9UDKIqt4MULA4bg,11196
+lsst/ctrl/bps/htcondor/htcondor_config.py,sha256=c4lCiYEwEXFdxgbMfEkbDm4LrvkRMF31SqLtQqzqIV4,1523
+lsst/ctrl/bps/htcondor/htcondor_service.py,sha256=nWStzzQVO-Th_t8AEbrUIMN8qyKGdfYrKGjdWS8r66A,82060
+lsst/ctrl/bps/htcondor/lssthtc.py,sha256=Um7xycgLLJO9EAZiTHBpFQ9xPTpWQzLf1rZaUvbhGdU,57273
+lsst/ctrl/bps/htcondor/provisioner.py,sha256=hPN8YJUtwNHQylw68kfskF1S2vCeQvztF8W0d_QKqqM,7851
+lsst/ctrl/bps/htcondor/version.py,sha256=Z2Nlq0m6lqeQ0k5S9XG3GcTp7ULAPdHm54gvGnA6B_E,49
+lsst/ctrl/bps/htcondor/etc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+lsst/ctrl/bps/htcondor/etc/htcondor_defaults.yaml,sha256=xDRts4vHKov2PE_JRh-0nF3jfuNJXtKBXZqveASp_iA,1422
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses/COPYRIGHT,sha256=Lc6NoAEFQ65v_SmtS9NwfHTOuSUtC2Umbjv5zyowiQM,61
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/METADATA,sha256=woJj-ONC7RoaEZxnjk5rb17OzFjb7RGa8fnx9TFemcQ,2133
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+lsst_ctrl_bps_htcondor-29.0.0.dist-info/RECORD,,

lsst_ctrl_bps_htcondor-28.2025.800.dist-info/RECORD
DELETED

@@ -1,19 +0,0 @@
-lsst/ctrl/bps/htcondor/__init__.py,sha256=1gTmOVLJILvBqgqHVECo8uqoX8e4fiTeH_dHBUXgDvY,1417
-lsst/ctrl/bps/htcondor/final_post.sh,sha256=chfaQV6Q7rGsK-8Hx58ch52m-PofvBanrl7VwCssHec,248
-lsst/ctrl/bps/htcondor/handlers.py,sha256=2gM3Ac00in4ob9ckcP331W1LSEjs9UDKIqt4MULA4bg,11196
-lsst/ctrl/bps/htcondor/htcondor_config.py,sha256=c4lCiYEwEXFdxgbMfEkbDm4LrvkRMF31SqLtQqzqIV4,1523
-lsst/ctrl/bps/htcondor/htcondor_service.py,sha256=UGRxT4cT5giQbEmQQfU546oEm2dxDXQRr46YajhRqh4,80335
-lsst/ctrl/bps/htcondor/lssthtc.py,sha256=kouYnXE9tiWGNupkNki0kU26r8p_PhFTOT4w9nNCsXs,56978
-lsst/ctrl/bps/htcondor/provisioner.py,sha256=hPN8YJUtwNHQylw68kfskF1S2vCeQvztF8W0d_QKqqM,7851
-lsst/ctrl/bps/htcondor/version.py,sha256=G6JpKNKUlr1JgGmh_qq-VfLkVHObq8Mw0srMbfP1tiM,54
-lsst/ctrl/bps/htcondor/etc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-lsst/ctrl/bps/htcondor/etc/htcondor_defaults.yaml,sha256=xDRts4vHKov2PE_JRh-0nF3jfuNJXtKBXZqveASp_iA,1422
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/COPYRIGHT,sha256=Lc6NoAEFQ65v_SmtS9NwfHTOuSUtC2Umbjv5zyowiQM,61
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/METADATA,sha256=7soxDJkTVA_cwW_7J9eHCzaoHIjz4vqMhUULzbD5z54,2116
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-lsst_ctrl_bps_htcondor-28.2025.800.dist-info/RECORD,,

{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/COPYRIGHT
RENAMED
File without changes

{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/LICENSE
RENAMED
File without changes

{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/bsd_license.txt
RENAMED
File without changes

{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info/licenses}/gpl-v3.0.txt
RENAMED
File without changes

{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/top_level.txt
RENAMED
File without changes

{lsst_ctrl_bps_htcondor-28.2025.800.dist-info → lsst_ctrl_bps_htcondor-29.0.0.dist-info}/zip-safe
RENAMED
File without changes