lsst-ctrl-bps-panda 29.2025.1900__py3-none-any.whl → 29.2025.2100__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -184,6 +184,8 @@ def replace_event_file(params, files):
  Example params:
  isr:eventservice_90^10+somethingelse. This part
  'isr:eventservice_90^10' is the EventService parameter.
+ isr:orderIdMap_10. This part is using order_id map file. But it
+ is not EventService.
  The format for the EventService parameter for LSST is
  'label:eventservice_<baseid>^<localid>'. The '<localid>' should
  start from 1, which means the first event of the file
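A minimal, self-contained sketch (not part of the package; the helper name and return structure are invented for illustration) of how the two placeholder styles described in this docstring can be told apart and split, assuming the '+'-joined parts have already been separated:

    def classify_param(param):
        """Illustrative helper only: split one placeholder into its pieces."""
        label, tail = param.split(":", 1)
        if "eventservice_" in tail:
            base_id, local_id = tail.split("_", 1)[1].split("^")
            return {"kind": "eventservice", "label": label, "base_id": base_id, "local_id": local_id}
        if "orderIdMap_" in tail:
            return {"kind": "orderIdMap", "label": label, "order_id": tail.split("_", 1)[1]}
        return {"kind": "plain", "value": param}

    print(classify_param("isr:eventservice_90^10"))
    # {'kind': 'eventservice', 'label': 'isr', 'base_id': '90', 'local_id': '10'}
    print(classify_param("isr:orderIdMap_10"))
    # {'kind': 'orderIdMap', 'label': 'isr', 'order_id': '10'}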
@@ -217,6 +219,7 @@ def replace_event_file(params, files):
  """
  ret_status = True
  with_events = False
+ with_order_id_map = False
  files = files.split("+")
  file_map = {}
  for file in files:
@@ -272,8 +275,33 @@ def replace_event_file(params, files):
  ret_status = False
  break

- params_map[param] = {"event_index": event_index, "order_id_map": order_id_map[label]}
- return ret_status, with_events, params_map
+ params_map[param] = {"order_id": event_index, "order_id_map": order_id_map[label]}
+ elif "orderIdMap_" in param:
+ with_order_id_map = True
+ label, event = param.split(":")
+ order_id = event.split("_")[1]
+ if not order_id_map:
+ print("orderIdMap is enabled but order_id_map file doesn't exist.")
+ ret_status = False
+ break
+
+ if label not in order_id_map:
+ print(
+ f"orderIdMap is enabled but label {label} doesn't in the keys"
+ f" of order_id_map {order_id_map.keys()}"
+ )
+ ret_status = False
+ break
+ if order_id not in order_id_map[label]:
+ print(
+ f"orderIdMap is enabled but order_id {order_id} is not"
+ f" in order_id_map[{label}] {order_id_map[label].keys()}"
+ )
+ ret_status = False
+ break
+
+ params_map[param] = {"order_id": order_id, "order_id_map": order_id_map[label]}
+ return ret_status, with_events, with_order_id_map, params_map


  deliver_input_files(sys.argv[3], sys.argv[4], sys.argv[5])
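The lookups above (order_id_map[label] and order_id_map[label][order_id]) imply a nested label -> order id -> pseudo file name mapping. A small illustrative sketch of that assumed shape, with invented labels, ids, and file names:

    # Hypothetical contents of the order_id_map file, inferred from the
    # lookups order_id_map[label] and order_id_map[label][order_id] above.
    order_id_map = {
        "isr": {
            "10": "long_pseudo_file_name_for_isr_job_10",
            "11": "long_pseudo_file_name_for_isr_job_11",
        },
    }

    param = "isr:orderIdMap_10"
    label, event = param.split(":")
    order_id = event.split("_")[1]
    print(order_id_map[label][order_id])  # long_pseudo_file_name_for_isr_job_10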
@@ -281,23 +309,33 @@ cmd_line = str(binascii.unhexlify(sys.argv[1]).decode())
  data_params = sys.argv[2]
  cmd_line = replace_environment_vars(cmd_line)

+ print(f"cmd_line: {cmd_line}")
+ print(f"data_params: {data_params}")
+
  # If EventService is enabled, data_params will only contain event information.
  # So we need to convert the event information to LSST pseudo file names.
  # If EventService is not enabled, this part will not change data_params.
- ret_event_status, with_events, event_params_map = replace_event_file(data_params, sys.argv[4])
- print(f"ret_event_status: {ret_event_status}, with_events: {with_events}")
+ ret_rep = replace_event_file(data_params, sys.argv[4])
+ ret_event_status, with_events, with_order_id_map, event_params_map = ret_rep
+ print(
+ f"ret_event_status: {ret_event_status}, with_events: {with_events} with_order_id_map: {with_order_id_map}"
+ )
  if not ret_event_status:
- print("failed to map EventService parameters to original LSST pseudo file names")
+ print("failed to map EventService/orderIdMap parameters to original LSST pseudo file names")
  exit_code = 1
  sys.exit(exit_code)

  for event_param in event_params_map:
- event_index = event_params_map[event_param]["event_index"]
- pseudo_file_name = event_params_map[event_param]["order_id_map"][event_index]
- print(f"replacing event {event_param} with event_index {event_index} to: {pseudo_file_name}")
+ order_id = event_params_map[event_param]["order_id"]
+ pseudo_file_name = event_params_map[event_param]["order_id_map"][order_id]
+ print(f"replacing event {event_param} with order_id {order_id} to: {pseudo_file_name}")
  cmd_line = cmd_line.replace(event_param, pseudo_file_name)
  data_params = data_params.replace(event_param, pseudo_file_name)

+ # If job name map is enabled, data_params will only contain order_id
+ # information. Here we will convert order_id information to LSST pseudo
+ # file names.
+
  data_params = data_params.split("+")

  """Replace the pipetask command line placeholders
@@ -37,7 +37,14 @@ import re

  from idds.workflowv2.workflow import Workflow as IDDS_client_workflow

- from lsst.ctrl.bps import BaseWmsService, BaseWmsWorkflow, WmsRunReport, WmsStates
+ from lsst.ctrl.bps import (
+ DEFAULT_MEM_FMT,
+ DEFAULT_MEM_UNIT,
+ BaseWmsService,
+ BaseWmsWorkflow,
+ WmsRunReport,
+ WmsStates,
+ )
  from lsst.ctrl.bps.panda.constants import PANDA_DEFAULT_MAX_COPY_WORKERS
  from lsst.ctrl.bps.panda.utils import (
  add_final_idds_work,
@@ -48,6 +55,7 @@ from lsst.ctrl.bps.panda.utils import (
  get_idds_result,
  )
  from lsst.resources import ResourcePath
+ from lsst.utils.timer import time_this

  _LOG = logging.getLogger(__name__)

@@ -58,10 +66,22 @@ class PanDAService(BaseWmsService):
  def prepare(self, config, generic_workflow, out_prefix=None):
  # Docstring inherited from BaseWmsService.prepare.
  _LOG.debug("out_prefix = '%s'", out_prefix)
- workflow = PandaBpsWmsWorkflow.from_generic_workflow(
- config, generic_workflow, out_prefix, f"{self.__class__.__module__}.{self.__class__.__name__}"
- )
- workflow.write(out_prefix)
+
+ _LOG.info("Starting PanDA prepare stage (creating specific implementation of workflow)")
+
+ with time_this(
+ log=_LOG,
+ level=logging.INFO,
+ prefix=None,
+ msg="PanDA prepare stage completed",
+ mem_usage=True,
+ mem_unit=DEFAULT_MEM_UNIT,
+ mem_fmt=DEFAULT_MEM_FMT,
+ ):
+ workflow = PandaBpsWmsWorkflow.from_generic_workflow(
+ config, generic_workflow, out_prefix, f"{self.__class__.__module__}.{self.__class__.__name__}"
+ )
+ workflow.write(out_prefix)
  return workflow

  def submit(self, workflow, **kwargs):
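The prepare() change above wraps workflow construction in time_this from lsst.utils.timer, which logs the elapsed time for the block and, when mem_usage=True, a memory report. A generic usage sketch of the same context-manager pattern, assuming lsst.utils is installed (the logger name and workload are invented):

    import logging

    from lsst.utils.timer import time_this

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("example")

    # On exit the block logs "toy workload completed ..." with the elapsed time.
    with time_this(log=log, level=logging.INFO, msg="toy workload completed"):
        total = sum(range(1_000_000))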
@@ -222,7 +222,8 @@ def _make_doma_work(
  task_count,
  task_chunk,
  enable_event_service=False,
- es_files=None,
+ enable_job_name_map=False,
+ order_id_map_files=None,
  es_label=None,
  max_payloads_per_panda_job=PANDA_DEFAULT_MAX_PAYLOADS_PER_PANDA_JOB,
  max_wms_job_wall_time=None,
@@ -248,8 +249,8 @@ def _make_doma_work(
  local_pfns : `dict` [`str`, `str`]
  Files which need to be copied to a workflow staging area.
  """
- if es_files is None:
- es_files = {}
+ if order_id_map_files is None:
+ order_id_map_files = {}
  _LOG.debug("Using gwjob %s to create new PanDA task (gwjob=%s)", gwjob.name, gwjob)
  cvals = {"curr_cluster": gwjob.label}
  _, site = config.search("computeSite", opt={"curvals": cvals, "required": True})
@@ -338,13 +339,15 @@ def _make_doma_work(

  maxwalltime = gwjob.request_walltime if gwjob.request_walltime else PANDA_DEFAULT_MAX_WALLTIME
  if enable_event_service:
- for es_name in es_files:
- local_pfns[es_name] = es_files[es_name]
  if gwjob.request_walltime and max_payloads_per_panda_job:
  maxwalltime = gwjob.request_walltime * max_payloads_per_panda_job
  elif max_wms_job_wall_time:
  maxwalltime = max_wms_job_wall_time

+ if enable_event_service or enable_job_name_map:
+ for es_name in order_id_map_files:
+ local_pfns[es_name] = order_id_map_files[es_name]
+
  for gwfile in generic_workflow.get_job_inputs(gwjob.name, transfer_only=True):
  local_pfns[gwfile.name] = gwfile.src_uri
  if os.path.isdir(gwfile.src_uri):
@@ -596,6 +599,14 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  )
  _LOG.info(my_log)

+ # job name map: Use a short job name to map the long job name
+ _, enable_job_name_map = config.search("enableJobNameMap", opt={"default": None})
+ _LOG.info(f"enable_job_name_map: {enable_job_name_map}, {type(enable_job_name_map)}")
+ if enable_event_service and not enable_job_name_map:
+ enable_job_name_map = True
+ my_log = "enable_event_service is set, set enable_job_name_map True."
+ _LOG.info(my_log)
+
  # Limit number of jobs in single PanDA task
  _, max_jobs_per_task = config.search("maxJobsPerTask", opt={"default": PANDA_DEFAULT_MAX_JOBS_PER_TASK})

@@ -614,14 +625,16 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  archive_filename = create_archive_file(submit_path, archive_filename, files)
  remote_archive_filename = copy_files_to_pandacache(archive_filename)

- es_files = {}
+ order_id_map_files = {}
  name_works = {}
  order_id_map = {}
+ job_name_to_order_id_map = {}
  doma_tree = None
  order_id_map_file = None
  if enable_event_service:
  enable_event_service = enable_event_service.split(",")
  enable_event_service = [i.strip() for i in enable_event_service]
+ if enable_job_name_map:
  doma_tree = DomaTree(name=generic_workflow.name)
  submit_path = config[".bps_defined.submitPath"]
  _, order_id_map_filename = config.search(
@@ -629,8 +642,14 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  )
  order_id_map_file = os.path.join(submit_path, order_id_map_filename)
  order_id_map = doma_tree.order_jobs_from_generic_workflow(generic_workflow, order_id_map_file)
- es_files = {"orderIdMapFilename": order_id_map_file}
- files_to_pre_stage.update(es_files)
+ order_id_map_files = {"orderIdMapFilename": order_id_map_file}
+ files_to_pre_stage.update(order_id_map_files)
+
+ # job name to order id map
+ job_name_to_order_id_map = {
+ label: {job_name: order_id for order_id, job_name in orders.items()}
+ for label, orders in order_id_map.items()
+ }

  # To avoid dying due to optimizing number of times through workflow,
  # catch dependency issues to loop through again later.
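The comprehension above inverts each per-label mapping from order id to job name. A small worked example with invented job names, showing the direction of the inversion:

    order_id_map = {
        "isr": {"0": "isr_<very_long_job_name_a>", "1": "isr_<very_long_job_name_b>"},
    }

    job_name_to_order_id_map = {
        label: {job_name: order_id for order_id, job_name in orders.items()}
        for label, orders in order_id_map.items()
    }
    print(job_name_to_order_id_map)
    # {'isr': {'isr_<very_long_job_name_a>': '0', 'isr_<very_long_job_name_b>': '1'}}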
@@ -684,7 +703,8 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  task_count,
  task_chunk,
  enable_event_service=work_enable_event_service,
- es_files=es_files,
+ enable_job_name_map=enable_job_name_map,
+ order_id_map_files=order_id_map_files,
  es_label=job_label,
  max_payloads_per_panda_job=max_payloads_per_panda_job,
  max_wms_job_wall_time=max_wms_job_wall_time,
@@ -707,19 +727,35 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  missing_deps = True
  break
  else:
+ if enable_job_name_map:
+ parent_job = generic_workflow.get_job(parent_job_name)
+ parent_job_label = parent_job.label
+ parent_order_id = job_name_to_order_id_map[parent_job_label][parent_job_name]
+ inputname = f"{parent_job_label}:orderIdMap_{parent_order_id}"
+ else:
+ inputname = job_to_pseudo_filename[parent_job_name]
+
  deps.append(
  {
  "task": job_to_task[parent_job_name],
- "inputname": job_to_pseudo_filename[parent_job_name],
- "available": False,
+ "inputname": inputname,
  }
  )
  if not missing_deps:
+ f_name = f"{job_label}:orderIdMap_{order_id}" if enable_job_name_map else pseudo_filename
  work.dependency_map.append(
- {"name": pseudo_filename, "order_id": order_id, "dependencies": deps}
+ {
+ "name": f_name,
+ "order_id": order_id,
+ "dependencies": deps,
+ }
  )
  else:
- jobs_with_dependency_issues[gwjob.name] = {"work": work, "order_id": order_id}
+ jobs_with_dependency_issues[gwjob.name] = {
+ "work": work,
+ "order_id": order_id,
+ "label": job_label,
+ }

  # If there were any issues figuring out dependencies through earlier loop
  if jobs_with_dependency_issues:
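When enable_job_name_map is in effect, the dependency entries built above reference parent jobs by the short 'label:orderIdMap_<order_id>' form instead of the long pseudo file name. A hypothetical example of one resulting work.dependency_map entry (the labels, order ids, and task name are placeholders, not values from the package):

    # Invented example of one work.dependency_map entry when
    # enable_job_name_map is set.
    dependency_entry = {
        "name": "characterizeImage:orderIdMap_3",
        "order_id": "3",
        "dependencies": [
            {"task": "some_isr_task_name", "inputname": "isr:orderIdMap_3"},
        ],
    }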
@@ -729,6 +765,7 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  deps = []
  work = work_item["work"]
  order_id = work_item["order_id"]
+ job_label = work_item["label"]

  for parent_job_name in generic_workflow.predecessors(job_name):
  if parent_job_name not in job_to_task:
@@ -736,15 +773,29 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  raise RuntimeError(
  "Could not recover from dependency issues ({job_name} missing {parent_job_name})."
  )
+ if enable_job_name_map:
+ parent_job = generic_workflow.get_job(parent_job_name)
+ parent_job_label = parent_job.label
+ parent_order_id = job_name_to_order_id_map[parent_job_label][parent_job_name]
+ inputname = f"{parent_job_label}:orderIdMap_{parent_order_id}"
+ else:
+ inputname = job_to_pseudo_filename[parent_job_name]
+
  deps.append(
  {
  "task": job_to_task[parent_job_name],
- "inputname": job_to_pseudo_filename[parent_job_name],
- "available": False,
+ "inputname": inputname,
  }
  )
- pseudo_filename = job_to_pseudo_filename[job_name]
- work.dependency_map.append({"name": pseudo_filename, "order_id": order_id, "dependencies": deps})
+
+ work.dependency_map.append(
+ {
+ "name": f"{job_label}:orderIdMap_{order_id}" if enable_job_name_map else pseudo_filename,
+ "order_id": order_id,
+ "dependencies": deps,
+ }
+ )
+
  _LOG.info("Successfully recovered.")

  for task_name in name_works:
@@ -754,7 +805,7 @@ def add_idds_work(config, generic_workflow, idds_workflow):
  # 2) check to avoid duplicated items.
  work.dependency_map = work.dependency_map

- if enable_event_service:
+ if enable_job_name_map:
  for label_name in order_id_map:
  for order_id in order_id_map[label_name]:
  job_name = order_id_map[label_name][order_id]
@@ -1,2 +1,2 @@
  __all__ = ["__version__"]
- __version__ = "29.2025.1900"
+ __version__ = "29.2025.2100"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lsst-ctrl-bps-panda
- Version: 29.2025.1900
+ Version: 29.2025.2100
  Summary: PanDA plugin for lsst-ctrl-bps.
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
  License: BSD 3-Clause License
@@ -3,9 +3,9 @@ lsst/ctrl/bps/panda/cmd_line_embedder.py,sha256=_gyqWQQxM8ZtZzQbWlOdgjfgrotrwV3s
  lsst/ctrl/bps/panda/constants.py,sha256=hhV1CDHW9G-Z6z2wGaAc41EMlJ-yn2NN3A8psDyjTkw,1907
  lsst/ctrl/bps/panda/panda_auth_drivers.py,sha256=Ff0QsrTgQYbHDCK89_Gayu_2ZC1i3RRt-Dnnx10b8G4,2558
  lsst/ctrl/bps/panda/panda_auth_utils.py,sha256=wb-vlB9jvabVIHKlqukE1vILO_0Q9iixE3xXyROeN2s,5093
- lsst/ctrl/bps/panda/panda_service.py,sha256=60rcp18zaVVNwtw55gri7jshDIre6btBlsMnMqp9vsU,16793
- lsst/ctrl/bps/panda/utils.py,sha256=NdPHRNECTJeWBamcWpzhZttbtBo5QRXtdVRcesMLbJA,37414
- lsst/ctrl/bps/panda/version.py,sha256=duakYcQBT5t4KBaOX14WCtq_sSvpoXvNcxi3sUb8GvI,55
+ lsst/ctrl/bps/panda/panda_service.py,sha256=iil1bvfmBNhZ3ZQ04UwlP3vshxNyJk5mZhMAhIaObpI,17278
+ lsst/ctrl/bps/panda/utils.py,sha256=FmecYPsL3JbUwXQCgx5rpgDNG9yxlbPD0sJON-kbin4,39594
+ lsst/ctrl/bps/panda/version.py,sha256=czd5myijXlfzN1l1OFP7x6hn5ASHoE02wXhI32IqLec,55
  lsst/ctrl/bps/panda/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lsst/ctrl/bps/panda/cli/panda_auth.py,sha256=i54ati_HoKSlyslQRBl7QpX1w5z8MjSfHHMpT43ZeXQ,2055
  lsst/ctrl/bps/panda/cli/cmd/__init__.py,sha256=WVcBiZ3z9rnG4FOsYMgp1QYGwlkM9n_edpMbGDBvCrs,1393
@@ -17,13 +17,13 @@ lsst/ctrl/bps/panda/conf_example/test_sdf.yaml,sha256=e7C8df1h59KA3vBCm_qD1Xmcm2
  lsst/ctrl/bps/panda/conf_example/test_usdf.yaml,sha256=WIbXCJZDaG7zYUHt7U96MUjUsPRdHlk8C0Rg00nsPjs,390
  lsst/ctrl/bps/panda/edgenode/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lsst/ctrl/bps/panda/edgenode/build_cmd_line_decoder.py,sha256=CjB_ESDKLK67QPlcZHWoJzfaqgC733ih_iIQrwYkiUo,3067
- lsst/ctrl/bps/panda/edgenode/cmd_line_decoder.py,sha256=QOtG3VP6-iw6Q8n1ThzawJ9NKJ5h1hzzGPihQr8aDfk,12184
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/licenses/COPYRIGHT,sha256=5ATATZSyXxMNKoJuCJdATg4YNm56ubTwU_hDbShxIWw,116
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/METADATA,sha256=ZXcmInJMgn6OzJ7pP76YJ0BLZynsR0jFs2Mm5i0wiqQ,2375
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- lsst_ctrl_bps_panda-29.2025.1900.dist-info/RECORD,,
+ lsst/ctrl/bps/panda/edgenode/cmd_line_decoder.py,sha256=3pxSE672LPF5J51Vc0h4dyyRi0Is8vSmXfNT3NYpcRg,13660
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/licenses/COPYRIGHT,sha256=5ATATZSyXxMNKoJuCJdATg4YNm56ubTwU_hDbShxIWw,116
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/METADATA,sha256=zNwRCa62S-BOkRBNqaVS5KLNeFfIasaDARYkLhya3F0,2375
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ lsst_ctrl_bps_panda-29.2025.2100.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.3.1)
+ Generator: setuptools (80.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any