dayhoff-tools 1.1.34__py3-none-any.whl → 1.1.35__py3-none-any.whl

--- a/dayhoff_tools/deployment/deploy_aws.py
+++ b/dayhoff_tools/deployment/deploy_aws.py
@@ -286,15 +286,15 @@ def create_or_update_job_definition(
         raise ValueError("docker.container_entrypoint is required in configuration")
 
     # Create linux parameters with devices
-    linux_params: dict[str, Any] = {
-        "devices": [
+    linux_params: dict[str, Any] = {}
+    if compute_specs.get("gpus", 0) > 0:
+        linux_params["devices"] = [
             {
                 "hostPath": "/dev/nvidia0",
                 "containerPath": "/dev/nvidia0",
                 "permissions": ["READ", "WRITE"],
             },
-        ],
-    }
+        ]
 
     # Add shared memory configuration if specified in docker config
     if "shared_memory" in config.get("docker", {}):
@@ -318,6 +318,32 @@ def create_or_update_job_definition(
         linux_params["sharedMemorySize"] = shared_memory_mib
         print(f"Setting shared memory size to {shared_memory_mib} MiB")
 
+    # Prepare containerProperties
+    container_properties = {
+        "image": image_uri,
+        "vcpus": compute_specs["vcpus"],
+        "memory": compute_specs["memory"],
+        "resourceRequirements": gpu_requirements,
+        "executionRoleArn": aws_config["execution_role_arn"],
+        "jobRoleArn": aws_config["job_role_arn"],
+        "privileged": compute_specs.get("gpus", 0) > 0,
+        "command": entrypoint_command,
+    }
+
+    if linux_params:
+        container_properties["linuxParameters"] = linux_params
+
+    # Add volumes and mount points if defined in AWS batch_job config
+    batch_job_config = aws_config.get("batch_job", {})
+    if "volumes" in batch_job_config:
+        container_properties["volumes"] = batch_job_config["volumes"]
+        print(f"Adding volumes to job definition: {batch_job_config['volumes']}")
+    if "mountPoints" in batch_job_config:
+        container_properties["mountPoints"] = batch_job_config["mountPoints"]
+        print(
+            f"Adding mount points to job definition: {batch_job_config['mountPoints']}"
+        )
+
     # Check if job definition already exists using the session client
     try:
         existing = batch.describe_job_definitions(
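The extracted container_properties dict also gains optional volumes and mountPoints passed straight through from the AWS config, which is what allows an EFS filesystem to be attached to the job. A hypothetical aws.batch_job config fragment showing the shapes the AWS Batch API expects; the filesystem ID and container path are placeholders, while the volume name matches the convention used later in this diff:

aws_config = {
    "batch_job": {
        "volumes": [
            {
                "name": "interproscan-efs-volume",
                "efsVolumeConfiguration": {
                    "fileSystemId": "fs-0123456789abcdef0",  # placeholder
                    "rootDirectory": "/",
                },
            }
        ],
        "mountPoints": [
            {
                "sourceVolume": "interproscan-efs-volume",
                "containerPath": "/mnt/interproscan",  # placeholder
                "readOnly": False,
            }
        ],
    }
}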
@@ -330,31 +356,20 @@ def create_or_update_job_definition(
             print(f"\nCreating new job definition: {job_def_name}")
 
     except batch.exceptions.ClientError as e:
-        # Handle case where the error is specifically 'JobDefinitionNotFoundException'
-        # Boto3 typically includes error codes in the response
-        if (
-            e.response.get("Error", {}).get("Code") == "ClientError"
-        ):  # Simple check, might need refinement
+        if e.response.get("Error", {}).get(
+            "Code"
+        ) == "ClientError" and "JobDefinitionNotFoundException" in str(
+            e
+        ):  # More specific check for not found
            print(f"\nCreating new job definition: {job_def_name}")
        else:
-            # Re-raise unexpected client errors
            raise
 
-    # Prepare job definition properties
+    # Prepare job definition arguments
    job_definition_args = {
        "jobDefinitionName": job_def_name,
        "type": "container",
-        "containerProperties": {
-            "image": image_uri,
-            "vcpus": compute_specs["vcpus"],
-            "memory": compute_specs["memory"],
-            "resourceRequirements": gpu_requirements,
-            "executionRoleArn": aws_config["execution_role_arn"],
-            "jobRoleArn": aws_config["job_role_arn"],
-            "privileged": compute_specs.get("gpus", 0) > 0,
-            "command": entrypoint_command,
-            **({"linuxParameters": linux_params} if linux_params else {}),
-        },
+        "containerProperties": container_properties,
        "platformCapabilities": ["EC2"],
        "timeout": {"attemptDurationSeconds": aws_config.get("timeout_seconds", 86400)},
    }
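Note that the rewritten except branch still keys off Code == "ClientError" plus a substring of the stringified exception. A sketch of an alternative that reads boto3's structured error fields instead; this is an assumed cleaner pattern, not the package's code, and the client and job definition name are placeholders:

import boto3
import botocore.exceptions

batch = boto3.client("batch")
job_def_name = "example-job-def"  # placeholder

try:
    batch.describe_job_definitions(jobDefinitionName=job_def_name, status="ACTIVE")
except botocore.exceptions.ClientError as e:
    error = e.response.get("Error", {})
    # Inspect the structured code and message rather than str(e).
    if "JobDefinitionNotFound" in error.get("Code", "") + error.get("Message", ""):
        print(f"\nCreating new job definition: {job_def_name}")
    else:
        raise  # re-raise unexpected client errors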
@@ -403,9 +418,30 @@ def submit_aws_batch_job(
     print(f"- Job Role: {aws_config['job_role_arn']}")
 
     # Get all environment variables, including special ones like WANDB_API_KEY and GCP credentials
-    env_vars = get_container_env_vars(config)
+    env_vars_map = get_container_env_vars(config)  # This returns a dict
+
+    # If EFS is configured for InterProScan, override INTERPROSCAN_INSTALL_DIR
+    # Check based on the conventional volume name used in interp_bulk.yaml
+    efs_interproscan_mount_path = None
+    aws_batch_job_config = aws_config.get("batch_job", {})
+    if "mountPoints" in aws_batch_job_config:
+        for mp in aws_batch_job_config["mountPoints"]:
+            if (
+                mp.get("sourceVolume") == "interproscan-efs-volume"
+            ):  # Convention from YAML
+                efs_interproscan_mount_path = mp.get("containerPath")
+                break
+
+    if efs_interproscan_mount_path:
+        env_vars_map["INTERPROSCAN_INSTALL_DIR"] = efs_interproscan_mount_path
+        print(
+            f"INTERPROSCAN_INSTALL_DIR overridden to EFS mount path: {efs_interproscan_mount_path}"
+        )
 
-    print("Environment Variables:", list(env_vars.keys()))
+    print(
+        "Environment Variables (after potential EFS override):",
+        list(env_vars_map.keys()),
+    )
 
     # Create/Update Job Definition using the config (now implicitly uses the correct session)
     job_definition = create_or_update_job_definition(image_uri, config)
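This hunk scans the configured mountPoints for the volume named interproscan-efs-volume and, when found, points INTERPROSCAN_INSTALL_DIR at its containerPath so the job uses the EFS-hosted install. A condensed sketch of that scan; find_efs_mount_path is a hypothetical helper and the paths are placeholders:

from typing import Optional

def find_efs_mount_path(aws_config: dict, volume_name: str) -> Optional[str]:
    """Return the containerPath of the mount point backed by volume_name, if any."""
    for mp in aws_config.get("batch_job", {}).get("mountPoints", []):
        if mp.get("sourceVolume") == volume_name:
            return mp.get("containerPath")
    return None

env_vars_map = {"INTERPROSCAN_INSTALL_DIR": "/opt/interproscan"}  # placeholder default
sample_config = {
    "batch_job": {
        "mountPoints": [
            {
                "sourceVolume": "interproscan-efs-volume",
                "containerPath": "/mnt/interproscan",  # placeholder
            }
        ]
    }
}
mount_path = find_efs_mount_path(sample_config, "interproscan-efs-volume")
if mount_path:
    env_vars_map["INTERPROSCAN_INSTALL_DIR"] = mount_path  # EFS mount wins
print(env_vars_map)  # {'INTERPROSCAN_INSTALL_DIR': '/mnt/interproscan'}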
@@ -418,7 +454,8 @@ def submit_aws_batch_job(
         "jobDefinition": job_definition,
         "containerOverrides": {
             "environment": [
-                {"name": key, "value": str(value)} for key, value in env_vars.items()
+                {"name": key, "value": str(value)}
+                for key, value in env_vars_map.items()
             ],
         },
     }
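For context on the reformatted comprehension: AWS Batch's containerOverrides takes environment as a list of name/value pairs with string values, so the env-var dict is flattened and every value passed through str(). A toy example of the same flattening, with placeholder values:

env_vars_map = {"WANDB_API_KEY": "placeholder-key", "BATCH_SIZE": 32}
environment = [
    {"name": key, "value": str(value)} for key, value in env_vars_map.items()
]
print(environment)
# [{'name': 'WANDB_API_KEY', 'value': 'placeholder-key'},
#  {'name': 'BATCH_SIZE', 'value': '32'}]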
--- a/dayhoff_tools-1.1.34.dist-info/METADATA
+++ b/dayhoff_tools-1.1.35.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: dayhoff-tools
-Version: 1.1.34
+Version: 1.1.35
 Summary: Common tools for all the repos at Dayhoff Labs
 Author: Daniel Martin-Alarcon
 Author-email: dma@dayhofflabs.com
--- a/dayhoff_tools-1.1.34.dist-info/RECORD
+++ b/dayhoff_tools-1.1.35.dist-info/RECORD
@@ -7,7 +7,7 @@ dayhoff_tools/cli/main.py,sha256=47EGb28ALaYFc7oAUGlY1D66AIDmc4RZiXxN-gPVrpQ,451
 dayhoff_tools/cli/swarm_commands.py,sha256=5EyKj8yietvT5lfoz8Zx0iQvVaNgc3SJX1z2zQR6o6M,5614
 dayhoff_tools/cli/utility_commands.py,sha256=ER4VrJt4hu904MwrcltUXjwBWT4uFrP-aPXjdXyT3F8,24685
 dayhoff_tools/deployment/base.py,sha256=8tXwsPYvRo-zV-aNhHw1c7Rji-KWg8S5xoCCznFnVVI,17412
-dayhoff_tools/deployment/deploy_aws.py,sha256=O0gQxHioSU_sNU8T8MD4wSOPvWc--V8eRRZzlRu035I,16446
+dayhoff_tools/deployment/deploy_aws.py,sha256=jQyQ0fbm2793jEHFO84lr5tNqiOpdBg6U0S5zCVJr1M,17884
 dayhoff_tools/deployment/deploy_gcp.py,sha256=jiEE_tBVeSavAI8o_6qPDPpaoXKexcaNIa4uXcv3y0M,8839
 dayhoff_tools/deployment/deploy_utils.py,sha256=StFwbqnr2_FWiKVg3xnJF4kagTHzndqqDkpaIOaAn_4,26027
 dayhoff_tools/deployment/job_runner.py,sha256=hljvFpH2Bw96uYyUup5Ths72PZRL_X27KxlYzBMgguo,5086
@@ -26,7 +26,7 @@ dayhoff_tools/intake/uniprot.py,sha256=BZYJQF63OtPcBBnQ7_P9gulxzJtqyorgyuDiPeOJq
 dayhoff_tools/logs.py,sha256=DKdeP0k0kliRcilwvX0mUB2eipO5BdWUeHwh-VnsICs,838
 dayhoff_tools/sqlite.py,sha256=jV55ikF8VpTfeQqqlHSbY8OgfyfHj8zgHNpZjBLos_E,18672
 dayhoff_tools/warehouse.py,sha256=TqV8nex1AluNaL4JuXH5zuu9P7qmE89lSo6f_oViy6U,14965
-dayhoff_tools-1.1.34.dist-info/METADATA,sha256=-fobKYRniLRERxoTFy2_twRo1vgGMzlUabXPVNY8NcI,2843
-dayhoff_tools-1.1.34.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-dayhoff_tools-1.1.34.dist-info/entry_points.txt,sha256=iAf4jteNqW3cJm6CO6czLxjW3vxYKsyGLZ8WGmxamSc,49
-dayhoff_tools-1.1.34.dist-info/RECORD,,
+dayhoff_tools-1.1.35.dist-info/METADATA,sha256=lula1chZ1oJh4kiICRO_J1Lh4nhG49THMge3MBuM428,2843
+dayhoff_tools-1.1.35.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+dayhoff_tools-1.1.35.dist-info/entry_points.txt,sha256=iAf4jteNqW3cJm6CO6czLxjW3vxYKsyGLZ8WGmxamSc,49
+dayhoff_tools-1.1.35.dist-info/RECORD,,