dayhoff-tools 1.14.15__py3-none-any.whl → 1.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dayhoff_tools/cli/main.py CHANGED
@@ -5,19 +5,13 @@ from importlib.metadata import PackageNotFoundError, version

  import typer
  from dayhoff_tools.cli.cloud_commands import aws_app, gcp_app
+ from dayhoff_tools.cli.engine1 import engine_app as engine1_app
+ from dayhoff_tools.cli.engine1 import studio_app as studio1_app
  from dayhoff_tools.cli.github_commands import gh_app
- from dayhoff_tools.cli.engine1 import (
-     engine_app as engine1_app,
-     studio_app as studio1_app,
- )
  from dayhoff_tools.cli.utility_commands import (
-     add_dependency,
      build_and_upload_wheel,
      delete_local_branch,
-     remove_dependency,
-     sync_with_toml,
      test_github_actions_locally,
-     update_dependencies,
  )
  from dayhoff_tools.warehouse import (
      _warn_if_gcp_default_sa,
@@ -48,19 +42,6 @@ app = typer.Typer(
  # Utility commands
  app.command("clean")(delete_local_branch)

- # Dependency Management
- app.command(
-     "tomlsync",
-     help="Sync environment with platform-specific TOML manifest (install/update dependencies).",
- )(sync_with_toml)
- app.command("add", help="Add a dependency to all platform manifests.")(add_dependency)
- app.command("remove", help="Remove a dependency from all platform manifests.")(
-     remove_dependency
- )
- app.command("update", help="Update dayhoff-tools (or all deps) and sync environment.")(
-     update_dependencies
- )
-
  # Other Utilities
  app.command("gha")(test_github_actions_locally)
  app.command("wadd")(add_to_warehouse_typer)
@@ -183,44 +164,6 @@ def build_and_upload_wheel_command(
      build_and_upload_wheel(bump_part=bump)


- # Use lazy loading for slow-loading swarm commands
- @app.command("reset")
- def reset_wrapper(
-     firestore_collection: str = typer.Option(prompt=True),
-     old_status: str = typer.Option(default="failed", prompt=True),
-     new_status: str = typer.Option(default="available", prompt=True),
-     delete_old: bool = typer.Option(default=True, prompt=True),
- ):
-     """Find all the documents in the database with a given status, and
-     make a new document with the same name and a new status."""
-     from dayhoff_tools.cli.swarm_commands import reset_failed_cards
-
-     reset_failed_cards(firestore_collection, old_status, new_status, delete_old)
-
-
- @app.command("zombie")
- def zombie_wrapper(
-     firestore_collection: str = typer.Option(prompt=True),
-     delete_old: bool = typer.Option(default=True, prompt=True),
-     minutes_threshold: int = typer.Option(default=60, prompt=True),
- ):
-     """Find all the documents in the database with status "assigned", and "last_updated"
-     older than a specified threshold, and make a new "available" document for them."""
-     from dayhoff_tools.cli.swarm_commands import reset_zombie_cards
-
-     reset_zombie_cards(firestore_collection, delete_old, minutes_threshold)
-
-
- @app.command("status")
- def status_wrapper(
-     firestore_collection: str = typer.Argument(),
- ):
-     """Count the various statuses of items in a given collection."""
-     from dayhoff_tools.cli.swarm_commands import get_firestore_collection_status
-
-     get_firestore_collection_status(firestore_collection)
-
-
  # Deployment commands - use lazy loading but preserve argument passing
  @app.command("deploy")
  def deploy_command(
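Note on the deletions above: the removed swarm wrappers, like the deployment commands that remain, follow a lazy-loading idiom in which the Typer command is registered up front but the slow import happens only inside the command body. A minimal, self-contained sketch of that idiom is below; the `Counter`-based body, the fake items, and the file name in the usage comment are stand-ins of mine, not part of dayhoff-tools.

```python
import typer

app = typer.Typer()


@app.callback()
def cli():
    """Toy multi-command CLI illustrating lazily imported command bodies."""


@app.command("status")
def status_wrapper(firestore_collection: str = typer.Argument(...)):
    """Count the statuses of items in a collection (illustrative body only)."""
    # Deferring the import keeps `--help` and unrelated commands fast;
    # `collections.Counter` stands in for a slow-loading module here.
    from collections import Counter

    fake_items = [{"status": "available"}, {"status": "failed"}, {"status": "available"}]
    counts = Counter(item["status"] for item in fake_items)
    for status, count in counts.items():
        print(f"{firestore_collection}: {status} = {count}")


if __name__ == "__main__":
    app()  # e.g. `python lazy_cli.py status my-collection`
```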
dayhoff_tools/cli/utility_commands.py CHANGED
@@ -260,891 +260,3 @@ def build_and_upload_wheel(bump_part: str = "patch"):
              print(f"Warning: Failed to revert version change: {revert_e}")


- # --- Dependency Management Commands ---
-
-
- def sync_with_toml(
-     install_project: bool = typer.Option(
-         False,
-         "--install-project",
-         "-p",
-         help="Install the local project package itself (with 'full' extras) into the environment.",
-     ),
- ):
-     """Sync environment with platform-specific TOML manifest (install/update dependencies).
-
-     Behavior by platform:
-     - Workstation (STUDIO_PLATFORM=workstation) with pyproject.workstation.toml:
-       * Uses pip with constraints.txt to preserve NGC PyTorch
-       * Parses dependencies directly from pyproject.workstation.toml
-       * Installs into .venv_workstation with --system-site-packages
-     - Mac (STUDIO_PLATFORM=mac) with pyproject.mac.toml:
-       * Ensure `.mac_uv_project/pyproject.toml` is a copy of `pyproject.mac.toml`
-       * Run `uv lock` and `uv sync` in `.mac_uv_project` targeting active venv with `--active`
-       * If `install_project` is true, install the project from repo root into the active env (editable, [full])
-     - AWS (default) with pyproject.aws.toml:
-       * Uses UV in temp directory `.aws_uv_project` similar to Mac
-       * Run `uv lock` and `uv sync` targeting active venv
-     """
-     # ANSI color codes
-     BLUE = "\033[94m"
-     RESET = "\033[0m"
-
-     try:
-         platform = os.environ.get("STUDIO_PLATFORM", "aws")
-
-         # Workstation platform: use pip with constraints
-         if platform == "workstation" and Path("pyproject.workstation.toml").exists():
-             print(
-                 "Installing dependencies for workstation platform (using pip + constraints)..."
-             )
-
-             # Check for constraints.txt
-             if not Path("constraints.txt").exists():
-                 print(
-                     "Error: constraints.txt not found. Run direnv to generate it first."
-                 )
-                 sys.exit(1)
-
-             # Parse and install dependencies from pyproject.workstation.toml
-             import re
-
-             with open("pyproject.workstation.toml", "r") as f:
-                 content = f.read()
-
-             # Extract dependencies list using line-by-line parsing to handle [] in package names
-             lines = content.split("\n")
-             in_deps = False
-             deps_lines = []
-
-             for line in lines:
-                 if re.match(r"\s*dependencies\s*=\s*\[", line):
-                     in_deps = True
-                     continue
-                 if in_deps:
-                     if re.match(r"^\s*\]\s*$", line):
-                         break
-                     deps_lines.append(line)
-
-             deps = []
-             for line in deps_lines:
-                 line = line.strip()
-                 if line.startswith('"') or line.startswith("'"):
-                     dep = re.sub(r'["\']', "", line)
-                     dep = re.sub(r",?\s*#.*$", "", dep)
-                     dep = dep.strip().rstrip(",")
-                     if dep:
-                         deps.append(dep)
-
-             if deps:
-                 pip_cmd = (
-                     [sys.executable, "-m", "pip", "install"]
-                     + deps
-                     + ["-c", "constraints.txt"]
-                 )
-                 print(
-                     f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
-                 )
-                 subprocess.run(pip_cmd, check=True)
-
-             # Install dev dependencies using line-by-line parsing
-             in_dev_groups = False
-             in_dev_list = False
-             dev_lines = []
-
-             for line in lines:
-                 if re.match(r"\s*\[dependency-groups\]", line):
-                     in_dev_groups = True
-                     continue
-                 if in_dev_groups and re.match(r"\s*dev\s*=\s*\[", line):
-                     in_dev_list = True
-                     continue
-                 if in_dev_list:
-                     if re.match(r"^\s*\]\s*$", line):
-                         break
-                     dev_lines.append(line)
-
-             dev_deps = []
-             for line in dev_lines:
-                 line = line.strip()
-                 if line.startswith('"') or line.startswith("'"):
-                     dep = re.sub(r'["\']', "", line)
-                     dep = re.sub(r",?\s*#.*$", "", dep)
-                     dep = dep.strip().rstrip(",")
-                     if dep:
-                         dev_deps.append(dep)
-
-             if dev_deps:
-                 print("Installing dev dependencies...")
-                 pip_cmd = (
-                     [sys.executable, "-m", "pip", "install"]
-                     + dev_deps
-                     + ["-c", "constraints.txt"]
-                 )
-                 print(
-                     f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
-                 )
-                 subprocess.run(pip_cmd, check=True)
-
-             # Install project if requested
-             if install_project:
-                 repo_name = Path.cwd().name
-                 if repo_name == "dayhoff-tools":
-                     pip_cmd = [
-                         sys.executable,
-                         "-m",
-                         "pip",
-                         "install",
-                         "-e",
-                         ".[full]",
-                         "-c",
-                         "constraints.txt",
-                     ]
-                 else:
-                     pip_cmd = [
-                         sys.executable,
-                         "-m",
-                         "pip",
-                         "install",
-                         "-e",
-                         ".",
-                         "-c",
-                         "constraints.txt",
-                     ]
-                 print(f"Running command: {BLUE}{' '.join(pip_cmd)}{RESET}")
-                 subprocess.run(pip_cmd, check=True)
-
-             print("✅ Dependencies installed successfully (workstation)")
-             return
-
-         # Mac platform: use UV with pyproject.mac.toml
-         is_mac = platform == "mac"
-         mac_manifest = Path("pyproject.mac.toml")
-         if is_mac and mac_manifest.exists():
-             # Mac devcontainer flow
-             mac_uv_dir = Path(".mac_uv_project")
-             mac_uv_dir.mkdir(parents=True, exist_ok=True)
-             mac_pyproject = mac_uv_dir / "pyproject.toml"
-             mac_pyproject.write_text(mac_manifest.read_text())
-
-             # Copy README.md if it exists (required by some build backends)
-             if Path("README.md").exists():
-                 (mac_uv_dir / "README.md").write_text(Path("README.md").read_text())
-
-             # Ensure lock matches manifest (in mac temp dir)
-             print("Ensuring lock file matches pyproject.mac.toml (Mac devcon)…")
-             lock_cmd = ["uv", "lock"]
-             print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
-             subprocess.run(
-                 lock_cmd, check=True, capture_output=True, cwd=str(mac_uv_dir)
-             )
-
-             # Sync into the active environment
-             if install_project:
-                 print(
-                     "Syncing dependencies into ACTIVE env (project installed separately)…"
-                 )
-                 sync_cmd = [
-                     "uv",
-                     "sync",
-                     "--all-groups",
-                     "--no-install-project",
-                     "--active",
-                 ]
-                 print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-                 subprocess.run(sync_cmd, check=True, cwd=str(mac_uv_dir))
-
-                 # Install project from repo root (where source code actually is)
-                 # Temporarily create pyproject.toml at repo root for UV
-                 print("Installing project with 'full' extras from repo root…")
-                 temp_pyproject = False
-                 backup_created = False
-                 try:
-                     if not Path("pyproject.toml").exists():
-                         # Create temp pyproject.toml from platform manifest
-                         Path("pyproject.toml").write_text(mac_manifest.read_text())
-                         temp_pyproject = True
-                     elif Path("pyproject.toml").is_symlink():
-                         # Backup existing symlink
-                         Path("pyproject.toml").rename("pyproject.toml.sync.bak")
-                         Path("pyproject.toml").write_text(mac_manifest.read_text())
-                         backup_created = True
-
-                     pip_install_cmd = ["uv", "pip", "install", "-e", ".[full]"]
-                     print(f"Running command: {BLUE}{' '.join(pip_install_cmd)}{RESET}")
-                     subprocess.run(pip_install_cmd, check=True)
-                     print("Project installed with 'full' extras successfully.")
-                 finally:
-                     # Clean up temp pyproject.toml
-                     if temp_pyproject and Path("pyproject.toml").exists():
-                         Path("pyproject.toml").unlink()
-                     if backup_created and Path("pyproject.toml.sync.bak").exists():
-                         Path("pyproject.toml.sync.bak").rename("pyproject.toml")
-             else:
-                 print("Syncing dependencies into ACTIVE env (project not installed)…")
-                 sync_cmd = [
-                     "uv",
-                     "sync",
-                     "--all-groups",
-                     "--no-install-project",
-                     "--active",
-                 ]
-                 print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-                 subprocess.run(sync_cmd, check=True, cwd=str(mac_uv_dir))
-                 print("Dependencies synced successfully (project not installed).")
-         else:
-             # AWS platform (or fallback): use UV with pyproject.aws.toml
-             aws_manifest = Path("pyproject.aws.toml")
-             if aws_manifest.exists():
-                 # AWS devcontainer flow (similar to Mac)
-                 aws_uv_dir = Path(".aws_uv_project")
-                 aws_uv_dir.mkdir(parents=True, exist_ok=True)
-                 aws_pyproject = aws_uv_dir / "pyproject.toml"
-                 aws_pyproject.write_text(aws_manifest.read_text())
-
-                 # Copy README.md if it exists (required by some build backends)
-                 if Path("README.md").exists():
-                     (aws_uv_dir / "README.md").write_text(Path("README.md").read_text())
-
-                 # Ensure lock matches manifest (in aws temp dir)
-                 print("Ensuring lock file matches pyproject.aws.toml (AWS devcon)…")
-                 lock_cmd = ["uv", "lock"]
-                 print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
-                 subprocess.run(
-                     lock_cmd, check=True, capture_output=True, cwd=str(aws_uv_dir)
-                 )
-
-                 # Sync into the active environment
-                 if install_project:
-                     print(
-                         "Syncing dependencies into ACTIVE env (project installed separately)…"
-                     )
-                     sync_cmd = [
-                         "uv",
-                         "sync",
-                         "--all-groups",
-                         "--no-install-project",
-                         "--active",
-                     ]
-                     print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-                     subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
-
-                     # Install project from repo root (where source code actually is)
-                     # Temporarily create pyproject.toml at repo root for UV
-                     print("Installing project with 'full' extras from repo root…")
-                     temp_pyproject = False
-                     backup_created = False
-                     try:
-                         if not Path("pyproject.toml").exists():
-                             # Create temp pyproject.toml from platform manifest
-                             Path("pyproject.toml").write_text(aws_manifest.read_text())
-                             temp_pyproject = True
-                         elif Path("pyproject.toml").is_symlink():
-                             # Backup existing symlink
-                             Path("pyproject.toml").rename("pyproject.toml.sync.bak")
-                             Path("pyproject.toml").write_text(aws_manifest.read_text())
-                             backup_created = True
-
-                         pip_install_cmd = ["uv", "pip", "install", "-e", ".[full]"]
-                         print(
-                             f"Running command: {BLUE}{' '.join(pip_install_cmd)}{RESET}"
-                         )
-                         subprocess.run(pip_install_cmd, check=True)
-                         print("Project installed with 'full' extras successfully.")
-                     finally:
-                         # Clean up temp pyproject.toml
-                         if temp_pyproject and Path("pyproject.toml").exists():
-                             Path("pyproject.toml").unlink()
-                         if backup_created and Path("pyproject.toml.sync.bak").exists():
-                             Path("pyproject.toml.sync.bak").rename("pyproject.toml")
-                 else:
-                     print(
-                         "Syncing dependencies into ACTIVE env (project not installed)…"
-                     )
-                     sync_cmd = [
-                         "uv",
-                         "sync",
-                         "--all-groups",
-                         "--no-install-project",
-                         "--active",
-                     ]
-                     print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-                     subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
-                     print("Dependencies synced successfully (project not installed).")
-             else:
-                 print(
-                     "Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
-                 )
-                 sys.exit(1)
-
-     except subprocess.CalledProcessError as e:
-         stderr_output = e.stderr.decode() if e.stderr else "No stderr output."
-         print(f"Error occurred during dependency installation/sync: {e}")
-         print(f"Stderr: {stderr_output}")
-         if "NoSolution" in stderr_output:
-             print(
-                 "\nHint: Could not find a compatible set of dependencies. Check constraints in pyproject.toml."
-             )
-         sys.exit(1)
-     except FileNotFoundError:
-         print("Error: 'uv' command not found. Is uv installed and in PATH?")
-         sys.exit(1)
-     except Exception as e:
-         print(f"An unexpected error occurred: {e}")
-         sys.exit(1)
-
-
- def _get_all_platform_manifests():
-     """Get list of all platform manifests that exist."""
-     manifest_files = []
-     for fname in [
-         "pyproject.aws.toml",
-         "pyproject.mac.toml",
-         "pyproject.workstation.toml",
-     ]:
-         if Path(fname).exists():
-             manifest_files.append(Path(fname))
-     return manifest_files
-
-
- def _resolve_package_version(package_name: str) -> str | None:
-     """Resolve a package version by running uv lock and parsing the lock file.
-
-     Args:
-         package_name: Name of the package to resolve
-
-     Returns:
-         Resolved version string, or None if resolution failed
-     """
-     import os
-
-     try:
-         # Determine which manifest to use (prefer Mac, then AWS)
-         platform = os.environ.get("STUDIO_PLATFORM", "aws")
-         manifest_path = None
-         uv_dir = None
-
-         if platform == "mac" and Path("pyproject.mac.toml").exists():
-             manifest_path = Path("pyproject.mac.toml")
-             uv_dir = Path(".mac_uv_project")
-         elif Path("pyproject.aws.toml").exists():
-             manifest_path = Path("pyproject.aws.toml")
-             uv_dir = Path(".aws_uv_project")
-         elif Path("pyproject.mac.toml").exists():
-             # Fallback to Mac if AWS doesn't exist
-             manifest_path = Path("pyproject.mac.toml")
-             uv_dir = Path(".mac_uv_project")
-         else:
-             return None
-
-         # Create temp directory and copy manifest
-         uv_dir.mkdir(parents=True, exist_ok=True)
-         (uv_dir / "pyproject.toml").write_text(manifest_path.read_text())
-
-         # Copy README if it exists
-         if Path("README.md").exists():
-             (uv_dir / "README.md").write_text(Path("README.md").read_text())
-
-         # Run uv lock (suppress output)
-         subprocess.run(["uv", "lock"], cwd=str(uv_dir), check=True, capture_output=True)
-
-         # Parse lock file
-         lock_file = uv_dir / "uv.lock"
-         if not lock_file.exists():
-             return None
-
-         lock_data = toml.load(lock_file)
-         for package in lock_data.get("package", []):
-             if package.get("name") == package_name:
-                 return package.get("version")
-
-         return None
-
-     except Exception as e:
-         print(f"Warning: Failed to resolve version: {e}")
-         return None
-
-
- def _update_all_manifests_for_dayhoff_tools(new_version: str):
-     """Update dayhoff-tools constraint in all platform manifests."""
-     import re
-
-     manifest_files = _get_all_platform_manifests()
-
-     if not manifest_files:
-         print("Warning: No platform manifests found to update.")
-         return
-
-     package_name = "dayhoff-tools"
-     package_name_esc = re.escape(package_name)
-
-     # Regex to match the dependency line, with optional extras and version spec
-     pattern = re.compile(
-         rf"^(\s*['\"]){package_name_esc}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
-         re.MULTILINE,
-     )
-
-     new_constraint_text = f">={new_version}"
-
-     def _repl(match: re.Match):
-         prefix = match.group(1)
-         extras = match.group(2) or ""
-         suffix = match.group(3)
-         return f"{prefix}{package_name}{extras}{new_constraint_text}{suffix}"
-
-     # Update all manifest files
-     for manifest_file in manifest_files:
-         try:
-             print(f"Updating {manifest_file} version constraint...")
-             content = manifest_file.read_text()
-             new_content, num_replacements = pattern.subn(_repl, content)
-             if num_replacements > 0:
-                 manifest_file.write_text(new_content)
-                 print(
-                     f"Updated dayhoff-tools constraint in {manifest_file} to '{new_constraint_text}'"
-                 )
-             else:
-                 print(
-                     f"Warning: Could not find dayhoff-tools dependency line in {manifest_file}"
-                 )
-         except Exception as e:
-             print(f"Error updating {manifest_file}: {e}")
-
-
- def add_dependency(
-     package: str,
-     dev: bool = typer.Option(
-         False, "--dev", "-d", help="Add to dev dependencies instead of main."
-     ),
- ):
-     """Add a dependency to all platform-specific manifests.
-
-     Args:
-         package: Package specification (e.g., "numpy>=1.24.0" or "pandas")
-         dev: If True, add to [dependency-groups] dev instead of [project] dependencies
-     """
-     import re
-
-     # ANSI color codes
-     BLUE = "\033[94m"
-     RESET = "\033[0m"
-
-     manifest_files = _get_all_platform_manifests()
-
-     if not manifest_files:
-         print(
-             "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
-         )
-         sys.exit(1)
-
-     # Determine section to add to
-     section_name = "dev dependencies" if dev else "main dependencies"
-     print(f"Adding '{package}' to {section_name} in all platform manifests...")
-
-     # Parse package name to check for duplicates and version specs
-     package_name = re.split(r"[<>=~!\[]", package)[0].strip()
-     has_version_spec = any(c in package for c in ["<", ">", "=", "~", "!"])
-
-     added_count = 0
-
-     for manifest_file in manifest_files:
-         try:
-             content = manifest_file.read_text()
-
-             # Check if package already exists
-             existing_check = re.search(
-                 rf'^(\s*["\']){re.escape(package_name)}[<>=~!\[]',
-                 content,
-                 re.MULTILINE,
-             )
-             if existing_check:
-                 print(
-                     f"⚠️ Package '{package_name}' already exists in {manifest_file}, skipping"
-                 )
-                 continue
-
-             if dev:
-                 # Add to [dependency-groups] dev section
-                 # Use line-by-line parsing to handle [] in dependency names like dayhoff-tools[full]
-                 lines = content.split("\n")
-                 in_dev_groups = False
-                 in_dev_list = False
-                 dev_start_idx = None
-                 dev_end_idx = None
-
-                 for idx, line in enumerate(lines):
-                     if re.match(r"\s*\[dependency-groups\]", line):
-                         in_dev_groups = True
-                         continue
-                     if in_dev_groups and re.match(r"\s*dev\s*=\s*\[", line):
-                         in_dev_list = True
-                         dev_start_idx = idx
-                         continue
-                     if in_dev_list and re.match(r"^\s*\]\s*$", line):
-                         dev_end_idx = idx
-                         break
-
-                 if dev_start_idx is None or dev_end_idx is None:
-                     print(
-                         f"Warning: Could not find [dependency-groups] dev section in {manifest_file}"
-                     )
-                     continue
-
-                 # Insert new dependency before the closing ]
-                 new_dep = f' "{package}",'
-                 lines.insert(dev_end_idx, new_dep)
-                 new_content = "\n".join(lines)
-             else:
-                 # Add to [project] dependencies section
-                 # Use line-by-line parsing to handle [] in dependency names like dayhoff-tools[full]
-                 lines = content.split("\n")
-                 in_deps = False
-                 deps_start_idx = None
-                 deps_end_idx = None
-
-                 for idx, line in enumerate(lines):
-                     if re.match(r"\s*dependencies\s*=\s*\[", line):
-                         in_deps = True
-                         deps_start_idx = idx
-                         continue
-                     if in_deps and re.match(r"^\s*\]\s*$", line):
-                         deps_end_idx = idx
-                         break
-
-                 if deps_start_idx is None or deps_end_idx is None:
-                     print(
-                         f"Warning: Could not find dependencies section in {manifest_file}"
-                     )
-                     continue
-
-                 # Insert new dependency before the closing ]
-                 new_dep = f' "{package}",'
-                 lines.insert(deps_end_idx, new_dep)
-                 new_content = "\n".join(lines)
-
-             manifest_file.write_text(new_content)
-             print(f"✅ Added '{package}' to {manifest_file}")
-             added_count += 1
-
-         except Exception as e:
-             print(f"Error updating {manifest_file}: {e}")
-
-     # If nothing was added, exit early
-     if added_count == 0:
-         print(f"\n⚠️ Package '{package_name}' already exists in all manifests")
-         return
-
-     print(f"\n✅ Added '{package}' to {added_count} platform manifest(s)")
-
-     # If no version specified, resolve and add version constraint
-     if not has_version_spec:
-         print(f"\n🔍 Resolving version for '{package_name}'...")
-         resolved_version = _resolve_package_version(package_name)
-
-         if resolved_version:
-             print(f"📌 Resolved to version {resolved_version}")
-             print(
-                 f"Updating manifests with version constraint '>={resolved_version}'..."
-             )
-
-             # Update all manifests to add version constraint
-             for manifest_file in manifest_files:
-                 try:
-                     content = manifest_file.read_text()
-                     # Replace unversioned package with versioned one
-                     pattern = re.compile(
-                         rf'^(\s*["\']){re.escape(package_name)}(["\'],?)(.*)$',
-                         re.MULTILINE,
-                     )
-
-                     def replace_with_version(match):
-                         prefix = match.group(1)
-                         suffix = match.group(2)
-                         rest = match.group(3)
-                         return (
-                             f"{prefix}{package_name}>={resolved_version}{suffix}{rest}"
-                         )
-
-                     new_content = pattern.sub(replace_with_version, content)
-                     manifest_file.write_text(new_content)
-                     print(f"✅ Updated {manifest_file} with version constraint")
-                 except Exception as e:
-                     print(f"Warning: Could not update version in {manifest_file}: {e}")
-
-             print(
-                 f"\n✅ Added '{package_name}>={resolved_version}' to {added_count} platform manifest(s)"
-             )
-         else:
-             print(
-                 f"⚠️ Could not resolve version for '{package_name}', left unversioned"
-             )
-
-     print(
-         f"\nRun {BLUE}dh tomlsync{RESET} to install the new dependency in your environment."
-     )
-
-
- def remove_dependency(
-     package: str,
-     dev: bool = typer.Option(
-         False, "--dev", "-d", help="Remove from dev dependencies instead of main."
-     ),
- ):
-     """Remove a dependency from all platform-specific manifests.
-
-     Args:
-         package: Package name (e.g., "numpy" or "pandas")
-         dev: If True, remove from [dependency-groups] dev instead of [project] dependencies
-     """
-     import re
-
-     # ANSI color codes
-     BLUE = "\033[94m"
-     RESET = "\033[0m"
-
-     manifest_files = _get_all_platform_manifests()
-
-     if not manifest_files:
-         print(
-             "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
-         )
-         sys.exit(1)
-
-     section_name = "dev dependencies" if dev else "main dependencies"
-     print(f"Removing '{package}' from {section_name} in all platform manifests...")
-
-     # Escape package name for regex
-     package_esc = re.escape(package)
-
-     removed_count = 0
-     for manifest_file in manifest_files:
-         try:
-             content = manifest_file.read_text()
-
-             # Pattern to match the dependency line (with optional version spec)
-             # Matches: "package...", or "package...",\n (including the newline)
-             pattern = re.compile(
-                 rf'^\s*["\']({package_esc}[<>=~!\[].+?|{package_esc})["\'],?\s*(?:#.*)?$\n?',
-                 re.MULTILINE,
-             )
-
-             new_content, num_removed = pattern.subn("", content)
-
-             if num_removed > 0:
-                 # Clean up any consecutive blank lines (more than one)
-                 new_content = re.sub(r"\n\n\n+", "\n\n", new_content)
-                 # Also clean up trailing whitespace on lines
-                 new_content = re.sub(r"[ \t]+$", "", new_content, flags=re.MULTILINE)
-                 manifest_file.write_text(new_content)
-                 print(f"✅ Removed '{package}' from {manifest_file}")
-                 removed_count += 1
-             else:
-                 print(f"⚠️ Package '{package}' not found in {manifest_file}")
-
-         except Exception as e:
-             print(f"Error updating {manifest_file}: {e}")
-
-     if removed_count > 0:
-         print(f"\n✅ Removed '{package}' from {removed_count} platform manifest(s)")
-         print(
-             f"\nRun {BLUE}dh tomlsync{RESET} to uninstall the dependency from your environment."
-         )
-     else:
-         print(f"\n⚠️ Package '{package}' was not found in any manifests")
-
-
- def update_dependencies(
-     update_all: bool = typer.Option(
-         False,
-         "--all",
-         "-a",
-         help="Update all dependencies instead of just dayhoff-tools.",
-     ),
- ):
-     """Update dependencies to newer versions (platform-aware).
-
-     - Default Action (no flags): Updates only 'dayhoff-tools' package to latest,
-       updates ALL manifest files with the version constraint, and syncs.
-     - Flags:
-       --all/-a: Updates all dependencies and syncs.
-
-     Cross-platform behavior:
-     - Workstation: Uses pip to upgrade packages, regenerates constraints.txt
-     - Mac/AWS: Uses UV with platform-specific manifests (.mac_uv_project or .aws_uv_project)
-     - Always updates ALL platform manifests (pyproject.aws.toml, pyproject.mac.toml,
-       pyproject.workstation.toml) to ensure version consistency
-     """
-     # ANSI color codes
-     BLUE = "\033[94m"
-     RESET = "\033[0m"
-
-     platform = os.environ.get("STUDIO_PLATFORM", "aws")
-
-     # Workstation platform: use pip upgrade
-     if platform == "workstation" and Path("pyproject.workstation.toml").exists():
-         print("Updating dependencies for workstation platform (using pip)...")
-
-         if update_all:
-             print("Error: --all flag not supported on workstation platform yet.")
-             print("Use 'pip install --upgrade <package>' manually for now.")
-             sys.exit(1)
-
-         # Update dayhoff-tools only (default behavior)
-         print("Upgrading dayhoff-tools to latest version...")
-         upgrade_cmd = [
-             sys.executable,
-             "-m",
-             "pip",
-             "install",
-             "--upgrade",
-             "dayhoff-tools[full]",
-         ]
-         print(f"Running command: {BLUE}{' '.join(upgrade_cmd)}{RESET}")
-         subprocess.run(upgrade_cmd, check=True)
-
-         # Get new version
-         result = subprocess.run(
-             [sys.executable, "-m", "pip", "show", "dayhoff-tools"],
-             capture_output=True,
-             text=True,
-             check=True,
-         )
-         version_line = [
-             l for l in result.stdout.split("\n") if l.startswith("Version:")
-         ]
-         if version_line:
-             new_version = version_line[0].split(":", 1)[1].strip()
-             print(f"Updated to dayhoff-tools {new_version}")
-
-             # Update all platform manifests with new constraint
-             _update_all_manifests_for_dayhoff_tools(new_version)
-
-         print("✅ Dependencies updated successfully (workstation)")
-         return
-
-     # Mac/AWS platforms: use UV
-     mac_manifest = Path("pyproject.mac.toml")
-     aws_manifest = Path("pyproject.aws.toml")
-     mac_uv_dir = Path(".mac_uv_project")
-     aws_uv_dir = Path(".aws_uv_project")
-     lock_file_path = Path("uv.lock")
-
-     # Determine action based on flags
-     lock_cmd = ["uv", "lock"]
-     action_description = ""
-     run_pyproject_update = False
-
-     if update_all:
-         lock_cmd.append("--upgrade")
-         action_description = (
-             "Updating lock file for all dependencies to latest versions..."
-         )
-     else:  # Default behavior: update dayhoff-tools
-         lock_cmd.extend(["--upgrade-package", "dayhoff-tools"])
-         action_description = (
-             "Updating dayhoff-tools lock and pyproject.toml (default behavior)..."
-         )
-         run_pyproject_update = (
-             True  # Only update pyproject if we are doing the dayhoff update
-         )
-
-     try:
-         # Choose working directory for uv operations based on platform
-         uv_cwd = None
-         manifest_path_for_constraint = None
-
-         if platform == "mac" and mac_manifest.exists():
-             mac_uv_dir.mkdir(parents=True, exist_ok=True)
-             (mac_uv_dir / "pyproject.toml").write_text(mac_manifest.read_text())
-             # Copy README.md if it exists (required by some build backends)
-             if Path("README.md").exists():
-                 (mac_uv_dir / "README.md").write_text(Path("README.md").read_text())
-             uv_cwd = str(mac_uv_dir)
-             lock_file_path = mac_uv_dir / "uv.lock"
-             manifest_path_for_constraint = mac_manifest
-         elif aws_manifest.exists():
-             # AWS platform (default)
-             aws_uv_dir.mkdir(parents=True, exist_ok=True)
-             (aws_uv_dir / "pyproject.toml").write_text(aws_manifest.read_text())
-             # Copy README.md if it exists (required by some build backends)
-             if Path("README.md").exists():
-                 (aws_uv_dir / "README.md").write_text(Path("README.md").read_text())
-             uv_cwd = str(aws_uv_dir)
-             lock_file_path = aws_uv_dir / "uv.lock"
-             manifest_path_for_constraint = aws_manifest
-         else:
-             print(
-                 "Error: No platform-specific manifest found (pyproject.aws.toml or pyproject.mac.toml)"
-             )
-             sys.exit(1)
-
-         # Step 1: Run the update lock command
-         print(action_description)
-         print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
-         subprocess.run(lock_cmd, check=True, capture_output=True, cwd=uv_cwd)
-
-         # Step 2: Update both manifest files if doing the dayhoff update (default)
-         if run_pyproject_update:
-             print(f"Reading {lock_file_path} to find new dayhoff-tools version...")
-             if not lock_file_path.exists():
-                 print(f"Error: {lock_file_path} not found after lock command.")
-                 return
-             locked_version = None
-             try:
-                 lock_data = toml.load(lock_file_path)
-                 for package in lock_data.get("package", []):
-                     if package.get("name") == "dayhoff-tools":
-                         locked_version = package.get("version")
-                         break
-             except toml.TomlDecodeError as e:
-                 print(f"Error parsing {lock_file_path}: {e}")
-                 return
-             except Exception as e:
-                 print(f"Error reading lock file: {e}")
-                 return
-
-             if not locked_version:
-                 print(
-                     f"Error: Could not find dayhoff-tools version in {lock_file_path}."
-                 )
-                 return
-
-             print(f"Found dayhoff-tools version {locked_version} in lock file.")
-
-             # Update all platform manifest files to ensure consistency
-             _update_all_manifests_for_dayhoff_tools(locked_version)
-
-         # Step 3: Sync environment
-         print("Syncing environment with updated lock file...")
-         # Always use --no-install-project for updates
-         sync_cmd = ["uv", "sync", "--all-groups", "--no-install-project", "--active"]
-         print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-         subprocess.run(sync_cmd, check=True, cwd=uv_cwd)
-
-         # Final status message
-         if update_all:
-             print("All dependencies updated and environment synced successfully.")
-         else:  # Default case (dayhoff update)
-             print(
-                 "dayhoff-tools updated, manifest files modified, and environment synced successfully."
-             )
-
-     except subprocess.CalledProcessError as e:
-         stderr_output = e.stderr.decode() if e.stderr else "No stderr output."
-         print(f"Error occurred during dependency update/sync: {e}")
-         print(f"Stderr: {stderr_output}")
-         if "NoSolution" in stderr_output:
-             print(
-                 "\nHint: Could not find a compatible set of dependencies. Check constraints in pyproject.toml."
-             )
-         elif "unrecognized arguments: --upgrade" in stderr_output:
-             print(
-                 "\nHint: Your version of 'uv' might be too old to support '--upgrade'. Try updating uv."
-             )
-         sys.exit(1)
-     except FileNotFoundError:
-         print("Error: 'uv' command not found. Is uv installed and in PATH?")
-         sys.exit(1)
-     except Exception as e:
-         print(f"An unexpected error occurred: {e}")
-         sys.exit(1)
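The removed `sync_with_toml` and `add_dependency` above deliberately walk the manifest text line by line instead of handing it to a TOML library, so that extras brackets such as `dayhoff-tools[full]` inside a `dependencies = [...]` block cannot be mistaken for the end of the list. A standalone sketch of that parsing step follows; the sample manifest text is illustrative and not copied from the real pyproject.aws.toml.

```python
import re


def parse_dependencies(manifest_text: str) -> list[str]:
    """Extract entries from a `dependencies = [...]` block, line by line."""
    deps: list[str] = []
    in_deps = False
    for line in manifest_text.split("\n"):
        if re.match(r"\s*dependencies\s*=\s*\[", line):
            in_deps = True
            continue
        if in_deps:
            if re.match(r"^\s*\]\s*$", line):
                break  # closing bracket of the dependencies list
            line = line.strip()
            if line.startswith(('"', "'")):
                dep = re.sub(r'["\']', "", line)      # drop quotes
                dep = re.sub(r",?\s*#.*$", "", dep)   # drop trailing comments
                dep = dep.strip().rstrip(",")
                if dep:
                    deps.append(dep)
    return deps


# Illustrative manifest snippet:
sample = """
[project]
dependencies = [
    "dayhoff-tools[full]>=1.14.0",  # extras brackets survive this parser
    "typer>=0.9",
]
"""
print(parse_dependencies(sample))
# ['dayhoff-tools[full]>=1.14.0', 'typer>=0.9']
```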
dayhoff_tools-1.14.15.dist-info/METADATA → dayhoff_tools-1.15.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dayhoff-tools
- Version: 1.14.15
+ Version: 1.15.0
  Summary: Common tools for all the repos at Dayhoff Labs
  Author: Daniel Martin-Alarcon
  Author-email: dma@dayhofflabs.com
dayhoff_tools-1.14.15.dist-info/RECORD → dayhoff_tools-1.15.0.dist-info/RECORD
@@ -48,9 +48,9 @@ dayhoff_tools/cli/engines_studios/simulators/studio_status_simulator.py,sha256=6
  dayhoff_tools/cli/engines_studios/ssh_config.py,sha256=UCv-jf_zSuk7FUStkCQBAJz1QkxiSEwZbdSrwt_9SMU,2932
  dayhoff_tools/cli/engines_studios/studio_commands.py,sha256=KGSNZQS8MmM_DfQzT9SRZvuR3OK6NdIdOrqI2wJFyes,25984
  dayhoff_tools/cli/github_commands.py,sha256=pfrxI68LObGm_gtPlQN-gHPahHV4l9k9T4GqO99NNL0,8948
- dayhoff_tools/cli/main.py,sha256=6ffnaFzui-bVd1ME7yThk_ZrMOofwStamEUkkYlminY,8503
+ dayhoff_tools/cli/main.py,sha256=E4UXtOBRRAhcrJ3RgLCQFpJdamFN-p9xViMkX45kNgw,6456
  dayhoff_tools/cli/swarm_commands.py,sha256=5EyKj8yietvT5lfoz8Zx0iQvVaNgc3SJX1z2zQR6o6M,5614
- dayhoff_tools/cli/utility_commands.py,sha256=O6vy3rONTeuPYZyhjnFeqf8GxUlyc7i2O11d1s3shH4,45513
+ dayhoff_tools/cli/utility_commands.py,sha256=IMFd20OZrl0vIGL7Xrg3OoHNxOhG5KdeeieAUctolUk,9834
  dayhoff_tools/deployment/base.py,sha256=uZnFvnPQx6pH_HmJbdThweAs3BrxMaDohpE3iX_-yk4,18377
  dayhoff_tools/deployment/deploy_aws.py,sha256=1j16aE4hmln4pQVtcSGuIGVWbOBfWwveytvihjofADo,21519
  dayhoff_tools/deployment/deploy_gcp.py,sha256=xgaOVsUDmP6wSEMYNkm1yRNcVskfdz80qJtCulkBIAM,8860
@@ -71,7 +71,7 @@ dayhoff_tools/intake/uniprot.py,sha256=BZYJQF63OtPcBBnQ7_P9gulxzJtqyorgyuDiPeOJq
  dayhoff_tools/logs.py,sha256=DKdeP0k0kliRcilwvX0mUB2eipO5BdWUeHwh-VnsICs,838
  dayhoff_tools/sqlite.py,sha256=jV55ikF8VpTfeQqqlHSbY8OgfyfHj8zgHNpZjBLos_E,18672
  dayhoff_tools/warehouse.py,sha256=UETBtZD3r7WgvURqfGbyHlT7cxoiVq8isjzMuerKw8I,24475
- dayhoff_tools-1.14.15.dist-info/METADATA,sha256=YBP4lpBDAIhxYuJ65DR_ADrajk6hyLC1e_UCVc_bwx8,3136
- dayhoff_tools-1.14.15.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
- dayhoff_tools-1.14.15.dist-info/entry_points.txt,sha256=iAf4jteNqW3cJm6CO6czLxjW3vxYKsyGLZ8WGmxamSc,49
- dayhoff_tools-1.14.15.dist-info/RECORD,,
+ dayhoff_tools-1.15.0.dist-info/METADATA,sha256=YvxzOOhTeXVfO3NVsZ16ciDQME_kK3YYaNwbRKkIBso,3135
+ dayhoff_tools-1.15.0.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
+ dayhoff_tools-1.15.0.dist-info/entry_points.txt,sha256=iAf4jteNqW3cJm6CO6czLxjW3vxYKsyGLZ8WGmxamSc,49
+ dayhoff_tools-1.15.0.dist-info/RECORD,,
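Each RECORD entry above has the form `path,sha256=<digest>,<size>`, where the digest is an unpadded urlsafe base64 SHA-256, so the shrinking of main.py (8503 to 6456 bytes) and utility_commands.py (45513 to 9834 bytes) is visible directly in the size column. A small sketch for reproducing such an entry from a locally extracted wheel; the path in the commented example is illustrative.

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Build a wheel RECORD line: path,sha256=<urlsafe-b64 digest>,size."""
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    # RECORD hashes are urlsafe base64 with the '=' padding stripped.
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"{path},sha256={encoded},{len(data)}"


# Example (run from the root of an unpacked wheel):
# print(record_entry("dayhoff_tools/cli/main.py"))
```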