dayhoff-tools 1.11.1__py3-none-any.whl → 1.11.2__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in a supported public registry. It is provided for informational purposes only.
- dayhoff_tools/cli/main.py +10 -4
- dayhoff_tools/cli/utility_commands.py +560 -145
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/METADATA +1 -1
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/RECORD +6 -6
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/WHEEL +0 -0
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/entry_points.txt +0 -0
dayhoff_tools/cli/main.py
CHANGED
```diff
@@ -4,13 +4,14 @@ import sys
 from importlib.metadata import PackageNotFoundError, version
 
 import typer
-
 from dayhoff_tools.cli.cloud_commands import aws_app, gcp_app
 from dayhoff_tools.cli.engine import engine_app, studio_app
 from dayhoff_tools.cli.utility_commands import (
+    add_dependency,
     build_and_upload_wheel,
     delete_local_branch,
-
+    remove_dependency,
+    sync_with_toml,
     test_github_actions_locally,
     update_dependencies,
 )
```
```diff
@@ -45,8 +46,13 @@ app.command("clean")(delete_local_branch)
 
 # Dependency Management
 app.command(
-    "
-
+    "tomlsync",
+    help="Sync environment with platform-specific TOML manifest (install/update dependencies).",
+)(sync_with_toml)
+app.command("add", help="Add a dependency to all platform manifests.")(add_dependency)
+app.command("remove", help="Remove a dependency from all platform manifests.")(
+    remove_dependency
+)
 app.command("update", help="Update dayhoff-tools (or all deps) and sync environment.")(
     update_dependencies
 )
```
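The net effect of these two hunks on the CLI surface: the old `install` command is replaced by `tomlsync`, and new `add` and `remove` commands are registered alongside `update`. A minimal sketch of how the new surface could be exercised with Typer's test runner instead of a shell (assumptions: the installed console script is `dh`, as the help text later in this diff suggests, and `app` is the module-level Typer instance in `dayhoff_tools.cli.main`):

```python
# Hypothetical usage sketch, not part of the package: print the help of the new
# subcommands, equivalent to running `dh add --help`, `dh remove --help`, and
# `dh tomlsync --help` from a shell.
from typer.testing import CliRunner

from dayhoff_tools.cli.main import app

runner = CliRunner()
for args in (["add", "--help"], ["remove", "--help"], ["tomlsync", "--help"]):
    result = runner.invoke(app, args)
    print(result.output)
```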
dayhoff_tools/cli/utility_commands.py
CHANGED
```diff
@@ -110,8 +110,24 @@ def build_and_upload_wheel(bump_part: str = "patch"):
         publish_cmd = ["uv", "publish", "--token", token]
         print("Using UV_PUBLISH_TOKEN for authentication.")
 
-
-
+    # Find the primary manifest (prefer AWS, then Mac, then Workstation)
+    pyproject_path = None
+    for candidate in [
+        "pyproject.aws.toml",
+        "pyproject.mac.toml",
+        "pyproject.workstation.toml",
+    ]:
+        if Path(candidate).exists():
+            pyproject_path = candidate
+            break
+
+    if not pyproject_path:
+        print(
+            "Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+        )
+        return
+
+    print(f"Using manifest: {pyproject_path}")
     current_version = None  # Initialize in case the first try block fails
 
     try:
```
```diff
@@ -174,32 +190,54 @@ def build_and_upload_wheel(bump_part: str = "patch"):
             f.write(new_content)
         print(f"Updated {pyproject_path} with version {new_version}")
 
-        # Mirror version in
-
-
-
-
-
-
+        # Mirror version in all other platform manifests (best-effort)
+        other_manifests = []
+        for candidate in [
+            "pyproject.aws.toml",
+            "pyproject.mac.toml",
+            "pyproject.workstation.toml",
+        ]:
+            if Path(candidate).exists() and candidate != pyproject_path:
+                other_manifests.append(Path(candidate))
+
+        for manifest_path in other_manifests:
+            try:
+                content = manifest_path.read_text()
+                pattern = re.compile(
                     f'^version\s*=\s*"{re.escape(current_version)}"', re.MULTILINE
                 )
-
-                    f'version = "{new_version}"',
+                new_content, replacements = pattern.subn(
+                    f'version = "{new_version}"', content
                 )
-                if
-
-
-
-
-                print(f"Warning: Could not update {mac_manifest_path}: {e}")
+                if replacements > 0:
+                    manifest_path.write_text(new_content)
+                    print(f"Updated {manifest_path} with version {new_version}")
+            except Exception as e:
+                print(f"Warning: Could not update {manifest_path}: {e}")
         # --- End Version Bumping Logic ---
 
     # Build wheel and sdist
+    # UV expects pyproject.toml, so temporarily copy the platform manifest
+    backup_created = False
+    if pyproject_path != "pyproject.toml":
+        if Path("pyproject.toml").exists():
+            Path("pyproject.toml").rename("pyproject.toml.build.bak")
+            backup_created = True
+        Path(pyproject_path).read_text()
+        with open("pyproject.toml", "w") as f:
+            f.write(Path(pyproject_path).read_text())
+
     build_cmd = ["uv", "build"]
     # Print command in blue
     print(f"Running command: {BLUE}{' '.join(build_cmd)}{RESET}")
     subprocess.run(build_cmd, check=True)
 
+    # Restore original state
+    if pyproject_path != "pyproject.toml":
+        Path("pyproject.toml").unlink()
+        if backup_created:
+            Path("pyproject.toml.build.bak").rename("pyproject.toml")
+
     # Upload using uv publish with explicit arguments
     # Print masked command in blue
     print(f"Running command: {BLUE}{' '.join(publish_cmd_safe_print)}{RESET}")
```
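The version mirroring above leans on a single line-anchored regex rather than a TOML parser, and only writes a manifest when `subn` reports at least one replacement. A self-contained illustration of that pattern (the manifest text here is made up for the example):

```python
# Illustration only: how a `^version = "..."` regex rewrites a platform manifest.
# re.MULTILINE makes ^ match at each line start; subn reports how many
# replacements were made (0 means the file is left alone).
import re

current_version, new_version = "1.11.1", "1.11.2"
manifest_text = 'name = "demo"\nversion = "1.11.1"\nrequires-python = ">=3.10"\n'

pattern = re.compile(rf'^version\s*=\s*"{re.escape(current_version)}"', re.MULTILINE)
updated, n = pattern.subn(f'version = "{new_version}"', manifest_text)
print(n)        # 1
print(updated)  # the version line now reads: version = "1.11.2"
```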
```diff
@@ -260,23 +298,25 @@ def build_and_upload_wheel(bump_part: str = "patch"):
                     f"Warning: Could not find version {new_version} to revert in {pyproject_path}."
                 )
 
-            # Also revert
-
-
-
-
-
-
-
-
-
+            # Also revert other platform manifests
+            for candidate in [
+                "pyproject.aws.toml",
+                "pyproject.mac.toml",
+                "pyproject.workstation.toml",
+            ]:
+                if Path(candidate).exists() and candidate != pyproject_path:
+                    try:
+                        content_revert = Path(candidate).read_text()
+                        reverted, num = pattern_revert.subn(
+                            f'version = "{current_version}"', content_revert
+                        )
+                        if num > 0:
+                            Path(candidate).write_text(reverted)
+                            print(f"Successfully reverted version in {candidate}.")
+                    except Exception as e2:
                         print(
-                            f"
+                            f"Warning: Failed to revert version change in {candidate}: {e2}"
                         )
-            except Exception as e2:
-                print(
-                    f"Warning: Failed to revert version change in {mac_manifest_path}: {e2}"
-                )
         except Exception as revert_e:
             print(
                 f"Warning: Failed to revert version change in {pyproject_path}: {revert_e}"
```
```diff
@@ -306,23 +346,25 @@ def build_and_upload_wheel(bump_part: str = "patch"):
                 print(
                     f"Warning: Could not find version {new_version} to revert in {pyproject_path}."
                 )
-            # Also revert
-
-
-
-
-
-
-
-
-
+            # Also revert other platform manifests
+            for candidate in [
+                "pyproject.aws.toml",
+                "pyproject.mac.toml",
+                "pyproject.workstation.toml",
+            ]:
+                if Path(candidate).exists() and candidate != pyproject_path:
+                    try:
+                        content_revert = Path(candidate).read_text()
+                        reverted, num = pattern_revert.subn(
+                            f'version = "{current_version}"', content_revert
+                        )
+                        if num > 0:
+                            Path(candidate).write_text(reverted)
+                            print(f"Successfully reverted version in {candidate}.")
+                    except Exception as e2:
                         print(
-                            f"
+                            f"Warning: Failed to revert version change in {candidate}: {e2}"
                         )
-            except Exception as e2:
-                print(
-                    f"Warning: Failed to revert version change in {mac_manifest_path}: {e2}"
-                )
         except Exception as revert_e:
             print(
                 f"Warning: Failed to revert version change in {pyproject_path}: {revert_e}"
```
```diff
@@ -332,7 +374,7 @@ def build_and_upload_wheel(bump_part: str = "patch"):
 # --- Dependency Management Commands ---
 
 
-def
+def sync_with_toml(
     install_project: bool = typer.Option(
         False,
         "--install-project",
```
```diff
@@ -340,23 +382,133 @@ def install_dependencies(
         help="Install the local project package itself (with 'full' extras) into the environment.",
     ),
 ):
-    """
-
-    Behavior:
-    -
+    """Sync environment with platform-specific TOML manifest (install/update dependencies).
+
+    Behavior by platform:
+    - Workstation (STUDIO_PLATFORM=workstation) with pyproject.workstation.toml:
+      * Uses pip with constraints.txt to preserve NGC PyTorch
+      * Parses dependencies directly from pyproject.workstation.toml
+      * Installs into .venv_workstation with --system-site-packages
+    - Mac (STUDIO_PLATFORM=mac) with pyproject.mac.toml:
       * Ensure `.mac_uv_project/pyproject.toml` is a copy of `pyproject.mac.toml`
-      * Run `uv lock` and `uv sync` in `.mac_uv_project`
+      * Run `uv lock` and `uv sync` in `.mac_uv_project` targeting active venv with `--active`
       * If `install_project` is true, install the project from repo root into the active env (editable, [full])
-    -
-      *
-      *
+    - AWS (default) with pyproject.aws.toml:
+      * Uses UV in temp directory `.aws_uv_project` similar to Mac
+      * Run `uv lock` and `uv sync` targeting active venv
     """
     # ANSI color codes
     BLUE = "\033[94m"
     RESET = "\033[0m"
 
     try:
-
+        platform = os.environ.get("STUDIO_PLATFORM", "aws")
+
+        # Workstation platform: use pip with constraints
+        if platform == "workstation" and Path("pyproject.workstation.toml").exists():
+            print(
+                "Installing dependencies for workstation platform (using pip + constraints)..."
+            )
+
+            # Check for constraints.txt
+            if not Path("constraints.txt").exists():
+                print(
+                    "Error: constraints.txt not found. Run direnv to generate it first."
+                )
+                sys.exit(1)
+
+            # Parse and install dependencies from pyproject.workstation.toml
+            import re
+
+            with open("pyproject.workstation.toml", "r") as f:
+                content = f.read()
+
+            # Extract dependencies list
+            deps_match = re.search(r"dependencies\s*=\s*\[(.*?)\]", content, re.DOTALL)
+            if deps_match:
+                deps_text = deps_match.group(1)
+                deps = []
+                for line in deps_text.split("\n"):
+                    line = line.strip()
+                    if line.startswith('"') or line.startswith("'"):
+                        dep = re.sub(r'["\']', "", line)
+                        dep = re.sub(r",?\s*#.*$", "", dep)
+                        dep = dep.strip().rstrip(",")
+                        if dep:
+                            deps.append(dep)
+
+                if deps:
+                    pip_cmd = (
+                        [sys.executable, "-m", "pip", "install"]
+                        + deps
+                        + ["-c", "constraints.txt"]
+                    )
+                    print(
+                        f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
+                    )
+                    subprocess.run(pip_cmd, check=True)
+
+            # Install dev dependencies
+            dev_match = re.search(
+                r"\[dependency-groups\]\s*dev\s*=\s*\[(.*?)\]", content, re.DOTALL
+            )
+            if dev_match:
+                dev_text = dev_match.group(1)
+                dev_deps = []
+                for line in dev_text.split("\n"):
+                    line = line.strip()
+                    if line.startswith('"') or line.startswith("'"):
+                        dep = re.sub(r'["\']', "", line)
+                        dep = re.sub(r",?\s*#.*$", "", dep)
+                        dep = dep.strip().rstrip(",")
+                        if dep:
+                            dev_deps.append(dep)
+
+                if dev_deps:
+                    print("Installing dev dependencies...")
+                    pip_cmd = (
+                        [sys.executable, "-m", "pip", "install"]
+                        + dev_deps
+                        + ["-c", "constraints.txt"]
+                    )
+                    print(
+                        f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
+                    )
+                    subprocess.run(pip_cmd, check=True)
+
+            # Install project if requested
+            if install_project:
+                repo_name = Path.cwd().name
+                if repo_name == "dayhoff-tools":
+                    pip_cmd = [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "-e",
+                        ".[full]",
+                        "-c",
+                        "constraints.txt",
+                    ]
+                else:
+                    pip_cmd = [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "-e",
+                        ".",
+                        "-c",
+                        "constraints.txt",
+                    ]
+                print(f"Running command: {BLUE}{' '.join(pip_cmd)}{RESET}")
+                subprocess.run(pip_cmd, check=True)
+
+            print("✅ Dependencies installed successfully (workstation)")
+            return
+
+        # Mac platform: use UV with pyproject.mac.toml
+        is_mac = platform == "mac"
         mac_manifest = Path("pyproject.mac.toml")
         if is_mac and mac_manifest.exists():
             # Mac devcontainer flow
```
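The workstation path above extracts dependency specifiers with regular expressions rather than a TOML parser. A compact, runnable illustration of that extraction, using fabricated manifest content (the cleanup steps mirror the diff: take the `dependencies = [...]` block, then strip quotes, trailing commas, and `#` comments):

```python
# Illustration only; `content` is a made-up stand-in for pyproject.workstation.toml.
import re

content = '''
[project]
dependencies = [
    "numpy>=1.24.0",  # arrays
    "pandas",
]
'''

deps = []
match = re.search(r"dependencies\s*=\s*\[(.*?)\]", content, re.DOTALL)
if match:
    for line in match.group(1).split("\n"):
        line = line.strip()
        if line.startswith('"') or line.startswith("'"):
            dep = re.sub(r'["\']', "", line)      # drop surrounding quotes
            dep = re.sub(r",?\s*#.*$", "", dep)    # drop any trailing comment
            dep = dep.strip().rstrip(",")          # drop the trailing comma
            if dep:
                deps.append(dep)

print(deps)  # ['numpy>=1.24.0', 'pandas']
```

Note that the non-greedy `(.*?)\]` stops at the first `]`, so a bracketed extra inside a dependency string would cut the captured block short; the sample above avoids that case.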
```diff
@@ -399,29 +551,55 @@ def install_dependencies(
                 subprocess.run(sync_cmd, check=True, cwd=str(mac_uv_dir))
                 print("Dependencies synced successfully (project not installed).")
         else:
-            #
-
-
-
-
+            # AWS platform (or fallback): use UV with pyproject.aws.toml
+            aws_manifest = Path("pyproject.aws.toml")
+            if aws_manifest.exists():
+                # AWS devcontainer flow (similar to Mac)
+                aws_uv_dir = Path(".aws_uv_project")
+                aws_uv_dir.mkdir(parents=True, exist_ok=True)
+                aws_pyproject = aws_uv_dir / "pyproject.toml"
+                aws_pyproject.write_text(aws_manifest.read_text())
+
+                # Ensure lock matches manifest (in aws temp dir)
+                print("Ensuring lock file matches pyproject.aws.toml (AWS devcon)…")
+                lock_cmd = ["uv", "lock"]
+                print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
+                subprocess.run(
+                    lock_cmd, check=True, capture_output=True, cwd=str(aws_uv_dir)
+                )
 
-
-
-
-
-
+                # Sync into the active environment
+                if install_project:
+                    print(
+                        "Syncing dependencies into ACTIVE env and installing project [full]…"
+                    )
+                    sync_cmd = ["uv", "sync", "--all-groups", "--active"]
+                    print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
+                    subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
+                    # Install project from repo root
+                    pip_install_cmd = ["uv", "pip", "install", "-e", ".[full]"]
+                    print(f"Running command: {BLUE}{' '.join(pip_install_cmd)}{RESET}")
+                    subprocess.run(pip_install_cmd, check=True)
+                    print("Project installed with 'full' extras successfully.")
+                else:
+                    print(
+                        "Syncing dependencies into ACTIVE env (project not installed)…"
+                    )
+                    sync_cmd = [
+                        "uv",
+                        "sync",
+                        "--all-groups",
+                        "--no-install-project",
+                        "--active",
+                    ]
+                    print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
+                    subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
+                    print("Dependencies synced successfully (project not installed).")
             else:
-                print(
-
-
-
-                    "--all-groups",
-                    "--no-install-project",
-                    "--active",
-                ]
-                print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-                subprocess.run(sync_cmd, check=True)
-                print("Dependencies synced successfully (project not installed).")
+                print(
+                    "Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+                )
+                sys.exit(1)
 
     except subprocess.CalledProcessError as e:
         stderr_output = e.stderr.decode() if e.stderr else "No stderr output."
```
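Both the Mac flow (earlier in this function) and the AWS flow added here follow the same shape: copy the platform manifest into a scratch UV project directory, run `uv lock` there, then `uv sync` into the currently active virtual environment. A condensed sketch of that shape, using the same commands and flags as the diff (the helper name `uv_sync_from_manifest` is hypothetical):

```python
# Hypothetical condensation of the Mac/AWS sync flow shown above.
import subprocess
from pathlib import Path


def uv_sync_from_manifest(manifest: str, scratch: str) -> None:
    scratch_dir = Path(scratch)
    scratch_dir.mkdir(parents=True, exist_ok=True)
    # UV expects a file literally named pyproject.toml, so copy the manifest in.
    (scratch_dir / "pyproject.toml").write_text(Path(manifest).read_text())
    # Lock inside the scratch project, then sync into the *active* venv.
    subprocess.run(["uv", "lock"], check=True, cwd=scratch_dir)
    subprocess.run(
        ["uv", "sync", "--all-groups", "--no-install-project", "--active"],
        check=True,
        cwd=scratch_dir,
    )


# e.g. uv_sync_from_manifest("pyproject.aws.toml", ".aws_uv_project")
```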
```diff
@@ -440,6 +618,238 @@ def install_dependencies(
         sys.exit(1)
 
 
+def _get_all_platform_manifests():
+    """Get list of all platform manifests that exist."""
+    manifest_files = []
+    for fname in [
+        "pyproject.aws.toml",
+        "pyproject.mac.toml",
+        "pyproject.workstation.toml",
+    ]:
+        if Path(fname).exists():
+            manifest_files.append(Path(fname))
+    return manifest_files
+
+
+def _update_all_manifests_for_dayhoff_tools(new_version: str):
+    """Update dayhoff-tools constraint in all platform manifests."""
+    import re
+
+    manifest_files = _get_all_platform_manifests()
+
+    if not manifest_files:
+        print("Warning: No platform manifests found to update.")
+        return
+
+    package_name = "dayhoff-tools"
+    package_name_esc = re.escape(package_name)
+
+    # Regex to match the dependency line, with optional extras and version spec
+    pattern = re.compile(
+        rf"^(\s*['\"]){package_name_esc}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
+        re.MULTILINE,
+    )
+
+    new_constraint_text = f">={new_version}"
+
+    def _repl(match: re.Match):
+        prefix = match.group(1)
+        extras = match.group(2) or ""
+        suffix = match.group(3)
+        return f"{prefix}{package_name}{extras}{new_constraint_text}{suffix}"
+
+    # Update all manifest files
+    for manifest_file in manifest_files:
+        try:
+            print(f"Updating {manifest_file} version constraint...")
+            content = manifest_file.read_text()
+            new_content, num_replacements = pattern.subn(_repl, content)
+            if num_replacements > 0:
+                manifest_file.write_text(new_content)
+                print(
+                    f"Updated dayhoff-tools constraint in {manifest_file} to '{new_constraint_text}'"
+                )
+            else:
+                print(
+                    f"Warning: Could not find dayhoff-tools dependency line in {manifest_file}"
+                )
+        except Exception as e:
+            print(f"Error updating {manifest_file}: {e}")
+
+
+def add_dependency(
+    package: str,
+    dev: bool = typer.Option(
+        False, "--dev", "-d", help="Add to dev dependencies instead of main."
+    ),
+):
+    """Add a dependency to all platform-specific manifests.
+
+    Args:
+        package: Package specification (e.g., "numpy>=1.24.0" or "pandas")
+        dev: If True, add to [dependency-groups] dev instead of [project] dependencies
+    """
+    import re
+
+    # ANSI color codes
+    BLUE = "\033[94m"
+    RESET = "\033[0m"
+
+    manifest_files = _get_all_platform_manifests()
+
+    if not manifest_files:
+        print(
+            "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+        )
+        sys.exit(1)
+
+    # Determine section to add to
+    section_name = "dev dependencies" if dev else "main dependencies"
+    print(f"Adding '{package}' to {section_name} in all platform manifests...")
+
+    # Parse package name to check for duplicates
+    package_name = re.split(r"[<>=~!\[]", package)[0].strip()
+
+    for manifest_file in manifest_files:
+        try:
+            content = manifest_file.read_text()
+
+            # Check if package already exists
+            existing_check = re.search(
+                rf'^(\s*["\']){re.escape(package_name)}[<>=~!\[]',
+                content,
+                re.MULTILINE,
+            )
+            if existing_check:
+                print(
+                    f"⚠️ Package '{package_name}' already exists in {manifest_file}, skipping"
+                )
+                continue
+
+            if dev:
+                # Add to [dependency-groups] dev section
+                # Find the dev = [ ... ] block
+                dev_match = re.search(
+                    r"(\[dependency-groups\]\s*dev\s*=\s*\[)(.*?)(\])",
+                    content,
+                    re.DOTALL,
+                )
+                if not dev_match:
+                    print(
+                        f"Warning: Could not find [dependency-groups] dev section in {manifest_file}"
+                    )
+                    continue
+
+                # Insert new dependency at the end of the list
+                before = dev_match.group(1)
+                deps_block = dev_match.group(2)
+                after = dev_match.group(3)
+
+                # Add with proper indentation
+                new_dep = f' "{package}",\n'
+                new_content = content.replace(
+                    dev_match.group(0), f"{before}{deps_block}{new_dep}{after}"
+                )
+            else:
+                # Add to [project] dependencies section
+                deps_match = re.search(
+                    r"(dependencies\s*=\s*\[)(.*?)(\])", content, re.DOTALL
+                )
+                if not deps_match:
+                    print(
+                        f"Warning: Could not find dependencies section in {manifest_file}"
+                    )
+                    continue
+
+                before = deps_match.group(1)
+                deps_block = deps_match.group(2)
+                after = deps_match.group(3)
+
+                # Add with proper indentation
+                new_dep = f' "{package}",\n'
+                new_content = content.replace(
+                    deps_match.group(0), f"{before}{deps_block}{new_dep}{after}"
+                )
+
+            manifest_file.write_text(new_content)
+            print(f"✅ Added '{package}' to {manifest_file}")
+
+        except Exception as e:
+            print(f"Error updating {manifest_file}: {e}")
+
+    print(f"\n✅ Added '{package}' to all platform manifests")
+    print(
+        f"\nRun {BLUE}dh tomlsync{RESET} to install the new dependency in your environment."
+    )
+
+
+def remove_dependency(
+    package: str,
+    dev: bool = typer.Option(
+        False, "--dev", "-d", help="Remove from dev dependencies instead of main."
+    ),
+):
+    """Remove a dependency from all platform-specific manifests.
+
+    Args:
+        package: Package name (e.g., "numpy" or "pandas")
+        dev: If True, remove from [dependency-groups] dev instead of [project] dependencies
+    """
+    import re
+
+    # ANSI color codes
+    BLUE = "\033[94m"
+    RESET = "\033[0m"
+
+    manifest_files = _get_all_platform_manifests()
+
+    if not manifest_files:
+        print(
+            "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+        )
+        sys.exit(1)
+
+    section_name = "dev dependencies" if dev else "main dependencies"
+    print(f"Removing '{package}' from {section_name} in all platform manifests...")
+
+    # Escape package name for regex
+    package_esc = re.escape(package)
+
+    removed_count = 0
+    for manifest_file in manifest_files:
+        try:
+            content = manifest_file.read_text()
+
+            # Pattern to match the dependency line (with optional version spec)
+            # Matches: "package...", or "package...",\n
+            pattern = re.compile(
+                rf'^\s*["\']({package_esc}[<>=~!\[].+?|{package_esc})["\'],?\s*(?:#.*)?$',
+                re.MULTILINE,
+            )
+
+            new_content, num_removed = pattern.subn("", content)
+
+            if num_removed > 0:
+                # Clean up any double blank lines
+                new_content = re.sub(r"\n\n\n+", "\n\n", new_content)
+                manifest_file.write_text(new_content)
+                print(f"✅ Removed '{package}' from {manifest_file}")
+                removed_count += 1
+            else:
+                print(f"⚠️ Package '{package}' not found in {manifest_file}")
+
+        except Exception as e:
+            print(f"Error updating {manifest_file}: {e}")
+
+    if removed_count > 0:
+        print(f"\n✅ Removed '{package}' from {removed_count} platform manifest(s)")
+        print(
+            f"\nRun {BLUE}dh tomlsync{RESET} to uninstall the dependency from your environment."
+        )
+    else:
+        print(f"\n⚠️ Package '{package}' was not found in any manifests")
+
+
 def update_dependencies(
     update_all: bool = typer.Option(
         False,
```
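The constraint rewrite in `_update_all_manifests_for_dayhoff_tools` keys off a single line-anchored regex that keeps the leading quote and any extras while swapping the version specifier. A small, self-contained illustration of what that pattern does to one manifest line (the versions here are sample values):

```python
# Illustration of the constraint-rewrite regex used above.
import re

package_name = "dayhoff-tools"
pattern = re.compile(
    rf"^(\s*['\"]){re.escape(package_name)}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
    re.MULTILINE,
)


def _repl(m: re.Match) -> str:
    extras = m.group(2) or ""  # e.g. "[full]", preserved as-is
    return f"{m.group(1)}{package_name}{extras}>=1.11.2{m.group(3)}"


line = '    "dayhoff-tools[full]>=1.10.2",'
new_line, n = pattern.subn(_repl, line)
print(n)         # 1
print(new_line)  # ->     "dayhoff-tools[full]>=1.11.2",
```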
```diff
@@ -448,29 +858,73 @@ def update_dependencies(
         help="Update all dependencies instead of just dayhoff-tools.",
     ),
 ):
-    """Update dependencies to newer versions (
+    """Update dependencies to newer versions (platform-aware).
 
     - Default Action (no flags): Updates only 'dayhoff-tools' package to latest,
       updates ALL manifest files with the version constraint, and syncs.
     - Flags:
-        --all/-a: Updates all dependencies
+        --all/-a: Updates all dependencies and syncs.
 
     Cross-platform behavior:
-    -
-
-    -
-
-    - Always uses `--active` for sync to target the active venv.
+    - Workstation: Uses pip to upgrade packages, regenerates constraints.txt
+    - Mac/AWS: Uses UV with platform-specific manifests (.mac_uv_project or .aws_uv_project)
+    - Always updates ALL platform manifests (pyproject.aws.toml, pyproject.mac.toml,
+      pyproject.workstation.toml) to ensure version consistency
     """
     # ANSI color codes
     BLUE = "\033[94m"
     RESET = "\033[0m"
 
-
+    platform = os.environ.get("STUDIO_PLATFORM", "aws")
+
+    # Workstation platform: use pip upgrade
+    if platform == "workstation" and Path("pyproject.workstation.toml").exists():
+        print("Updating dependencies for workstation platform (using pip)...")
+
+        if update_all:
+            print("Error: --all flag not supported on workstation platform yet.")
+            print("Use 'pip install --upgrade <package>' manually for now.")
+            sys.exit(1)
+
+        # Update dayhoff-tools only (default behavior)
+        print("Upgrading dayhoff-tools to latest version...")
+        upgrade_cmd = [
+            sys.executable,
+            "-m",
+            "pip",
+            "install",
+            "--upgrade",
+            "dayhoff-tools[full]",
+        ]
+        print(f"Running command: {BLUE}{' '.join(upgrade_cmd)}{RESET}")
+        subprocess.run(upgrade_cmd, check=True)
+
+        # Get new version
+        result = subprocess.run(
+            [sys.executable, "-m", "pip", "show", "dayhoff-tools"],
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+        version_line = [
+            l for l in result.stdout.split("\n") if l.startswith("Version:")
+        ]
+        if version_line:
+            new_version = version_line[0].split(":", 1)[1].strip()
+            print(f"Updated to dayhoff-tools {new_version}")
+
+            # Update all platform manifests with new constraint
+            _update_all_manifests_for_dayhoff_tools(new_version)
+
+        print("✅ Dependencies updated successfully (workstation)")
+        return
+
+    # Mac/AWS platforms: use UV
     mac_manifest = Path("pyproject.mac.toml")
+    aws_manifest = Path("pyproject.aws.toml")
     mac_uv_dir = Path(".mac_uv_project")
+    aws_uv_dir = Path(".aws_uv_project")
     lock_file_path = Path("uv.lock")
-    pyproject_path = Path("pyproject.toml")
 
     # Determine action based on flags
     lock_cmd = ["uv", "lock"]
```
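On the workstation path, the newly installed version is read back by parsing `pip show` output rather than importing the package. A standalone sketch of that probe (it assumes pip is available in the current interpreter and that dayhoff-tools is installed; otherwise `check=True` raises):

```python
# Sketch of the `pip show` version probe used above.
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "pip", "show", "dayhoff-tools"],
    capture_output=True,
    text=True,
    check=True,
)
version = next(
    (line.split(":", 1)[1].strip()
     for line in result.stdout.splitlines()
     if line.startswith("Version:")),
    None,
)
print(version)  # e.g. "1.11.2"
```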
```diff
@@ -492,15 +946,29 @@ def update_dependencies(
         )
 
     try:
-        # Choose working directory for uv operations
+        # Choose working directory for uv operations based on platform
         uv_cwd = None
-        manifest_path_for_constraint =
-
+        manifest_path_for_constraint = None
+
+        if platform == "mac" and mac_manifest.exists():
             mac_uv_dir.mkdir(parents=True, exist_ok=True)
             (mac_uv_dir / "pyproject.toml").write_text(mac_manifest.read_text())
             uv_cwd = str(mac_uv_dir)
             lock_file_path = mac_uv_dir / "uv.lock"
             manifest_path_for_constraint = mac_manifest
+        elif aws_manifest.exists():
+            # AWS platform (default)
+            aws_uv_dir.mkdir(parents=True, exist_ok=True)
+            (aws_uv_dir / "pyproject.toml").write_text(aws_manifest.read_text())
+            uv_cwd = str(aws_uv_dir)
+            lock_file_path = aws_uv_dir / "uv.lock"
+            manifest_path_for_constraint = aws_manifest
+        else:
+            print(
+                "Error: No platform-specific manifest found (pyproject.aws.toml or pyproject.mac.toml)"
+            )
+            sys.exit(1)
+
         # Step 1: Run the update lock command
         print(action_description)
         print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
```
```diff
@@ -534,61 +1002,8 @@ def update_dependencies(
 
             print(f"Found dayhoff-tools version {locked_version} in lock file.")
 
-            # Update
-
-            if pyproject_path.exists():
-                manifest_files_to_update.append(pyproject_path)
-            if mac_manifest.exists():
-                manifest_files_to_update.append(mac_manifest)
-
-            if not manifest_files_to_update:
-                print("Warning: No manifest files found to update.")
-                return
-
-            package_name = "dayhoff-tools"
-            package_name_esc = re.escape(package_name)
-
-            # Regex to match the dependency line, with optional extras and version spec
-            pattern = re.compile(
-                rf"^(\s*['\"]){package_name_esc}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
-                re.MULTILINE,
-            )
-
-            new_constraint_text = f">={locked_version}"
-
-            def _repl(match: re.Match):
-                prefix = match.group(1)
-                extras = match.group(2) or ""
-                suffix = match.group(3)
-                return f"{prefix}{package_name}{extras}{new_constraint_text}{suffix}"
-
-            # Update all manifest files
-            updated_files = []
-            for manifest_file in manifest_files_to_update:
-                try:
-                    print(f"Updating {manifest_file} version constraint...")
-                    content = manifest_file.read_text()
-                    new_content, num_replacements = pattern.subn(_repl, content)
-                    if num_replacements > 0:
-                        manifest_file.write_text(new_content)
-                        print(
-                            f"Updated dayhoff-tools constraint in {manifest_file} to '{new_constraint_text}'"
-                        )
-                        updated_files.append(str(manifest_file))
-                    else:
-                        print(
-                            f"Warning: Could not find dayhoff-tools dependency line in {manifest_file}"
-                        )
-                except FileNotFoundError:
-                    print(f"Warning: {manifest_file} not found.")
-                except Exception as e:
-                    print(f"Error updating {manifest_file}: {e}")
-
-            if not updated_files:
-                print(
-                    "Warning: No manifest files were successfully updated with dayhoff-tools constraint."
-                )
-                print("Proceeding with sync despite manifest update failures.")
+            # Update all platform manifest files to ensure consistency
+            _update_all_manifests_for_dayhoff_tools(locked_version)
 
         # Step 3: Sync environment
         print("Syncing environment with updated lock file...")
```
{dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/RECORD
CHANGED
```diff
@@ -10,9 +10,9 @@ dayhoff_tools/cli/engine/engine_maintenance.py,sha256=S9w2_Ko2C3zKpzOux-iG8QUYn0
 dayhoff_tools/cli/engine/engine_management.py,sha256=s_H3FtMlKsdfzR8pwV-j2W2QX-Fypkqj2kPC0aTqC1A,19072
 dayhoff_tools/cli/engine/shared.py,sha256=Ecx6I1jtzmxQDn3BezKpgpQ4SJeZf4SZjUCLg-67p80,16844
 dayhoff_tools/cli/engine/studio_commands.py,sha256=VwTQujz32-uMcYusDRE73SdzRpgvIkv7ZAF4zRv6AzA,30266
-dayhoff_tools/cli/main.py,sha256=
+dayhoff_tools/cli/main.py,sha256=Ii5boey--93yGthB_eS2LC7ZR3WHGsJXDHY7uElEtso,6169
 dayhoff_tools/cli/swarm_commands.py,sha256=5EyKj8yietvT5lfoz8Zx0iQvVaNgc3SJX1z2zQR6o6M,5614
-dayhoff_tools/cli/utility_commands.py,sha256=
+dayhoff_tools/cli/utility_commands.py,sha256=YLNW1u68br4N4RK5TDVYLKlG2b-uMF-qwoW3CZ5bKZs,40929
 dayhoff_tools/deployment/base.py,sha256=48KE76QlWMeIZJefcBOZVbyChS2V_mgs7IQ31odPV2o,17806
 dayhoff_tools/deployment/deploy_aws.py,sha256=gfqh09hGbz0q3oPqVm0imd_CEjKF2k8moGNRIL26qqE,18614
 dayhoff_tools/deployment/deploy_gcp.py,sha256=xgaOVsUDmP6wSEMYNkm1yRNcVskfdz80qJtCulkBIAM,8860
@@ -33,7 +33,7 @@ dayhoff_tools/intake/uniprot.py,sha256=BZYJQF63OtPcBBnQ7_P9gulxzJtqyorgyuDiPeOJq
 dayhoff_tools/logs.py,sha256=DKdeP0k0kliRcilwvX0mUB2eipO5BdWUeHwh-VnsICs,838
 dayhoff_tools/sqlite.py,sha256=jV55ikF8VpTfeQqqlHSbY8OgfyfHj8zgHNpZjBLos_E,18672
 dayhoff_tools/warehouse.py,sha256=UETBtZD3r7WgvURqfGbyHlT7cxoiVq8isjzMuerKw8I,24475
-dayhoff_tools-1.11.
-dayhoff_tools-1.11.
-dayhoff_tools-1.11.
-dayhoff_tools-1.11.
+dayhoff_tools-1.11.2.dist-info/METADATA,sha256=rVpxw6vSBegBs4W3-ukrjnLDX5np40EGzQwjrrPW-eo,2980
+dayhoff_tools-1.11.2.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+dayhoff_tools-1.11.2.dist-info/entry_points.txt,sha256=iAf4jteNqW3cJm6CO6czLxjW3vxYKsyGLZ8WGmxamSc,49
+dayhoff_tools-1.11.2.dist-info/RECORD,,
```
{dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/WHEEL
File without changes
{dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.2.dist-info}/entry_points.txt
File without changes