dayhoff-tools 1.11.1__py3-none-any.whl → 1.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dayhoff_tools/cli/main.py +10 -4
- dayhoff_tools/cli/utility_commands.py +595 -145
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/METADATA +1 -1
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/RECORD +6 -6
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/WHEEL +0 -0
- {dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/entry_points.txt +0 -0
dayhoff_tools/cli/main.py
CHANGED
@@ -4,13 +4,14 @@ import sys
 from importlib.metadata import PackageNotFoundError, version

 import typer
-
 from dayhoff_tools.cli.cloud_commands import aws_app, gcp_app
 from dayhoff_tools.cli.engine import engine_app, studio_app
 from dayhoff_tools.cli.utility_commands import (
+    add_dependency,
     build_and_upload_wheel,
     delete_local_branch,
-
+    remove_dependency,
+    sync_with_toml,
     test_github_actions_locally,
     update_dependencies,
 )
@@ -45,8 +46,13 @@ app.command("clean")(delete_local_branch)

 # Dependency Management
 app.command(
-    "
-
+    "tomlsync",
+    help="Sync environment with platform-specific TOML manifest (install/update dependencies).",
+)(sync_with_toml)
+app.command("add", help="Add a dependency to all platform manifests.")(add_dependency)
+app.command("remove", help="Remove a dependency from all platform manifests.")(
+    remove_dependency
+)
 app.command("update", help="Update dayhoff-tools (or all deps) and sync environment.")(
     update_dependencies
 )
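For orientation, here is a minimal, hypothetical sketch (not code from this package) of how registrations like the ones above surface as CLI subcommands on a Typer app; the function bodies are stand-ins for the real imports from utility_commands:

import typer

app = typer.Typer()

def sync_with_toml() -> None:
    # Stand-in for the real sync_with_toml
    print("sync environment with the platform TOML manifest")

def add_dependency(package: str) -> None:
    print(f"add {package} to all platform manifests")

def remove_dependency(package: str) -> None:
    print(f"remove {package} from all platform manifests")

app.command(
    "tomlsync",
    help="Sync environment with platform-specific TOML manifest (install/update dependencies).",
)(sync_with_toml)
app.command("add", help="Add a dependency to all platform manifests.")(add_dependency)
app.command("remove", help="Remove a dependency from all platform manifests.")(remove_dependency)

if __name__ == "__main__":
    app()  # exposes `tomlsync`, `add <package>`, and `remove <package>` subcommands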
dayhoff_tools/cli/utility_commands.py
CHANGED
@@ -110,8 +110,24 @@ def build_and_upload_wheel(bump_part: str = "patch"):
         publish_cmd = ["uv", "publish", "--token", token]
         print("Using UV_PUBLISH_TOKEN for authentication.")

-
-
+    # Find the primary manifest (prefer AWS, then Mac, then Workstation)
+    pyproject_path = None
+    for candidate in [
+        "pyproject.aws.toml",
+        "pyproject.mac.toml",
+        "pyproject.workstation.toml",
+    ]:
+        if Path(candidate).exists():
+            pyproject_path = candidate
+            break
+
+    if not pyproject_path:
+        print(
+            "Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+        )
+        return
+
+    print(f"Using manifest: {pyproject_path}")
     current_version = None  # Initialize in case the first try block fails

     try:
@@ -174,32 +190,54 @@ def build_and_upload_wheel(bump_part: str = "patch"):
             f.write(new_content)
         print(f"Updated {pyproject_path} with version {new_version}")

-        # Mirror version in
-
-
-
-
-
-
+        # Mirror version in all other platform manifests (best-effort)
+        other_manifests = []
+        for candidate in [
+            "pyproject.aws.toml",
+            "pyproject.mac.toml",
+            "pyproject.workstation.toml",
+        ]:
+            if Path(candidate).exists() and candidate != pyproject_path:
+                other_manifests.append(Path(candidate))
+
+        for manifest_path in other_manifests:
+            try:
+                content = manifest_path.read_text()
+                pattern = re.compile(
                     f'^version\s*=\s*"{re.escape(current_version)}"', re.MULTILINE
                 )
-
-                    f'version = "{new_version}"',
+                new_content, replacements = pattern.subn(
+                    f'version = "{new_version}"', content
                 )
-                if
-
-
-
-
-                print(f"Warning: Could not update {mac_manifest_path}: {e}")
+                if replacements > 0:
+                    manifest_path.write_text(new_content)
+                    print(f"Updated {manifest_path} with version {new_version}")
+            except Exception as e:
+                print(f"Warning: Could not update {manifest_path}: {e}")
         # --- End Version Bumping Logic ---

         # Build wheel and sdist
+        # UV expects pyproject.toml, so temporarily copy the platform manifest
+        backup_created = False
+        if pyproject_path != "pyproject.toml":
+            if Path("pyproject.toml").exists():
+                Path("pyproject.toml").rename("pyproject.toml.build.bak")
+                backup_created = True
+            Path(pyproject_path).read_text()
+            with open("pyproject.toml", "w") as f:
+                f.write(Path(pyproject_path).read_text())
+
         build_cmd = ["uv", "build"]
         # Print command in blue
         print(f"Running command: {BLUE}{' '.join(build_cmd)}{RESET}")
         subprocess.run(build_cmd, check=True)

+        # Restore original state
+        if pyproject_path != "pyproject.toml":
+            Path("pyproject.toml").unlink()
+            if backup_created:
+                Path("pyproject.toml.build.bak").rename("pyproject.toml")
+
         # Upload using uv publish with explicit arguments
         # Print masked command in blue
         print(f"Running command: {BLUE}{' '.join(publish_cmd_safe_print)}{RESET}")
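The temporary copy-and-restore around `uv build` in the hunk above can also be expressed as a context manager; the following is a hedged, generic sketch of that same pattern (the helper name and usage are illustrative, not package code):

from contextlib import contextmanager
from pathlib import Path

@contextmanager
def as_pyproject(manifest: str):
    """Temporarily expose a platform manifest as pyproject.toml, then restore."""
    target = Path("pyproject.toml")
    backup = Path("pyproject.toml.build.bak")
    backup_created = False
    if manifest != "pyproject.toml":
        if target.exists():
            target.rename(backup)  # keep the original safe
            backup_created = True
        target.write_text(Path(manifest).read_text())
    try:
        yield
    finally:
        if manifest != "pyproject.toml":
            target.unlink()  # remove the temporary copy
            if backup_created:
                backup.rename(target)  # put the original back

# Illustrative usage:
# with as_pyproject("pyproject.aws.toml"):
#     subprocess.run(["uv", "build"], check=True)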
@@ -260,23 +298,25 @@ def build_and_upload_wheel(bump_part: str = "patch"):
                     f"Warning: Could not find version {new_version} to revert in {pyproject_path}."
                 )

-            # Also revert
-
-
-
-
-
-
-
-
-
+            # Also revert other platform manifests
+            for candidate in [
+                "pyproject.aws.toml",
+                "pyproject.mac.toml",
+                "pyproject.workstation.toml",
+            ]:
+                if Path(candidate).exists() and candidate != pyproject_path:
+                    try:
+                        content_revert = Path(candidate).read_text()
+                        reverted, num = pattern_revert.subn(
+                            f'version = "{current_version}"', content_revert
+                        )
+                        if num > 0:
+                            Path(candidate).write_text(reverted)
+                            print(f"Successfully reverted version in {candidate}.")
+                    except Exception as e2:
                         print(
-                            f"
+                            f"Warning: Failed to revert version change in {candidate}: {e2}"
                         )
-            except Exception as e2:
-                print(
-                    f"Warning: Failed to revert version change in {mac_manifest_path}: {e2}"
-                )
         except Exception as revert_e:
             print(
                 f"Warning: Failed to revert version change in {pyproject_path}: {revert_e}"
@@ -306,23 +346,25 @@ def build_and_upload_wheel(bump_part: str = "patch"):
                 print(
                     f"Warning: Could not find version {new_version} to revert in {pyproject_path}."
                 )
-            # Also revert
-
-
-
-
-
-
-
-
-
+            # Also revert other platform manifests
+            for candidate in [
+                "pyproject.aws.toml",
+                "pyproject.mac.toml",
+                "pyproject.workstation.toml",
+            ]:
+                if Path(candidate).exists() and candidate != pyproject_path:
+                    try:
+                        content_revert = Path(candidate).read_text()
+                        reverted, num = pattern_revert.subn(
+                            f'version = "{current_version}"', content_revert
+                        )
+                        if num > 0:
+                            Path(candidate).write_text(reverted)
+                            print(f"Successfully reverted version in {candidate}.")
+                    except Exception as e2:
                         print(
-                            f"
+                            f"Warning: Failed to revert version change in {candidate}: {e2}"
                         )
-            except Exception as e2:
-                print(
-                    f"Warning: Failed to revert version change in {mac_manifest_path}: {e2}"
-                )
         except Exception as revert_e:
             print(
                 f"Warning: Failed to revert version change in {pyproject_path}: {revert_e}"
@@ -332,7 +374,7 @@ def build_and_upload_wheel(bump_part: str = "patch"):
 # --- Dependency Management Commands ---


-def
+def sync_with_toml(
     install_project: bool = typer.Option(
         False,
         "--install-project",
@@ -340,23 +382,154 @@ def install_dependencies(
         help="Install the local project package itself (with 'full' extras) into the environment.",
     ),
 ):
-    """
-
-    Behavior:
-    -
+    """Sync environment with platform-specific TOML manifest (install/update dependencies).
+
+    Behavior by platform:
+    - Workstation (STUDIO_PLATFORM=workstation) with pyproject.workstation.toml:
+      * Uses pip with constraints.txt to preserve NGC PyTorch
+      * Parses dependencies directly from pyproject.workstation.toml
+      * Installs into .venv_workstation with --system-site-packages
+    - Mac (STUDIO_PLATFORM=mac) with pyproject.mac.toml:
       * Ensure `.mac_uv_project/pyproject.toml` is a copy of `pyproject.mac.toml`
-      * Run `uv lock` and `uv sync` in `.mac_uv_project`
+      * Run `uv lock` and `uv sync` in `.mac_uv_project` targeting active venv with `--active`
       * If `install_project` is true, install the project from repo root into the active env (editable, [full])
-    -
-      *
-      *
+    - AWS (default) with pyproject.aws.toml:
+      * Uses UV in temp directory `.aws_uv_project` similar to Mac
+      * Run `uv lock` and `uv sync` targeting active venv
     """
     # ANSI color codes
     BLUE = "\033[94m"
     RESET = "\033[0m"

     try:
-
+        platform = os.environ.get("STUDIO_PLATFORM", "aws")
+
+        # Workstation platform: use pip with constraints
+        if platform == "workstation" and Path("pyproject.workstation.toml").exists():
+            print(
+                "Installing dependencies for workstation platform (using pip + constraints)..."
+            )
+
+            # Check for constraints.txt
+            if not Path("constraints.txt").exists():
+                print(
+                    "Error: constraints.txt not found. Run direnv to generate it first."
+                )
+                sys.exit(1)
+
+            # Parse and install dependencies from pyproject.workstation.toml
+            import re
+
+            with open("pyproject.workstation.toml", "r") as f:
+                content = f.read()
+
+            # Extract dependencies list using line-by-line parsing to handle [] in package names
+            lines = content.split("\n")
+            in_deps = False
+            deps_lines = []
+
+            for line in lines:
+                if re.match(r"\s*dependencies\s*=\s*\[", line):
+                    in_deps = True
+                    continue
+                if in_deps:
+                    if re.match(r"^\s*\]\s*$", line):
+                        break
+                    deps_lines.append(line)
+
+            deps = []
+            for line in deps_lines:
+                line = line.strip()
+                if line.startswith('"') or line.startswith("'"):
+                    dep = re.sub(r'["\']', "", line)
+                    dep = re.sub(r",?\s*#.*$", "", dep)
+                    dep = dep.strip().rstrip(",")
+                    if dep:
+                        deps.append(dep)
+
+            if deps:
+                pip_cmd = (
+                    [sys.executable, "-m", "pip", "install"]
+                    + deps
+                    + ["-c", "constraints.txt"]
+                )
+                print(
+                    f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
+                )
+                subprocess.run(pip_cmd, check=True)
+
+            # Install dev dependencies using line-by-line parsing
+            in_dev_groups = False
+            in_dev_list = False
+            dev_lines = []
+
+            for line in lines:
+                if re.match(r"\s*\[dependency-groups\]", line):
+                    in_dev_groups = True
+                    continue
+                if in_dev_groups and re.match(r"\s*dev\s*=\s*\[", line):
+                    in_dev_list = True
+                    continue
+                if in_dev_list:
+                    if re.match(r"^\s*\]\s*$", line):
+                        break
+                    dev_lines.append(line)
+
+            dev_deps = []
+            for line in dev_lines:
+                line = line.strip()
+                if line.startswith('"') or line.startswith("'"):
+                    dep = re.sub(r'["\']', "", line)
+                    dep = re.sub(r",?\s*#.*$", "", dep)
+                    dep = dep.strip().rstrip(",")
+                    if dep:
+                        dev_deps.append(dep)
+
+            if dev_deps:
+                print("Installing dev dependencies...")
+                pip_cmd = (
+                    [sys.executable, "-m", "pip", "install"]
+                    + dev_deps
+                    + ["-c", "constraints.txt"]
+                )
+                print(
+                    f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
+                )
+                subprocess.run(pip_cmd, check=True)
+
+            # Install project if requested
+            if install_project:
+                repo_name = Path.cwd().name
+                if repo_name == "dayhoff-tools":
+                    pip_cmd = [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "-e",
+                        ".[full]",
+                        "-c",
+                        "constraints.txt",
+                    ]
+                else:
+                    pip_cmd = [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "-e",
+                        ".",
+                        "-c",
+                        "constraints.txt",
+                    ]
+                print(f"Running command: {BLUE}{' '.join(pip_cmd)}{RESET}")
+                subprocess.run(pip_cmd, check=True)
+
+            print("✅ Dependencies installed successfully (workstation)")
+            return
+
+        # Mac platform: use UV with pyproject.mac.toml
+        is_mac = platform == "mac"
         mac_manifest = Path("pyproject.mac.toml")
         if is_mac and mac_manifest.exists():
             # Mac devcontainer flow
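For illustration, the line-by-line dependency parsing added above can be exercised on its own; this is a standalone sketch that reuses the same logic against an inline sample manifest (the sample contents are made up):

import re

sample = """\
[project]
dependencies = [
    "boto3>=1.34",
    "dayhoff-tools[full]>=1.11.1",  # pinned via constraints on workstation
]
"""

in_deps = False
deps_lines = []
for line in sample.split("\n"):
    if re.match(r"\s*dependencies\s*=\s*\[", line):
        in_deps = True
        continue
    if in_deps:
        if re.match(r"^\s*\]\s*$", line):
            break
        deps_lines.append(line)

deps = []
for line in deps_lines:
    line = line.strip()
    if line.startswith('"') or line.startswith("'"):
        dep = re.sub(r'["\']', "", line)      # drop the quotes
        dep = re.sub(r",?\s*#.*$", "", dep)   # drop trailing comma + comment
        dep = dep.strip().rstrip(",")
        if dep:
            deps.append(dep)

print(deps)  # ['boto3>=1.34', 'dayhoff-tools[full]>=1.11.1']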
@@ -399,29 +572,55 @@ def install_dependencies(
                 subprocess.run(sync_cmd, check=True, cwd=str(mac_uv_dir))
                 print("Dependencies synced successfully (project not installed).")
         else:
-            #
-
-
-
-
+            # AWS platform (or fallback): use UV with pyproject.aws.toml
+            aws_manifest = Path("pyproject.aws.toml")
+            if aws_manifest.exists():
+                # AWS devcontainer flow (similar to Mac)
+                aws_uv_dir = Path(".aws_uv_project")
+                aws_uv_dir.mkdir(parents=True, exist_ok=True)
+                aws_pyproject = aws_uv_dir / "pyproject.toml"
+                aws_pyproject.write_text(aws_manifest.read_text())
+
+                # Ensure lock matches manifest (in aws temp dir)
+                print("Ensuring lock file matches pyproject.aws.toml (AWS devcon)…")
+                lock_cmd = ["uv", "lock"]
+                print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
+                subprocess.run(
+                    lock_cmd, check=True, capture_output=True, cwd=str(aws_uv_dir)
+                )

-
-
-
-
-
+                # Sync into the active environment
+                if install_project:
+                    print(
+                        "Syncing dependencies into ACTIVE env and installing project [full]…"
+                    )
+                    sync_cmd = ["uv", "sync", "--all-groups", "--active"]
+                    print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
+                    subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
+                    # Install project from repo root
+                    pip_install_cmd = ["uv", "pip", "install", "-e", ".[full]"]
+                    print(f"Running command: {BLUE}{' '.join(pip_install_cmd)}{RESET}")
+                    subprocess.run(pip_install_cmd, check=True)
+                    print("Project installed with 'full' extras successfully.")
+                else:
+                    print(
+                        "Syncing dependencies into ACTIVE env (project not installed)…"
+                    )
+                    sync_cmd = [
+                        "uv",
+                        "sync",
+                        "--all-groups",
+                        "--no-install-project",
+                        "--active",
+                    ]
+                    print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
+                    subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
+                    print("Dependencies synced successfully (project not installed).")
             else:
-                print(
-
-
-
-                    "--all-groups",
-                    "--no-install-project",
-                    "--active",
-                ]
-                print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
-                subprocess.run(sync_cmd, check=True)
-                print("Dependencies synced successfully (project not installed).")
+                print(
+                    "Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+                )
+                sys.exit(1)

     except subprocess.CalledProcessError as e:
         stderr_output = e.stderr.decode() if e.stderr else "No stderr output."
@@ -440,6 +639,252 @@ def install_dependencies(
         sys.exit(1)


+def _get_all_platform_manifests():
+    """Get list of all platform manifests that exist."""
+    manifest_files = []
+    for fname in [
+        "pyproject.aws.toml",
+        "pyproject.mac.toml",
+        "pyproject.workstation.toml",
+    ]:
+        if Path(fname).exists():
+            manifest_files.append(Path(fname))
+    return manifest_files
+
+
+def _update_all_manifests_for_dayhoff_tools(new_version: str):
+    """Update dayhoff-tools constraint in all platform manifests."""
+    import re
+
+    manifest_files = _get_all_platform_manifests()
+
+    if not manifest_files:
+        print("Warning: No platform manifests found to update.")
+        return
+
+    package_name = "dayhoff-tools"
+    package_name_esc = re.escape(package_name)
+
+    # Regex to match the dependency line, with optional extras and version spec
+    pattern = re.compile(
+        rf"^(\s*['\"]){package_name_esc}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
+        re.MULTILINE,
+    )
+
+    new_constraint_text = f">={new_version}"
+
+    def _repl(match: re.Match):
+        prefix = match.group(1)
+        extras = match.group(2) or ""
+        suffix = match.group(3)
+        return f"{prefix}{package_name}{extras}{new_constraint_text}{suffix}"
+
+    # Update all manifest files
+    for manifest_file in manifest_files:
+        try:
+            print(f"Updating {manifest_file} version constraint...")
+            content = manifest_file.read_text()
+            new_content, num_replacements = pattern.subn(_repl, content)
+            if num_replacements > 0:
+                manifest_file.write_text(new_content)
+                print(
+                    f"Updated dayhoff-tools constraint in {manifest_file} to '{new_constraint_text}'"
+                )
+            else:
+                print(
+                    f"Warning: Could not find dayhoff-tools dependency line in {manifest_file}"
+                )
+        except Exception as e:
+            print(f"Error updating {manifest_file}: {e}")
+
+
+def add_dependency(
+    package: str,
+    dev: bool = typer.Option(
+        False, "--dev", "-d", help="Add to dev dependencies instead of main."
+    ),
+):
+    """Add a dependency to all platform-specific manifests.
+
+    Args:
+        package: Package specification (e.g., "numpy>=1.24.0" or "pandas")
+        dev: If True, add to [dependency-groups] dev instead of [project] dependencies
+    """
+    import re
+
+    # ANSI color codes
+    BLUE = "\033[94m"
+    RESET = "\033[0m"
+
+    manifest_files = _get_all_platform_manifests()
+
+    if not manifest_files:
+        print(
+            "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+        )
+        sys.exit(1)
+
+    # Determine section to add to
+    section_name = "dev dependencies" if dev else "main dependencies"
+    print(f"Adding '{package}' to {section_name} in all platform manifests...")
+
+    # Parse package name to check for duplicates
+    package_name = re.split(r"[<>=~!\[]", package)[0].strip()
+
+    for manifest_file in manifest_files:
+        try:
+            content = manifest_file.read_text()
+
+            # Check if package already exists
+            existing_check = re.search(
+                rf'^(\s*["\']){re.escape(package_name)}[<>=~!\[]',
+                content,
+                re.MULTILINE,
+            )
+            if existing_check:
+                print(
+                    f"⚠️ Package '{package_name}' already exists in {manifest_file}, skipping"
+                )
+                continue
+
+            if dev:
+                # Add to [dependency-groups] dev section
+                # Use line-by-line parsing to handle [] in dependency names like dayhoff-tools[full]
+                lines = content.split("\n")
+                in_dev_groups = False
+                in_dev_list = False
+                dev_start_idx = None
+                dev_end_idx = None
+
+                for idx, line in enumerate(lines):
+                    if re.match(r"\s*\[dependency-groups\]", line):
+                        in_dev_groups = True
+                        continue
+                    if in_dev_groups and re.match(r"\s*dev\s*=\s*\[", line):
+                        in_dev_list = True
+                        dev_start_idx = idx
+                        continue
+                    if in_dev_list and re.match(r"^\s*\]\s*$", line):
+                        dev_end_idx = idx
+                        break
+
+                if dev_start_idx is None or dev_end_idx is None:
+                    print(
+                        f"Warning: Could not find [dependency-groups] dev section in {manifest_file}"
+                    )
+                    continue
+
+                # Insert new dependency before the closing ]
+                new_dep = f' "{package}",'
+                lines.insert(dev_end_idx, new_dep)
+                new_content = "\n".join(lines)
+            else:
+                # Add to [project] dependencies section
+                # Use line-by-line parsing to handle [] in dependency names like dayhoff-tools[full]
+                lines = content.split("\n")
+                in_deps = False
+                deps_start_idx = None
+                deps_end_idx = None
+
+                for idx, line in enumerate(lines):
+                    if re.match(r"\s*dependencies\s*=\s*\[", line):
+                        in_deps = True
+                        deps_start_idx = idx
+                        continue
+                    if in_deps and re.match(r"^\s*\]\s*$", line):
+                        deps_end_idx = idx
+                        break
+
+                if deps_start_idx is None or deps_end_idx is None:
+                    print(
+                        f"Warning: Could not find dependencies section in {manifest_file}"
+                    )
+                    continue
+
+                # Insert new dependency before the closing ]
+                new_dep = f' "{package}",'
+                lines.insert(deps_end_idx, new_dep)
+                new_content = "\n".join(lines)
+
+            manifest_file.write_text(new_content)
+            print(f"✅ Added '{package}' to {manifest_file}")
+
+        except Exception as e:
+            print(f"Error updating {manifest_file}: {e}")
+
+    print(f"\n✅ Added '{package}' to all platform manifests")
+    print(
+        f"\nRun {BLUE}dh tomlsync{RESET} to install the new dependency in your environment."
+    )
+
+
+def remove_dependency(
+    package: str,
+    dev: bool = typer.Option(
+        False, "--dev", "-d", help="Remove from dev dependencies instead of main."
+    ),
+):
+    """Remove a dependency from all platform-specific manifests.
+
+    Args:
+        package: Package name (e.g., "numpy" or "pandas")
+        dev: If True, remove from [dependency-groups] dev instead of [project] dependencies
+    """
+    import re
+
+    # ANSI color codes
+    BLUE = "\033[94m"
+    RESET = "\033[0m"
+
+    manifest_files = _get_all_platform_manifests()
+
+    if not manifest_files:
+        print(
+            "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
+        )
+        sys.exit(1)
+
+    section_name = "dev dependencies" if dev else "main dependencies"
+    print(f"Removing '{package}' from {section_name} in all platform manifests...")
+
+    # Escape package name for regex
+    package_esc = re.escape(package)
+
+    removed_count = 0
+    for manifest_file in manifest_files:
+        try:
+            content = manifest_file.read_text()
+
+            # Pattern to match the dependency line (with optional version spec)
+            # Matches: "package...", or "package...",\n
+            pattern = re.compile(
+                rf'^\s*["\']({package_esc}[<>=~!\[].+?|{package_esc})["\'],?\s*(?:#.*)?$',
+                re.MULTILINE,
+            )
+
+            new_content, num_removed = pattern.subn("", content)
+
+            if num_removed > 0:
+                # Clean up any double blank lines
+                new_content = re.sub(r"\n\n\n+", "\n\n", new_content)
+                manifest_file.write_text(new_content)
+                print(f"✅ Removed '{package}' from {manifest_file}")
+                removed_count += 1
+            else:
+                print(f"⚠️ Package '{package}' not found in {manifest_file}")
+
+        except Exception as e:
+            print(f"Error updating {manifest_file}: {e}")
+
+    if removed_count > 0:
+        print(f"\n✅ Removed '{package}' from {removed_count} platform manifest(s)")
+        print(
+            f"\nRun {BLUE}dh tomlsync{RESET} to uninstall the dependency from your environment."
+        )
+    else:
+        print(f"\n⚠️ Package '{package}' was not found in any manifests")
+
+
 def update_dependencies(
     update_all: bool = typer.Option(
         False,
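To make the constraint-rewriting regex in `_update_all_manifests_for_dayhoff_tools` concrete, here is a small self-contained demo on a single manifest line (the input line and target version are illustrative):

import re

package_name = "dayhoff-tools"
pattern = re.compile(
    rf"^(\s*['\"]){re.escape(package_name)}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
    re.MULTILINE,
)
line = '    "dayhoff-tools[full]>=1.11.1",'
new_line = pattern.sub(
    lambda m: f"{m.group(1)}{package_name}{m.group(2) or ''}>=1.11.3{m.group(3)}",
    line,
)
print(new_line)  # ->     "dayhoff-tools[full]>=1.11.3",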
@@ -448,29 +893,73 @@ def update_dependencies(
         help="Update all dependencies instead of just dayhoff-tools.",
     ),
 ):
-    """Update dependencies to newer versions (
+    """Update dependencies to newer versions (platform-aware).

     - Default Action (no flags): Updates only 'dayhoff-tools' package to latest,
       updates ALL manifest files with the version constraint, and syncs.
     - Flags:
-        --all/-a: Updates all dependencies
+        --all/-a: Updates all dependencies and syncs.

     Cross-platform behavior:
-    -
-
-    -
-
-    - Always uses `--active` for sync to target the active venv.
+    - Workstation: Uses pip to upgrade packages, regenerates constraints.txt
+    - Mac/AWS: Uses UV with platform-specific manifests (.mac_uv_project or .aws_uv_project)
+    - Always updates ALL platform manifests (pyproject.aws.toml, pyproject.mac.toml,
+      pyproject.workstation.toml) to ensure version consistency
     """
     # ANSI color codes
     BLUE = "\033[94m"
     RESET = "\033[0m"

-
+    platform = os.environ.get("STUDIO_PLATFORM", "aws")
+
+    # Workstation platform: use pip upgrade
+    if platform == "workstation" and Path("pyproject.workstation.toml").exists():
+        print("Updating dependencies for workstation platform (using pip)...")
+
+        if update_all:
+            print("Error: --all flag not supported on workstation platform yet.")
+            print("Use 'pip install --upgrade <package>' manually for now.")
+            sys.exit(1)
+
+        # Update dayhoff-tools only (default behavior)
+        print("Upgrading dayhoff-tools to latest version...")
+        upgrade_cmd = [
+            sys.executable,
+            "-m",
+            "pip",
+            "install",
+            "--upgrade",
+            "dayhoff-tools[full]",
+        ]
+        print(f"Running command: {BLUE}{' '.join(upgrade_cmd)}{RESET}")
+        subprocess.run(upgrade_cmd, check=True)
+
+        # Get new version
+        result = subprocess.run(
+            [sys.executable, "-m", "pip", "show", "dayhoff-tools"],
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+        version_line = [
+            l for l in result.stdout.split("\n") if l.startswith("Version:")
+        ]
+        if version_line:
+            new_version = version_line[0].split(":", 1)[1].strip()
+            print(f"Updated to dayhoff-tools {new_version}")
+
+            # Update all platform manifests with new constraint
+            _update_all_manifests_for_dayhoff_tools(new_version)
+
+        print("✅ Dependencies updated successfully (workstation)")
+        return
+
+    # Mac/AWS platforms: use UV
     mac_manifest = Path("pyproject.mac.toml")
+    aws_manifest = Path("pyproject.aws.toml")
     mac_uv_dir = Path(".mac_uv_project")
+    aws_uv_dir = Path(".aws_uv_project")
     lock_file_path = Path("uv.lock")
-    pyproject_path = Path("pyproject.toml")

     # Determine action based on flags
     lock_cmd = ["uv", "lock"]
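The `pip show` parsing used in the workstation branch above boils down to a couple of string operations; a minimal sketch on a canned output string (the output text here is illustrative, not a real run):

sample_stdout = "Name: dayhoff-tools\nVersion: 1.11.3\nSummary: ...\n"
version_line = [l for l in sample_stdout.split("\n") if l.startswith("Version:")]
if version_line:
    new_version = version_line[0].split(":", 1)[1].strip()
    print(new_version)  # 1.11.3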
@@ -492,15 +981,29 @@ def update_dependencies(
     )

     try:
-        # Choose working directory for uv operations
+        # Choose working directory for uv operations based on platform
         uv_cwd = None
-        manifest_path_for_constraint =
-
+        manifest_path_for_constraint = None
+
+        if platform == "mac" and mac_manifest.exists():
             mac_uv_dir.mkdir(parents=True, exist_ok=True)
             (mac_uv_dir / "pyproject.toml").write_text(mac_manifest.read_text())
             uv_cwd = str(mac_uv_dir)
             lock_file_path = mac_uv_dir / "uv.lock"
             manifest_path_for_constraint = mac_manifest
+        elif aws_manifest.exists():
+            # AWS platform (default)
+            aws_uv_dir.mkdir(parents=True, exist_ok=True)
+            (aws_uv_dir / "pyproject.toml").write_text(aws_manifest.read_text())
+            uv_cwd = str(aws_uv_dir)
+            lock_file_path = aws_uv_dir / "uv.lock"
+            manifest_path_for_constraint = aws_manifest
+        else:
+            print(
+                "Error: No platform-specific manifest found (pyproject.aws.toml or pyproject.mac.toml)"
+            )
+            sys.exit(1)
+
         # Step 1: Run the update lock command
         print(action_description)
         print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
@@ -534,61 +1037,8 @@ def update_dependencies(

         print(f"Found dayhoff-tools version {locked_version} in lock file.")

-        # Update
-
-        if pyproject_path.exists():
-            manifest_files_to_update.append(pyproject_path)
-        if mac_manifest.exists():
-            manifest_files_to_update.append(mac_manifest)
-
-        if not manifest_files_to_update:
-            print("Warning: No manifest files found to update.")
-            return
-
-        package_name = "dayhoff-tools"
-        package_name_esc = re.escape(package_name)
-
-        # Regex to match the dependency line, with optional extras and version spec
-        pattern = re.compile(
-            rf"^(\s*['\"]){package_name_esc}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
-            re.MULTILINE,
-        )
-
-        new_constraint_text = f">={locked_version}"
-
-        def _repl(match: re.Match):
-            prefix = match.group(1)
-            extras = match.group(2) or ""
-            suffix = match.group(3)
-            return f"{prefix}{package_name}{extras}{new_constraint_text}{suffix}"
-
-        # Update all manifest files
-        updated_files = []
-        for manifest_file in manifest_files_to_update:
-            try:
-                print(f"Updating {manifest_file} version constraint...")
-                content = manifest_file.read_text()
-                new_content, num_replacements = pattern.subn(_repl, content)
-                if num_replacements > 0:
-                    manifest_file.write_text(new_content)
-                    print(
-                        f"Updated dayhoff-tools constraint in {manifest_file} to '{new_constraint_text}'"
-                    )
-                    updated_files.append(str(manifest_file))
-                else:
-                    print(
-                        f"Warning: Could not find dayhoff-tools dependency line in {manifest_file}"
-                    )
-            except FileNotFoundError:
-                print(f"Warning: {manifest_file} not found.")
-            except Exception as e:
-                print(f"Error updating {manifest_file}: {e}")
-
-        if not updated_files:
-            print(
-                "Warning: No manifest files were successfully updated with dayhoff-tools constraint."
-            )
-            print("Proceeding with sync despite manifest update failures.")
+        # Update all platform manifest files to ensure consistency
+        _update_all_manifests_for_dayhoff_tools(locked_version)

         # Step 3: Sync environment
         print("Syncing environment with updated lock file...")
{dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/RECORD
CHANGED
@@ -10,9 +10,9 @@ dayhoff_tools/cli/engine/engine_maintenance.py,sha256=S9w2_Ko2C3zKpzOux-iG8QUYn0
 dayhoff_tools/cli/engine/engine_management.py,sha256=s_H3FtMlKsdfzR8pwV-j2W2QX-Fypkqj2kPC0aTqC1A,19072
 dayhoff_tools/cli/engine/shared.py,sha256=Ecx6I1jtzmxQDn3BezKpgpQ4SJeZf4SZjUCLg-67p80,16844
 dayhoff_tools/cli/engine/studio_commands.py,sha256=VwTQujz32-uMcYusDRE73SdzRpgvIkv7ZAF4zRv6AzA,30266
-dayhoff_tools/cli/main.py,sha256=
+dayhoff_tools/cli/main.py,sha256=Ii5boey--93yGthB_eS2LC7ZR3WHGsJXDHY7uElEtso,6169
 dayhoff_tools/cli/swarm_commands.py,sha256=5EyKj8yietvT5lfoz8Zx0iQvVaNgc3SJX1z2zQR6o6M,5614
-dayhoff_tools/cli/utility_commands.py,sha256=
+dayhoff_tools/cli/utility_commands.py,sha256=Yk2s09Dqkmv1CWJhhMV07Gy0-hZpQhkfiTSmOlykAa8,42185
 dayhoff_tools/deployment/base.py,sha256=48KE76QlWMeIZJefcBOZVbyChS2V_mgs7IQ31odPV2o,17806
 dayhoff_tools/deployment/deploy_aws.py,sha256=gfqh09hGbz0q3oPqVm0imd_CEjKF2k8moGNRIL26qqE,18614
 dayhoff_tools/deployment/deploy_gcp.py,sha256=xgaOVsUDmP6wSEMYNkm1yRNcVskfdz80qJtCulkBIAM,8860
@@ -33,7 +33,7 @@ dayhoff_tools/intake/uniprot.py,sha256=BZYJQF63OtPcBBnQ7_P9gulxzJtqyorgyuDiPeOJq
 dayhoff_tools/logs.py,sha256=DKdeP0k0kliRcilwvX0mUB2eipO5BdWUeHwh-VnsICs,838
 dayhoff_tools/sqlite.py,sha256=jV55ikF8VpTfeQqqlHSbY8OgfyfHj8zgHNpZjBLos_E,18672
 dayhoff_tools/warehouse.py,sha256=UETBtZD3r7WgvURqfGbyHlT7cxoiVq8isjzMuerKw8I,24475
-dayhoff_tools-1.11.
-dayhoff_tools-1.11.
-dayhoff_tools-1.11.
-dayhoff_tools-1.11.
+dayhoff_tools-1.11.3.dist-info/METADATA,sha256=tESeKSHZIZHMi5ekLoJRkRTyBTON40yJ0oegecixNnY,2980
+dayhoff_tools-1.11.3.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+dayhoff_tools-1.11.3.dist-info/entry_points.txt,sha256=iAf4jteNqW3cJm6CO6czLxjW3vxYKsyGLZ8WGmxamSc,49
+dayhoff_tools-1.11.3.dist-info/RECORD,,
{dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/WHEEL
File without changes
{dayhoff_tools-1.11.1.dist-info → dayhoff_tools-1.11.3.dist-info}/entry_points.txt
File without changes