dayhoff-tools 1.1.10__py3-none-any.whl → 1.13.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dayhoff_tools/__init__.py +10 -0
- dayhoff_tools/cli/cloud_commands.py +179 -43
- dayhoff_tools/cli/engine1/__init__.py +323 -0
- dayhoff_tools/cli/engine1/engine_core.py +703 -0
- dayhoff_tools/cli/engine1/engine_lifecycle.py +136 -0
- dayhoff_tools/cli/engine1/engine_maintenance.py +431 -0
- dayhoff_tools/cli/engine1/engine_management.py +505 -0
- dayhoff_tools/cli/engine1/shared.py +501 -0
- dayhoff_tools/cli/engine1/studio_commands.py +825 -0
- dayhoff_tools/cli/engines_studios/__init__.py +6 -0
- dayhoff_tools/cli/engines_studios/api_client.py +351 -0
- dayhoff_tools/cli/engines_studios/auth.py +144 -0
- dayhoff_tools/cli/engines_studios/engine-studio-cli.md +1230 -0
- dayhoff_tools/cli/engines_studios/engine_commands.py +1151 -0
- dayhoff_tools/cli/engines_studios/progress.py +260 -0
- dayhoff_tools/cli/engines_studios/simulators/cli-simulators.md +151 -0
- dayhoff_tools/cli/engines_studios/simulators/demo.sh +75 -0
- dayhoff_tools/cli/engines_studios/simulators/engine_list_simulator.py +319 -0
- dayhoff_tools/cli/engines_studios/simulators/engine_status_simulator.py +369 -0
- dayhoff_tools/cli/engines_studios/simulators/idle_status_simulator.py +476 -0
- dayhoff_tools/cli/engines_studios/simulators/simulator_utils.py +180 -0
- dayhoff_tools/cli/engines_studios/simulators/studio_list_simulator.py +374 -0
- dayhoff_tools/cli/engines_studios/simulators/studio_status_simulator.py +164 -0
- dayhoff_tools/cli/engines_studios/studio_commands.py +755 -0
- dayhoff_tools/cli/main.py +106 -7
- dayhoff_tools/cli/utility_commands.py +896 -179
- dayhoff_tools/deployment/base.py +70 -6
- dayhoff_tools/deployment/deploy_aws.py +165 -25
- dayhoff_tools/deployment/deploy_gcp.py +78 -5
- dayhoff_tools/deployment/deploy_utils.py +20 -7
- dayhoff_tools/deployment/job_runner.py +9 -4
- dayhoff_tools/deployment/processors.py +230 -418
- dayhoff_tools/deployment/swarm.py +47 -12
- dayhoff_tools/embedders.py +28 -26
- dayhoff_tools/fasta.py +181 -64
- dayhoff_tools/warehouse.py +268 -1
- {dayhoff_tools-1.1.10.dist-info → dayhoff_tools-1.13.12.dist-info}/METADATA +20 -5
- dayhoff_tools-1.13.12.dist-info/RECORD +54 -0
- {dayhoff_tools-1.1.10.dist-info → dayhoff_tools-1.13.12.dist-info}/WHEEL +1 -1
- dayhoff_tools-1.1.10.dist-info/RECORD +0 -32
- {dayhoff_tools-1.1.10.dist-info → dayhoff_tools-1.13.12.dist-info}/entry_points.txt +0 -0
|
@@ -9,7 +9,8 @@ from pathlib import Path
|
|
|
9
9
|
|
|
10
10
|
import toml
|
|
11
11
|
import typer
|
|
12
|
-
|
|
12
|
+
|
|
13
|
+
# Import cloud helper lazily inside functions to avoid heavy deps at module load
|
|
13
14
|
|
|
14
15
|
|
|
15
16
|
def test_github_actions_locally():
|
|
@@ -23,108 +24,6 @@ def test_github_actions_locally():
|
|
|
23
24
|
print(f"Error occurred while running the script: {e}")
|
|
24
25
|
|
|
25
26
|
|
|
26
|
-
def get_ancestry(filepath: str) -> None:
|
|
27
|
-
"""Take a .dvc file created from import, and generate an ancestry entry
|
|
28
|
-
that can be manually copied into other .dvc files."""
|
|
29
|
-
with open(filepath, "r") as file:
|
|
30
|
-
assert filepath.endswith(".dvc"), "ERROR: Not a .dvc file"
|
|
31
|
-
ancestor_content = yaml.safe_load(file)
|
|
32
|
-
|
|
33
|
-
error_msg = "Unexpected file structure. Are you sure this is a .dvc file generated from `dvc import`?"
|
|
34
|
-
assert "deps" in ancestor_content, error_msg
|
|
35
|
-
|
|
36
|
-
error_msg = "Please only reference data imported from main branches."
|
|
37
|
-
assert "rev" not in ancestor_content["deps"][0]["repo"], error_msg
|
|
38
|
-
|
|
39
|
-
ancestor_info = {
|
|
40
|
-
"name": os.path.basename(ancestor_content["outs"][0]["path"]),
|
|
41
|
-
"file_md5_hash": ancestor_content["outs"][0]["md5"],
|
|
42
|
-
"size": ancestor_content["outs"][0]["size"],
|
|
43
|
-
"repo_url": ancestor_content["deps"][0]["repo"]["url"],
|
|
44
|
-
"repo_path": ancestor_content["deps"][0]["path"],
|
|
45
|
-
"commit_hash": ancestor_content["deps"][0]["repo"]["rev_lock"],
|
|
46
|
-
}
|
|
47
|
-
print()
|
|
48
|
-
yaml.safe_dump(
|
|
49
|
-
[ancestor_info], sys.stdout, default_flow_style=False, sort_keys=False
|
|
50
|
-
)
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
def import_from_warehouse_typer() -> None:
|
|
54
|
-
"""Import a file from warehouse.
|
|
55
|
-
This is a thin wrapper around `cli.utils.import_from_warehouse`,
|
|
56
|
-
with interactive prompts using questionary.
|
|
57
|
-
"""
|
|
58
|
-
# Import only when the function is called
|
|
59
|
-
import questionary
|
|
60
|
-
from dayhoff_tools.warehouse import import_from_warehouse
|
|
61
|
-
|
|
62
|
-
# Ensure execution from root
|
|
63
|
-
cwd = Path(os.getcwd())
|
|
64
|
-
if cwd.parent.name != "workspaces" or str(cwd.parent.parent) != cwd.root:
|
|
65
|
-
raise Exception(
|
|
66
|
-
f"This command must be executed from the repo's root directory (/workspaces/reponame). Current directory: {cwd}"
|
|
67
|
-
)
|
|
68
|
-
|
|
69
|
-
# Use questionary for prompts instead of typer
|
|
70
|
-
warehouse_path = questionary.text("Warehouse path:").ask()
|
|
71
|
-
|
|
72
|
-
# Provide multiple-choice options for output folder
|
|
73
|
-
output_folder_choice = questionary.select(
|
|
74
|
-
"Output folder:",
|
|
75
|
-
choices=["data/imports", "same_as_warehouse", "Custom path..."],
|
|
76
|
-
).ask()
|
|
77
|
-
|
|
78
|
-
# If custom path is selected, ask for the path
|
|
79
|
-
if output_folder_choice == "Custom path...":
|
|
80
|
-
output_folder = questionary.text("Enter custom output folder:").ask()
|
|
81
|
-
else:
|
|
82
|
-
output_folder = output_folder_choice
|
|
83
|
-
|
|
84
|
-
branch = questionary.text("Branch (default: main):", default="main").ask()
|
|
85
|
-
|
|
86
|
-
final_path = import_from_warehouse(
|
|
87
|
-
warehouse_path=warehouse_path,
|
|
88
|
-
output_folder=output_folder,
|
|
89
|
-
branch=branch,
|
|
90
|
-
)
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
def add_to_warehouse_typer() -> None:
|
|
94
|
-
"""Add a new data file to warehouse, and expand its .dvc file with
|
|
95
|
-
metadata, including ancestor files."""
|
|
96
|
-
# Import only when the function is called
|
|
97
|
-
import questionary
|
|
98
|
-
from dayhoff_tools.warehouse import add_to_warehouse
|
|
99
|
-
|
|
100
|
-
# Ensure execution from root
|
|
101
|
-
cwd = Path(os.getcwd())
|
|
102
|
-
if cwd.parent.name != "workspaces" or str(cwd.parent.parent) != cwd.root:
|
|
103
|
-
raise Exception(
|
|
104
|
-
f"This command must be executed from the repo's root directory (/workspaces/reponame). Current directory: {cwd}"
|
|
105
|
-
)
|
|
106
|
-
|
|
107
|
-
# Prompt for the data file path
|
|
108
|
-
warehouse_path = questionary.text("Data file to be registered:").ask()
|
|
109
|
-
|
|
110
|
-
# Prompt for the ancestor .dvc file paths
|
|
111
|
-
ancestor_dvc_paths = []
|
|
112
|
-
print("\nEnter the path of all ancestor .dvc files (or hit Enter to finish).")
|
|
113
|
-
print("These files must be generated by `dvc import` or `dh wimport`.")
|
|
114
|
-
while True:
|
|
115
|
-
ancestor_path = questionary.text("Ancestor path: ").ask()
|
|
116
|
-
if ancestor_path:
|
|
117
|
-
ancestor_dvc_paths.append(ancestor_path)
|
|
118
|
-
else:
|
|
119
|
-
print()
|
|
120
|
-
break
|
|
121
|
-
|
|
122
|
-
dvc_path = add_to_warehouse(
|
|
123
|
-
warehouse_path=warehouse_path,
|
|
124
|
-
ancestor_dvc_paths=ancestor_dvc_paths,
|
|
125
|
-
)
|
|
126
|
-
|
|
127
|
-
|
|
128
27
|
def delete_local_branch(branch_name: str, folder_path: str):
|
|
129
28
|
"""Delete a local Git branch after fetching with pruning.
|
|
130
29
|
|
|
@@ -211,7 +110,24 @@ def build_and_upload_wheel(bump_part: str = "patch"):
|
|
|
211
110
|
publish_cmd = ["uv", "publish", "--token", token]
|
|
212
111
|
print("Using UV_PUBLISH_TOKEN for authentication.")
|
|
213
112
|
|
|
214
|
-
|
|
113
|
+
# Find the primary manifest (prefer AWS, then Mac, then Workstation)
|
|
114
|
+
pyproject_path = None
|
|
115
|
+
for candidate in [
|
|
116
|
+
"pyproject.aws.toml",
|
|
117
|
+
"pyproject.mac.toml",
|
|
118
|
+
"pyproject.workstation.toml",
|
|
119
|
+
]:
|
|
120
|
+
if Path(candidate).exists():
|
|
121
|
+
pyproject_path = candidate
|
|
122
|
+
break
|
|
123
|
+
|
|
124
|
+
if not pyproject_path:
|
|
125
|
+
print(
|
|
126
|
+
"Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
|
|
127
|
+
)
|
|
128
|
+
return
|
|
129
|
+
|
|
130
|
+
print(f"Using manifest: {pyproject_path}")
|
|
215
131
|
current_version = None # Initialize in case the first try block fails
|
|
216
132
|
|
|
217
133
|
try:
|
|
@@ -273,23 +189,91 @@ def build_and_upload_wheel(bump_part: str = "patch"):
|
|
|
273
189
|
with open(pyproject_path, "w") as f:
|
|
274
190
|
f.write(new_content)
|
|
275
191
|
print(f"Updated {pyproject_path} with version {new_version}")
|
|
192
|
+
|
|
193
|
+
# Mirror version in all other platform manifests (best-effort)
|
|
194
|
+
other_manifests = []
|
|
195
|
+
for candidate in [
|
|
196
|
+
"pyproject.aws.toml",
|
|
197
|
+
"pyproject.mac.toml",
|
|
198
|
+
"pyproject.workstation.toml",
|
|
199
|
+
]:
|
|
200
|
+
if Path(candidate).exists() and candidate != pyproject_path:
|
|
201
|
+
other_manifests.append(Path(candidate))
|
|
202
|
+
|
|
203
|
+
for manifest_path in other_manifests:
|
|
204
|
+
try:
|
|
205
|
+
content = manifest_path.read_text()
|
|
206
|
+
pattern = re.compile(
|
|
207
|
+
f'^version\s*=\s*"{re.escape(current_version)}"', re.MULTILINE
|
|
208
|
+
)
|
|
209
|
+
new_content, replacements = pattern.subn(
|
|
210
|
+
f'version = "{new_version}"', content
|
|
211
|
+
)
|
|
212
|
+
if replacements > 0:
|
|
213
|
+
manifest_path.write_text(new_content)
|
|
214
|
+
print(f"Updated {manifest_path} with version {new_version}")
|
|
215
|
+
except Exception as e:
|
|
216
|
+
print(f"Warning: Could not update {manifest_path}: {e}")
|
|
276
217
|
# --- End Version Bumping Logic ---
|
|
277
218
|
|
|
278
219
|
# Build wheel and sdist
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
)
|
|
220
|
+
# UV expects pyproject.toml, so temporarily copy the platform manifest
|
|
221
|
+
backup_created = False
|
|
222
|
+
temp_pyproject_created = False
|
|
223
|
+
if pyproject_path != "pyproject.toml":
|
|
224
|
+
if Path("pyproject.toml").exists():
|
|
225
|
+
Path("pyproject.toml").rename("pyproject.toml.build.bak")
|
|
226
|
+
backup_created = True
|
|
227
|
+
Path(pyproject_path).read_text()
|
|
228
|
+
with open("pyproject.toml", "w") as f:
|
|
229
|
+
f.write(Path(pyproject_path).read_text())
|
|
230
|
+
temp_pyproject_created = True
|
|
291
231
|
|
|
292
|
-
|
|
232
|
+
try:
|
|
233
|
+
build_cmd = ["uv", "build"]
|
|
234
|
+
# Print command in blue
|
|
235
|
+
print(f"Running command: {BLUE}{' '.join(build_cmd)}{RESET}")
|
|
236
|
+
subprocess.run(build_cmd, check=True)
|
|
237
|
+
|
|
238
|
+
# Upload using uv publish with explicit arguments
|
|
239
|
+
# Print masked command in blue
|
|
240
|
+
print(f"Running command: {BLUE}{' '.join(publish_cmd_safe_print)}{RESET}")
|
|
241
|
+
subprocess.run(
|
|
242
|
+
publish_cmd, # Use the actual command with token
|
|
243
|
+
check=True,
|
|
244
|
+
)
|
|
245
|
+
|
|
246
|
+
print(f"Successfully built and uploaded version {new_version} to PyPI")
|
|
247
|
+
|
|
248
|
+
# Re-install DHT in current venv when building from DHT itself
|
|
249
|
+
# (Keep temp pyproject.toml until after this step)
|
|
250
|
+
try:
|
|
251
|
+
proj_name = None
|
|
252
|
+
try:
|
|
253
|
+
proj_toml = toml.load(pyproject_path)
|
|
254
|
+
proj_name = (
|
|
255
|
+
proj_toml.get("project", {}).get("name")
|
|
256
|
+
if isinstance(proj_toml, dict)
|
|
257
|
+
else None
|
|
258
|
+
)
|
|
259
|
+
except Exception:
|
|
260
|
+
pass
|
|
261
|
+
if proj_name == "dayhoff-tools":
|
|
262
|
+
print("Re-installing dayhoff-tools into the active environment…")
|
|
263
|
+
reinstall_cmd = ["uv", "pip", "install", "-e", ".[full]"]
|
|
264
|
+
print(f"Running command: {BLUE}{' '.join(reinstall_cmd)}{RESET}")
|
|
265
|
+
subprocess.run(reinstall_cmd, check=True)
|
|
266
|
+
print("dayhoff-tools reinstalled in the current environment.")
|
|
267
|
+
except subprocess.CalledProcessError as e:
|
|
268
|
+
print(f"Warning: Failed to reinstall dayhoff-tools locally: {e}")
|
|
269
|
+
|
|
270
|
+
finally:
|
|
271
|
+
# Restore original state (always clean up, even if errors occurred)
|
|
272
|
+
if temp_pyproject_created:
|
|
273
|
+
if Path("pyproject.toml").exists():
|
|
274
|
+
Path("pyproject.toml").unlink()
|
|
275
|
+
if backup_created and Path("pyproject.toml.build.bak").exists():
|
|
276
|
+
Path("pyproject.toml.build.bak").rename("pyproject.toml")
|
|
293
277
|
|
|
294
278
|
except FileNotFoundError:
|
|
295
279
|
print(f"Error: {pyproject_path} not found.")
|
|
@@ -319,6 +303,26 @@ def build_and_upload_wheel(bump_part: str = "patch"):
|
|
|
319
303
|
print(
|
|
320
304
|
f"Warning: Could not find version {new_version} to revert in {pyproject_path}."
|
|
321
305
|
)
|
|
306
|
+
|
|
307
|
+
# Also revert other platform manifests
|
|
308
|
+
for candidate in [
|
|
309
|
+
"pyproject.aws.toml",
|
|
310
|
+
"pyproject.mac.toml",
|
|
311
|
+
"pyproject.workstation.toml",
|
|
312
|
+
]:
|
|
313
|
+
if Path(candidate).exists() and candidate != pyproject_path:
|
|
314
|
+
try:
|
|
315
|
+
content_revert = Path(candidate).read_text()
|
|
316
|
+
reverted, num = pattern_revert.subn(
|
|
317
|
+
f'version = "{current_version}"', content_revert
|
|
318
|
+
)
|
|
319
|
+
if num > 0:
|
|
320
|
+
Path(candidate).write_text(reverted)
|
|
321
|
+
print(f"Successfully reverted version in {candidate}.")
|
|
322
|
+
except Exception as e2:
|
|
323
|
+
print(
|
|
324
|
+
f"Warning: Failed to revert version change in {candidate}: {e2}"
|
|
325
|
+
)
|
|
322
326
|
except Exception as revert_e:
|
|
323
327
|
print(
|
|
324
328
|
f"Warning: Failed to revert version change in {pyproject_path}: {revert_e}"
|
|
@@ -348,6 +352,25 @@ def build_and_upload_wheel(bump_part: str = "patch"):
|
|
|
348
352
|
print(
|
|
349
353
|
f"Warning: Could not find version {new_version} to revert in {pyproject_path}."
|
|
350
354
|
)
|
|
355
|
+
# Also revert other platform manifests
|
|
356
|
+
for candidate in [
|
|
357
|
+
"pyproject.aws.toml",
|
|
358
|
+
"pyproject.mac.toml",
|
|
359
|
+
"pyproject.workstation.toml",
|
|
360
|
+
]:
|
|
361
|
+
if Path(candidate).exists() and candidate != pyproject_path:
|
|
362
|
+
try:
|
|
363
|
+
content_revert = Path(candidate).read_text()
|
|
364
|
+
reverted, num = pattern_revert.subn(
|
|
365
|
+
f'version = "{current_version}"', content_revert
|
|
366
|
+
)
|
|
367
|
+
if num > 0:
|
|
368
|
+
Path(candidate).write_text(reverted)
|
|
369
|
+
print(f"Successfully reverted version in {candidate}.")
|
|
370
|
+
except Exception as e2:
|
|
371
|
+
print(
|
|
372
|
+
f"Warning: Failed to revert version change in {candidate}: {e2}"
|
|
373
|
+
)
|
|
351
374
|
except Exception as revert_e:
|
|
352
375
|
print(
|
|
353
376
|
f"Warning: Failed to revert version change in {pyproject_path}: {revert_e}"
|
|
@@ -357,40 +380,319 @@ def build_and_upload_wheel(bump_part: str = "patch"):
|
|
|
357
380
|
# --- Dependency Management Commands ---
|
|
358
381
|
|
|
359
382
|
|
|
360
|
-
def
|
|
383
|
+
def sync_with_toml(
|
|
361
384
|
install_project: bool = typer.Option(
|
|
362
385
|
False,
|
|
363
386
|
"--install-project",
|
|
364
387
|
"-p",
|
|
365
|
-
help="Install the local project package itself into the environment.",
|
|
388
|
+
help="Install the local project package itself (with 'full' extras) into the environment.",
|
|
366
389
|
),
|
|
367
390
|
):
|
|
368
|
-
"""
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
391
|
+
"""Sync environment with platform-specific TOML manifest (install/update dependencies).
|
|
392
|
+
|
|
393
|
+
Behavior by platform:
|
|
394
|
+
- Workstation (STUDIO_PLATFORM=workstation) with pyproject.workstation.toml:
|
|
395
|
+
* Uses pip with constraints.txt to preserve NGC PyTorch
|
|
396
|
+
* Parses dependencies directly from pyproject.workstation.toml
|
|
397
|
+
* Installs into .venv_workstation with --system-site-packages
|
|
398
|
+
- Mac (STUDIO_PLATFORM=mac) with pyproject.mac.toml:
|
|
399
|
+
* Ensure `.mac_uv_project/pyproject.toml` is a copy of `pyproject.mac.toml`
|
|
400
|
+
* Run `uv lock` and `uv sync` in `.mac_uv_project` targeting active venv with `--active`
|
|
401
|
+
* If `install_project` is true, install the project from repo root into the active env (editable, [full])
|
|
402
|
+
- AWS (default) with pyproject.aws.toml:
|
|
403
|
+
* Uses UV in temp directory `.aws_uv_project` similar to Mac
|
|
404
|
+
* Run `uv lock` and `uv sync` targeting active venv
|
|
373
405
|
"""
|
|
374
406
|
# ANSI color codes
|
|
375
407
|
BLUE = "\033[94m"
|
|
376
408
|
RESET = "\033[0m"
|
|
377
409
|
|
|
378
410
|
try:
|
|
379
|
-
|
|
380
|
-
print("Ensuring lock file matches pyproject.toml...")
|
|
381
|
-
lock_cmd = ["uv", "lock"]
|
|
382
|
-
print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
|
|
383
|
-
subprocess.run(lock_cmd, check=True, capture_output=True)
|
|
411
|
+
platform = os.environ.get("STUDIO_PLATFORM", "aws")
|
|
384
412
|
|
|
385
|
-
#
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
413
|
+
# Workstation platform: use pip with constraints
|
|
414
|
+
if platform == "workstation" and Path("pyproject.workstation.toml").exists():
|
|
415
|
+
print(
|
|
416
|
+
"Installing dependencies for workstation platform (using pip + constraints)..."
|
|
417
|
+
)
|
|
418
|
+
|
|
419
|
+
# Check for constraints.txt
|
|
420
|
+
if not Path("constraints.txt").exists():
|
|
421
|
+
print(
|
|
422
|
+
"Error: constraints.txt not found. Run direnv to generate it first."
|
|
423
|
+
)
|
|
424
|
+
sys.exit(1)
|
|
425
|
+
|
|
426
|
+
# Parse and install dependencies from pyproject.workstation.toml
|
|
427
|
+
import re
|
|
428
|
+
|
|
429
|
+
with open("pyproject.workstation.toml", "r") as f:
|
|
430
|
+
content = f.read()
|
|
431
|
+
|
|
432
|
+
# Extract dependencies list using line-by-line parsing to handle [] in package names
|
|
433
|
+
lines = content.split("\n")
|
|
434
|
+
in_deps = False
|
|
435
|
+
deps_lines = []
|
|
436
|
+
|
|
437
|
+
for line in lines:
|
|
438
|
+
if re.match(r"\s*dependencies\s*=\s*\[", line):
|
|
439
|
+
in_deps = True
|
|
440
|
+
continue
|
|
441
|
+
if in_deps:
|
|
442
|
+
if re.match(r"^\s*\]\s*$", line):
|
|
443
|
+
break
|
|
444
|
+
deps_lines.append(line)
|
|
445
|
+
|
|
446
|
+
deps = []
|
|
447
|
+
for line in deps_lines:
|
|
448
|
+
line = line.strip()
|
|
449
|
+
if line.startswith('"') or line.startswith("'"):
|
|
450
|
+
dep = re.sub(r'["\']', "", line)
|
|
451
|
+
dep = re.sub(r",?\s*#.*$", "", dep)
|
|
452
|
+
dep = dep.strip().rstrip(",")
|
|
453
|
+
if dep:
|
|
454
|
+
deps.append(dep)
|
|
455
|
+
|
|
456
|
+
if deps:
|
|
457
|
+
pip_cmd = (
|
|
458
|
+
[sys.executable, "-m", "pip", "install"]
|
|
459
|
+
+ deps
|
|
460
|
+
+ ["-c", "constraints.txt"]
|
|
461
|
+
)
|
|
462
|
+
print(
|
|
463
|
+
f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
|
|
464
|
+
)
|
|
465
|
+
subprocess.run(pip_cmd, check=True)
|
|
466
|
+
|
|
467
|
+
# Install dev dependencies using line-by-line parsing
|
|
468
|
+
in_dev_groups = False
|
|
469
|
+
in_dev_list = False
|
|
470
|
+
dev_lines = []
|
|
471
|
+
|
|
472
|
+
for line in lines:
|
|
473
|
+
if re.match(r"\s*\[dependency-groups\]", line):
|
|
474
|
+
in_dev_groups = True
|
|
475
|
+
continue
|
|
476
|
+
if in_dev_groups and re.match(r"\s*dev\s*=\s*\[", line):
|
|
477
|
+
in_dev_list = True
|
|
478
|
+
continue
|
|
479
|
+
if in_dev_list:
|
|
480
|
+
if re.match(r"^\s*\]\s*$", line):
|
|
481
|
+
break
|
|
482
|
+
dev_lines.append(line)
|
|
483
|
+
|
|
484
|
+
dev_deps = []
|
|
485
|
+
for line in dev_lines:
|
|
486
|
+
line = line.strip()
|
|
487
|
+
if line.startswith('"') or line.startswith("'"):
|
|
488
|
+
dep = re.sub(r'["\']', "", line)
|
|
489
|
+
dep = re.sub(r",?\s*#.*$", "", dep)
|
|
490
|
+
dep = dep.strip().rstrip(",")
|
|
491
|
+
if dep:
|
|
492
|
+
dev_deps.append(dep)
|
|
493
|
+
|
|
494
|
+
if dev_deps:
|
|
495
|
+
print("Installing dev dependencies...")
|
|
496
|
+
pip_cmd = (
|
|
497
|
+
[sys.executable, "-m", "pip", "install"]
|
|
498
|
+
+ dev_deps
|
|
499
|
+
+ ["-c", "constraints.txt"]
|
|
500
|
+
)
|
|
501
|
+
print(
|
|
502
|
+
f"Running command: {BLUE}{' '.join(pip_cmd[:5])} ... -c constraints.txt{RESET}"
|
|
503
|
+
)
|
|
504
|
+
subprocess.run(pip_cmd, check=True)
|
|
505
|
+
|
|
506
|
+
# Install project if requested
|
|
507
|
+
if install_project:
|
|
508
|
+
repo_name = Path.cwd().name
|
|
509
|
+
if repo_name == "dayhoff-tools":
|
|
510
|
+
pip_cmd = [
|
|
511
|
+
sys.executable,
|
|
512
|
+
"-m",
|
|
513
|
+
"pip",
|
|
514
|
+
"install",
|
|
515
|
+
"-e",
|
|
516
|
+
".[full]",
|
|
517
|
+
"-c",
|
|
518
|
+
"constraints.txt",
|
|
519
|
+
]
|
|
520
|
+
else:
|
|
521
|
+
pip_cmd = [
|
|
522
|
+
sys.executable,
|
|
523
|
+
"-m",
|
|
524
|
+
"pip",
|
|
525
|
+
"install",
|
|
526
|
+
"-e",
|
|
527
|
+
".",
|
|
528
|
+
"-c",
|
|
529
|
+
"constraints.txt",
|
|
530
|
+
]
|
|
531
|
+
print(f"Running command: {BLUE}{' '.join(pip_cmd)}{RESET}")
|
|
532
|
+
subprocess.run(pip_cmd, check=True)
|
|
533
|
+
|
|
534
|
+
print("✅ Dependencies installed successfully (workstation)")
|
|
535
|
+
return
|
|
392
536
|
|
|
393
|
-
|
|
537
|
+
# Mac platform: use UV with pyproject.mac.toml
|
|
538
|
+
is_mac = platform == "mac"
|
|
539
|
+
mac_manifest = Path("pyproject.mac.toml")
|
|
540
|
+
if is_mac and mac_manifest.exists():
|
|
541
|
+
# Mac devcontainer flow
|
|
542
|
+
mac_uv_dir = Path(".mac_uv_project")
|
|
543
|
+
mac_uv_dir.mkdir(parents=True, exist_ok=True)
|
|
544
|
+
mac_pyproject = mac_uv_dir / "pyproject.toml"
|
|
545
|
+
mac_pyproject.write_text(mac_manifest.read_text())
|
|
546
|
+
|
|
547
|
+
# Copy README.md if it exists (required by some build backends)
|
|
548
|
+
if Path("README.md").exists():
|
|
549
|
+
(mac_uv_dir / "README.md").write_text(Path("README.md").read_text())
|
|
550
|
+
|
|
551
|
+
# Ensure lock matches manifest (in mac temp dir)
|
|
552
|
+
print("Ensuring lock file matches pyproject.mac.toml (Mac devcon)…")
|
|
553
|
+
lock_cmd = ["uv", "lock"]
|
|
554
|
+
print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
|
|
555
|
+
subprocess.run(
|
|
556
|
+
lock_cmd, check=True, capture_output=True, cwd=str(mac_uv_dir)
|
|
557
|
+
)
|
|
558
|
+
|
|
559
|
+
# Sync into the active environment
|
|
560
|
+
if install_project:
|
|
561
|
+
print(
|
|
562
|
+
"Syncing dependencies into ACTIVE env (project installed separately)…"
|
|
563
|
+
)
|
|
564
|
+
sync_cmd = [
|
|
565
|
+
"uv",
|
|
566
|
+
"sync",
|
|
567
|
+
"--all-groups",
|
|
568
|
+
"--no-install-project",
|
|
569
|
+
"--active",
|
|
570
|
+
]
|
|
571
|
+
print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
|
|
572
|
+
subprocess.run(sync_cmd, check=True, cwd=str(mac_uv_dir))
|
|
573
|
+
|
|
574
|
+
# Install project from repo root (where source code actually is)
|
|
575
|
+
# Temporarily create pyproject.toml at repo root for UV
|
|
576
|
+
print("Installing project with 'full' extras from repo root…")
|
|
577
|
+
temp_pyproject = False
|
|
578
|
+
backup_created = False
|
|
579
|
+
try:
|
|
580
|
+
if not Path("pyproject.toml").exists():
|
|
581
|
+
# Create temp pyproject.toml from platform manifest
|
|
582
|
+
Path("pyproject.toml").write_text(mac_manifest.read_text())
|
|
583
|
+
temp_pyproject = True
|
|
584
|
+
elif Path("pyproject.toml").is_symlink():
|
|
585
|
+
# Backup existing symlink
|
|
586
|
+
Path("pyproject.toml").rename("pyproject.toml.sync.bak")
|
|
587
|
+
Path("pyproject.toml").write_text(mac_manifest.read_text())
|
|
588
|
+
backup_created = True
|
|
589
|
+
|
|
590
|
+
pip_install_cmd = ["uv", "pip", "install", "-e", ".[full]"]
|
|
591
|
+
print(f"Running command: {BLUE}{' '.join(pip_install_cmd)}{RESET}")
|
|
592
|
+
subprocess.run(pip_install_cmd, check=True)
|
|
593
|
+
print("Project installed with 'full' extras successfully.")
|
|
594
|
+
finally:
|
|
595
|
+
# Clean up temp pyproject.toml
|
|
596
|
+
if temp_pyproject and Path("pyproject.toml").exists():
|
|
597
|
+
Path("pyproject.toml").unlink()
|
|
598
|
+
if backup_created and Path("pyproject.toml.sync.bak").exists():
|
|
599
|
+
Path("pyproject.toml.sync.bak").rename("pyproject.toml")
|
|
600
|
+
else:
|
|
601
|
+
print("Syncing dependencies into ACTIVE env (project not installed)…")
|
|
602
|
+
sync_cmd = [
|
|
603
|
+
"uv",
|
|
604
|
+
"sync",
|
|
605
|
+
"--all-groups",
|
|
606
|
+
"--no-install-project",
|
|
607
|
+
"--active",
|
|
608
|
+
]
|
|
609
|
+
print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
|
|
610
|
+
subprocess.run(sync_cmd, check=True, cwd=str(mac_uv_dir))
|
|
611
|
+
print("Dependencies synced successfully (project not installed).")
|
|
612
|
+
else:
|
|
613
|
+
# AWS platform (or fallback): use UV with pyproject.aws.toml
|
|
614
|
+
aws_manifest = Path("pyproject.aws.toml")
|
|
615
|
+
if aws_manifest.exists():
|
|
616
|
+
# AWS devcontainer flow (similar to Mac)
|
|
617
|
+
aws_uv_dir = Path(".aws_uv_project")
|
|
618
|
+
aws_uv_dir.mkdir(parents=True, exist_ok=True)
|
|
619
|
+
aws_pyproject = aws_uv_dir / "pyproject.toml"
|
|
620
|
+
aws_pyproject.write_text(aws_manifest.read_text())
|
|
621
|
+
|
|
622
|
+
# Copy README.md if it exists (required by some build backends)
|
|
623
|
+
if Path("README.md").exists():
|
|
624
|
+
(aws_uv_dir / "README.md").write_text(Path("README.md").read_text())
|
|
625
|
+
|
|
626
|
+
# Ensure lock matches manifest (in aws temp dir)
|
|
627
|
+
print("Ensuring lock file matches pyproject.aws.toml (AWS devcon)…")
|
|
628
|
+
lock_cmd = ["uv", "lock"]
|
|
629
|
+
print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
|
|
630
|
+
subprocess.run(
|
|
631
|
+
lock_cmd, check=True, capture_output=True, cwd=str(aws_uv_dir)
|
|
632
|
+
)
|
|
633
|
+
|
|
634
|
+
# Sync into the active environment
|
|
635
|
+
if install_project:
|
|
636
|
+
print(
|
|
637
|
+
"Syncing dependencies into ACTIVE env (project installed separately)…"
|
|
638
|
+
)
|
|
639
|
+
sync_cmd = [
|
|
640
|
+
"uv",
|
|
641
|
+
"sync",
|
|
642
|
+
"--all-groups",
|
|
643
|
+
"--no-install-project",
|
|
644
|
+
"--active",
|
|
645
|
+
]
|
|
646
|
+
print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
|
|
647
|
+
subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
|
|
648
|
+
|
|
649
|
+
# Install project from repo root (where source code actually is)
|
|
650
|
+
# Temporarily create pyproject.toml at repo root for UV
|
|
651
|
+
print("Installing project with 'full' extras from repo root…")
|
|
652
|
+
temp_pyproject = False
|
|
653
|
+
backup_created = False
|
|
654
|
+
try:
|
|
655
|
+
if not Path("pyproject.toml").exists():
|
|
656
|
+
# Create temp pyproject.toml from platform manifest
|
|
657
|
+
Path("pyproject.toml").write_text(aws_manifest.read_text())
|
|
658
|
+
temp_pyproject = True
|
|
659
|
+
elif Path("pyproject.toml").is_symlink():
|
|
660
|
+
# Backup existing symlink
|
|
661
|
+
Path("pyproject.toml").rename("pyproject.toml.sync.bak")
|
|
662
|
+
Path("pyproject.toml").write_text(aws_manifest.read_text())
|
|
663
|
+
backup_created = True
|
|
664
|
+
|
|
665
|
+
pip_install_cmd = ["uv", "pip", "install", "-e", ".[full]"]
|
|
666
|
+
print(
|
|
667
|
+
f"Running command: {BLUE}{' '.join(pip_install_cmd)}{RESET}"
|
|
668
|
+
)
|
|
669
|
+
subprocess.run(pip_install_cmd, check=True)
|
|
670
|
+
print("Project installed with 'full' extras successfully.")
|
|
671
|
+
finally:
|
|
672
|
+
# Clean up temp pyproject.toml
|
|
673
|
+
if temp_pyproject and Path("pyproject.toml").exists():
|
|
674
|
+
Path("pyproject.toml").unlink()
|
|
675
|
+
if backup_created and Path("pyproject.toml.sync.bak").exists():
|
|
676
|
+
Path("pyproject.toml.sync.bak").rename("pyproject.toml")
|
|
677
|
+
else:
|
|
678
|
+
print(
|
|
679
|
+
"Syncing dependencies into ACTIVE env (project not installed)…"
|
|
680
|
+
)
|
|
681
|
+
sync_cmd = [
|
|
682
|
+
"uv",
|
|
683
|
+
"sync",
|
|
684
|
+
"--all-groups",
|
|
685
|
+
"--no-install-project",
|
|
686
|
+
"--active",
|
|
687
|
+
]
|
|
688
|
+
print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
|
|
689
|
+
subprocess.run(sync_cmd, check=True, cwd=str(aws_uv_dir))
|
|
690
|
+
print("Dependencies synced successfully (project not installed).")
|
|
691
|
+
else:
|
|
692
|
+
print(
|
|
693
|
+
"Error: No platform-specific manifest found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
|
|
694
|
+
)
|
|
695
|
+
sys.exit(1)
|
|
394
696
|
|
|
395
697
|
except subprocess.CalledProcessError as e:
|
|
396
698
|
stderr_output = e.stderr.decode() if e.stderr else "No stderr output."
|
|
@@ -409,6 +711,365 @@ def install_dependencies(
|
|
|
409
711
|
sys.exit(1)
|
|
410
712
|
|
|
411
713
|
|
|
714
|
+
def _get_all_platform_manifests():
|
|
715
|
+
"""Get list of all platform manifests that exist."""
|
|
716
|
+
manifest_files = []
|
|
717
|
+
for fname in [
|
|
718
|
+
"pyproject.aws.toml",
|
|
719
|
+
"pyproject.mac.toml",
|
|
720
|
+
"pyproject.workstation.toml",
|
|
721
|
+
]:
|
|
722
|
+
if Path(fname).exists():
|
|
723
|
+
manifest_files.append(Path(fname))
|
|
724
|
+
return manifest_files
|
|
725
|
+
|
|
726
|
+
|
|
727
|
+
def _resolve_package_version(package_name: str) -> str | None:
|
|
728
|
+
"""Resolve a package version by running uv lock and parsing the lock file.
|
|
729
|
+
|
|
730
|
+
Args:
|
|
731
|
+
package_name: Name of the package to resolve
|
|
732
|
+
|
|
733
|
+
Returns:
|
|
734
|
+
Resolved version string, or None if resolution failed
|
|
735
|
+
"""
|
|
736
|
+
import os
|
|
737
|
+
|
|
738
|
+
try:
|
|
739
|
+
# Determine which manifest to use (prefer Mac, then AWS)
|
|
740
|
+
platform = os.environ.get("STUDIO_PLATFORM", "aws")
|
|
741
|
+
manifest_path = None
|
|
742
|
+
uv_dir = None
|
|
743
|
+
|
|
744
|
+
if platform == "mac" and Path("pyproject.mac.toml").exists():
|
|
745
|
+
manifest_path = Path("pyproject.mac.toml")
|
|
746
|
+
uv_dir = Path(".mac_uv_project")
|
|
747
|
+
elif Path("pyproject.aws.toml").exists():
|
|
748
|
+
manifest_path = Path("pyproject.aws.toml")
|
|
749
|
+
uv_dir = Path(".aws_uv_project")
|
|
750
|
+
elif Path("pyproject.mac.toml").exists():
|
|
751
|
+
# Fallback to Mac if AWS doesn't exist
|
|
752
|
+
manifest_path = Path("pyproject.mac.toml")
|
|
753
|
+
uv_dir = Path(".mac_uv_project")
|
|
754
|
+
else:
|
|
755
|
+
return None
|
|
756
|
+
|
|
757
|
+
# Create temp directory and copy manifest
|
|
758
|
+
uv_dir.mkdir(parents=True, exist_ok=True)
|
|
759
|
+
(uv_dir / "pyproject.toml").write_text(manifest_path.read_text())
|
|
760
|
+
|
|
761
|
+
# Copy README if it exists
|
|
762
|
+
if Path("README.md").exists():
|
|
763
|
+
(uv_dir / "README.md").write_text(Path("README.md").read_text())
|
|
764
|
+
|
|
765
|
+
# Run uv lock (suppress output)
|
|
766
|
+
subprocess.run(["uv", "lock"], cwd=str(uv_dir), check=True, capture_output=True)
|
|
767
|
+
|
|
768
|
+
# Parse lock file
|
|
769
|
+
lock_file = uv_dir / "uv.lock"
|
|
770
|
+
if not lock_file.exists():
|
|
771
|
+
return None
|
|
772
|
+
|
|
773
|
+
lock_data = toml.load(lock_file)
|
|
774
|
+
for package in lock_data.get("package", []):
|
|
775
|
+
if package.get("name") == package_name:
|
|
776
|
+
return package.get("version")
|
|
777
|
+
|
|
778
|
+
return None
|
|
779
|
+
|
|
780
|
+
except Exception as e:
|
|
781
|
+
print(f"Warning: Failed to resolve version: {e}")
|
|
782
|
+
return None
|
|
783
|
+
|
|
784
|
+
|
|
785
|
+
def _update_all_manifests_for_dayhoff_tools(new_version: str):
    """Rewrite the dayhoff-tools version constraint in every platform manifest.

    For each manifest returned by _get_all_platform_manifests(), the quoted
    dependency entry for dayhoff-tools (with any extras such as [full]) is
    rewritten to pin '>=new_version'. Manifests without a matching entry are
    left untouched and a warning is printed.
    """
    import re

    manifests = _get_all_platform_manifests()
    if not manifests:
        print("Warning: No platform manifests found to update.")
        return

    package_name = "dayhoff-tools"
    constraint = f">={new_version}"

    # Matches a quoted dependency entry, capturing: (1) indent + opening
    # quote, (2) optional extras like "[full]", (3) closing quote plus the
    # rest of the line. Any existing version spec between extras and the
    # closing quote is consumed and dropped by the non-capturing group.
    dep_line_re = re.compile(
        rf"^(\s*['\"]){re.escape(package_name)}(\[[^]]+\])?(?:[><=~^][^'\"]*)?(['\"].*)$",
        re.MULTILINE,
    )

    def _rewrite(m: re.Match):
        open_quote = m.group(1)
        extras = m.group(2) or ""
        tail = m.group(3)
        return f"{open_quote}{package_name}{extras}{constraint}{tail}"

    # Apply the rewrite to each manifest independently; one failure must not
    # stop the others.
    for manifest_file in manifests:
        try:
            print(f"Updating {manifest_file} version constraint...")
            text = manifest_file.read_text()
            updated, hits = dep_line_re.subn(_rewrite, text)
            if hits:
                manifest_file.write_text(updated)
                print(
                    f"Updated dayhoff-tools constraint in {manifest_file} to '{constraint}'"
                )
            else:
                print(
                    f"Warning: Could not find dayhoff-tools dependency line in {manifest_file}"
                )
        except Exception as e:
            print(f"Error updating {manifest_file}: {e}")
|
|
829
|
+
|
|
830
|
+
|
|
831
|
+
def _find_toml_list_end(lines, list_start_pattern, section_pattern=None):
    """Return the index of the line that closes (']') the first matching TOML list.

    Scans *lines* for a line matching *list_start_pattern* (e.g. r"\s*dev\s*=\s*\[")
    and returns the index of the first subsequent line consisting solely of ']'.
    When *section_pattern* is given, the list is only searched for after a line
    matching that section header (e.g. r"\s*\[dependency-groups\]"); the flag is
    deliberately never reset afterwards, matching the original scan semantics.
    Returns None when either the list start or its closing bracket is missing.

    Line-by-line scanning is used (instead of one big regex) so that '['
    characters inside dependency names like dayhoff-tools[full] cannot confuse
    the parser.
    """
    import re

    in_section = section_pattern is None
    in_list = False
    for idx, line in enumerate(lines):
        if not in_section:
            if re.match(section_pattern, line):
                in_section = True
            continue
        if not in_list:
            if re.match(list_start_pattern, line):
                in_list = True
            continue
        if re.match(r"^\s*\]\s*$", line):
            return idx
    return None


def add_dependency(
    package: str,
    dev: bool = typer.Option(
        False, "--dev", "-d", help="Add to dev dependencies instead of main."
    ),
):
    """Add a dependency to all platform-specific manifests.

    The package line is inserted just before the closing ']' of either the
    [project] dependencies list or the [dependency-groups] dev list in each
    manifest. If the package spec carries no version constraint, the latest
    version is resolved via a uv lock and every manifest is rewritten with a
    '>=<resolved>' constraint.

    Args:
        package: Package specification (e.g., "numpy>=1.24.0" or "pandas")
        dev: If True, add to [dependency-groups] dev instead of [project] dependencies
    """
    import re

    # ANSI color codes for the terminal hint printed at the end
    BLUE = "\033[94m"
    RESET = "\033[0m"

    manifest_files = _get_all_platform_manifests()

    if not manifest_files:
        print(
            "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
        )
        sys.exit(1)

    # Determine section to add to (for messaging only; selection happens below)
    section_name = "dev dependencies" if dev else "main dependencies"
    print(f"Adding '{package}' to {section_name} in all platform manifests...")

    # Bare package name (without extras/version spec), used for duplicate
    # detection and the later version-constraint rewrite.
    package_name = re.split(r"[<>=~!\[]", package)[0].strip()
    has_version_spec = any(c in package for c in ["<", ">", "=", "~", "!"])

    added_count = 0

    for manifest_file in manifest_files:
        try:
            content = manifest_file.read_text()

            # Skip manifests that already declare this package (in any form).
            if re.search(
                rf'^(\s*["\']){re.escape(package_name)}[<>=~!\[]',
                content,
                re.MULTILINE,
            ):
                print(
                    f"⚠️ Package '{package_name}' already exists in {manifest_file}, skipping"
                )
                continue

            lines = content.split("\n")
            if dev:
                insert_idx = _find_toml_list_end(
                    lines,
                    r"\s*dev\s*=\s*\[",
                    section_pattern=r"\s*\[dependency-groups\]",
                )
                if insert_idx is None:
                    print(
                        f"Warning: Could not find [dependency-groups] dev section in {manifest_file}"
                    )
                    continue
            else:
                insert_idx = _find_toml_list_end(lines, r"\s*dependencies\s*=\s*\[")
                if insert_idx is None:
                    print(
                        f"Warning: Could not find dependencies section in {manifest_file}"
                    )
                    continue

            # Insert the new dependency just before the closing ']'.
            lines.insert(insert_idx, f'    "{package}",')
            manifest_file.write_text("\n".join(lines))
            print(f"✅ Added '{package}' to {manifest_file}")
            added_count += 1

        except Exception as e:
            print(f"Error updating {manifest_file}: {e}")

    # Nothing inserted anywhere: every manifest already had the package.
    if added_count == 0:
        print(f"\n⚠️ Package '{package_name}' already exists in all manifests")
        return

    print(f"\n✅ Added '{package}' to {added_count} platform manifest(s)")

    # If no version was specified, resolve one and pin '>=<version>'.
    if not has_version_spec:
        print(f"\n🔍 Resolving version for '{package_name}'...")
        resolved_version = _resolve_package_version(package_name)

        if resolved_version:
            print(f"📌 Resolved to version {resolved_version}")
            print(
                f"Updating manifests with version constraint '>={resolved_version}'..."
            )

            # Rewrite the (still unversioned) quoted entry in every manifest.
            for manifest_file in manifest_files:
                try:
                    content = manifest_file.read_text()
                    pattern = re.compile(
                        rf'^(\s*["\']){re.escape(package_name)}(["\'],?)(.*)$',
                        re.MULTILINE,
                    )

                    def replace_with_version(match):
                        prefix = match.group(1)
                        suffix = match.group(2)
                        rest = match.group(3)
                        return (
                            f"{prefix}{package_name}>={resolved_version}{suffix}{rest}"
                        )

                    new_content = pattern.sub(replace_with_version, content)
                    manifest_file.write_text(new_content)
                    print(f"✅ Updated {manifest_file} with version constraint")
                except Exception as e:
                    print(f"Warning: Could not update version in {manifest_file}: {e}")

            print(
                f"\n✅ Added '{package_name}>={resolved_version}' to {added_count} platform manifest(s)"
            )
        else:
            print(
                f"⚠️ Could not resolve version for '{package_name}', left unversioned"
            )

    print(
        f"\nRun {BLUE}dh tomlsync{RESET} to install the new dependency in your environment."
    )
|
|
1002
|
+
|
|
1003
|
+
|
|
1004
|
+
def remove_dependency(
    package: str,
    dev: bool = typer.Option(
        False, "--dev", "-d", help="Remove from dev dependencies instead of main."
    ),
):
    """Remove a dependency from all platform-specific manifests.

    Args:
        package: Package name (e.g., "numpy" or "pandas")
        dev: If True, remove from [dependency-groups] dev instead of [project] dependencies

    NOTE(review): the dev flag currently only changes the progress message;
    the removal regex below matches the package's line wherever it appears in
    each manifest, regardless of section — confirm this is intended.
    """
    import re

    # ANSI color codes for the terminal hint printed at the end
    BLUE = "\033[94m"
    RESET = "\033[0m"

    manifest_files = _get_all_platform_manifests()
    if not manifest_files:
        print(
            "Error: No platform-specific manifests found (pyproject.aws.toml, pyproject.mac.toml, or pyproject.workstation.toml)"
        )
        sys.exit(1)

    section_name = "dev dependencies" if dev else "main dependencies"
    print(f"Removing '{package}' from {section_name} in all platform manifests...")

    # Matches one whole quoted dependency line — either the bare package name
    # or the name followed by extras/version spec — including the trailing
    # comma, an optional end-of-line comment, and the newline itself, so the
    # substitution deletes the entire line. Compiled once; it only depends on
    # the package name, not on any particular manifest.
    escaped = re.escape(package)
    dep_line_re = re.compile(
        rf'^\s*["\']({escaped}[<>=~!\[].+?|{escaped})["\'],?\s*(?:#.*)?$\n?',
        re.MULTILINE,
    )

    removed_count = 0
    for manifest_file in manifest_files:
        try:
            text = manifest_file.read_text()
            stripped, hits = dep_line_re.subn("", text)

            if not hits:
                print(f"⚠️ Package '{package}' not found in {manifest_file}")
                continue

            # Collapse runs of blank lines left behind by the deletion, and
            # trim trailing whitespace, before writing the manifest back.
            stripped = re.sub(r"\n\n\n+", "\n\n", stripped)
            stripped = re.sub(r"[ \t]+$", "", stripped, flags=re.MULTILINE)
            manifest_file.write_text(stripped)
            print(f"✅ Removed '{package}' from {manifest_file}")
            removed_count += 1

        except Exception as e:
            print(f"Error updating {manifest_file}: {e}")

    if removed_count > 0:
        print(f"\n✅ Removed '{package}' from {removed_count} platform manifest(s)")
        print(
            f"\nRun {BLUE}dh tomlsync{RESET} to uninstall the dependency from your environment."
        )
    else:
        print(f"\n⚠️ Package '{package}' was not found in any manifests")
|
|
1071
|
+
|
|
1072
|
+
|
|
412
1073
|
def update_dependencies(
|
|
413
1074
|
update_all: bool = typer.Option(
|
|
414
1075
|
False,
|
|
@@ -417,21 +1078,73 @@ def update_dependencies(
|
|
|
417
1078
|
help="Update all dependencies instead of just dayhoff-tools.",
|
|
418
1079
|
),
|
|
419
1080
|
):
|
|
420
|
-
"""Update dependencies to newer versions.
|
|
421
|
-
|
|
422
|
-
Default Action (no flags): Updates only 'dayhoff-tools' package to latest,
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
1081
|
+
"""Update dependencies to newer versions (platform-aware).
|
|
1082
|
+
|
|
1083
|
+
- Default Action (no flags): Updates only 'dayhoff-tools' package to latest,
|
|
1084
|
+
updates ALL manifest files with the version constraint, and syncs.
|
|
1085
|
+
- Flags:
|
|
1086
|
+
--all/-a: Updates all dependencies and syncs.
|
|
1087
|
+
|
|
1088
|
+
Cross-platform behavior:
|
|
1089
|
+
- Workstation: Uses pip to upgrade packages, regenerates constraints.txt
|
|
1090
|
+
- Mac/AWS: Uses UV with platform-specific manifests (.mac_uv_project or .aws_uv_project)
|
|
1091
|
+
- Always updates ALL platform manifests (pyproject.aws.toml, pyproject.mac.toml,
|
|
1092
|
+
pyproject.workstation.toml) to ensure version consistency
|
|
428
1093
|
"""
|
|
429
1094
|
# ANSI color codes
|
|
430
1095
|
BLUE = "\033[94m"
|
|
431
1096
|
RESET = "\033[0m"
|
|
432
1097
|
|
|
1098
|
+
platform = os.environ.get("STUDIO_PLATFORM", "aws")
|
|
1099
|
+
|
|
1100
|
+
# Workstation platform: use pip upgrade
|
|
1101
|
+
if platform == "workstation" and Path("pyproject.workstation.toml").exists():
|
|
1102
|
+
print("Updating dependencies for workstation platform (using pip)...")
|
|
1103
|
+
|
|
1104
|
+
if update_all:
|
|
1105
|
+
print("Error: --all flag not supported on workstation platform yet.")
|
|
1106
|
+
print("Use 'pip install --upgrade <package>' manually for now.")
|
|
1107
|
+
sys.exit(1)
|
|
1108
|
+
|
|
1109
|
+
# Update dayhoff-tools only (default behavior)
|
|
1110
|
+
print("Upgrading dayhoff-tools to latest version...")
|
|
1111
|
+
upgrade_cmd = [
|
|
1112
|
+
sys.executable,
|
|
1113
|
+
"-m",
|
|
1114
|
+
"pip",
|
|
1115
|
+
"install",
|
|
1116
|
+
"--upgrade",
|
|
1117
|
+
"dayhoff-tools[full]",
|
|
1118
|
+
]
|
|
1119
|
+
print(f"Running command: {BLUE}{' '.join(upgrade_cmd)}{RESET}")
|
|
1120
|
+
subprocess.run(upgrade_cmd, check=True)
|
|
1121
|
+
|
|
1122
|
+
# Get new version
|
|
1123
|
+
result = subprocess.run(
|
|
1124
|
+
[sys.executable, "-m", "pip", "show", "dayhoff-tools"],
|
|
1125
|
+
capture_output=True,
|
|
1126
|
+
text=True,
|
|
1127
|
+
check=True,
|
|
1128
|
+
)
|
|
1129
|
+
version_line = [
|
|
1130
|
+
l for l in result.stdout.split("\n") if l.startswith("Version:")
|
|
1131
|
+
]
|
|
1132
|
+
if version_line:
|
|
1133
|
+
new_version = version_line[0].split(":", 1)[1].strip()
|
|
1134
|
+
print(f"Updated to dayhoff-tools {new_version}")
|
|
1135
|
+
|
|
1136
|
+
# Update all platform manifests with new constraint
|
|
1137
|
+
_update_all_manifests_for_dayhoff_tools(new_version)
|
|
1138
|
+
|
|
1139
|
+
print("✅ Dependencies updated successfully (workstation)")
|
|
1140
|
+
return
|
|
1141
|
+
|
|
1142
|
+
# Mac/AWS platforms: use UV
|
|
1143
|
+
mac_manifest = Path("pyproject.mac.toml")
|
|
1144
|
+
aws_manifest = Path("pyproject.aws.toml")
|
|
1145
|
+
mac_uv_dir = Path(".mac_uv_project")
|
|
1146
|
+
aws_uv_dir = Path(".aws_uv_project")
|
|
433
1147
|
lock_file_path = Path("uv.lock")
|
|
434
|
-
pyproject_path = Path("pyproject.toml")
|
|
435
1148
|
|
|
436
1149
|
# Determine action based on flags
|
|
437
1150
|
lock_cmd = ["uv", "lock"]
|
|
@@ -453,12 +1166,41 @@ def update_dependencies(
|
|
|
453
1166
|
)
|
|
454
1167
|
|
|
455
1168
|
try:
|
|
1169
|
+
# Choose working directory for uv operations based on platform
|
|
1170
|
+
uv_cwd = None
|
|
1171
|
+
manifest_path_for_constraint = None
|
|
1172
|
+
|
|
1173
|
+
if platform == "mac" and mac_manifest.exists():
|
|
1174
|
+
mac_uv_dir.mkdir(parents=True, exist_ok=True)
|
|
1175
|
+
(mac_uv_dir / "pyproject.toml").write_text(mac_manifest.read_text())
|
|
1176
|
+
# Copy README.md if it exists (required by some build backends)
|
|
1177
|
+
if Path("README.md").exists():
|
|
1178
|
+
(mac_uv_dir / "README.md").write_text(Path("README.md").read_text())
|
|
1179
|
+
uv_cwd = str(mac_uv_dir)
|
|
1180
|
+
lock_file_path = mac_uv_dir / "uv.lock"
|
|
1181
|
+
manifest_path_for_constraint = mac_manifest
|
|
1182
|
+
elif aws_manifest.exists():
|
|
1183
|
+
# AWS platform (default)
|
|
1184
|
+
aws_uv_dir.mkdir(parents=True, exist_ok=True)
|
|
1185
|
+
(aws_uv_dir / "pyproject.toml").write_text(aws_manifest.read_text())
|
|
1186
|
+
# Copy README.md if it exists (required by some build backends)
|
|
1187
|
+
if Path("README.md").exists():
|
|
1188
|
+
(aws_uv_dir / "README.md").write_text(Path("README.md").read_text())
|
|
1189
|
+
uv_cwd = str(aws_uv_dir)
|
|
1190
|
+
lock_file_path = aws_uv_dir / "uv.lock"
|
|
1191
|
+
manifest_path_for_constraint = aws_manifest
|
|
1192
|
+
else:
|
|
1193
|
+
print(
|
|
1194
|
+
"Error: No platform-specific manifest found (pyproject.aws.toml or pyproject.mac.toml)"
|
|
1195
|
+
)
|
|
1196
|
+
sys.exit(1)
|
|
1197
|
+
|
|
456
1198
|
# Step 1: Run the update lock command
|
|
457
1199
|
print(action_description)
|
|
458
1200
|
print(f"Running command: {BLUE}{' '.join(lock_cmd)}{RESET}")
|
|
459
|
-
subprocess.run(lock_cmd, check=True, capture_output=True)
|
|
1201
|
+
subprocess.run(lock_cmd, check=True, capture_output=True, cwd=uv_cwd)
|
|
460
1202
|
|
|
461
|
-
# Step 2: Update
|
|
1203
|
+
# Step 2: Update both manifest files if doing the dayhoff update (default)
|
|
462
1204
|
if run_pyproject_update:
|
|
463
1205
|
print(f"Reading {lock_file_path} to find new dayhoff-tools version...")
|
|
464
1206
|
if not lock_file_path.exists():
|
|
@@ -485,48 +1227,23 @@ def update_dependencies(
|
|
|
485
1227
|
return
|
|
486
1228
|
|
|
487
1229
|
print(f"Found dayhoff-tools version {locked_version} in lock file.")
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
pattern = re.compile(
|
|
492
|
-
"^(\s*['\"])dayhoff-tools(?:[><=~^][^'\"\[,]*)?(['\"].*)$", # Match rest of line
|
|
493
|
-
re.MULTILINE,
|
|
494
|
-
)
|
|
495
|
-
package_name = "dayhoff-tools"
|
|
496
|
-
new_constraint_text = f">={locked_version}"
|
|
497
|
-
replacement_string = f"\g<1>{package_name}{new_constraint_text}\g<2>"
|
|
498
|
-
new_content, num_replacements = pattern.subn(
|
|
499
|
-
replacement_string, content
|
|
500
|
-
)
|
|
501
|
-
if num_replacements > 0:
|
|
502
|
-
pyproject_path.write_text(new_content)
|
|
503
|
-
print(
|
|
504
|
-
f"Updated dayhoff-tools constraint in {pyproject_path} to '{new_constraint_text}'"
|
|
505
|
-
)
|
|
506
|
-
else:
|
|
507
|
-
print(
|
|
508
|
-
f"Warning: Could not find dayhoff-tools dependency line in {pyproject_path} to update constraint."
|
|
509
|
-
)
|
|
510
|
-
except FileNotFoundError:
|
|
511
|
-
print(f"Error: {pyproject_path} not found.")
|
|
512
|
-
return
|
|
513
|
-
except Exception as e:
|
|
514
|
-
print(f"Error updating {pyproject_path}: {e}")
|
|
515
|
-
print("Proceeding with sync despite pyproject.toml update error.")
|
|
1230
|
+
|
|
1231
|
+
# Update all platform manifest files to ensure consistency
|
|
1232
|
+
_update_all_manifests_for_dayhoff_tools(locked_version)
|
|
516
1233
|
|
|
517
1234
|
# Step 3: Sync environment
|
|
518
1235
|
print("Syncing environment with updated lock file...")
|
|
519
1236
|
# Always use --no-install-project for updates
|
|
520
|
-
sync_cmd = ["uv", "sync", "--all-groups", "--no-install-project"]
|
|
1237
|
+
sync_cmd = ["uv", "sync", "--all-groups", "--no-install-project", "--active"]
|
|
521
1238
|
print(f"Running command: {BLUE}{' '.join(sync_cmd)}{RESET}")
|
|
522
|
-
subprocess.run(sync_cmd, check=True)
|
|
1239
|
+
subprocess.run(sync_cmd, check=True, cwd=uv_cwd)
|
|
523
1240
|
|
|
524
1241
|
# Final status message
|
|
525
1242
|
if update_all:
|
|
526
1243
|
print("All dependencies updated and environment synced successfully.")
|
|
527
1244
|
else: # Default case (dayhoff update)
|
|
528
1245
|
print(
|
|
529
|
-
"dayhoff-tools updated,
|
|
1246
|
+
"dayhoff-tools updated, manifest files modified, and environment synced successfully."
|
|
530
1247
|
)
|
|
531
1248
|
|
|
532
1249
|
except subprocess.CalledProcessError as e:
|