wafer-cli 0.2.30.tar.gz → 0.2.31.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/PKG-INFO +1 -1
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/pyproject.toml +1 -1
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_cli_coverage.py +1 -47
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_cli_parity_integration.py +0 -47
- wafer_cli-0.2.31/wafer/baseline.py +661 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/cli.py +41 -321
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/evaluate.py +27 -6
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/templates/optimize_kernel.py +4 -2
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer_cli.egg-info/PKG-INFO +1 -1
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer_cli.egg-info/SOURCES.txt +1 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/README.md +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/setup.cfg +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_analytics.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_auth.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_billing.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_config_integration.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_file_operations_integration.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_kernel_scope_cli.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_nsys_analyze.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_nsys_profile.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_output.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_rocprof_compute_integration.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_skill_commands.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_ssh_integration.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_targets_ops.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_wevin_cli.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_workflow_integration.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/GUIDE.md +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/__init__.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/agent_defaults.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/analytics.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/api_client.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/auth.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/autotuner.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/billing.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/cli_instructions.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/config.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/corpus.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/global_config.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/gpu_run.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/inference.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/kernel_scope.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/ncu_analyze.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/nsys_analyze.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/nsys_profile.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/output.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/problems.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/rocprof_compute.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/rocprof_sdk.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/rocprof_systems.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/skills/wafer-guide/SKILL.md +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/specs_cli.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/ssh_keys.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/target_lock.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/targets.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/targets_cli.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/targets_ops.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/templates/__init__.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/templates/ask_docs.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/templates/optimize_kernelbench.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/templates/trace_analyze.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/tests/test_eval_cli_parity.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/trace_compare.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/tracelens.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/wevin_cli.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer/workspaces.py +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer_cli.egg-info/dependency_links.txt +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer_cli.egg-info/entry_points.txt +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer_cli.egg-info/requires.txt +0 -0
- {wafer_cli-0.2.30 → wafer_cli-0.2.31}/wafer_cli.egg-info/top_level.txt +0 -0
{wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_cli_coverage.py

@@ -13,7 +13,7 @@ what external systems produce. They're less bad than API mocks because:
 
 But they can still go stale. Ideally we'd also have slow integration tests that
 generate these files fresh by running real profiling commands on GPUs.
-TODO: Add slow tests that generate .ncu-rep/.nsys-rep files via
+TODO: Add slow tests that generate .ncu-rep/.nsys-rep files via targets exec.
 """
 
 import json
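The new wording makes the regeneration path concrete: profiling fixtures should be refreshed through `targets exec` on a real GPU rather than living as checked-in blobs. Below is a minimal sketch of the slow test that TODO describes, assuming `wafer targets exec <target> -- <cmd>` as the invocation shape and reusing the `vultr-b200` target name that appears later in this diff; the `ncu` flags and workload are illustrative only, not confirmed wafer-cli behavior:

```python
# Sketch of the slow fixture-refresh test the TODO asks for. `targets exec`
# is named in the TODO itself; the subcommand shape, ncu flags, and workload
# below are assumptions for illustration.
import shutil
import subprocess

import pytest


@pytest.mark.slow  # register "slow" in pytest.ini to avoid a mark warning
def test_regenerate_ncu_rep_fixture() -> None:
    """Profile a trivial kernel on a real GPU so the .ncu-rep fixture stays fresh."""
    if shutil.which("wafer") is None:
        pytest.skip("wafer CLI not on PATH")

    result = subprocess.run(
        [
            "wafer", "targets", "exec", "vultr-b200", "--",
            "ncu", "-o", "/tmp/fresh", "--set", "basic",
            "python", "-c", "import torch; torch.randn(1024, device='cuda').sum()",
        ],
        capture_output=True,
        text=True,
        timeout=600,
    )
    if result.returncode != 0:
        # Mirror the removed tests below: an unreachable target is a skip, not a failure.
        pytest.skip(f"GPU target unavailable: {result.stderr[:200]}")

    # ncu writes /tmp/fresh.ncu-rep on the target; copying it back into
    # tests/fixtures/ is elided from this sketch.
    assert result.returncode == 0
```

Skipping rather than failing when the target is unreachable mirrors the pattern the tests removed in this release already used.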
@@ -544,52 +544,6 @@ class TestTargetsCommands:
         assert result.exit_code == 0
 
 
-class TestRemoteRunCommand:
-    """Test wafer remote-run command."""
-
-    @pytest.fixture
-    def vultr_available(self) -> bool:
-        """Check if vultr-b200 target is configured."""
-        target_path = Path.home() / ".wafer" / "targets" / "vultr-b200.toml"
-        return target_path.exists()
-
-    def test_remote_run_simple(self) -> None:
-        """remote-run executes command on remote GPU."""
-        result = runner.invoke(app, ["remote-run", "--", "echo", "hello"])
-        # Should succeed or fail with auth/target error
-        if result.exit_code == 0:
-            assert "hello" in result.output
-        else:
-            # Auth or target error is acceptable
-            combined = result.output.lower()
-            assert "auth" in combined or "target" in combined or "error" in combined
-
-    def test_remote_run_nvidia_smi(self, vultr_available: bool) -> None:
-        """Run nvidia-smi on real GPU target."""
-        if not vultr_available:
-            pytest.skip("vultr-b200 target not configured")
-
-        result = runner.invoke(
-            app,
-            [
-                "remote-run",
-                "--direct",
-                "--target",
-                "vultr-b200",
-                "--",
-                "nvidia-smi",
-                "--query-gpu=name",
-                "--format=csv,noheader",
-            ],
-        )
-        if result.exit_code == 0:
-            assert "NVIDIA" in result.output or "B200" in result.output
-        else:
-            # SSH connection issues are acceptable in CI
-            combined = result.output.lower()
-            assert "error" in combined or "connection" in combined or "timeout" in combined
-
-
 class TestEvaluateCommand:
     """Test wafer evaluate command."""
 
{wafer_cli-0.2.30 → wafer_cli-0.2.31}/tests/test_cli_parity_integration.py

@@ -235,53 +235,6 @@ int main() {
 )
 
 
-class TestRemoteRunWorkflow:
-    """Test: User runs a command on remote GPU."""
-
-    B200_HOST = "chiraag@45.76.244.62"
-    SSH_KEY = "~/.ssh/id_ed25519"
-
-    @pytest.fixture
-    def vultr_available(self) -> bool:
-        """Check if vultr-b200 is reachable."""
-        target_path = Path.home() / ".wafer" / "targets" / "vultr-b200.toml"
-        if not target_path.exists():
-            return False
-        result = subprocess.run(
-            [
-                "ssh",
-                "-i",
-                str(Path(self.SSH_KEY).expanduser()),
-                "-o",
-                "ConnectTimeout=5",
-                self.B200_HOST,
-                "echo ok",
-            ],
-            capture_output=True,
-            text=True,
-        )
-        return result.returncode == 0
-
-    def test_remote_run_nvidia_smi(self, vultr_available: bool) -> None:
-        """Run nvidia-smi on remote GPU."""
-        if not vultr_available:
-            pytest.skip("vultr-b200 not available")
-
-        result = run_cli(
-            "remote-run",
-            "--direct",
-            "--target",
-            "vultr-b200",
-            "--",
-            "nvidia-smi",
-            "--query-gpu=name",
-            "--format=csv,noheader",
-        )
-
-        # Should contain GPU name
-        assert "NVIDIA" in result.stdout or "B200" in result.stdout
-
-
 class TestEvaluateWorkflow:
     """Test: User evaluates a kernel implementation."""
 
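Both deleted classes pinned the suite to one person's machine: a hard-coded `chiraag@45.76.244.62` host and a local `~/.ssh/id_ed25519` key. If this coverage is ever restored, a portable, opt-in variant could keep only the patterns the removed code already used (target-TOML existence check, skip on unavailability, `remote-run --direct --target`); the `WAFER_RUN_GPU_TESTS` and `WAFER_TEST_TARGET` environment gates below are assumptions for the sketch, not existing wafer-cli features:

```python
# Sketch only: reuses the removed tests' target-config check and skip
# behavior; the two environment variables are hypothetical gates.
import os
import subprocess
from pathlib import Path

import pytest


@pytest.mark.skipif(
    os.environ.get("WAFER_RUN_GPU_TESTS") != "1",
    reason="set WAFER_RUN_GPU_TESTS=1 to run tests that need a live GPU target",
)
def test_remote_run_smoke() -> None:
    """`wafer remote-run` echoes through whatever target the developer configured."""
    target = os.environ.get("WAFER_TEST_TARGET", "vultr-b200")
    if not (Path.home() / ".wafer" / "targets" / f"{target}.toml").exists():
        pytest.skip(f"{target} target not configured")

    result = subprocess.run(
        ["wafer", "remote-run", "--direct", "--target", target, "--", "echo", "ok"],
        capture_output=True,
        text=True,
        timeout=120,
    )
    assert result.returncode == 0 and "ok" in result.stdout
```

Gating on an explicit environment variable keeps CI green by default while still letting a developer with a configured target exercise the real SSH path.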