nvidia-nat-test 1.3.0a20251013__py3-none-any.whl → 1.3.0a20251021__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of nvidia-nat-test might be problematic. Click here for more details.
- nat/test/plugin.py +173 -0
- nat/test/utils.py +30 -2
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/METADATA +2 -2
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/RECORD +9 -9
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/WHEEL +0 -0
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/entry_points.txt +0 -0
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/licenses/LICENSE.md +0 -0
- {nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/top_level.txt +0 -0
nat/test/plugin.py
CHANGED
|
@@ -15,10 +15,14 @@
|
|
|
15
15
|
|
|
16
16
|
import os
|
|
17
17
|
import subprocess
|
|
18
|
+
import types
|
|
18
19
|
import typing
|
|
20
|
+
from collections.abc import AsyncGenerator
|
|
21
|
+
from collections.abc import Generator
|
|
19
22
|
from pathlib import Path
|
|
20
23
|
|
|
21
24
|
import pytest
|
|
25
|
+
import pytest_asyncio
|
|
22
26
|
|
|
23
27
|
if typing.TYPE_CHECKING:
|
|
24
28
|
from docker.client import DockerClient
|
|
@@ -220,6 +224,32 @@ def azure_openai_keys_fixture(fail_missing: bool):
|
|
|
220
224
|
fail_missing=fail_missing)
|
|
221
225
|
|
|
222
226
|
|
|
227
|
+
@pytest.fixture(name="wandb_api_key", scope='session')
def wandb_api_key_fixture(fail_missing: bool):
    """
    Use for integration tests that require a Weights & Biases API key.

    Yields the validated key material from ``require_env_variables``; skips (or fails,
    when ``fail_missing`` is set) if the variable is absent.
    """
    missing_reason = (
        "Weights & Biases integration tests require the `WANDB_API_KEY` environment variable to be defined.")
    yield require_env_variables(varnames=["WANDB_API_KEY"], reason=missing_reason, fail_missing=fail_missing)
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
@pytest.fixture(name="weave", scope='session')
def require_weave_fixture(fail_missing: bool) -> types.ModuleType:
    """
    Use for integration tests that require Weave to be running.

    Returns the imported ``weave`` module. When the import fails, either raises
    (``fail_missing=True``) or skips the dependent tests.
    """
    reason = "Weave must be installed to run weave based tests"
    try:
        import weave
    except Exception as import_error:
        if fail_missing:
            raise RuntimeError(reason) from import_error
        pytest.skip(reason=reason)
    return weave
|
|
251
|
+
|
|
252
|
+
|
|
223
253
|
@pytest.fixture(name="require_docker", scope='session')
|
|
224
254
|
def require_docker_fixture(fail_missing: bool) -> "DockerClient":
|
|
225
255
|
"""
|
|
@@ -256,6 +286,18 @@ def root_repo_dir_fixture() -> Path:
|
|
|
256
286
|
return locate_repo_root()
|
|
257
287
|
|
|
258
288
|
|
|
289
|
+
@pytest.fixture(name="examples_dir", scope='session')
def examples_dir_fixture(root_repo_dir: Path) -> Path:
    """Path to the ``examples`` directory at the repository root."""
    return root_repo_dir.joinpath("examples")
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
@pytest.fixture(name="env_without_nat_log_level", scope='function')
def env_without_nat_log_level_fixture() -> dict[str, str]:
    """Copy of the current process environment with ``NAT_LOG_LEVEL`` removed."""
    env_copy = dict(os.environ)
    env_copy.pop("NAT_LOG_LEVEL", None)
    return env_copy
|
|
299
|
+
|
|
300
|
+
|
|
259
301
|
@pytest.fixture(name="require_etcd", scope="session")
|
|
260
302
|
def require_etcd_fixture(fail_missing: bool = False) -> bool:
|
|
261
303
|
"""
|
|
@@ -343,3 +385,134 @@ def populate_milvus_fixture(milvus_uri: str, root_repo_dir: Path):
|
|
|
343
385
|
"wikipedia_docs"
|
|
344
386
|
],
|
|
345
387
|
check=True)
|
|
388
|
+
|
|
389
|
+
|
|
390
|
+
@pytest.fixture(name="require_nest_asyncio", scope="session")
def require_nest_asyncio_fixture():
    """
    Enable nested event loops for tests that need them.

    ``nest_asyncio.apply()`` is idempotent, so requesting this fixture is safe even
    when one of our dependencies has already patched the running loop.
    """
    import nest_asyncio
    nest_asyncio.apply()
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
@pytest.fixture(name="phoenix_url", scope="session")
def phoenix_url_fixture(fail_missing: bool) -> str:
    """
    To run these tests, a phoenix server must be running.
    The phoenix server can be started by running the following command:
    docker run -p 6006:6006 -p 4317:4317 arizephoenix/phoenix:latest

    Returns:
        The base URL of the reachable Phoenix server (``NAT_CI_PHOENIX_URL`` or the
        localhost default).

    Raises:
        RuntimeError: When the server is unreachable and ``fail_missing`` is set.
    """
    import requests

    url = os.getenv("NAT_CI_PHOENIX_URL", "http://localhost:6006")
    try:
        response = requests.get(url, timeout=5)
        response.raise_for_status()

        return url
    except Exception as e:
        reason = f"Unable to connect to Phoenix server at {url}: {e}"
        if fail_missing:
            # Chain the original exception so the underlying connection failure
            # stays visible in the traceback.
            raise RuntimeError(reason) from e
        pytest.skip(reason=reason)
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
@pytest.fixture(name="phoenix_trace_url", scope="session")
def phoenix_trace_url_fixture(phoenix_url: str) -> str:
    """
    Some of our tools expect the base url provided by the phoenix_url fixture, however the
    general.telemetry.tracing["phoenix"].endpoint expects the trace url which is what this fixture provides.
    """
    trace_endpoint = "/".join((phoenix_url, "v1/traces"))
    return trace_endpoint
|
|
429
|
+
|
|
430
|
+
|
|
431
|
+
@pytest.fixture(name="redis_server", scope="session")
def fixture_redis_server(fail_missing: bool) -> Generator[dict[str, str | int]]:
    """Fixture to safely skip redis based tests if redis is not running.

    Yields:
        Connection details (``host``, ``port``, ``db``, ``bucket_name``) for the
        reachable Redis server.
    """
    host = os.environ.get("NAT_CI_REDIS_HOST", "localhost")
    port = int(os.environ.get("NAT_CI_REDIS_PORT", "6379"))
    db = int(os.environ.get("NAT_CI_REDIS_DB", "0"))
    bucket_name = os.environ.get("NAT_CI_REDIS_BUCKET_NAME", "test")

    try:
        import redis
        client = redis.Redis(host=host, port=port, db=db)
        if not client.ping():
            raise RuntimeError("Failed to connect to Redis")
    except ImportError:
        if fail_missing:
            raise
        pytest.skip("redis not installed, skipping redis tests")
    except Exception as e:
        if fail_missing:
            raise
        pytest.skip(f"Error connecting to Redis server: {e}, skipping redis tests")

    # Yield outside the connect try-block so a failure during the tests/teardown is
    # not misreported as a "cannot connect" skip, and close the client either way.
    try:
        yield {"host": host, "port": port, "db": db, "bucket_name": bucket_name}
    finally:
        client.close()
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
@pytest_asyncio.fixture(name="mysql_server", scope="module")
async def fixture_mysql_server(fail_missing: bool) -> AsyncGenerator[dict[str, str | int]]:
    """Fixture to safely skip MySQL based tests if MySQL is not running.

    Yields:
        Connection details (``host``, ``port``, ``username``, ``password``,
        ``bucket_name``) for the reachable MySQL server.
    """
    host = os.environ.get('NAT_CI_MYSQL_HOST', '127.0.0.1')
    port = int(os.environ.get('NAT_CI_MYSQL_PORT', '3306'))
    user = os.environ.get('NAT_CI_MYSQL_USER', 'root')
    password = os.environ.get('MYSQL_ROOT_PASSWORD', 'my_password')
    bucket_name = os.environ.get('NAT_CI_MYSQL_BUCKET_NAME', 'test')
    try:
        import aiomysql
        conn = await aiomysql.connect(host=host, port=port, user=user, password=password)
    except ImportError:
        if fail_missing:
            raise
        pytest.skip("aiomysql not installed, skipping MySQL tests")
    except Exception as e:
        if fail_missing:
            raise
        pytest.skip(f"Error connecting to MySQL server: {e}, skipping MySQL tests")

    # Yield outside the connect try-block so errors raised at the yield point are not
    # swallowed into a skip, and guarantee the connection is closed on teardown.
    try:
        yield {"host": host, "port": port, "username": user, "password": password, "bucket_name": bucket_name}
    finally:
        conn.close()
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
@pytest.fixture(name="minio_server", scope="module")
def minio_server_fixture(fail_missing: bool) -> Generator[dict[str, str | int]]:
    """Fixture to safely skip MinIO based tests if MinIO is not running.

    Yields:
        Connection details (host, port, bucket name, endpoint URL and credentials)
        for the reachable MinIO server. A missing bucket on a reachable server is
        tolerated (tests may create it).
    """
    host = os.getenv("NAT_CI_MINIO_HOST", "localhost")
    port = int(os.getenv("NAT_CI_MINIO_PORT", "9000"))
    bucket_name = os.getenv("NAT_CI_MINIO_BUCKET_NAME", "test")
    aws_access_key_id = os.getenv("NAT_CI_MINIO_ACCESS_KEY_ID", "minioadmin")
    aws_secret_access_key = os.getenv("NAT_CI_MINIO_SECRET_ACCESS_KEY", "minioadmin")
    endpoint_url = f"http://{host}:{port}"

    minio_info = {
        "host": host,
        "port": port,
        "bucket_name": bucket_name,
        "endpoint_url": endpoint_url,
        "aws_access_key_id": aws_access_key_id,
        "aws_secret_access_key": aws_secret_access_key,
    }

    try:
        import botocore.session
        session = botocore.session.get_session()

        client = session.create_client("s3",
                                       aws_access_key_id=aws_access_key_id,
                                       aws_secret_access_key=aws_secret_access_key,
                                       endpoint_url=endpoint_url)
        client.head_bucket(Bucket=bucket_name)
        yield minio_info
    except ImportError:
        if fail_missing:
            raise
        # The connectivity check uses botocore, so report that package (the previous
        # message incorrectly named aioboto3).
        pytest.skip("botocore not installed, skipping MinIO tests")
    except Exception as e:
        import botocore.exceptions
        if isinstance(e, botocore.exceptions.ClientError) and e.response['Error']['Code'] == '404':
            yield minio_info  # Bucket does not exist, but server is reachable
        elif fail_missing:
            raise
        else:
            pytest.skip(f"Error connecting to MinIO server: {e}, skipping MinIO tests")
|
nat/test/utils.py
CHANGED
|
@@ -15,6 +15,7 @@
|
|
|
15
15
|
|
|
16
16
|
import importlib.resources
|
|
17
17
|
import inspect
|
|
18
|
+
import json
|
|
18
19
|
import subprocess
|
|
19
20
|
import typing
|
|
20
21
|
from contextlib import asynccontextmanager
|
|
@@ -68,11 +69,12 @@ def locate_example_config(example_config_class: type,
|
|
|
68
69
|
|
|
69
70
|
|
|
70
71
|
async def run_workflow(
|
|
71
|
-
|
|
72
|
+
*,
|
|
73
|
+
config: "Config | None" = None,
|
|
74
|
+
config_file: "StrPath | None" = None,
|
|
72
75
|
question: str,
|
|
73
76
|
expected_answer: str,
|
|
74
77
|
assert_expected_answer: bool = True,
|
|
75
|
-
config: "Config | None" = None,
|
|
76
78
|
) -> str:
|
|
77
79
|
from nat.builder.workflow_builder import WorkflowBuilder
|
|
78
80
|
from nat.runtime.loader import load_config
|
|
@@ -80,6 +82,7 @@ async def run_workflow(
|
|
|
80
82
|
|
|
81
83
|
if config is None:
|
|
82
84
|
assert config_file is not None, "Either config_file or config must be provided"
|
|
85
|
+
assert Path(config_file).exists(), f"Config file {config_file} does not exist"
|
|
83
86
|
config = load_config(config_file)
|
|
84
87
|
|
|
85
88
|
async with WorkflowBuilder.from_config(config=config) as workflow_builder:
|
|
@@ -125,3 +128,28 @@ async def build_nat_client(
|
|
|
125
128
|
async with LifespanManager(app):
|
|
126
129
|
async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
|
|
127
130
|
yield client
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def validate_workflow_output(workflow_output_file: Path) -> None:
    """
    Validate the contents of the workflow output file.

    The file must contain a non-empty JSON list of dictionaries, each carrying a
    truthy value for every required key.
    WIP: output format should be published as a schema and this validation should be done against that schema.

    Args:
        workflow_output_file: Path to the ``workflow_output.json`` produced by a run.

    Raises:
        AssertionError: If the file is absent or its contents fail validation.
        RuntimeError: If the file exists but is not valid JSON.
    """
    # Ensure the workflow_output.json file was created
    assert workflow_output_file.exists(), "The workflow_output.json file was not created"

    # Read and validate the workflow_output.json file
    try:
        with open(workflow_output_file, encoding="utf-8") as f:
            result_json = json.load(f)
    except json.JSONDecodeError as err:
        raise RuntimeError("Failed to parse workflow_output.json as valid JSON") from err

    assert isinstance(result_json, list), "The workflow_output.json file is not a list"
    assert len(result_json) > 0, "The workflow_output.json file is empty"
    # Check every entry, not just the first, so a malformed later entry fails with a
    # clear assertion instead of an AttributeError in the key loop below.
    assert all(isinstance(item, dict) for item in result_json), \
        "The workflow_output.json file is not a list of dictionaries"

    # Ensure required keys exist and are populated (present-but-empty values fail too)
    required_keys = ["id", "question", "answer", "generated_answer", "intermediate_steps"]
    for key in required_keys:
        assert all(item.get(key) for item in result_json), \
            f"The '{key}' key is missing or empty in workflow_output.json"
|
{nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/METADATA
RENAMED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: nvidia-nat-test
|
|
3
|
-
Version: 1.3.
|
|
3
|
+
Version: 1.3.0a20251021
|
|
4
4
|
Summary: Testing utilities for NeMo Agent toolkit
|
|
5
5
|
Author: NVIDIA Corporation
|
|
6
6
|
Maintainer: NVIDIA Corporation
|
|
@@ -16,7 +16,7 @@ Requires-Python: <3.14,>=3.11
|
|
|
16
16
|
Description-Content-Type: text/markdown
|
|
17
17
|
License-File: LICENSE-3rd-party.txt
|
|
18
18
|
License-File: LICENSE.md
|
|
19
|
-
Requires-Dist: nvidia-nat==v1.3.
|
|
19
|
+
Requires-Dist: nvidia-nat==v1.3.0a20251021
|
|
20
20
|
Requires-Dist: langchain-community~=0.3
|
|
21
21
|
Requires-Dist: pytest~=8.3
|
|
22
22
|
Dynamic: license-file
|
{nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/RECORD
RENAMED
|
@@ -5,14 +5,14 @@ nat/test/functions.py,sha256=ZxXVzfaLBGOpR5qtmMrKU7q-M9-vVGGj3Xi5mrw4vHY,3557
|
|
|
5
5
|
nat/test/llm.py,sha256=f6bz6arAQjhjuOKFrLfu_U1LbiyFzQmpM-q8b-WKSrU,9550
|
|
6
6
|
nat/test/memory.py,sha256=xki_A2yiMhEZuQk60K7t04QRqf32nQqnfzD5Iv7fkvw,1456
|
|
7
7
|
nat/test/object_store_tests.py,sha256=PyJioOtoSzILPq6LuD-sOZ_89PIcgXWZweoHBQpK2zQ,4281
|
|
8
|
-
nat/test/plugin.py,sha256=
|
|
8
|
+
nat/test/plugin.py,sha256=dvRXq_GHdXs95kHeJhG2PL1H6u5jbPgnvsdsedJFyGg,18386
|
|
9
9
|
nat/test/register.py,sha256=o1BEA5fyxyFyCxXhQ6ArmtuNpgRyTEfvw6HdBgECPLI,897
|
|
10
10
|
nat/test/tool_test_runner.py,sha256=SxavwXHkvCQDl_PUiiiqgvGfexKJJTeBdI5i1qk6AzI,21712
|
|
11
|
-
nat/test/utils.py,sha256=
|
|
12
|
-
nvidia_nat_test-1.3.
|
|
13
|
-
nvidia_nat_test-1.3.
|
|
14
|
-
nvidia_nat_test-1.3.
|
|
15
|
-
nvidia_nat_test-1.3.
|
|
16
|
-
nvidia_nat_test-1.3.
|
|
17
|
-
nvidia_nat_test-1.3.
|
|
18
|
-
nvidia_nat_test-1.3.
|
|
11
|
+
nat/test/utils.py,sha256=Lml187P9SUP3IB_HhBaU1XNhiljcpOFFZOAxgQR1vQo,5936
|
|
12
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/licenses/LICENSE-3rd-party.txt,sha256=fOk5jMmCX9YoKWyYzTtfgl-SUy477audFC5hNY4oP7Q,284609
|
|
13
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
|
|
14
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/METADATA,sha256=eauzqfhkBkZSzhF4TeYmUKiZMc0oaC_Col5ihqbMX5M,1925
|
|
15
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
16
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/entry_points.txt,sha256=7dOP9XB6iMDqvav3gYx9VWUwA8RrFzhbAa8nGeC8e4Y,99
|
|
17
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
|
|
18
|
+
nvidia_nat_test-1.3.0a20251021.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{nvidia_nat_test-1.3.0a20251013.dist-info → nvidia_nat_test-1.3.0a20251021.dist-info}/top_level.txt
RENAMED
|
File without changes
|