nvidia-nat-test 1.3.0rc2-py3-none-any.whl → 1.3.0rc4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of nvidia-nat-test might be problematic.

nat/test/llm.py CHANGED
@@ -203,3 +203,34 @@ async def test_llm_agno(config: TestLLMConfig, builder: Builder):
             yield chooser.next_response()
 
     yield AgnoTestLLM()
+
+
+@register_llm_client(config_type=TestLLMConfig, wrapper_type=LLMFrameworkEnum.ADK)
+async def test_llm_adk(config: TestLLMConfig, builder: Builder):
+    """LLM client for Google ADK."""
+
+    try:
+        from google.adk.models.base_llm import BaseLlm
+        from google.adk.models.llm_request import LlmRequest
+        from google.adk.models.llm_response import LlmResponse
+        from google.genai import types
+    except ImportError as exc:
+        raise ImportError("Google ADK is required for using the test_llm with ADK. "
+                          "Please install the `nvidia-nat-adk` package.") from exc
+
+    chooser = _ResponseChooser(response_seq=config.response_seq, delay_ms=config.delay_ms)
+
+    class ADKTestLLM(BaseLlm):
+
+        async def generate_content_async(self,
+                                         llm_request: LlmRequest,
+                                         stream: bool = False) -> AsyncGenerator[LlmResponse, None]:
+            self._maybe_append_user_content(llm_request)
+            await chooser.async_sleep()
+            text = chooser.next_response()
+            yield LlmResponse(content=types.Content(role="model", parts=[types.Part.from_text(text=text)]))
+
+        def connect(self, *_args: Any, **_kwargs: Any) -> None:
+            return None
+
+    yield ADKTestLLM(model="nat_test_llm")
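The ADK wrapper reuses the same `_ResponseChooser` helper as the other framework clients in `llm.py`: it cycles through `config.response_seq` and optionally sleeps for `config.delay_ms` before each reply. The real helper is defined earlier in the file and is not shown in this diff; the sketch below is a hypothetical stand-in that illustrates only the behaviour the ADK client depends on (`async_sleep()` and `next_response()`), not the shipped implementation.

```python
import asyncio
from itertools import cycle


class ResponseChooserSketch:
    """Hypothetical stand-in for nat.test.llm._ResponseChooser (illustration only)."""

    def __init__(self, response_seq: list[str], delay_ms: int = 0):
        # Fall back to a single canned reply when no sequence is configured.
        self._responses = cycle(response_seq or ["canned response"])
        self._delay_ms = delay_ms

    async def async_sleep(self) -> None:
        # Simulate model latency so streaming and timeout paths get exercised.
        if self._delay_ms:
            await asyncio.sleep(self._delay_ms / 1000)

    def next_response(self) -> str:
        # Each call returns the next canned response, wrapping around at the end.
        return next(self._responses)


async def _demo() -> None:
    chooser = ResponseChooserSketch(response_seq=["first", "second"], delay_ms=10)
    await chooser.async_sleep()
    print(chooser.next_response())  # "first"
    print(chooser.next_response())  # "second"
    print(chooser.next_response())  # wraps back to "first"


if __name__ == "__main__":
    asyncio.run(_demo())
```

Because the responses are deterministic and cheap, the same `TestLLMConfig` instance can back the LangChain, Agno, and now ADK wrappers without any network access.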
nat/test/plugin.py CHANGED
@@ -16,9 +16,12 @@
 import os
 import subprocess
 import typing
+from collections.abc import AsyncGenerator
+from collections.abc import Generator
 from pathlib import Path
 
 import pytest
+import pytest_asyncio
 
 if typing.TYPE_CHECKING:
     from docker.client import DockerClient
@@ -153,7 +156,7 @@ def nvidia_api_key_fixture(fail_missing: bool):
 @pytest.fixture(name="serp_api_key", scope='session')
 def serp_api_key_fixture(fail_missing: bool):
     """
-    Use for integration tests that require a SERP API key.
+    Use for integration tests that require a SERP API (serpapi.com) key.
     """
     yield require_env_variables(
         varnames=["SERP_API_KEY"],
@@ -161,6 +164,17 @@ def serp_api_key_fixture(fail_missing: bool):
         fail_missing=fail_missing)
 
 
+@pytest.fixture(name="serperdev", scope='session')
+def serperdev_api_key_fixture(fail_missing: bool):
+    """
+    Use for integration tests that require a Serper Dev API (https://serper.dev) key.
+    """
+    yield require_env_variables(
+        varnames=["SERPERDEV_API_KEY"],
+        reason="SERPERDEV integration tests require the `SERPERDEV_API_KEY` environment variable to be defined.",
+        fail_missing=fail_missing)
+
+
 @pytest.fixture(name="tavily_api_key", scope='session')
 def tavily_api_key_fixture(fail_missing: bool):
     """
@@ -245,6 +259,11 @@ def root_repo_dir_fixture() -> Path:
     return locate_repo_root()
 
 
+@pytest.fixture(name="examples_dir", scope='session')
+def examples_dir_fixture(root_repo_dir: Path) -> Path:
+    return root_repo_dir / "examples"
+
+
 @pytest.fixture(name="require_etcd", scope="session")
 def require_etcd_fixture(fail_missing: bool = False) -> bool:
     """
@@ -332,3 +351,134 @@ def populate_milvus_fixture(milvus_uri: str, root_repo_dir: Path):
            "wikipedia_docs"
        ],
        check=True)
+
+
+@pytest.fixture(name="require_nest_asyncio", scope="session")
+def require_nest_asyncio_fixture():
+    """
+    Some tests require nest_asyncio to be installed to allow nested event loops, calling nest_asyncio.apply() more than
+    once is a no-op so it's safe to call this fixture even if one of our dependencies already called it.
+    """
+    import nest_asyncio
+    nest_asyncio.apply()
+
+
+@pytest.fixture(name="phoenix_url", scope="session")
+def phoenix_url_fixture(fail_missing: bool) -> str:
+    """
+    To run these tests, a phoenix server must be running.
+    The phoenix server can be started by running the following command:
+    docker run -p 6006:6006 -p 4317:4317 arizephoenix/phoenix:latest
+    """
+    import requests
+
+    url = os.getenv("NAT_CI_PHOENIX_URL", "http://localhost:6006")
+    try:
+        response = requests.get(url, timeout=5)
+        response.raise_for_status()
+
+        return url
+    except Exception as e:
+        reason = f"Unable to connect to Phoenix server at {url}: {e}"
+        if fail_missing:
+            raise RuntimeError(reason)
+        pytest.skip(reason=reason)
+
+
+@pytest.fixture(name="phoenix_trace_url", scope="session")
+def phoenix_trace_url_fixture(phoenix_url: str) -> str:
+    """
+    Some of our tools expect the base url provided by the phoenix_url fixture, however the
+    general.telemetry.tracing["phoenix"].endpoint expects the trace url which is what this fixture provides.
+    """
+    return f"{phoenix_url}/v1/traces"
+
+
+@pytest.fixture(name="redis_server", scope="session")
+def fixture_redis_server(fail_missing: bool) -> Generator[dict[str, str | int]]:
+    """Fixture to safely skip redis based tests if redis is not running"""
+    host = os.environ.get("NAT_CI_REDIS_HOST", "localhost")
+    port = int(os.environ.get("NAT_CI_REDIS_PORT", "6379"))
+    db = int(os.environ.get("NAT_CI_REDIS_DB", "0"))
+    bucket_name = os.environ.get("NAT_CI_REDIS_BUCKET_NAME", "test")
+
+    try:
+        import redis
+        client = redis.Redis(host=host, port=port, db=db)
+        if not client.ping():
+            raise RuntimeError("Failed to connect to Redis")
+        yield {"host": host, "port": port, "db": db, "bucket_name": bucket_name}
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("redis not installed, skipping redis tests")
+    except Exception as e:
+        if fail_missing:
+            raise
+        pytest.skip(f"Error connecting to Redis server: {e}, skipping redis tests")
+
+
+@pytest_asyncio.fixture(name="mysql_server", scope="module")
+async def fixture_mysql_server(fail_missing: bool) -> AsyncGenerator[dict[str, str | int]]:
+    """Fixture to safely skip MySQL based tests if MySQL is not running"""
+    host = os.environ.get('NAT_CI_MYSQL_HOST', '127.0.0.1')
+    port = int(os.environ.get('NAT_CI_MYSQL_PORT', '3306'))
+    user = os.environ.get('NAT_CI_MYSQL_USER', 'root')
+    password = os.environ.get('MYSQL_ROOT_PASSWORD', 'my_password')
+    bucket_name = os.environ.get('NAT_CI_MYSQL_BUCKET_NAME', 'test')
+    try:
+        import aiomysql
+        conn = await aiomysql.connect(host=host, port=port, user=user, password=password)
+        yield {"host": host, "port": port, "username": user, "password": password, "bucket_name": bucket_name}
+        conn.close()
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("aiomysql not installed, skipping MySQL tests")
+    except Exception as e:
+        if fail_missing:
+            raise
+        pytest.skip(f"Error connecting to MySQL server: {e}, skipping MySQL tests")
+
+
+@pytest.fixture(name="minio_server", scope="module")
+def minio_server_fixture(fail_missing: bool) -> Generator[dict[str, str | int]]:
+    """Fixture to safely skip MinIO based tests if MinIO is not running"""
+    host = os.getenv("NAT_CI_MINIO_HOST", "localhost")
+    port = int(os.getenv("NAT_CI_MINIO_PORT", "9000"))
+    bucket_name = os.getenv("NAT_CI_MINIO_BUCKET_NAME", "test")
+    aws_access_key_id = os.getenv("NAT_CI_MINIO_ACCESS_KEY_ID", "minioadmin")
+    aws_secret_access_key = os.getenv("NAT_CI_MINIO_SECRET_ACCESS_KEY", "minioadmin")
+    endpoint_url = f"http://{host}:{port}"
+
+    minio_info = {
+        "host": host,
+        "port": port,
+        "bucket_name": bucket_name,
+        "endpoint_url": endpoint_url,
+        "aws_access_key_id": aws_access_key_id,
+        "aws_secret_access_key": aws_secret_access_key,
+    }
+
+    try:
+        import botocore.session
+        session = botocore.session.get_session()
+
+        client = session.create_client("s3",
+                                       aws_access_key_id=aws_access_key_id,
+                                       aws_secret_access_key=aws_secret_access_key,
+                                       endpoint_url=endpoint_url)
+        client.head_bucket(Bucket=bucket_name)
+        yield minio_info
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("aioboto3 not installed, skipping MinIO tests")
+    except Exception as e:
+        import botocore.exceptions
+        if isinstance(e, botocore.exceptions.ClientError) and e.response['Error']['Code'] == '404':
+            yield minio_info  # Bucket does not exist, but server is reachable
+        elif fail_missing:
+            raise
+        else:
+            pytest.skip(f"Error connecting to MinIO server: {e}, skipping MinIO tests")
nat/test/utils.py CHANGED
@@ -15,12 +15,19 @@
 
 import importlib.resources
 import inspect
+import json
 import subprocess
 import typing
+from contextlib import asynccontextmanager
 from pathlib import Path
 
 if typing.TYPE_CHECKING:
+    from collections.abc import AsyncIterator
+
+    from httpx import AsyncClient
+
     from nat.data_models.config import Config
+    from nat.front_ends.fastapi.fastapi_front_end_plugin_worker import FastApiFrontEndPluginWorker
     from nat.utils.type_utils import StrPath
 
 
@@ -62,11 +69,12 @@ def locate_example_config(example_config_class: type,
 
 
 async def run_workflow(
-        config_file: "StrPath | None",
+        *,
+        config: "Config | None" = None,
+        config_file: "StrPath | None" = None,
         question: str,
         expected_answer: str,
         assert_expected_answer: bool = True,
-        config: "Config | None" = None,
 ) -> str:
     from nat.builder.workflow_builder import WorkflowBuilder
     from nat.runtime.loader import load_config
@@ -74,6 +82,7 @@ async def run_workflow(
 
     if config is None:
         assert config_file is not None, "Either config_file or config must be provided"
+        assert Path(config_file).exists(), f"Config file {config_file} does not exist"
         config = load_config(config_file)
 
     async with WorkflowBuilder.from_config(config=config) as workflow_builder:
@@ -85,3 +94,62 @@ async def run_workflow(
         assert expected_answer.lower() in result.lower(), f"Expected '{expected_answer}' in '{result}'"
 
     return result
+
+
+@asynccontextmanager
+async def build_nat_client(
+        config: "Config",
+        worker_class: "type[FastApiFrontEndPluginWorker] | None" = None) -> "AsyncIterator[AsyncClient]":
+    """
+    Build a NAT client for testing purposes.
+
+    Creates a test client with an ASGI transport for the specified configuration.
+    The client is backed by a FastAPI application built from the provided worker class.
+
+    Args:
+        config: The NAT configuration to use for building the client.
+        worker_class: Optional worker class to use. Defaults to FastApiFrontEndPluginWorker.
+
+    Yields:
+        An AsyncClient instance configured for testing.
+    """
+    from asgi_lifespan import LifespanManager
+    from httpx import ASGITransport
+    from httpx import AsyncClient
+
+    from nat.front_ends.fastapi.fastapi_front_end_plugin_worker import FastApiFrontEndPluginWorker
+
+    if worker_class is None:
+        worker_class = FastApiFrontEndPluginWorker
+
+    worker = worker_class(config)
+    app = worker.build_app()
+
+    async with LifespanManager(app):
+        async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
+            yield client
+
+
+def validate_workflow_output(workflow_output_file: Path) -> None:
+    """
+    Validate the contents of the workflow output file.
+    WIP: output format should be published as a schema and this validation should be done against that schema.
+    """
+    # Ensure the workflow_output.json file was created
+    assert workflow_output_file.exists(), "The workflow_output.json file was not created"
+
+    # Read and validate the workflow_output.json file
+    try:
+        with open(workflow_output_file, encoding="utf-8") as f:
+            result_json = json.load(f)
+    except json.JSONDecodeError as err:
+        raise RuntimeError("Failed to parse workflow_output.json as valid JSON") from err
+
+    assert isinstance(result_json, list), "The workflow_output.json file is not a list"
+    assert len(result_json) > 0, "The workflow_output.json file is empty"
+    assert isinstance(result_json[0], dict), "The workflow_output.json file is not a list of dictionaries"
+
+    # Ensure required keys exist
+    required_keys = ["id", "question", "answer", "generated_answer", "intermediate_steps"]
+    for key in required_keys:
+        assert all(item.get(key) for item in result_json), f"The '{key}' key is missing in workflow_output.json"
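`build_nat_client` wires the FastAPI app produced by `FastApiFrontEndPluginWorker` to an in-process `httpx.AsyncClient` via `ASGITransport`, so endpoint tests never open a real socket. A typical call site might look like the sketch below; the `/generate` path and the request payload are assumptions for illustration (they depend on the workflow's front-end configuration), and the `config` fixture is something your own test suite would provide.

```python
# Illustrative sketch only: the endpoint path and request body are assumed,
# not guaranteed by nvidia-nat-test.
import pytest

from nat.test.utils import build_nat_client


@pytest.mark.asyncio
async def test_workflow_endpoint(config):  # `config` is a nat Config supplied by your own fixture
    async with build_nat_client(config) as client:
        response = await client.post("/generate", json={"input_message": "Hello"})
        assert response.status_code == 200
```

`validate_workflow_output` complements this by asserting the shape of an evaluation output file (a non-empty list of dictionaries with `id`, `question`, `answer`, `generated_answer`, and `intermediate_steps` keys), pending the schema the docstring promises.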
nvidia_nat_test-1.3.0rc4.dist-info/METADATA CHANGED
@@ -1,7 +1,12 @@
 Metadata-Version: 2.4
 Name: nvidia-nat-test
-Version: 1.3.0rc2
+Version: 1.3.0rc4
 Summary: Testing utilities for NeMo Agent toolkit
+Author: NVIDIA Corporation
+Maintainer: NVIDIA Corporation
+License: Apache-2.0
+Project-URL: documentation, https://docs.nvidia.com/nemo/agent-toolkit/latest/
+Project-URL: source, https://github.com/NVIDIA/NeMo-Agent-Toolkit
 Keywords: ai,rag,agents
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3.11
@@ -9,9 +14,12 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Python: <3.14,>=3.11
 Description-Content-Type: text/markdown
+License-File: LICENSE-3rd-party.txt
+License-File: LICENSE.md
-Requires-Dist: nvidia-nat==v1.3.0-rc2
+Requires-Dist: nvidia-nat==v1.3.0-rc4
 Requires-Dist: langchain-community~=0.3
 Requires-Dist: pytest~=8.3
+Dynamic: license-file
 
 <!--
 SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
nvidia_nat_test-1.3.0rc4.dist-info/RECORD ADDED
@@ -0,0 +1,18 @@
+nat/meta/pypi.md,sha256=LLKJHg5oN1-M9Pqfk3Bmphkk4O2TFsyiixuK5T0Y-gw,1100
+nat/test/__init__.py,sha256=_RnTJnsUucHvla_nYKqD4O4g8Bz0tcuDRzWk1bEhcy0,875
+nat/test/embedder.py,sha256=ClDyK1kna4hCBSlz71gK1B-ZjlwcBHTDQRekoNM81Bs,1809
+nat/test/functions.py,sha256=ZxXVzfaLBGOpR5qtmMrKU7q-M9-vVGGj3Xi5mrw4vHY,3557
+nat/test/llm.py,sha256=f6bz6arAQjhjuOKFrLfu_U1LbiyFzQmpM-q8b-WKSrU,9550
+nat/test/memory.py,sha256=xki_A2yiMhEZuQk60K7t04QRqf32nQqnfzD5Iv7fkvw,1456
+nat/test/object_store_tests.py,sha256=PyJioOtoSzILPq6LuD-sOZ_89PIcgXWZweoHBQpK2zQ,4281
+nat/test/plugin.py,sha256=Sh2iJy0XSKeTsODz_8azMKDCyv_T-ZLU6CLpOjm0Pu4,17306
+nat/test/register.py,sha256=o1BEA5fyxyFyCxXhQ6ArmtuNpgRyTEfvw6HdBgECPLI,897
+nat/test/tool_test_runner.py,sha256=SxavwXHkvCQDl_PUiiiqgvGfexKJJTeBdI5i1qk6AzI,21712
+nat/test/utils.py,sha256=Lml187P9SUP3IB_HhBaU1XNhiljcpOFFZOAxgQR1vQo,5936
+nvidia_nat_test-1.3.0rc4.dist-info/licenses/LICENSE-3rd-party.txt,sha256=fOk5jMmCX9YoKWyYzTtfgl-SUy477audFC5hNY4oP7Q,284609
+nvidia_nat_test-1.3.0rc4.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+nvidia_nat_test-1.3.0rc4.dist-info/METADATA,sha256=KIPlC8vJ6if6Z1KtZwNq8ehzmud1--gKUX_o6kyY18Y,1914
+nvidia_nat_test-1.3.0rc4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nvidia_nat_test-1.3.0rc4.dist-info/entry_points.txt,sha256=7dOP9XB6iMDqvav3gYx9VWUwA8RrFzhbAa8nGeC8e4Y,99
+nvidia_nat_test-1.3.0rc4.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
+nvidia_nat_test-1.3.0rc4.dist-info/RECORD,,