nvidia-nat-test 1.3.0a20251013__py3-none-any.whl → 1.3.0a20251022__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

nat/test/plugin.py CHANGED
@@ -15,10 +15,14 @@
 
 import os
 import subprocess
+import types
 import typing
+from collections.abc import AsyncGenerator
+from collections.abc import Generator
 from pathlib import Path
 
 import pytest
+import pytest_asyncio
 
 if typing.TYPE_CHECKING:
     from docker.client import DockerClient
@@ -216,10 +220,48 @@ def azure_openai_keys_fixture(fail_missing: bool):
     yield require_env_variables(
         varnames=["AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT"],
         reason="Azure integration tests require the `AZURE_OPENAI_API_KEY` and `AZURE_OPENAI_ENDPOINT` environment "
-        "variable to be defined.",
+        "variables to be defined.",
         fail_missing=fail_missing)
 
 
+@pytest.fixture(name="langfuse_keys", scope='session')
+def langfuse_keys_fixture(fail_missing: bool):
+    """
+    Use for integration tests that require Langfuse credentials.
+    """
+    yield require_env_variables(
+        varnames=["LANGFUSE_PUBLIC_KEY", "LANGFUSE_SECRET_KEY"],
+        reason="Langfuse integration tests require the `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY` environment "
+        "variables to be defined.",
+        fail_missing=fail_missing)
+
+
+@pytest.fixture(name="wandb_api_key", scope='session')
+def wandb_api_key_fixture(fail_missing: bool):
+    """
+    Use for integration tests that require a Weights & Biases API key.
+    """
+    yield require_env_variables(
+        varnames=["WANDB_API_KEY"],
+        reason="Weights & Biases integration tests require the `WANDB_API_KEY` environment variable to be defined.",
+        fail_missing=fail_missing)
+
+
+@pytest.fixture(name="weave", scope='session')
+def require_weave_fixture(fail_missing: bool) -> types.ModuleType:
+    """
+    Use for integration tests that require Weave to be installed.
+    """
+    try:
+        import weave
+        return weave
+    except Exception as e:
+        reason = "Weave must be installed to run weave based tests"
+        if fail_missing:
+            raise RuntimeError(reason) from e
+        pytest.skip(reason=reason)
+
+
 @pytest.fixture(name="require_docker", scope='session')
 def require_docker_fixture(fail_missing: bool) -> "DockerClient":
     """
@@ -256,8 +298,20 @@ def root_repo_dir_fixture() -> Path:
     return locate_repo_root()
 
 
-@pytest.fixture(name="require_etcd", scope="session")
-def require_etcd_fixture(fail_missing: bool = False) -> bool:
+@pytest.fixture(name="examples_dir", scope='session')
+def examples_dir_fixture(root_repo_dir: Path) -> Path:
+    return root_repo_dir / "examples"
+
+
+@pytest.fixture(name="env_without_nat_log_level", scope='function')
+def env_without_nat_log_level_fixture() -> dict[str, str]:
+    env = os.environ.copy()
+    env.pop("NAT_LOG_LEVEL", None)
+    return env
+
+
+@pytest.fixture(name="etcd_url", scope="session")
+def etcd_url_fixture(fail_missing: bool = False) -> str:
     """
     To run these tests, an etcd server must be running
     """
@@ -265,21 +319,22 @@ def require_etcd_fixture(fail_missing: bool = False) -> bool:
 
     host = os.getenv("NAT_CI_ETCD_HOST", "localhost")
     port = os.getenv("NAT_CI_ETCD_PORT", "2379")
-    health_url = f"http://{host}:{port}/health"
+    url = f"http://{host}:{port}"
+    health_url = f"{url}/health"
 
     try:
         response = requests.get(health_url, timeout=5)
         response.raise_for_status()
-        return True
+        return url
     except:  # noqa: E722
-        failure_reason = f"Unable to connect to etcd server at {health_url}"
+        failure_reason = f"Unable to connect to etcd server at {url}"
         if fail_missing:
             raise RuntimeError(failure_reason)
         pytest.skip(reason=failure_reason)
 
 
 @pytest.fixture(name="milvus_uri", scope="session")
-def milvus_uri_fixture(require_etcd: bool, fail_missing: bool = False) -> str:
+def milvus_uri_fixture(etcd_url: str, fail_missing: bool = False) -> str:
     """
     To run these tests, a Milvus server must be running
     """
@@ -343,3 +398,193 @@ def populate_milvus_fixture(milvus_uri: str, root_repo_dir: Path):
         "wikipedia_docs"
     ],
     check=True)
+
+
+@pytest.fixture(name="require_nest_asyncio", scope="session")
+def require_nest_asyncio_fixture():
+    """
+    Some tests require nest_asyncio to be installed to allow nested event loops. Calling nest_asyncio.apply() more
+    than once is a no-op, so it is safe to use this fixture even if one of our dependencies has already called it.
+    """
+    import nest_asyncio
+    nest_asyncio.apply()
+
+
+@pytest.fixture(name="phoenix_url", scope="session")
+def phoenix_url_fixture(fail_missing: bool) -> str:
+    """
+    To run these tests, a Phoenix server must be running.
+    The Phoenix server can be started by running the following command:
+    docker run -p 6006:6006 -p 4317:4317 arizephoenix/phoenix:latest
+    """
+    import requests
+
+    url = os.getenv("NAT_CI_PHOENIX_URL", "http://localhost:6006")
+    try:
+        response = requests.get(url, timeout=5)
+        response.raise_for_status()
+
+        return url
+    except Exception as e:
+        reason = f"Unable to connect to Phoenix server at {url}: {e}"
+        if fail_missing:
+            raise RuntimeError(reason)
+        pytest.skip(reason=reason)
+
+
+@pytest.fixture(name="phoenix_trace_url", scope="session")
+def phoenix_trace_url_fixture(phoenix_url: str) -> str:
+    """
+    Some of our tools expect the base URL provided by the phoenix_url fixture; however,
+    general.telemetry.tracing["phoenix"].endpoint expects the trace URL, which is what this fixture provides.
+    """
+    return f"{phoenix_url}/v1/traces"
+
+
+@pytest.fixture(name="redis_server", scope="session")
+def fixture_redis_server(fail_missing: bool) -> Generator[dict[str, str | int]]:
+    """Fixture to safely skip Redis-based tests if Redis is not running"""
+    host = os.environ.get("NAT_CI_REDIS_HOST", "localhost")
+    port = int(os.environ.get("NAT_CI_REDIS_PORT", "6379"))
+    db = int(os.environ.get("NAT_CI_REDIS_DB", "0"))
+    bucket_name = os.environ.get("NAT_CI_REDIS_BUCKET_NAME", "test")
+
+    try:
+        import redis
+        client = redis.Redis(host=host, port=port, db=db)
+        if not client.ping():
+            raise RuntimeError("Failed to connect to Redis")
+        yield {"host": host, "port": port, "db": db, "bucket_name": bucket_name}
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("redis not installed, skipping redis tests")
+    except Exception as e:
+        if fail_missing:
+            raise
+        pytest.skip(f"Error connecting to Redis server: {e}, skipping redis tests")
+
+
+@pytest_asyncio.fixture(name="mysql_server", scope="session")
+async def fixture_mysql_server(fail_missing: bool) -> AsyncGenerator[dict[str, str | int]]:
+    """Fixture to safely skip MySQL-based tests if MySQL is not running"""
+    host = os.environ.get('NAT_CI_MYSQL_HOST', '127.0.0.1')
+    port = int(os.environ.get('NAT_CI_MYSQL_PORT', '3306'))
+    user = os.environ.get('NAT_CI_MYSQL_USER', 'root')
+    password = os.environ.get('MYSQL_ROOT_PASSWORD', 'my_password')
+    bucket_name = os.environ.get('NAT_CI_MYSQL_BUCKET_NAME', 'test')
+    try:
+        import aiomysql
+        conn = await aiomysql.connect(host=host, port=port, user=user, password=password)
+        yield {"host": host, "port": port, "username": user, "password": password, "bucket_name": bucket_name}
+        conn.close()
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("aiomysql not installed, skipping MySQL tests")
+    except Exception as e:
+        if fail_missing:
+            raise
+        pytest.skip(f"Error connecting to MySQL server: {e}, skipping MySQL tests")
+
+
+@pytest.fixture(name="minio_server", scope="session")
+def minio_server_fixture(fail_missing: bool) -> Generator[dict[str, str | int]]:
+    """Fixture to safely skip MinIO-based tests if MinIO is not running"""
+    host = os.getenv("NAT_CI_MINIO_HOST", "localhost")
+    port = int(os.getenv("NAT_CI_MINIO_PORT", "9000"))
+    bucket_name = os.getenv("NAT_CI_MINIO_BUCKET_NAME", "test")
+    aws_access_key_id = os.getenv("NAT_CI_MINIO_ACCESS_KEY_ID", "minioadmin")
+    aws_secret_access_key = os.getenv("NAT_CI_MINIO_SECRET_ACCESS_KEY", "minioadmin")
+    endpoint_url = f"http://{host}:{port}"
+
+    minio_info = {
+        "host": host,
+        "port": port,
+        "bucket_name": bucket_name,
+        "endpoint_url": endpoint_url,
+        "aws_access_key_id": aws_access_key_id,
+        "aws_secret_access_key": aws_secret_access_key,
+    }
+
+    try:
+        import botocore.session
+        session = botocore.session.get_session()
+
+        client = session.create_client("s3",
+                                       aws_access_key_id=aws_access_key_id,
+                                       aws_secret_access_key=aws_secret_access_key,
+                                       endpoint_url=endpoint_url)
+        client.list_buckets()
+        yield minio_info
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("botocore not installed, skipping MinIO tests")
+    except Exception as e:
+        if fail_missing:
+            raise
+        pytest.skip(f"Error connecting to MinIO server: {e}, skipping MinIO tests")
+
+
+@pytest.fixture(name="langfuse_bucket", scope="session")
+def langfuse_bucket_fixture(fail_missing: bool, minio_server: dict[str, str | int]) -> Generator[str]:
+    bucket_name = os.getenv("NAT_CI_LANGFUSE_BUCKET", "langfuse")
+    try:
+        import botocore.session
+        session = botocore.session.get_session()
+
+        client = session.create_client("s3",
+                                       aws_access_key_id=minio_server["aws_access_key_id"],
+                                       aws_secret_access_key=minio_server["aws_secret_access_key"],
+                                       endpoint_url=minio_server["endpoint_url"])
+
+        buckets = client.list_buckets()
+        bucket_names = [b['Name'] for b in buckets['Buckets']]
+        if bucket_name not in bucket_names:
+            client.create_bucket(Bucket=bucket_name)
+
+        yield bucket_name
+    except ImportError:
+        if fail_missing:
+            raise
+        pytest.skip("botocore not installed, skipping MinIO tests")
+    except Exception as e:
+        if fail_missing:
+            raise
+        pytest.skip(f"Error connecting to MinIO server: {e}, skipping MinIO tests")
+
+
+@pytest.fixture(name="langfuse_url", scope="session")
+def langfuse_url_fixture(fail_missing: bool, langfuse_bucket: str) -> str:
+    """
+    To run these tests, a Langfuse server must be running.
+    """
+    import requests
+
+    host = os.getenv("NAT_CI_LANGFUSE_HOST", "localhost")
+    port = int(os.getenv("NAT_CI_LANGFUSE_PORT", "3000"))
+    url = f"http://{host}:{port}"
+    health_endpoint = f"{url}/api/public/health"
+    try:
+        response = requests.get(health_endpoint, timeout=5)
+        response.raise_for_status()
+
+        return url
+    except Exception as e:
+        reason = f"Unable to connect to Langfuse server at {url}: {e}"
+        if fail_missing:
+            raise RuntimeError(reason)
+        pytest.skip(reason=reason)
+
+
+@pytest.fixture(name="langfuse_trace_url", scope="session")
+def langfuse_trace_url_fixture(langfuse_url: str) -> str:
+    """
+    The langfuse_url fixture provides the base URL; however, general.telemetry.tracing["langfuse"].endpoint expects
+    the trace URL, which is what this fixture provides.
+    """
+    return f"{langfuse_url}/api/public/otel/v1/traces"
nat/test/utils.py CHANGED
@@ -15,6 +15,7 @@
 
 import importlib.resources
 import inspect
+import json
 import subprocess
 import typing
 from contextlib import asynccontextmanager
@@ -68,11 +69,12 @@ def locate_example_config(example_config_class: type,
 
 
 async def run_workflow(
-    config_file: "StrPath | None",
+    *,
+    config: "Config | None" = None,
+    config_file: "StrPath | None" = None,
     question: str,
     expected_answer: str,
     assert_expected_answer: bool = True,
-    config: "Config | None" = None,
 ) -> str:
     from nat.builder.workflow_builder import WorkflowBuilder
     from nat.runtime.loader import load_config
@@ -80,6 +82,7 @@ async def run_workflow(
 
     if config is None:
         assert config_file is not None, "Either config_file or config must be provided"
+        assert Path(config_file).exists(), f"Config file {config_file} does not exist"
        config = load_config(config_file)
 
    async with WorkflowBuilder.from_config(config=config) as workflow_builder:
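
With the parameters now keyword-only, positional call sites such as run_workflow("config.yml", "question", "answer") no longer work and must name every argument. A call sketch (file name and strings hypothetical):

    # Hypothetical call site for the updated signature.
    result = await run_workflow(config_file="configs/workflow.yml",
                                question="What is 4 + 4?",
                                expected_answer="8")
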
@@ -125,3 +128,28 @@ async def build_nat_client(
     async with LifespanManager(app):
         async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
             yield client
+
+
+def validate_workflow_output(workflow_output_file: Path) -> None:
+    """
+    Validate the contents of the workflow output file.
+    WIP: output format should be published as a schema and this validation should be done against that schema.
+    """
+    # Ensure the workflow_output.json file was created
+    assert workflow_output_file.exists(), "The workflow_output.json file was not created"
+
+    # Read and validate the workflow_output.json file
+    try:
+        with open(workflow_output_file, encoding="utf-8") as f:
+            result_json = json.load(f)
+    except json.JSONDecodeError as err:
+        raise RuntimeError("Failed to parse workflow_output.json as valid JSON") from err
+
+    assert isinstance(result_json, list), "The workflow_output.json file is not a list"
+    assert len(result_json) > 0, "The workflow_output.json file is empty"
+    assert isinstance(result_json[0], dict), "The workflow_output.json file is not a list of dictionaries"
+
+    # Ensure required keys exist
+    required_keys = ["id", "question", "answer", "generated_answer", "intermediate_steps"]
+    for key in required_keys:
+        assert all(item.get(key) for item in result_json), f"The '{key}' key is missing in workflow_output.json"
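
A usage sketch for the new validator (the test and the step that produces the file are hypothetical; any file written in the same list-of-records format works):

    # Hypothetical consumer of validate_workflow_output.
    from pathlib import Path

    from nat.test.utils import validate_workflow_output

    def test_eval_output(tmp_path: Path):
        output_file = tmp_path / "workflow_output.json"
        ...  # run the workflow/eval step that writes output_file
        validate_workflow_output(output_file)
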
nvidia_nat_test-1.3.0a20251013.dist-info/METADATA → nvidia_nat_test-1.3.0a20251022.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nvidia-nat-test
-Version: 1.3.0a20251013
+Version: 1.3.0a20251022
 Summary: Testing utilities for NeMo Agent toolkit
 Author: NVIDIA Corporation
 Maintainer: NVIDIA Corporation
@@ -16,7 +16,7 @@ Requires-Python: <3.14,>=3.11
 Description-Content-Type: text/markdown
 License-File: LICENSE-3rd-party.txt
 License-File: LICENSE.md
-Requires-Dist: nvidia-nat==v1.3.0a20251013
+Requires-Dist: nvidia-nat==v1.3.0a20251022
 Requires-Dist: langchain-community~=0.3
 Requires-Dist: pytest~=8.3
 Dynamic: license-file
nvidia_nat_test-1.3.0a20251013.dist-info/RECORD → nvidia_nat_test-1.3.0a20251022.dist-info/RECORD CHANGED
@@ -5,14 +5,14 @@ nat/test/functions.py,sha256=ZxXVzfaLBGOpR5qtmMrKU7q-M9-vVGGj3Xi5mrw4vHY,3557
 nat/test/llm.py,sha256=f6bz6arAQjhjuOKFrLfu_U1LbiyFzQmpM-q8b-WKSrU,9550
 nat/test/memory.py,sha256=xki_A2yiMhEZuQk60K7t04QRqf32nQqnfzD5Iv7fkvw,1456
 nat/test/object_store_tests.py,sha256=PyJioOtoSzILPq6LuD-sOZ_89PIcgXWZweoHBQpK2zQ,4281
-nat/test/plugin.py,sha256=b9DsqeRDYrBA00egilznvNpr_lQmdnkUQilsWX07mTA,11688
+nat/test/plugin.py,sha256=NV4JcUnqhBhQzO4sc3P8JDBlwWohmu5TYVCWRbNPwVo,20911
 nat/test/register.py,sha256=o1BEA5fyxyFyCxXhQ6ArmtuNpgRyTEfvw6HdBgECPLI,897
 nat/test/tool_test_runner.py,sha256=SxavwXHkvCQDl_PUiiiqgvGfexKJJTeBdI5i1qk6AzI,21712
-nat/test/utils.py,sha256=wXa9uH7-_HH7eg0bKpBrlVhffYrc2-F2MYc5ZBwSbAQ,4593
-nvidia_nat_test-1.3.0a20251013.dist-info/licenses/LICENSE-3rd-party.txt,sha256=fOk5jMmCX9YoKWyYzTtfgl-SUy477audFC5hNY4oP7Q,284609
-nvidia_nat_test-1.3.0a20251013.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-nvidia_nat_test-1.3.0a20251013.dist-info/METADATA,sha256=rBeseEclUebrB4PqhBej8YnDDYr7aCneyISkC7iNGng,1925
-nvidia_nat_test-1.3.0a20251013.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-nvidia_nat_test-1.3.0a20251013.dist-info/entry_points.txt,sha256=7dOP9XB6iMDqvav3gYx9VWUwA8RrFzhbAa8nGeC8e4Y,99
-nvidia_nat_test-1.3.0a20251013.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
-nvidia_nat_test-1.3.0a20251013.dist-info/RECORD,,
+nat/test/utils.py,sha256=Lml187P9SUP3IB_HhBaU1XNhiljcpOFFZOAxgQR1vQo,5936
+nvidia_nat_test-1.3.0a20251022.dist-info/licenses/LICENSE-3rd-party.txt,sha256=fOk5jMmCX9YoKWyYzTtfgl-SUy477audFC5hNY4oP7Q,284609
+nvidia_nat_test-1.3.0a20251022.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+nvidia_nat_test-1.3.0a20251022.dist-info/METADATA,sha256=ZTJn_nteBj_DVGb2tPzokSsYetEnCvThAvSnKyaGP1Y,1925
+nvidia_nat_test-1.3.0a20251022.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nvidia_nat_test-1.3.0a20251022.dist-info/entry_points.txt,sha256=7dOP9XB6iMDqvav3gYx9VWUwA8RrFzhbAa8nGeC8e4Y,99
+nvidia_nat_test-1.3.0a20251022.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
+nvidia_nat_test-1.3.0a20251022.dist-info/RECORD,,