nvidia-nat-test 1.4.0a20251023__py3-none-any.whl → 1.4.0a20251129__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


nat/test/llm.py CHANGED
@@ -96,6 +96,14 @@ async def test_llm_langchain(config: TestLLMConfig, builder: Builder):
  await chooser.async_sleep()
  yield chooser.next_response()

+ def bind_tools(self, tools: Any, **_kwargs: Any) -> "LangChainTestLLM":
+ """Bind tools to the LLM. Returns self to maintain fluent interface."""
+ return self
+
+ def bind(self, **_kwargs: Any) -> "LangChainTestLLM":
+ """Bind additional parameters to the LLM. Returns self to maintain fluent interface."""
+ return self
+
  yield LangChainTestLLM()

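The two added methods are deliberate no-ops: agent code that calls bind_tools(...) or bind(...) on the test LLM gets the same object back and keeps receiving its canned responses. A minimal sketch of the fluent no-op pattern, using a hypothetical FakeLLM rather than the shipped LangChainTestLLM:

from typing import Any

class FakeLLM:
    """Hypothetical stand-in that ignores bound tools/parameters and replays canned responses."""

    def __init__(self, responses: list[str]):
        self._responses = responses
        self._index = 0

    def bind_tools(self, tools: Any, **_kwargs: Any) -> "FakeLLM":
        # Tools are accepted but ignored; returning self keeps call chains working.
        return self

    def bind(self, **_kwargs: Any) -> "FakeLLM":
        return self

    def invoke(self, _prompt: str) -> str:
        response = self._responses[self._index % len(self._responses)]
        self._index += 1
        return response

llm = FakeLLM(["canned answer"]).bind_tools([]).bind(stop=["\n"])
assert llm.invoke("anything") == "canned answer"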
nat/test/plugin.py CHANGED
@@ -14,7 +14,9 @@
  # limitations under the License.

  import os
+ import random
  import subprocess
+ import time
  import types
  import typing
  from collections.abc import AsyncGenerator
@@ -25,6 +27,10 @@ import pytest
  import pytest_asyncio

  if typing.TYPE_CHECKING:
+ import galileo.log_streams
+ import galileo.projects
+ import langsmith.client
+
  from docker.client import DockerClient


@@ -262,6 +268,116 @@ def require_weave_fixture(fail_missing: bool) -> types.ModuleType:
  pytest.skip(reason=reason)


+ @pytest.fixture(name="langsmith_api_key", scope='session')
+ def langsmith_api_key_fixture(fail_missing: bool):
+ """
+ Use for integration tests that require a LangSmith API key.
+ """
+ yield require_env_variables(
+ varnames=["LANGSMITH_API_KEY"],
+ reason="LangSmith integration tests require the `LANGSMITH_API_KEY` environment variable to be defined.",
+ fail_missing=fail_missing)
+
+
+ @pytest.fixture(name="langsmith_client")
+ def langsmith_client_fixture(langsmith_api_key: str, fail_missing: bool) -> "langsmith.client.Client":
+ try:
+ import langsmith.client
+ client = langsmith.client.Client()
+ return client
+ except ImportError:
+ reason = "LangSmith integration tests require the `langsmith` package to be installed."
+ if fail_missing:
+ raise RuntimeError(reason)
+ pytest.skip(reason=reason)
+
+
+ @pytest.fixture(name="project_name")
+ def project_name_fixture() -> str:
+ # Create a unique project name for each test run
+ return f"nat-e2e-test-{time.time()}-{random.random()}"
+
+
+ @pytest.fixture(name="langsmith_project_name")
+ def langsmith_project_name_fixture(langsmith_client: "langsmith.client.Client", project_name: str) -> Generator[str]:
+ langsmith_client.create_project(project_name)
+ yield project_name
+
+ langsmith_client.delete_project(project_name=project_name)
+
+
+ @pytest.fixture(name="galileo_api_key", scope='session')
+ def galileo_api_key_fixture(fail_missing: bool):
+ """
+ Use for integration tests that require a Galileo API key.
+ """
+ yield require_env_variables(
+ varnames=["GALILEO_API_KEY"],
+ reason="Galileo integration tests require the `GALILEO_API_KEY` environment variable to be defined.",
+ fail_missing=fail_missing)
+
+
+ @pytest.fixture(name="galileo_project")
+ def galileo_project_fixture(galileo_api_key: str, fail_missing: bool,
+ project_name: str) -> Generator["galileo.projects.Project"]:
+ """
+ Creates a unique Galileo project and deletes it after the test run.
+ """
+ try:
+ import galileo.projects
+ project = galileo.projects.create_project(name=project_name)
+ yield project
+
+ galileo.projects.delete_project(id=project.id)
+ except ImportError as e:
+ reason = "Galileo integration tests require the `galileo` package to be installed."
+ if fail_missing:
+ raise RuntimeError(reason) from e
+ pytest.skip(reason=reason)
+
+
+ @pytest.fixture(name="galileo_log_stream")
+ def galileo_log_stream_fixture(galileo_project: "galileo.projects.Project") -> "galileo.log_streams.LogStream":
+ """
+ Creates a Galileo log stream for integration tests.
+
+ The log stream is automatically deleted when the associated project is deleted.
+ """
+ import galileo.log_streams
+ return galileo.log_streams.create_log_stream(project_id=galileo_project.id, name="test")
+
+
+ @pytest.fixture(name="catalyst_keys", scope='session')
+ def catalyst_keys_fixture(fail_missing: bool):
+ """
+ Use for integration tests that require RagaAI Catalyst credentials.
+ """
+ yield require_env_variables(
+ varnames=["CATALYST_ACCESS_KEY", "CATALYST_SECRET_KEY"],
+ reason="Catalyst integration tests require the `CATALYST_ACCESS_KEY` and `CATALYST_SECRET_KEY` environment "
+ "variables to be defined.",
+ fail_missing=fail_missing)
+
+
+ @pytest.fixture(name="catalyst_project_name")
+ def catalyst_project_name_fixture(catalyst_keys) -> str:
+ return os.environ.get("NAT_CI_CATALYST_PROJECT_NAME", "nat-e2e")
+
+
+ @pytest.fixture(name="catalyst_dataset_name")
+ def catalyst_dataset_name_fixture(catalyst_project_name: str, project_name: str) -> str:
+ """
+ We can't create and delete projects, but we can create and delete datasets, so use a unique dataset name
+ """
+ dataset_name = project_name.replace('.', '-')
+ yield dataset_name
+
+ from ragaai_catalyst import Dataset
+ ds = Dataset(catalyst_project_name)
+ if dataset_name in ds.list_datasets():
+ ds.delete_dataset(dataset_name)
+
+
  @pytest.fixture(name="require_docker", scope='session')
  def require_docker_fixture(fail_missing: bool) -> "DockerClient":
  """
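These fixtures compose: `project_name` generates a unique name per run, and the service-specific fixtures create the remote project, log stream, or dataset and tear it down afterwards. A hedged sketch of a test consuming one of them (the test body is illustrative, not part of the package):

def test_langsmith_project_is_provisioned(langsmith_project_name: str):
    # The fixture has already created the LangSmith project and will delete it after
    # the test; the test itself only needs the unique project name.
    assert langsmith_project_name.startswith("nat-e2e-test-")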
@@ -400,14 +516,22 @@ def populate_milvus_fixture(milvus_uri: str, root_repo_dir: Path):
  check=True)


- @pytest.fixture(name="require_nest_asyncio", scope="session")
+ @pytest.fixture(name="require_nest_asyncio", scope="session", autouse=True)
  def require_nest_asyncio_fixture():
  """
- Some tests require nest_asyncio to be installed to allow nested event loops, calling nest_asyncio.apply() more than
- once is a no-op so it's safe to call this fixture even if one of our dependencies already called it.
+ Some tests require the nest_asyncio2 patch to be applied to allow nested event loops; calling
+ `nest_asyncio2.apply()` more than once is a no-op. However, the nest_asyncio2 patch must be applied
+ before the older nest_asyncio patch, which requires that any library applying the patch on import
+ is imported lazily.
  """
- import nest_asyncio
- nest_asyncio.apply()
+ import nest_asyncio2
+ try:
+ nest_asyncio2.apply(error_on_mispatched=True)
+ except RuntimeError as e:
+ raise RuntimeError(
+ "nest_asyncio2 fixture called but asyncio is already patched, most likely this is due to the nest_asyncio "
+ "being applied first, which is not compatible with Python 3.12+. Please ensure that any libraries which "
+ "apply nest_asyncio on import are lazily imported.") from e


  @pytest.fixture(name="phoenix_url", scope="session")
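With the fixture now autouse and session-scoped, every test in the session runs with asyncio already patched for re-entrancy. A minimal sketch of what that enables, assuming the fixture has applied the patch before the test runs:

import asyncio

async def fetch_answer() -> str:
    await asyncio.sleep(0)
    return "done"

def sync_helper_called_from_async_code() -> str:
    # With nest_asyncio2 applied, asyncio.run() works even when an outer event loop
    # (for example the one driving an async test) is already running.
    return asyncio.run(fetch_answer())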
@@ -447,14 +571,15 @@ def fixture_redis_server(fail_missing: bool) -> Generator[dict[str, str | int]]:
  host = os.environ.get("NAT_CI_REDIS_HOST", "localhost")
  port = int(os.environ.get("NAT_CI_REDIS_PORT", "6379"))
  db = int(os.environ.get("NAT_CI_REDIS_DB", "0"))
+ password = os.environ.get("REDIS_PASSWORD", "redis")
  bucket_name = os.environ.get("NAT_CI_REDIS_BUCKET_NAME", "test")

  try:
  import redis
- client = redis.Redis(host=host, port=port, db=db)
+ client = redis.Redis(host=host, port=port, db=db, password=password)
  if not client.ping():
  raise RuntimeError("Failed to connect to Redis")
- yield {"host": host, "port": port, "db": db, "bucket_name": bucket_name}
+ yield {"host": host, "port": port, "db": db, "bucket_name": bucket_name, "password": password}
  except ImportError:
  if fail_missing:
  raise
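The yielded dict now carries the password, so tests should build their clients from the fixture values instead of hard-coding connection details. A hedged sketch, assuming the fixture is exposed to tests under the name redis_server (the decorator sits outside this hunk):

def test_redis_roundtrip(redis_server: dict):
    import redis

    # Build the client from the fixture's connection details, including the new password.
    client = redis.Redis(host=redis_server["host"],
                         port=redis_server["port"],
                         db=redis_server["db"],
                         password=redis_server["password"])
    client.set("nat-test-key", b"value")
    assert client.get("nat-test-key") == b"value"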
@@ -588,3 +713,162 @@ def langfuse_trace_url_fixture(langfuse_url: str) -> str:
  the trace url which is what this fixture provides.
  """
  return f"{langfuse_url}/api/public/otel/v1/traces"
+
+
+ @pytest.fixture(name="oauth2_server_url", scope="session")
+ def oauth2_server_url_fixture(fail_missing: bool) -> str:
+ """
+ To run these tests, an oauth2 server must be running.
+ """
+ import requests
+
+ host = os.getenv("NAT_CI_OAUTH2_HOST", "localhost")
+ port = int(os.getenv("NAT_CI_OAUTH2_PORT", "5001"))
+ url = f"http://{host}:{port}"
+ try:
+ response = requests.get(url, timeout=5)
+ response.raise_for_status()
+
+ return url
+ except Exception as e:
+ reason = f"Unable to connect to OAuth2 server at {url}: {e}"
+ if fail_missing:
+ raise RuntimeError(reason)
+ pytest.skip(reason=reason)
+
+
+ @pytest.fixture(name="oauth2_client_credentials", scope="session")
+ def oauth2_client_credentials_fixture(oauth2_server_url: str, fail_missing: bool) -> dict[str, typing.Any]:
+ """
+ Fixture to provide OAuth2 client credentials for testing
+
+ Simulates the steps a user would take in a web browser to create a new OAuth2 client as documented in:
+ examples/front_ends/simple_auth/README.md
+ """
+
+ try:
+ import requests
+ from bs4 import BeautifulSoup
+ username = os.getenv("NAT_CI_OAUTH2_CLIENT_USERNAME", "Testy Testerson")
+
+ # This post request responds with a cookie that we need for future requests and a 302 redirect, the response
+ # for the redirected url doesn't contain the cookie, so we disable the redirect here to capture the cookie
+ user_create_response = requests.post(oauth2_server_url,
+ data=[("username", username)],
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ allow_redirects=False,
+ timeout=5)
+ user_create_response.raise_for_status()
+ cookies = user_create_response.cookies
+
+ client_create_response = requests.post(f"{oauth2_server_url}/create_client",
+ cookies=cookies,
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ data=[
+ ("client_name", "test"),
+ ("client_uri", "https://test.com"),
+ ("scope", "openid profile email"),
+ ("redirect_uri", "http://localhost:8000/auth/redirect"),
+ ("grant_type", "authorization_code\nrefresh_token"),
+ ("response_type", "code"),
+ ("token_endpoint_auth_method", "client_secret_post"),
+ ],
+ timeout=5)
+ client_create_response.raise_for_status()
+
+ # Unfortunately the response is HTML so we need to parse it to get the client ID and secret, which are not
+ # locatable via ID tags
+ soup = BeautifulSoup(client_create_response.text, 'html.parser')
+ strong_tags = soup.find_all('strong')
+ i = 0
+ client_id = None
+ client_secret = None
+ while i < len(strong_tags) and None in (client_id, client_secret):
+ tag = strong_tags[i]
+ contents = "".join(tag.contents)
+ if client_id is None and "client_id:" in contents:
+ client_id = tag.next_sibling.strip()
+ elif client_secret is None and "client_secret:" in contents:
+ client_secret = tag.next_sibling.strip()
+
+ i += 1
+
+ assert client_id is not None and client_secret is not None, "Failed to parse client credentials from response"
+
+ return {
+ "id": client_id,
+ "secret": client_secret,
+ "username": username,
+ "url": oauth2_server_url,
+ "cookies": cookies
+ }
+
+ except Exception as e:
+ reason = f"Unable to create OAuth2 client: {e}"
+ if fail_missing:
+ raise RuntimeError(reason)
+ pytest.skip(reason=reason)
+
+
+ @pytest.fixture(name="local_sandbox_url", scope="session")
+ def local_sandbox_url_fixture(fail_missing: bool) -> str:
+ """Check if sandbox server is running before running tests."""
+ import requests
+ url = os.environ.get("NAT_CI_SANDBOX_URL", "http://127.0.0.1:6000")
+ try:
+ response = requests.get(url, timeout=5)
+ response.raise_for_status()
+ return url
+ except Exception:
+ reason = (f"Sandbox server is not running at {url}. "
+ "Please start it with: cd src/nat/tool/code_execution/local_sandbox && ./start_local_sandbox.sh")
+ if fail_missing:
+ raise RuntimeError(reason)
+ pytest.skip(reason)
+
+
+ @pytest.fixture(name="sandbox_config", scope="session")
+ def sandbox_config_fixture(local_sandbox_url: str) -> dict[str, typing.Any]:
+ """Configuration for sandbox testing."""
+ return {
+ "base_url": local_sandbox_url,
+ "execute_url": f"{local_sandbox_url.rstrip('/')}/execute",
+ "timeout": int(os.environ.get("SANDBOX_TIMEOUT", "30")),
+ "connection_timeout": 5
+ }
+
+
+ @pytest.fixture(name="piston_url", scope="session")
+ def piston_url_fixture(fail_missing: bool) -> str:
+ """
+ Configuration for piston testing.
+
+ The public piston server limits usage to five requests per minute.
+ """
+ import requests
+ url = os.environ.get("NAT_CI_PISTON_URL", "https://emkc.org/api/v2/piston")
+ try:
+ response = requests.get(f"{url.rstrip('/')}/runtimes", timeout=30)
+ response.raise_for_status()
+ return url
+ except Exception:
+ reason = (f"Piston server is not running at {url}. "
+ "Please start it with: cd src/nat/tool/code_execution/local_sandbox && ./start_local_sandbox.sh")
+ if fail_missing:
+ raise RuntimeError(reason)
+ pytest.skip(reason)
+
+
+ @pytest.fixture(autouse=True, scope="session")
+ def import_adk_early():
+ """
+ Import ADK early to work around a slow import issue (https://github.com/google/adk-python/issues/2433):
+ when ADK is imported early it takes about 8 seconds, however if we wait until the `packages/nvidia_nat_adk/tests`
+ run, the same import will take about 70 seconds.
+
+ Since ADK is an optional dependency, we will ignore any import errors.
+ """
+ try:
+ import google.adk # noqa: F401
+ except ImportError:
+ pass
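The sandbox fixtures only surface connection details and timeouts; a hedged sketch of a test exercising the execute endpoint with them (the request payload shape is illustrative, not taken from the sandbox API):

import requests

def test_sandbox_is_reachable(sandbox_config: dict):
    # Uses the URLs and timeouts assembled by sandbox_config_fixture; the payload
    # below is an illustrative shape, not the sandbox server's documented schema.
    payload = {"generated_code": "print('hello')", "timeout": 10}
    response = requests.post(sandbox_config["execute_url"],
                             json=payload,
                             timeout=sandbox_config["timeout"])
    assert response.status_code < 500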
nat/test/tool_test_runner.py CHANGED
@@ -29,18 +29,21 @@ from nat.builder.function import FunctionGroup
  from nat.builder.function_info import FunctionInfo
  from nat.cli.type_registry import GlobalTypeRegistry
  from nat.data_models.authentication import AuthProviderBaseConfig
+ from nat.data_models.component_ref import MiddlewareRef
  from nat.data_models.embedder import EmbedderBaseConfig
  from nat.data_models.function import FunctionBaseConfig
  from nat.data_models.function import FunctionGroupBaseConfig
  from nat.data_models.function_dependencies import FunctionDependencies
  from nat.data_models.llm import LLMBaseConfig
  from nat.data_models.memory import MemoryBaseConfig
+ from nat.data_models.middleware import FunctionMiddlewareBaseConfig
  from nat.data_models.object_store import ObjectStoreBaseConfig
  from nat.data_models.retriever import RetrieverBaseConfig
  from nat.data_models.ttc_strategy import TTCStrategyBaseConfig
  from nat.experimental.test_time_compute.models.stage_enums import PipelineTypeEnum
  from nat.experimental.test_time_compute.models.stage_enums import StageTypeEnum
  from nat.memory.interfaces import MemoryEditor
+ from nat.middleware import FunctionMiddleware
  from nat.object_store.interfaces import ObjectStore
  from nat.runtime.loader import PluginTypes
  from nat.runtime.loader import discover_and_register_plugins
@@ -289,6 +292,19 @@ class MockBuilder(Builder):
  """Mock implementation."""
  return FunctionDependencies()

+ async def get_middleware(self, middleware_name: str | MiddlewareRef) -> FunctionMiddleware:
+ """Mock implementation."""
+ return FunctionMiddleware()
+
+ def get_middleware_config(self, middleware_name: str | MiddlewareRef) -> FunctionMiddlewareBaseConfig:
+ """Mock implementation."""
+ return FunctionMiddlewareBaseConfig()
+
+ async def add_middleware(self, name: str | MiddlewareRef,
+ config: FunctionMiddlewareBaseConfig) -> FunctionMiddleware:
+ """Mock implementation."""
+ return FunctionMiddleware()
+


  class ToolTestRunner:
  """
nat/test/utils.py CHANGED
@@ -68,27 +68,20 @@ def locate_example_config(example_config_class: type,
  return config_path


- async def run_workflow(
- *,
- config: "Config | None" = None,
- config_file: "StrPath | None" = None,
- question: str,
- expected_answer: str,
- assert_expected_answer: bool = True,
- ) -> str:
- from nat.builder.workflow_builder import WorkflowBuilder
- from nat.runtime.loader import load_config
- from nat.runtime.session import SessionManager
-
- if config is None:
- assert config_file is not None, "Either config_file or config must be provided"
- assert Path(config_file).exists(), f"Config file {config_file} does not exist"
- config = load_config(config_file)
-
- async with WorkflowBuilder.from_config(config=config) as workflow_builder:
- workflow = SessionManager(await workflow_builder.build())
- async with workflow.run(question) as runner:
- result = await runner.result(to_type=str)
+ async def run_workflow(*,
+ config: "Config | None" = None,
+ config_file: "StrPath | None" = None,
+ question: str,
+ expected_answer: str,
+ assert_expected_answer: bool = True,
+ **kwargs) -> str:
+ """
+ Test specific wrapper for `nat.utils.run_workflow` to run a workflow with a question and validate the expected
+ answer. This variant always sets the result type to `str`.
+ """
+ from nat.utils import run_workflow as nat_run_workflow
+
+ result = await nat_run_workflow(config=config, config_file=config_file, prompt=question, to_type=str, **kwargs)

  if assert_expected_answer:
  assert expected_answer.lower() in result.lower(), f"Expected '{expected_answer}' in '{result}'"
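The test wrapper now delegates workflow loading and execution to nat.utils.run_workflow and keeps only the string coercion and answer assertion. A hedged usage sketch (the config path and question are illustrative):

import pytest

from nat.test.utils import run_workflow

@pytest.mark.asyncio
async def test_example_workflow():
    # Loads the workflow config, runs it with the question, and asserts the expected
    # answer appears (case-insensitively) in the result.
    await run_workflow(config_file="configs/workflow.yml",  # illustrative path
                       question="What is 2 + 2?",
                       expected_answer="4")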
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nvidia-nat-test
- Version: 1.4.0a20251023
+ Version: 1.4.0a20251129
  Summary: Testing utilities for NeMo Agent toolkit
  Author: NVIDIA Corporation
  Maintainer: NVIDIA Corporation
@@ -16,7 +16,7 @@ Requires-Python: <3.14,>=3.11
  Description-Content-Type: text/markdown
  License-File: LICENSE-3rd-party.txt
  License-File: LICENSE.md
- Requires-Dist: nvidia-nat==v1.4.0a20251023
+ Requires-Dist: nvidia-nat==v1.4.0a20251129
  Requires-Dist: langchain-community~=0.3
  Requires-Dist: pytest~=8.3
  Dynamic: license-file
@@ -2,17 +2,17 @@ nat/meta/pypi.md,sha256=LLKJHg5oN1-M9Pqfk3Bmphkk4O2TFsyiixuK5T0Y-gw,1100
  nat/test/__init__.py,sha256=_RnTJnsUucHvla_nYKqD4O4g8Bz0tcuDRzWk1bEhcy0,875
  nat/test/embedder.py,sha256=ClDyK1kna4hCBSlz71gK1B-ZjlwcBHTDQRekoNM81Bs,1809
  nat/test/functions.py,sha256=ZxXVzfaLBGOpR5qtmMrKU7q-M9-vVGGj3Xi5mrw4vHY,3557
- nat/test/llm.py,sha256=f6bz6arAQjhjuOKFrLfu_U1LbiyFzQmpM-q8b-WKSrU,9550
+ nat/test/llm.py,sha256=dbFoWFrSAlUoKm6QGfS4VJdrhgxwkXzm1oaFd6K7jnM,9926
  nat/test/memory.py,sha256=xki_A2yiMhEZuQk60K7t04QRqf32nQqnfzD5Iv7fkvw,1456
  nat/test/object_store_tests.py,sha256=PyJioOtoSzILPq6LuD-sOZ_89PIcgXWZweoHBQpK2zQ,4281
- nat/test/plugin.py,sha256=NV4JcUnqhBhQzO4sc3P8JDBlwWohmu5TYVCWRbNPwVo,20911
+ nat/test/plugin.py,sha256=HF25W2YPTiXaoIJggnZTstiTMaspQckvL_thQSseDEc,32434
  nat/test/register.py,sha256=o1BEA5fyxyFyCxXhQ6ArmtuNpgRyTEfvw6HdBgECPLI,897
- nat/test/tool_test_runner.py,sha256=SxavwXHkvCQDl_PUiiiqgvGfexKJJTeBdI5i1qk6AzI,21712
- nat/test/utils.py,sha256=Lml187P9SUP3IB_HhBaU1XNhiljcpOFFZOAxgQR1vQo,5936
- nvidia_nat_test-1.4.0a20251023.dist-info/licenses/LICENSE-3rd-party.txt,sha256=fOk5jMmCX9YoKWyYzTtfgl-SUy477audFC5hNY4oP7Q,284609
- nvidia_nat_test-1.4.0a20251023.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
- nvidia_nat_test-1.4.0a20251023.dist-info/METADATA,sha256=QQJbkpZC8mW0xYQ7mdoxuw2--olf1TaOVpzOtXP3AZ0,1925
- nvidia_nat_test-1.4.0a20251023.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- nvidia_nat_test-1.4.0a20251023.dist-info/entry_points.txt,sha256=7dOP9XB6iMDqvav3gYx9VWUwA8RrFzhbAa8nGeC8e4Y,99
- nvidia_nat_test-1.4.0a20251023.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
- nvidia_nat_test-1.4.0a20251023.dist-info/RECORD,,
+ nat/test/tool_test_runner.py,sha256=WDwIRo3160raBoEkj1-MgnLSCaaF2Ud_cARRIM3Qdag,22463
+ nat/test/utils.py,sha256=GyhxIZ1CcUPcc8RMRyCzpHBEwVifeqiGxT3c9Pp0KAU,5774
+ nvidia_nat_test-1.4.0a20251129.dist-info/licenses/LICENSE-3rd-party.txt,sha256=fOk5jMmCX9YoKWyYzTtfgl-SUy477audFC5hNY4oP7Q,284609
+ nvidia_nat_test-1.4.0a20251129.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+ nvidia_nat_test-1.4.0a20251129.dist-info/METADATA,sha256=9v96TNp_WS6cN3JQN9Aj2ecg2vupk_YPHatlqRYdcj4,1925
+ nvidia_nat_test-1.4.0a20251129.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ nvidia_nat_test-1.4.0a20251129.dist-info/entry_points.txt,sha256=7dOP9XB6iMDqvav3gYx9VWUwA8RrFzhbAa8nGeC8e4Y,99
+ nvidia_nat_test-1.4.0a20251129.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
+ nvidia_nat_test-1.4.0a20251129.dist-info/RECORD,,