nvbenjo 0.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. nvbenjo-0.0.1/.dockerignore +16 -0
  2. nvbenjo-0.0.1/.github/workflows/release.yml +30 -0
  3. nvbenjo-0.0.1/.github/workflows/ruff.yml +33 -0
  4. nvbenjo-0.0.1/.github/workflows/test.yml +27 -0
  5. nvbenjo-0.0.1/.gitignore +21 -0
  6. nvbenjo-0.0.1/.python-version +1 -0
  7. nvbenjo-0.0.1/.vscode/launch.json +32 -0
  8. nvbenjo-0.0.1/Dockerfile +26 -0
  9. nvbenjo-0.0.1/LICENSE +7 -0
  10. nvbenjo-0.0.1/PKG-INFO +72 -0
  11. nvbenjo-0.0.1/README.md +45 -0
  12. nvbenjo-0.0.1/docs/Makefile +20 -0
  13. nvbenjo-0.0.1/docs/conf.py +86 -0
  14. nvbenjo-0.0.1/docs/configuration.md +34 -0
  15. nvbenjo-0.0.1/docs/examples.md +41 -0
  16. nvbenjo-0.0.1/docs/index.md +73 -0
  17. nvbenjo-0.0.1/docs/make.bat +35 -0
  18. nvbenjo-0.0.1/docs/python_api.md +37 -0
  19. nvbenjo-0.0.1/noxfile.py +66 -0
  20. nvbenjo-0.0.1/pyproject.toml +67 -0
  21. nvbenjo-0.0.1/setup.cfg +4 -0
  22. nvbenjo-0.0.1/src/nvbenjo/__init__.py +14 -0
  23. nvbenjo-0.0.1/src/nvbenjo/benchmark.py +441 -0
  24. nvbenjo-0.0.1/src/nvbenjo/cfg.py +327 -0
  25. nvbenjo-0.0.1/src/nvbenjo/cli.py +95 -0
  26. nvbenjo-0.0.1/src/nvbenjo/conf/bert.yaml +22 -0
  27. nvbenjo-0.0.1/src/nvbenjo/conf/complex_example.yaml +56 -0
  28. nvbenjo-0.0.1/src/nvbenjo/conf/default.yaml +12 -0
  29. nvbenjo-0.0.1/src/nvbenjo/conf/example.yaml +56 -0
  30. nvbenjo-0.0.1/src/nvbenjo/conf/huggingface.yaml +123 -0
  31. nvbenjo-0.0.1/src/nvbenjo/conf/hydra/help/nvbenjo_app_help.yaml +37 -0
  32. nvbenjo-0.0.1/src/nvbenjo/conf/onnx.yaml +20 -0
  33. nvbenjo-0.0.1/src/nvbenjo/conf/onnxsilero.yaml +30 -0
  34. nvbenjo-0.0.1/src/nvbenjo/conf/small.yaml +21 -0
  35. nvbenjo-0.0.1/src/nvbenjo/conf/torchvision.yaml +50 -0
  36. nvbenjo-0.0.1/src/nvbenjo/onnx_utils.py +348 -0
  37. nvbenjo-0.0.1/src/nvbenjo/plot.py +179 -0
  38. nvbenjo-0.0.1/src/nvbenjo/plugin.py +9 -0
  39. nvbenjo-0.0.1/src/nvbenjo/system_info.py +110 -0
  40. nvbenjo-0.0.1/src/nvbenjo/torch_utils.py +332 -0
  41. nvbenjo-0.0.1/src/nvbenjo/utils.py +196 -0
  42. nvbenjo-0.0.1/src/nvbenjo.egg-info/PKG-INFO +72 -0
  43. nvbenjo-0.0.1/src/nvbenjo.egg-info/SOURCES.txt +60 -0
  44. nvbenjo-0.0.1/src/nvbenjo.egg-info/dependency_links.txt +1 -0
  45. nvbenjo-0.0.1/src/nvbenjo.egg-info/entry_points.txt +2 -0
  46. nvbenjo-0.0.1/src/nvbenjo.egg-info/requires.txt +19 -0
  47. nvbenjo-0.0.1/src/nvbenjo.egg-info/top_level.txt +1 -0
  48. nvbenjo-0.0.1/tests/__init__.py +0 -0
  49. nvbenjo-0.0.1/tests/conf/default.yaml +6 -0
  50. nvbenjo-0.0.1/tests/conf/input_min_max.yaml +18 -0
  51. nvbenjo-0.0.1/tests/conf/small_single.yaml +28 -0
  52. nvbenjo-0.0.1/tests/conf/torch_load.yaml +15 -0
  53. nvbenjo-0.0.1/tests/conf/torch_load_multiinput.yaml +12 -0
  54. nvbenjo-0.0.1/tests/test_cli.py +340 -0
  55. nvbenjo-0.0.1/tests/test_onnx_cli.py +87 -0
  56. nvbenjo-0.0.1/tests/test_onnx_utils.py +93 -0
  57. nvbenjo-0.0.1/tests/test_system_info.py +27 -0
  58. nvbenjo-0.0.1/tests/test_torch_python_api.py +42 -0
  59. nvbenjo-0.0.1/tests/test_torch_utils.py +73 -0
  60. nvbenjo-0.0.1/tests/test_utils.py +88 -0
  61. nvbenjo-0.0.1/todos.txt +28 -0
  62. nvbenjo-0.0.1/uv.lock +2803 -0
@@ -0,0 +1,16 @@
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ .ruff_cache
10
+ .venv
11
+ .vscode
12
+ build
13
+
14
+
15
+ Dockerfile
16
+ outputs
@@ -0,0 +1,30 @@
1
+ name: release
2
+
3
+ on:
4
+ release:
5
+ types:
6
+ - published
7
+
8
+ jobs:
9
+ publish:
10
+ runs-on: ubuntu-latest
11
+ permissions:
12
+ id-token: write
13
+ steps:
14
+ - uses: actions/checkout@v4
15
+
16
+ - name: Install uv
17
+ uses: astral-sh/setup-uv@v5
18
+
19
+ - name: Install the project with onnx
20
+ run: uv sync --extra onnx-cpu --dev
21
+
22
+ - name: Build
23
+ run: |
24
+ # This strips the 'v' from the tag (e.g., v1.0.0 -> 1.0.0)
25
+ TAG_NAME="${{ github.event.release.tag_name }}"
26
+ export SETUPTOOLS_SCM_PRETEND_VERSION="${TAG_NAME#v}"
27
+ uv build
28
+
29
+ - name: Publish package
30
+ run: uv publish
@@ -0,0 +1,33 @@
1
+ name: Ruff
2
+ on: [push, pull_request]
3
+ jobs:
4
+ ruff-check:
5
+ runs-on: ubuntu-latest
6
+ steps:
7
+ - uses: actions/checkout@v4
8
+ - uses: astral-sh/ruff-action@v3
9
+ with:
10
+ src: "./src/nvbenjo"
11
+
12
+ ruff-format:
13
+ runs-on: ubuntu-latest
14
+ steps:
15
+ - uses: actions/checkout@v4
16
+ - uses: astral-sh/ruff-action@v3
17
+ with:
18
+ args: "format --check"
19
+ src: "./src/nvbenjo"
20
+
21
+ ty-check:
22
+ runs-on: ubuntu-latest
23
+ steps:
24
+ - uses: actions/checkout@v4
25
+
26
+ - name: Install uv
27
+ uses: astral-sh/setup-uv@v5
28
+
29
+ - name: Install the project
30
+ run: uv sync --extra onnx-cpu
31
+
32
+ - name: Run ty
33
+ run: uv run ty check .
@@ -0,0 +1,27 @@
1
+ name: Test
2
+ on: [push, pull_request]
3
+ jobs:
4
+ pytest:
5
+ runs-on: ubuntu-latest
6
+ steps:
7
+ - uses: actions/checkout@v4
8
+
9
+ - name: Install uv
10
+ uses: astral-sh/setup-uv@v5
11
+
12
+ - name: "Set up Python"
13
+ uses: actions/setup-python@v5
14
+ with:
15
+ python-version-file: "pyproject.toml"
16
+
17
+ - name: Install the project with onnx
18
+ run: uv sync --extra onnx-cpu
19
+
20
+ - name: Run tests
21
+ run: uv run pytest
22
+
23
+ - name: Install the project without onnx
24
+ run: uv sync
25
+
26
+ - name: Run tests (without onnx)
27
+ run: uv run pytest
@@ -0,0 +1,21 @@
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+ .coverage
9
+
10
+ # Log files
11
+ *.log
12
+
13
+ # Virtual environments
14
+ .venv
15
+
16
+ # Hydra outputs
17
+ nvbenjo/outputs
18
+ outputs
19
+
20
+ # Sphinx build files
21
+ docs/_build/
@@ -0,0 +1 @@
1
+ 3.12
@@ -0,0 +1,32 @@
1
+ {
2
+ // Use IntelliSense to learn about possible attributes.
3
+ // Hover to view descriptions of existing attributes.
4
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5
+ "version": "0.2.0",
6
+ "configurations": [
7
+ {
8
+ "name": "debug",
9
+ "type": "debugpy",
10
+ "request": "launch",
11
+ "module": "nvbenjo.cli",
12
+ "console": "integratedTerminal",
13
+ "cwd": "${workspaceFolder}",
14
+ "args": [
15
+ "-cn", "/home/lukas/Documents/git-repos/nvbenjo/src/nvbenjo/conf/onnxsilero.yaml"
16
+ ]
17
+ },
18
+ {
19
+ "name": "debug python test",
20
+ "type": "debugpy",
21
+ "request": "launch",
22
+ "justMyCode": true,
23
+ "console": "integratedTerminal",
24
+ "env": {},
25
+ "cwd": "${workspaceFolder}",
26
+ "module": "pytest",
27
+ "args": [
28
+ "tests/test_cli.py::test_cli_cn_path_arg"
29
+ ]
30
+ },
31
+ ]
32
+ }
@@ -0,0 +1,26 @@
1
+ FROM nvidia/cuda:12.8.1-devel-ubuntu22.04
2
+
3
+ # Nvbandwidth
4
+ RUN apt-get update && apt-get install -y \
5
+ git \
6
+ cmake \
7
+ build-essential \
8
+ libboost-program-options-dev && \
9
+ rm -rf /var/lib/apt/lists/*
10
+
11
+ RUN git clone https://github.com/NVIDIA/nvbandwidth.git
12
+ WORKDIR /nvbandwidth
13
+ RUN mkdir build && cd build && cmake .. && make
14
+ ENV PATH="/nvbandwidth/build:$PATH"
15
+
16
+ # get uv
17
+ COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /bin/
18
+ ENV PATH="/root/.local/bin:${PATH}"
19
+
20
+ # install nvbenjo
21
+ RUN which uv
22
+ COPY . /nvbenjo
23
+ RUN --mount=type=cache,target=/root/.cache/uv \
24
+ uv tool install /nvbenjo
25
+
26
+ CMD ["nvbenjo"]
nvbenjo-0.0.1/LICENSE ADDED
@@ -0,0 +1,7 @@
1
+ Copyright 2026 lukas-jkl
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4
+
5
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6
+
7
+ THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
nvbenjo-0.0.1/PKG-INFO ADDED
@@ -0,0 +1,72 @@
1
+ Metadata-Version: 2.4
2
+ Name: nvbenjo
3
+ Version: 0.0.1
4
+ Summary: Nvbenjo is a utility for benchmarking inference of deep learning models on NVIDIA GPUs.
5
+ Author: lukas_jkl
6
+ Requires-Python: <3.15,>=3.10
7
+ Description-Content-Type: text/markdown
8
+ License-File: LICENSE
9
+ Requires-Dist: torchvision
10
+ Requires-Dist: torchaudio
11
+ Requires-Dist: nvitop
12
+ Requires-Dist: psutil
13
+ Requires-Dist: py-cpuinfo
14
+ Requires-Dist: hydra-core
15
+ Requires-Dist: hydra_colorlog
16
+ Requires-Dist: nvidia-ml-py
17
+ Requires-Dist: pandas
18
+ Requires-Dist: seaborn
19
+ Requires-Dist: rich
20
+ Provides-Extra: onnx-cpu
21
+ Requires-Dist: onnx>=1.14.0; extra == "onnx-cpu"
22
+ Requires-Dist: onnxruntime>=1.15.0; extra == "onnx-cpu"
23
+ Provides-Extra: onnx-gpu
24
+ Requires-Dist: onnx>=1.14.0; extra == "onnx-gpu"
25
+ Requires-Dist: onnxruntime-gpu>=1.15.0; extra == "onnx-gpu"
26
+ Dynamic: license-file
27
+
28
+ # Nvbenjo
29
+
30
+ [![Nox](https://img.shields.io/badge/%F0%9F%A6%8A-Nox-D85E00.svg)](https://github.com/wntrblm/nox)
31
+ ![Ruff](https://github.com/lukas-jkl/nvbenjo/actions/workflows/ruff.yml/badge.svg)
32
+ ![Tests](https://github.com/lukas-jkl/nvbenjo/actions/workflows/test.yml/badge.svg)
33
+
34
+ Nvbenjo is a utility for benchmarking inference of deep learning models on NVIDIA GPUs.
35
+ It supports models in [Onnx](https://onnx.ai/) format as well as [PyTorch](https://pytorch.org/) models.
36
+
37
+ ## Usage
38
+
39
+ ```bash
40
+ # Specify models to run in the command line
41
+ nvbenjo \
42
+ "+nvbenjo.models={\
43
+ efficientnet: {type_or_path: 'torchvision:efficientnet_b0', shape:['B',3,224,224], batch_sizes: [16,32]},\
44
+ resnet: {type_or_path: 'torchvision:wide_resnet101_2', shape: ['B',3,224,224], batch_sizes: [16,32]}\
45
+ }"
46
+
47
+ # or better, specify your own config (or one of the pre-defined config files)
48
+ nvbenjo -cn small
49
+ nvbenjo -cn="/my/config/path/myconfig.yaml"
50
+
51
+ # override single arguments of your config
52
+ nvbenjo -cn="/my/config/path/myconfig.yaml" nvbenjo.models.mymodel.num_batches=10
53
+
54
+ # show current config and help
55
+ nvbenjo -cn="/my/config/path/myconfig.yaml" --help
56
+ ```
57
+
58
+ ## Development
59
+
60
+ Example using uv:
61
+
62
+ ```bash
63
+ uv sync --extra dev --extra onnx-cpu # or gpu
64
+ uv run nvbenjo
65
+
66
+ # for a quick run
67
+ uv run nvbenjo -cn small
68
+
69
+ # tests
70
+ uv run pytest
71
+ uv run nox
72
+ ```
@@ -0,0 +1,45 @@
1
+ # Nvbenjo
2
+
3
+ [![Nox](https://img.shields.io/badge/%F0%9F%A6%8A-Nox-D85E00.svg)](https://github.com/wntrblm/nox)
4
+ ![Ruff](https://github.com/lukas-jkl/nvbenjo/actions/workflows/ruff.yml/badge.svg)
5
+ ![Tests](https://github.com/lukas-jkl/nvbenjo/actions/workflows/test.yml/badge.svg)
6
+
7
+ Nvbenjo is a utility for benchmarking inference of deep learning models on NVIDIA GPUs.
8
+ It supports models in [Onnx](https://onnx.ai/) format as well as [PyTorch](https://pytorch.org/) models.
9
+
10
+ ## Usage
11
+
12
+ ```bash
13
+ # Specify models to run in the command line
14
+ nvbenjo \
15
+ "+nvbenjo.models={\
16
+ efficientnet: {type_or_path: 'torchvision:efficientnet_b0', shape:['B',3,224,224], batch_sizes: [16,32]},\
17
+ resnet: {type_or_path: 'torchvision:wide_resnet101_2', shape: ['B',3,224,224], batch_sizes: [16,32]}\
18
+ }"
19
+
20
+ # or better, specify your own config (or one of the pre-defined config files)
21
+ nvbenjo -cn small
22
+ nvbenjo -cn="/my/config/path/myconfig.yaml"
23
+
24
+ # override single arguments of your config
25
+ nvbenjo -cn="/my/config/path/myconfig.yaml" nvbenjo.models.mymodel.num_batches=10
26
+
27
+ # show current config and help
28
+ nvbenjo -cn="/my/config/path/myconfig.yaml" --help
29
+ ```
30
+
31
+ ## Development
32
+
33
+ Example using uv:
34
+
35
+ ```bash
36
+ uv sync --extra dev --extra onnx-cpu # or gpu
37
+ uv run nvbenjo
38
+
39
+ # for a quick run
40
+ uv run nvbenjo -cn small
41
+
42
+ # tests
43
+ uv run pytest
44
+ uv run nox
45
+ ```
@@ -0,0 +1,20 @@
1
+ # Minimal makefile for Sphinx documentation
2
+ #
3
+
4
+ # You can set these variables from the command line, and also
5
+ # from the environment for the first two.
6
+ SPHINXOPTS ?=
7
+ SPHINXBUILD ?= sphinx-build
8
+ SOURCEDIR = .
9
+ BUILDDIR = _build
10
+
11
+ # Put it first so that "make" without argument is like "make help".
12
+ help:
13
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14
+
15
+ .PHONY: help Makefile
16
+
17
+ # Catch-all target: route all unknown targets to Sphinx using the new
18
+ # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19
+ %: Makefile
20
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@@ -0,0 +1,86 @@
1
+ # Configuration file for the Sphinx documentation builder.
2
+ #
3
+ # For the full list of built-in configuration values, see the documentation:
4
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html
5
+
6
+ import os
7
+ import sys
8
+
9
+ # Add the src directory to the path so Sphinx can find nvbenjo
10
+ sys.path.insert(0, os.path.abspath("../src"))
11
+
12
+ # -- Project information -----------------------------------------------------
13
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
14
+
15
+ project = "Nvbenjo"
16
+ copyright = "2025, lukas-jkl"
17
+ author = "lukas-jkl"
18
+
19
+ # -- General configuration ---------------------------------------------------
20
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
21
+
22
+ extensions = [
23
+ "sphinx.ext.autodoc", # Auto-generate docs from docstrings
24
+ "sphinx.ext.napoleon", # Support for NumPy/Google style docstrings
25
+ "sphinx.ext.viewcode", # Add links to source code
26
+ "sphinx.ext.intersphinx", # Link to other projects' documentation
27
+ "myst_parser",
28
+ "sphinx_copybutton",
29
+ ]
30
+ source_suffix = {
31
+ ".rst": "restructuredtext",
32
+ ".md": "markdown",
33
+ }
34
+
35
+ templates_path = ["_templates"]
36
+ exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
37
+ # Napoleon settings for better NumPy-style formatting
38
+ napoleon_google_docstring = False
39
+ napoleon_numpy_docstring = True
40
+ napoleon_use_param = True
41
+ napoleon_use_rtype = True
42
+ napoleon_preprocess_types = True
43
+ napoleon_type_aliases = {
44
+ "TensorLike": "nvbenjo.utils.TensorLike",
45
+ "array_like": ":term:`array_like`",
46
+ }
47
+ napoleon_attr_annotations = True
48
+
49
+ # Add NumPy intersphinx for better links
50
+ intersphinx_mapping = {
51
+ "python": ("https://docs.python.org/3", None),
52
+ "torch": ("https://pytorch.org/docs/stable/", None),
53
+ "numpy": ("https://numpy.org/doc/stable/", None),
54
+ "pandas": ("https://pandas.pydata.org/docs/", None),
55
+ }
56
+ # -- Options for HTML output -------------------------------------------------
57
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
58
+
59
+ html_theme = "pydata_sphinx_theme"
60
+ html_static_path = ["_static"]
61
+
62
+ # PyData theme options
63
+ html_theme_options = {
64
+ "github_url": "https://github.com/lukas-jkl/nvbenjo",
65
+ "show_nav_level": 2,
66
+ "navigation_depth": 3,
67
+ "show_toc_level": 2,
68
+ }
69
+ # Remove left sidebar (don't need it for now)
70
+ html_sidebars = {"**": []}
71
+ html_show_sourcelink = False
72
+ html_copy_source = False
73
+
74
+ # -- Options for autodoc extension -------------------------------------------
75
+ autodoc_default_options = {
76
+ "member-order": "bysource",
77
+ "members": False,
78
+ "undoc-members": False,
79
+ "exclude-members": "__weakref__, __init__",
80
+ "inherited-members": False,
81
+ }
82
+
83
+ # Show type hints in signature only
84
+ autodoc_typehints = "description"
85
+ autodoc_typehints_format = "short"
86
+ autodoc_class_signature = "mixed"
@@ -0,0 +1,34 @@
1
+ # Configuration
2
+
3
+ Nvbenjo uses [Hydra](https://hydra.cc/) for configuration using the dataclasses listed below.
4
+
5
+ ## Main configuration classes
6
+
7
+ ```{eval-rst}
8
+ .. autoclass:: nvbenjo.cfg.BenchConfig
9
+ :members:
10
+
11
+ .. autoclass:: nvbenjo.cfg.NvbenjoConfig
12
+ :members:
13
+ ```
14
+
15
+ ## Pytorch
16
+
17
+ ```{eval-rst}
18
+ .. autoclass:: nvbenjo.cfg.TorchModelConfig
19
+ :members:
20
+
21
+ .. autoclass:: nvbenjo.cfg.TorchRuntimeConfig
22
+ :members:
23
+
24
+ ```
25
+
26
+ ## Onnx
27
+
28
+ ```{eval-rst}
29
+ .. autoclass:: nvbenjo.cfg.OnnxModelConfig
30
+ :members:
31
+
32
+ .. autoclass:: nvbenjo.cfg.OnnxRuntimeConfig
33
+ :members:
34
+ ```
@@ -0,0 +1,41 @@
1
+ # Examples
2
+
3
+
4
+ ```{eval-rst}
5
+ .. literalinclude:: ../src/nvbenjo/conf/example.yaml
6
+ :language: yaml
7
+ :caption: Example Configuration
8
+ ```
9
+
10
+ ## Onnx
11
+
12
+ ```{eval-rst}
13
+ .. literalinclude:: ../src/nvbenjo/conf/onnx.yaml
14
+ :language: yaml
15
+ :caption: Example Configuration of Onnx Model
16
+ ```
17
+
18
+ ```{eval-rst}
19
+ .. literalinclude:: ../src/nvbenjo/conf/onnxsilero.yaml
20
+ :language: yaml
21
+ :caption: Example Configuration of Onnx-Silero Model
22
+ ```
23
+
24
+
25
+ ## PyTorch
26
+
27
+ ### Torchvision
28
+
29
+ ```{eval-rst}
30
+ .. literalinclude:: ../src/nvbenjo/conf/torchvision.yaml
31
+ :language: yaml
32
+ :caption: Example Configuration of Torchvision Models
33
+ ```
34
+
35
+ ### Huggingface
36
+
37
+ ```{eval-rst}
38
+ .. literalinclude:: ../src/nvbenjo/conf/huggingface.yaml
39
+ :language: yaml
40
+ :caption: Example Configuration of Huggingface Models
41
+ ```
@@ -0,0 +1,73 @@
1
+ # Nvbenjo Documentation
2
+
3
+ Nvbenjo is a utility for benchmarking inference of deep learning models on NVIDIA GPUs.
4
+ It supports models in [Onnx](https://onnx.ai/) format as well as [PyTorch](https://pytorch.org/) models.
5
+
6
+ ```{toctree}
7
+ :maxdepth: 2
8
+ :caption: Contents
9
+
10
+ self
11
+ configuration
12
+ python_api
13
+ examples
14
+ ```
15
+
16
+ (usage)=
17
+ ## Usage
18
+ Nvbenjo can be used as a command-line tool and uses [hydra](https://hydra.cc/) configuration.
19
+
20
+ Specify configuration directly from the command line:
21
+ ```bash
22
+ nvbenjo \
23
+ "+nvbenjo.models={\
24
+ efficientnet: {type_or_path: 'torchvision:efficientnet_b0', shape:['B',3,224,224], batch_sizes: [16,32]},\
25
+ resnet: {type_or_path: 'torchvision:wide_resnet101_2', shape: ['B',3,224,224], batch_sizes: [16,32]}\
26
+ }"
27
+ ```
28
+
29
+ ### Usage with Config File
30
+
31
+ Or better, specify your own config (or one of the pre-defined config files)
32
+ ```
33
+ nvbenjo -cn small
34
+ nvbenjo -cn="/my/config/path/myconfig.yaml"
35
+ ```
36
+
37
+ Override single arguments of your config
38
+ ```
39
+ nvbenjo -cn="/my/config/path/myconfig.yaml" nvbenjo.models.mymodel.num_batches=10
40
+ ```
41
+
42
+
43
+ ```{eval-rst}
44
+ .. literalinclude:: ../src/nvbenjo/conf/example.yaml
45
+ :language: yaml
46
+ :caption: Example Configuration
47
+ ```
48
+
49
+ ### Usage with Python API
50
+
51
+ See the [Python API Reference](python_api.md) for detailed documentation of all available functions and classes, and the [Configuration Reference](configuration.md) for config objects.
52
+
53
+
54
+ ```python
55
+ from nvbenjo import cfg
56
+ from nvbenjo.utils import PrecisionType
57
+ from nvbenjo import benchmark
58
+
59
+
60
+ model_cfg = cfg.TorchModelConfig(
61
+ name="torch-shufflenet-v2-x0-5",
62
+ type_or_path="torchvision:shufflenet_v2_x0_5",
63
+ shape=(("B", 3, 224, 224),),
64
+ devices=["cpu"],
65
+ batch_sizes=[1],
66
+ num_warmup_batches=1,
67
+ num_batches=2,
68
+ runtime_options={
69
+ "test1": cfg.TorchRuntimeConfig(compile=False, precision=PrecisionType.FP32),
70
+ },
71
+ )
72
+ results = benchmark.benchmark_models({"model_1": model_cfg})
73
+ ```
@@ -0,0 +1,35 @@
1
+ @ECHO OFF
2
+
3
+ pushd %~dp0
4
+
5
+ REM Command file for Sphinx documentation
6
+
7
+ if "%SPHINXBUILD%" == "" (
8
+ set SPHINXBUILD=sphinx-build
9
+ )
10
+ set SOURCEDIR=.
11
+ set BUILDDIR=_build
12
+
13
+ %SPHINXBUILD% >NUL 2>NUL
14
+ if errorlevel 9009 (
15
+ echo.
16
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17
+ echo.installed, then set the SPHINXBUILD environment variable to point
18
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
19
+ echo.may add the Sphinx directory to PATH.
20
+ echo.
21
+ echo.If you don't have Sphinx installed, grab it from
22
+ echo.https://www.sphinx-doc.org/
23
+ exit /b 1
24
+ )
25
+
26
+ if "%1" == "" goto help
27
+
28
+ %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29
+ goto end
30
+
31
+ :help
32
+ %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33
+
34
+ :end
35
+ popd
@@ -0,0 +1,37 @@
1
+ # Python API Reference
2
+
3
+ ## Benchmark Module
4
+
5
+ ```{eval-rst}
6
+ .. autofunction:: nvbenjo.benchmark.benchmark_models
7
+
8
+ .. autofunction:: nvbenjo.benchmark.load_model
9
+ ```
10
+
11
+ ## PyTorch Utilities
12
+
13
+ ```{eval-rst}
14
+ .. autofunction:: nvbenjo.torch_utils.get_model
15
+
16
+ .. autofunction:: nvbenjo.torch_utils.measure_memory_allocation
17
+
18
+ .. autofunction:: nvbenjo.torch_utils.measure_repeated_inference_timing
19
+ ```
20
+
21
+ ## ONNX Utilities
22
+
23
+ ```{eval-rst}
24
+ .. autofunction:: nvbenjo.onnx_utils.get_model
25
+
26
+ .. autofunction:: nvbenjo.onnx_utils.measure_memory_allocation
27
+
28
+ .. autofunction:: nvbenjo.onnx_utils.measure_repeated_inference_timing
29
+ ```
30
+
31
+ ## System Information
32
+
33
+ ```{eval-rst}
34
+ .. autofunction:: nvbenjo.system_info.get_system_info
35
+
36
+ .. autofunction:: nvbenjo.system_info.get_gpu_info
37
+ ```