eizen-nsga 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Eizen.ai Team
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,5 @@
+ include README.md
+ include LICENSE
+ include pyproject.toml
+ recursive-include eizen_nsga *.py
+ recursive-include eizen_nsga/sota *.py
@@ -0,0 +1,191 @@
+ Metadata-Version: 2.4
+ Name: eizen-nsga
+ Version: 1.0.0
+ Summary: Simple inference package for NSGA-Net trained models
+ Home-page: https://github.com/eizen-ai/nsga-net
+ Author: Eizen.ai Team
+ Author-email: "Eizen.ai Team" <support@eizen.ai>
+ License: MIT
+ Project-URL: Homepage, https://eizen.ai
+ Project-URL: Repository, https://github.com/eizen-ai/nsga-net
+ Project-URL: Issues, https://github.com/eizen-ai/nsga-net/issues
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: torch>=1.9.0
+ Requires-Dist: torchvision>=0.10.0
+ Requires-Dist: numpy>=1.19.0
+ Requires-Dist: pillow>=8.0.0
+ Provides-Extra: sota
+ Requires-Dist: ultralytics>=8.0.0; extra == "sota"
+ Provides-Extra: nn
+ Requires-Dist: pandas>=1.0.0; extra == "nn"
+ Requires-Dist: scikit-learn>=0.24.0; extra == "nn"
+ Provides-Extra: transformer
+ Requires-Dist: transformers>=4.0.0; extra == "transformer"
+ Requires-Dist: tokenizers>=0.10.0; extra == "transformer"
+ Provides-Extra: all
+ Requires-Dist: ultralytics>=8.0.0; extra == "all"
+ Requires-Dist: pandas>=1.0.0; extra == "all"
+ Requires-Dist: scikit-learn>=0.24.0; extra == "all"
+ Requires-Dist: transformers>=4.0.0; extra == "all"
+ Requires-Dist: tokenizers>=0.10.0; extra == "all"
+ Dynamic: author
+ Dynamic: home-page
+ Dynamic: license-file
+ Dynamic: requires-python
+
+ # eizen-nsga
+
+ **Standalone** inference package for NSGA-Net trained models. Works just like Ultralytics YOLO.
+
+ ✅ **Fully independent** - no dependency on the NSGA-Net training codebase
+ ✅ **Simple API** - load a model from a ZIP file or URL and run inference
+ ✅ **PyPI ready** - publish and use anywhere
+ ✅ **Supports URLs** - load models from HTTP, S3, GCS, and more
+
+ ## Installation
+
+ ```bash
+ # For SOTA (computer vision) models
+ pip install eizen-nsga[sota]
+
+ # For NN (tabular) models
+ pip install eizen-nsga[nn]
+
+ # For Transformer (LLM) models
+ pip install eizen-nsga[transformer]
+
+ # Install all dependencies
+ pip install eizen-nsga[all]
+ ```
+
+ ## Quick Start
+
+ ```python
+ from eizen_nsga import NASModel
+
+ # Load model from local ZIP file
+ model = NASModel("trained_model.zip")
+
+ # Or load from URL (HTTP, HTTPS, S3, GCS, etc.)
+ model = NASModel("https://example.com/models/model.zip")
+ model = NASModel("s3://mybucket/models/model.zip")
+
+ # Run inference (accepts file path, PIL Image, or numpy array)
+ result = model.predict("image.jpg")
+
+ # Classification results
+ print(f"Top class: {result['class']}")
+ print(f"Confidence: {result['confidence']:.2%}")
+ print(f"Top 5 predictions: {result['predictions']}")
+
+ # Or call directly like YOLO
+ result = model("image.jpg")
+ ```
+
+ ## Detection Models
+
+ ```python
+ from eizen_nsga import NASModel
+
+ # Load detection model
+ model = NASModel("detector.zip")
+
+ # Run detection
+ result = model.predict("image.jpg", conf_threshold=0.3, iou_threshold=0.45)
+
+ # Detection results
+ print(f"Found {result['count']} objects")
+ for det in result['detections']:
+     print(f"  Box: {det['box']}, Class: {det['class']}, Conf: {det['confidence']}")
+ ```
+
+ ## Model Info
+
+ ```python
+ # Get model information
+ info = model.info()
+ print(f"Category: {info['category']}")
+ print(f"Task: {info['task']}")
+ print(f"Backbone: {info['backbone']}")
+ print(f"Classes: {info['num_classes']}")
+ ```
+
+ ## Device Selection
+
+ ```python
+ # Auto-detect device (default)
+ model = NASModel("model.zip")
+
+ # Specify device
+ model = NASModel("model.zip", device="cuda")
+ model = NASModel("model.zip", device="cpu")
+
+ # Move to different device
+ model.to("cuda")
+ ```
+
+ ## How It Works
+
+ 1. **Extract**: Automatically extracts model ZIP file
+ 2. **Parse**: Reads `log.txt` to get model configuration (genome, backbone, etc.)
+ 3. **Build**: Constructs model architecture from genome encoding
+ 4. **Load**: Loads trained weights from `weights.pt`
+ 5. **Predict**: Runs inference on your images
+
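+ As a rough illustration of what `NASModel` does behind these five steps, the sketch below walks through them with the standard library and PyTorch. It is a simplified approximation, not the package's actual loader: the exact `log.txt` format and the genome-to-architecture builder are internal to `eizen_nsga` and are not reproduced here, and the file paths are only examples.
+
+ ```python
+ import zipfile
+ from pathlib import Path
+
+ import torch
+
+ # 1. Extract: unpack the model ZIP
+ zip_path = Path("trained_model.zip")
+ extract_dir = zip_path.with_suffix("")
+ with zipfile.ZipFile(zip_path) as zf:
+     zf.extractall(extract_dir)
+
+ # 2. Parse: read the configuration log (its exact format is package-specific)
+ log_text = (extract_dir / "log.txt").read_text()
+
+ # 3. Build: eizen_nsga reconstructs the architecture from the genome
+ #    encoding recorded in log.txt; that builder is internal to the package.
+
+ # 4. Load: the trained weights are a regular PyTorch checkpoint
+ checkpoint = torch.load(extract_dir / "weights.pt", map_location="cpu")
+
+ # 5. Predict: NASModel bundles all of the above behind a single call
+ from eizen_nsga import NASModel
+ model = NASModel(str(zip_path))
+ result = model.predict("image.jpg")
+ ```
+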
+ ## Supported Model Categories
+
+ - **SOTA**: Computer vision models (YOLOv8, ResNet, EfficientNet backbones) ✅
+ - **NN**: Tabular data models (coming soon)
+ - **Transformer**: LLM models (coming soon)
+
+ ## Model ZIP Structure
+
+ Your trained model ZIP should contain:
+ ```
+ model.zip
+ ├── weights.pt      # Trained model weights
+ └── log.txt         # Model configuration (genome, backbone, etc.)
+ ```
+
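+ If you need to assemble a ZIP with this layout yourself (for example from a training run's output folder), a minimal sketch using only the Python standard library is shown below. The `weights.pt` and `log.txt` names come from the structure above; the `runs/exp1` folder and output path are just example assumptions.
+
+ ```python
+ import zipfile
+ from pathlib import Path
+
+ run_dir = Path("runs/exp1")          # example folder holding the two files
+ out_zip = Path("trained_model.zip")  # archive that NASModel will load
+
+ with zipfile.ZipFile(out_zip, "w", compression=zipfile.ZIP_DEFLATED) as zf:
+     # Keep both files at the archive root, matching the layout above
+     zf.write(run_dir / "weights.pt", arcname="weights.pt")
+     zf.write(run_dir / "log.txt", arcname="log.txt")
+ ```
+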
+ ## Fully Standalone Package
+
+ This package bundles all the SOTA modules it needs internally:
+ - Model builders (micro/macro architectures)
+ - Genome encoders/decoders
+ - Neural operations
+ - Backbone registry
+ - Detection heads
+
+ No need to have the NSGA-Net training codebase installed!
+
+ ## Requirements
+
+ **Core dependencies:**
+ - torch >= 1.9.0
+ - torchvision >= 0.10.0
+ - numpy >= 1.19.0
+ - pillow >= 8.0.0
+
+ **Optional (for SOTA models):**
+ - ultralytics >= 8.0.0
+
+ ## License
+
+ MIT License - Copyright (c) 2024 Eizen.ai Team
+
+ ## Links
+
+ - Homepage: https://eizen.ai
+ - GitHub: https://github.com/eizen-ai/nsga-net
+ - Issues: https://github.com/eizen-ai/nsga-net/issues
@@ -0,0 +1,145 @@
+ # eizen-nsga
+
+ **Standalone** inference package for NSGA-Net trained models. Works just like Ultralytics YOLO.
+
+ ✅ **Fully independent** - no dependency on the NSGA-Net training codebase
+ ✅ **Simple API** - load a model from a ZIP file or URL and run inference
+ ✅ **PyPI ready** - publish and use anywhere
+ ✅ **Supports URLs** - load models from HTTP, S3, GCS, and more
+
+ ## Installation
+
+ ```bash
+ # For SOTA (computer vision) models
+ pip install eizen-nsga[sota]
+
+ # For NN (tabular) models
+ pip install eizen-nsga[nn]
+
+ # For Transformer (LLM) models
+ pip install eizen-nsga[transformer]
+
+ # Install all dependencies
+ pip install eizen-nsga[all]
+ ```
+
+ ## Quick Start
+
+ ```python
+ from eizen_nsga import NASModel
+
+ # Load model from local ZIP file
+ model = NASModel("trained_model.zip")
+
+ # Or load from URL (HTTP, HTTPS, S3, GCS, etc.)
+ model = NASModel("https://example.com/models/model.zip")
+ model = NASModel("s3://mybucket/models/model.zip")
+
+ # Run inference (accepts file path, PIL Image, or numpy array)
+ result = model.predict("image.jpg")
+
+ # Classification results
+ print(f"Top class: {result['class']}")
+ print(f"Confidence: {result['confidence']:.2%}")
+ print(f"Top 5 predictions: {result['predictions']}")
+
+ # Or call directly like YOLO
+ result = model("image.jpg")
+ ```
+
+ ## Detection Models
+
+ ```python
+ from eizen_nsga import NASModel
+
+ # Load detection model
+ model = NASModel("detector.zip")
+
+ # Run detection
+ result = model.predict("image.jpg", conf_threshold=0.3, iou_threshold=0.45)
+
+ # Detection results
+ print(f"Found {result['count']} objects")
+ for det in result['detections']:
+     print(f"  Box: {det['box']}, Class: {det['class']}, Conf: {det['confidence']}")
+ ```
+
+ ## Model Info
+
+ ```python
+ # Get model information
+ info = model.info()
+ print(f"Category: {info['category']}")
+ print(f"Task: {info['task']}")
+ print(f"Backbone: {info['backbone']}")
+ print(f"Classes: {info['num_classes']}")
+ ```
+
+ ## Device Selection
+
+ ```python
+ # Auto-detect device (default)
+ model = NASModel("model.zip")
+
+ # Specify device
+ model = NASModel("model.zip", device="cuda")
+ model = NASModel("model.zip", device="cpu")
+
+ # Move to different device
+ model.to("cuda")
+ ```
+
+ ## How It Works
+
+ 1. **Extract**: Automatically extracts model ZIP file
+ 2. **Parse**: Reads `log.txt` to get model configuration (genome, backbone, etc.)
+ 3. **Build**: Constructs model architecture from genome encoding
+ 4. **Load**: Loads trained weights from `weights.pt`
+ 5. **Predict**: Runs inference on your images
+
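+ As a rough illustration of what `NASModel` does behind these five steps, the sketch below walks through them with the standard library and PyTorch. It is a simplified approximation, not the package's actual loader: the exact `log.txt` format and the genome-to-architecture builder are internal to `eizen_nsga` and are not reproduced here, and the file paths are only examples.
+
+ ```python
+ import zipfile
+ from pathlib import Path
+
+ import torch
+
+ # 1. Extract: unpack the model ZIP
+ zip_path = Path("trained_model.zip")
+ extract_dir = zip_path.with_suffix("")
+ with zipfile.ZipFile(zip_path) as zf:
+     zf.extractall(extract_dir)
+
+ # 2. Parse: read the configuration log (its exact format is package-specific)
+ log_text = (extract_dir / "log.txt").read_text()
+
+ # 3. Build: eizen_nsga reconstructs the architecture from the genome
+ #    encoding recorded in log.txt; that builder is internal to the package.
+
+ # 4. Load: the trained weights are a regular PyTorch checkpoint
+ checkpoint = torch.load(extract_dir / "weights.pt", map_location="cpu")
+
+ # 5. Predict: NASModel bundles all of the above behind a single call
+ from eizen_nsga import NASModel
+ model = NASModel(str(zip_path))
+ result = model.predict("image.jpg")
+ ```
+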
+ ## Supported Model Categories
+
+ - **SOTA**: Computer vision models (YOLOv8, ResNet, EfficientNet backbones) ✅
+ - **NN**: Tabular data models (coming soon)
+ - **Transformer**: LLM models (coming soon)
+
+ ## Model ZIP Structure
+
+ Your trained model ZIP should contain:
+ ```
+ model.zip
+ ├── weights.pt      # Trained model weights
+ └── log.txt         # Model configuration (genome, backbone, etc.)
+ ```
+
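+ If you need to assemble a ZIP with this layout yourself (for example from a training run's output folder), a minimal sketch using only the Python standard library is shown below. The `weights.pt` and `log.txt` names come from the structure above; the `runs/exp1` folder and output path are just example assumptions.
+
+ ```python
+ import zipfile
+ from pathlib import Path
+
+ run_dir = Path("runs/exp1")          # example folder holding the two files
+ out_zip = Path("trained_model.zip")  # archive that NASModel will load
+
+ with zipfile.ZipFile(out_zip, "w", compression=zipfile.ZIP_DEFLATED) as zf:
+     # Keep both files at the archive root, matching the layout above
+     zf.write(run_dir / "weights.pt", arcname="weights.pt")
+     zf.write(run_dir / "log.txt", arcname="log.txt")
+ ```
+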
+ ## Fully Standalone Package
+
+ This package bundles all the SOTA modules it needs internally:
+ - Model builders (micro/macro architectures)
+ - Genome encoders/decoders
+ - Neural operations
+ - Backbone registry
+ - Detection heads
+
+ No need to have the NSGA-Net training codebase installed!
+
+ ## Requirements
+
+ **Core dependencies:**
+ - torch >= 1.9.0
+ - torchvision >= 0.10.0
+ - numpy >= 1.19.0
+ - pillow >= 8.0.0
+
+ **Optional (for SOTA models):**
+ - ultralytics >= 8.0.0
+
+ ## License
+
+ MIT License - Copyright (c) 2024 Eizen.ai Team
+
+ ## Links
+
+ - Homepage: https://eizen.ai
+ - GitHub: https://github.com/eizen-ai/nsga-net
+ - Issues: https://github.com/eizen-ai/nsga-net/issues
@@ -0,0 +1,18 @@
+ LICENSE
+ MANIFEST.in
+ README.md
+ pyproject.toml
+ setup.py
+ eizen_nsga.egg-info/PKG-INFO
+ eizen_nsga.egg-info/SOURCES.txt
+ eizen_nsga.egg-info/dependency_links.txt
+ eizen_nsga.egg-info/requires.txt
+ eizen_nsga.egg-info/top_level.txt
+ sota/__init__.py
+ sota/detection_heads.py
+ sota/macro_encoding.py
+ sota/macro_models.py
+ sota/micro_encoding.py
+ sota/micro_models.py
+ sota/micro_operations.py
+ sota/model_registry.py
@@ -0,0 +1,22 @@
+ torch>=1.9.0
+ torchvision>=0.10.0
+ numpy>=1.19.0
+ pillow>=8.0.0
+
+ [all]
+ ultralytics>=8.0.0
+ pandas>=1.0.0
+ scikit-learn>=0.24.0
+ transformers>=4.0.0
+ tokenizers>=0.10.0
+
+ [nn]
+ pandas>=1.0.0
+ scikit-learn>=0.24.0
+
+ [sota]
+ ultralytics>=8.0.0
+
+ [transformer]
+ transformers>=4.0.0
+ tokenizers>=0.10.0
@@ -0,0 +1,49 @@
+ [build-system]
+ requires = ["setuptools>=45", "wheel", "setuptools_scm>=6.2"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "eizen-nsga"
+ version = "1.0.0"
+ description = "Simple inference package for NSGA-Net trained models"
+ readme = "README.md"
+ requires-python = ">=3.8"
+ license = {text = "MIT"}
+ authors = [
+     {name = "Eizen.ai Team", email = "support@eizen.ai"}
+ ]
+ classifiers = [
+     "Development Status :: 4 - Beta",
+     "Intended Audience :: Developers",
+     "Intended Audience :: Science/Research",
+     "License :: OSI Approved :: MIT License",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.8",
+     "Programming Language :: Python :: 3.9",
+     "Programming Language :: Python :: 3.10",
+     "Topic :: Scientific/Engineering :: Artificial Intelligence",
+ ]
+
+ dependencies = [
+     "torch>=1.9.0",
+     "torchvision>=0.10.0",
+     "numpy>=1.19.0",
+     "pillow>=8.0.0",
+ ]
+
+ [project.optional-dependencies]
+ sota = ["ultralytics>=8.0.0"]
+ nn = ["pandas>=1.0.0", "scikit-learn>=0.24.0"]
+ transformer = ["transformers>=4.0.0", "tokenizers>=0.10.0"]
+ all = [
+     "ultralytics>=8.0.0",
+     "pandas>=1.0.0",
+     "scikit-learn>=0.24.0",
+     "transformers>=4.0.0",
+     "tokenizers>=0.10.0",
+ ]
+
+ [project.urls]
+ Homepage = "https://eizen.ai"
+ Repository = "https://github.com/eizen-ai/nsga-net"
+ Issues = "https://github.com/eizen-ai/nsga-net/issues"
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+