dgenerate-ultralytics-headless 8.3.222__py3-none-any.whl → 8.3.225__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158) hide show
  1. {dgenerate_ultralytics_headless-8.3.222.dist-info → dgenerate_ultralytics_headless-8.3.225.dist-info}/METADATA +2 -2
  2. dgenerate_ultralytics_headless-8.3.225.dist-info/RECORD +286 -0
  3. tests/conftest.py +5 -8
  4. tests/test_cli.py +1 -8
  5. tests/test_python.py +1 -2
  6. ultralytics/__init__.py +1 -1
  7. ultralytics/cfg/__init__.py +34 -49
  8. ultralytics/cfg/datasets/ImageNet.yaml +1 -1
  9. ultralytics/cfg/datasets/kitti.yaml +27 -0
  10. ultralytics/cfg/datasets/lvis.yaml +5 -5
  11. ultralytics/cfg/datasets/open-images-v7.yaml +1 -1
  12. ultralytics/data/annotator.py +3 -4
  13. ultralytics/data/augment.py +244 -323
  14. ultralytics/data/base.py +12 -22
  15. ultralytics/data/build.py +47 -40
  16. ultralytics/data/converter.py +32 -42
  17. ultralytics/data/dataset.py +43 -71
  18. ultralytics/data/loaders.py +22 -34
  19. ultralytics/data/split.py +5 -6
  20. ultralytics/data/split_dota.py +8 -15
  21. ultralytics/data/utils.py +27 -36
  22. ultralytics/engine/exporter.py +49 -116
  23. ultralytics/engine/model.py +144 -180
  24. ultralytics/engine/predictor.py +18 -29
  25. ultralytics/engine/results.py +165 -231
  26. ultralytics/engine/trainer.py +11 -19
  27. ultralytics/engine/tuner.py +13 -23
  28. ultralytics/engine/validator.py +6 -10
  29. ultralytics/hub/__init__.py +7 -12
  30. ultralytics/hub/auth.py +6 -12
  31. ultralytics/hub/google/__init__.py +7 -10
  32. ultralytics/hub/session.py +15 -25
  33. ultralytics/hub/utils.py +3 -6
  34. ultralytics/models/fastsam/model.py +6 -8
  35. ultralytics/models/fastsam/predict.py +5 -10
  36. ultralytics/models/fastsam/utils.py +1 -2
  37. ultralytics/models/fastsam/val.py +2 -4
  38. ultralytics/models/nas/model.py +5 -8
  39. ultralytics/models/nas/predict.py +7 -9
  40. ultralytics/models/nas/val.py +1 -2
  41. ultralytics/models/rtdetr/model.py +5 -8
  42. ultralytics/models/rtdetr/predict.py +15 -18
  43. ultralytics/models/rtdetr/train.py +10 -13
  44. ultralytics/models/rtdetr/val.py +13 -20
  45. ultralytics/models/sam/amg.py +12 -18
  46. ultralytics/models/sam/build.py +6 -9
  47. ultralytics/models/sam/model.py +16 -23
  48. ultralytics/models/sam/modules/blocks.py +62 -84
  49. ultralytics/models/sam/modules/decoders.py +17 -24
  50. ultralytics/models/sam/modules/encoders.py +40 -56
  51. ultralytics/models/sam/modules/memory_attention.py +10 -16
  52. ultralytics/models/sam/modules/sam.py +41 -47
  53. ultralytics/models/sam/modules/tiny_encoder.py +64 -83
  54. ultralytics/models/sam/modules/transformer.py +17 -27
  55. ultralytics/models/sam/modules/utils.py +31 -42
  56. ultralytics/models/sam/predict.py +172 -209
  57. ultralytics/models/utils/loss.py +14 -26
  58. ultralytics/models/utils/ops.py +13 -17
  59. ultralytics/models/yolo/classify/predict.py +8 -11
  60. ultralytics/models/yolo/classify/train.py +8 -16
  61. ultralytics/models/yolo/classify/val.py +13 -20
  62. ultralytics/models/yolo/detect/predict.py +4 -8
  63. ultralytics/models/yolo/detect/train.py +11 -20
  64. ultralytics/models/yolo/detect/val.py +38 -48
  65. ultralytics/models/yolo/model.py +35 -47
  66. ultralytics/models/yolo/obb/predict.py +5 -8
  67. ultralytics/models/yolo/obb/train.py +11 -14
  68. ultralytics/models/yolo/obb/val.py +20 -28
  69. ultralytics/models/yolo/pose/predict.py +5 -8
  70. ultralytics/models/yolo/pose/train.py +4 -8
  71. ultralytics/models/yolo/pose/val.py +31 -39
  72. ultralytics/models/yolo/segment/predict.py +9 -14
  73. ultralytics/models/yolo/segment/train.py +3 -6
  74. ultralytics/models/yolo/segment/val.py +16 -26
  75. ultralytics/models/yolo/world/train.py +8 -14
  76. ultralytics/models/yolo/world/train_world.py +11 -16
  77. ultralytics/models/yolo/yoloe/predict.py +16 -23
  78. ultralytics/models/yolo/yoloe/train.py +30 -43
  79. ultralytics/models/yolo/yoloe/train_seg.py +5 -10
  80. ultralytics/models/yolo/yoloe/val.py +15 -20
  81. ultralytics/nn/autobackend.py +10 -18
  82. ultralytics/nn/modules/activation.py +4 -6
  83. ultralytics/nn/modules/block.py +99 -185
  84. ultralytics/nn/modules/conv.py +45 -90
  85. ultralytics/nn/modules/head.py +44 -98
  86. ultralytics/nn/modules/transformer.py +44 -76
  87. ultralytics/nn/modules/utils.py +14 -19
  88. ultralytics/nn/tasks.py +86 -146
  89. ultralytics/nn/text_model.py +25 -40
  90. ultralytics/solutions/ai_gym.py +10 -16
  91. ultralytics/solutions/analytics.py +7 -10
  92. ultralytics/solutions/config.py +4 -5
  93. ultralytics/solutions/distance_calculation.py +9 -12
  94. ultralytics/solutions/heatmap.py +7 -13
  95. ultralytics/solutions/instance_segmentation.py +5 -8
  96. ultralytics/solutions/object_blurrer.py +7 -10
  97. ultralytics/solutions/object_counter.py +8 -12
  98. ultralytics/solutions/object_cropper.py +5 -8
  99. ultralytics/solutions/parking_management.py +12 -14
  100. ultralytics/solutions/queue_management.py +4 -6
  101. ultralytics/solutions/region_counter.py +7 -10
  102. ultralytics/solutions/security_alarm.py +14 -19
  103. ultralytics/solutions/similarity_search.py +7 -12
  104. ultralytics/solutions/solutions.py +31 -53
  105. ultralytics/solutions/speed_estimation.py +6 -9
  106. ultralytics/solutions/streamlit_inference.py +2 -4
  107. ultralytics/solutions/trackzone.py +7 -10
  108. ultralytics/solutions/vision_eye.py +5 -8
  109. ultralytics/trackers/basetrack.py +2 -4
  110. ultralytics/trackers/bot_sort.py +6 -11
  111. ultralytics/trackers/byte_tracker.py +10 -15
  112. ultralytics/trackers/track.py +3 -6
  113. ultralytics/trackers/utils/gmc.py +6 -12
  114. ultralytics/trackers/utils/kalman_filter.py +35 -43
  115. ultralytics/trackers/utils/matching.py +6 -10
  116. ultralytics/utils/__init__.py +61 -100
  117. ultralytics/utils/autobatch.py +2 -4
  118. ultralytics/utils/autodevice.py +11 -13
  119. ultralytics/utils/benchmarks.py +25 -35
  120. ultralytics/utils/callbacks/base.py +8 -10
  121. ultralytics/utils/callbacks/clearml.py +2 -4
  122. ultralytics/utils/callbacks/comet.py +30 -44
  123. ultralytics/utils/callbacks/dvc.py +13 -18
  124. ultralytics/utils/callbacks/mlflow.py +4 -5
  125. ultralytics/utils/callbacks/neptune.py +4 -6
  126. ultralytics/utils/callbacks/raytune.py +3 -4
  127. ultralytics/utils/callbacks/tensorboard.py +4 -6
  128. ultralytics/utils/callbacks/wb.py +10 -13
  129. ultralytics/utils/checks.py +29 -56
  130. ultralytics/utils/cpu.py +1 -2
  131. ultralytics/utils/dist.py +8 -12
  132. ultralytics/utils/downloads.py +17 -27
  133. ultralytics/utils/errors.py +6 -8
  134. ultralytics/utils/events.py +2 -4
  135. ultralytics/utils/export/__init__.py +4 -239
  136. ultralytics/utils/export/engine.py +237 -0
  137. ultralytics/utils/export/imx.py +11 -17
  138. ultralytics/utils/export/tensorflow.py +217 -0
  139. ultralytics/utils/files.py +10 -15
  140. ultralytics/utils/git.py +5 -7
  141. ultralytics/utils/instance.py +30 -51
  142. ultralytics/utils/logger.py +11 -15
  143. ultralytics/utils/loss.py +8 -14
  144. ultralytics/utils/metrics.py +98 -138
  145. ultralytics/utils/nms.py +13 -16
  146. ultralytics/utils/ops.py +47 -74
  147. ultralytics/utils/patches.py +11 -18
  148. ultralytics/utils/plotting.py +29 -42
  149. ultralytics/utils/tal.py +25 -39
  150. ultralytics/utils/torch_utils.py +45 -73
  151. ultralytics/utils/tqdm.py +6 -8
  152. ultralytics/utils/triton.py +9 -12
  153. ultralytics/utils/tuner.py +1 -2
  154. dgenerate_ultralytics_headless-8.3.222.dist-info/RECORD +0 -283
  155. {dgenerate_ultralytics_headless-8.3.222.dist-info → dgenerate_ultralytics_headless-8.3.225.dist-info}/WHEEL +0 -0
  156. {dgenerate_ultralytics_headless-8.3.222.dist-info → dgenerate_ultralytics_headless-8.3.225.dist-info}/entry_points.txt +0 -0
  157. {dgenerate_ultralytics_headless-8.3.222.dist-info → dgenerate_ultralytics_headless-8.3.225.dist-info}/licenses/LICENSE +0 -0
  158. {dgenerate_ultralytics_headless-8.3.222.dist-info → dgenerate_ultralytics_headless-8.3.225.dist-info}/top_level.txt +0 -0
@@ -63,8 +63,7 @@ from ultralytics.utils.torch_utils import (
63
63
 
64
64
 
65
65
  class BaseTrainer:
66
- """
67
- A base class for creating trainers.
66
+ """A base class for creating trainers.
68
67
 
69
68
  This class provides the foundation for training YOLO models, handling the training loop, validation, checkpointing,
70
69
  and various training utilities. It supports both single-GPU and multi-GPU distributed training.
@@ -114,8 +113,7 @@ class BaseTrainer:
114
113
  """
115
114
 
116
115
  def __init__(self, cfg=DEFAULT_CFG, overrides=None, _callbacks=None):
117
- """
118
- Initialize the BaseTrainer class.
116
+ """Initialize the BaseTrainer class.
119
117
 
120
118
  Args:
121
119
  cfg (str, optional): Path to a configuration file.
@@ -620,8 +618,7 @@ class BaseTrainer:
620
618
  (self.wdir / f"epoch{self.epoch}.pt").write_bytes(serialized_ckpt) # save epoch, i.e. 'epoch3.pt'
621
619
 
622
620
  def get_dataset(self):
623
- """
624
- Get train and validation datasets from data dictionary.
621
+ """Get train and validation datasets from data dictionary.
625
622
 
626
623
  Returns:
627
624
  (dict): A dictionary containing the training/validation/test dataset and category names.
@@ -656,8 +653,7 @@ class BaseTrainer:
656
653
  return data
657
654
 
658
655
  def setup_model(self):
659
- """
660
- Load, create, or download model for any task.
656
+ """Load, create, or download model for any task.
661
657
 
662
658
  Returns:
663
659
  (dict): Optional checkpoint to resume training from.
@@ -690,8 +686,7 @@ class BaseTrainer:
690
686
  return batch
691
687
 
692
688
  def validate(self):
693
- """
694
- Run validation on val set using self.validator.
689
+ """Run validation on val set using self.validator.
695
690
 
696
691
  Returns:
697
692
  metrics (dict): Dictionary of validation metrics.
@@ -726,10 +721,9 @@ class BaseTrainer:
726
721
  raise NotImplementedError("build_dataset function not implemented in trainer")
727
722
 
728
723
  def label_loss_items(self, loss_items=None, prefix="train"):
729
- """
730
- Return a loss dict with labeled training loss items tensor.
724
+ """Return a loss dict with labeled training loss items tensor.
731
725
 
732
- Note:
726
+ Notes:
733
727
  This is not needed for classification but necessary for segmentation & detection
734
728
  """
735
729
  return {"loss": loss_items} if loss_items is not None else ["loss"]
@@ -895,18 +889,16 @@ class BaseTrainer:
895
889
  self.train_loader.dataset.close_mosaic(hyp=copy(self.args))
896
890
 
897
891
  def build_optimizer(self, model, name="auto", lr=0.001, momentum=0.9, decay=1e-5, iterations=1e5):
898
- """
899
- Construct an optimizer for the given model.
892
+ """Construct an optimizer for the given model.
900
893
 
901
894
  Args:
902
895
  model (torch.nn.Module): The model for which to build an optimizer.
903
- name (str, optional): The name of the optimizer to use. If 'auto', the optimizer is selected
904
- based on the number of iterations.
896
+ name (str, optional): The name of the optimizer to use. If 'auto', the optimizer is selected based on the
897
+ number of iterations.
905
898
  lr (float, optional): The learning rate for the optimizer.
906
899
  momentum (float, optional): The momentum factor for the optimizer.
907
900
  decay (float, optional): The weight decay for the optimizer.
908
- iterations (float, optional): The number of iterations, which determines the optimizer if
909
- name is 'auto'.
901
+ iterations (float, optional): The number of iterations, which determines the optimizer if name is 'auto'.
910
902
 
911
903
  Returns:
912
904
  (torch.optim.Optimizer): The constructed optimizer.
@@ -34,12 +34,11 @@ from ultralytics.utils.plotting import plot_tune_results
34
34
 
35
35
 
36
36
  class Tuner:
37
- """
38
- A class for hyperparameter tuning of YOLO models.
37
+ """A class for hyperparameter tuning of YOLO models.
39
38
 
40
39
  The class evolves YOLO model hyperparameters over a given number of iterations by mutating them according to the
41
- search space and retraining the model to evaluate their performance. Supports both local CSV storage and
42
- distributed MongoDB Atlas coordination for multi-machine hyperparameter optimization.
40
+ search space and retraining the model to evaluate their performance. Supports both local CSV storage and distributed
41
+ MongoDB Atlas coordination for multi-machine hyperparameter optimization.
43
42
 
44
43
  Attributes:
45
44
  space (dict[str, tuple]): Hyperparameter search space containing bounds and scaling factors for mutation.
@@ -83,8 +82,7 @@ class Tuner:
83
82
  """
84
83
 
85
84
  def __init__(self, args=DEFAULT_CFG, _callbacks: list | None = None):
86
- """
87
- Initialize the Tuner with configurations.
85
+ """Initialize the Tuner with configurations.
88
86
 
89
87
  Args:
90
88
  args (dict): Configuration for hyperparameter evolution.
@@ -142,8 +140,7 @@ class Tuner:
142
140
  )
143
141
 
144
142
  def _connect(self, uri: str = "mongodb+srv://username:password@cluster.mongodb.net/", max_retries: int = 3):
145
- """
146
- Create MongoDB client with exponential backoff retry on connection failures.
143
+ """Create MongoDB client with exponential backoff retry on connection failures.
147
144
 
148
145
  Args:
149
146
  uri (str): MongoDB connection string with credentials and cluster information.
@@ -183,12 +180,10 @@ class Tuner:
183
180
  time.sleep(wait_time)
184
181
 
185
182
  def _init_mongodb(self, mongodb_uri="", mongodb_db="", mongodb_collection=""):
186
- """
187
- Initialize MongoDB connection for distributed tuning.
183
+ """Initialize MongoDB connection for distributed tuning.
188
184
 
189
- Connects to MongoDB Atlas for distributed hyperparameter optimization across multiple machines.
190
- Each worker saves results to a shared collection and reads the latest best hyperparameters
191
- from all workers for evolution.
185
+ Connects to MongoDB Atlas for distributed hyperparameter optimization across multiple machines. Each worker
186
+ saves results to a shared collection and reads the latest best hyperparameters from all workers for evolution.
192
187
 
193
188
  Args:
194
189
  mongodb_uri (str): MongoDB connection string, e.g. 'mongodb+srv://username:password@cluster.mongodb.net/'.
@@ -206,8 +201,7 @@ class Tuner:
206
201
  LOGGER.info(f"{self.prefix}Using MongoDB Atlas for distributed tuning")
207
202
 
208
203
  def _get_mongodb_results(self, n: int = 5) -> list:
209
- """
210
- Get top N results from MongoDB sorted by fitness.
204
+ """Get top N results from MongoDB sorted by fitness.
211
205
 
212
206
  Args:
213
207
  n (int): Number of top results to retrieve.
@@ -221,8 +215,7 @@ class Tuner:
221
215
  return []
222
216
 
223
217
  def _save_to_mongodb(self, fitness: float, hyperparameters: dict[str, float], metrics: dict, iteration: int):
224
- """
225
- Save results to MongoDB with proper type conversion.
218
+ """Save results to MongoDB with proper type conversion.
226
219
 
227
220
  Args:
228
221
  fitness (float): Fitness score achieved with these hyperparameters.
@@ -244,8 +237,7 @@ class Tuner:
244
237
  LOGGER.warning(f"{self.prefix}MongoDB save failed: {e}")
245
238
 
246
239
  def _sync_mongodb_to_csv(self):
247
- """
248
- Sync MongoDB results to CSV for plotting compatibility.
240
+ """Sync MongoDB results to CSV for plotting compatibility.
249
241
 
250
242
  Downloads all results from MongoDB and writes them to the local CSV file in chronological order. This enables
251
243
  the existing plotting functions to work seamlessly with distributed MongoDB data.
@@ -288,8 +280,7 @@ class Tuner:
288
280
  mutation: float = 0.5,
289
281
  sigma: float = 0.2,
290
282
  ) -> dict[str, float]:
291
- """
292
- Mutate hyperparameters based on bounds and scaling factors specified in `self.space`.
283
+ """Mutate hyperparameters based on bounds and scaling factors specified in `self.space`.
293
284
 
294
285
  Args:
295
286
  parent (str): Parent selection method (kept for API compatibility, unused in BLX mode).
@@ -349,8 +340,7 @@ class Tuner:
349
340
  return hyp
350
341
 
351
342
  def __call__(self, model=None, iterations: int = 10, cleanup: bool = True):
352
- """
353
- Execute the hyperparameter evolution process when the Tuner instance is called.
343
+ """Execute the hyperparameter evolution process when the Tuner instance is called.
354
344
 
355
345
  This method iterates through the specified number of iterations, performing the following steps:
356
346
  1. Sync MongoDB results to CSV (if using distributed mode)
@@ -41,8 +41,7 @@ from ultralytics.utils.torch_utils import attempt_compile, select_device, smart_
41
41
 
42
42
 
43
43
  class BaseValidator:
44
- """
45
- A base class for creating validators.
44
+ """A base class for creating validators.
46
45
 
47
46
  This class provides the foundation for validation processes, including model evaluation, metric computation, and
48
47
  result visualization.
@@ -62,8 +61,8 @@ class BaseValidator:
62
61
  nc (int): Number of classes.
63
62
  iouv (torch.Tensor): IoU thresholds from 0.50 to 0.95 in spaces of 0.05.
64
63
  jdict (list): List to store JSON validation results.
65
- speed (dict): Dictionary with keys 'preprocess', 'inference', 'loss', 'postprocess' and their respective
66
- batch processing times in milliseconds.
64
+ speed (dict): Dictionary with keys 'preprocess', 'inference', 'loss', 'postprocess' and their respective batch
65
+ processing times in milliseconds.
67
66
  save_dir (Path): Directory to save results.
68
67
  plots (dict): Dictionary to store plots for visualization.
69
68
  callbacks (dict): Dictionary to store various callback functions.
@@ -93,8 +92,7 @@ class BaseValidator:
93
92
  """
94
93
 
95
94
  def __init__(self, dataloader=None, save_dir=None, args=None, _callbacks=None):
96
- """
97
- Initialize a BaseValidator instance.
95
+ """Initialize a BaseValidator instance.
98
96
 
99
97
  Args:
100
98
  dataloader (torch.utils.data.DataLoader, optional): Dataloader to be used for validation.
@@ -131,8 +129,7 @@ class BaseValidator:
131
129
 
132
130
  @smart_inference_mode()
133
131
  def __call__(self, trainer=None, model=None):
134
- """
135
- Execute validation process, running inference on dataloader and computing performance metrics.
132
+ """Execute validation process, running inference on dataloader and computing performance metrics.
136
133
 
137
134
  Args:
138
135
  trainer (object, optional): Trainer object that contains the model to validate.
@@ -269,8 +266,7 @@ class BaseValidator:
269
266
  def match_predictions(
270
267
  self, pred_classes: torch.Tensor, true_classes: torch.Tensor, iou: torch.Tensor, use_scipy: bool = False
271
268
  ) -> torch.Tensor:
272
- """
273
- Match predictions to ground truth objects using IoU.
269
+ """Match predictions to ground truth objects using IoU.
274
270
 
275
271
  Args:
276
272
  pred_classes (torch.Tensor): Predicted class indices of shape (N,).
@@ -23,15 +23,14 @@ __all__ = (
23
23
 
24
24
 
25
25
  def login(api_key: str | None = None, save: bool = True) -> bool:
26
- """
27
- Log in to the Ultralytics HUB API using the provided API key.
26
+ """Log in to the Ultralytics HUB API using the provided API key.
28
27
 
29
28
  The session is not stored; a new session is created when needed using the saved SETTINGS or the HUB_API_KEY
30
29
  environment variable if successfully authenticated.
31
30
 
32
31
  Args:
33
- api_key (str, optional): API key to use for authentication. If not provided, it will be retrieved from
34
- SETTINGS or HUB_API_KEY environment variable.
32
+ api_key (str, optional): API key to use for authentication. If not provided, it will be retrieved from SETTINGS
33
+ or HUB_API_KEY environment variable.
35
34
  save (bool, optional): Whether to save the API key to SETTINGS if authentication is successful.
36
35
 
37
36
  Returns:
@@ -91,8 +90,7 @@ def export_fmts_hub():
91
90
 
92
91
 
93
92
  def export_model(model_id: str = "", format: str = "torchscript"):
94
- """
95
- Export a model to a specified format for deployment via the Ultralytics HUB API.
93
+ """Export a model to a specified format for deployment via the Ultralytics HUB API.
96
94
 
97
95
  Args:
98
96
  model_id (str): The ID of the model to export. An empty string will use the default model.
@@ -117,13 +115,11 @@ def export_model(model_id: str = "", format: str = "torchscript"):
117
115
 
118
116
 
119
117
  def get_export(model_id: str = "", format: str = "torchscript"):
120
- """
121
- Retrieve an exported model in the specified format from Ultralytics HUB using the model ID.
118
+ """Retrieve an exported model in the specified format from Ultralytics HUB using the model ID.
122
119
 
123
120
  Args:
124
121
  model_id (str): The ID of the model to retrieve from Ultralytics HUB.
125
- format (str): The export format to retrieve. Must be one of the supported formats returned by
126
- export_fmts_hub().
122
+ format (str): The export format to retrieve. Must be one of the supported formats returned by export_fmts_hub().
127
123
 
128
124
  Returns:
129
125
  (dict): JSON response containing the exported model information.
@@ -148,8 +144,7 @@ def get_export(model_id: str = "", format: str = "torchscript"):
148
144
 
149
145
 
150
146
  def check_dataset(path: str, task: str) -> None:
151
- """
152
- Check HUB dataset Zip file for errors before upload.
147
+ """Check HUB dataset Zip file for errors before upload.
153
148
 
154
149
  Args:
155
150
  path (str): Path to data.zip (with data.yaml inside data.zip).
ultralytics/hub/auth.py CHANGED
@@ -7,8 +7,7 @@ API_KEY_URL = f"{HUB_WEB_ROOT}/settings?tab=api+keys"
7
7
 
8
8
 
9
9
  class Auth:
10
- """
11
- Manages authentication processes including API key handling, cookie-based authentication, and header generation.
10
+ """Manages authentication processes including API key handling, cookie-based authentication, and header generation.
12
11
 
13
12
  The class supports different methods of authentication:
14
13
  1. Directly using an API key.
@@ -37,8 +36,7 @@ class Auth:
37
36
  id_token = api_key = model_key = False
38
37
 
39
38
  def __init__(self, api_key: str = "", verbose: bool = False):
40
- """
41
- Initialize Auth class and authenticate user.
39
+ """Initialize Auth class and authenticate user.
42
40
 
43
41
  Handles API key validation, Google Colab authentication, and new key requests. Updates SETTINGS upon successful
44
42
  authentication.
@@ -82,8 +80,7 @@ class Auth:
82
80
  LOGGER.info(f"{PREFIX}Get API key from {API_KEY_URL} and then run 'yolo login API_KEY'")
83
81
 
84
82
  def request_api_key(self, max_attempts: int = 3) -> bool:
85
- """
86
- Prompt the user to input their API key.
83
+ """Prompt the user to input their API key.
87
84
 
88
85
  Args:
89
86
  max_attempts (int): Maximum number of authentication attempts.
@@ -102,8 +99,7 @@ class Auth:
102
99
  raise ConnectionError(emojis(f"{PREFIX}Failed to authenticate ❌"))
103
100
 
104
101
  def authenticate(self) -> bool:
105
- """
106
- Attempt to authenticate with the server using either id_token or API key.
102
+ """Attempt to authenticate with the server using either id_token or API key.
107
103
 
108
104
  Returns:
109
105
  (bool): True if authentication is successful, False otherwise.
@@ -123,8 +119,7 @@ class Auth:
123
119
  return False
124
120
 
125
121
  def auth_with_cookies(self) -> bool:
126
- """
127
- Attempt to fetch authentication via cookies and set id_token.
122
+ """Attempt to fetch authentication via cookies and set id_token.
128
123
 
129
124
  User must be logged in to HUB and running in a supported browser.
130
125
 
@@ -145,8 +140,7 @@ class Auth:
145
140
  return False
146
141
 
147
142
  def get_auth_header(self):
148
- """
149
- Get the authentication header for making API requests.
143
+ """Get the authentication header for making API requests.
150
144
 
151
145
  Returns:
152
146
  (dict | None): The authentication header if id_token or API key is set, None otherwise.
@@ -8,11 +8,10 @@ import time
8
8
 
9
9
 
10
10
  class GCPRegions:
11
- """
12
- A class for managing and analyzing Google Cloud Platform (GCP) regions.
11
+ """A class for managing and analyzing Google Cloud Platform (GCP) regions.
13
12
 
14
- This class provides functionality to initialize, categorize, and analyze GCP regions based on their
15
- geographical location, tier classification, and network latency.
13
+ This class provides functionality to initialize, categorize, and analyze GCP regions based on their geographical
14
+ location, tier classification, and network latency.
16
15
 
17
16
  Attributes:
18
17
  regions (dict[str, tuple[int, str, str]]): A dictionary of GCP regions with their tier, city, and country.
@@ -82,8 +81,7 @@ class GCPRegions:
82
81
 
83
82
  @staticmethod
84
83
  def _ping_region(region: str, attempts: int = 1) -> tuple[str, float, float, float, float]:
85
- """
86
- Ping a specified GCP region and measure network latency statistics.
84
+ """Ping a specified GCP region and measure network latency statistics.
87
85
 
88
86
  Args:
89
87
  region (str): The GCP region identifier to ping (e.g., 'us-central1').
@@ -126,8 +124,7 @@ class GCPRegions:
126
124
  tier: int | None = None,
127
125
  attempts: int = 1,
128
126
  ) -> list[tuple[str, float, float, float, float]]:
129
- """
130
- Determine the GCP regions with the lowest latency based on ping tests.
127
+ """Determine the GCP regions with the lowest latency based on ping tests.
131
128
 
132
129
  Args:
133
130
  top (int, optional): Number of top regions to return.
@@ -136,8 +133,8 @@ class GCPRegions:
136
133
  attempts (int, optional): Number of ping attempts per region.
137
134
 
138
135
  Returns:
139
- (list[tuple[str, float, float, float, float]]): List of tuples containing region information and
140
- latency statistics. Each tuple contains (region, mean_latency, std_dev, min_latency, max_latency).
136
+ (list[tuple[str, float, float, float, float]]): List of tuples containing region information and latency
137
+ statistics. Each tuple contains (region, mean_latency, std_dev, min_latency, max_latency).
141
138
 
142
139
  Examples:
143
140
  >>> regions = GCPRegions()
@@ -19,8 +19,7 @@ AGENT_NAME = f"python-{__version__}-colab" if IS_COLAB else f"python-{__version_
19
19
 
20
20
 
21
21
  class HUBTrainingSession:
22
- """
23
- HUB training session for Ultralytics HUB YOLO models.
22
+ """HUB training session for Ultralytics HUB YOLO models.
24
23
 
25
24
  This class encapsulates the functionality for interacting with Ultralytics HUB during model training, including
26
25
  model creation, metrics tracking, and checkpoint uploading.
@@ -45,12 +44,11 @@ class HUBTrainingSession:
45
44
  """
46
45
 
47
46
  def __init__(self, identifier: str):
48
- """
49
- Initialize the HUBTrainingSession with the provided model identifier.
47
+ """Initialize the HUBTrainingSession with the provided model identifier.
50
48
 
51
49
  Args:
52
- identifier (str): Model identifier used to initialize the HUB training session. It can be a URL string
53
- or a model key with specific format.
50
+ identifier (str): Model identifier used to initialize the HUB training session. It can be a URL string or a
51
+ model key with specific format.
54
52
 
55
53
  Raises:
56
54
  ValueError: If the provided model identifier is invalid.
@@ -93,8 +91,7 @@ class HUBTrainingSession:
93
91
 
94
92
  @classmethod
95
93
  def create_session(cls, identifier: str, args: dict[str, Any] | None = None):
96
- """
97
- Create an authenticated HUBTrainingSession or return None.
94
+ """Create an authenticated HUBTrainingSession or return None.
98
95
 
99
96
  Args:
100
97
  identifier (str): Model identifier used to initialize the HUB training session.
@@ -114,8 +111,7 @@ class HUBTrainingSession:
114
111
  return None
115
112
 
116
113
  def load_model(self, model_id: str):
117
- """
118
- Load an existing model from Ultralytics HUB using the provided model identifier.
114
+ """Load an existing model from Ultralytics HUB using the provided model identifier.
119
115
 
120
116
  Args:
121
117
  model_id (str): The identifier of the model to load.
@@ -140,8 +136,7 @@ class HUBTrainingSession:
140
136
  LOGGER.info(f"{PREFIX}View model at {self.model_url} 🚀")
141
137
 
142
138
  def create_model(self, model_args: dict[str, Any]):
143
- """
144
- Initialize a HUB training session with the specified model arguments.
139
+ """Initialize a HUB training session with the specified model arguments.
145
140
 
146
141
  Args:
147
142
  model_args (dict[str, Any]): Arguments for creating the model, including batch size, epochs, image size,
@@ -186,8 +181,7 @@ class HUBTrainingSession:
186
181
 
187
182
  @staticmethod
188
183
  def _parse_identifier(identifier: str):
189
- """
190
- Parse the given identifier to determine the type and extract relevant components.
184
+ """Parse the given identifier to determine the type and extract relevant components.
191
185
 
192
186
  The method supports different identifier formats:
193
187
  - A HUB model URL https://hub.ultralytics.com/models/MODEL
@@ -218,12 +212,11 @@ class HUBTrainingSession:
218
212
  return api_key, model_id, filename
219
213
 
220
214
  def _set_train_args(self):
221
- """
222
- Initialize training arguments and create a model entry on the Ultralytics HUB.
215
+ """Initialize training arguments and create a model entry on the Ultralytics HUB.
223
216
 
224
- This method sets up training arguments based on the model's state and updates them with any additional
225
- arguments provided. It handles different states of the model, such as whether it's resumable, pretrained,
226
- or requires specific file setup.
217
+ This method sets up training arguments based on the model's state and updates them with any additional arguments
218
+ provided. It handles different states of the model, such as whether it's resumable, pretrained, or requires
219
+ specific file setup.
227
220
 
228
221
  Raises:
229
222
  ValueError: If the model is already trained, if required dataset information is missing, or if there are
@@ -261,8 +254,7 @@ class HUBTrainingSession:
261
254
  *args,
262
255
  **kwargs,
263
256
  ):
264
- """
265
- Execute request_func with retries, timeout handling, optional threading, and progress tracking.
257
+ """Execute request_func with retries, timeout handling, optional threading, and progress tracking.
266
258
 
267
259
  Args:
268
260
  request_func (callable): The function to execute.
@@ -342,8 +334,7 @@ class HUBTrainingSession:
342
334
  return status_code in retry_codes
343
335
 
344
336
  def _get_failure_message(self, response, retry: int, timeout: int) -> str:
345
- """
346
- Generate a retry message based on the response status code.
337
+ """Generate a retry message based on the response status code.
347
338
 
348
339
  Args:
349
340
  response (requests.Response): The HTTP response object.
@@ -379,8 +370,7 @@ class HUBTrainingSession:
379
370
  map: float = 0.0,
380
371
  final: bool = False,
381
372
  ) -> None:
382
- """
383
- Upload a model checkpoint to Ultralytics HUB.
373
+ """Upload a model checkpoint to Ultralytics HUB.
384
374
 
385
375
  Args:
386
376
  epoch (int): The current training epoch.
ultralytics/hub/utils.py CHANGED
@@ -21,8 +21,7 @@ HELP_MSG = "If this issue persists please visit https://github.com/ultralytics/h
21
21
 
22
22
 
23
23
  def request_with_credentials(url: str) -> Any:
24
- """
25
- Make an AJAX request with cookies attached in a Google Colab environment.
24
+ """Make an AJAX request with cookies attached in a Google Colab environment.
26
25
 
27
26
  Args:
28
27
  url (str): The URL to make the request to.
@@ -62,8 +61,7 @@ def request_with_credentials(url: str) -> Any:
62
61
 
63
62
 
64
63
  def requests_with_progress(method: str, url: str, **kwargs):
65
- """
66
- Make an HTTP request using the specified method and URL, with an optional progress bar.
64
+ """Make an HTTP request using the specified method and URL, with an optional progress bar.
67
65
 
68
66
  Args:
69
67
  method (str): The HTTP method to use (e.g. 'GET', 'POST').
@@ -106,8 +104,7 @@ def smart_request(
106
104
  progress: bool = False,
107
105
  **kwargs,
108
106
  ):
109
- """
110
- Make an HTTP request using the 'requests' library, with exponential backoff retries up to a specified timeout.
107
+ """Make an HTTP request using the 'requests' library, with exponential backoff retries up to a specified timeout.
111
108
 
112
109
  Args:
113
110
  method (str): The HTTP method to use for the request. Choices are 'post' and 'get'.
@@ -12,8 +12,7 @@ from .val import FastSAMValidator
12
12
 
13
13
 
14
14
  class FastSAM(Model):
15
- """
16
- FastSAM model interface for segment anything tasks.
15
+ """FastSAM model interface for segment anything tasks.
17
16
 
18
17
  This class extends the base Model class to provide specific functionality for the FastSAM (Fast Segment Anything
19
18
  Model) implementation, allowing for efficient and accurate image segmentation with optional prompting support.
@@ -53,15 +52,14 @@ class FastSAM(Model):
53
52
  texts: list | None = None,
54
53
  **kwargs: Any,
55
54
  ):
56
- """
57
- Perform segmentation prediction on image or video source.
55
+ """Perform segmentation prediction on image or video source.
58
56
 
59
- Supports prompted segmentation with bounding boxes, points, labels, and texts. The method packages these
60
- prompts and passes them to the parent class predict method for processing.
57
+ Supports prompted segmentation with bounding boxes, points, labels, and texts. The method packages these prompts
58
+ and passes them to the parent class predict method for processing.
61
59
 
62
60
  Args:
63
- source (str | PIL.Image | np.ndarray): Input source for prediction, can be a file path, URL, PIL image,
64
- or numpy array.
61
+ source (str | PIL.Image | np.ndarray): Input source for prediction, can be a file path, URL, PIL image, or
62
+ numpy array.
65
63
  stream (bool): Whether to enable real-time streaming mode for video inputs.
66
64
  bboxes (list, optional): Bounding box coordinates for prompted segmentation in format [[x1, y1, x2, y2]].
67
65
  points (list, optional): Point coordinates for prompted segmentation in format [[x, y]].
@@ -13,8 +13,7 @@ from .utils import adjust_bboxes_to_image_border
13
13
 
14
14
 
15
15
  class FastSAMPredictor(SegmentationPredictor):
16
- """
17
- FastSAMPredictor is specialized for fast SAM (Segment Anything Model) segmentation prediction tasks.
16
+ """FastSAMPredictor is specialized for Fast SAM (Segment Anything Model) segmentation prediction tasks.
18
17
 
19
18
  This class extends the SegmentationPredictor, customizing the prediction pipeline specifically for Fast SAM. It
20
19
  adjusts post-processing steps to incorporate mask prediction and non-maximum suppression while optimizing for
@@ -33,8 +32,7 @@ class FastSAMPredictor(SegmentationPredictor):
33
32
  """
34
33
 
35
34
  def __init__(self, cfg=DEFAULT_CFG, overrides=None, _callbacks=None):
36
- """
37
- Initialize the FastSAMPredictor with configuration and callbacks.
35
+ """Initialize the FastSAMPredictor with configuration and callbacks.
38
36
 
39
37
  This initializes a predictor specialized for Fast SAM (Segment Anything Model) segmentation tasks. The predictor
40
38
  extends SegmentationPredictor with custom post-processing for mask prediction and non-maximum suppression
@@ -49,8 +47,7 @@ class FastSAMPredictor(SegmentationPredictor):
49
47
  self.prompts = {}
50
48
 
51
49
  def postprocess(self, preds, img, orig_imgs):
52
- """
53
- Apply postprocessing to FastSAM predictions and handle prompts.
50
+ """Apply postprocessing to FastSAM predictions and handle prompts.
54
51
 
55
52
  Args:
56
53
  preds (list[torch.Tensor]): Raw predictions from the model.
@@ -77,8 +74,7 @@ class FastSAMPredictor(SegmentationPredictor):
77
74
  return self.prompt(results, bboxes=bboxes, points=points, labels=labels, texts=texts)
78
75
 
79
76
  def prompt(self, results, bboxes=None, points=None, labels=None, texts=None):
80
- """
81
- Perform image segmentation inference based on cues like bounding boxes, points, and text prompts.
77
+ """Perform image segmentation inference based on cues like bounding boxes, points, and text prompts.
82
78
 
83
79
  Args:
84
80
  results (Results | list[Results]): Original inference results from FastSAM models without any prompts.
@@ -151,8 +147,7 @@ class FastSAMPredictor(SegmentationPredictor):
151
147
  return prompt_results
152
148
 
153
149
  def _clip_inference(self, images, texts):
154
- """
155
- Perform CLIP inference to calculate similarity between images and text prompts.
150
+ """Perform CLIP inference to calculate similarity between images and text prompts.
156
151
 
157
152
  Args:
158
153
  images (list[PIL.Image]): List of source images, each should be PIL.Image with RGB channel order.
@@ -2,8 +2,7 @@
2
2
 
3
3
 
4
4
  def adjust_bboxes_to_image_border(boxes, image_shape, threshold=20):
5
- """
6
- Adjust bounding boxes to stick to image border if they are within a certain threshold.
5
+ """Adjust bounding boxes to stick to image border if they are within a certain threshold.
7
6
 
8
7
  Args:
9
8
  boxes (torch.Tensor): Bounding boxes with shape (N, 4) in xyxy format.
@@ -4,8 +4,7 @@ from ultralytics.models.yolo.segment import SegmentationValidator
4
4
 
5
5
 
6
6
  class FastSAMValidator(SegmentationValidator):
7
- """
8
- Custom validation class for Fast SAM (Segment Anything Model) segmentation in Ultralytics YOLO framework.
7
+ """Custom validation class for Fast SAM (Segment Anything Model) segmentation in Ultralytics YOLO framework.
9
8
 
10
9
  Extends the SegmentationValidator class, customizing the validation process specifically for Fast SAM. This class
11
10
  sets the task to 'segment' and uses the SegmentMetrics for evaluation. Additionally, plotting features are disabled
@@ -23,8 +22,7 @@ class FastSAMValidator(SegmentationValidator):
23
22
  """
24
23
 
25
24
  def __init__(self, dataloader=None, save_dir=None, args=None, _callbacks=None):
26
- """
27
- Initialize the FastSAMValidator class, setting the task to 'segment' and metrics to SegmentMetrics.
25
+ """Initialize the FastSAMValidator class, setting the task to 'segment' and metrics to SegmentMetrics.
28
26
 
29
27
  Args:
30
28
  dataloader (torch.utils.data.DataLoader, optional): Dataloader to be used for validation.