ultralytics 8.2.19__py3-none-any.whl → 8.2.21__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.

Potentially problematic release: this version of ultralytics might be problematic. See the package page for more details.

@@ -83,6 +83,7 @@ from ultralytics.utils import (
  WINDOWS,
  __version__,
  callbacks,
+ checks,
  colorstr,
  get_default_args,
  yaml_save,
@@ -184,6 +185,7 @@ class Exporter:
  if sum(flags) != 1:
  raise ValueError(f"Invalid export format='{fmt}'. Valid formats are {fmts}")
  jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs, paddle, ncnn = flags # export booleans
+ is_tf_format = any((saved_model, pb, tflite, edgetpu, tfjs))

  # Device
  if fmt == "engine" and self.args.device is None:
@@ -243,7 +245,7 @@ class Exporter:
  m.dynamic = self.args.dynamic
  m.export = True
  m.format = self.args.format
- elif isinstance(m, C2f) and not any((saved_model, pb, tflite, edgetpu, tfjs)):
+ elif isinstance(m, C2f) and not is_tf_format:
  # EdgeTPU does not support FlexSplitV while split provides cleaner ONNX graph
  m.forward = m.forward_split

@@ -303,7 +305,7 @@ class Exporter:
  f[3], _ = self.export_openvino()
  if coreml: # CoreML
  f[4], _ = self.export_coreml()
- if any((saved_model, pb, tflite, edgetpu, tfjs)): # TensorFlow formats
+ if is_tf_format: # TensorFlow formats
  self.args.int8 |= edgetpu
  f[5], keras_model = self.export_saved_model()
  if pb or tfjs: # pb prerequisite to tfjs
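The three exporter hunks above replace the repeated any((saved_model, pb, tflite, edgetpu, tfjs)) membership test with a single is_tf_format flag computed once from the export booleans. A minimal sketch of the pattern, with illustrative flag values rather than the release's actual Exporter code:

    # Illustrative flag values only; in the release these come from the export format string.
    saved_model, pb, tflite, edgetpu, tfjs = True, False, False, False, False

    is_tf_format = any((saved_model, pb, tflite, edgetpu, tfjs))  # computed once

    if is_tf_format:  # replaces each repeated any((...)) check
        print("TensorFlow export path selected")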
@@ -777,11 +779,10 @@ class Exporter:
  _ = self.cache.write_bytes(cache)

  # Load dataset w/ builder (for batching) and calibrate
- dataset = self.get_int8_calibration_dataloader(prefix)
  config.int8_calibrator = EngineCalibrator(
- dataset=dataset,
+ dataset=self.get_int8_calibration_dataloader(prefix),
  batch=2 * self.args.batch,
- cache=self.file.with_suffix(".cache"),
+ cache=str(self.file.with_suffix(".cache")),
  )

  elif half:
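In the TensorRT INT8 hunk above, the calibration dataloader is now passed inline and the calibration cache path is converted to a plain string. A minimal sketch of the Path-to-str change only, assuming nothing about EngineCalibrator beyond it expecting a string path:

    from pathlib import Path

    file = Path("yolov8n.onnx")              # stand-in for self.file
    cache = str(file.with_suffix(".cache"))  # "yolov8n.cache" as str rather than Path
    print(type(cache).__name__, cache)       # str yolov8n.cache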
@@ -813,7 +814,7 @@ class Exporter:
  except ImportError:
  suffix = "-macos" if MACOS else "-aarch64" if ARM64 else "" if cuda else "-cpu"
  version = "" if ARM64 else "<=2.13.1"
- check_requirements(f"tensorflow{suffix}{version}")
+ check_requirements((f"tensorflow{suffix}{version}", "keras"))
  import tensorflow as tf # noqa
  if ARM64:
  check_requirements("cmake") # 'cmake' is needed to build onnxsim on aarch64
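check_requirements accepts either a single requirement string or an iterable of them, which is why tensorflow and keras can now be checked (and auto-installed if missing) in one call. A hedged usage sketch mirroring the requirement names in the hunk above:

    from ultralytics.utils.checks import check_requirements

    check_requirements("tensorflow<=2.13.1")             # single requirement string
    check_requirements(("tensorflow<=2.13.1", "keras"))  # several requirements at once, as in 8.2.21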
@@ -828,8 +829,8 @@ class Exporter:
  "flatbuffers>=23.5.26,<100", # update old 'flatbuffers' included inside tensorflow package
  "onnxruntime-gpu" if cuda else "onnxruntime",
  ),
- cmds="--extra-index-url https://pypi.ngc.nvidia.com",
- ) # onnx_graphsurgeon only on NVIDIA
+ cmds="--extra-index-url https://pypi.ngc.nvidia.com", # onnx_graphsurgeon only on NVIDIA
+ )

  LOGGER.info(f"\n{prefix} starting export with tensorflow {tf.__version__}...")
  check_version(
@@ -855,24 +856,17 @@ class Exporter:
  f_onnx, _ = self.export_onnx()

  # Export to TF
- tmp_file = f / "tmp_tflite_int8_calibration_images.npy" # int8 calibration images file
  np_data = None
  if self.args.int8:
+ tmp_file = f / "tmp_tflite_int8_calibration_images.npy" # int8 calibration images file
  verbosity = "info"
  if self.args.data:
- # Generate calibration data for integer quantization
- dataloader = self.get_int8_calibration_dataloader(prefix)
- images = []
- for i, batch in enumerate(dataloader):
- if i >= 100: # maximum number of calibration images
- break
- im = batch["img"].permute(1, 2, 0)[None] # list to nparray, CHW to BHWC
- images.append(im)
  f.mkdir()
+ images = [batch["img"].permute(0, 2, 3, 1) for batch in self.get_int8_calibration_dataloader(prefix)]
  images = torch.cat(images, 0).float()
  # mean = images.view(-1, 3).mean(0) # imagenet mean [123.675, 116.28, 103.53]
  # std = images.view(-1, 3).std(0) # imagenet std [58.395, 57.12, 57.375]
- np.save(str(tmp_file), images.numpy()) # BHWC
+ np.save(str(tmp_file), images.numpy().astype(np.float32)) # BHWC
  np_data = [["images", tmp_file, [[[[0, 0, 0]]]], [[[[255, 255, 255]]]]]]
  else:
  verbosity = "error"
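The rewritten calibration block above gathers whole dataloader batches and converts them from NCHW to NHWC (BHWC) with one permute(0, 2, 3, 1), instead of permuting single images with permute(1, 2, 0)[None]. A standalone sketch of the layout conversion, with random tensors standing in for the real dataloader batches:

    import numpy as np
    import torch

    batches = [torch.rand(2, 3, 640, 640) for _ in range(3)]  # NCHW batches from a stand-in dataloader
    images = [b.permute(0, 2, 3, 1) for b in batches]         # NCHW -> NHWC per batch
    images = torch.cat(images, 0).float()                     # shape (6, 640, 640, 3)
    np.save("tmp_tflite_int8_calibration_images.npy", images.numpy().astype(np.float32))  # BHWC float32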
@@ -1015,12 +1009,18 @@ class Exporter:

  def _add_tflite_metadata(self, file):
  """Add metadata to *.tflite models per https://www.tensorflow.org/lite/models/convert/metadata."""
- from tflite_support import flatbuffers # noqa
- from tflite_support import metadata as _metadata # noqa
- from tflite_support import metadata_schema_py_generated as _metadata_fb # noqa
+ import flatbuffers
+
+ if ARM64:
+ from tflite_support import metadata # noqa
+ from tflite_support import metadata_schema_py_generated as schema # noqa
+ else:
+ # TFLite Support bug https://github.com/tensorflow/tflite-support/issues/954#issuecomment-2108570845
+ from tensorflow_lite_support.metadata import metadata_schema_py_generated as schema # noqa
+ from tensorflow_lite_support.metadata.python import metadata # noqa

  # Create model info
- model_meta = _metadata_fb.ModelMetadataT()
+ model_meta = schema.ModelMetadataT()
  model_meta.name = self.metadata["description"]
  model_meta.version = self.metadata["version"]
  model_meta.author = self.metadata["author"]
@@ -1031,41 +1031,41 @@ class Exporter:
  with open(tmp_file, "w") as f:
  f.write(str(self.metadata))

- label_file = _metadata_fb.AssociatedFileT()
+ label_file = schema.AssociatedFileT()
  label_file.name = tmp_file.name
- label_file.type = _metadata_fb.AssociatedFileType.TENSOR_AXIS_LABELS
+ label_file.type = schema.AssociatedFileType.TENSOR_AXIS_LABELS

  # Create input info
- input_meta = _metadata_fb.TensorMetadataT()
+ input_meta = schema.TensorMetadataT()
  input_meta.name = "image"
  input_meta.description = "Input image to be detected."
- input_meta.content = _metadata_fb.ContentT()
- input_meta.content.contentProperties = _metadata_fb.ImagePropertiesT()
- input_meta.content.contentProperties.colorSpace = _metadata_fb.ColorSpaceType.RGB
- input_meta.content.contentPropertiesType = _metadata_fb.ContentProperties.ImageProperties
+ input_meta.content = schema.ContentT()
+ input_meta.content.contentProperties = schema.ImagePropertiesT()
+ input_meta.content.contentProperties.colorSpace = schema.ColorSpaceType.RGB
+ input_meta.content.contentPropertiesType = schema.ContentProperties.ImageProperties

  # Create output info
- output1 = _metadata_fb.TensorMetadataT()
+ output1 = schema.TensorMetadataT()
  output1.name = "output"
  output1.description = "Coordinates of detected objects, class labels, and confidence score"
  output1.associatedFiles = [label_file]
  if self.model.task == "segment":
- output2 = _metadata_fb.TensorMetadataT()
+ output2 = schema.TensorMetadataT()
  output2.name = "output"
  output2.description = "Mask protos"
  output2.associatedFiles = [label_file]

  # Create subgraph info
- subgraph = _metadata_fb.SubGraphMetadataT()
+ subgraph = schema.SubGraphMetadataT()
  subgraph.inputTensorMetadata = [input_meta]
  subgraph.outputTensorMetadata = [output1, output2] if self.model.task == "segment" else [output1]
  model_meta.subgraphMetadata = [subgraph]

  b = flatbuffers.Builder(0)
- b.Finish(model_meta.Pack(b), _metadata.MetadataPopulator.METADATA_FILE_IDENTIFIER)
+ b.Finish(model_meta.Pack(b), metadata.MetadataPopulator.METADATA_FILE_IDENTIFIER)
  metadata_buf = b.Output()

- populator = _metadata.MetadataPopulator.with_model_file(str(file))
+ populator = metadata.MetadataPopulator.with_model_file(str(file))
  populator.load_metadata_buffer(metadata_buf)
  populator.load_associated_files([str(tmp_file)])
  populator.populate()
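The metadata writer above now imports flatbuffers directly and selects the TFLite metadata modules per platform, working around the tflite-support packaging issue linked in the hunk. The release branches on the ARM64 constant; an equivalent try/except fallback is sketched here purely for illustration, using the same module paths shown in the diff:

    try:
        from tflite_support import metadata  # noqa
        from tflite_support import metadata_schema_py_generated as schema  # noqa
    except ImportError:
        # Fall back to the modules bundled with tensorflow, as the non-ARM64 branch above does
        from tensorflow_lite_support.metadata import metadata_schema_py_generated as schema  # noqa
        from tensorflow_lite_support.metadata.python import metadata  # noqa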
@@ -402,8 +402,8 @@ class Results(SimpleClass):
  )
  return results

- data = self.boxes or self.obb
  is_obb = self.obb is not None
+ data = self.obb if is_obb else self.boxes
  h, w = self.orig_shape if normalize else (1, 1)
  for i, row in enumerate(data): # xyxy, track_id if tracking, conf, class_id
  class_id, conf = int(row.cls), round(row.conf.item(), decimals)
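The Results hunk above stops selecting the detection container by truthiness: with `data = self.boxes or self.obb`, a first operand that exists but is empty (an empty result has length 0 and is falsy) silently falls through to the second operand, which may be None, and the following iteration then fails. A generic sketch of the pitfall, with plain Python stand-ins for the Boxes/OBB results:

    boxes, obb = [], None            # stand-ins: empty detection result, no OBB result

    data = boxes or obb              # -> None, even though boxes is the right container
    print(data)                      # None with the old expression

    is_obb = obb is not None
    data = obb if is_obb else boxes  # explicit selection, as in 8.2.21
    print(data)                      # []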
@@ -1,6 +1,7 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license

  from .ai_gym import AIGym
+ from .analytics import Analytics
  from .distance_calculation import DistanceCalculation
  from .heatmap import Heatmap
  from .object_counter import ObjectCounter
@@ -16,4 +17,5 @@ __all__ = (
  "ParkingManagement",
  "QueueManager",
  "SpeedEstimator",
+ "Analytics",
  )
@@ -73,11 +73,11 @@ class AIGym:
  self.stage = ["-" for _ in results[0]]

  self.keypoints = results[0].keypoints.data
- self.annotator = Annotator(im0, line_width=2)
+ self.annotator = Annotator(im0, line_width=self.tf)

  for ind, k in enumerate(reversed(self.keypoints)):
  # Estimate angle and draw specific points based on pose type
- if self.pose_type in {"pushup", "pullup", "abworkout"}:
+ if self.pose_type in {"pushup", "pullup", "abworkout", "squat"}:
  self.angle[ind] = self.annotator.estimate_pose_angle(
  k[int(self.kpts_to_check[0])].cpu(),
  k[int(self.kpts_to_check[1])].cpu(),
@@ -93,7 +93,7 @@ class AIGym:
  self.stage[ind] = "up"
  self.count[ind] += 1

- elif self.pose_type == "pushup":
+ elif self.pose_type == "pushup" or self.pose_type == "squat":
  if self.angle[ind] > self.poseup_angle:
  self.stage[ind] = "up"
  if self.angle[ind] < self.posedown_angle and self.stage[ind] == "up":
@@ -0,0 +1,197 @@
+ from itertools import cycle
+
+ import cv2
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
+ from matplotlib.figure import Figure
+
+
+ class Analytics:
+     """A class to create and update various types of charts (line, bar, pie) for visual analytics."""
+
+     def __init__(
+         self,
+         type,
+         writer,
+         im0_shape,
+         title="ultralytics",
+         x_label="x",
+         y_label="y",
+         bg_color="white",
+         fg_color="black",
+         line_color="yellow",
+         line_width=2,
+         fontsize=13,
+         view_img=False,
+         save_img=True,
+     ):
+         """
+         Initialize the Analytics class with various chart types.
+
+         Args:
+             type (str): Type of chart to initialize ('line', 'bar', or 'pie').
+             writer: Video writer object to save the frames.
+             im0_shape (tuple): Shape of the input image (width, height).
+             title (str): Title of the chart.
+             x_label (str): Label for the x-axis.
+             y_label (str): Label for the y-axis.
+             bg_color (str): Background color of the chart.
+             fg_color (str): Foreground (text) color of the chart.
+             line_color (str): Line color for line charts.
+             line_width (int): Width of the lines in line charts.
+             fontsize (int): Font size for chart text.
+             view_img (bool): Whether to display the image.
+             save_img (bool): Whether to save the image.
+         """
+
+         self.bg_color = bg_color
+         self.fg_color = fg_color
+         self.view_img = view_img
+         self.save_img = save_img
+         self.title = title
+         self.writer = writer
+
+         # Set figure size based on image shape
+         figsize = (im0_shape[0] / 100, im0_shape[1] / 100)
+
+         if type == "line":
+             # Initialize line plot
+             fig = Figure(facecolor=self.bg_color, figsize=figsize)
+             self.canvas = FigureCanvas(fig)
+             self.ax = fig.add_subplot(111, facecolor=self.bg_color)
+             (self.line,) = self.ax.plot([], [], color=line_color, linewidth=line_width)
+
+         elif type == "bar" or type == "pie":
+             # Initialize bar or pie plot
+             self.fig, self.ax = plt.subplots(figsize=figsize, facecolor=self.bg_color)
+             self.ax.set_facecolor(self.bg_color)
+             color_palette = [
+                 (31, 119, 180),
+                 (255, 127, 14),
+                 (44, 160, 44),
+                 (214, 39, 40),
+                 (148, 103, 189),
+                 (140, 86, 75),
+                 (227, 119, 194),
+                 (127, 127, 127),
+                 (188, 189, 34),
+                 (23, 190, 207),
+             ]
+             self.color_palette = [(r / 255, g / 255, b / 255, 1) for r, g, b in color_palette]
+             self.color_cycle = cycle(self.color_palette)
+             self.color_mapping = {}
+
+             # Ensure pie chart is circular
+             self.ax.axis("equal") if type == "pie" else None
+
+         # Set common axis properties
+         self.ax.set_title(self.title, color=self.fg_color, fontsize=fontsize)
+         self.ax.set_xlabel(x_label, color=self.fg_color, fontsize=fontsize - 3)
+         self.ax.set_ylabel(y_label, color=self.fg_color, fontsize=fontsize - 3)
+         self.ax.tick_params(axis="both", colors=self.fg_color)
+
+     def update_line(self, frame_number, total_counts):
+         """
+         Update the line graph with new data.
+
+         Args:
+             frame_number (int): The current frame number.
+             total_counts (int): The total counts to plot.
+         """
+
+         # Update line graph data
+         x_data = self.line.get_xdata()
+         y_data = self.line.get_ydata()
+         x_data = np.append(x_data, float(frame_number))
+         y_data = np.append(y_data, float(total_counts))
+         self.line.set_data(x_data, y_data)
+         self.ax.relim()
+         self.ax.autoscale_view()
+         self.canvas.draw()
+         im0 = np.array(self.canvas.renderer.buffer_rgba())
+         im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
+
+         # Display and save the updated graph
+         cv2.imshow(self.title, im0) if self.view_img else None
+         self.writer.write(im0) if self.save_img else None
+
+     def update_bar(self, count_dict):
+         """
+         Update the bar graph with new data.
+
+         Args:
+             count_dict (dict): Dictionary containing the count data to plot.
+         """
+
+         # Update bar graph data
+         self.ax.clear()
+         self.ax.set_facecolor(self.bg_color)
+         labels = list(count_dict.keys())
+         counts = list(count_dict.values())
+
+         # Map labels to colors
+         for label in labels:
+             if label not in self.color_mapping:
+                 self.color_mapping[label] = next(self.color_cycle)
+
+         colors = [self.color_mapping[label] for label in labels]
+
+         bars = self.ax.bar(labels, counts, color=colors)
+         for bar, count in zip(bars, counts):
+             self.ax.text(
+                 bar.get_x() + bar.get_width() / 2,
+                 bar.get_height(),
+                 str(count),
+                 ha="center",
+                 va="bottom",
+                 color=self.fg_color,
+             )
+
+         # Display and save the updated graph
+         canvas = FigureCanvas(self.fig)
+         canvas.draw()
+         buf = canvas.buffer_rgba()
+         im0 = np.asarray(buf)
+         im0 = cv2.cvtColor(im0, cv2.COLOR_RGBA2BGR)
+
+         self.writer.write(im0) if self.save_img else None
+         cv2.imshow(self.title, im0) if self.view_img else None
+
+     def update_pie(self, classes_dict):
+         """
+         Update the pie chart with new data.
+
+         Args:
+             classes_dict (dict): Dictionary containing the class data to plot.
+         """
+
+         # Update pie chart data
+         labels = list(classes_dict.keys())
+         sizes = list(classes_dict.values())
+         total = sum(sizes)
+         percentages = [size / total * 100 for size in sizes]
+         start_angle = 90
+         self.ax.clear()
+
+         # Create pie chart without labels inside the slices
+         wedges, autotexts = self.ax.pie(sizes, autopct=None, startangle=start_angle, textprops={"color": self.fg_color})
+
+         # Construct legend labels with percentages
+         legend_labels = [f"{label} ({percentage:.1f}%)" for label, percentage in zip(labels, percentages)]
+         self.ax.legend(wedges, legend_labels, title="Classes", loc="center left", bbox_to_anchor=(1, 0, 0.5, 1))
+
+         # Adjust layout to fit the legend
+         self.fig.tight_layout()
+         self.fig.subplots_adjust(left=0.1, right=0.75)
+
+         # Display and save the updated chart
+         im0 = self.fig.canvas.draw()
+         im0 = np.array(self.fig.canvas.renderer.buffer_rgba())
+         im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
+         self.writer.write(im0) if self.save_img else None
+         cv2.imshow(self.title, im0) if self.view_img else None
+
+
+ if __name__ == "__main__":
+     Analytics("line", writer=None, im0_shape=None)
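A hedged usage sketch of the new Analytics solution, based only on the constructor and update_line() shown above; the output path, codec, fps, frame size, and per-frame counts are illustrative stand-ins:

    import cv2

    from ultralytics.solutions import Analytics

    w, h, fps = 1280, 720, 30  # frame size matches im0_shape so the rendered chart fits the video
    writer = cv2.VideoWriter("analytics.avi", cv2.VideoWriter_fourcc(*"MJPG"), fps, (w, h))

    analytics = Analytics(type="line", writer=writer, im0_shape=(w, h), title="Counts per frame")
    for frame_number, total_counts in enumerate([3, 5, 4, 6]):  # stand-in object counts per frame
        analytics.update_line(frame_number, total_counts)

    writer.release()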
@@ -23,7 +23,6 @@ from ultralytics.utils import (
  ASSETS,
  AUTOINSTALL,
  IS_COLAB,
- IS_DOCKER,
  IS_JUPYTER,
  IS_KAGGLE,
  IS_PIP_PACKAGE,
@@ -322,17 +321,18 @@ def check_font(font="Arial.ttf"):
  return file


- def check_python(minimum: str = "3.8.0") -> bool:
+ def check_python(minimum: str = "3.8.0", hard: bool = True) -> bool:
  """
  Check current python version against the required minimum version.

  Args:
  minimum (str): Required minimum version of python.
+ hard (bool, optional): If True, raise an AssertionError if the requirement is not met.

  Returns:
  (bool): Whether the installed Python version meets the minimum constraints.
  """
- return check_version(PYTHON_VERSION, minimum, name="Python ", hard=True)
+ return check_version(PYTHON_VERSION, minimum, name="Python", hard=hard)


  @TryExcept()
@@ -735,4 +735,5 @@ def cuda_is_available() -> bool:


  # Define constants
+ IS_PYTHON_MINIMUM_3_10 = check_python("3.10", hard=False)
  IS_PYTHON_3_12 = PYTHON_VERSION.startswith("3.12")
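check_python gains a hard flag that is forwarded to check_version: with hard=False the call only returns a bool, which is what the new IS_PYTHON_MINIMUM_3_10 constant relies on, while the default hard=True raises when the minimum is not met. A short hedged sketch:

    from ultralytics.utils.checks import check_python

    IS_PYTHON_MINIMUM_3_10 = check_python("3.10", hard=False)  # True/False, never raises
    check_python("3.8")                                        # raises if the interpreter is older than 3.8
    print(IS_PYTHON_MINIMUM_3_10)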
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.2.19
+ Version: 8.2.21
  Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
  Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
@@ -1,4 +1,13 @@
- ultralytics/__init__.py,sha256=NObQc9-CGvcpMj8hOJ6v61xO8ImAy6eciiFIiIqfVVA,633
+ tests/__init__.py,sha256=9evx3lOdKZeY1iWXvH-FkMkgf8jLucWICoabzeD6aYg,626
+ tests/conftest.py,sha256=WOrMDmrxdYskt1nQmbPPhZ6zo1cJzS4vO7gVcKuEo2k,2545
+ tests/test_cli.py,sha256=BOvCGdTxYEKb5hPeKkDtfN2sX2FxjfmUYq9mbPKL7hU,4866
+ tests/test_cuda.py,sha256=Ga_fRdcfhKXwy3_C5x0a9P8MdsoIo1ko73IwaOqwtX0,4799
+ tests/test_engine.py,sha256=GA3igv3CTEMb4YYwZDlYPESv5RAuaZ-t3BscvBpQ5AY,4706
+ tests/test_explorer.py,sha256=r1pWer2y290Y0DqsM-La7egfEY0497YCdC4rwq3URV4,2178
+ tests/test_exports.py,sha256=TTmwlY3FAcVPffUFP9La0vpzNnfKYjej5nKqurCbwZg,6863
+ tests/test_integrations.py,sha256=J3D_LAy5gaawoLEMDrCj4HO8965o4kK1t4UBRot6EEw,5848
+ tests/test_python.py,sha256=invyej59wozoSZV4Rb7zwkgyVkRZrL149e-xb-c7xew,20086
+ ultralytics/__init__.py,sha256=t6lbjDI5WGi4FmWuecXODe58PRDP7YbTpDJlVIvyN_E,633
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
  ultralytics/cfg/__init__.py,sha256=lR6jykSO_0cigsjrqSyFj_8JG_LvYi796viasyWhcfs,21358
@@ -79,10 +88,10 @@ ultralytics/data/explorer/utils.py,sha256=EvvukQiQUTBrsZznmMnyEX2EqTuwZo_Geyc8yf
  ultralytics/data/explorer/gui/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/data/explorer/gui/dash.py,sha256=2oAbNroR2lfS45v53M1sRqZklLXbbj6qXqNxvplulC0,10087
  ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
- ultralytics/engine/exporter.py,sha256=n2_2oTirHfdPDoeSD1feRNvWAXZHugo92JB6W0GWeiw,58316
+ ultralytics/engine/exporter.py,sha256=c5Ky8_cElAjPm8y3-2-pJgn0bMGyqivvC2XhFH5gDsY,58222
  ultralytics/engine/model.py,sha256=IE6HE9VIzqO3DscxSLexub0LUR673eiPFrCPCt6ozEE,40103
  ultralytics/engine/predictor.py,sha256=wQRKdWGDTP5A6CS0gTC6U3RPDMhP3QkEzWSPm6eqCkU,17022
- ultralytics/engine/results.py,sha256=1ZY6eXb5uHmDShAXPmXZ-117ZlqeffEZLd2LqFgg8Ik,30975
+ ultralytics/engine/results.py,sha256=zRuEIrBtpoCQ3M6a_YscnyXrWSP-zpL3ACv0gTdrDaw,30987
  ultralytics/engine/trainer.py,sha256=P5XbPxh5hj4TLwuKeriuAlvcBWmILStm40-SgPrYvLk,35149
  ultralytics/engine/tuner.py,sha256=iZrgMmXSDpfuDu4bdFRflmAsscys2-8W8qAGxSyOVJE,11844
  ultralytics/engine/validator.py,sha256=Y21Uo8_Zto4qjk_YqQk6k7tyfpq_Qk9cfjeXeyDRxs8,14643
@@ -154,8 +163,9 @@ ultralytics/nn/modules/conv.py,sha256=Ywe87IhuaS22mR2JJ9xjnW8Sb-m7WTjxuqIxV_Dv8l
  ultralytics/nn/modules/head.py,sha256=3N_4zW1UvhI1jCrIxIkNYxQDdiW6HxtxpaNAAudq6NU,22236
  ultralytics/nn/modules/transformer.py,sha256=AxD9uURpCl-EqvXe3DiG6JW-pBzB16G-AahLdZ7yayo,17909
  ultralytics/nn/modules/utils.py,sha256=779QnnKp9v8jv251ESduTXJ0ol8HkIOLbGQWwEGQjhU,3196
- ultralytics/solutions/__init__.py,sha256=Ogwo0ckEqzULTiRopo6ZeqjBwlHgsg2WYyO_rGARtQA,490
- ultralytics/solutions/ai_gym.py,sha256=xpKBX37VmAjJpSPUfC8w1IQ0wqOcURAeWZ_3LoVrXSs,4639
+ ultralytics/solutions/__init__.py,sha256=ZeIPizmm2SM0e7KZ7nmh9WWNYChSszWwDmHB-WAfaAY,540
+ ultralytics/solutions/ai_gym.py,sha256=HDzzvBVFqWgQw2IgtEx5Eo3tEKbFRY3gkiVqax-4j2w,4683
+ ultralytics/solutions/analytics.py,sha256=ddv-5ertQAzZmhfIi9G6PK3Z1mK8mCKWUwRVgJACves,7050
  ultralytics/solutions/distance_calculation.py,sha256=pSIkyytHGRAaNzIrkkNkiOnSVWU1PYvURlCIV_jRORA,6505
  ultralytics/solutions/heatmap.py,sha256=AHXnmXhoQ95ph74zsdrvX_Lfy3wF0SsH0MIeTixE7Qg,10386
  ultralytics/solutions/object_counter.py,sha256=htcQGWJX1y-vXVV1yUiTDT3sm8ByItjSNfu2Rl2IEmk,10808
@@ -174,7 +184,7 @@ ultralytics/trackers/utils/matching.py,sha256=UxhSGa5pN6WoYwYSBAkkt-O7xMxUR47VuU
  ultralytics/utils/__init__.py,sha256=AjzSdFGfEPMPyFFX9JaONkmI5xgWLHFMO77aBA0ghpM,39518
  ultralytics/utils/autobatch.py,sha256=ygZ3f2ByIkcujB89ENcTnGWWnAQw5Pbg6nBuShg-5t4,3863
  ultralytics/utils/benchmarks.py,sha256=PlnUqhl2Om7jp7bKICDj9a2ABpJSl31VFI3ESnGdme8,23552
- ultralytics/utils/checks.py,sha256=lca-tqMVbMgsHXb_g-LaVqXv6zoDKFXHqc2SXvO0njM,28124
+ ultralytics/utils/checks.py,sha256=VRZH50Spdx1OVaPaUxgEArpF2iI1Re6bVBdMhvFUPoE,28280
  ultralytics/utils/dist.py,sha256=3HeNbY2gp7vYhcvVhsrvTrQXpQmgT8tpmnzApf3eQRA,2267
  ultralytics/utils/downloads.py,sha256=cmO2Ev1DV1m_lYgQ2yGDG5xVRIBVS_z9nS_Frec_NeU,21496
  ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
@@ -200,9 +210,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
  ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
  ultralytics/utils/callbacks/tensorboard.py,sha256=Z1veCVcn9THPhdplWuIzwlsW2yF7y-On9IZIk3khM0Y,4135
  ultralytics/utils/callbacks/wb.py,sha256=DViD0KeXH_i3eVT_CLR4bZFs1TMMUZBVBBYIS3aUfp0,6745
- ultralytics-8.2.19.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.2.19.dist-info/METADATA,sha256=rclTaqVO3NUEG2sUMvgs2nXlYADCmdzjQqFiqTofyN4,40694
- ultralytics-8.2.19.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- ultralytics-8.2.19.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.2.19.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.2.19.dist-info/RECORD,,
+ ultralytics-8.2.21.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.2.21.dist-info/METADATA,sha256=ghZ90c82ushTyA0ZcMQNYD8Gyb9sZpA05EWJRMsPKm8,40694
+ ultralytics-8.2.21.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ ultralytics-8.2.21.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.2.21.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.2.21.dist-info/RECORD,,