bplusplus 1.1.0__py3-none-any.whl → 1.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bplusplus might be problematic; further details are available on the registry's advisory page for this release.

Files changed (97)
  1. bplusplus/__init__.py +4 -2
  2. bplusplus/collect.py +72 -3
  3. bplusplus/hierarchical/test.py +670 -0
  4. bplusplus/hierarchical/train.py +676 -0
  5. bplusplus/prepare.py +236 -71
  6. bplusplus/resnet/test.py +473 -0
  7. bplusplus/resnet/train.py +329 -0
  8. bplusplus-1.2.1.dist-info/METADATA +252 -0
  9. bplusplus-1.2.1.dist-info/RECORD +12 -0
  10. bplusplus/yolov5detect/__init__.py +0 -1
  11. bplusplus/yolov5detect/detect.py +0 -444
  12. bplusplus/yolov5detect/export.py +0 -1530
  13. bplusplus/yolov5detect/insect.yaml +0 -8
  14. bplusplus/yolov5detect/models/__init__.py +0 -0
  15. bplusplus/yolov5detect/models/common.py +0 -1109
  16. bplusplus/yolov5detect/models/experimental.py +0 -130
  17. bplusplus/yolov5detect/models/hub/anchors.yaml +0 -56
  18. bplusplus/yolov5detect/models/hub/yolov3-spp.yaml +0 -52
  19. bplusplus/yolov5detect/models/hub/yolov3-tiny.yaml +0 -42
  20. bplusplus/yolov5detect/models/hub/yolov3.yaml +0 -52
  21. bplusplus/yolov5detect/models/hub/yolov5-bifpn.yaml +0 -49
  22. bplusplus/yolov5detect/models/hub/yolov5-fpn.yaml +0 -43
  23. bplusplus/yolov5detect/models/hub/yolov5-p2.yaml +0 -55
  24. bplusplus/yolov5detect/models/hub/yolov5-p34.yaml +0 -42
  25. bplusplus/yolov5detect/models/hub/yolov5-p6.yaml +0 -57
  26. bplusplus/yolov5detect/models/hub/yolov5-p7.yaml +0 -68
  27. bplusplus/yolov5detect/models/hub/yolov5-panet.yaml +0 -49
  28. bplusplus/yolov5detect/models/hub/yolov5l6.yaml +0 -61
  29. bplusplus/yolov5detect/models/hub/yolov5m6.yaml +0 -61
  30. bplusplus/yolov5detect/models/hub/yolov5n6.yaml +0 -61
  31. bplusplus/yolov5detect/models/hub/yolov5s-LeakyReLU.yaml +0 -50
  32. bplusplus/yolov5detect/models/hub/yolov5s-ghost.yaml +0 -49
  33. bplusplus/yolov5detect/models/hub/yolov5s-transformer.yaml +0 -49
  34. bplusplus/yolov5detect/models/hub/yolov5s6.yaml +0 -61
  35. bplusplus/yolov5detect/models/hub/yolov5x6.yaml +0 -61
  36. bplusplus/yolov5detect/models/segment/yolov5l-seg.yaml +0 -49
  37. bplusplus/yolov5detect/models/segment/yolov5m-seg.yaml +0 -49
  38. bplusplus/yolov5detect/models/segment/yolov5n-seg.yaml +0 -49
  39. bplusplus/yolov5detect/models/segment/yolov5s-seg.yaml +0 -49
  40. bplusplus/yolov5detect/models/segment/yolov5x-seg.yaml +0 -49
  41. bplusplus/yolov5detect/models/tf.py +0 -797
  42. bplusplus/yolov5detect/models/yolo.py +0 -495
  43. bplusplus/yolov5detect/models/yolov5l.yaml +0 -49
  44. bplusplus/yolov5detect/models/yolov5m.yaml +0 -49
  45. bplusplus/yolov5detect/models/yolov5n.yaml +0 -49
  46. bplusplus/yolov5detect/models/yolov5s.yaml +0 -49
  47. bplusplus/yolov5detect/models/yolov5x.yaml +0 -49
  48. bplusplus/yolov5detect/utils/__init__.py +0 -97
  49. bplusplus/yolov5detect/utils/activations.py +0 -134
  50. bplusplus/yolov5detect/utils/augmentations.py +0 -448
  51. bplusplus/yolov5detect/utils/autoanchor.py +0 -175
  52. bplusplus/yolov5detect/utils/autobatch.py +0 -70
  53. bplusplus/yolov5detect/utils/aws/__init__.py +0 -0
  54. bplusplus/yolov5detect/utils/aws/mime.sh +0 -26
  55. bplusplus/yolov5detect/utils/aws/resume.py +0 -41
  56. bplusplus/yolov5detect/utils/aws/userdata.sh +0 -27
  57. bplusplus/yolov5detect/utils/callbacks.py +0 -72
  58. bplusplus/yolov5detect/utils/dataloaders.py +0 -1385
  59. bplusplus/yolov5detect/utils/docker/Dockerfile +0 -73
  60. bplusplus/yolov5detect/utils/docker/Dockerfile-arm64 +0 -40
  61. bplusplus/yolov5detect/utils/docker/Dockerfile-cpu +0 -42
  62. bplusplus/yolov5detect/utils/downloads.py +0 -136
  63. bplusplus/yolov5detect/utils/flask_rest_api/README.md +0 -70
  64. bplusplus/yolov5detect/utils/flask_rest_api/example_request.py +0 -17
  65. bplusplus/yolov5detect/utils/flask_rest_api/restapi.py +0 -49
  66. bplusplus/yolov5detect/utils/general.py +0 -1294
  67. bplusplus/yolov5detect/utils/google_app_engine/Dockerfile +0 -25
  68. bplusplus/yolov5detect/utils/google_app_engine/additional_requirements.txt +0 -6
  69. bplusplus/yolov5detect/utils/google_app_engine/app.yaml +0 -16
  70. bplusplus/yolov5detect/utils/loggers/__init__.py +0 -476
  71. bplusplus/yolov5detect/utils/loggers/clearml/README.md +0 -222
  72. bplusplus/yolov5detect/utils/loggers/clearml/__init__.py +0 -0
  73. bplusplus/yolov5detect/utils/loggers/clearml/clearml_utils.py +0 -230
  74. bplusplus/yolov5detect/utils/loggers/clearml/hpo.py +0 -90
  75. bplusplus/yolov5detect/utils/loggers/comet/README.md +0 -250
  76. bplusplus/yolov5detect/utils/loggers/comet/__init__.py +0 -551
  77. bplusplus/yolov5detect/utils/loggers/comet/comet_utils.py +0 -151
  78. bplusplus/yolov5detect/utils/loggers/comet/hpo.py +0 -126
  79. bplusplus/yolov5detect/utils/loggers/comet/optimizer_config.json +0 -135
  80. bplusplus/yolov5detect/utils/loggers/wandb/__init__.py +0 -0
  81. bplusplus/yolov5detect/utils/loggers/wandb/wandb_utils.py +0 -210
  82. bplusplus/yolov5detect/utils/loss.py +0 -259
  83. bplusplus/yolov5detect/utils/metrics.py +0 -381
  84. bplusplus/yolov5detect/utils/plots.py +0 -517
  85. bplusplus/yolov5detect/utils/segment/__init__.py +0 -0
  86. bplusplus/yolov5detect/utils/segment/augmentations.py +0 -100
  87. bplusplus/yolov5detect/utils/segment/dataloaders.py +0 -366
  88. bplusplus/yolov5detect/utils/segment/general.py +0 -160
  89. bplusplus/yolov5detect/utils/segment/loss.py +0 -198
  90. bplusplus/yolov5detect/utils/segment/metrics.py +0 -225
  91. bplusplus/yolov5detect/utils/segment/plots.py +0 -152
  92. bplusplus/yolov5detect/utils/torch_utils.py +0 -482
  93. bplusplus/yolov5detect/utils/triton.py +0 -90
  94. bplusplus-1.1.0.dist-info/METADATA +0 -179
  95. bplusplus-1.1.0.dist-info/RECORD +0 -92
  96. {bplusplus-1.1.0.dist-info → bplusplus-1.2.1.dist-info}/LICENSE +0 -0
  97. {bplusplus-1.1.0.dist-info → bplusplus-1.2.1.dist-info}/WHEEL +0 -0
bplusplus/yolov5detect/models/yolov5x.yaml @@ -1,49 +0,0 @@
1
- # Ultralytics YOLOv5 🚀, AGPL-3.0 license
2
-
3
- # Parameters
4
- nc: 80 # number of classes
5
- depth_multiple: 1.33 # model depth multiple
6
- width_multiple: 1.25 # layer channel multiple
7
- anchors:
8
- - [10, 13, 16, 30, 33, 23] # P3/8
9
- - [30, 61, 62, 45, 59, 119] # P4/16
10
- - [116, 90, 156, 198, 373, 326] # P5/32
11
-
12
- # YOLOv5 v6.0 backbone
13
- backbone:
14
- # [from, number, module, args]
15
- [
16
- [-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2
17
- [-1, 1, Conv, [128, 3, 2]], # 1-P2/4
18
- [-1, 3, C3, [128]],
19
- [-1, 1, Conv, [256, 3, 2]], # 3-P3/8
20
- [-1, 6, C3, [256]],
21
- [-1, 1, Conv, [512, 3, 2]], # 5-P4/16
22
- [-1, 9, C3, [512]],
23
- [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32
24
- [-1, 3, C3, [1024]],
25
- [-1, 1, SPPF, [1024, 5]], # 9
26
- ]
27
-
28
- # YOLOv5 v6.0 head
29
- head: [
30
- [-1, 1, Conv, [512, 1, 1]],
31
- [-1, 1, nn.Upsample, [None, 2, "nearest"]],
32
- [[-1, 6], 1, Concat, [1]], # cat backbone P4
33
- [-1, 3, C3, [512, False]], # 13
34
-
35
- [-1, 1, Conv, [256, 1, 1]],
36
- [-1, 1, nn.Upsample, [None, 2, "nearest"]],
37
- [[-1, 4], 1, Concat, [1]], # cat backbone P3
38
- [-1, 3, C3, [256, False]], # 17 (P3/8-small)
39
-
40
- [-1, 1, Conv, [256, 3, 2]],
41
- [[-1, 14], 1, Concat, [1]], # cat head P4
42
- [-1, 3, C3, [512, False]], # 20 (P4/16-medium)
43
-
44
- [-1, 1, Conv, [512, 3, 2]],
45
- [[-1, 10], 1, Concat, [1]], # cat head P5
46
- [-1, 3, C3, [1024, False]], # 23 (P5/32-large)
47
-
48
- [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5)
49
- ]
bplusplus/yolov5detect/utils/__init__.py @@ -1,97 +0,0 @@
1
- # Ultralytics YOLOv5 🚀, AGPL-3.0 license
2
- """utils/initialization."""
3
-
4
- import contextlib
5
- import platform
6
- import threading
7
-
8
-
9
- def emojis(str=""):
10
- """Returns an emoji-safe version of a string, stripped of emojis on Windows platforms."""
11
- return str.encode().decode("ascii", "ignore") if platform.system() == "Windows" else str
12
-
13
-
14
- class TryExcept(contextlib.ContextDecorator):
15
- """A context manager and decorator for error handling that prints an optional message with emojis on exception."""
16
-
17
- def __init__(self, msg=""):
18
- """Initializes TryExcept with an optional message, used as a decorator or context manager for error handling."""
19
- self.msg = msg
20
-
21
- def __enter__(self):
22
- """Enter the runtime context related to this object for error handling with an optional message."""
23
- pass
24
-
25
- def __exit__(self, exc_type, value, traceback):
26
- """Context manager exit method that prints an error message with emojis if an exception occurred, always returns
27
- True.
28
- """
29
- if value:
30
- print(emojis(f"{self.msg}{': ' if self.msg else ''}{value}"))
31
- return True
32
-
33
-
34
- def threaded(func):
35
- """Decorator @threaded to run a function in a separate thread, returning the thread instance."""
36
-
37
- def wrapper(*args, **kwargs):
38
- """Runs the decorated function in a separate daemon thread and returns the thread instance."""
39
- thread = threading.Thread(target=func, args=args, kwargs=kwargs, daemon=True)
40
- thread.start()
41
- return thread
42
-
43
- return wrapper
44
-
45
-
46
- def join_threads(verbose=False):
47
- """
48
- Joins all daemon threads, optionally printing their names if verbose is True.
49
-
50
- Example: atexit.register(lambda: join_threads())
51
- """
52
- main_thread = threading.current_thread()
53
- for t in threading.enumerate():
54
- if t is not main_thread:
55
- if verbose:
56
- print(f"Joining thread {t.name}")
57
- t.join()
58
-
59
-
60
- def notebook_init(verbose=True):
61
- """Initializes notebook environment by checking requirements, cleaning up, and displaying system info."""
62
- print("Checking setup...")
63
-
64
- import os
65
- import shutil
66
-
67
- from ultralytics.utils.checks import check_requirements
68
-
69
- from utils.general import check_font, is_colab
70
- from utils.torch_utils import select_device # imports
71
-
72
- check_font()
73
-
74
- import psutil
75
-
76
- if check_requirements("wandb", install=False):
77
- os.system("pip uninstall -y wandb") # eliminate unexpected account creation prompt with infinite hang
78
- if is_colab():
79
- shutil.rmtree("/content/sample_data", ignore_errors=True) # remove colab /sample_data directory
80
-
81
- # System info
82
- display = None
83
- if verbose:
84
- gb = 1 << 30 # bytes to GiB (1024 ** 3)
85
- ram = psutil.virtual_memory().total
86
- total, used, free = shutil.disk_usage("/")
87
- with contextlib.suppress(Exception): # clear display if ipython is installed
88
- from IPython import display
89
-
90
- display.clear_output()
91
- s = f"({os.cpu_count()} CPUs, {ram / gb:.1f} GB RAM, {(total - free) / gb:.1f}/{total / gb:.1f} GB disk)"
92
- else:
93
- s = ""
94
-
95
- select_device(newline=False)
96
- print(emojis(f"Setup complete ✅ {s}"))
97
- return display
bplusplus/yolov5detect/utils/activations.py @@ -1,134 -0,0 @@
1
- # Ultralytics YOLOv5 🚀, AGPL-3.0 license
2
- """Activation functions."""
3
-
4
- import torch
5
- import torch.nn as nn
6
- import torch.nn.functional as F
7
-
8
-
9
- class SiLU(nn.Module):
10
- """Applies the Sigmoid-weighted Linear Unit (SiLU) activation function, also known as Swish."""
11
-
12
- @staticmethod
13
- def forward(x):
14
- """
15
- Applies the Sigmoid-weighted Linear Unit (SiLU) activation function.
16
-
17
- https://arxiv.org/pdf/1606.08415.pdf.
18
- """
19
- return x * torch.sigmoid(x)
20
-
21
-
22
- class Hardswish(nn.Module):
23
- """Applies the Hardswish activation function, which is efficient for mobile and embedded devices."""
24
-
25
- @staticmethod
26
- def forward(x):
27
- """
28
- Applies the Hardswish activation function, compatible with TorchScript, CoreML, and ONNX.
29
-
30
- Equivalent to x * F.hardsigmoid(x)
31
- """
32
- return x * F.hardtanh(x + 3, 0.0, 6.0) / 6.0 # for TorchScript, CoreML and ONNX
33
-
34
-
35
- class Mish(nn.Module):
36
- """Mish activation https://github.com/digantamisra98/Mish."""
37
-
38
- @staticmethod
39
- def forward(x):
40
- """Applies the Mish activation function, a smooth alternative to ReLU."""
41
- return x * F.softplus(x).tanh()
42
-
43
-
44
- class MemoryEfficientMish(nn.Module):
45
- """Efficiently applies the Mish activation function using custom autograd for reduced memory usage."""
46
-
47
- class F(torch.autograd.Function):
48
- """Implements a custom autograd function for memory-efficient Mish activation."""
49
-
50
- @staticmethod
51
- def forward(ctx, x):
52
- """Applies the Mish activation function, a smooth ReLU alternative, to the input tensor `x`."""
53
- ctx.save_for_backward(x)
54
- return x.mul(torch.tanh(F.softplus(x))) # x * tanh(ln(1 + exp(x)))
55
-
56
- @staticmethod
57
- def backward(ctx, grad_output):
58
- """Computes the gradient of the Mish activation function with respect to input `x`."""
59
- x = ctx.saved_tensors[0]
60
- sx = torch.sigmoid(x)
61
- fx = F.softplus(x).tanh()
62
- return grad_output * (fx + x * sx * (1 - fx * fx))
63
-
64
- def forward(self, x):
65
- """Applies the Mish activation function to the input tensor `x`."""
66
- return self.F.apply(x)
67
-
68
-
69
- class FReLU(nn.Module):
70
- """FReLU activation https://arxiv.org/abs/2007.11824."""
71
-
72
- def __init__(self, c1, k=3): # ch_in, kernel
73
- """Initializes FReLU activation with channel `c1` and kernel size `k`."""
74
- super().__init__()
75
- self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1, bias=False)
76
- self.bn = nn.BatchNorm2d(c1)
77
-
78
- def forward(self, x):
79
- """
80
- Applies FReLU activation with max operation between input and BN-convolved input.
81
-
82
- https://arxiv.org/abs/2007.11824
83
- """
84
- return torch.max(x, self.bn(self.conv(x)))
85
-
86
-
87
- class AconC(nn.Module):
88
- """
89
- ACON activation (activate or not) function.
90
-
91
- AconC: (p1*x-p2*x) * sigmoid(beta*(p1*x-p2*x)) + p2*x, beta is a learnable parameter
92
- See "Activate or Not: Learning Customized Activation" https://arxiv.org/pdf/2009.04759.pdf.
93
- """
94
-
95
- def __init__(self, c1):
96
- """Initializes AconC with learnable parameters p1, p2, and beta for channel-wise activation control."""
97
- super().__init__()
98
- self.p1 = nn.Parameter(torch.randn(1, c1, 1, 1))
99
- self.p2 = nn.Parameter(torch.randn(1, c1, 1, 1))
100
- self.beta = nn.Parameter(torch.ones(1, c1, 1, 1))
101
-
102
- def forward(self, x):
103
- """Applies AconC activation function with learnable parameters for channel-wise control on input tensor x."""
104
- dpx = (self.p1 - self.p2) * x
105
- return dpx * torch.sigmoid(self.beta * dpx) + self.p2 * x
106
-
107
-
108
- class MetaAconC(nn.Module):
109
- """
110
- ACON activation (activate or not) function.
111
-
112
- AconC: (p1*x-p2*x) * sigmoid(beta*(p1*x-p2*x)) + p2*x, beta is a learnable parameter
113
- See "Activate or Not: Learning Customized Activation" https://arxiv.org/pdf/2009.04759.pdf.
114
- """
115
-
116
- def __init__(self, c1, k=1, s=1, r=16):
117
- """Initializes MetaAconC with params: channel_in (c1), kernel size (k=1), stride (s=1), reduction (r=16)."""
118
- super().__init__()
119
- c2 = max(r, c1 // r)
120
- self.p1 = nn.Parameter(torch.randn(1, c1, 1, 1))
121
- self.p2 = nn.Parameter(torch.randn(1, c1, 1, 1))
122
- self.fc1 = nn.Conv2d(c1, c2, k, s, bias=True)
123
- self.fc2 = nn.Conv2d(c2, c1, k, s, bias=True)
124
- # self.bn1 = nn.BatchNorm2d(c2)
125
- # self.bn2 = nn.BatchNorm2d(c1)
126
-
127
- def forward(self, x):
128
- """Applies a forward pass transforming input `x` using learnable parameters and sigmoid activation."""
129
- y = x.mean(dim=2, keepdims=True).mean(dim=3, keepdims=True)
130
- # batch-size 1 bug/instabilities https://github.com/ultralytics/yolov5/issues/2891
131
- # beta = torch.sigmoid(self.bn2(self.fc2(self.bn1(self.fc1(y))))) # bug/unstable
132
- beta = torch.sigmoid(self.fc2(self.fc1(y))) # bug patch BN layers removed
133
- dpx = (self.p1 - self.p2) * x
134
- return dpx * torch.sigmoid(beta * dpx) + self.p2 * x