python-wml 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of python-wml has been flagged as potentially problematic; consult the registry's advisory details for more information.
- python_wml-3.0.0.dist-info/LICENSE +23 -0
- python_wml-3.0.0.dist-info/METADATA +51 -0
- python_wml-3.0.0.dist-info/RECORD +164 -0
- python_wml-3.0.0.dist-info/WHEEL +5 -0
- python_wml-3.0.0.dist-info/top_level.txt +1 -0
- wml/__init__.py +0 -0
- wml/basic_data_def/__init__.py +2 -0
- wml/basic_data_def/detection_data_def.py +279 -0
- wml/basic_data_def/io_data_def.py +2 -0
- wml/basic_img_utils.py +816 -0
- wml/img_patch.py +92 -0
- wml/img_utils.py +571 -0
- wml/iotoolkit/__init__.py +17 -0
- wml/iotoolkit/aic_keypoint.py +115 -0
- wml/iotoolkit/baidu_mask_toolkit.py +244 -0
- wml/iotoolkit/base_dataset.py +210 -0
- wml/iotoolkit/bboxes_statistics.py +515 -0
- wml/iotoolkit/build.py +0 -0
- wml/iotoolkit/cityscapes_toolkit.py +183 -0
- wml/iotoolkit/classification_data_statistics.py +25 -0
- wml/iotoolkit/coco_data_fwd.py +225 -0
- wml/iotoolkit/coco_keypoints.py +118 -0
- wml/iotoolkit/coco_keypoints_fmt2.py +103 -0
- wml/iotoolkit/coco_toolkit.py +397 -0
- wml/iotoolkit/coco_wholebody.py +269 -0
- wml/iotoolkit/common.py +108 -0
- wml/iotoolkit/crowd_pose.py +146 -0
- wml/iotoolkit/fast_labelme.py +110 -0
- wml/iotoolkit/image_folder.py +95 -0
- wml/iotoolkit/imgs_cache.py +58 -0
- wml/iotoolkit/imgs_reader_mt.py +73 -0
- wml/iotoolkit/labelme_base.py +102 -0
- wml/iotoolkit/labelme_json_to_img.py +49 -0
- wml/iotoolkit/labelme_toolkit.py +117 -0
- wml/iotoolkit/labelme_toolkit_fwd.py +733 -0
- wml/iotoolkit/labelmemckeypoints_dataset.py +169 -0
- wml/iotoolkit/lspet.py +48 -0
- wml/iotoolkit/mapillary_vistas_toolkit.py +269 -0
- wml/iotoolkit/mat_data.py +90 -0
- wml/iotoolkit/mckeypoints_statistics.py +28 -0
- wml/iotoolkit/mot_datasets.py +62 -0
- wml/iotoolkit/mpii.py +108 -0
- wml/iotoolkit/npmckeypoints_dataset.py +164 -0
- wml/iotoolkit/o365_to_coco.py +136 -0
- wml/iotoolkit/object365_toolkit.py +156 -0
- wml/iotoolkit/object365v2_toolkit.py +71 -0
- wml/iotoolkit/pascal_voc_data.py +51 -0
- wml/iotoolkit/pascal_voc_toolkit.py +194 -0
- wml/iotoolkit/pascal_voc_toolkit_fwd.py +473 -0
- wml/iotoolkit/penn_action.py +57 -0
- wml/iotoolkit/rawframe_dataset.py +129 -0
- wml/iotoolkit/rewrite_pascal_voc.py +28 -0
- wml/iotoolkit/semantic_data.py +49 -0
- wml/iotoolkit/split_file_by_type.py +29 -0
- wml/iotoolkit/sports_mot_datasets.py +78 -0
- wml/iotoolkit/vis_objectdetection_dataset.py +70 -0
- wml/iotoolkit/vis_torch_data.py +39 -0
- wml/iotoolkit/yolo_toolkit.py +38 -0
- wml/object_detection2/__init__.py +4 -0
- wml/object_detection2/basic_visualization.py +37 -0
- wml/object_detection2/bboxes.py +812 -0
- wml/object_detection2/data_process_toolkit.py +146 -0
- wml/object_detection2/keypoints.py +292 -0
- wml/object_detection2/mask.py +120 -0
- wml/object_detection2/metrics/__init__.py +3 -0
- wml/object_detection2/metrics/build.py +15 -0
- wml/object_detection2/metrics/classifier_toolkit.py +440 -0
- wml/object_detection2/metrics/common.py +71 -0
- wml/object_detection2/metrics/mckps_toolkit.py +338 -0
- wml/object_detection2/metrics/toolkit.py +1953 -0
- wml/object_detection2/npod_toolkit.py +361 -0
- wml/object_detection2/odtools.py +243 -0
- wml/object_detection2/standard_names.py +75 -0
- wml/object_detection2/visualization.py +956 -0
- wml/object_detection2/wmath.py +34 -0
- wml/semantic/__init__.py +0 -0
- wml/semantic/basic_toolkit.py +65 -0
- wml/semantic/mask_utils.py +156 -0
- wml/semantic/semantic_test.py +21 -0
- wml/semantic/structures.py +1 -0
- wml/semantic/toolkit.py +105 -0
- wml/semantic/visualization_utils.py +658 -0
- wml/threadtoolkit.py +50 -0
- wml/walgorithm.py +228 -0
- wml/wcollections.py +212 -0
- wml/wfilesystem.py +487 -0
- wml/wml_utils.py +657 -0
- wml/wstructures/__init__.py +4 -0
- wml/wstructures/common.py +9 -0
- wml/wstructures/keypoints_train_toolkit.py +149 -0
- wml/wstructures/kps_structures.py +579 -0
- wml/wstructures/mask_structures.py +1161 -0
- wml/wtorch/__init__.py +8 -0
- wml/wtorch/bboxes.py +104 -0
- wml/wtorch/classes_suppression.py +24 -0
- wml/wtorch/conv_module.py +181 -0
- wml/wtorch/conv_ws.py +144 -0
- wml/wtorch/data/__init__.py +16 -0
- wml/wtorch/data/_utils/__init__.py +45 -0
- wml/wtorch/data/_utils/collate.py +183 -0
- wml/wtorch/data/_utils/fetch.py +47 -0
- wml/wtorch/data/_utils/pin_memory.py +121 -0
- wml/wtorch/data/_utils/signal_handling.py +72 -0
- wml/wtorch/data/_utils/worker.py +227 -0
- wml/wtorch/data/base_data_loader_iter.py +93 -0
- wml/wtorch/data/dataloader.py +501 -0
- wml/wtorch/data/datapipes/__init__.py +1 -0
- wml/wtorch/data/datapipes/iter/__init__.py +12 -0
- wml/wtorch/data/datapipes/iter/batch.py +126 -0
- wml/wtorch/data/datapipes/iter/callable.py +92 -0
- wml/wtorch/data/datapipes/iter/listdirfiles.py +37 -0
- wml/wtorch/data/datapipes/iter/loadfilesfromdisk.py +30 -0
- wml/wtorch/data/datapipes/iter/readfilesfromtar.py +60 -0
- wml/wtorch/data/datapipes/iter/readfilesfromzip.py +63 -0
- wml/wtorch/data/datapipes/iter/sampler.py +94 -0
- wml/wtorch/data/datapipes/utils/__init__.py +0 -0
- wml/wtorch/data/datapipes/utils/common.py +65 -0
- wml/wtorch/data/dataset.py +354 -0
- wml/wtorch/data/datasets/__init__.py +4 -0
- wml/wtorch/data/datasets/common.py +53 -0
- wml/wtorch/data/datasets/listdirfilesdataset.py +36 -0
- wml/wtorch/data/datasets/loadfilesfromdiskdataset.py +30 -0
- wml/wtorch/data/distributed.py +135 -0
- wml/wtorch/data/multi_processing_data_loader_iter.py +866 -0
- wml/wtorch/data/sampler.py +267 -0
- wml/wtorch/data/single_process_data_loader_iter.py +24 -0
- wml/wtorch/data/test_data_loader.py +26 -0
- wml/wtorch/dataset_toolkit.py +67 -0
- wml/wtorch/depthwise_separable_conv_module.py +98 -0
- wml/wtorch/dist.py +591 -0
- wml/wtorch/dropblock/__init__.py +6 -0
- wml/wtorch/dropblock/dropblock.py +228 -0
- wml/wtorch/dropblock/dropout.py +40 -0
- wml/wtorch/dropblock/scheduler.py +48 -0
- wml/wtorch/ema.py +61 -0
- wml/wtorch/fc_module.py +73 -0
- wml/wtorch/functional.py +34 -0
- wml/wtorch/iter_dataset.py +26 -0
- wml/wtorch/loss.py +69 -0
- wml/wtorch/nets/__init__.py +0 -0
- wml/wtorch/nets/ckpt_toolkit.py +219 -0
- wml/wtorch/nets/fpn.py +276 -0
- wml/wtorch/nets/hrnet/__init__.py +0 -0
- wml/wtorch/nets/hrnet/config.py +2 -0
- wml/wtorch/nets/hrnet/hrnet.py +494 -0
- wml/wtorch/nets/misc.py +249 -0
- wml/wtorch/nets/resnet/__init__.py +0 -0
- wml/wtorch/nets/resnet/layers/__init__.py +17 -0
- wml/wtorch/nets/resnet/layers/aspp.py +144 -0
- wml/wtorch/nets/resnet/layers/batch_norm.py +231 -0
- wml/wtorch/nets/resnet/layers/blocks.py +111 -0
- wml/wtorch/nets/resnet/layers/wrappers.py +110 -0
- wml/wtorch/nets/resnet/r50_config.py +38 -0
- wml/wtorch/nets/resnet/resnet.py +691 -0
- wml/wtorch/nets/shape_spec.py +20 -0
- wml/wtorch/nets/simple_fpn.py +101 -0
- wml/wtorch/nms.py +109 -0
- wml/wtorch/nn.py +896 -0
- wml/wtorch/ocr_block.py +193 -0
- wml/wtorch/summary.py +331 -0
- wml/wtorch/train_toolkit.py +603 -0
- wml/wtorch/transformer_blocks.py +266 -0
- wml/wtorch/utils.py +719 -0
- wml/wtorch/wlr_scheduler.py +100 -0
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
from torch.optim.lr_scheduler import _LRScheduler
|
|
2
|
+
import math
|
|
3
|
+
|
|
4
|
+
class WarmupCosLR(_LRScheduler):
    """Cosine-annealing learning-rate schedule with a quadratic warmup phase.

    Phase 1 (``last_epoch <= warmup_total_iters``): LR ramps quadratically
    from ``warmup_lr_start`` up to each group's base LR.
    Phase 2 (warmup end .. ``total_iters``): LR follows a half cosine from
    the base LR down to ``base_lr * min_lr_ratio``.
    Phase 3 (``last_epoch >= total_iters``): LR is held at the floor value.

    Args:
        optimizer: Wrapped optimizer.
        warmup_total_iters (int): Length of the warmup phase in iterations.
        total_iters (int): Iteration at which the cosine decay bottoms out.
        warmup_lr_start (float): LR at the very first iteration.
        min_lr_ratio (float): Floor LR expressed as a fraction of the base LR.
        last_epoch (int): Index of the last iteration. Default: -1.
        verbose (bool): Forwarded to the base scheduler. Default: ``False``.
    """

    def __init__(self, optimizer, warmup_total_iters=1000, total_iters=120000,
                 warmup_lr_start=1e-6, min_lr_ratio=0.01, last_epoch=-1, verbose=False):
        self.warmup_lr_start = warmup_lr_start
        self.warmup_total_iters = warmup_total_iters
        self.total_iters = total_iters
        self.min_lr_ratio = min_lr_ratio
        # The base-class __init__ invokes step()/get_lr(), so every attribute
        # read there must already exist before delegating.
        super().__init__(optimizer, last_epoch, verbose)

    def get_lr(self):
        return [self.__get_lr(base_lr) for base_lr in self.base_lrs]

    def __get_lr(self, lr):
        """Return the LR for one parameter group at the current iteration."""
        step = self.last_epoch
        warmup_iters = self.warmup_total_iters
        floor_lr = lr * self.min_lr_ratio

        if step <= warmup_iters:
            # Quadratic ramp: warmup_lr_start -> lr over the warmup phase.
            frac = step / float(warmup_iters)
            return (lr - self.warmup_lr_start) * frac ** 2 + self.warmup_lr_start
        if step >= self.total_iters:
            return floor_lr
        # Half-cosine decay from lr down to floor_lr.
        progress = (step - warmup_iters) / (self.total_iters - warmup_iters)
        return floor_lr + 0.5 * (lr - floor_lr) * (1.0 + math.cos(math.pi * progress))
|
40
|
+
|
|
41
|
+
class WarmupStepLR(_LRScheduler):
    """Step-decay LR schedule preceded by a quadratic warmup phase.

    Decays the learning rate of each parameter group by gamma every
    step_size epochs, after first ramping each group's LR quadratically
    from ``warmup_lr_start`` up to its base LR over ``warmup_total_iters``
    iterations. Notice that such decay can happen simultaneously with
    other changes to the learning rate from outside this scheduler. When
    last_epoch=-1, sets initial lr as lr.

    Args:
        optimizer (Optimizer): Wrapped optimizer.
        step_size (int): Period of learning rate decay.
        gamma (float): Multiplicative factor of learning rate decay.
            Default: 0.1.
        warmup_total_iters (int): Length of the warmup phase in iterations.
            Default: 1000.
        total_iters (int): Total iteration budget; stored for interface
            symmetry with WarmupCosLR but not used by the step decay itself.
        warmup_lr_start (float): LR at the very first iteration. Default: 0.
        last_epoch (int): The index of last epoch. Default: -1.
        verbose (bool): If ``True``, prints a message to stdout for
            each update. Default: ``False``.

    Example:
        >>> # Assuming optimizer uses lr = 0.05 for all groups
        >>> # lr = 0.05 if epoch < 30
        >>> # lr = 0.005 if 30 <= epoch < 60
        >>> # lr = 0.0005 if 60 <= epoch < 90
        >>> # ...
        >>> scheduler = WarmupStepLR(optimizer, step_size=30, gamma=0.1)
        >>> for epoch in range(100):
        >>>     train(...)
        >>>     validate(...)
        >>>     scheduler.step()
    """

    def __init__(self, optimizer, step_size, gamma=0.1, warmup_total_iters=1000, total_iters=120000, warmup_lr_start=0, last_epoch=-1, verbose=False):
        self.step_size = step_size
        self.gamma = gamma
        # BUG FIX: these three were never stored, so get_lr() /
        # __get_warmup_lr() raised AttributeError on the very first step
        # (which the base-class __init__ triggers immediately).
        self.warmup_total_iters = warmup_total_iters
        self.total_iters = total_iters
        self.warmup_lr_start = warmup_lr_start
        # The base-class __init__ invokes step()/get_lr(); all attributes
        # read there must be initialized before delegating.
        super().__init__(optimizer, last_epoch, verbose)

    def get_lr(self):
        if not self._get_lr_called_within_step:
            print("To get the last learning rate computed by the scheduler, "
                  "please use `get_last_lr()`.")

        iters = self.last_epoch
        warmup_total_iters = self.warmup_total_iters
        # Guard warmup_total_iters > 0: avoids a division by zero in the
        # warmup formula when warmup is disabled.
        if warmup_total_iters > 0 and iters <= warmup_total_iters:
            return [self.__get_warmup_lr(x) for x in self.base_lrs]
        if (self.last_epoch == 0) or (self.last_epoch % self.step_size != 0):
            # Not at a decay boundary: keep the current LRs unchanged.
            return [group['lr'] for group in self.optimizer.param_groups]
        return [group['lr'] * self.gamma
                for group in self.optimizer.param_groups]

    def _get_closed_form_lr(self):
        # Closed-form LR ignoring warmup; used by the base class when an
        # explicit epoch index is supplied to step().
        return [base_lr * self.gamma ** (self.last_epoch // self.step_size)
                for base_lr in self.base_lrs]

    def __get_warmup_lr(self, lr):
        """Quadratic ramp from warmup_lr_start up to *lr* over the warmup phase."""
        frac = self.last_epoch / float(self.warmup_total_iters)
        return (lr - self.warmup_lr_start) * pow(frac, 2) + self.warmup_lr_start
|