ocnn 2.2.1__tar.gz → 2.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. {ocnn-2.2.1 → ocnn-2.2.2}/LICENSE +21 -21
  2. {ocnn-2.2.1 → ocnn-2.2.2}/MANIFEST.in +1 -1
  3. {ocnn-2.2.1/ocnn.egg-info → ocnn-2.2.2}/PKG-INFO +67 -63
  4. {ocnn-2.2.1 → ocnn-2.2.2}/README.md +48 -48
  5. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/__init__.py +24 -24
  6. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/dataset.py +160 -158
  7. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/__init__.py +29 -27
  8. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/autoencoder.py +155 -165
  9. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/hrnet.py +192 -192
  10. ocnn-2.2.2/ocnn/models/image2shape.py +128 -0
  11. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/lenet.py +46 -46
  12. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/ounet.py +94 -94
  13. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/resnet.py +53 -53
  14. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/segnet.py +72 -72
  15. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/models/unet.py +105 -105
  16. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/modules/__init__.py +20 -20
  17. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/modules/modules.py +193 -231
  18. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/modules/resblocks.py +124 -124
  19. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/__init__.py +42 -42
  20. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree2col.py +53 -53
  21. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree2vox.py +50 -50
  22. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_align.py +46 -46
  23. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_conv.py +411 -411
  24. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_drop.py +55 -55
  25. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_dwconv.py +204 -204
  26. ocnn-2.2.2/ocnn/nn/octree_gconv.py +79 -0
  27. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_interp.py +196 -196
  28. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_norm.py +86 -86
  29. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_pad.py +39 -39
  30. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/nn/octree_pool.py +200 -200
  31. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/octree/__init__.py +22 -21
  32. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/octree/octree.py +639 -601
  33. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/octree/points.py +317 -298
  34. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/octree/shuffled_key.py +115 -115
  35. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn/utils.py +202 -153
  36. {ocnn-2.2.1 → ocnn-2.2.2/ocnn.egg-info}/PKG-INFO +67 -63
  37. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn.egg-info/SOURCES.txt +2 -0
  38. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn.egg-info/not-zip-safe +1 -1
  39. ocnn-2.2.2/ocnn.egg-info/requires.txt +4 -0
  40. {ocnn-2.2.1 → ocnn-2.2.2}/setup.cfg +9 -9
  41. {ocnn-2.2.1 → ocnn-2.2.2}/setup.py +35 -35
  42. ocnn-2.2.1/ocnn.egg-info/requires.txt +0 -2
  43. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn.egg-info/dependency_links.txt +0 -0
  44. {ocnn-2.2.1 → ocnn-2.2.2}/ocnn.egg-info/top_level.txt +0 -0
@@ -1,21 +1,21 @@
1
- MIT License
2
-
3
- Copyright (c) 2022 Peng-Shuai Wang <wangps@hotmail.com>
4
-
5
- Permission is hereby granted, free of charge, to any person obtaining a copy
6
- of this software and associated documentation files (the "Software"), to deal
7
- in the Software without restriction, including without limitation the rights
8
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
- copies of the Software, and to permit persons to whom the Software is
10
- furnished to do so, subject to the following conditions:
11
-
12
- The above copyright notice and this permission notice shall be included in
13
- all copies or substantial portions of the Software.
14
-
15
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
- THE SOFTWARE.
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Peng-Shuai Wang <wangps@hotmail.com>
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in
13
+ all copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
+ THE SOFTWARE.
@@ -1 +1 @@
1
- recursive-exclude test *
1
+ recursive-exclude test *
@@ -1,63 +1,67 @@
1
- Metadata-Version: 2.1
2
- Name: ocnn
3
- Version: 2.2.1
4
- Summary: Octree-based Sparse Convolutional Neural Networks
5
- Home-page: https://github.com/octree-nn/ocnn-pytorch
6
- Author: Peng-Shuai Wang
7
- Author-email: wangps@hotmail.com
8
- License: MIT
9
- Classifier: Programming Language :: Python :: 3
10
- Classifier: License :: OSI Approved :: MIT License
11
- Classifier: Operating System :: OS Independent
12
- Requires-Python: >=3.6
13
- Description-Content-Type: text/markdown
14
- License-File: LICENSE
15
-
16
- # O-CNN
17
-
18
- **[Documentation](https://ocnn-pytorch.readthedocs.io)**
19
-
20
- [![Documentation Status](https://readthedocs.org/projects/ocnn-pytorch/badge/?version=latest)](https://ocnn-pytorch.readthedocs.io/en/latest/?badge=latest)
21
- [![downloads](https://pepy.tech/badge/ocnn)](https://pepy.tech/project/ocnn)
22
- [![PyPI](https://img.shields.io/pypi/v/ocnn)](https://pypi.org/project/ocnn/)
23
-
24
- This repository contains the **pure PyTorch**-based implementation of
25
- [O-CNN](https://wang-ps.github.io/O-CNN.html). The code has been tested with
26
- `Pytorch>=1.6.0`, and `Pytorch>=1.9.0` is preferred.
27
-
28
- O-CNN is an octree-based sparse convolutional neural network framework for 3D
29
- deep learning. O-CNN constrains the CNN storage and computation into non-empty
30
- sparse voxels for efficiency and uses the `octree` data structure to organize
31
- and index these sparse voxels.
32
-
33
- The concept of sparse convolution in O-CNN is the same as
34
- [H-CNN](https://ieeexplore.ieee.org/abstract/document/8580422),
35
- [SparseConvNet](https://openaccess.thecvf.com/content_cvpr_2018/papers/Graham_3D_Semantic_Segmentation_CVPR_2018_paper.pdf),
36
- and
37
- [MinkowskiNet](https://openaccess.thecvf.com/content_CVPR_2019/papers/Choy_4D_Spatio-Temporal_ConvNets_Minkowski_Convolutional_Neural_Networks_CVPR_2019_paper.pdf).
38
- The key difference is that our O-CNN uses the `octree` to index the sparse
39
- voxels, while these 3 works use the `Hash Table`.
40
-
41
- Our O-CNN is published in SIGGRAPH 2017, H-CNN is published in TVCG 2018,
42
- SparseConvNet is published in CVPR 2018, and MinkowskiNet is published in
43
- CVPR 2019. Actually, our O-CNN was submitted to SIGGRAPH in the end of 2016 and
44
- was officially accepted in March, 2017. The camera-ready version of our O-CNN was
45
- submitted to SIGGRAPH in April, 2017. We just did not post our paper on Arxiv
46
- during the review process of SIGGRAPH. Therefore, **the idea of constraining CNN
47
- computation into sparse non-empty voxels is first proposed by our O-CNN**.
48
- Currently, this type of 3D convolution is known as Sparse Convolution in the
49
- research community.
50
-
51
- ## Key benefits of ocnn-pytorch
52
-
53
- - **Simplicity**. The ocnn-pytorch is based on pure PyTorch, it is portable and
54
- can be installed with a simple command:`pip install ocnn`. Other sparse
55
- convolution frameworks heavily rely on C++ and CUDA, and it is complicated to
56
- configure the compiling environment.
57
-
58
- - **Efficiency**. The ocnn-pytorch is very efficient compared with other sparse
59
- convolution frameworks. It only takes 18 hours to train the network on
60
- ScanNet for 600 epochs with 4 V100 GPUs. For reference, under the same
61
- training settings, MinkowskiNet 0.4.3 takes 60 hours and MinkowskiNet 0.5.4
62
- takes 30 hours.
63
-
1
+ Metadata-Version: 2.1
2
+ Name: ocnn
3
+ Version: 2.2.2
4
+ Summary: Octree-based Sparse Convolutional Neural Networks
5
+ Home-page: https://github.com/octree-nn/ocnn-pytorch
6
+ Author: Peng-Shuai Wang
7
+ Author-email: wangps@hotmail.com
8
+ License: MIT
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Requires-Python: >=3.6
13
+ Description-Content-Type: text/markdown
14
+ License-File: LICENSE
15
+ Requires-Dist: torch
16
+ Requires-Dist: torchvision
17
+ Requires-Dist: numpy
18
+ Requires-Dist: packaging
19
+
20
+ # O-CNN
21
+
22
+ **[Documentation](https://ocnn-pytorch.readthedocs.io)**
23
+
24
+ [![Documentation Status](https://readthedocs.org/projects/ocnn-pytorch/badge/?version=latest)](https://ocnn-pytorch.readthedocs.io/en/latest/?badge=latest)
25
+ [![Downloads](https://static.pepy.tech/badge/ocnn)](https://pepy.tech/project/ocnn)
26
+ [![PyPI](https://img.shields.io/pypi/v/ocnn)](https://pypi.org/project/ocnn/)
27
+
28
+ This repository contains the **pure PyTorch**-based implementation of
29
+ [O-CNN](https://wang-ps.github.io/O-CNN.html). The code has been tested with
30
+ `Pytorch>=1.6.0`, and `Pytorch>=1.9.0` is preferred.
31
+
32
+ O-CNN is an octree-based sparse convolutional neural network framework for 3D
33
+ deep learning. O-CNN constrains the CNN storage and computation into non-empty
34
+ sparse voxels for efficiency and uses the `octree` data structure to organize
35
+ and index these sparse voxels.
36
+
37
+ The concept of sparse convolution in O-CNN is the same as
38
+ [H-CNN](https://ieeexplore.ieee.org/abstract/document/8580422),
39
+ [SparseConvNet](https://openaccess.thecvf.com/content_cvpr_2018/papers/Graham_3D_Semantic_Segmentation_CVPR_2018_paper.pdf),
40
+ and
41
+ [MinkowskiNet](https://openaccess.thecvf.com/content_CVPR_2019/papers/Choy_4D_Spatio-Temporal_ConvNets_Minkowski_Convolutional_Neural_Networks_CVPR_2019_paper.pdf).
42
+ The key difference is that our O-CNN uses the `octree` to index the sparse
43
+ voxels, while these 3 works use the `Hash Table`.
44
+
45
+ Our O-CNN is published in SIGGRAPH 2017, H-CNN is published in TVCG 2018,
46
+ SparseConvNet is published in CVPR 2018, and MinkowskiNet is published in
47
+ CVPR 2019. Actually, our O-CNN was submitted to SIGGRAPH in the end of 2016 and
48
+ was officially accepted in March, 2017. The camera-ready version of our O-CNN was
49
+ submitted to SIGGRAPH in April, 2017. We just did not post our paper on Arxiv
50
+ during the review process of SIGGRAPH. Therefore, **the idea of constraining CNN
51
+ computation into sparse non-empty voxels is first proposed by our O-CNN**.
52
+ Currently, this type of 3D convolution is known as Sparse Convolution in the
53
+ research community.
54
+
55
+ ## Key benefits of ocnn-pytorch
56
+
57
+ - **Simplicity**. The ocnn-pytorch is based on pure PyTorch, it is portable and
58
+ can be installed with a simple command:`pip install ocnn`. Other sparse
59
+ convolution frameworks heavily rely on C++ and CUDA, and it is complicated to
60
+ configure the compiling environment.
61
+
62
+ - **Efficiency**. The ocnn-pytorch is very efficient compared with other sparse
63
+ convolution frameworks. It only takes 18 hours to train the network on
64
+ ScanNet for 600 epochs with 4 V100 GPUs. For reference, under the same
65
+ training settings, MinkowskiNet 0.4.3 takes 60 hours and MinkowskiNet 0.5.4
66
+ takes 30 hours.
67
+
@@ -1,48 +1,48 @@
1
- # O-CNN
2
-
3
- **[Documentation](https://ocnn-pytorch.readthedocs.io)**
4
-
5
- [![Documentation Status](https://readthedocs.org/projects/ocnn-pytorch/badge/?version=latest)](https://ocnn-pytorch.readthedocs.io/en/latest/?badge=latest)
6
- [![downloads](https://pepy.tech/badge/ocnn)](https://pepy.tech/project/ocnn)
7
- [![PyPI](https://img.shields.io/pypi/v/ocnn)](https://pypi.org/project/ocnn/)
8
-
9
- This repository contains the **pure PyTorch**-based implementation of
10
- [O-CNN](https://wang-ps.github.io/O-CNN.html). The code has been tested with
11
- `Pytorch>=1.6.0`, and `Pytorch>=1.9.0` is preferred.
12
-
13
- O-CNN is an octree-based sparse convolutional neural network framework for 3D
14
- deep learning. O-CNN constrains the CNN storage and computation into non-empty
15
- sparse voxels for efficiency and uses the `octree` data structure to organize
16
- and index these sparse voxels.
17
-
18
- The concept of sparse convolution in O-CNN is the same as
19
- [H-CNN](https://ieeexplore.ieee.org/abstract/document/8580422),
20
- [SparseConvNet](https://openaccess.thecvf.com/content_cvpr_2018/papers/Graham_3D_Semantic_Segmentation_CVPR_2018_paper.pdf),
21
- and
22
- [MinkowskiNet](https://openaccess.thecvf.com/content_CVPR_2019/papers/Choy_4D_Spatio-Temporal_ConvNets_Minkowski_Convolutional_Neural_Networks_CVPR_2019_paper.pdf).
23
- The key difference is that our O-CNN uses the `octree` to index the sparse
24
- voxels, while these 3 works use the `Hash Table`.
25
-
26
- Our O-CNN is published in SIGGRAPH 2017, H-CNN is published in TVCG 2018,
27
- SparseConvNet is published in CVPR 2018, and MinkowskiNet is published in
28
- CVPR 2019. Actually, our O-CNN was submitted to SIGGRAPH in the end of 2016 and
29
- was officially accepted in March, 2017. The camera-ready version of our O-CNN was
30
- submitted to SIGGRAPH in April, 2017. We just did not post our paper on Arxiv
31
- during the review process of SIGGRAPH. Therefore, **the idea of constraining CNN
32
- computation into sparse non-empty voxels is first proposed by our O-CNN**.
33
- Currently, this type of 3D convolution is known as Sparse Convolution in the
34
- research community.
35
-
36
- ## Key benefits of ocnn-pytorch
37
-
38
- - **Simplicity**. The ocnn-pytorch is based on pure PyTorch, it is portable and
39
- can be installed with a simple command:`pip install ocnn`. Other sparse
40
- convolution frameworks heavily rely on C++ and CUDA, and it is complicated to
41
- configure the compiling environment.
42
-
43
- - **Efficiency**. The ocnn-pytorch is very efficient compared with other sparse
44
- convolution frameworks. It only takes 18 hours to train the network on
45
- ScanNet for 600 epochs with 4 V100 GPUs. For reference, under the same
46
- training settings, MinkowskiNet 0.4.3 takes 60 hours and MinkowskiNet 0.5.4
47
- takes 30 hours.
48
-
1
+ # O-CNN
2
+
3
+ **[Documentation](https://ocnn-pytorch.readthedocs.io)**
4
+
5
+ [![Documentation Status](https://readthedocs.org/projects/ocnn-pytorch/badge/?version=latest)](https://ocnn-pytorch.readthedocs.io/en/latest/?badge=latest)
6
+ [![Downloads](https://static.pepy.tech/badge/ocnn)](https://pepy.tech/project/ocnn)
7
+ [![PyPI](https://img.shields.io/pypi/v/ocnn)](https://pypi.org/project/ocnn/)
8
+
9
+ This repository contains the **pure PyTorch**-based implementation of
10
+ [O-CNN](https://wang-ps.github.io/O-CNN.html). The code has been tested with
11
+ `Pytorch>=1.6.0`, and `Pytorch>=1.9.0` is preferred.
12
+
13
+ O-CNN is an octree-based sparse convolutional neural network framework for 3D
14
+ deep learning. O-CNN constrains the CNN storage and computation into non-empty
15
+ sparse voxels for efficiency and uses the `octree` data structure to organize
16
+ and index these sparse voxels.
17
+
18
+ The concept of sparse convolution in O-CNN is the same as
19
+ [H-CNN](https://ieeexplore.ieee.org/abstract/document/8580422),
20
+ [SparseConvNet](https://openaccess.thecvf.com/content_cvpr_2018/papers/Graham_3D_Semantic_Segmentation_CVPR_2018_paper.pdf),
21
+ and
22
+ [MinkowskiNet](https://openaccess.thecvf.com/content_CVPR_2019/papers/Choy_4D_Spatio-Temporal_ConvNets_Minkowski_Convolutional_Neural_Networks_CVPR_2019_paper.pdf).
23
+ The key difference is that our O-CNN uses the `octree` to index the sparse
24
+ voxels, while these 3 works use the `Hash Table`.
25
+
26
+ Our O-CNN is published in SIGGRAPH 2017, H-CNN is published in TVCG 2018,
27
+ SparseConvNet is published in CVPR 2018, and MinkowskiNet is published in
28
+ CVPR 2019. Actually, our O-CNN was submitted to SIGGRAPH in the end of 2016 and
29
+ was officially accepted in March, 2017. The camera-ready version of our O-CNN was
30
+ submitted to SIGGRAPH in April, 2017. We just did not post our paper on Arxiv
31
+ during the review process of SIGGRAPH. Therefore, **the idea of constraining CNN
32
+ computation into sparse non-empty voxels is first proposed by our O-CNN**.
33
+ Currently, this type of 3D convolution is known as Sparse Convolution in the
34
+ research community.
35
+
36
+ ## Key benefits of ocnn-pytorch
37
+
38
+ - **Simplicity**. The ocnn-pytorch is based on pure PyTorch, it is portable and
39
+ can be installed with a simple command:`pip install ocnn`. Other sparse
40
+ convolution frameworks heavily rely on C++ and CUDA, and it is complicated to
41
+ configure the compiling environment.
42
+
43
+ - **Efficiency**. The ocnn-pytorch is very efficient compared with other sparse
44
+ convolution frameworks. It only takes 18 hours to train the network on
45
+ ScanNet for 600 epochs with 4 V100 GPUs. For reference, under the same
46
+ training settings, MinkowskiNet 0.4.3 takes 60 hours and MinkowskiNet 0.5.4
47
+ takes 30 hours.
48
+
@@ -1,24 +1,24 @@
1
- # --------------------------------------------------------
2
- # Octree-based Sparse Convolutional Neural Networks
3
- # Copyright (c) 2022 Peng-Shuai Wang <wangps@hotmail.com>
4
- # Licensed under The MIT License [see LICENSE for details]
5
- # Written by Peng-Shuai Wang
6
- # --------------------------------------------------------
7
-
8
- from . import octree
9
- from . import nn
10
- from . import modules
11
- from . import models
12
- from . import dataset
13
- from . import utils
14
-
15
- __version__ = '2.2.1'
16
-
17
- __all__ = [
18
- 'octree',
19
- 'nn',
20
- 'modules',
21
- 'models',
22
- 'dataset',
23
- 'utils'
24
- ]
1
+ # --------------------------------------------------------
2
+ # Octree-based Sparse Convolutional Neural Networks
3
+ # Copyright (c) 2022 Peng-Shuai Wang <wangps@hotmail.com>
4
+ # Licensed under The MIT License [see LICENSE for details]
5
+ # Written by Peng-Shuai Wang
6
+ # --------------------------------------------------------
7
+
8
+ from . import octree
9
+ from . import nn
10
+ from . import modules
11
+ from . import models
12
+ from . import dataset
13
+ from . import utils
14
+
15
+ __version__ = '2.2.2'
16
+
17
+ __all__ = [
18
+ 'octree',
19
+ 'nn',
20
+ 'modules',
21
+ 'models',
22
+ 'dataset',
23
+ 'utils'
24
+ ]