ocnn 2.2.0.tar.gz → 2.2.2.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ocnn-2.2.0 → ocnn-2.2.2}/LICENSE +21 -21
- {ocnn-2.2.0 → ocnn-2.2.2}/MANIFEST.in +1 -1
- {ocnn-2.2.0/ocnn.egg-info → ocnn-2.2.2}/PKG-INFO +67 -63
- {ocnn-2.2.0 → ocnn-2.2.2}/README.md +48 -48
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/__init__.py +24 -24
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/dataset.py +160 -158
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/__init__.py +29 -24
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/autoencoder.py +155 -165
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/hrnet.py +192 -191
- ocnn-2.2.2/ocnn/models/image2shape.py +128 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/lenet.py +46 -46
- ocnn-2.2.2/ocnn/models/ounet.py +94 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/resnet.py +53 -53
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/segnet.py +72 -72
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/models/unet.py +105 -105
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/modules/__init__.py +20 -20
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/modules/modules.py +193 -231
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/modules/resblocks.py +124 -124
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/__init__.py +42 -40
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree2col.py +53 -53
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree2vox.py +50 -50
- ocnn-2.2.2/ocnn/nn/octree_align.py +46 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_conv.py +411 -411
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_drop.py +55 -55
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_dwconv.py +204 -204
- ocnn-2.2.2/ocnn/nn/octree_gconv.py +79 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_interp.py +196 -196
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_norm.py +86 -56
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_pad.py +39 -39
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/nn/octree_pool.py +200 -197
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/octree/__init__.py +22 -21
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/octree/octree.py +639 -581
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/octree/points.py +317 -298
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/octree/shuffled_key.py +115 -115
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn/utils.py +202 -153
- {ocnn-2.2.0 → ocnn-2.2.2/ocnn.egg-info}/PKG-INFO +67 -63
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn.egg-info/SOURCES.txt +4 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn.egg-info/not-zip-safe +1 -1
- ocnn-2.2.2/ocnn.egg-info/requires.txt +4 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/setup.cfg +9 -9
- {ocnn-2.2.0 → ocnn-2.2.2}/setup.py +35 -35
- ocnn-2.2.0/ocnn.egg-info/requires.txt +0 -2
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn.egg-info/dependency_links.txt +0 -0
- {ocnn-2.2.0 → ocnn-2.2.2}/ocnn.egg-info/top_level.txt +0 -0
--- ocnn-2.2.0/LICENSE
+++ ocnn-2.2.2/LICENSE
@@ -1,21 +1,21 @@
(all 21 lines are removed and re-added with identical text)
 MIT License

 Copyright (c) 2022 Peng-Shuai Wang <wangps@hotmail.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:

 The above copyright notice and this permission notice shall be included in
 all copies or substantial portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
--- ocnn-2.2.0/MANIFEST.in
+++ ocnn-2.2.2/MANIFEST.in
@@ -1 +1 @@
(the single line is removed and re-added with identical text)
 recursive-exclude test *
--- ocnn-2.2.0/ocnn.egg-info/PKG-INFO
+++ ocnn-2.2.2/PKG-INFO
@@ -1,63 +1,67 @@
-Metadata-Version: 2.1
-Name: ocnn
-Version: 2.2.0
-Summary: Octree-based Sparse Convolutional Neural Networks
-Home-page: https://github.com/octree-nn/ocnn-pytorch
-Author: Peng-Shuai Wang
-Author-email: wangps@hotmail.com
-License: MIT
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.6
-Description-Content-Type: text/markdown
-License-File: LICENSE
-(lines 15-63: the embedded long description for 2.2.0; text not shown)
+Metadata-Version: 2.1
+Name: ocnn
+Version: 2.2.2
+Summary: Octree-based Sparse Convolutional Neural Networks
+Home-page: https://github.com/octree-nn/ocnn-pytorch
+Author: Peng-Shuai Wang
+Author-email: wangps@hotmail.com
+License: MIT
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: torch
+Requires-Dist: torchvision
+Requires-Dist: numpy
+Requires-Dist: packaging
+
+# O-CNN
+
+**[Documentation](https://ocnn-pytorch.readthedocs.io)**
+
+[](https://ocnn-pytorch.readthedocs.io/en/latest/?badge=latest)
+[](https://pepy.tech/project/ocnn)
+[](https://pypi.org/project/ocnn/)
+
+This repository contains the **pure PyTorch**-based implementation of
+[O-CNN](https://wang-ps.github.io/O-CNN.html). The code has been tested with
+`Pytorch>=1.6.0`, and `Pytorch>=1.9.0` is preferred.
+
+O-CNN is an octree-based sparse convolutional neural network framework for 3D
+deep learning. O-CNN constrains the CNN storage and computation to non-empty
+sparse voxels for efficiency and uses the `octree` data structure to organize
+and index these sparse voxels.
+
+The concept of sparse convolution in O-CNN is the same as in
+[H-CNN](https://ieeexplore.ieee.org/abstract/document/8580422),
+[SparseConvNet](https://openaccess.thecvf.com/content_cvpr_2018/papers/Graham_3D_Semantic_Segmentation_CVPR_2018_paper.pdf),
+and
+[MinkowskiNet](https://openaccess.thecvf.com/content_CVPR_2019/papers/Choy_4D_Spatio-Temporal_ConvNets_Minkowski_Convolutional_Neural_Networks_CVPR_2019_paper.pdf).
+The key difference is that our O-CNN uses the `octree` to index the sparse
+voxels, while these three works use hash tables.
+
+Our O-CNN was published in SIGGRAPH 2017, H-CNN in TVCG 2018, SparseConvNet in
+CVPR 2018, and MinkowskiNet in CVPR 2019. Our O-CNN was submitted to SIGGRAPH
+at the end of 2016 and officially accepted in March 2017; the camera-ready
+version was submitted in April 2017. We simply did not post the paper on arXiv
+during the SIGGRAPH review process. Therefore, **the idea of constraining CNN
+computation to sparse, non-empty voxels was first proposed by our O-CNN**.
+This type of 3D convolution is currently known as sparse convolution in the
+research community.
+
+## Key benefits of ocnn-pytorch
+
+- **Simplicity**. ocnn-pytorch is based on pure PyTorch; it is portable and
+  can be installed with a single command: `pip install ocnn`. Other sparse
+  convolution frameworks rely heavily on C++ and CUDA, and configuring their
+  build environments is complicated.
+
+- **Efficiency**. ocnn-pytorch is very efficient compared with other sparse
+  convolution frameworks. It takes only 18 hours to train the network on
+  ScanNet for 600 epochs with 4 V100 GPUs. For reference, under the same
+  training settings, MinkowskiNet 0.4.3 takes 60 hours and MinkowskiNet 0.5.4
+  takes 30 hours.
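To make the sparse-voxel idea from the description above concrete, here is a minimal conceptual sketch in plain PyTorch. It is not ocnn's implementation: features are stored only for non-empty voxels, a coordinate-keyed index stands in for the octree keys that O-CNN uses (or the hash tables used by SparseConvNet and MinkowskiNet), and a 3x3x3 aggregation visits only occupied neighbors, so the work scales with the number of occupied voxels rather than with the full grid.

```python
# Conceptual sketch only (not ocnn's implementation): a sparse "convolution"
# gathers features only for non-empty voxels and their occupied neighbors.
import torch

# Toy occupancy: ~1000 non-empty voxels out of a 64^3 grid, 8 channels each.
coords = torch.randint(0, 64, (1000, 3))   # integer voxel coordinates
feats = torch.randn(1000, 8)               # one feature vector per occupied voxel

# Index the sparse voxels by their coordinates; this plays the role of the
# shuffled octree keys in O-CNN (or of a hash table in the other frameworks).
key_of = {tuple(c.tolist()): i for i, c in enumerate(coords)}

# Accumulate a 3x3x3 neighborhood average for every occupied voxel, skipping
# empty neighbors -- computation stays proportional to the occupied voxels,
# not to the full 64^3 = 262144 grid cells.
offsets = [(dx, dy, dz) for dx in (-1, 0, 1) for dy in (-1, 0, 1) for dz in (-1, 0, 1)]
out = torch.zeros_like(feats)
for i, c in enumerate(coords.tolist()):
    neighbors = [key_of[(c[0] + dx, c[1] + dy, c[2] + dz)]
                 for dx, dy, dz in offsets
                 if (c[0] + dx, c[1] + dy, c[2] + dz) in key_of]
    out[i] = feats[neighbors].mean(dim=0)
```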
--- ocnn-2.2.0/README.md
+++ ocnn-2.2.2/README.md
@@ -1,48 +1,48 @@
(README.md matches the project description embedded in PKG-INFO above; the only
visible textual change is the added Downloads badge.)
+[](https://pepy.tech/project/ocnn)
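As a usage illustration of the `pip install ocnn` workflow highlighted in the README, the sketch below builds an octree from a random point cloud. The class and method names (`Points`, `Octree`, `build_octree`, `construct_all_neigh`) are assumptions inferred from the module layout in this diff (`ocnn/octree/points.py`, `ocnn/octree/octree.py`); check the documentation at https://ocnn-pytorch.readthedocs.io for the exact API of version 2.2.2.

```python
# Hypothetical sketch -- class/method names are assumptions, not verified
# against the 2.2.2 API; see https://ocnn-pytorch.readthedocs.io.
# Prerequisite: pip install ocnn
import torch
import ocnn

# A random point cloud with unit normals, coordinates in [-1, 1].
xyz = torch.rand(2000, 3) * 2 - 1
normals = torch.nn.functional.normalize(torch.randn(2000, 3), dim=1)

points = ocnn.octree.Points(xyz, normals)           # wrap the raw tensors
octree = ocnn.octree.Octree(depth=5, full_depth=2)   # octree of depth 5
octree.build_octree(points)                           # insert points into octree nodes
octree.construct_all_neigh()                          # precompute neighborhoods for convolution
```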
--- ocnn-2.2.0/ocnn/__init__.py
+++ ocnn-2.2.2/ocnn/__init__.py
@@ -1,24 +1,24 @@
(all 24 lines are re-emitted; only the version string differs in text)
 # --------------------------------------------------------
 # Octree-based Sparse Convolutional Neural Networks
 # Copyright (c) 2022 Peng-Shuai Wang <wangps@hotmail.com>
 # Licensed under The MIT License [see LICENSE for details]
 # Written by Peng-Shuai Wang
 # --------------------------------------------------------

 from . import octree
 from . import nn
 from . import modules
 from . import models
 from . import dataset
 from . import utils

-__version__ = '2.2.0'
+__version__ = '2.2.2'

 __all__ = [
     'octree',
     'nn',
     'modules',
     'models',
     'dataset',
     'utils'
 ]
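A quick sanity check that follows directly from the `__init__.py` shown above: the subpackages are imported eagerly, so they are available as attributes of the top-level module, and `__version__` reports the installed release.

```python
import ocnn

print(ocnn.__version__)   # '2.2.2' for this release
print(ocnn.__all__)       # ['octree', 'nn', 'modules', 'models', 'dataset', 'utils']

# __init__.py imports every subpackage, so each name in __all__ is
# already an attribute of the top-level ocnn module.
assert all(hasattr(ocnn, name) for name in ocnn.__all__)
```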