learning3d 0.0.7__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- learning3d/examples/test_dcp.py +4 -2
- learning3d/examples/test_deepgmr.py +4 -2
- learning3d/examples/test_masknet.py +3 -1
- learning3d/examples/test_masknet2.py +3 -1
- learning3d/examples/test_pcn.py +4 -2
- learning3d/examples/test_pcrnet.py +3 -1
- learning3d/examples/test_pnlk.py +3 -1
- learning3d/examples/test_pointconv.py +3 -1
- learning3d/examples/test_pointnet.py +3 -1
- learning3d/examples/test_prnet.py +4 -2
- learning3d/examples/test_rpmnet.py +3 -1
- learning3d/examples/train_PointNetLK.py +4 -2
- learning3d/examples/train_dcp.py +4 -2
- learning3d/examples/train_deepgmr.py +4 -2
- learning3d/examples/train_masknet.py +4 -2
- learning3d/examples/train_pcn.py +4 -2
- learning3d/examples/train_pcrnet.py +4 -2
- learning3d/examples/train_pointconv.py +4 -2
- learning3d/examples/train_pointnet.py +4 -2
- learning3d/examples/train_prnet.py +4 -2
- learning3d/examples/train_rpmnet.py +4 -2
- {learning3d-0.0.7.dist-info → learning3d-0.1.0.dist-info}/METADATA +1 -1
- {learning3d-0.0.7.dist-info → learning3d-0.1.0.dist-info}/RECORD +26 -26
- {learning3d-0.0.7.dist-info → learning3d-0.1.0.dist-info}/LICENSE +0 -0
- {learning3d-0.0.7.dist-info → learning3d-0.1.0.dist-info}/WHEEL +0 -0
- {learning3d-0.0.7.dist-info → learning3d-0.1.0.dist-info}/top_level.txt +0 -0
learning3d/examples/test_dcp.py
CHANGED
@@ -88,6 +88,8 @@ def options():
|
|
88
88
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
89
89
|
parser.add_argument('--num_points', default=1024, type=int,
|
90
90
|
metavar='N', help='points in point-cloud (default: 1024)')
|
91
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
92
|
+
help='path of the data where modelnet files are downloaded.')
|
91
93
|
|
92
94
|
# settings for PointNet
|
93
95
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -114,8 +116,8 @@ def main():
|
|
114
116
|
args = options()
|
115
117
|
torch.backends.cudnn.deterministic = True
|
116
118
|
|
117
|
-
trainset = RegistrationData('DCP', ModelNet40Data(train=True))
|
118
|
-
testset = RegistrationData('DCP', ModelNet40Data(train=False))
|
119
|
+
trainset = RegistrationData('DCP', ModelNet40Data(train=True, root_dir=args.root_dir))
|
120
|
+
testset = RegistrationData('DCP', ModelNet40Data(train=False, root_dir=args.root_dir))
|
119
121
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
120
122
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
121
123
|
|
@@ -103,6 +103,8 @@ def options():
|
|
103
103
|
metavar='K', help='No of nearest neighbors to be estimated.')
|
104
104
|
parser.add_argument('--use_rri', default=True, type=bool,
|
105
105
|
help='Find nearest neighbors to estimate features from PointNet.')
|
106
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
107
|
+
help='path of the data where modelnet files are downloaded.')
|
106
108
|
|
107
109
|
# settings for on training
|
108
110
|
parser.add_argument('-j', '--workers', default=4, type=int,
|
@@ -121,8 +123,8 @@ def main():
|
|
121
123
|
args = options()
|
122
124
|
torch.backends.cudnn.deterministic = True
|
123
125
|
|
124
|
-
trainset = RegistrationData('DeepGMR', ModelNet40Data(train=True))
|
125
|
-
testset = RegistrationData('DeepGMR', ModelNet40Data(train=False))
|
126
|
+
trainset = RegistrationData('DeepGMR', ModelNet40Data(train=True, root_dir=args.root_dir))
|
127
|
+
testset = RegistrationData('DeepGMR', ModelNet40Data(train=False, root_dir=args.root_dir))
|
126
128
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
127
129
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
128
130
|
|
@@ -117,6 +117,8 @@ def options():
|
|
117
117
|
help='Add noise in source point clouds.')
|
118
118
|
parser.add_argument('--outliers', default=False, type=bool,
|
119
119
|
help='Add outliers to template point cloud.')
|
120
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
121
|
+
help='path of the data where modelnet files are downloaded.')
|
120
122
|
|
121
123
|
# settings for on testing
|
122
124
|
parser.add_argument('-j', '--workers', default=1, type=int,
|
@@ -137,7 +139,7 @@ def main():
|
|
137
139
|
args = options()
|
138
140
|
torch.backends.cudnn.deterministic = True
|
139
141
|
|
140
|
-
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False, num_points=args.num_points),
|
142
|
+
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False, num_points=args.num_points, root_dir=args.root_dir),
|
141
143
|
partial_source=args.partial_source, noise=args.noise,
|
142
144
|
additional_params={'use_masknet': True})
|
143
145
|
test_loader = DataLoader(testset, batch_size=args.test_batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
@@ -120,6 +120,8 @@ def options():
|
|
120
120
|
help='Add noise in source point clouds.')
|
121
121
|
parser.add_argument('--outliers', default=False, type=bool,
|
122
122
|
help='Add outliers to template point cloud.')
|
123
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
124
|
+
help='path of the data where modelnet files are downloaded.')
|
123
125
|
|
124
126
|
# settings for on testing
|
125
127
|
parser.add_argument('-j', '--workers', default=1, type=int,
|
@@ -140,7 +142,7 @@ def main():
|
|
140
142
|
args = options()
|
141
143
|
torch.backends.cudnn.deterministic = True
|
142
144
|
|
143
|
-
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False, num_points=args.num_points),
|
145
|
+
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False, num_points=args.num_points, root_dir=args.root_dir),
|
144
146
|
partial_template=args.partial_template, partial_source=args.partial_source,
|
145
147
|
noise=args.noise, additional_params={'use_masknet': True, 'partial_point_cloud_method': 'planar_crop'})
|
146
148
|
test_loader = DataLoader(testset, batch_size=args.test_batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
learning3d/examples/test_pcn.py
CHANGED
@@ -70,6 +70,8 @@ def options():
|
|
70
70
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
71
71
|
parser.add_argument('--num_points', default=1024, type=int,
|
72
72
|
metavar='N', help='points in point-cloud (default: 1024)')
|
73
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
74
|
+
help='path of the data where modelnet files are downloaded.')
|
73
75
|
|
74
76
|
# settings for PCN
|
75
77
|
parser.add_argument('--emb_dims', default=1024, type=int,
|
@@ -95,8 +97,8 @@ def main():
|
|
95
97
|
args = options()
|
96
98
|
args.dataset_path = os.path.join(os.getcwd(), os.pardir, os.pardir, 'ModelNet40', 'ModelNet40')
|
97
99
|
|
98
|
-
trainset = ClassificationData(ModelNet40Data(train=True))
|
99
|
-
testset = ClassificationData(ModelNet40Data(train=False))
|
100
|
+
trainset = ClassificationData(ModelNet40Data(train=True, root_dir=args.root_dir))
|
101
|
+
testset = ClassificationData(ModelNet40Data(train=False, root_dir=args.root_dir))
|
100
102
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
101
103
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
102
104
|
|
@@ -74,6 +74,8 @@ def options():
|
|
74
74
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
75
75
|
parser.add_argument('--num_points', default=1024, type=int,
|
76
76
|
metavar='N', help='points in point-cloud (default: 1024)')
|
77
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
78
|
+
help='path of the data where modelnet files are downloaded.')
|
77
79
|
|
78
80
|
# settings for PointNet
|
79
81
|
parser.add_argument('--emb_dims', default=1024, type=int,
|
@@ -97,7 +99,7 @@ def options():
|
|
97
99
|
def main():
|
98
100
|
args = options()
|
99
101
|
|
100
|
-
testset = RegistrationData('PCRNet', ModelNet40Data(train=False))
|
102
|
+
testset = RegistrationData('PCRNet', ModelNet40Data(train=False, root_dir=args.root_dir))
|
101
103
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
102
104
|
|
103
105
|
if not torch.cuda.is_available():
|
learning3d/examples/test_pnlk.py
CHANGED
@@ -74,6 +74,8 @@ def options():
|
|
74
74
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
75
75
|
parser.add_argument('--num_points', default=1024, type=int,
|
76
76
|
metavar='N', help='points in point-cloud (default: 1024)')
|
77
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
78
|
+
help='path of the data where modelnet files are downloaded.')
|
77
79
|
|
78
80
|
# settings for PointNet
|
79
81
|
parser.add_argument('--emb_dims', default=1024, type=int,
|
@@ -98,7 +100,7 @@ def options():
|
|
98
100
|
def main():
|
99
101
|
args = options()
|
100
102
|
|
101
|
-
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False))
|
103
|
+
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False, root_dir=args.root_dir))
|
102
104
|
test_loader = DataLoader(testset, batch_size=8, shuffle=False, drop_last=False, num_workers=args.workers)
|
103
105
|
|
104
106
|
if not torch.cuda.is_available():
|
@@ -73,6 +73,8 @@ def options():
|
|
73
73
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
74
74
|
parser.add_argument('--num_points', default=1024, type=int,
|
75
75
|
metavar='N', help='points in point-cloud (default: 1024)')
|
76
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
77
|
+
help='path of the data where modelnet files are downloaded.')
|
76
78
|
|
77
79
|
# settings for PointNet
|
78
80
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -99,7 +101,7 @@ def main():
|
|
99
101
|
args = options()
|
100
102
|
args.dataset_path = os.path.join(os.getcwd(), os.pardir, os.pardir, 'ModelNet40', 'ModelNet40')
|
101
103
|
|
102
|
-
testset = ClassificationData(ModelNet40Data(train=False))
|
104
|
+
testset = ClassificationData(ModelNet40Data(train=False, root_dir=args.root_dir))
|
103
105
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
104
106
|
|
105
107
|
if not torch.cuda.is_available():
|
@@ -73,6 +73,8 @@ def options():
|
|
73
73
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
74
74
|
parser.add_argument('--num_points', default=1024, type=int,
|
75
75
|
metavar='N', help='points in point-cloud (default: 1024)')
|
76
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
77
|
+
help='path of the data where modelnet files are downloaded.')
|
76
78
|
|
77
79
|
# settings for PointNet
|
78
80
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -99,7 +101,7 @@ def main():
|
|
99
101
|
args = options()
|
100
102
|
args.dataset_path = os.path.join(os.getcwd(), os.pardir, os.pardir, 'ModelNet40', 'ModelNet40')
|
101
103
|
|
102
|
-
testset = ClassificationData(ModelNet40Data(train=False))
|
104
|
+
testset = ClassificationData(ModelNet40Data(train=False, root_dir=args.root_dir))
|
103
105
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
104
106
|
|
105
107
|
if not torch.cuda.is_available():
|
@@ -79,6 +79,8 @@ def options():
|
|
79
79
|
# settings for input data
|
80
80
|
parser.add_argument('--dataset_type', default='modelnet', choices=['modelnet', 'shapenet2'],
|
81
81
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
82
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
83
|
+
help='path of the data where modelnet files are downloaded.')
|
82
84
|
|
83
85
|
# settings for PointNet
|
84
86
|
parser.add_argument('--emb_dims', default=512, type=int,
|
@@ -102,8 +104,8 @@ def main():
|
|
102
104
|
args = options()
|
103
105
|
torch.backends.cudnn.deterministic = True
|
104
106
|
|
105
|
-
trainset = RegistrationData('PRNet', ModelNet40Data(train=True), partial_source=True, partial_template=True)
|
106
|
-
testset = RegistrationData('PRNet', ModelNet40Data(train=False), partial_source=True, partial_template=True)
|
107
|
+
trainset = RegistrationData('PRNet', ModelNet40Data(train=True, root_dir=args.root_dir), partial_source=True, partial_template=True)
|
108
|
+
testset = RegistrationData('PRNet', ModelNet40Data(train=False, root_dir=args.root_dir), partial_source=True, partial_template=True)
|
107
109
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
108
110
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
109
111
|
|
@@ -74,6 +74,8 @@ def options():
|
|
74
74
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
75
75
|
parser.add_argument('--num_points', default=1024, type=int,
|
76
76
|
metavar='N', help='points in point-cloud (default: 1024)')
|
77
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
78
|
+
help='path of the data where modelnet files are downloaded.')
|
77
79
|
|
78
80
|
# settings for PointNet
|
79
81
|
parser.add_argument('--emb_dims', default=1024, type=int,
|
@@ -98,7 +100,7 @@ def options():
|
|
98
100
|
def main():
|
99
101
|
args = options()
|
100
102
|
|
101
|
-
testset = RegistrationData('RPMNet', ModelNet40Data(train=False, num_points=args.num_points, use_normals=True), partial_source=True, partial_template=False)
|
103
|
+
testset = RegistrationData('RPMNet', ModelNet40Data(train=False, num_points=args.num_points, use_normals=True, root_dir=args.root_dir), partial_source=True, partial_template=False)
|
102
104
|
test_loader = DataLoader(testset, batch_size=1, shuffle=False, drop_last=False, num_workers=args.workers)
|
103
105
|
|
104
106
|
if not torch.cuda.is_available():
|
@@ -147,6 +147,8 @@ def options():
|
|
147
147
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
148
148
|
parser.add_argument('--num_points', default=1024, type=int,
|
149
149
|
metavar='N', help='points in point-cloud (default: 1024)')
|
150
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
151
|
+
help='path of the data where modelnet files are downloaded.')
|
150
152
|
|
151
153
|
# settings for PointNet
|
152
154
|
parser.add_argument('--fine_tune_pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -195,8 +197,8 @@ def main():
|
|
195
197
|
textio.cprint(str(args))
|
196
198
|
|
197
199
|
|
198
|
-
trainset = RegistrationData('PointNetLK', ModelNet40Data(train=True))
|
199
|
-
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False))
|
200
|
+
trainset = RegistrationData('PointNetLK', ModelNet40Data(train=True, root_dir=args.root_dir))
|
201
|
+
testset = RegistrationData('PointNetLK', ModelNet40Data(train=False, root_dir=args.root_dir))
|
200
202
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
201
203
|
test_loader = DataLoader(testset, batch_size=8, shuffle=False, drop_last=False, num_workers=args.workers)
|
202
204
|
|
learning3d/examples/train_dcp.py
CHANGED
@@ -168,6 +168,8 @@ def options():
|
|
168
168
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
169
169
|
parser.add_argument('--num_points', default=1024, type=int,
|
170
170
|
metavar='N', help='points in point-cloud (default: 1024)')
|
171
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
172
|
+
help='path of the data where modelnet files are downloaded.')
|
171
173
|
|
172
174
|
# settings for PointNet
|
173
175
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -214,8 +216,8 @@ def main():
|
|
214
216
|
textio.cprint(str(args))
|
215
217
|
|
216
218
|
|
217
|
-
trainset = RegistrationData('DCP', ModelNet40Data(train=True))
|
218
|
-
testset = RegistrationData('DCP', ModelNet40Data(train=False))
|
219
|
+
trainset = RegistrationData('DCP', ModelNet40Data(train=True, root_dir=args.root_dir))
|
220
|
+
testset = RegistrationData('DCP', ModelNet40Data(train=False, root_dir=args.root_dir))
|
219
221
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
220
222
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
221
223
|
|
@@ -165,6 +165,8 @@ def options():
|
|
165
165
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
166
166
|
parser.add_argument('--num_points', default=1024, type=int,
|
167
167
|
metavar='N', help='points in point-cloud (default: 1024)')
|
168
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
169
|
+
help='path of the data where modelnet files are downloaded.')
|
168
170
|
|
169
171
|
parser.add_argument('--nearest_neighbors', default=20, type=int,
|
170
172
|
metavar='K', help='No of nearest neighbors to be estimated.')
|
@@ -211,8 +213,8 @@ def main():
|
|
211
213
|
textio = IOStream('checkpoints/' + args.exp_name + '/run.log')
|
212
214
|
textio.cprint(str(args))
|
213
215
|
|
214
|
-
trainset = RegistrationData('DeepGMR', ModelNet40Data(train=True), additional_params={'nearest_neighbors': args.nearest_neighbors})
|
215
|
-
testset = RegistrationData('DeepGMR', ModelNet40Data(train=False), additional_params={'nearest_neighbors': args.nearest_neighbors})
|
216
|
+
trainset = RegistrationData('DeepGMR', ModelNet40Data(train=True, root_dir=args.root_dir), additional_params={'nearest_neighbors': args.nearest_neighbors})
|
217
|
+
testset = RegistrationData('DeepGMR', ModelNet40Data(train=False, root_dir=args.root_dir), additional_params={'nearest_neighbors': args.nearest_neighbors})
|
216
218
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
217
219
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
218
220
|
|
@@ -160,6 +160,8 @@ def options():
|
|
160
160
|
help='Add noise in source point clouds.')
|
161
161
|
parser.add_argument('--outliers', default=False, type=bool,
|
162
162
|
help='Add outliers to template point cloud.')
|
163
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
164
|
+
help='path of the data where modelnet files are downloaded.')
|
163
165
|
|
164
166
|
# settings for on training
|
165
167
|
parser.add_argument('--seed', type=int, default=1234)
|
@@ -202,10 +204,10 @@ def main():
|
|
202
204
|
textio = IOStream('checkpoints/' + args.exp_name + '/run.log')
|
203
205
|
textio.cprint(str(args))
|
204
206
|
|
205
|
-
trainset = RegistrationData(ModelNet40Data(train=True, num_points=args.num_points, unseen=args.unseen),
|
207
|
+
trainset = RegistrationData(ModelNet40Data(train=True, num_points=args.num_points, unseen=args.unseen, root_dir=args.root_dir),
|
206
208
|
partial_source=args.partial_source, noise=args.noise, outliers=args.outliers,
|
207
209
|
additional_params={'use_masknet': True})
|
208
|
-
testset = RegistrationData(ModelNet40Data(train=False, num_points=args.num_points, unseen=args.unseen),
|
210
|
+
testset = RegistrationData(ModelNet40Data(train=False, num_points=args.num_points, unseen=args.unseen, root_dir=args.root_dir),
|
209
211
|
partial_source=args.partial_source, noise=args.noise, outliers=args.outliers,
|
210
212
|
additional_params={'use_masknet': True})
|
211
213
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
learning3d/examples/train_pcn.py
CHANGED
@@ -138,6 +138,8 @@ def options():
|
|
138
138
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
139
139
|
parser.add_argument('--num_points', default=1024, type=int,
|
140
140
|
metavar='N', help='points in point-cloud (default: 1024)')
|
141
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
142
|
+
help='path of the data where modelnet files are downloaded.')
|
141
143
|
|
142
144
|
# settings for PCN
|
143
145
|
parser.add_argument('--emb_dims', default=1024, type=int,
|
@@ -183,8 +185,8 @@ def main():
|
|
183
185
|
textio.cprint(str(args))
|
184
186
|
|
185
187
|
|
186
|
-
trainset = ClassificationData(ModelNet40Data(train=True))
|
187
|
-
testset = ClassificationData(ModelNet40Data(train=False))
|
188
|
+
trainset = ClassificationData(ModelNet40Data(train=True, root_dir=args.root_dir))
|
189
|
+
testset = ClassificationData(ModelNet40Data(train=False, root_dir=args.root_dir))
|
188
190
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
189
191
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
190
192
|
|
@@ -147,6 +147,8 @@ def options():
|
|
147
147
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
148
148
|
parser.add_argument('--num_points', default=1024, type=int,
|
149
149
|
metavar='N', help='points in point-cloud (default: 1024)')
|
150
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
151
|
+
help='path of the data where modelnet files are downloaded.')
|
150
152
|
|
151
153
|
# settings for PointNet
|
152
154
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -193,8 +195,8 @@ def main():
|
|
193
195
|
textio.cprint(str(args))
|
194
196
|
|
195
197
|
|
196
|
-
trainset = RegistrationData('PCRNet', ModelNet40Data(train=True))
|
197
|
-
testset = RegistrationData('PCRNet', ModelNet40Data(train=False))
|
198
|
+
trainset = RegistrationData('PCRNet', ModelNet40Data(train=True, root_dir=args.root_dir))
|
199
|
+
testset = RegistrationData('PCRNet', ModelNet40Data(train=False, root_dir=args.root_dir))
|
198
200
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
199
201
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
200
202
|
|
@@ -163,6 +163,8 @@ def options():
|
|
163
163
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
164
164
|
parser.add_argument('--num_points', default=1024, type=int,
|
165
165
|
metavar='N', help='points in point-cloud (default: 1024)')
|
166
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
167
|
+
help='path of the data where modelnet files are downloaded.')
|
166
168
|
|
167
169
|
# settings for PointNet
|
168
170
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -210,8 +212,8 @@ def main():
|
|
210
212
|
textio.cprint(str(args))
|
211
213
|
|
212
214
|
|
213
|
-
trainset = ClassificationData(ModelNet40Data(train=True))
|
214
|
-
testset = ClassificationData(ModelNet40Data(train=False))
|
215
|
+
trainset = ClassificationData(ModelNet40Data(train=True, root_dir=args.root_dir))
|
216
|
+
testset = ClassificationData(ModelNet40Data(train=False, root_dir=args.root_dir))
|
215
217
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
216
218
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
217
219
|
|
@@ -163,6 +163,8 @@ def options():
|
|
163
163
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
164
164
|
parser.add_argument('--num_points', default=1024, type=int,
|
165
165
|
metavar='N', help='points in point-cloud (default: 1024)')
|
166
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
167
|
+
help='path of the data where modelnet files are downloaded.')
|
166
168
|
|
167
169
|
# settings for PointNet
|
168
170
|
parser.add_argument('--pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -210,8 +212,8 @@ def main():
|
|
210
212
|
textio.cprint(str(args))
|
211
213
|
|
212
214
|
|
213
|
-
trainset = ClassificationData(ModelNet40Data(train=True))
|
214
|
-
testset = ClassificationData(ModelNet40Data(train=False))
|
215
|
+
trainset = ClassificationData(ModelNet40Data(train=True, root_dir=args.root_dir))
|
216
|
+
testset = ClassificationData(ModelNet40Data(train=False, root_dir=args.root_dir))
|
215
217
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
216
218
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
217
219
|
|
@@ -157,6 +157,8 @@ def options():
|
|
157
157
|
metavar='K', help='dim. of the feature vector (default: 1024)')
|
158
158
|
parser.add_argument('--num_iterations', default=3, type=int,
|
159
159
|
help='Number of Iterations')
|
160
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
161
|
+
help='path of the data where modelnet files are downloaded.')
|
160
162
|
|
161
163
|
# settings for on training
|
162
164
|
parser.add_argument('--seed', type=int, default=1234)
|
@@ -195,8 +197,8 @@ def main():
|
|
195
197
|
textio.cprint(str(args))
|
196
198
|
|
197
199
|
|
198
|
-
trainset = RegistrationData('PRNet', ModelNet40Data(train=True), partial_source=True, partial_template=True)
|
199
|
-
testset = RegistrationData('PRNet', ModelNet40Data(train=False), partial_source=True, partial_template=True)
|
200
|
+
trainset = RegistrationData('PRNet', ModelNet40Data(train=True, root_dir=args.root_dir), partial_source=True, partial_template=True)
|
201
|
+
testset = RegistrationData('PRNet', ModelNet40Data(train=False, root_dir=args.root_dir), partial_source=True, partial_template=True)
|
200
202
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
201
203
|
test_loader = DataLoader(testset, batch_size=args.batch_size, shuffle=False, drop_last=False, num_workers=args.workers)
|
202
204
|
|
@@ -146,6 +146,8 @@ def options():
|
|
146
146
|
metavar='DATASET', help='dataset type (default: modelnet)')
|
147
147
|
parser.add_argument('--num_points', default=1024, type=int,
|
148
148
|
metavar='N', help='points in point-cloud (default: 1024)')
|
149
|
+
parser.add_argument('--root_dir', default='./', type=str,
|
150
|
+
help='path of the data where modelnet files are downloaded.')
|
149
151
|
|
150
152
|
# settings for PointNet
|
151
153
|
parser.add_argument('--fine_tune_pointnet', default='tune', type=str, choices=['fixed', 'tune'],
|
@@ -194,8 +196,8 @@ def main():
|
|
194
196
|
textio.cprint(str(args))
|
195
197
|
|
196
198
|
|
197
|
-
trainset = RegistrationData('RPMNet', ModelNet40Data(train=True, num_points=args.num_points, use_normals=True), partial_source=True, partial_template=True)
|
198
|
-
testset = RegistrationData('RPMNet', ModelNet40Data(train=False, num_points=args.num_points, use_normals=True), partial_source=True, partial_template=True)
|
199
|
+
trainset = RegistrationData('RPMNet', ModelNet40Data(train=True, num_points=args.num_points, use_normals=True, root_dir=args.root_dir), partial_source=True, partial_template=True)
|
200
|
+
testset = RegistrationData('RPMNet', ModelNet40Data(train=False, num_points=args.num_points, use_normals=True, root_dir=args.root_dir), partial_source=True, partial_template=True)
|
199
201
|
train_loader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, drop_last=True, num_workers=args.workers)
|
200
202
|
test_loader = DataLoader(testset, batch_size=8, shuffle=False, drop_last=False, num_workers=args.workers)
|
201
203
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: learning3d
|
3
|
-
Version: 0.0.7
|
3
|
+
Version: 0.1.0
|
4
4
|
Summary: Learning3D: A Modern Library for Deep Learning on 3D Point Clouds Data
|
5
5
|
Author-email: Vinit Sarode <vinitsarode5@gmail.com>
|
6
6
|
Project-URL: Homepage, https://github.com/vinits5/learning3d
|
@@ -2,27 +2,27 @@ learning3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
2
|
learning3d/data_utils/__init__.py,sha256=iYAVh0FThnVlG42QIgmDYrC3NGVYuzKX8s1oRqAI1YU,261
|
3
3
|
learning3d/data_utils/dataloaders.py,sha256=QlRd7c7ir19bMliZXq9PF-iNrQ_BtNTonpBCpmjYiFU,14675
|
4
4
|
learning3d/data_utils/user_data.py,sha256=ADDGeCUCr6TcXhcxvAFncIeLO71xoRHYi4H418ktvQs,4828
|
5
|
-
learning3d/examples/test_dcp.py,sha256=
|
6
|
-
learning3d/examples/test_deepgmr.py,sha256=
|
7
|
-
learning3d/examples/test_masknet.py,sha256
|
8
|
-
learning3d/examples/test_masknet2.py,sha256=
|
9
|
-
learning3d/examples/test_pcn.py,sha256=
|
10
|
-
learning3d/examples/test_pcrnet.py,sha256=
|
11
|
-
learning3d/examples/test_pnlk.py,sha256=
|
12
|
-
learning3d/examples/test_pointconv.py,sha256=
|
13
|
-
learning3d/examples/test_pointnet.py,sha256=
|
14
|
-
learning3d/examples/test_prnet.py,sha256=
|
15
|
-
learning3d/examples/test_rpmnet.py,sha256=
|
16
|
-
learning3d/examples/train_PointNetLK.py,sha256=
|
17
|
-
learning3d/examples/train_dcp.py,sha256=
|
18
|
-
learning3d/examples/train_deepgmr.py,sha256=
|
19
|
-
learning3d/examples/train_masknet.py,sha256=
|
20
|
-
learning3d/examples/train_pcn.py,sha256=
|
21
|
-
learning3d/examples/train_pcrnet.py,sha256=
|
22
|
-
learning3d/examples/train_pointconv.py,sha256=
|
23
|
-
learning3d/examples/train_pointnet.py,sha256=
|
24
|
-
learning3d/examples/train_prnet.py,sha256=
|
25
|
-
learning3d/examples/train_rpmnet.py,sha256=
|
5
|
+
learning3d/examples/test_dcp.py,sha256=EYXgCz0yh2-bjJE9s8XEUY96LfUE9_hZcdMffyrW3x0,5783
|
6
|
+
learning3d/examples/test_deepgmr.py,sha256=f9nv7Y5se9_iZ3UGasTUDnErBZh3FX0fRNY4IqeqjCk,5774
|
7
|
+
learning3d/examples/test_masknet.py,sha256=-xiGaaG9SN_u1POh51sM358QFsyx9QoYIhaNWtBNCXY,6558
|
8
|
+
learning3d/examples/test_masknet2.py,sha256=ABNDRa9R6Sp9S_H1euj2bx3LnLt6ONKRNvp5bat0jAQ,7012
|
9
|
+
learning3d/examples/test_pcn.py,sha256=rvKsqbCRmj4CLU1xWI-VzUh63t89QY6sIHi-39uhzmM,4519
|
10
|
+
learning3d/examples/test_pcrnet.py,sha256=JTq7rQN5cFxucVD-0gQ6t0K0ASuX4b_sNI7uxgUKsdg,4519
|
11
|
+
learning3d/examples/test_pnlk.py,sha256=WhZfD9s-YUg-nne_JCOSIcs0akJJV30G19QBkMVK2gc,4609
|
12
|
+
learning3d/examples/test_pointconv.py,sha256=CA-Z3KLGXtqkCWIUi1IIlpaHCyTMMd5QuNyi37D8Iz8,4826
|
13
|
+
learning3d/examples/test_pointnet.py,sha256=Cd0h-x4UcuiV-hDVMeriOYN75-uc_ZQ34mn2ikLEIbg,4477
|
14
|
+
learning3d/examples/test_prnet.py,sha256=F5Ju6BafuDocPa3qMa9ZDASaE9zP_5vgp1hgRkpIvso,5078
|
15
|
+
learning3d/examples/test_rpmnet.py,sha256=dKORbcOuil2qlUrY5VdHtaUUOGxDc8Paj7FVYmcHqEs,4629
|
16
|
+
learning3d/examples/train_PointNetLK.py,sha256=MnYJuU1ThTLRSuylqXZp6Eakeggk2Xd99BJuTyVIvxI,8950
|
17
|
+
learning3d/examples/train_dcp.py,sha256=QcyFKKCKwUH3RDUQU7-eET7Q3tIc5hLGlBBrMwv4ans,9688
|
18
|
+
learning3d/examples/train_deepgmr.py,sha256=I9GBE78u5tVrKo0zmRDjrTk0Lq-R8TRH27mEKYJU5us,9574
|
19
|
+
learning3d/examples/train_masknet.py,sha256=MRlySX4Llfp_0Eu8jwkHg3FdLlWLtlB4tVUNFbSsBbM,9066
|
20
|
+
learning3d/examples/train_pcn.py,sha256=QKJLY31Hcx8w2K4XhmY3U3zho5vFePEWr7xHo4fAUvY,7719
|
21
|
+
learning3d/examples/train_pcrnet.py,sha256=pUBYK1gtDFhBBon1UBF2yB5-2cGhILkntmplOYRBqiQ,8375
|
22
|
+
learning3d/examples/train_pointconv.py,sha256=Vn_WghOFYOOiL-U7yJQ3TB7osYTLbwEQ1WVWTvBhPVs,9116
|
23
|
+
learning3d/examples/train_pointnet.py,sha256=bpPMt0QtUoJlVgoMoPX191sO25VCk14pIKdBTCPS62I,9017
|
24
|
+
learning3d/examples/train_prnet.py,sha256=aruTiJB8WCad0cbsu5XdfhMSybdibWSGRPilbzi6xWI,8550
|
25
|
+
learning3d/examples/train_rpmnet.py,sha256=Q_kB7UnRdcIpFk_w47oNcuhIsmHLLij4dS7vz2GzTIY,8707
|
26
26
|
learning3d/losses/__init__.py,sha256=zjjZeA_NvAhZlxiYBbtgjEsvMyLFhFWXlZioitrlGWw,425
|
27
27
|
learning3d/losses/chamfer_distance.py,sha256=UTZ6x5cGwL3L5hJZOWoC35gTzcKh1S0yCg8vGuGXU1w,2121
|
28
28
|
learning3d/losses/classification.py,sha256=QgDHC5VgSga6BiuD4Ee70t6vvchlE97BY0KExevhdgk,374
|
@@ -73,8 +73,8 @@ learning3d/utils/pointconv_util.py,sha256=kJxGztai7X15YsGuorMOc50SPtj_k1yfkP4XCT
|
|
73
73
|
learning3d/utils/ppfnet_util.py,sha256=HEoxkgUBlawKZLWspfQm3caWUyAMIrW-ECtStNYbe2Y,7989
|
74
74
|
learning3d/utils/svd.py,sha256=yCYQt2SKqeIzCBnBEr_8xFR79m4fIoNVFnp77epn1dM,1936
|
75
75
|
learning3d/utils/transformer.py,sha256=UDgJvnh7ekWyijaAn-a3ckeFeMxlK_chXzWlhAGDiPM,8974
|
76
|
-
learning3d-0.0.
|
77
|
-
learning3d-0.0.
|
78
|
-
learning3d-0.0.
|
79
|
-
learning3d-0.0.
|
80
|
-
learning3d-0.0.
|
76
|
+
learning3d-0.1.0.dist-info/LICENSE,sha256=3qY3_NeQIvalbLlsHFtOfuUKjs_U2k6u7rf6YVx6ac0,1098
|
77
|
+
learning3d-0.1.0.dist-info/METADATA,sha256=kKzkNAYiiVf_t_gv-_OGGr3C1QZWQ5BCKh5sPZUbLts,15813
|
78
|
+
learning3d-0.1.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
79
|
+
learning3d-0.1.0.dist-info/top_level.txt,sha256=nTmYW8NhbNV1_15DGNpl_OvvSFtQP98sy3qrrHr0eLo,11
|
80
|
+
learning3d-0.1.0.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|