reduced-3dgs 1.8.19-cp311-cp311-win_amd64.whl → 1.9.1-cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of reduced-3dgs has been flagged as possibly problematic.
- reduced_3dgs/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd +0 -0
- reduced_3dgs/importance/combinations.py +19 -3
- reduced_3dgs/importance/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd +0 -0
- reduced_3dgs/importance/trainer.py +117 -6
- reduced_3dgs/simple_knn/_C.cp311-win_amd64.pyd +0 -0
- {reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/METADATA +1 -1
- {reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/RECORD +10 -10
- {reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/WHEEL +0 -0
- {reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/licenses/LICENSE.md +0 -0
- {reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/top_level.txt +0 -0

reduced_3dgs/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd
Binary file

reduced_3dgs/importance/combinations.py

@@ -10,9 +10,17 @@ def BaseImportancePrunerInDensifyTrainer(
         scene_extent: float,
         dataset: List[Camera],
         *args,
-        importance_prune_from_iter=
-        importance_prune_until_iter=
-        importance_prune_interval=
+        importance_prune_from_iter=15000,
+        importance_prune_until_iter=20000,
+        importance_prune_interval: int = 1000,
+        importance_prune_type="comprehensive",
+        importance_prune_percent=0.1,
+        importance_prune_thr_important_score=None,
+        importance_prune_thr_v_important_score=1.0,
+        importance_prune_thr_max_v_important_score=None,
+        importance_prune_thr_count=1,
+        importance_prune_thr_T_alpha=0.01,
+        importance_v_pow=0.1,
         **kwargs):
     return DensificationTrainerWrapper(
         lambda model, scene_extent: ImportancePruner(
@@ -21,6 +29,14 @@ def BaseImportancePrunerInDensifyTrainer(
             importance_prune_from_iter=importance_prune_from_iter,
             importance_prune_until_iter=importance_prune_until_iter,
             importance_prune_interval=importance_prune_interval,
+            importance_prune_type=importance_prune_type,
+            importance_prune_percent=importance_prune_percent,
+            importance_prune_thr_important_score=importance_prune_thr_important_score,
+            importance_prune_thr_v_important_score=importance_prune_thr_v_important_score,
+            importance_prune_thr_max_v_important_score=importance_prune_thr_max_v_important_score,
+            importance_prune_thr_count=importance_prune_thr_count,
+            importance_prune_thr_T_alpha=importance_prune_thr_T_alpha,
+            importance_v_pow=importance_v_pow,
         ),
         model,
         scene_extent,
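
For orientation, a minimal usage sketch of the expanded wrapper signature above; it is not taken from the package. The import path follows the module layout in the file list, the leading positional parameters are assumed to be (model, scene_extent, dataset) as suggested by the surrounding context lines, and the model/scene_extent/cameras objects are assumed to come from the usual reduced-3dgs training setup and are not constructed here.

from reduced_3dgs.importance.combinations import BaseImportancePrunerInDensifyTrainer

# Hypothetical call: keyword names come from this diff, everything else is assumed.
trainer = BaseImportancePrunerInDensifyTrainer(
    model, scene_extent, cameras,
    importance_prune_type="v_important_score",  # one of the prune types added in trainer.py below
    importance_prune_percent=0.2,
    importance_v_pow=0.1,
)
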
reduced_3dgs/importance/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd
Binary file

reduced_3dgs/importance/trainer.py

@@ -75,7 +75,7 @@ def count_render(self: GaussianModel, viewpoint_camera: Camera):
     }
 
 
-def
+def prune_list(model: GaussianModel, dataset: CameraDataset):
     gaussian_count = torch.zeros(model.get_xyz.shape[0], device=model.get_xyz.device, dtype=torch.int)
     opacity_important_score = torch.zeros(model.get_xyz.shape[0], device=model.get_xyz.device, dtype=torch.float)
     T_alpha_important_score = torch.zeros(model.get_xyz.shape[0], device=model.get_xyz.device, dtype=torch.float)
@@ -84,7 +84,80 @@ def prune_gaussians(model: GaussianModel, dataset: CameraDataset):
         gaussian_count += out["gaussians_count"]
         opacity_important_score += out["opacity_important_score"]
         T_alpha_important_score += out["T_alpha_important_score"]
-    return
+    return gaussian_count, opacity_important_score, T_alpha_important_score
+
+
+# return importance score with adaptive volume measure described in paper
+def calculate_v_imp_score(gaussians: GaussianModel, imp_list, v_pow):
+    """
+    :param gaussians: A data structure containing Gaussian components with a get_scaling method.
+    :param imp_list: The importance scores for each Gaussian component.
+    :param v_pow: The power to which the volume ratios are raised.
+    :return: A list of adjusted values (v_list) used for pruning.
+    """
+    # Calculate the volume of each Gaussian component
+    volume = torch.prod(gaussians.get_scaling, dim=1)
+    # Determine the kth_percent_largest value
+    index = int(len(volume) * 0.9)
+    sorted_volume, _ = torch.sort(volume, descending=True)
+    kth_percent_largest = sorted_volume[index]
+    # Calculate v_list
+    v_list = torch.pow(volume / kth_percent_largest, v_pow)
+    v_list = v_list * imp_list
+    return v_list
+
+
+def score2mask(percent, import_score: list, threshold=None):
+    sorted_tensor, _ = torch.sort(import_score, dim=0)
+    index_nth_percentile = int(percent * (sorted_tensor.shape[0] - 1))
+    value_nth_percentile = sorted_tensor[index_nth_percentile]
+    thr = min(threshold, value_nth_percentile) if threshold is not None else value_nth_percentile
+    prune_mask = (import_score <= thr)
+    return prune_mask
+
+
+def prune_gaussians(
+        gaussians: GaussianModel, dataset: CameraDataset,
+        prune_type="comprehensive",
+        prune_percent=0.1,
+        prune_thr_important_score=None,
+        prune_thr_v_important_score=1.0,
+        prune_thr_max_v_important_score=None,
+        prune_thr_count=1,
+        prune_thr_T_alpha=0.01,
+        v_pow=0.1):
+    gaussian_list, opacity_imp_list, T_alpha_imp_list = prune_list(gaussians, dataset)
+    match prune_type:
+        case "important_score":
+            mask = score2mask(prune_percent, opacity_imp_list, prune_thr_important_score)
+        case "v_important_score":
+            v_list = calculate_v_imp_score(gaussians, opacity_imp_list, v_pow)
+            mask = score2mask(prune_percent, v_list, prune_thr_v_important_score)
+        case "max_v_important_score":
+            v_list = opacity_imp_list * torch.max(gaussians.get_scaling, dim=1)[0]
+            mask = score2mask(prune_percent, v_list, prune_thr_max_v_important_score)
+        case "count":
+            mask = score2mask(prune_percent, gaussian_list, prune_thr_count)
+        case "T_alpha":
+            # new importance score defined by doji
+            mask = score2mask(prune_percent, T_alpha_imp_list, prune_thr_T_alpha)
+        case "comprehensive":
+            mask = torch.zeros_like(gaussian_list, dtype=torch.bool)
+            if prune_thr_important_score is not None:
+                mask |= score2mask(prune_percent, opacity_imp_list, prune_thr_important_score)
+            if prune_thr_v_important_score is not None:
+                v_list = calculate_v_imp_score(gaussians, opacity_imp_list, v_pow)
+                mask |= score2mask(prune_percent, v_list, prune_thr_v_important_score)
+            if prune_thr_max_v_important_score is not None:
+                v_list = opacity_imp_list * torch.max(gaussians.get_scaling, dim=1)[0]
+                mask |= score2mask(prune_percent, v_list, prune_thr_max_v_important_score)
+            if prune_thr_count is not None:
+                mask |= score2mask(prune_percent, gaussian_list, prune_thr_count)
+            if prune_thr_T_alpha is not None:
+                mask |= score2mask(prune_percent, T_alpha_imp_list, prune_thr_T_alpha)
+        case _:
+            raise Exception("Unsupportive prunning method")
+    return mask
 
 
 class ImportancePruner(DensifierWrapper):
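
To make the new scoring utilities concrete, here is a small self-contained sketch (not part of the package) that mirrors what score2mask and calculate_v_imp_score above do on toy tensors: a Gaussian is marked for pruning when its score falls at or below the smaller of an absolute threshold and the value at the requested percentile, and the volume-adjusted score scales each importance value by (volume / v90) ** v_pow, where v90 is the volume that roughly 90% of the Gaussians exceed.

import torch

# Toy data: 10 Gaussians with made-up importance scores and per-axis scalings.
scores = torch.tensor([0.05, 0.20, 0.90, 0.01, 0.40, 0.70, 0.03, 0.60, 0.08, 0.50])
scaling = torch.rand(10, 3) * 0.1  # stands in for gaussians.get_scaling

# score2mask-style thresholding: cap the percentile value by the absolute threshold.
percent, threshold = 0.3, 0.1
sorted_scores, _ = torch.sort(scores, dim=0)
value_nth = sorted_scores[int(percent * (sorted_scores.shape[0] - 1))]
thr = min(threshold, value_nth) if threshold is not None else value_nth
prune_mask = scores <= thr
print("pruned:", int(prune_mask.sum()), "of", scores.numel())

# calculate_v_imp_score-style volume weighting of the same scores.
v_pow = 0.1
volume = torch.prod(scaling, dim=1)
sorted_volume, _ = torch.sort(volume, descending=True)
v90 = sorted_volume[int(len(volume) * 0.9)]
v_scores = torch.pow(volume / v90, v_pow) * scores
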
@@ -94,17 +167,39 @@ class ImportancePruner(DensifierWrapper):
             importance_prune_from_iter=15000,
             importance_prune_until_iter=20000,
             importance_prune_interval: int = 1000,
+            importance_prune_type="comprehensive",
+            importance_prune_percent=0.1,
+            importance_prune_thr_important_score=None,
+            importance_prune_thr_v_important_score=1.0,
+            importance_prune_thr_max_v_important_score=None,
+            importance_prune_thr_count=1,
+            importance_prune_thr_T_alpha=0.01,
+            importance_v_pow=0.1
     ):
         super().__init__(base_densifier)
         self.dataset = dataset
         self.importance_prune_from_iter = importance_prune_from_iter
         self.importance_prune_until_iter = importance_prune_until_iter
         self.importance_prune_interval = importance_prune_interval
+        self.prune_percent = importance_prune_percent
+        self.prune_thr_important_score = importance_prune_thr_important_score
+        self.prune_thr_v_important_score = importance_prune_thr_v_important_score
+        self.prune_thr_max_v_important_score = importance_prune_thr_max_v_important_score
+        self.prune_thr_count = importance_prune_thr_count
+        self.prune_thr_T_alpha = importance_prune_thr_T_alpha
+        self.v_pow = importance_v_pow
+        self.prune_type = importance_prune_type
 
     def densify_and_prune(self, loss, out, camera, step: int):
         ret = super().densify_and_prune(loss, out, camera, step)
         if self.importance_prune_from_iter <= step <= self.importance_prune_until_iter and step % self.importance_prune_interval == 0:
-            remove_mask = prune_gaussians(
+            remove_mask = prune_gaussians(
+                self.model, self.dataset,
+                self.prune_type, self.prune_percent,
+                self.prune_thr_important_score, self.prune_thr_v_important_score,
+                self.prune_thr_max_v_important_score, self.prune_thr_count,
+                self.prune_thr_T_alpha, self.v_pow,
+            )
         ret = ret._replace(remove_mask=remove_mask if ret.remove_mask is None else torch.logical_or(ret.remove_mask, remove_mask))
         return ret
 
@@ -114,9 +209,17 @@ def BaseImportancePruningTrainer(
         scene_extent: float,
         dataset: List[Camera],
         *args,
-        importance_prune_from_iter=
-        importance_prune_until_iter=
-        importance_prune_interval: int =
+        importance_prune_from_iter=15000,
+        importance_prune_until_iter=20000,
+        importance_prune_interval: int = 1000,
+        importance_prune_type="comprehensive",
+        importance_prune_percent=0.1,
+        importance_prune_thr_important_score=None,
+        importance_prune_thr_v_important_score=1.0,
+        importance_prune_thr_max_v_important_score=None,
+        importance_prune_thr_count=1,
+        importance_prune_thr_T_alpha=0.01,
+        importance_v_pow=0.1,
         **kwargs):
     return DensificationTrainer(
         model, scene_extent,
@@ -126,5 +229,13 @@ def BaseImportancePruningTrainer(
             importance_prune_from_iter=importance_prune_from_iter,
             importance_prune_until_iter=importance_prune_until_iter,
             importance_prune_interval=importance_prune_interval,
+            importance_prune_type=importance_prune_type,
+            importance_prune_percent=importance_prune_percent,
+            importance_prune_thr_important_score=importance_prune_thr_important_score,
+            importance_prune_thr_v_important_score=importance_prune_thr_v_important_score,
+            importance_prune_thr_max_v_important_score=importance_prune_thr_max_v_important_score,
+            importance_prune_thr_count=importance_prune_thr_count,
+            importance_prune_thr_T_alpha=importance_prune_thr_T_alpha,
+            importance_v_pow=importance_v_pow,
         ), *args, **kwargs
     )
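
Taken together, these trainer.py changes also make the pruning logic callable outside the trainers. A rough sketch follows, assuming gaussians and cameras are an existing GaussianModel and CameraDataset (not constructed here) and that the import path matches the module layout in this diff; the keyword names are taken from the new prune_gaussians signature.

from reduced_3dgs.importance.trainer import prune_gaussians

# With prune_type="comprehensive" the returned mask is the union of every
# criterion whose threshold is not None, each capped at roughly prune_percent.
remove_mask = prune_gaussians(
    gaussians, cameras,
    prune_type="comprehensive",
    prune_percent=0.1,
    prune_thr_v_important_score=1.0,
    prune_thr_count=1,
    prune_thr_T_alpha=0.01,
)
print("would remove", int(remove_mask.sum()), "of", remove_mask.numel(), "Gaussians")
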
reduced_3dgs/simple_knn/_C.cp311-win_amd64.pyd
Binary file

{reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/RECORD

@@ -2,12 +2,12 @@ reduced_3dgs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reduced_3dgs/combinations.py,sha256=FrZLxd3AZlHPSq_WJeEdGWH5zh40rAuV5txxr8HsSPY,7031
 reduced_3dgs/quantize.py,sha256=Y44qHyFdOIqke7NoeqXmyKloS43j-al74ZiNsuZZHbM,2527
 reduced_3dgs/train.py,sha256=jXHdXk05o_ebHjx_VBzcY6fRNn9EdKve6Tf5YC5an0o,9803
-reduced_3dgs/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd,sha256=
+reduced_3dgs/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd,sha256=RBnGVfTJh1P_C6CDui3eMP84PgFKjRxZZ-YtUE0JMik,1632256
 reduced_3dgs/diff_gaussian_rasterization/__init__.py,sha256=oV6JjTc-50MscX4XHeIWSgLr3l8Y25knBIs-0gRbJr4,7932
 reduced_3dgs/importance/__init__.py,sha256=neJsbY5cLikEGBQGdR4MjwCQ5VWVikT1357DwL0EtWU,289
-reduced_3dgs/importance/combinations.py,sha256=
-reduced_3dgs/importance/trainer.py,sha256=
-reduced_3dgs/importance/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd,sha256=
+reduced_3dgs/importance/combinations.py,sha256=Q2WqwXNuclPWBsw15aR14xQ72JewVeZo9igMeaSfuf8,2693
+reduced_3dgs/importance/trainer.py,sha256=ozzjPSXIpHNP4H1mkXlyAv5KJXWt1k4NKCikx9E9S1E,11151
+reduced_3dgs/importance/diff_gaussian_rasterization/_C.cp311-win_amd64.pyd,sha256=1bWY2QSSugSLztOSu9NZjZL5wxRDdHHGeJ7_zBos8qU,1315840
 reduced_3dgs/importance/diff_gaussian_rasterization/__init__.py,sha256=Tix8auyXBb_QFQtXrV3sLE9kdnl5zgHH0BbqcFzDp84,12850
 reduced_3dgs/pruning/__init__.py,sha256=E_YxJ9cDV_B6EJbYUBEcuRYMIht_C72rI1VJUXFCLpM,201
 reduced_3dgs/pruning/combinations.py,sha256=UivTfbSMmaWYVi9E4OF-_AZA-WBWniMiX-wKUftezF8,2331
@@ -20,9 +20,9 @@ reduced_3dgs/quantization/wrapper.py,sha256=cyXqfJgo9b3fS7DYXxOk5LmQudvrEhweOebF
 reduced_3dgs/shculling/__init__.py,sha256=nP2BejDCUdCmJNRbg0hfhHREO6jyZXwIcRiw6ttVgqo,149
 reduced_3dgs/shculling/gaussian_model.py,sha256=f8QWaL09vaV9Tcf6Dngjg_Fmk1wTQPAjWhuhI_N02Y8,2877
 reduced_3dgs/shculling/trainer.py,sha256=9hwR77djhZpyf-URhwKHjnLbe0ZAOS-DIw58RzkcHXQ,6369
-reduced_3dgs/simple_knn/_C.cp311-win_amd64.pyd,sha256=
-reduced_3dgs-1.
-reduced_3dgs-1.
-reduced_3dgs-1.
-reduced_3dgs-1.
-reduced_3dgs-1.
+reduced_3dgs/simple_knn/_C.cp311-win_amd64.pyd,sha256=d56351BP_34vH4GnQmrZqXxDk9lpa6sw8ocuQYhyH0o,1263616
+reduced_3dgs-1.9.1.dist-info/licenses/LICENSE.md,sha256=LQ4_LAqlncGkg_mQy5ykMAFtQDSPB0eKmIEtBut0yjw,4916
+reduced_3dgs-1.9.1.dist-info/METADATA,sha256=TIrUop8Y3QjsjnVAxNEGADwBMtVArYSbYhhzMYLWMYc,13014
+reduced_3dgs-1.9.1.dist-info/WHEEL,sha256=ZhKniLpVlRfVU2a9ty7N0kTvsYDNee6K-jhM1zk8cMw,101
+reduced_3dgs-1.9.1.dist-info/top_level.txt,sha256=PpU5aT3-baSCdqCtTaZknoB32H93UeKCkYDkRCCZMEI,13
+reduced_3dgs-1.9.1.dist-info/RECORD,,
{reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/WHEEL: File without changes
{reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/licenses/LICENSE.md: File without changes
{reduced_3dgs-1.8.19.dist-info → reduced_3dgs-1.9.1.dist-info}/top_level.txt: File without changes