reduced-3dgs 1.9.1.tar.gz → 1.9.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of reduced-3dgs might be problematic.

Files changed (47)
  1. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/PKG-INFO +1 -1
  2. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/importance/combinations.py +4 -2
  3. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/importance/trainer.py +22 -10
  4. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs.egg-info/PKG-INFO +1 -1
  5. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/setup.py +1 -1
  6. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/LICENSE.md +0 -0
  7. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/README.md +0 -0
  8. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/__init__.py +0 -0
  9. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/combinations.py +0 -0
  10. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/importance/__init__.py +0 -0
  11. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/pruning/__init__.py +0 -0
  12. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/pruning/combinations.py +0 -0
  13. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/pruning/trainer.py +0 -0
  14. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/quantization/__init__.py +0 -0
  15. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/quantization/abc.py +0 -0
  16. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/quantization/exclude_zeros.py +0 -0
  17. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/quantization/quantizer.py +0 -0
  18. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/quantization/wrapper.py +0 -0
  19. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/quantize.py +0 -0
  20. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/shculling/__init__.py +0 -0
  21. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/shculling/gaussian_model.py +0 -0
  22. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/shculling/trainer.py +0 -0
  23. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs/train.py +0 -0
  24. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs.egg-info/SOURCES.txt +0 -0
  25. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs.egg-info/dependency_links.txt +0 -0
  26. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs.egg-info/requires.txt +0 -0
  27. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/reduced_3dgs.egg-info/top_level.txt +0 -0
  28. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/setup.cfg +0 -0
  29. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/cuda_rasterizer/backward.cu +0 -0
  30. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/cuda_rasterizer/forward.cu +0 -0
  31. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/cuda_rasterizer/rasterizer_impl.cu +0 -0
  32. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/diff_gaussian_rasterization/__init__.py +0 -0
  33. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/ext.cpp +0 -0
  34. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/rasterize_points.cu +0 -0
  35. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/reduced_3dgs/kmeans.cu +0 -0
  36. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/reduced_3dgs/redundancy_score.cu +0 -0
  37. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/reduced_3dgs/sh_culling.cu +0 -0
  38. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/diff-gaussian-rasterization/reduced_3dgs.cu +0 -0
  39. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/gaussian-importance/cuda_rasterizer/backward.cu +0 -0
  40. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/gaussian-importance/cuda_rasterizer/forward.cu +0 -0
  41. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/gaussian-importance/cuda_rasterizer/rasterizer_impl.cu +0 -0
  42. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/gaussian-importance/diff_gaussian_rasterization/__init__.py +0 -0
  43. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/gaussian-importance/ext.cpp +0 -0
  44. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/gaussian-importance/rasterize_points.cu +0 -0
  45. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/simple-knn/ext.cpp +0 -0
  46. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/simple-knn/simple_knn.cu +0 -0
  47. {reduced_3dgs-1.9.1 → reduced_3dgs-1.9.3}/submodules/simple-knn/spatial.cu +0 -0
PKG-INFO
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reduced_3dgs
-Version: 1.9.1
+Version: 1.9.3
 Summary: Refactored code for the paper "Reducing the Memory Footprint of 3D Gaussian Splatting"
 Home-page: https://github.com/yindaheng98/reduced-3dgs
 Author: yindaheng98
```
reduced_3dgs/importance/combinations.py
```diff
@@ -16,10 +16,11 @@ def BaseImportancePrunerInDensifyTrainer(
         importance_prune_type="comprehensive",
         importance_prune_percent=0.1,
         importance_prune_thr_important_score=None,
-        importance_prune_thr_v_important_score=1.0,
+        importance_prune_thr_v_important_score=3.0,
         importance_prune_thr_max_v_important_score=None,
         importance_prune_thr_count=1,
-        importance_prune_thr_T_alpha=0.01,
+        importance_prune_thr_T_alpha=0.1,
+        importance_prune_thr_T_alpha_avg=0.001,
         importance_v_pow=0.1,
         **kwargs):
     return DensificationTrainerWrapper(
@@ -36,6 +37,7 @@ def BaseImportancePrunerInDensifyTrainer(
             importance_prune_thr_max_v_important_score=importance_prune_thr_max_v_important_score,
             importance_prune_thr_count=importance_prune_thr_count,
             importance_prune_thr_T_alpha=importance_prune_thr_T_alpha,
+            importance_prune_thr_T_alpha_avg=importance_prune_thr_T_alpha_avg,
             importance_v_pow=importance_v_pow,
         ),
         model,
```
reduced_3dgs/importance/trainer.py
```diff
@@ -121,10 +121,11 @@ def prune_gaussians(
         prune_type="comprehensive",
         prune_percent=0.1,
         prune_thr_important_score=None,
-        prune_thr_v_important_score=1.0,
+        prune_thr_v_important_score=None,
         prune_thr_max_v_important_score=None,
-        prune_thr_count=1,
-        prune_thr_T_alpha=0.01,
+        prune_thr_count=None,
+        prune_thr_T_alpha=None,
+        prune_thr_T_alpha_avg=None,
         v_pow=0.1):
     gaussian_list, opacity_imp_list, T_alpha_imp_list = prune_list(gaussians, dataset)
     match prune_type:
@@ -141,6 +142,10 @@ def prune_gaussians(
         case "T_alpha":
             # new importance score defined by doji
             mask = score2mask(prune_percent, T_alpha_imp_list, prune_thr_T_alpha)
+        case "T_alpha_avg":
+            v_list = T_alpha_imp_list / gaussian_list
+            v_list[gaussian_list <= 0] = 0
+            mask = score2mask(prune_percent, v_list, prune_thr_T_alpha_avg)
         case "comprehensive":
             mask = torch.zeros_like(gaussian_list, dtype=torch.bool)
             if prune_thr_important_score is not None:
@@ -155,6 +160,10 @@ def prune_gaussians(
                 mask |= score2mask(prune_percent, gaussian_list, prune_thr_count)
             if prune_thr_T_alpha is not None:
                 mask |= score2mask(prune_percent, T_alpha_imp_list, prune_thr_T_alpha)
+            if prune_thr_T_alpha_avg is not None:
+                v_list = T_alpha_imp_list / gaussian_list
+                v_list[gaussian_list <= 0] = 0
+                mask |= score2mask(prune_percent, v_list, prune_thr_T_alpha_avg)
         case _:
             raise Exception("Unsupportive prunning method")
     return mask
```
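The core functional change in 1.9.3 is the new "T_alpha_avg" criterion above: the accumulated T_alpha importance of each Gaussian is divided by its hit count before thresholding, so Gaussians that are touched often but contribute little on average become pruning candidates. Below is a minimal sketch of just that scoring step, with made-up tensors standing in for the prune_list outputs and a plain comparison standing in for score2mask (whose exact behaviour, including how prune_percent is applied, is not visible in this diff):

```python
import torch

# Hypothetical stand-ins for the outputs of prune_list(gaussians, dataset):
# per-Gaussian hit counts and accumulated T*alpha importance.
gaussian_list = torch.tensor([12.0, 0.0, 3.0, 40.0])
T_alpha_imp_list = torch.tensor([0.6, 0.0, 0.0009, 8.0])

# Average importance per Gaussian; never-seen Gaussians (count <= 0) score 0,
# exactly as in the new "T_alpha_avg" branch of prune_gaussians.
v_list = T_alpha_imp_list / gaussian_list
v_list[gaussian_list <= 0] = 0

# Simplified thresholding (the package's score2mask presumably also honours
# prune_percent): flag Gaussians whose average importance falls below the new
# importance_prune_thr_T_alpha_avg default of 0.001.
mask = v_list < 0.001
print(mask)  # tensor([False,  True,  True, False])
```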
reduced_3dgs/importance/trainer.py
```diff
@@ -170,12 +179,12 @@ class ImportancePruner(DensifierWrapper):
             importance_prune_type="comprehensive",
             importance_prune_percent=0.1,
             importance_prune_thr_important_score=None,
-            importance_prune_thr_v_important_score=1.0,
+            importance_prune_thr_v_important_score=3.0,
             importance_prune_thr_max_v_important_score=None,
             importance_prune_thr_count=1,
-            importance_prune_thr_T_alpha=0.01,
-            importance_v_pow=0.1
-            ):
+            importance_prune_thr_T_alpha=1,
+            importance_prune_thr_T_alpha_avg=0.001,
+            importance_v_pow=0.1):
         super().__init__(base_densifier)
         self.dataset = dataset
         self.importance_prune_from_iter = importance_prune_from_iter
@@ -187,6 +196,7 @@ class ImportancePruner(DensifierWrapper):
         self.prune_thr_max_v_important_score = importance_prune_thr_max_v_important_score
         self.prune_thr_count = importance_prune_thr_count
         self.prune_thr_T_alpha = importance_prune_thr_T_alpha
+        self.prune_thr_T_alpha_avg = importance_prune_thr_T_alpha_avg
         self.v_pow = importance_v_pow
         self.prune_type = importance_prune_type
 
@@ -198,7 +208,7 @@ class ImportancePruner(DensifierWrapper):
             self.prune_type, self.prune_percent,
             self.prune_thr_important_score, self.prune_thr_v_important_score,
             self.prune_thr_max_v_important_score, self.prune_thr_count,
-            self.prune_thr_T_alpha, self.v_pow,
+            self.prune_thr_T_alpha, self.prune_thr_T_alpha_avg, self.v_pow,
         )
         ret = ret._replace(remove_mask=remove_mask if ret.remove_mask is None else torch.logical_or(ret.remove_mask, remove_mask))
         return ret
```
reduced_3dgs/importance/trainer.py
```diff
@@ -215,10 +225,11 @@ def BaseImportancePruningTrainer(
         importance_prune_type="comprehensive",
         importance_prune_percent=0.1,
         importance_prune_thr_important_score=None,
-        importance_prune_thr_v_important_score=1.0,
+        importance_prune_thr_v_important_score=3.0,
         importance_prune_thr_max_v_important_score=None,
         importance_prune_thr_count=1,
-        importance_prune_thr_T_alpha=0.01,
+        importance_prune_thr_T_alpha=0.1,
+        importance_prune_thr_T_alpha_avg=0.001,
         importance_v_pow=0.1,
         **kwargs):
     return DensificationTrainer(
@@ -236,6 +247,7 @@ def BaseImportancePruningTrainer(
             importance_prune_thr_max_v_important_score=importance_prune_thr_max_v_important_score,
             importance_prune_thr_count=importance_prune_thr_count,
             importance_prune_thr_T_alpha=importance_prune_thr_T_alpha,
+            importance_prune_thr_T_alpha_avg=importance_prune_thr_T_alpha_avg,
             importance_v_pow=importance_v_pow,
         ), *args, **kwargs
     )
```
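For quick reference, these are the importance-pruning keyword arguments that this release adds or retunes as they flow through BaseImportancePrunerInDensifyTrainer and BaseImportancePruningTrainer down to ImportancePruner. The sketch below only collects them into a dict; the factories' other required arguments are outside this diff and are omitted:

```python
# New or retuned importance-pruning defaults in 1.9.3 (values taken from the
# diff above); they would be passed as keyword arguments to the trainer
# factories alongside their usual model/dataset arguments.
importance_pruning_kwargs = dict(
    importance_prune_type="comprehensive",       # "T_alpha_avg" is also newly accepted
    importance_prune_thr_v_important_score=3.0,  # default raised from 1.0
    importance_prune_thr_T_alpha=0.1,            # default raised from 0.01
    importance_prune_thr_T_alpha_avg=0.001,      # new threshold in 1.9.3
)
```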
reduced_3dgs.egg-info/PKG-INFO
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reduced_3dgs
-Version: 1.9.1
+Version: 1.9.3
 Summary: Refactored code for the paper "Reducing the Memory Footprint of 3D Gaussian Splatting"
 Home-page: https://github.com/yindaheng98/reduced-3dgs
 Author: yindaheng98
```
setup.py
```diff
@@ -60,7 +60,7 @@ if os.name == 'nt':
 
 setup(
     name="reduced_3dgs",
-    version='1.9.1',
+    version='1.9.3',
     author='yindaheng98',
     author_email='yindaheng98@gmail.com',
     url='https://github.com/yindaheng98/reduced-3dgs',
```