gsvvcompressor-1.2.0-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. gsvvcompressor/__init__.py +13 -0
  2. gsvvcompressor/__main__.py +243 -0
  3. gsvvcompressor/combinations/__init__.py +84 -0
  4. gsvvcompressor/combinations/registry.py +52 -0
  5. gsvvcompressor/combinations/vq_xyz_1mask.py +89 -0
  6. gsvvcompressor/combinations/vq_xyz_1mask_zstd.py +103 -0
  7. gsvvcompressor/combinations/vq_xyz_draco.py +468 -0
  8. gsvvcompressor/combinations/vq_xyz_draco_2pass.py +156 -0
  9. gsvvcompressor/combinations/vq_xyz_zstd.py +106 -0
  10. gsvvcompressor/compress/__init__.py +5 -0
  11. gsvvcompressor/compress/zstd.py +144 -0
  12. gsvvcompressor/decoder.py +155 -0
  13. gsvvcompressor/deserializer.py +42 -0
  14. gsvvcompressor/draco/__init__.py +34 -0
  15. gsvvcompressor/draco/draco_decoder.exe +0 -0
  16. gsvvcompressor/draco/draco_encoder.exe +0 -0
  17. gsvvcompressor/draco/dracoreduced3dgs.cp311-win_amd64.pyd +0 -0
  18. gsvvcompressor/draco/interface.py +339 -0
  19. gsvvcompressor/draco/serialize.py +235 -0
  20. gsvvcompressor/draco/twopass.py +359 -0
  21. gsvvcompressor/encoder.py +122 -0
  22. gsvvcompressor/interframe/__init__.py +11 -0
  23. gsvvcompressor/interframe/combine.py +271 -0
  24. gsvvcompressor/interframe/decoder.py +99 -0
  25. gsvvcompressor/interframe/encoder.py +92 -0
  26. gsvvcompressor/interframe/interface.py +221 -0
  27. gsvvcompressor/interframe/twopass.py +226 -0
  28. gsvvcompressor/io/__init__.py +31 -0
  29. gsvvcompressor/io/bytes.py +103 -0
  30. gsvvcompressor/io/config.py +78 -0
  31. gsvvcompressor/io/gaussian_model.py +127 -0
  32. gsvvcompressor/movecameras.py +33 -0
  33. gsvvcompressor/payload.py +34 -0
  34. gsvvcompressor/serializer.py +42 -0
  35. gsvvcompressor/vq/__init__.py +15 -0
  36. gsvvcompressor/vq/interface.py +324 -0
  37. gsvvcompressor/vq/singlemask.py +127 -0
  38. gsvvcompressor/vq/twopass.py +1 -0
  39. gsvvcompressor/xyz/__init__.py +26 -0
  40. gsvvcompressor/xyz/dense.py +39 -0
  41. gsvvcompressor/xyz/interface.py +382 -0
  42. gsvvcompressor/xyz/knn.py +141 -0
  43. gsvvcompressor/xyz/quant.py +143 -0
  44. gsvvcompressor/xyz/size.py +44 -0
  45. gsvvcompressor/xyz/twopass.py +1 -0
  46. gsvvcompressor-1.2.0.dist-info/METADATA +690 -0
  47. gsvvcompressor-1.2.0.dist-info/RECORD +50 -0
  48. gsvvcompressor-1.2.0.dist-info/WHEEL +5 -0
  49. gsvvcompressor-1.2.0.dist-info/licenses/LICENSE +21 -0
  50. gsvvcompressor-1.2.0.dist-info/top_level.txt +1 -0
gsvvcompressor/draco/twopass.py
@@ -0,0 +1,359 @@
+"""
+Two-pass Draco-based serialization and deserialization.
+
+This module accumulates multiple frames during serialization, then compresses
+all data as a single block during flush. This enables better compression by
+treating all frames as one continuous point cloud.
+
+WARNING: cloudpickle uses pickle under the hood. Do NOT use with untrusted
+data sources. Only use with trusted data (local files, same-process, etc.).
+"""
+
+import struct
+from dataclasses import dataclass
+from typing import Iterator, List, Optional, Self
+
+import cloudpickle as pickle
+import numpy as np
+import zstandard as zstd
+
+from ..deserializer import AbstractDeserializer
+from ..payload import Payload
+from ..serializer import AbstractSerializer
+from .interface import DracoPayload
+
+from . import dracoreduced3dgs
+
+# 4-byte big-endian length prefix (max 4GB per object)
+_LEN = struct.Struct(">I")
+
+
+@dataclass
+class TwoPassDracoPayload(DracoPayload):
+    """
+    Payload containing data in Draco-compatible format with point count.
+
+    This extends DracoPayload with a num_points field to track the number
+    of points in each frame, enabling reconstruction after batched compression.
+
+    Attributes:
+        positions: Point positions, shape (N, 3), dtype float32/float64
+        scales: Scale indices or values, shape (N, 1), dtype int32 for VQ indices
+        rotations: Rotation indices or values, shape (N, 2), dtype int32 for VQ indices
+        opacities: Opacity indices or values, shape (N, 1), dtype int32 for VQ indices
+        features_dc: DC feature indices or values, shape (N, 1), dtype int32 for VQ indices
+        features_rest: Rest feature indices or values, shape (N, 9), dtype int32 for VQ indices
+        extra: Optional additional payload for codec-specific data
+        num_points: Number of points in this frame
+    """
+
+    num_points: int = 0
+
+    def to(self, device) -> Self:
+        """
+        Move the Payload to the specified device.
+
+        Since TwoPassDracoPayload uses numpy arrays (CPU-only), only the extra
+        payload (if present) is moved to the target device.
+
+        Args:
+            device: The target device (e.g., 'cpu', 'cuda', torch.device).
+
+        Returns:
+            A new TwoPassDracoPayload with the extra payload moved to the target device.
+        """
+        return TwoPassDracoPayload(
+            positions=self.positions,
+            scales=self.scales,
+            rotations=self.rotations,
+            opacities=self.opacities,
+            features_dc=self.features_dc,
+            features_rest=self.features_rest,
+            extra=self.extra.to(device) if self.extra is not None else None,
+            num_points=self.num_points,
+        )
+
+
+class TwoPassDracoSerializer(AbstractSerializer):
+    """
+    Two-pass serializer that accumulates frames and compresses as a single block.
+
+    This serializer collects all frame data during serialize_frame calls,
+    concatenating positions, scales, rotations, etc. The actual compression
+    happens only during flush(), where all data is encoded as one Draco
+    point cloud and compressed with zstd.
+
+    Frame format after flush:
+        [num_frames][num_points_per_frame...][draco_len][draco_bytes][extras_len][extras_bytes]
+    """
+
+    def __init__(
+        self,
+        zstd_level: int = 7,
+        draco_level: int = 0,
+        qp: int = 30,
+        qscale: int = 30,
+        qrotation: int = 30,
+        qopacity: int = 30,
+        qfeaturedc: int = 30,
+        qfeaturerest: int = 30,
+    ):
+        """
+        Initialize the serializer.
+
+        Args:
+            zstd_level: Zstd compression level (1-22). Default is 7.
+            draco_level: Draco compression level (0-10). Default is 0.
+            qp: Quantization bits for positions (0 to disable).
+            qscale: Quantization bits for scales (0 to disable).
+            qrotation: Quantization bits for rotations (0 to disable).
+            qopacity: Quantization bits for opacities (0 to disable).
+            qfeaturedc: Quantization bits for features_dc (0 to disable).
+            qfeaturerest: Quantization bits for features_rest (0 to disable).
+        """
+        self._zstd_level = zstd_level
+        self._draco_level = draco_level
+        self._qp = qp
+        self._qscale = qscale
+        self._qrotation = qrotation
+        self._qopacity = qopacity
+        self._qfeaturedc = qfeaturedc
+        self._qfeaturerest = qfeaturerest
+
+        # Accumulators for frame data
+        self._positions_list: List[np.ndarray] = []
+        self._scales_list: List[np.ndarray] = []
+        self._rotations_list: List[np.ndarray] = []
+        self._opacities_list: List[np.ndarray] = []
+        self._features_dc_list: List[np.ndarray] = []
+        self._features_rest_list: List[np.ndarray] = []
+        self._num_points_list: List[int] = []
+        self._extras_list: List[Optional[Payload]] = []
+
+    def serialize_frame(self, payload: DracoPayload) -> Iterator[bytes]:
+        """
+        Accumulate a DracoPayload for later compression.
+
+        This method does not yield any bytes - all compression happens in flush().
+
+        Args:
+            payload: A DracoPayload instance to accumulate.
+
+        Yields:
+            Nothing (compression is deferred to flush).
+        """
+        num_points = payload.positions.shape[0]
+
+        self._positions_list.append(payload.positions)
+        self._scales_list.append(payload.scales)
+        self._rotations_list.append(payload.rotations)
+        self._opacities_list.append(payload.opacities)
+        self._features_dc_list.append(payload.features_dc)
+        self._features_rest_list.append(payload.features_rest)
+        self._num_points_list.append(num_points)
+        self._extras_list.append(payload.extra)
+
+        # Yield nothing - compression happens in flush
+        return
+        yield  # Make this a generator
+
+    def flush(self) -> Iterator[bytes]:
+        """
+        Compress all accumulated frames as a single block.
+
+        Concatenates all frame data, encodes with Draco, pickles extras,
+        and compresses everything with zstd.
+
+        Yields:
+            Compressed byte chunks containing all frames.
+        """
+        if not self._positions_list:
+            return
+
+        # Concatenate all arrays
+        all_positions = np.concatenate(self._positions_list, axis=0)
+        all_scales = np.concatenate(self._scales_list, axis=0)
+        all_rotations = np.concatenate(self._rotations_list, axis=0)
+        all_opacities = np.concatenate(self._opacities_list, axis=0)
+        all_features_dc = np.concatenate(self._features_dc_list, axis=0)
+        all_features_rest = np.concatenate(self._features_rest_list, axis=0)
+
+        # Encode all point cloud data using Draco
+        draco_encoded = dracoreduced3dgs.encode(
+            all_positions,
+            all_scales,
+            all_rotations,
+            all_opacities,
+            all_features_dc,
+            all_features_rest,
+            self._draco_level,
+            self._qp,
+            self._qscale,
+            self._qrotation,
+            self._qopacity,
+            self._qfeaturedc,
+            self._qfeaturerest,
+        )
+
+        # Pickle extras list
+        extras_pickled = pickle.dumps(self._extras_list, protocol=pickle.DEFAULT_PROTOCOL)
+
+        # Build frame structure:
+        # [num_frames][num_points_per_frame...][draco_len][draco_bytes][extras_len][extras_bytes]
+        num_frames = len(self._num_points_list)
+
+        frame_header = _LEN.pack(num_frames)
+        for n in self._num_points_list:
+            frame_header += _LEN.pack(n)
+
+        framed = (
+            frame_header
+            + _LEN.pack(len(draco_encoded))
+            + draco_encoded
+            + _LEN.pack(len(extras_pickled))
+            + extras_pickled
+        )
+
+        # Compress with zstd
+        compressor = zstd.ZstdCompressor(level=self._zstd_level)
+        compressed = compressor.compress(framed)
+
+        yield compressed

+        # Clear accumulators
+        self._positions_list.clear()
+        self._scales_list.clear()
+        self._rotations_list.clear()
+        self._opacities_list.clear()
+        self._features_dc_list.clear()
+        self._features_rest_list.clear()
+        self._num_points_list.clear()
+        self._extras_list.clear()
+
+
+class TwoPassDracoDeserializer(AbstractDeserializer):
+    """
+    Deserializer for two-pass Draco-compressed frame blocks.
+
+    This deserializer takes a single compressed block containing multiple frames
+    and returns them as a list of TwoPassDracoPayload objects.
+    """
+
+    def __init__(self):
+        """Initialize the deserializer."""
+        self._buffer = bytearray()
+
+    def deserialize_frame(self, data: bytes) -> Iterator[TwoPassDracoPayload]:
+        """
+        Decompress and deserialize a block of frames.
+
+        Args:
+            data: Compressed bytes containing all frames.
+
+        Yields:
+            TwoPassDracoPayload objects for each frame in the block.
+        """
+        # Accumulate data
+        self._buffer.extend(data)
+
+        # Try to decompress and parse
+        if not self._buffer:
+            return
+
+        try:
+            # Decompress with zstd
+            decompressor = zstd.ZstdDecompressor()
+            decompressed = decompressor.decompress(bytes(self._buffer))
+        except zstd.ZstdError:
+            # Not enough data yet, wait for more
+            return
+
+        # Clear buffer after successful decompression
+        self._buffer.clear()
+
+        # Parse frame structure
+        offset = 0
+
+        # Read num_frames
+        if len(decompressed) < offset + _LEN.size:
+            raise ValueError("Incomplete data: missing num_frames")
+        (num_frames,) = _LEN.unpack(decompressed[offset: offset + _LEN.size])
+        offset += _LEN.size
+
+        # Read num_points for each frame
+        num_points_list = []
+        for _ in range(num_frames):
+            if len(decompressed) < offset + _LEN.size:
+                raise ValueError("Incomplete data: missing num_points")
+            (n,) = _LEN.unpack(decompressed[offset: offset + _LEN.size])
+            num_points_list.append(n)
+            offset += _LEN.size
+
+        # Read draco data
+        if len(decompressed) < offset + _LEN.size:
+            raise ValueError("Incomplete data: missing draco_len")
+        (draco_len,) = _LEN.unpack(decompressed[offset: offset + _LEN.size])
+        offset += _LEN.size
+
+        if len(decompressed) < offset + draco_len:
+            raise ValueError("Incomplete data: missing draco_bytes")
+        draco_bytes = decompressed[offset: offset + draco_len]
+        offset += draco_len
+
+        # Read extras
+        if len(decompressed) < offset + _LEN.size:
+            raise ValueError("Incomplete data: missing extras_len")
+        (extras_len,) = _LEN.unpack(decompressed[offset: offset + _LEN.size])
+        offset += _LEN.size
+
+        if len(decompressed) < offset + extras_len:
+            raise ValueError("Incomplete data: missing extras_bytes")
+        extras_bytes = decompressed[offset: offset + extras_len]
+        offset += extras_len
+
+        # Decode draco data
+        pc = dracoreduced3dgs.decode(draco_bytes)
+
+        # Unpickle extras list
+        extras_list: List[Optional[Payload]] = pickle.loads(extras_bytes)
+
+        # Split arrays by num_points
+        all_positions = pc.positions
+        all_scales = pc.scales
+        all_rotations = pc.rotations
+        all_opacities = pc.opacities
+        all_features_dc = pc.features_dc
+        all_features_rest = pc.features_rest
+
+        start_idx = 0
+        for i, num_points in enumerate(num_points_list):
+            end_idx = start_idx + num_points
+
+            yield TwoPassDracoPayload(
+                positions=all_positions[start_idx:end_idx],
+                scales=all_scales[start_idx:end_idx],
+                rotations=all_rotations[start_idx:end_idx],
+                opacities=all_opacities[start_idx:end_idx],
+                features_dc=all_features_dc[start_idx:end_idx],
+                features_rest=all_features_rest[start_idx:end_idx],
+                extra=extras_list[i] if i < len(extras_list) else None,
+                num_points=num_points,
+            )
+
+            start_idx = end_idx
+
+    def flush(self) -> Iterator[TwoPassDracoPayload]:
+        """
+        Flush any remaining buffered data.
+
+        Yields:
+            Any remaining TwoPassDracoPayload objects.
+        """
+        # If there's leftover data, it's incomplete/corrupted
+        if self._buffer:
+            raise ValueError(
+                f"Incomplete data in buffer: {len(self._buffer)} bytes remaining"
+            )
+
+        return
+        yield  # Make this a generator
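For orientation, here is a minimal round-trip sketch of the two classes above. It is illustrative only and not part of the diff; it assumes DracoPayload exposes the constructor fields listed in the TwoPassDracoPayload docstring and that the bundled dracoreduced3dgs extension imports on your platform.

import numpy as np

from gsvvcompressor.draco.interface import DracoPayload
from gsvvcompressor.draco.twopass import TwoPassDracoDeserializer, TwoPassDracoSerializer


def make_frame(n: int) -> DracoPayload:
    # Synthetic VQ-index frame; shapes follow the docstring above (assumed constructor).
    return DracoPayload(
        positions=np.random.rand(n, 3).astype(np.float32),
        scales=np.zeros((n, 1), dtype=np.int32),
        rotations=np.zeros((n, 2), dtype=np.int32),
        opacities=np.zeros((n, 1), dtype=np.int32),
        features_dc=np.zeros((n, 1), dtype=np.int32),
        features_rest=np.zeros((n, 9), dtype=np.int32),
        extra=None,
    )


serializer = TwoPassDracoSerializer(zstd_level=7, qp=30)
chunks = []
for n in (1000, 1200, 900):
    chunks.extend(serializer.serialize_frame(make_frame(n)))  # accumulates, yields nothing
chunks.extend(serializer.flush())  # one zstd-compressed block covering all frames

deserializer = TwoPassDracoDeserializer()
frames = [f for chunk in chunks for f in deserializer.deserialize_frame(chunk)]
list(deserializer.flush())  # raises ValueError if a block was left incomplete
print([f.num_points for f in frames])  # [1000, 1200, 900]

Note that serialize_frame yields no bytes here; the compressed block only comes out of flush(), which is what lets Draco encode all frames as one continuous point cloud.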
gsvvcompressor/encoder.py
@@ -0,0 +1,122 @@
+from abc import ABC, abstractmethod
+from typing import Iterator
+
+from gaussian_splatting import GaussianModel
+
+from .payload import Payload
+from .serializer import AbstractSerializer
+
+
+class AbstractEncoder(ABC):
+    """
+    Abstract base class for compression algorithms.
+
+    This encoder uses a two-stage process:
+    1. Pack frames into Payload objects (via `pack`)
+    2. Serialize Payloads to bytes (via the serializer)
+
+    Subclasses must implement `pack` and `flush_pack`, and provide a serializer.
+    This design separates frame packing logic from serialization format.
+    """
+
+    def __init__(self, serializer: AbstractSerializer, payload_device=None):
+        """
+        Initialize the encoder.
+
+        Args:
+            serializer: The serializer to use for converting Payload to bytes.
+            payload_device: The target device for encoded Payloads before
+                serialization (e.g., 'cpu', 'cuda'). If None, no device
+                transfer is performed.
+        """
+        self._serializer = serializer
+        self._payload_device = payload_device
+
+    @abstractmethod
+    def pack(self, frame: GaussianModel) -> Iterator[Payload]:
+        """
+        Pack a single frame into Payload objects.
+
+        This method transforms the input frame into Payload objects that
+        can be serialized.
+
+        Args:
+            frame: A GaussianModel instance to pack.
+
+        Yields:
+            Packed Payload instances. May yield zero, one, or multiple
+            payloads. When the iterator is exhausted, all payloads for this
+            frame have been yielded.
+        """
+        pass
+
+    @abstractmethod
+    def flush_pack(self) -> Iterator[Payload]:
+        """
+        Flush any remaining buffered payloads from the packing stage.
+
+        This method should be called after all frames have been packed
+        to ensure any remaining buffered payloads are output.
+
+        Yields:
+            Remaining buffered Payload instances. May yield zero, one, or
+            multiple payloads until all buffered data has been flushed.
+        """
+        pass
+
+    def encode_frame(self, frame: GaussianModel) -> Iterator[bytes]:
+        """
+        Encode a single frame of GaussianModel.
+
+        This method packs the frame into Payloads and then serializes them.
+
+        Args:
+            frame: A GaussianModel instance to encode.
+
+        Yields:
+            Encoded byte chunks. May yield zero, one, or multiple chunks.
+            When the iterator is exhausted, all data for this frame has been
+            encoded.
+        """
+        for payload in self.pack(frame):
+            if self._payload_device is not None:
+                payload = payload.to(self._payload_device)
+            yield from self._serializer.serialize_frame(payload)
+
+    def flush(self) -> Iterator[bytes]:
+        """
+        Flush any remaining buffered data from both packing and serialization.
+
+        This method should be called after all frames have been encoded
+        to ensure any remaining buffered data is output.
+
+        Yields:
+            Remaining buffered byte chunks. May yield zero, one, or multiple
+            chunks until all buffered data has been flushed.
+        """
+        # Flush packing stage and serialize any remaining payloads
+        for payload in self.flush_pack():
+            if self._payload_device is not None:
+                payload = payload.to(self._payload_device)
+            yield from self._serializer.serialize_frame(payload)
+
+        # Flush serialization stage
+        yield from self._serializer.flush()
+
+    def encode_stream(self, stream: Iterator[GaussianModel]) -> Iterator[bytes]:
+        """
+        Encode a stream of GaussianModel frames.
+
+        This method packs each frame and serializes the payloads.
+        It handles the flush logic for both packing and serialization stages.
+
+        Args:
+            stream: An iterator that yields GaussianModel instances to encode.
+
+        Yields:
+            Encoded bytes for each packed frame or flush operation.
+        """
+        for frame in stream:
+            yield from self.encode_frame(frame)
+
+        yield from self.flush()
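To make the two-stage contract concrete, a hedged sketch of a subclass follows. PassthroughEncoder and pack_fn are hypothetical names introduced here for illustration; real packing logic lives in the package's combination and VQ modules.

from typing import Callable, Iterator

from gaussian_splatting import GaussianModel

from gsvvcompressor.encoder import AbstractEncoder
from gsvvcompressor.payload import Payload
from gsvvcompressor.serializer import AbstractSerializer


class PassthroughEncoder(AbstractEncoder):
    """Hypothetical encoder: one Payload per frame, no cross-frame buffering."""

    def __init__(
        self,
        serializer: AbstractSerializer,
        pack_fn: Callable[[GaussianModel], Payload],  # caller-supplied packing logic
        payload_device=None,
    ):
        super().__init__(serializer, payload_device)
        self._pack_fn = pack_fn

    def pack(self, frame: GaussianModel) -> Iterator[Payload]:
        # Stateless packing: emit exactly one Payload per input frame.
        yield self._pack_fn(frame)

    def flush_pack(self) -> Iterator[Payload]:
        # Nothing is buffered across frames, so nothing to flush.
        return
        yield  # keep this a generator, mirroring the package's own style

Driving it with encode_stream then calls pack and serialize_frame once per frame and flushes both the packing and serialization stages when the input iterator is exhausted.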
gsvvcompressor/interframe/__init__.py
@@ -0,0 +1,11 @@
+from .interface import InterframeEncoderInitConfig, InterframeCodecContext, InterframeCodecInterface
+from .encoder import InterframeEncoder
+from .decoder import InterframeDecoder
+
+__all__ = [
+    'InterframeEncoderInitConfig',
+    'InterframeCodecContext',
+    'InterframeCodecInterface',
+    'InterframeEncoder',
+    'InterframeDecoder',
+]