wildflow-splat 0.1.3-cp312-cp312-win32.whl → 0.1.4-cp312-cp312-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wildflow/__init__.py +1 -1
- wildflow/splat/__init__.py +17 -2
- wildflow/splat/_core.cp312-win32.pyd +0 -0
- wildflow/splat/patches.py +65 -0
- wildflow_splat-0.1.4.dist-info/METADATA +74 -0
- wildflow_splat-0.1.4.dist-info/RECORD +9 -0
- {wildflow_splat-0.1.3.dist-info → wildflow_splat-0.1.4.dist-info}/WHEEL +1 -1
- wildflow_splat-0.1.3.dist-info/METADATA +0 -170
- wildflow_splat-0.1.3.dist-info/RECORD +0 -8
- {wildflow_splat-0.1.3.dist-info → wildflow_splat-0.1.4.dist-info}/licenses/LICENSE +0 -0
wildflow/__init__.py
CHANGED
wildflow/splat/__init__.py
CHANGED
@@ -1,9 +1,24 @@
 """
 wildflow.splat - creating 3D Gaussian Splatting models of coral reefs.
+
+This module provides tools for optimal camera pose partitioning and
+point cloud processing for 3D Gaussian splatting workflows.
 """
 
+# Core partitioning functionality
+from .patches import patches, BoundingBox
+
+# PLY processing functionality
 from ._core import Config, Patch
 from .split import split_point_cloud
 
-__version__ = "0.1.3"
-
+__version__ = "0.1.4"
+
+# Public API
+__all__ = [
+    "patches",
+    "BoundingBox",
+    "Config",
+    "Patch",
+    "split_point_cloud",
+]
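Version 0.1.4 widens the public surface of `wildflow.splat` through `__all__`. A minimal sketch of that import surface, using only the names exported above; the sample coordinates are illustrative, and how `Config`, `Patch`, and `split_point_cloud` are called is not shown in this diff:

```py
# Hypothetical smoke test of the 0.1.4 exports; only the names come from the diff.
from wildflow.splat import patches, BoundingBox, Config, Patch, split_point_cloud

cameras = [(0.0, 0.0), (1.0, 1.0), (2.0, 2.0), (10.0, 10.0), (11.0, 11.0)]
boxes = patches(cameras, max_cameras=3)  # returns a list of BoundingBox patches
print(f"Split into {len(boxes)} patches")
```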
wildflow/splat/_core.cp312-win32.pyd
CHANGED
Binary file
wildflow/splat/patches.py
ADDED
@@ -0,0 +1,65 @@
+"""
+Split COLMAP camera poses into patches for GPU training.
+
+When you have a large 3D scene with many camera poses (like from COLMAP),
+you need to split it into smaller patches that can fit on your GPU for
+3D Gaussian Splatting training. This finds the optimal way to partition
+your cameras into rectangular patches.
+
+Basic usage:
+    >>> from wildflow.splat import patches
+    >>> # Your camera positions as (x, y) tuples
+    >>> cameras = [(0, 0), (1, 1), (2, 2), (10, 10), (11, 11)]
+    >>> # Split into patches with max 3 cameras each
+    >>> result = patches(cameras, max_cameras=3)
+    >>> print(f"Split into {len(result)} patches")
+
+Advanced usage:
+    >>> # More control over the splitting
+    >>> result = patches(
+    ...     cameras,
+    ...     max_cameras=100,    # Max cameras per patch
+    ...     buffer_meters=1.5,  # Safety buffer around patches
+    ...     target_bins=50      # Granularity of splitting
+    ... )
+
+Loading from COLMAP:
+    >>> import pycolmap
+    >>> model = pycolmap.Reconstruction("path/to/colmap")
+    >>> cameras = [(img.projection_center()[0], img.projection_center()[1])
+    ...            for img in model.images.values()]
+    >>> result = patches(cameras)
+"""
+
+from typing import List, Tuple, Sequence
+from ._core import patches as _patches, BoundingBox
+
+# Simple data structures
+Point = Tuple[float, float]
+
+def patches(camera_positions: Sequence[Point],
+            max_cameras: int = 700,
+            buffer_meters: float = 1.5,
+            target_bins: int = 100) -> List[BoundingBox]:
+    """
+    Split camera positions into optimal rectangular patches for GPU training.
+
+    Args:
+        camera_positions: List of (x, y) camera positions
+        max_cameras: Maximum cameras allowed per patch
+        buffer_meters: Safety buffer around each patch in meters
+        target_bins: Granularity of the splitting algorithm (higher = more precise)
+
+    Returns:
+        List of BoundingBox patches that cover all cameras optimally
+
+    Example:
+        >>> cameras = [(0, 0), (1, 1), (2, 2)]
+        >>> result = patches(cameras, max_cameras=10)
+        >>> print(f"Created {len(result)} patches")
+    """
+    # Convert to list of tuples for Rust compatibility
+    camera_list = [(float(pos[0]), float(pos[1])) for pos in camera_positions]
+
+    # Call the high-performance Rust implementation
+    return _patches(camera_list, max_cameras, buffer_meters, target_bins)
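The wrapper above only coerces positions to float tuples and forwards to the Rust `_patches` binding, so behaviour is governed by `max_cameras`, `buffer_meters`, and `target_bins`. A hedged sketch of tuning those knobs on a synthetic survey grid (grid size and parameter values are illustrative, not taken from the diff):

```py
from wildflow.splat import patches

# Illustrative 40 m x 40 m survey grid with cameras at 1 m spacing (1600 poses).
cameras = [(float(x), float(y)) for x in range(40) for y in range(40)]

# Smaller patches for a smaller GPU: fewer cameras per patch, finer binning.
boxes = patches(cameras, max_cameras=400, buffer_meters=1.5, target_bins=100)
print(f"{len(cameras)} cameras -> {len(boxes)} patches")
```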
wildflow_splat-0.1.4.dist-info/METADATA
ADDED
@@ -0,0 +1,74 @@
+Metadata-Version: 2.4
+Name: wildflow-splat
+Version: 0.1.4
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Rust
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Multimedia :: Graphics :: 3D Modeling
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: maturin>=1.0 ; extra == 'dev'
+Requires-Dist: pytest>=7.0 ; extra == 'dev'
+Provides-Extra: dev
+License-File: LICENSE
+Summary: Fast PLY point cloud processing for 3D Gaussian splatting workflows
+Keywords: ply,point-cloud,3d-gaussian-splatting,colmap,photogrammetry,computer-vision
+Author-email: Wildflow AI <info@wildflow.ai>
+License: MIT
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
+Project-URL: repository, https://github.com/wildflowai/wildflow-splat
+Project-URL: documentation, https://docs.wildflow.ai
+
+Super hacky library to work with coral reef splats.
+
+You can swim with a few GoPros around a reef (e.g. [wildflow.ai/protocol](https://wildflow.ai/protocol)) and then turn the footage into 3D models (e.g. [wildflow.ai/demo](https://wildflow.ai/demo)) to track changes over time, run different analyses on top of it, and ultimately see which conservation/restoration methods work best.
+
+This is a bunch of primitives to process the data.
+
+# Usage
+Install with
+```
+pip install wildflow
+```
+So you can play with it from Python:
+```py
+from wildflow import splat
+splat.split(...)
+```
+# Workflow
+
+## SfM workflow
+Turns images from cameras into a 3D point cloud and camera poses.
+
+
+
+# Local Development
+
+This library uses Rust extensions built with Maturin. To set up locally:
+
+```bash
+# Create virtual environment
+python3 -m venv venv
+source venv/bin/activate
+
+# Install Rust (if not already installed)
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+source "$HOME/.cargo/env"
+
+# Install dependencies and build
+pip install maturin
+pip install -r requirements.txt
+maturin develop
+```
+
+After making changes to Rust code, rebuild with `maturin develop`.
+
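The README installs from PyPI and drives everything through the `wildflow.splat` namespace. A short sketch of that flow, assuming the 0.1.4 exports shown earlier in this diff; the README's `splat.split(...)` call is left elided because its arguments are not part of this diff:

```py
# pip install wildflow
from wildflow import splat

# Partition camera positions (e.g. exported from COLMAP/Metashape).
boxes = splat.patches([(0.0, 0.0), (5.0, 5.0), (20.0, 20.0)], max_cameras=2)
print(f"{len(boxes)} patches")

# The PLY helpers (Config, Patch, split_point_cloud) sit alongside patches(),
# but their signatures are not shown in this diff.
```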
wildflow_splat-0.1.4.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+wildflow/__init__.py,sha256=mY5dWni3TZNOGbUa-7d_VY-Z1GTD7Ebe_OOxwF3Yrls,143
+wildflow/splat/__init__.py,sha256=EPO1QD1-YfTeKxOimF7ACQUexKQZsJHV1fk17cUXW_U,551
+wildflow/splat/_core.cp312-win32.pyd,sha256=AWmpD68XbFEOWWW8AEGOC_BC-QqeyFTusyKNCkzBX6E,629248
+wildflow/splat/patches.py,sha256=OQpqlOpPRRbm_YotxZMDTcnLONmxpjrH7CsNfSekSF0,2515
+wildflow/splat/split.py,sha256=KENk-wXQNEAgZLIokIkmBT3vUpQiSC1NhQAXPNcd1RE,1244
+wildflow_splat-0.1.4.dist-info/METADATA,sha256=nM8wtC5z3OgVGT8RAra8hEDbv8COU_IsQFxZCJ1lCb0,2557
+wildflow_splat-0.1.4.dist-info/WHEEL,sha256=_hRredGV19BUVIQG50bodYTQ5yHFvjyY42Je1tLhKbY,92
+wildflow_splat-0.1.4.dist-info/licenses/LICENSE,sha256=ILWNDv8HAaOBG3VaDh2DsV4XCYCmvNJiSDSimZvyC8U,1108
+wildflow_splat-0.1.4.dist-info/RECORD,,
wildflow_splat-0.1.3.dist-info/METADATA
DELETED
@@ -1,170 +0,0 @@
-Metadata-Version: 2.4
-Name: wildflow-splat
-Version: 0.1.3
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: Intended Audience :: Science/Research
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Rust
-Classifier: Topic :: Scientific/Engineering
-Classifier: Topic :: Multimedia :: Graphics :: 3D Modeling
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Dist: maturin>=1.0 ; extra == 'dev'
-Requires-Dist: pytest>=7.0 ; extra == 'dev'
-Provides-Extra: dev
-License-File: LICENSE
-Summary: Fast PLY point cloud processing for 3D Gaussian splatting workflows
-Keywords: ply,point-cloud,3d-gaussian-splatting,colmap,photogrammetry,computer-vision
-Author-email: Wildflow AI <info@wildflow.ai>
-License: MIT
-Requires-Python: >=3.8
-Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
-Project-URL: repository, https://github.com/wildflowai/wildflow-splat
-Project-URL: documentation, https://docs.wildflow.ai
-
-Super hacky library to work with splats.
-
-Usage:
-
-```sh
-git clone https://github.com/wildflowai/splat.git
-cd splat
-pip install -r requirements.txt
-```
-
-# Train Reef Splats Workflow
-
-Someone swims with a few GoPros around a reef and we create a 3D model so that you can see corals in the browser and how they change over time (e.g. [wildflow.ai/demo](https://wildflow.ai/demo)).
-
-The workflow below takes a Metashape project as input, trains a 3DGS model, and deploys it for users to see.
-
-
-
-# 0. Metashape | 100 years
-
-Metashape project that we received for a given coral reef site. Camera positions reconstructed. Everything is scaled correctly. Time-series 3D models are co-registered (aligned in space).
-
-```
-out:
-  metashape/site/time
-    /project_file
-
-retention: R0
-```
-
-# 1. Prep
-
-## 1.a. Colmap | H100 linux | 2 hours
-
-Export camera positions, point cloud, and images (warped) in Colmap format. Keep image size under 2-3MB (sometimes they blow up to 10MB after this).
-
-```
-dep: 0
-out:
-  3dgs/site/time/1_prep/all/images
-    /sparse/0
-retention: R1 - because metashape and takes time
-```
-
-## 1.b. Colour correction | H100 linux or Windows with GPU | 5 hours
-
-Lightroom or AI colour correction. Keep original warped images.
-
-```
-dep: 1.a
-out:
-  3dgs/site/time/1_prep/all/images
-    /original_images
-retention: R1 - because it takes time to correct
-```
-
-## 1.c. Train split | H100 linux | 15 min per patch
-
-Split the large model into smaller ones for training so that it fits into VRAM, usually about 15x15 meters. Use the Metashape API to save these patches in COLMAP format.
-
-```
-dep: 0
-out:
-  3dgs/site/time/1_prep/patch/sparse/0
-
-retention: R1 - because of metashape api
-```
-
-## 1.d. Copy images | Any computer would do | 5 min
-
-Copy colour-corrected images from the folder with all pictures here, so we have a COLMAP project for a given patch. Alternatively, move COLMAP artifacts to the folder with all images.
-
-```
-dep: 1.b, 1.c
-out:
-  3dgs/site/time/1_prep/patch/images
-retention: R4 - disposable
-```
-
-# 2. Train | WINDOWS
-
-Train 3DGS for each splat using Postshot. Only works on Windows. Keep both high-resolution and low-resolution models.
-
-```
-dep: 1.d
-
-inp:
-  3dgs/site/time/1_prep/patch
-
-out:
-  3dgs/site/time/2_train/high/site-time-patch.ply
-  3dgs/site/time/2_train/low/site-time-patch.ply
-
-retention: R2 - training takes time and windows
-```
-
-# 3. Cleanup
-
-## 3.a Merge and split | Any computer would do
-
-Split large patches into smaller 5x5 cells. Each cell should have a 50cm margin (needed for the cleaning algo to work well).
-
-```
-dep: 2
-
-out:
-  3dgs/site/time/3_clean/high-raw/site-time-cell.ply
-  3dgs/site/time/3_clean/low-raw/site-time-cell.ply
-
-retention: R4
-```
-
-## 3.b Cleanup | H100 linux
-
-For each cell, run the cleanup process to remove wacky splats.
-High-res: keep as 5x5 cells.
-Low-res: merge together into one model.
-
-```
-dep: 3.a
-
-out:
-  3dgs/site/time/3_clean/high/site-time-cell.ply
-  3dgs/site/time/3_clean/low/site-time-cell.ply
-
-retention: R4 - because we deploy this data straightaway if it's good
-```
-
-# 4. Deploy | Any computer would do
-
-Convert the clean models into Octree format. Deploy everything to the GCS bucket (production) along with the credits doc. Now users can fly over coral reefs.
-
-```
-dep: 3.b
-inp: credits.md
-out: gs://wildflow/site-time-hash/...
-
-retention: R1 - because it's user-facing
-```
-
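The removed step 1.c ("Train split") is the manual counterpart of what the new `patches()` helper computes. A hedged sketch of wiring it to a COLMAP export, following the docstring's pycolmap example; the `min_x`/`min_y`/`max_x`/`max_y` attribute names on `BoundingBox` are assumptions, since this diff never shows its fields:

```py
import pycolmap
from wildflow.splat import patches

# Sparse COLMAP model exported in step 1.a (path follows the removed layout).
model = pycolmap.Reconstruction("3dgs/site/time/1_prep/all/sparse/0")
cameras = [(img.projection_center()[0], img.projection_center()[1])
           for img in model.images.values()]

boxes = patches(cameras, max_cameras=700, buffer_meters=1.5)
for i, box in enumerate(boxes):
    # Attribute names below are assumed; check the BoundingBox class in _core.
    print(f"patch {i}: x=[{box.min_x:.1f}, {box.max_x:.1f}], "
          f"y=[{box.min_y:.1f}, {box.max_y:.1f}]")
```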
wildflow_splat-0.1.3.dist-info/RECORD
DELETED
@@ -1,8 +0,0 @@
-wildflow/__init__.py,sha256=c1W_QjZ04hwU8Hhkzk5-17Fj4FjB_ZQIUbFVo7u6-9M,143
-wildflow/splat/__init__.py,sha256=e2sMSGeE4gYJly6EfMcRwI47-yGqh74fCB7rgL-e_uQ,233
-wildflow/splat/_core.cp312-win32.pyd,sha256=9b7CXgW721Q0yCXzm9fUmziKId8JBko2GfLFdOYGvZc,572416
-wildflow/splat/split.py,sha256=KENk-wXQNEAgZLIokIkmBT3vUpQiSC1NhQAXPNcd1RE,1244
-wildflow_splat-0.1.3.dist-info/METADATA,sha256=E0kOni6tHqQkYxUIDa6iILfSs-SioWWnm-zy-hARh3I,4892
-wildflow_splat-0.1.3.dist-info/WHEEL,sha256=iip_4jp1VbEiG4s9Isod9Q3MUK3z1pesoEwCO-vGFn0,92
-wildflow_splat-0.1.3.dist-info/licenses/LICENSE,sha256=ILWNDv8HAaOBG3VaDh2DsV4XCYCmvNJiSDSimZvyC8U,1108
-wildflow_splat-0.1.3.dist-info/RECORD,,
{wildflow_splat-0.1.3.dist-info → wildflow_splat-0.1.4.dist-info}/licenses/LICENSE
File without changes