copick 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,161 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/#use-with-ide
110
+ .pdm.toml
111
+
112
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113
+ __pypackages__/
114
+
115
+ # Celery stuff
116
+ celerybeat-schedule
117
+ celerybeat.pid
118
+
119
+ # SageMath parsed files
120
+ *.sage.py
121
+
122
+ # Environments
123
+ .env
124
+ .venv
125
+ env/
126
+ venv/
127
+ ENV/
128
+ env.bak/
129
+ venv.bak/
130
+
131
+ # Spyder project settings
132
+ .spyderproject
133
+ .spyproject
134
+
135
+ # Rope project settings
136
+ .ropeproject
137
+
138
+ # mkdocs documentation
139
+ /site
140
+
141
+ # mypy
142
+ .mypy_cache/
143
+ .dmypy.json
144
+ dmypy.json
145
+
146
+ # Pyre type checker
147
+ .pyre/
148
+
149
+ # pytype static type analyzer
150
+ .pytype/
151
+
152
+ # Cython debug symbols
153
+ cython_debug/
154
+
155
+ # PyCharm
156
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
159
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160
+ .idea/
161
+ .DS_Store
@@ -0,0 +1,20 @@
1
+ repos:
2
+ - repo: https://github.com/psf/black-pre-commit-mirror
3
+ rev: 23.9.1
4
+ hooks:
5
+ - id: black
6
+ - repo: https://github.com/astral-sh/ruff-pre-commit
7
+ rev: v0.0.292
8
+ hooks:
9
+ - id: ruff
10
+ args:
11
+ - --fix
12
+ - repo: https://github.com/pre-commit/pre-commit-hooks
13
+ rev: v4.5.0
14
+ hooks:
15
+ - id: check-toml
16
+ - id: check-yaml
17
+ - id: check-json
18
+ - id: check-merge-conflict
19
+ - id: end-of-file-fixer
20
+ - id: trailing-whitespace
copick-0.4.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Utz Ermel
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
copick-0.4.0/PKG-INFO ADDED
@@ -0,0 +1,262 @@
1
+ Metadata-Version: 2.3
2
+ Name: copick
3
+ Version: 0.4.0
4
+ Summary: Definitions for a collaborative cryoET annotation tool.
5
+ Project-URL: Repository, https://github.com/uermel/copick.git
6
+ Project-URL: Issues, https://github.com/uermel/copick/issues
7
+ Author-email: "Utz H. Ermel" <utz.ermel@czii.org>, "Kyle I. S. Harrington" <kyle@kyleharrington.com>
8
+ License: MIT License
9
+
10
+ Copyright (c) 2024 Utz Ermel
11
+
12
+ Permission is hereby granted, free of charge, to any person obtaining a copy
13
+ of this software and associated documentation files (the "Software"), to deal
14
+ in the Software without restriction, including without limitation the rights
15
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16
+ copies of the Software, and to permit persons to whom the Software is
17
+ furnished to do so, subject to the following conditions:
18
+
19
+ The above copyright notice and this permission notice shall be included in all
20
+ copies or substantial portions of the Software.
21
+
22
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
28
+ SOFTWARE.
29
+ License-File: LICENSE
30
+ Keywords: annotation,collaborative,copick,cryo-et,cryoet,segmentation,tomography
31
+ Classifier: Development Status :: 3 - Alpha
32
+ Classifier: License :: OSI Approved :: MIT License
33
+ Requires-Python: >=3.10
34
+ Requires-Dist: fsspec
35
+ Requires-Dist: numpy
36
+ Requires-Dist: pydantic
37
+ Requires-Dist: qtpy
38
+ Requires-Dist: trimesh
39
+ Requires-Dist: zarr
40
+ Provides-Extra: dev
41
+ Requires-Dist: black; extra == 'dev'
42
+ Requires-Dist: ipython; extra == 'dev'
43
+ Requires-Dist: notebook; extra == 'dev'
44
+ Requires-Dist: pre-commit; extra == 'dev'
45
+ Requires-Dist: ruff; extra == 'dev'
46
+ Description-Content-Type: text/markdown
47
+
48
+ # copick
49
+ Definitions for a collaborative cryoET annotation tool.
50
+
51
+ ## Data Spec
52
+
53
+ Shared data is organized as follows:
54
+
55
+ ```
56
+ [copick_root]/
57
+ |-- copick_config.json (spec: src/models.py:CopickConfig)
58
+ |-- ParticleMrcs/
59
+ |-- [object_id]_[object_name].mrc (index: src/models.py:CopickConfig.pickable_objects.object_name)
60
+ |-- ExperimentRuns
61
+ |-- [run_name]/ (index: src/io/copick_models.py:CopickPicks.runs)
62
+ |-- VoxelSpacing[xx.yyy]/
63
+ | |-- [tomotype].zarr/
64
+ | | |-- [multiscale subdirectories according to OME-NGFF spec at 100%, 50% and 25% scale]
65
+ | |-- [tomotype]_[feature_type]_features.zarr/
66
+ | |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale]
67
+ |-- VoxelSpacing[x2.yy2]/
68
+ | |-- [tomotype].zarr/
69
+ | | |-- [multiscale subdirectories according to OME-NGFF spec at 100%, 50% and 25% scale]
70
+ | |-- [tomotype]_[feature_type]_features.zarr/
71
+ | |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale]
72
+ |-- Picks/
73
+ | |-- [user_id | tool_name]_[session_id | 0]_[object_name].json (spec: src/models.py:CopickPicks)
74
+ |-- Meshes/
75
+ | |-- [user_id | tool_name]_[session_id | 0]_[object_name].glb (spec: src/models.py:TBD)
76
+ |-- Segmentations/
77
+ |-- [xx.yyy]_[user_id | tool_name]_[session_id | 0]_[object_name].zarr (spec: src/models.py:TBD)
78
+ | |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale, 50% and 25% scale]
79
+ |-- [xx.yyy]_[user_id | tool_name]_[session_id | 0]_[name]-multilabel.zarr (spec: src/models.py:TBD)
80
+ |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale, 50% and 25% scale]
81
+ ```
82
+
83
+ ## Sample Data
84
+
85
+ A test set is hosted on [zenodo](https://doi.org/10.5281/zenodo.10905908).
86
+
87
+ The fsspec implementation allows the dataset to be split into a static and an overlay part. The static part is read-only
88
+ and contains the original data. The overlay part is read-write and contains the user-specific annotations.
89
+
90
+ ### Config for identical location
91
+
92
+ ```json
93
+ {
94
+ "name": "test",
95
+ "description": "A test project.",
96
+ "version": "1.0.0",
97
+
98
+ "pickable_objects": [
99
+ {
100
+ "name": "proteasome",
101
+ "is_particle": true,
102
+ "pdb_id": "3J9I",
103
+ "label": 1,
104
+ "color": [255, 0, 0, 255]
105
+ },
106
+ {
107
+ "name": "ribosome",
108
+ "is_particle": true,
109
+ "pdb_id": "7P6Z",
110
+ "label": 2,
111
+ "color": [0, 255, 0, 255]
112
+ },
113
+ {
114
+ "name": "membrane",
115
+ "is_particle": false,
116
+ "label": 3,
117
+ "color": [0, 0, 0, 255]
118
+ }
119
+ ],
120
+
121
+ "overlay_root": "local:///PATH/TO/sample_project",
122
+ "static_root": "local:///PATH/TO/sample_project",
123
+
124
+ "overlay_fs_args": {
125
+ "auto_mkdir": true
126
+ }
127
+ }
128
+ ```
129
+
130
+ ### Config for static remote and mutable local dataset
131
+
132
+ This has the additional `s3fs` requirement.
133
+
134
+ ```json
135
+ {
136
+ "name": "test",
137
+ "description": "A test project.",
138
+ "version": "1.0.0",
139
+
140
+ "pickable_objects": [
141
+ {
142
+ "name": "proteasome",
143
+ "is_particle": true,
144
+ "pdb_id": "3J9I",
145
+ "label": 1,
146
+ "color": [255, 0, 0, 255]
147
+ },
148
+ {
149
+ "name": "ribosome",
150
+ "is_particle": true,
151
+ "pdb_id": "7P6Z",
152
+ "label": 2,
153
+ "color": [0, 255, 0, 255]
154
+ },
155
+ {
156
+ "name": "membrane",
157
+ "is_particle": false,
158
+ "label": 3,
159
+ "color": [0, 0, 0, 255]
160
+ }
161
+ ],
162
+
163
+ "overlay_root": "local:///PATH/TO/sample_project",
164
+ "static_root": "s3://bucket/path/to/sample_project",
165
+
166
+ "overlay_fs_args": {
167
+ "auto_mkdir": true
168
+ }
169
+ }
170
+ ```
171
+
172
+ ### API overview
173
+ ```python
174
+ from copick.impl.filesystem import CopickRootFSSpec
175
+ import zarr
176
+
177
+ # Project root
178
+ root = CopickRootFSSpec.from_file("/PATH/TO/sample_project/copick_config_filesystem.json")
179
+
180
+ ## Root API
181
+ root.config # CopickConfig object
182
+ root.runs # List of run objects (lazy loading from filesystem location(s))
183
+ root.get_run("run_name") # Get a run by name
184
+ run = root.new_run("run_name") # Create a new run (appends to the list of runs and creates directory in overlay fs location)
185
+ root.refresh() # Refresh the list of runs from filesystem location(s)
186
+
187
+ ## Run API
188
+ # Hierarchical objects (lazy loading from filesystem location(s))
189
+ run.picks # List of pick objects
190
+ run.meshes # List of mesh objects
191
+ run.segmentations # List of segmentation objects
192
+ run.voxel_spacings # List of voxel spacing objects
193
+
194
+ # Create new objects
195
+ run.new_pick("user_id", "session_id", "object_name") # Create a new pick object (appends to the list of picks and creates file in overlay fs location)
196
+ run.new_mesh("user_id", "session_id", "object_name") # Create a new mesh object (appends to the list of meshes and creates file in overlay fs location)
197
+ run.new_segmentation("user_id", "session_id") # Create a new segmentation object (appends to the list of segmentations and creates zarr file in overlay fs location)
198
+ run.new_voxel_spacing(10.000) # Create a new voxel spacing object (appends to the list of voxel spacings and creates directory in overlay fs location)
199
+
200
+ # Get objects by name
201
+ run.get_picks(object_name="object_name") # Get all picks (list) for this run with a given object name
202
+ # ... similar for meshes, segmentations, voxel spacings
203
+
204
+ ## Pick API
205
+ pick = run.picks[0] # Get a pick object
206
+ pick.points # List of CopickPoint objects
207
+
208
+ ## Mesh API
209
+ mesh = run.meshes[0] # Get a mesh object
210
+ mesh.mesh # Trimesh scene object
211
+
212
+ ## Segmentation API
213
+ segmentation = run.segmentations[0] # Get a segmentation object
214
+ segmentation.zarr() # zarr.storage.FSStore object
215
+
216
+ ## VoxelSpacing API
217
+ voxel_spacing = run.voxel_spacings[0] # Get a voxel spacing object
218
+ voxel_spacing.tomograms # List of CopickTomogram objects
219
+
220
+ ## Tomogram API
221
+ tomogram = voxel_spacing.tomograms[0] # Get a tomogram object
222
+ tomogram.zarr() # zarr.storage.FSStore object
223
+ tomogram.features # List of CopickTomogramFeature objects
224
+
225
+
226
+ # Example usage
227
+ # List of runs
228
+ print(root.runs)
229
+
230
+ # Points
231
+ print(root.runs[0].picks[0].points)
232
+
233
+ # List of meshes
234
+ print(root.runs[0].meshes)
235
+
236
+ # List of segmentations
237
+ print(root.runs[0].segmentations)
238
+
239
+ # List of voxel spacings
240
+ print(root.runs[0].voxel_spacings)
241
+
242
+ # List of tomograms
243
+ print(root.runs[0].voxel_spacings[0].tomograms)
244
+
245
+ # Get Zarr store for a tomogram
246
+ print(zarr.open_group(root.runs[0].voxel_spacings[0].tomograms[0].zarr()).info)
247
+
248
+ # Get Zarr store for a tomogram feature
249
+ print(root.runs[0].voxel_spacings[0].tomograms[1].features)
250
+ print(zarr.open_group(root.runs[0].voxel_spacings[0].tomograms[1].features[0].zarr()).info)
251
+
252
+ # Get a pick file's contents
253
+ print(root.runs[0].picks[0].load())
254
+
255
+ # Get a mesh file's contents
256
+ print(root.runs[0].meshes[0].mesh)
257
+
258
+ # Get a Zarr store for a segmentation
259
+ print(root.runs[0].segmentations[0].path)
260
+ print(zarr.open_group(root.runs[0].segmentations[0].zarr()).info)
261
+
262
+ ```
copick-0.4.0/README.md ADDED
@@ -0,0 +1,215 @@
1
+ # copick
2
+ Definitions for a collaborative cryoET annotation tool.
3
+
4
+ ## Data Spec
5
+
6
+ Shared data is organized as follows:
7
+
8
+ ```
9
+ [copick_root]/
10
+ |-- copick_config.json (spec: src/models.py:CopickConfig)
11
+ |-- ParticleMrcs/
12
+ |-- [object_id]_[object_name].mrc (index: src/models.py:CopickConfig.pickable_objects.object_name)
13
+ |-- ExperimentRuns
14
+ |-- [run_name]/ (index: src/io/copick_models.py:CopickPicks.runs)
15
+ |-- VoxelSpacing[xx.yyy]/
16
+ | |-- [tomotype].zarr/
17
+ | | |-- [multiscale subdirectories according to OME-NGFF spec at 100%, 50% and 25% scale]
18
+ | |-- [tomotype]_[feature_type]_features.zarr/
19
+ | |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale]
20
+ |-- VoxelSpacing[x2.yy2]/
21
+ | |-- [tomotype].zarr/
22
+ | | |-- [multiscale subdirectories according to OME-NGFF spec at 100%, 50% and 25% scale]
23
+ | |-- [tomotype]_[feature_type]_features.zarr/
24
+ | |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale]
25
+ |-- Picks/
26
+ | |-- [user_id | tool_name]_[session_id | 0]_[object_name].json (spec: src/models.py:CopickPicks)
27
+ |-- Meshes/
28
+ | |-- [user_id | tool_name]_[session_id | 0]_[object_name].glb (spec: src/models.py:TBD)
29
+ |-- Segmentations/
30
+ |-- [xx.yyy]_[user_id | tool_name]_[session_id | 0]_[object_name].zarr (spec: src/models.py:TBD)
31
+ | |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale, 50% and 25% scale]
32
+ |-- [xx.yyy]_[user_id | tool_name]_[session_id | 0]_[name]-multilabel.zarr (spec: src/models.py:TBD)
33
+ |-- [multiscale subdirectories according to OME-NGFF spec at 100% scale, 50% and 25% scale]
34
+ ```
35
+
36
+ ## Sample Data
37
+
38
+ A test set is hosted on [zenodo](https://doi.org/10.5281/zenodo.10905908).
39
+
40
+ The fsspec implementation allows the dataset to be split into a static and an overlay part. The static part is read-only
41
+ and contains the original data. The overlay part is read-write and contains the user-specific annotations.
42
+
43
+ ### Config for identical location
44
+
45
+ ```json
46
+ {
47
+ "name": "test",
48
+ "description": "A test project.",
49
+ "version": "1.0.0",
50
+
51
+ "pickable_objects": [
52
+ {
53
+ "name": "proteasome",
54
+ "is_particle": true,
55
+ "pdb_id": "3J9I",
56
+ "label": 1,
57
+ "color": [255, 0, 0, 255]
58
+ },
59
+ {
60
+ "name": "ribosome",
61
+ "is_particle": true,
62
+ "pdb_id": "7P6Z",
63
+ "label": 2,
64
+ "color": [0, 255, 0, 255]
65
+ },
66
+ {
67
+ "name": "membrane",
68
+ "is_particle": false,
69
+ "label": 3,
70
+ "color": [0, 0, 0, 255]
71
+ }
72
+ ],
73
+
74
+ "overlay_root": "local:///PATH/TO/sample_project",
75
+ "static_root": "local:///PATH/TO/sample_project",
76
+
77
+ "overlay_fs_args": {
78
+ "auto_mkdir": true
79
+ }
80
+ }
81
+ ```
82
+
83
+ ### Config for static remote and mutable local dataset
84
+
85
+ This has the additional `s3fs` requirement.
86
+
87
+ ```json
88
+ {
89
+ "name": "test",
90
+ "description": "A test project.",
91
+ "version": "1.0.0",
92
+
93
+ "pickable_objects": [
94
+ {
95
+ "name": "proteasome",
96
+ "is_particle": true,
97
+ "pdb_id": "3J9I",
98
+ "label": 1,
99
+ "color": [255, 0, 0, 255]
100
+ },
101
+ {
102
+ "name": "ribosome",
103
+ "is_particle": true,
104
+ "pdb_id": "7P6Z",
105
+ "label": 2,
106
+ "color": [0, 255, 0, 255]
107
+ },
108
+ {
109
+ "name": "membrane",
110
+ "is_particle": false,
111
+ "label": 3,
112
+ "color": [0, 0, 0, 255]
113
+ }
114
+ ],
115
+
116
+ "overlay_root": "local:///PATH/TO/sample_project",
117
+ "static_root": "s3://bucket/path/to/sample_project",
118
+
119
+ "overlay_fs_args": {
120
+ "auto_mkdir": true
121
+ }
122
+ }
123
+ ```
124
+
125
+ ### API overview
126
+ ```python
127
+ from copick.impl.filesystem import CopickRootFSSpec
128
+ import zarr
129
+
130
+ # Project root
131
+ root = CopickRootFSSpec.from_file("/PATH/TO/sample_project/copick_config_filesystem.json")
132
+
133
+ ## Root API
134
+ root.config # CopickConfig object
135
+ root.runs # List of run objects (lazy loading from filesystem location(s))
136
+ root.get_run("run_name") # Get a run by name
137
+ run = root.new_run("run_name") # Create a new run (appends to the list of runs and creates directory in overlay fs location)
138
+ root.refresh() # Refresh the list of runs from filesystem location(s)
139
+
140
+ ## Run API
141
+ # Hierarchical objects (lazy loading from filesystem location(s))
142
+ run.picks # List of pick objects
143
+ run.meshes # List of mesh objects
144
+ run.segmentations # List of segmentation objects
145
+ run.voxel_spacings # List of voxel spacing objects
146
+
147
+ # Create new objects
148
+ run.new_pick("user_id", "session_id", "object_name") # Create a new pick object (appends to the list of picks and creates file in overlay fs location)
149
+ run.new_mesh("user_id", "session_id", "object_name") # Create a new mesh object (appends to the list of meshes and creates file in overlay fs location)
150
+ run.new_segmentation("user_id", "session_id") # Create a new segmentation object (appends to the list of segmentations and creates zarr file in overlay fs location)
151
+ run.new_voxel_spacing(10.000) # Create a new voxel spacing object (appends to the list of voxel spacings and creates directory in overlay fs location)
152
+
153
+ # Get objects by name
154
+ run.get_picks(object_name="object_name") # Get all picks (list) for this run with a given object name
155
+ # ... similar for meshes, segmentations, voxel spacings
156
+
157
+ ## Pick API
158
+ pick = run.picks[0] # Get a pick object
159
+ pick.points # List of CopickPoint objects
160
+
161
+ ## Mesh API
162
+ mesh = run.meshes[0] # Get a mesh object
163
+ mesh.mesh # Trimesh scene object
164
+
165
+ ## Segmentation API
166
+ segmentation = run.segmentations[0] # Get a segmentation object
167
+ segmentation.zarr() # zarr.storage.FSStore object
168
+
169
+ ## VoxelSpacing API
170
+ voxel_spacing = run.voxel_spacings[0] # Get a voxel spacing object
171
+ voxel_spacing.tomograms # List of CopickTomogram objects
172
+
173
+ ## Tomogram API
174
+ tomogram = voxel_spacing.tomograms[0] # Get a tomogram object
175
+ tomogram.zarr() # zarr.storage.FSStore object
176
+ tomogram.features # List of CopickTomogramFeature objects
177
+
178
+
179
+ # Example usage
180
+ # List of runs
181
+ print(root.runs)
182
+
183
+ # Points
184
+ print(root.runs[0].picks[0].points)
185
+
186
+ # List of meshes
187
+ print(root.runs[0].meshes)
188
+
189
+ # List of segmentations
190
+ print(root.runs[0].segmentations)
191
+
192
+ # List of voxel spacings
193
+ print(root.runs[0].voxel_spacings)
194
+
195
+ # List of tomograms
196
+ print(root.runs[0].voxel_spacings[0].tomograms)
197
+
198
+ # Get Zarr store for a tomogram
199
+ print(zarr.open_group(root.runs[0].voxel_spacings[0].tomograms[0].zarr()).info)
200
+
201
+ # Get Zarr store for a tomogram feature
202
+ print(root.runs[0].voxel_spacings[0].tomograms[1].features)
203
+ print(zarr.open_group(root.runs[0].voxel_spacings[0].tomograms[1].features[0].zarr()).info)
204
+
205
+ # Get a pick file's contents
206
+ print(root.runs[0].picks[0].load())
207
+
208
+ # Get a mesh file's contents
209
+ print(root.runs[0].meshes[0].mesh)
210
+
211
+ # Get a Zarr store for a segmentation
212
+ print(root.runs[0].segmentations[0].path)
213
+ print(zarr.open_group(root.runs[0].segmentations[0].zarr()).info)
214
+
215
+ ```