nmcp-precomputed 3.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nmcp/__init__.py +4 -0
- nmcp/__main__.py +13 -0
- nmcp/data/__init__.py +2 -0
- nmcp/data/precomputed_entry.py +10 -0
- nmcp/data/remote_data_client.py +466 -0
- nmcp/from_json.py +34 -0
- nmcp/from_service.py +32 -0
- nmcp/list_skeletons.py +24 -0
- nmcp/precomputed/__init__.py +5 -0
- nmcp/precomputed/nmcp_precomputed.py +252 -0
- nmcp/precomputed/nmcp_skeleton.py +142 -0
- nmcp/precomputed/segment_info.py +61 -0
- nmcp/precomputed/segment_property.py +33 -0
- nmcp/precomputed/segment_tag_property.py +85 -0
- nmcp/precomputed_worker.py +273 -0
- nmcp/remove_skeleton.py +21 -0
- nmcp_precomputed-3.0.4.dist-info/METADATA +17 -0
- nmcp_precomputed-3.0.4.dist-info/RECORD +19 -0
- nmcp_precomputed-3.0.4.dist-info/WHEEL +4 -0
nmcp/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
from .precomputed import SegmentInfo, SegmentProperty, SegmentTagProperty, SomaSegmentTagProperty, NmcpPropertyValues
|
|
2
|
+
from .precomputed import (ensure_bucket_folders, create_from_json_files, create_from_dict, create_from_data,
|
|
3
|
+
remove_skeleton, list_skeletons, extract_neuron_properties, SkeletonComponents)
|
|
4
|
+
from .data import RemoteDataClient, PrecomputedEntry
|
nmcp/__main__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""Command-line entry point: run the precomputed-skeleton worker against a GraphQL service.

Usage: python -m nmcp -u <service-url> -a <auth-header> -o <cloud-volume-path>
"""
import argparse

from .precomputed_worker import main


def _parse_args():
    # Isolated in a function so importing this module does not consume
    # sys.argv or start the worker as a side effect.
    parser = argparse.ArgumentParser()

    parser.add_argument("-u", "--url", help="URL of the GraphQL service")
    parser.add_argument("-a", "--authkey", help="authorization header for GraphQL service")
    parser.add_argument("-o", "--output", help="the output cloud volume location")

    return parser.parse_args()


if __name__ == "__main__":
    args = _parse_args()
    main(args.url, args.authkey, args.output)
|
nmcp/data/remote_data_client.py
ADDED
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from enum import IntEnum
|
|
4
|
+
from typing_extensions import List, Dict, Any, Optional
|
|
5
|
+
|
|
6
|
+
from gql import Client, gql
|
|
7
|
+
from gql.transport.requests import RequestsHTTPTransport
|
|
8
|
+
|
|
9
|
+
from .precomputed_entry import PrecomputedEntry
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
# GraphQL query: atlas-space precomputed entries that are pending generation.
atlas_pending_query = gql(
    """
    query QueryPrecomputed {
        pendingPrecomputed {
            id
            skeletonId
            version
            generatedAt
            reconstructionId
        }
    }
    """
)

# GraphQL mutation: write back status/version/timestamp for an atlas-space
# precomputed entry.  (GraphQL treats commas as insignificant, so the missing
# comma after "$status: Int!" is harmless.)
atlas_update_mutation = gql(
    """
    mutation UpdatePrecomputed($id: String!, $status: Int! $version: Int!, $generatedAt: Date!) {
        updatePrecomputed(id: $id, status: $status, version: $version, generatedAt: $generatedAt) {
            id
            skeletonId
            version
            generatedAt
            reconstructionId
        }
    }
    """
)

# GraphQL query: atlas-space reconstruction as JSON. The *ChunkInfo blocks
# carry pagination state used by get_axon_chunks / get_dendrite_chunks.
atlas_reconstruction_data_query = gql(
    """
    query reconstructionAsJson($id: String!, $options: PortalReconstructionInput) {
        reconstructionAsJson(id: $id, options: $options) {
            comment
            neurons {
                id
                idString
                DOI
                soma {
                    x
                    y
                    z
                    allenId
                }
                sample {
                    genotype
                }
                axon {
                    x
                    y
                    z
                    radius
                    sampleNumber
                    parentNumber
                    allenId
                    structureIdentifier
                }
                axonChunkInfo {
                    totalCount
                    offset
                    limit
                    hasMore
                }
                dendrite {
                    x
                    y
                    z
                    radius
                    sampleNumber
                    parentNumber
                    allenId
                    structureIdentifier
                }
                dendriteChunkInfo {
                    totalCount
                    offset
                    limit
                    hasMore
                }
            }
        }
    }
    """
)

# GraphQL query: specimen-space precomputed entries pending generation.
specimen_pending_query = gql(
    """
    query QuerySpecimenSpacePrecomputed {
        specimenSpacePendingPrecomputed {
            id
            skeletonId
            version
            generatedAt
            reconstructionId
        }
    }
    """
)

# GraphQL mutation: write back status/version/timestamp for a specimen-space
# precomputed entry.
specimen_update_mutation = gql(
    """
    mutation UpdateSpecimenSpacePrecomputed($id: String!, $status: Int! $version: Int!, $generatedAt: Date!) {
        updateSpecimenSpacePrecomputed(id: $id, status: $status, version: $version, generatedAt: $generatedAt) {
            id
            skeletonId
            version
            generatedAt
            reconstructionId
        }
    }
    """
)

# GraphQL query: specimen-space reconstruction as JSON.  Unlike the atlas
# query there is no $options parameter and no chunk-info pagination, and the
# soma/axon/dendrite selections omit allenId.
specimen_reconstruction_data_query = gql(
    """
    query SpecimenSpaceReconstructionAsJson($id: String!) {
        specimenSpaceReconstructionAsJson(id: $id) {
            comment
            neurons {
                id
                idString
                DOI
                soma {
                    x
                    y
                    z
                }
                sample {
                    genotype
                }
                axon {
                    x
                    y
                    z
                    radius
                    sampleNumber
                    parentNumber
                    structureIdentifier
                }
                dendrite {
                    x
                    y
                    z
                    radius
                    sampleNumber
                    parentNumber
                    structureIdentifier
                }
            }
        }
    }
    """
)

# Version stamp reported with every status update mutation.
PRECOMPUTED_VERSION = 1
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
class PrecomputedStatus(IntEnum):
    """Lifecycle status codes reported back to the GraphQL service."""

    # The original members had trailing commas ("Initialized = 0,"), which
    # makes each value a one-element tuple that IntEnum happens to unpack as
    # constructor arguments.  Plain ints are the intended, unambiguous form.
    Initialized = 0
    Pending = 100
    Passed = 200
    FailedToLoad = 300
    FailedToGenerate = 400
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
class RemoteDataClient:
|
|
178
|
+
def __init__(self, url: str, auth_key: str):
    """Bind a GraphQL client to ``url``, authenticating every request with ``auth_key``."""
    headers = {"Content-Type": "application/json", "Authorization": auth_key}

    # Schema fetching is skipped; queries in this module are pre-built.
    self._client = Client(
        transport=RequestsHTTPTransport(url=url, verify=True, retries=3, headers=headers),
        fetch_schema_from_transport=False,
    )
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def find_specimen_space_pending(self) -> List[PrecomputedEntry]:
    """Return all specimen-space entries awaiting precomputed generation."""
    response = self._client.execute(specimen_pending_query)

    return [PrecomputedEntry(**entry) for entry in response["specimenSpacePendingPrecomputed"]]
|
|
198
|
+
|
|
199
|
+
def mark_specimen_generated(self, entry_id: str) -> None:
    """Record a successful specimen-space generation for ``entry_id``."""
    # generatedAt is a millisecond epoch timestamp.
    variables = dict(id=entry_id, status=PrecomputedStatus.Passed,
                     version=PRECOMPUTED_VERSION,
                     generatedAt=datetime.now().timestamp() * 1000)
    self._client.execute(specimen_update_mutation, variable_values=variables)
|
|
203
|
+
|
|
204
|
+
def mark_specimen_failed_load(self, entry_id: str) -> None:
    """Record that the specimen-space source data for ``entry_id`` failed to load."""
    # generatedAt is a millisecond epoch timestamp.
    variables = dict(id=entry_id, status=PrecomputedStatus.FailedToLoad,
                     version=PRECOMPUTED_VERSION,
                     generatedAt=datetime.now().timestamp() * 1000)
    self._client.execute(specimen_update_mutation, variable_values=variables)
|
|
208
|
+
|
|
209
|
+
def mark_specimen_failed_generate(self, entry_id: str) -> None:
    """Record that specimen-space generation failed for ``entry_id``."""
    # generatedAt is a millisecond epoch timestamp.
    variables = dict(id=entry_id, status=PrecomputedStatus.FailedToGenerate,
                     version=PRECOMPUTED_VERSION,
                     generatedAt=datetime.now().timestamp() * 1000)
    self._client.execute(specimen_update_mutation, variable_values=variables)
|
|
213
|
+
|
|
214
|
+
def find_atlas_pending(self) -> List[PrecomputedEntry]:
    """Return all atlas-space entries awaiting precomputed generation."""
    response = self._client.execute(atlas_pending_query)

    return [PrecomputedEntry(**entry) for entry in response["pendingPrecomputed"]]
|
|
223
|
+
|
|
224
|
+
def mark_atlas_generated(self, entry_id: str) -> None:
    """Record a successful atlas-space generation for ``entry_id``."""
    # generatedAt is a millisecond epoch timestamp.
    variables = dict(id=entry_id, status=PrecomputedStatus.Passed,
                     version=PRECOMPUTED_VERSION,
                     generatedAt=datetime.now().timestamp() * 1000)
    self._client.execute(atlas_update_mutation, variable_values=variables)
|
|
228
|
+
|
|
229
|
+
def mark_atlas_failed_load(self, entry_id: str) -> None:
    """Record that the atlas-space source data for ``entry_id`` failed to load."""
    # generatedAt is a millisecond epoch timestamp.
    variables = dict(id=entry_id, status=PrecomputedStatus.FailedToLoad,
                     version=PRECOMPUTED_VERSION,
                     generatedAt=datetime.now().timestamp() * 1000)
    self._client.execute(atlas_update_mutation, variable_values=variables)
|
|
233
|
+
|
|
234
|
+
def mark_atlas_failed_generate(self, entry_id: str) -> None:
    """Record that atlas-space generation failed for ``entry_id``."""
    # generatedAt is a millisecond epoch timestamp.
    variables = dict(id=entry_id, status=PrecomputedStatus.FailedToGenerate,
                     version=PRECOMPUTED_VERSION,
                     generatedAt=datetime.now().timestamp() * 1000)
    self._client.execute(atlas_update_mutation, variable_values=variables)
|
|
238
|
+
|
|
239
|
+
def _get_reconstruction_part(self, result: Dict[str, Any], name: Optional[str] = None) -> Optional[Dict[str, Any]]:
    """Extract the first neuron from a ``reconstructionAsJson`` response.

    Args:
        result: Raw GraphQL response dict (may be None or empty).
        name: When given, return only that field of the first neuron.

    Returns:
        The first neuron dict, the requested field of it, or None when the
        response has no neurons or lacks the requested field.
    """
    # Chained .get calls replace the original's repeated deep indexing and
    # the non-idiomatic `not "x" in d` checks; the `or` fallbacks also keep
    # this from raising when the service returns an explicit null.
    payload = (result or {}).get("reconstructionAsJson") or {}
    neurons = payload.get("neurons") or []

    if not neurons:
        return None

    if name is not None:
        # Missing field -> None, matching the original if/else behavior.
        return neurons[0].get(name)

    return neurons[0]
|
|
256
|
+
|
|
257
|
+
def get_reconstruction_header(self, reconstruction_id: str):
    """Get header information for a reconstruction."""
    try:
        # Ask for zero axon/dendrite points so only metadata comes back.
        options = {"axonLimit": 0, "dendriteLimit": 0}

        response = self._client.execute(
            atlas_reconstruction_data_query,
            variable_values={"id": reconstruction_id, "options": options})

        return self._get_reconstruction_part(response)

    except Exception as ex:
        logger.error(f"Error getting reconstruction header for {reconstruction_id}: {ex}")

        return None
|
|
274
|
+
|
|
275
|
+
def get_axon_chunks(self, reconstruction_id: str, chunk_size: int = 25000, offset: int = 0, limit: Optional[int] = None):
    """Get axon data in chunks for a reconstruction.

    Args:
        reconstruction_id: The ID of the reconstruction
        chunk_size: Number of points to retrieve per request
        offset: Starting offset for retrieval
        limit: Maximum total number of points to retrieve (None for all)

    Returns:
        Dict with "data" (list of axon points) and "chunk_info" (pagination info),
        or None if any request fails.
    """
    try:
        axon_data = []
        current_offset = offset
        remaining_limit = limit

        while True:
            # Calculate limit for this chunk; never request more than remains.
            request_limit = chunk_size
            if remaining_limit is not None:
                request_limit = min(chunk_size, remaining_limit)
                if request_limit <= 0:
                    break

            axon_input = {
                "axonOffset": current_offset,
                "axonLimit": request_limit
            }
            params = {"id": reconstruction_id, "options": axon_input}
            result = self._client.execute(atlas_reconstruction_data_query, variable_values=params)

            chunk_data = self._get_reconstruction_part(result)

            if chunk_data:
                chunk_points = chunk_data["axon"] or []
                axon_data.extend(chunk_points)

                # Update tracking variables
                if remaining_limit is not None:
                    remaining_limit -= len(chunk_points)
                current_offset += len(chunk_points)

                # Check if we have more data and should continue
                chunk_info = chunk_data["axonChunkInfo"]
                if not chunk_info or not chunk_info["hasMore"] or len(chunk_points) == 0:
                    break

                # If we got fewer points than requested, we're done
                if len(chunk_points) < request_limit:
                    break
            else:
                # Empty/missing neuron payload: stop rather than loop forever.
                break

        return {
            "data": axon_data,
            "chunk_info": {
                "total_retrieved": len(axon_data),
                "offset": offset,
                "requested_limit": limit
            }
        }

    except Exception as ex:
        logger.error(f"Error getting axon chunks for {reconstruction_id}: {ex}")

        return None
|
|
342
|
+
|
|
343
|
+
def get_dendrite_chunks(self, reconstruction_id: str, chunk_size: int = 25000, offset: int = 0, limit: Optional[int] = None):
    """Get dendrite data in chunks for a reconstruction.

    Args:
        reconstruction_id: The ID of the reconstruction
        chunk_size: Number of points to retrieve per request
        offset: Starting offset for retrieval
        limit: Maximum total number of points to retrieve (None for all)

    Returns:
        Dict with "data" (list of dendrite points) and "chunk_info" (pagination info),
        or None if any request fails.
    """
    try:
        dendrite_data = []
        current_offset = offset
        remaining_limit = limit

        while True:
            # Calculate limit for this chunk; never request more than remains.
            request_limit = chunk_size
            if remaining_limit is not None:
                request_limit = min(chunk_size, remaining_limit)
                if request_limit <= 0:
                    break

            dendrite_input = {
                "dendriteOffset": current_offset,
                "dendriteLimit": request_limit
            }
            params = {"id": reconstruction_id, "options": dendrite_input}
            result = self._client.execute(atlas_reconstruction_data_query, variable_values=params)

            chunk_data = self._get_reconstruction_part(result)

            if chunk_data:
                chunk_points = chunk_data["dendrite"] or []
                dendrite_data.extend(chunk_points)

                # Update tracking variables
                if remaining_limit is not None:
                    remaining_limit -= len(chunk_points)
                current_offset += len(chunk_points)

                # Check if we have more data and should continue
                chunk_info = chunk_data["dendriteChunkInfo"]
                if not chunk_info or not chunk_info["hasMore"] or len(chunk_points) == 0:
                    break

                # If we got fewer points than requested, we're done
                if len(chunk_points) < request_limit:
                    break
            else:
                # Empty/missing neuron payload: stop rather than loop forever.
                break

        return {
            "data": dendrite_data,
            "chunk_info": {
                "total_retrieved": len(dendrite_data),
                "offset": offset,
                "requested_limit": limit
            }
        }

    except Exception as ex:
        logger.error(f"Error getting dendrite chunks for {reconstruction_id}: {ex}")

        return None
|
|
410
|
+
|
|
411
|
+
def get_atlas_reconstruction_data(self, reconstruction_id: str):
    """Get complete reconstruction data using the individual chunk methods.

    Maintains backward compatibility with the original interface.

    Returns:
        A neuron dict with header fields plus assembled "axon"/"dendrite"
        point lists, or None on failure.
    """
    try:
        header = self.get_reconstruction_header(reconstruction_id)
        if not header:
            return None

        # Use .get throughout: "allenInformation" is not in the query's
        # selection set, so header["allenInformation"] always raised
        # KeyError, tripping the except below and making this method
        # unconditionally return None.
        neuron = {
            "id": header.get("id"),
            "idString": header.get("idString"),
            "DOI": header.get("DOI"),
            "soma": header.get("soma"),
            "axon": [], "dendrite": [],
            "allenInformation": header.get("allenInformation")
        }

        axon_result = self.get_axon_chunks(reconstruction_id)
        if axon_result and axon_result["data"]:
            neuron["axon"] = axon_result["data"]

        dendrite_result = self.get_dendrite_chunks(reconstruction_id)
        if dendrite_result and dendrite_result["data"]:
            neuron["dendrite"] = dendrite_result["data"]

        return neuron

    except Exception as ex:
        logger.error(f"Error getting reconstruction data for {reconstruction_id}: {ex}")

        return None
|
|
444
|
+
|
|
445
|
+
def get_specimen_space_reconstruction_data(self, reconstruction_id: str):
    """Fetch the first neuron of a specimen-space reconstruction, or None."""
    try:
        result = self._client.execute(
            specimen_reconstruction_data_query,
            variable_values={"id": reconstruction_id})

        # Guard each level of the response before drilling in.
        if result and "specimenSpaceReconstructionAsJson" in result:
            neurons = result["specimenSpaceReconstructionAsJson"].get("neurons")
            if neurons:
                return neurons[0]

        return None

    except Exception as ex:
        logger.error(f"Error getting reconstruction header for {reconstruction_id}: {ex}")

        return None
|
nmcp/from_json.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import glob
|
|
3
|
+
import logging
|
|
4
|
+
import os.path
|
|
5
|
+
import sys
|
|
6
|
+
|
|
7
|
+
from .precomputed import create_from_json_files
|
|
8
|
+
|
|
9
|
+
logging.basicConfig(level=logging.WARNING)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def main():
    """Build a precomputed volume from one JSON file or a directory of them."""
    parser = argparse.ArgumentParser()

    parser.add_argument("input", help="the input json file")
    parser.add_argument("output", help="the output cloud volume location")

    args = parser.parse_args()

    # A directory means "every .json file inside"; otherwise a single file.
    input_files = glob.glob(f"{args.input}/*.json") if os.path.isdir(args.input) else [args.input]

    create_from_json_files(input_files, args.output)

    return True
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
if __name__ == '__main__':
    # Translate main()'s boolean result into a process exit code.
    sys.exit(0 if main() else 1)
|
nmcp/from_service.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
from .data import RemoteDataClient
|
|
5
|
+
from .precomputed import create_from_dict
|
|
6
|
+
|
|
7
|
+
logging.basicConfig(level=logging.WARNING)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def main():
    """Generate precomputed output for every pending atlas-space reconstruction."""
    parser = argparse.ArgumentParser()

    parser.add_argument("-o", "--output", help="the output cloud volume location")
    parser.add_argument("-u", "--url", help="URL of the GraphQL service")
    parser.add_argument("-a", "--authkey", help="authorization header for GraphQL service")

    args = parser.parse_args()

    client = RemoteDataClient(args.url, args.authkey)

    pending = client.find_atlas_pending()

    print(pending)

    for pend in pending:
        data = client.get_atlas_reconstruction_data(pend.reconstructionId)

        # get_atlas_reconstruction_data returns None on failure; skip the
        # entry instead of crashing on the subscript below.
        if data is None:
            continue

        # NOTE(review): pending entries are built from id/skeletonId/version/
        # generatedAt/reconstructionId fields — confirm PrecomputedEntry
        # actually exposes a skeletonSegmentId attribute.
        data["skeleton_id"] = pend.skeletonSegmentId

        create_from_dict(data, args.output)


if __name__ == "__main__":
    main()
|
nmcp/list_skeletons.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
from .precomputed import list_skeletons
|
|
5
|
+
|
|
6
|
+
logging.basicConfig(level=logging.WARNING)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def main():
    """Print the count and ids of skeletons stored in a cloud volume."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-o", "--output", help="the output cloud volume location")
    args = parser.parse_args()

    ids = list_skeletons(args.output)

    # Summary line first, then the full id list.
    print(f"{len(ids)} skeletons in {args.output}")
    print(ids)


if __name__ == "__main__":
    main()
|
|
nmcp/precomputed/__init__.py
ADDED
@@ -0,0 +1,5 @@
|
|
|
1
|
+
from .segment_property import SegmentProperty
|
|
2
|
+
from .segment_tag_property import SegmentTagProperty, SomaSegmentTagProperty
|
|
3
|
+
from .segment_info import SegmentInfo, NmcpPropertyValues
|
|
4
|
+
from .nmcp_precomputed import (ensure_bucket_folders, create_from_json_files, create_from_dict, create_from_data,
|
|
5
|
+
remove_skeleton, list_skeletons, extract_neuron_properties, SkeletonComponents)
|