dendrotweaks 0.4.4__py3-none-any.whl → 0.4.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dendrotweaks/__init__.py +1 -1
- dendrotweaks/analysis/__init__.py +2 -1
- dendrotweaks/analysis/ephys_analysis.py +140 -62
- dendrotweaks/biophys/default_mod/vecstim.mod +1 -11
- dendrotweaks/biophys/default_templates/jaxley.py +131 -0
- dendrotweaks/biophys/distributions.py +3 -3
- dendrotweaks/biophys/io/converter.py +4 -0
- dendrotweaks/biophys/mechanisms.py +11 -1
- dendrotweaks/model.py +151 -1088
- dendrotweaks/model_io.py +736 -39
- dendrotweaks/model_simulation.py +326 -0
- dendrotweaks/morphology/io/factories.py +2 -2
- dendrotweaks/morphology/io/reader.py +12 -3
- dendrotweaks/morphology/point_trees.py +1 -1
- dendrotweaks/path_manager.py +2 -2
- dendrotweaks/prerun.py +63 -0
- dendrotweaks/utils.py +148 -40
- {dendrotweaks-0.4.4.dist-info → dendrotweaks-0.4.6.dist-info}/METADATA +1 -1
- {dendrotweaks-0.4.4.dist-info → dendrotweaks-0.4.6.dist-info}/RECORD +22 -19
- {dendrotweaks-0.4.4.dist-info → dendrotweaks-0.4.6.dist-info}/WHEEL +0 -0
- {dendrotweaks-0.4.4.dist-info → dendrotweaks-0.4.6.dist-info}/licenses/LICENSE +0 -0
- {dendrotweaks-0.4.4.dist-info → dendrotweaks-0.4.6.dist-info}/top_level.txt +0 -0
dendrotweaks/model_io.py
CHANGED
@@ -1,21 +1,741 @@
|
|
1
|
-
|
1
|
+
# Imports
|
2
2
|
import os
|
3
|
+
import json
|
3
4
|
from collections import defaultdict
|
4
5
|
|
5
|
-
|
6
|
-
|
7
|
-
|
8
|
-
|
9
|
-
|
10
|
-
|
11
|
-
|
12
|
-
|
13
|
-
|
14
|
-
|
15
|
-
|
16
|
-
|
17
|
-
|
18
|
-
|
6
|
+
import jinja2
|
7
|
+
import pandas as pd
|
8
|
+
|
9
|
+
# DendroTweaks imports
|
10
|
+
from dendrotweaks.morphology.io import create_point_tree, create_section_tree, create_segment_tree
|
11
|
+
from dendrotweaks.biophys.io import create_channel, standardize_channel
|
12
|
+
from dendrotweaks.biophys.groups import SegmentGroup
|
13
|
+
from dendrotweaks.biophys.distributions import Distribution
|
14
|
+
from dendrotweaks.biophys.mechanisms import LeakChannel, CaDynamics
|
15
|
+
from dendrotweaks.biophys.mechanisms import FallbackChannel
|
16
|
+
from dendrotweaks.stimuli import Population
|
17
|
+
from dendrotweaks.utils import DOMAIN_TO_GROUP, DOMAINS_TO_NEURON
|
18
|
+
|
19
|
+
# Warnings configuration
|
20
|
+
import warnings
|
21
|
+
|
22
|
+
def custom_warning_formatter(message, category, filename, lineno, file=None, line=None):
    """Render a warning as a compact two-line message showing only the bare file name."""
    short_name = os.path.basename(filename)
    return f"WARNING: {message}\n({short_name}, line {lineno})\n"

# Install the compact formatter module-wide.
warnings.formatwarning = custom_warning_formatter
|
26
|
+
|
27
|
+
class IOMixin():
|
28
|
+
"""
|
29
|
+
Mixin for the Model class to handle file I/O operations.
|
30
|
+
"""
|
31
|
+
|
32
|
+
# -----------------------------------------------------------------------
|
33
|
+
# DIRECTORY MANAGEMENT
|
34
|
+
# -----------------------------------------------------------------------
|
35
|
+
|
36
|
+
def print_directory_tree(self, *args, **kwargs):
    """Delegate to the path manager to print the model's directory tree."""
    return self.path_manager.print_directory_tree(*args, **kwargs)
|
41
|
+
|
42
|
+
|
43
|
+
def list_morphologies(self, extension='swc'):
    """Return the morphology files available under the 'morphology' directory."""
    return self.path_manager.list_files('morphology', extension=extension)
|
48
|
+
|
49
|
+
|
50
|
+
def list_biophys(self, extension='json'):
    """Return the biophysical configuration files available under the 'biophys' directory."""
    return self.path_manager.list_files('biophys', extension=extension)
|
55
|
+
|
56
|
+
|
57
|
+
def list_mechanisms(self, extension='mod'):
    """Return the mechanism (MOD) files available under the 'mod' directory."""
    return self.path_manager.list_files('mod', extension=extension)
|
62
|
+
|
63
|
+
|
64
|
+
def list_stimuli(self, extension='json'):
    """Return the stimuli configuration files available under the 'stimuli' directory."""
    return self.path_manager.list_files('stimuli', extension=extension)
|
69
|
+
|
70
|
+
|
71
|
+
# -----------------------------------------------------------------------
|
72
|
+
# MORPHOLOGY I/O
|
73
|
+
# -----------------------------------------------------------------------
|
74
|
+
|
75
|
+
def load_morphology(self, file_name, soma_notation='3PS',
                    align=True, sort_children=True, force=False) -> None:
    """
    Read an SWC file and build the SWC and section trees.

    Parameters
    ----------
    file_name : str
        The name of the SWC file to read.
    soma_notation : str, optional
        The notation of the soma in the SWC file. Can be '3PS' (three-point soma) or '1PS'. Default is '3PS'.
    align : bool, optional
        Whether to align the morphology to the soma center and align the apical dendrite (if present).
    sort_children : bool, optional
        Whether to sort the children of each node by increasing subtree size
        in the tree sorting algorithms. If True, the traversal visits
        children with shorter subtrees first and assigns them lower indices. If False, children
        are visited in their original SWC file order (matching NEURON's behavior).
    force : bool, optional
        Passed through to the tree sort calls to force re-sorting.
    """
    # self.name = file_name.split('.')[0]
    self.morphology_name = file_name.replace('.swc', '')
    path_to_swc_file = self.path_manager.get_file_path('morphology', file_name, extension='swc')
    # Build and normalize the point (SWC) tree before deriving sections.
    point_tree = create_point_tree(path_to_swc_file)
    # point_tree.remove_overlaps()
    point_tree.change_soma_notation(soma_notation)
    point_tree.sort(sort_children=sort_children, force=force)
    if align:
        point_tree.shift_coordinates_to_soma_center()
        point_tree.align_apical_dendrite()
    # NOTE(review): rounding assumed to apply regardless of alignment —
    # confirm placement relative to the `if align:` block against upstream.
    point_tree.round_coordinates(8)
    self.point_tree = point_tree

    sec_tree = create_section_tree(point_tree)
    sec_tree.sort(sort_children=sort_children, force=force)
    self.sec_tree = sec_tree

    # Simulator-side section objects must exist before the segment tree is built.
    self.create_and_reference_sections_in_simulator()
    seg_tree = create_segment_tree(sec_tree)
    self.seg_tree = seg_tree

    self._add_default_segment_groups()
    self._initialize_domains_to_mechs()

    # Re-apply spatial discretization with the model's current d_lambda.
    d_lambda = self.d_lambda
    self.set_segmentation(d_lambda=d_lambda)
|
120
|
+
|
121
|
+
|
122
|
+
def create_and_reference_sections_in_simulator(self):
    """Instantiate simulator-side objects for every section in the section tree."""
    if self.verbose:
        print(f'Building sections in {self.simulator_name}...')
    for section in self.sec_tree.sections:
        section.create_and_reference()
    # Count only sections that actually received a simulator reference.
    n_sec = sum(1 for section in self.sec_tree.sections if section._ref is not None)
    if self.verbose:
        print(f'{n_sec} sections created.')
|
132
|
+
|
133
|
+
|
134
|
+
def _add_default_segment_groups(self):
    """Create the default segment groups: an 'all' group plus one group per domain."""
    domain_names = list(self.domains.keys())
    self.add_group('all', domain_names)
    for name in domain_names:
        # Map the domain to its conventional group name, falling back to the domain name.
        self.add_group(DOMAIN_TO_GROUP.get(name, name), [name])
|
139
|
+
|
140
|
+
|
141
|
+
def _initialize_domains_to_mechs(self):
|
142
|
+
for domain_name in self.domains:
|
143
|
+
# Only if haven't been defined for the previous morphology
|
144
|
+
# TODO: Check that domains match
|
145
|
+
if not domain_name in self.domains_to_mechs:
|
146
|
+
self.domains_to_mechs[domain_name] = set()
|
147
|
+
for domain_name, mech_names in self.domains_to_mechs.items():
|
148
|
+
for mech_name in mech_names:
|
149
|
+
mech = self.mechanisms[mech_name]
|
150
|
+
self.insert_mechanism(mech, domain_name)
|
151
|
+
|
152
|
+
|
153
|
+
def export_morphology(self, file_name):
    """
    Write the current point tree to an SWC file.

    Parameters
    ----------
    file_name : str
        The name of the SWC file to write under the 'morphology' directory.
    """
    destination = self.path_manager.get_file_path('morphology', file_name, extension='swc')
    self.point_tree.to_swc(destination)
|
165
|
+
|
166
|
+
|
167
|
+
# =======================================================================
|
168
|
+
# BIOPHYSICS I/O
|
169
|
+
# ========================================================================
|
170
|
+
|
171
|
+
# -----------------------------------------------------------------------
|
172
|
+
# MECHANISMS
|
173
|
+
# -----------------------------------------------------------------------
|
174
|
+
|
175
|
+
def add_default_mechanisms(self, recompile=False):
    """
    Register the built-in Leak and CaDyn mechanisms and load the default MOD archive.

    Parameters
    ----------
    recompile : bool, optional
        Whether to recompile the default mechanisms.
    """
    # Register the two built-in mechanisms under their own names.
    for mech in (LeakChannel(), CaDynamics()):
        self.mechanisms[mech.name] = mech
    self.load_mechanisms('default_mod', recompile=recompile)
|
191
|
+
|
192
|
+
|
193
|
+
def add_mechanisms(self, dir_name: str = 'mod', recompile=True) -> None:
    """
    Add every mechanism found in an archive directory to the model.

    Parameters
    ----------
    dir_name : str, optional
        The name of the archive to load mechanisms from. Default is 'mod'.
    recompile : bool, optional
        Whether to recompile the mechanisms.
    """
    # One add_mechanism call per MOD file discovered in the archive.
    mod_names = self.path_manager.list_files(dir_name, extension='mod')
    for mechanism_name in mod_names:
        self.add_mechanism(mechanism_name,
                           load=True,
                           dir_name=dir_name,
                           recompile=recompile)
|
210
|
+
|
211
|
+
|
212
|
+
def add_mechanism(self, mechanism_name: str,
                  python_template_name: str = 'default',
                  load=True, dir_name: str = 'mod', recompile=True
                  ) -> None:
    """
    Create a Mechanism object from the MOD file (or LeakChannel).

    Parameters
    ----------
    mechanism_name : str
        The name of the mechanism to add.
    python_template_name : str, optional
        The name of the Python template to use. Default is 'default'.
    load : bool, optional
        Whether to load the mechanism using neuron.load_mechanisms.
    dir_name : str, optional
        The directory the MOD file is loaded from. Default is 'mod'.
    recompile : bool, optional
        Whether to recompile the mechanism when loading.
    """
    paths = self.path_manager.get_channel_paths(
        mechanism_name,
        python_template_name=python_template_name
    )
    try:
        mech = create_channel(**paths)
    except NotImplementedError as e:
        # Unsupported KINETIC blocks get a gbar-only fallback channel;
        # any other NotImplementedError propagates unchanged.
        if "KINETIC" in str(e):
            warnings.warn(
                f"Could not import the '{mechanism_name}' channel because it uses an unsupported KINETIC block."
                " A minimal fallback channel will be created for simulation only, supporting only the 'gbar' parameter."
            )
            mech = FallbackChannel(mechanism_name)
        else:
            raise
    # Add the mechanism to the model
    self.mechanisms[mech.name] = mech
    # Update the global parameters

    if load:
        self.load_mechanism(mechanism_name, dir_name, recompile)
|
249
|
+
|
250
|
+
|
251
|
+
def load_mechanisms(self, dir_name: str = 'mod', recompile=True) -> None:
    """
    Load every MOD mechanism found in an archive directory.

    Parameters
    ----------
    dir_name : str, optional
        The name of the archive to load mechanisms from.
    recompile : bool, optional
        Whether to recompile the mechanisms.
    """
    for name in self.path_manager.list_files(dir_name, extension='mod'):
        self.load_mechanism(name, dir_name, recompile)
|
265
|
+
|
266
|
+
|
267
|
+
def load_mechanism(self, mechanism_name, dir_name='mod', recompile=False) -> None:
    """
    Load a single mechanism from the specified archive into the simulator.

    Parameters
    ----------
    mechanism_name : str
        The name of the mechanism to load.
    dir_name : str, optional
        The name of the directory to load the mechanism from. Default is 'mod'.
    recompile : bool, optional
        Whether to recompile the mechanism.
    """
    mod_path = self.path_manager.get_file_path(dir_name, mechanism_name, extension='mod')
    self.mod_loader.load_mechanism(path_to_mod_file=mod_path, recompile=recompile)
|
286
|
+
|
287
|
+
|
288
|
+
def standardize_channel(self, channel_name,
                        python_template_name=None, mod_template_name=None, remove_old=True):
    """
    Standardize a channel by creating a new channel with the same kinetic
    properties using the standard equations.

    Parameters
    ----------
    channel_name : str
        The name of the channel to standardize.
    python_template_name : str, optional
        The name of the Python template to use.
        NOTE(review): not referenced in this method body — confirm intent.
    mod_template_name : str, optional
        The name of the MOD template to use.
    remove_old : bool, optional
        Whether to remove the old channel from the model. Default is True.
    """

    # Get data to transfer: the channel object, the domains carrying it,
    # and its gbar distributions. Kinetic variables cannot be transferred.
    channel = self.mechanisms[channel_name]
    channel_domain_names = [domain_name for domain_name, mech_names
                            in self.domains_to_mechs.items() if channel_name in mech_names]
    gbar_name = f'gbar_{channel_name}'
    gbar_distributions = self.params[gbar_name]
    # Kinetic variables cannot be transferred

    # Uninsert the old channel from every domain that carries it
    for domain_name in self.domains:
        if channel_name in self.domains_to_mechs[domain_name]:
            self.uninsert_mechanism(channel_name, domain_name)

    # Remove the old channel
    if remove_old:
        self.mechanisms.pop(channel_name)

    # Create, add and load a new channel. The bare-name call below resolves
    # to the module-level standardize_channel imported from
    # dendrotweaks.biophys.io, not to this method.
    paths = self.path_manager.get_standard_channel_paths(
        channel_name,
        mod_template_name=mod_template_name
    )
    standard_channel = standardize_channel(channel, **paths)

    self.mechanisms[standard_channel.name] = standard_channel
    self.load_mechanism(standard_channel.name, recompile=True)

    # Insert the new channel into the domains the old one occupied
    for domain_name in channel_domain_names:
        self.insert_mechanism(standard_channel.name, domain_name)

    # Transfer data: re-apply the old gbar distributions under the new name
    gbar_name = f'gbar_{standard_channel.name}'
    for group_name, distribution in gbar_distributions.items():
        self.set_param(gbar_name, group_name,
                       distribution.function_name, **distribution.parameters)
|
342
|
+
|
343
|
+
|
344
|
+
# -----------------------------------------------------------------------
|
345
|
+
# CONFIGURATION
|
346
|
+
# -----------------------------------------------------------------------
|
347
|
+
|
348
|
+
def to_dict(self):
    """
    Return a dictionary representation of the model.

    Returns
    -------
    dict
        Keys: 'metadata' (model name), 'd_lambda' (discretization
        parameter), 'domains' (domain -> sorted mechanism names),
        'groups' (list of group dicts), and 'params'
        (param -> group -> distribution; string distributions are kept
        as-is, others are serialized via their to_dict()).
    """
    return {
        'metadata': {
            'name': self.name,
        },
        'd_lambda': self.d_lambda,
        # Sort mechanism sets so the JSON output is deterministic.
        'domains': {domain: sorted(list(mechs)) for domain, mechs in self.domains_to_mechs.items()},
        'groups': [
            group.to_dict() for group in self._groups
        ],
        'params': {
            param_name: {
                group_name: distribution if isinstance(distribution, str) else distribution.to_dict()
                for group_name, distribution in distributions.items()
            }
            for param_name, distributions in self.params.items()
        },
    }
|
374
|
+
|
375
|
+
|
376
|
+
def from_dict(self, data):
    """
    Load the model from a dictionary.

    Parameters
    ----------
    data : dict
        The dictionary representation of the model (the shape produced
        by :meth:`to_dict`).

    Raises
    ------
    ValueError
        If the name in ``data['metadata']`` does not match this model.
    """
    if not self.name == data['metadata']['name']:
        raise ValueError('Model name does not match the data.')

    self.d_lambda = data['d_lambda']

    # Domains and mechanisms
    self.domains_to_mechs = {
        domain: set(mechs) for domain, mechs in data['domains'].items()
    }
    if self.verbose: print('Inserting mechanisms...')
    # Insert without distributing yet; distributions are applied after
    # groups and params are restored below.
    for domain_name, mechs in self.domains_to_mechs.items():
        for mech_name in mechs:
            self.insert_mechanism(mech_name, domain_name, distribute=False)
    # print('Distributing parameters...')
    # self.distribute_all()

    # Groups
    if self.verbose: print('Adding groups...')
    self._groups = [SegmentGroup.from_dict(group) for group in data['groups']]

    if self.verbose: print('Distributing parameters...')
    # Parameters: string entries stay strings, dict entries are rebuilt
    # into Distribution objects.
    self.params = {
        param_name: {
            group_name: distribution if isinstance(distribution, str) else Distribution.from_dict(distribution)
            for group_name, distribution in distributions.items()
        }
        for param_name, distributions in data['params'].items()
    }

    if self.verbose: print('Setting segmentation...')
    # Re-segmenting requires a loaded morphology.
    if self.sec_tree is not None:
        d_lambda = self.d_lambda
        self.set_segmentation(d_lambda=d_lambda)
|
419
|
+
|
420
|
+
|
421
|
+
def export_biophys(self, file_name, **kwargs):
    """
    Export the biophysical properties of the model to a JSON file.

    Parameters
    ----------
    file_name : str
        The name of the file to write to.
    **kwargs : dict
        Additional keyword arguments to pass to `json.dump`. If `indent`
        is not supplied, a default of 4 is used; an explicit caller value
        (including 0 or None) is respected.
    """

    path_to_json = self.path_manager.get_file_path('biophys', file_name, extension='json')
    # setdefault, unlike the previous truthiness check, does not clobber
    # a deliberately falsy caller-supplied indent (0 or None).
    kwargs.setdefault('indent', 4)

    data = self.to_dict()
    with open(path_to_json, 'w') as f:
        json.dump(data, f, **kwargs)
|
440
|
+
|
441
|
+
|
442
|
+
def load_biophys(self, file_name, recompile=True):
    """
    Load the biophysical properties of the model from a JSON file.

    Parameters
    ----------
    file_name : str
        The name of the file to read from.
    recompile : bool, optional
        Whether to recompile the mechanisms after loading. Default is True.
    """
    # Built-in Leak/CaDyn and the default MOD archive must exist before
    # the saved configuration is applied.
    self.add_default_mechanisms()


    path_to_json = self.path_manager.get_file_path('biophys', file_name, extension='json')

    with open(path_to_json, 'r') as f:
        data = json.load(f)

    # Add every mechanism referenced by any domain, skipping the built-ins
    # already registered above and the pseudo-mechanism 'Independent'.
    for mech_name in {mech for mechs in data['domains'].values() for mech in mechs}:
        if mech_name in ['Leak', 'CaDyn', 'Independent']:
            continue
        self.add_mechanism(mech_name, dir_name='mod', recompile=recompile)

    self.from_dict(data)
|
467
|
+
|
468
|
+
|
469
|
+
# =======================================================================
|
470
|
+
# STIMULI I/O
|
471
|
+
# ========================================================================
|
472
|
+
|
473
|
+
def stimuli_to_dict(self):
    """
    Convert the stimuli to a dictionary representation.

    Returns
    -------
    dict
        Keys: 'metadata' (model name), 'simulation' (the simulator's own
        dict form), and 'stimuli' with 'recordings', 'iclamps' and
        'populations' entries. Recording and iclamp names are generated
        positionally ('rec_<i>', 'iclamp_<i>').
    """
    return {
        'metadata': {
            'name': self.name,
        },
        'simulation': {
            **self.simulator.to_dict(),
        },
        'stimuli': {
            # One entry per recording, indexed within each recorded variable.
            'recordings': [
                {
                    'name': f'rec_{i}',
                    'var': var
                }
                for var, recs in self.simulator.recordings.items()
                for i, _ in enumerate(recs)
            ],
            'iclamps': [
                {
                    'name': f'iclamp_{i}',
                    'amp': iclamp.amp,
                    'delay': iclamp.delay,
                    'dur': iclamp.dur
                }
                for i, (seg, iclamp) in enumerate(self.iclamps.items())
            ],
            # Populations grouped by synapse type.
            'populations': {
                syn_type: [pop.to_dict() for pop in pops.values()]
                for syn_type, pops in self.populations.items()
            }
        },
    }
|
513
|
+
|
514
|
+
|
515
|
+
def _stimuli_to_csv(self, path_to_csv=None):
    """
    Collect recordings, iclamps and synapse locations into one DataFrame.

    Parameters
    ----------
    path_to_csv : str, optional
        If given, the assembled DataFrame is also written to this CSV path.

    Returns
    -------
    pandas.DataFrame
        Columns: 'type', 'idx', 'sec_idx', 'loc' — one row per recording,
        iclamp, or synapse.
    """

    # Recordings: one row per recording, indexed within each variable.
    rec_data = {
        'type': [],
        'idx': [],
        'sec_idx': [],
        'loc': [],
    }
    for var, recs in self.simulator.recordings.items():
        rec_data['type'].extend(['rec'] * len(recs))
        rec_data['idx'].extend([i for i in range(len(recs))])
        rec_data['sec_idx'].extend([seg._section.idx for seg in recs])
        rec_data['loc'].extend([seg.x for seg in recs])

    # Current clamps: keyed by segment in self.iclamps.
    iclamp_data = {
        'type': ['iclamp'] * len(self.iclamps),
        'idx': [i for i in range(len(self.iclamps))],
        'sec_idx': [seg._section.idx for seg in self.iclamps],
        'loc': [seg.x for seg in self.iclamps],
    }

    synapses_data = {
        'type': [],
        'idx': [],
        'sec_idx': [],
        'loc': [],
    }

    # Synapses: each population contributes its own CSV-shaped dict;
    # the per-synapse index is recovered from the 'name_<idx>' suffix.
    for syn_type, pops in self.populations.items():
        for pop_name, pop in pops.items():
            pop_data = pop.to_csv()
            synapses_data['type'] += pop_data['syn_type']
            synapses_data['idx'] += [int(name.rsplit('_', 1)[1]) for name in pop_data['name']]
            synapses_data['sec_idx'] += pop_data['sec_idx']
            synapses_data['loc'] += pop_data['loc']

    df = pd.concat([
        pd.DataFrame(rec_data),
        pd.DataFrame(iclamp_data),
        pd.DataFrame(synapses_data)
    ], ignore_index=True)
    # concat of possibly-empty frames can upcast to float; force int back.
    df['idx'] = df['idx'].astype(int)
    df['sec_idx'] = df['sec_idx'].astype(int)
    if path_to_csv: df.to_csv(path_to_csv, index=False)

    return df
|
569
|
+
|
570
|
+
|
571
|
+
def export_stimuli(self, file_name, **kwargs):
    """
    Export the stimuli to a JSON and CSV file.

    Parameters
    ----------
    file_name : str
        The name of the file to write to.
    **kwargs : dict
        Additional keyword arguments to pass to `json.dump`. If `indent`
        is not supplied, a default of 4 is used; an explicit caller value
        (including 0 or None) is respected.
    """
    path_to_json = self.path_manager.get_file_path('stimuli', file_name, extension='json')

    data = self.stimuli_to_dict()

    # setdefault, unlike the previous truthiness check, does not clobber
    # a deliberately falsy caller-supplied indent (0 or None).
    kwargs.setdefault('indent', 4)
    with open(path_to_json, 'w') as f:
        json.dump(data, f, **kwargs)

    # The positional layout (section index / location) goes to a sibling CSV.
    path_to_stimuli_csv = self.path_manager.get_file_path('stimuli', file_name, extension='csv')
    self._stimuli_to_csv(path_to_stimuli_csv)
|
593
|
+
|
594
|
+
|
595
|
+
def load_stimuli(self, file_name):
    """
    Load the stimuli from a JSON file (and its sibling CSV of locations).

    Parameters
    ----------
    file_name : str
        The name of the file to read from.

    Raises
    ------
    ValueError
        If the name in the JSON metadata does not match this model.
    """

    path_to_json = self.path_manager.get_file_path('stimuli', file_name, extension='json')
    path_to_stimuli_csv = self.path_manager.get_file_path('stimuli', file_name, extension='csv')

    with open(path_to_json, 'r') as f:
        data = json.load(f)

    if not self.name == data['metadata']['name']:
        raise ValueError('Model name does not match the data.')

    df_stimuli = pd.read_csv(path_to_stimuli_csv)

    self.simulator.from_dict(data['simulation'])

    # Clear all stimuli and recordings
    self.remove_all_stimuli()
    self.remove_all_recordings()

    # IClamps -----------------------------------------------------------
    # Locations come from the CSV; amplitudes/timing from the JSON,
    # matched positionally via the 'idx' column.

    df_iclamps = df_stimuli[df_stimuli['type'] == 'iclamp'].reset_index(drop=True, inplace=False)

    for row in df_iclamps.itertuples(index=False):
        self.add_iclamp(
            self.sec_tree.sections[row.sec_idx],
            row.loc,
            data['stimuli']['iclamps'][row.idx]['amp'],
            data['stimuli']['iclamps'][row.idx]['delay'],
            data['stimuli']['iclamps'][row.idx]['dur']
        )

    # Populations -------------------------------------------------------

    syn_types = ['AMPA', 'NMDA', 'AMPA_NMDA', 'GABAa']

    for syn_type in syn_types:

        df_syn = df_stimuli[df_stimuli['type'] == syn_type]

        for i, pop_data in enumerate(data['stimuli']['populations'][syn_type]):

            # Rows of this synapse type belonging to population i.
            df_pop = df_syn[df_syn['idx'] == i]

            segments = [self.sec_tree.sections[sec_idx](loc)
                        for sec_idx, loc in zip(df_pop['sec_idx'], df_pop['loc'])]

            pop = Population(idx=i,
                             segments=segments,
                             N=pop_data['N'],
                             syn_type=syn_type)

            # Re-create synapses at the exact saved (section, loc) pairs.
            syn_locs = [(self.sec_tree.sections[sec_idx], loc) for sec_idx, loc in zip(df_pop['sec_idx'].tolist(), df_pop['loc'].tolist())]

            pop.allocate_synapses(syn_locs=syn_locs)
            pop.update_kinetic_params(**pop_data['kinetic_params'])
            pop.update_input_params(**pop_data['input_params'])
            self._add_population(pop)

    # Recordings ---------------------------------------------------------

    df_recs = df_stimuli[df_stimuli['type'] == 'rec'].reset_index(drop=True, inplace=False)
    for row in df_recs.itertuples(index=False):
        var = data['stimuli']['recordings'][row.idx]['var']
        self.add_recording(
            self.sec_tree.sections[row.sec_idx], row.loc, var
        )
|
670
|
+
|
671
|
+
# ========================================================================
|
672
|
+
# EXPORT TO PLAIN SIMULATOR CODE
|
673
|
+
# ========================================================================
|
674
|
+
|
675
|
+
def export_to_NEURON(self, file_name, include_kinetic_params=True):
    """
    Export the model to a python file with plain NEURON code to reproduce the model.

    Parameters
    ----------
    file_name : str
        The name of the file to write to ('.py' is appended if missing).
    include_kinetic_params : bool, optional
        If False, kinetic parameters are stripped via filter_params and
        only conductances plus passive/reversal parameters are exported.
    """

    # Gather the template context from module-level helpers.
    params_to_valid_domains = get_params_to_valid_domains(self)
    params = self.params if include_kinetic_params else filter_params(self)
    path_to_template = self.path_manager.get_file_path('templates', 'NEURON_template', extension='py')

    output = render_template(path_to_template,
                             {
                                 'param_dict': params,
                                 'groups_dict': self.groups,
                                 'params_to_mechs': self.params_to_mechs,
                                 'domains_to_mechs': self.domains_to_mechs,
                                 'iclamps': self.iclamps,
                                 'recordings': self.simulator.recordings,
                                 'params_to_valid_domains': params_to_valid_domains,
                                 'domains_to_NEURON': {domain: get_neuron_domain(domain) for domain in self.domains},
                             })

    if not file_name.endswith('.py'):
        file_name += '.py'
    # The generated script is written next to the model, not into a subfolder.
    path_to_model = self.path_manager.path_to_model
    output_path = os.path.join(path_to_model, file_name)
    with open(output_path, 'w') as f:
        f.write(output)
|
707
|
+
|
708
|
+
|
709
|
+
def export_to_Jaxley(self, file_name):
    """Export the model to plain Jaxley code (not yet supported)."""
    raise NotImplementedError("Export to plain Jaxley code is not implemented yet.")
|
711
|
+
|
712
|
+
|
713
|
+
# =======================================================================
|
714
|
+
# HELPER FUNCTIONS
|
715
|
+
# =======================================================================
|
716
|
+
|
717
|
+
def filter_params(model):
    """
    Filter out kinetic parameters from the model.

    Only conductances and the passive/reversal parameters
    ('cm', 'Ra', 'ena', 'ek', 'eca') are kept; parameters that are
    filtered out keep an entry mapped to an empty dict.

    Parameters
    ----------
    model : Model
        The model to filter.

    Returns
    -------
    dict
        Mapping of parameter name to its group distributions, with
        kinetic parameters' distributions removed.
    """
    # Hoist the allowed-name set out of the comprehension (it is
    # loop-invariant) and use a set for O(1) membership tests instead of
    # rebuilding a list for every param x group pair.
    allowed = set(model.conductances) | {'cm', 'Ra', 'ena', 'ek', 'eca'}
    filtered_params = {
        param: {
            group_name: distribution
            for group_name, distribution in distributions.items()
            if param in allowed}
        for param, distributions in model.params.items()}
    return filtered_params
|
738
|
+
|
19
739
|
|
20
740
|
def get_neuron_domain(domain_name):
|
21
741
|
base_domain, _, idx = domain_name.partition('_')
|
@@ -49,27 +769,4 @@ def get_params_to_valid_domains(model):
|
|
49
769
|
valid_domains = [get_neuron_domain(domain) for domain in group.domains if mech == 'Independent' or mech in model.domains_to_mechs[domain]]
|
50
770
|
params_to_valid_domains[param][group_name] = valid_domains
|
51
771
|
|
52
|
-
return dict(params_to_valid_domains)
|
53
|
-
|
54
|
-
|
55
|
-
def filter_params(model):
|
56
|
-
"""
|
57
|
-
Filter out kinetic parameters from the model.
|
58
|
-
|
59
|
-
Parameters
|
60
|
-
----------
|
61
|
-
model : Model
|
62
|
-
The model to filter.
|
63
|
-
|
64
|
-
Returns
|
65
|
-
-------
|
66
|
-
Model
|
67
|
-
The model with kinetic parameters filtered out.
|
68
|
-
"""
|
69
|
-
filtered_params = {
|
70
|
-
param: {
|
71
|
-
group_name: distribution
|
72
|
-
for group_name, distribution in distributions.items()
|
73
|
-
if param in list(model.conductances.keys()) + ['cm', 'Ra', 'ena', 'ek', 'eca']}
|
74
|
-
for param, distributions in model.params.items()}
|
75
|
-
return filtered_params
|
772
|
+
return dict(params_to_valid_domains)
|