siibra 0.4a76.tar.gz → 0.5a1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (83)
  1. {siibra-0.4a76/siibra.egg-info → siibra-0.5a1}/PKG-INFO +1 -1
  2. siibra-0.5a1/siibra/VERSION +1 -0
  3. {siibra-0.4a76 → siibra-0.5a1}/siibra/commons.py +3 -2
  4. {siibra-0.4a76 → siibra-0.5a1}/siibra/configuration/configuration.py +6 -2
  5. {siibra-0.4a76 → siibra-0.5a1}/siibra/configuration/factory.py +48 -27
  6. siibra-0.5a1/siibra/explorer/__init__.py +1 -0
  7. siibra-0.5a1/siibra/explorer/url.py +162 -0
  8. siibra-0.5a1/siibra/explorer/util.py +65 -0
  9. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/anchor.py +36 -9
  10. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/connectivity/__init__.py +6 -2
  11. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/connectivity/functional_connectivity.py +21 -0
  12. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/connectivity/regional_connectivity.py +91 -86
  13. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/dataset/ebrains.py +1 -1
  14. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/feature.py +331 -35
  15. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/bigbrain_intensity_profile.py +5 -2
  16. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/cell_density_profile.py +3 -1
  17. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/cortical_profile.py +14 -10
  18. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/gene_expression.py +0 -2
  19. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/layerwise_bigbrain_intensities.py +3 -2
  20. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/receptor_density_profile.py +7 -1
  21. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/regional_timeseries_activity.py +81 -102
  22. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/tabular.py +21 -9
  23. {siibra-0.4a76 → siibra-0.5a1}/siibra/livequeries/bigbrain.py +11 -22
  24. siibra-0.5a1/siibra/locations/__init__.py +84 -0
  25. {siibra-0.4a76 → siibra-0.5a1}/siibra/locations/boundingbox.py +0 -16
  26. {siibra-0.4a76 → siibra-0.5a1}/siibra/locations/location.py +13 -0
  27. {siibra-0.4a76 → siibra-0.5a1}/siibra/locations/pointset.py +1 -3
  28. {siibra-0.4a76 → siibra-0.5a1}/siibra/retrieval/cache.py +5 -3
  29. {siibra-0.4a76 → siibra-0.5a1}/siibra/retrieval/datasets.py +27 -27
  30. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/neuroglancer.py +6 -9
  31. {siibra-0.4a76 → siibra-0.5a1/siibra.egg-info}/PKG-INFO +1 -1
  32. {siibra-0.4a76 → siibra-0.5a1}/siibra.egg-info/SOURCES.txt +3 -0
  33. siibra-0.4a76/siibra/VERSION +0 -1
  34. siibra-0.4a76/siibra/locations/__init__.py +0 -20
  35. {siibra-0.4a76 → siibra-0.5a1}/LICENSE +0 -0
  36. {siibra-0.4a76 → siibra-0.5a1}/MANIFEST.in +0 -0
  37. {siibra-0.4a76 → siibra-0.5a1}/README.rst +0 -0
  38. {siibra-0.4a76 → siibra-0.5a1}/setup.cfg +0 -0
  39. {siibra-0.4a76 → siibra-0.5a1}/setup.py +0 -0
  40. {siibra-0.4a76 → siibra-0.5a1}/siibra/__init__.py +0 -0
  41. {siibra-0.4a76 → siibra-0.5a1}/siibra/configuration/__init__.py +0 -0
  42. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/__init__.py +0 -0
  43. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/atlas.py +0 -0
  44. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/concept.py +0 -0
  45. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/parcellation.py +0 -0
  46. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/region.py +0 -0
  47. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/relation_qualification.py +0 -0
  48. {siibra-0.4a76 → siibra-0.5a1}/siibra/core/space.py +0 -0
  49. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/__init__.py +0 -0
  50. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/connectivity/streamline_counts.py +0 -0
  51. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/connectivity/streamline_lengths.py +0 -0
  52. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/connectivity/tracing_connectivity.py +0 -0
  53. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/dataset/__init__.py +0 -0
  54. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/image/__init__.py +0 -0
  55. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/image/image.py +0 -0
  56. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/image/sections.py +0 -0
  57. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/image/volume_of_interest.py +0 -0
  58. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/__init__.py +0 -0
  59. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/layerwise_cell_density.py +0 -0
  60. {siibra-0.4a76 → siibra-0.5a1}/siibra/features/tabular/receptor_density_fingerprint.py +0 -0
  61. {siibra-0.4a76 → siibra-0.5a1}/siibra/livequeries/__init__.py +0 -0
  62. {siibra-0.4a76 → siibra-0.5a1}/siibra/livequeries/allen.py +0 -0
  63. {siibra-0.4a76 → siibra-0.5a1}/siibra/livequeries/ebrains.py +0 -0
  64. {siibra-0.4a76 → siibra-0.5a1}/siibra/livequeries/query.py +0 -0
  65. {siibra-0.4a76 → siibra-0.5a1}/siibra/locations/point.py +0 -0
  66. {siibra-0.4a76 → siibra-0.5a1}/siibra/retrieval/__init__.py +0 -0
  67. {siibra-0.4a76 → siibra-0.5a1}/siibra/retrieval/exceptions/__init__.py +0 -0
  68. {siibra-0.4a76 → siibra-0.5a1}/siibra/retrieval/repositories.py +0 -0
  69. {siibra-0.4a76 → siibra-0.5a1}/siibra/retrieval/requests.py +0 -0
  70. {siibra-0.4a76 → siibra-0.5a1}/siibra/vocabularies/__init__.py +0 -0
  71. {siibra-0.4a76 → siibra-0.5a1}/siibra/vocabularies/gene_names.json +0 -0
  72. {siibra-0.4a76 → siibra-0.5a1}/siibra/vocabularies/receptor_symbols.json +0 -0
  73. {siibra-0.4a76 → siibra-0.5a1}/siibra/vocabularies/region_aliases.json +0 -0
  74. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/__init__.py +0 -0
  75. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/gifti.py +0 -0
  76. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/nifti.py +0 -0
  77. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/parcellationmap.py +0 -0
  78. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/sparsemap.py +0 -0
  79. {siibra-0.4a76 → siibra-0.5a1}/siibra/volumes/volume.py +0 -0
  80. {siibra-0.4a76 → siibra-0.5a1}/siibra.egg-info/dependency_links.txt +0 -0
  81. {siibra-0.4a76 → siibra-0.5a1}/siibra.egg-info/requires.txt +0 -0
  82. {siibra-0.4a76 → siibra-0.5a1}/siibra.egg-info/top_level.txt +0 -0
  83. {siibra-0.4a76 → siibra-0.5a1}/test/test_siibra.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: siibra
- Version: 0.4a76
+ Version: 0.5a1
  Summary: siibra - Software interfaces for interacting with brain atlases
  Home-page: https://github.com/FZJ-INM1-BDA/siibra-python
  Author: Big Data Analytics Group, Forschungszentrum Juelich, Institute of Neuroscience and Medicine (INM-1)
siibra/VERSION
@@ -0,0 +1 @@
+ 0.5a01
siibra/commons.py
@@ -45,8 +45,9 @@ KEYCLOAK_CLIENT_SECRET = os.getenv("KEYCLOAK_CLIENT_SECRET")
  SIIBRA_CACHEDIR = os.getenv("SIIBRA_CACHEDIR")
  SIIBRA_LOG_LEVEL = os.getenv("SIIBRA_LOG_LEVEL", "INFO")
  SIIBRA_USE_CONFIGURATION = os.getenv("SIIBRA_USE_CONFIGURATION")
-
  SIIBRA_USE_LOCAL_SNAPSPOT = os.getenv("SIIBRA_USE_LOCAL_SNAPSPOT")
+ SKIP_CACHEINIT_MAINTENANCE = os.getenv("SKIP_CACHEINIT_MAINTENANCE")
+ NEUROGLANCER_MAX_GIB = os.getenv("NEUROGLANCER_MAX_GIB", 0.2)

  with open(os.path.join(ROOT_DIR, "VERSION"), "r") as fp:
      __version__ = fp.read().strip()

@@ -735,7 +736,7 @@ class Species(Enum):
          if key in cls.__members__.keys():
              return getattr(cls, key)
          else:
-             if isinstance(spec, list):
+             if isinstance(spec, (list, set)):
                  next_specs = spec
              elif isinstance(spec, dict):
                  next_specs = spec.values()
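
Note on the two new settings in siibra/commons.py: they are read via os.getenv() at import time, so they must be set before siibra is imported. A minimal sketch; the effect of each variable is inferred from its name and default value, not documented in this diff:

import os

# Assumption: skips the cache maintenance pass normally run when the local cache is initialized.
os.environ["SKIP_CACHEINIT_MAINTENANCE"] = "1"
# Assumption: caps neuroglancer volume downloads in GiB; the in-code default is 0.2.
os.environ["NEUROGLANCER_MAX_GIB"] = "0.5"

import siibra  # picks up the variables above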
siibra/configuration/configuration.py
@@ -159,15 +159,19 @@ class Configuration:
                  **{'filename': specloaders[0][0]}
              )
          )
+         obj_class = obj0[0].__class__.__name__ if isinstance(obj0, list) else obj0.__class__.__name__

          for fname, loader in siibra_tqdm(
              specloaders,
              total=len(specloaders),
-             desc=f"Loading preconfigured {obj0.__class__.__name__} instances"
+             desc=f"Loading preconfigured {obj_class} instances"
          ):
              # filename is added to allow Factory creating reasonable default object identifiers\
              obj = Factory.from_json(dict(loader.data, **{'filename': fname}))
-             result.append(obj)
+             if isinstance(obj, list):
+                 result.extend(obj)
+             else:
+                 result.append(obj)

          return result

siibra/configuration/factory.py
@@ -448,56 +448,77 @@ class Factory:
          raise ValueError(f"No method for building image section feature type {modality}.")

      @classmethod
-     @build_type("siibra/feature/connectivitymatrix/v0.2")
+     @build_type("siibra/feature/connectivitymatrix/v0.3")
      def build_connectivity_matrix(cls, spec):
+         files = spec.get("files", {})
          modality = spec["modality"]
+         try:
+             conn_cls = getattr(connectivity, modality)
+         except Exception:
+             raise ValueError(f"No method for building connectivity matrix of type {modality}.")
+
+         decoder_func = cls.extract_decoder(spec)
+         repo_connector = cls.extract_connector(spec) if spec.get('repository', None) else None
+         if repo_connector is None:
+             base_url = spec.get("base_url", "")
          kwargs = {
              "cohort": spec.get("cohort", ""),
              "modality": modality,
              "regions": spec["regions"],
-             "connector": cls.extract_connector(spec),
-             "decode_func": cls.extract_decoder(spec),
-             "files": spec.get("files", {}),
+             "connector": repo_connector,
+             "decode_func": decoder_func,
              "anchor": cls.extract_anchor(spec),
              "description": spec.get("description", ""),
-             "datasets": cls.extract_datasets(spec),
+             "datasets": cls.extract_datasets(spec)
          }
-         if modality == "StreamlineCounts":
-             return connectivity.StreamlineCounts(**kwargs)
-         elif modality == "StreamlineLengths":
-             return connectivity.StreamlineLengths(**kwargs)
-         elif modality == "Functional":
-             kwargs["paradigm"] = spec.get("paradigm")
-             return connectivity.FunctionalConnectivity(**kwargs)
-         elif modality == "RestingState":
-             kwargs["paradigm"] = spec.get("paradigm", "RestingState")
-             return connectivity.FunctionalConnectivity(**kwargs)
-         elif modality == "Tracing":
-             return connectivity.TracingConnectivity(**kwargs)
-         else:
-             raise ValueError(f"No method for building connectivity matrix of type {modality}.")
+         paradigm = spec.get("paradigm")
+         if paradigm:
+             kwargs["paradigm"] = paradigm
+         files_indexed_by = spec.get("files_indexed_by", "subject")
+         assert files_indexed_by in ["subject", "feature"]
+         conn_by_file = []
+         for fkey, filename in files.items():
+             kwargs.update({
+                 "filename": filename,
+                 "subject": fkey if files_indexed_by == "subject" else "average",
+                 "feature": fkey if files_indexed_by == "feature" else None,
+                 "connector": repo_connector or base_url + filename
+             })
+             conn_by_file.append(conn_cls(**kwargs))
+         return conn_by_file

      @classmethod
      @build_type("siibra/feature/timeseries/activity/v0.1")
      def build_activity_timeseries(cls, spec):
+         files = spec.get("files", {})
          modality = spec["modality"]
+         try:
+             timeseries_cls = getattr(regional_timeseries_activity, modality)
+         except Exception:
+             raise ValueError(f"No method for building signal table of type {modality}.")
+
          kwargs = {
-             "cohort": spec["cohort"],
+             "cohort": spec.get("cohort", ""),
              "modality": modality,
              "regions": spec["regions"],
              "connector": cls.extract_connector(spec),
              "decode_func": cls.extract_decoder(spec),
-             "files": spec.get("files", {}),
              "anchor": cls.extract_anchor(spec),
              "description": spec.get("description", ""),
              "datasets": cls.extract_datasets(spec),
-             "timestep": spec.get("timestep", ("1 no_unit"))
+             "timestep": spec.get("timestep")
          }
-         if modality == "Regional BOLD signal":
-             kwargs["paradigm"] = spec.get("paradigm", "")
-             return regional_timeseries_activity.RegionalBOLD(**kwargs)
-         else:
-             raise ValueError(f"No method for building signal table of type {modality}.")
+         paradigm = spec.get("paradigm")
+         if paradigm:
+             kwargs["paradigm"] = paradigm
+         timeseries_by_file = []
+         for fkey, filename in files.items():
+             kwargs.update({
+                 "filename": filename,
+                 "subject": fkey
+             })
+             timeseries_by_file.append(timeseries_cls(**kwargs))
+         return timeseries_by_file

      @classmethod
      def from_json(cls, spec: dict):
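
Note on the rewritten builders: both now resolve the feature class by name with getattr on the corresponding module and return a list with one feature per entry in the spec's files mapping, instead of a single feature holding a files dict. A minimal sketch of that dispatch pattern (the helper name is illustrative, not part of the package):

from siibra.features import connectivity

def resolve_connectivity_class(modality: str):
    # "StreamlineCounts", "StreamlineLengths", "FunctionalConnectivity", ... are
    # looked up directly on the siibra.features.connectivity package
    try:
        return getattr(connectivity, modality)
    except AttributeError:
        raise ValueError(f"No method for building connectivity matrix of type {modality}.")

conn_cls = resolve_connectivity_class("StreamlineCounts")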
siibra/explorer/__init__.py
@@ -0,0 +1 @@
+ from .url import encode_url, decode_url
siibra/explorer/url.py
@@ -0,0 +1,162 @@
+ from typing import Optional, TYPE_CHECKING
+ from urllib.parse import quote_plus
+ from numpy import int32
+ import numpy as np
+ import re
+ from dataclasses import dataclass
+ import math
+
+ from .util import encode_number, separator, cipher, neg, decode_number, post_process
+
+ if TYPE_CHECKING:
+     from siibra.core.atlas import Atlas
+     from siibra.core.space import Space
+     from siibra.locations import BoundingBox, Point
+     from siibra.core.parcellation import Parcellation
+     from siibra.core.region import Region
+     from siibra.features.feature import Feature
+
+ class DecodeNavigationException(Exception): pass
+
+ min_int32=-2_147_483_648
+ max_int32=2_147_483_647
+
+
+ default_root_url='https://atlases.ebrains.eu/viewer/'
+
+ def sanitize_id(id: str):
+     return id.replace('/', ':')
+
+ def get_perspective_zoom(atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]):
+     import siibra
+     if atlas is siibra.atlases['rat'] or atlas is siibra.atlases['mouse']:
+         return 200000
+     return 2000000
+
+ def get_zoom(atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]):
+     import siibra
+     if atlas is siibra.atlases['rat'] or atlas is siibra.atlases['mouse']:
+         return 35000
+     return 350000
+
+ supported_prefix = (
+     "nifti://",
+     "swc://",
+     "precomputed://",
+     "deepzoom://"
+ )
+
+ def append_query_params(url: str, *args, query_params={}, **kwargs):
+     query_str = "&".join([f"{key}={quote_plus(value)}" for key, value in query_params.items()])
+     if len(query_str) > 0:
+         query_str = "?" + query_str
+     return url + query_str
+
+ @post_process(append_query_params)
+ def encode_url(atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]=None, *, root_url=default_root_url, external_url:str=None, feature: "Feature"=None, ignore_warning=False, query_params={}):
+
+     overlay_url = None
+     if external_url:
+         assert any([external_url.startswith(prefix) for prefix in supported_prefix]), f"url needs to start with {(' , '.join(supported_prefix))}"
+         overlay_url = '/x-overlay-layer:{url}'.format(
+             url=external_url.replace("/", "%2F")
+         )
+
+     zoom = get_zoom(atlas, space, parc, region)
+     pzoom = get_perspective_zoom(atlas, space, parc, region)
+
+     zoom_kwargs = {
+         "encoded_pzoom": encode_number(pzoom, False),
+         "encoded_zoom": encode_number(zoom, False)
+     }
+     nav_string='/@:0.0.0.-W000.._eCwg.2-FUe3._-s_W.2_evlu..{encoded_pzoom}..{encoded_nav}..{encoded_zoom}'
+
+     return_url='{root_url}#/a:{atlas_id}/t:{template_id}/p:{parc_id}{overlay_url}'.format(
+         root_url = root_url,
+         atlas_id = sanitize_id(atlas.id),
+         template_id = sanitize_id(space.id),
+         parc_id = sanitize_id(parc.id),
+         overlay_url = overlay_url if overlay_url else "",
+     )
+
+     if feature is not None:
+         return_url = return_url + f"/f:{sanitize_id(feature.id)}"
+
+     if region is None:
+         return return_url + nav_string.format(encoded_nav='0.0.0', **zoom_kwargs)
+
+     return_url=f'{return_url}/rn:{get_hash(region.name)}'
+
+     try:
+         result_props=region.spatial_props(space, maptype='labelled')
+         if len(result_props.components) == 0:
+             return return_url + nav_string.format(encoded_nav='0.0.0', **zoom_kwargs)
+     except Exception as e:
+         print(f'Cannot get_spatial_props {str(e)}')
+         if not ignore_warning:
+             raise e
+         return return_url + nav_string.format(encoded_nav='0.0.0', **zoom_kwargs)
+
+     centroid=result_props.components[0].centroid
+
+     encoded_centroid=separator.join([ encode_number(math.floor(val * 1e6)) for val in centroid ])
+     return_url=return_url + nav_string.format(encoded_nav=encoded_centroid, **zoom_kwargs)
+     return return_url
+
+ @dataclass
+ class DecodedUrl:
+     bounding_box: "BoundingBox"
+
+ def decode_url(url: str, vp_length=1000):
+     import siibra
+     try:
+         space_match = re.search(r'/t:(?P<space_id>[^/]+)', url)
+         space_id = space_match.group("space_id")
+         space_id = space_id.replace(":", "/")
+         space = siibra.spaces[space_id]
+     except Exception as e:
+         raise DecodeNavigationException from e
+
+     nav_match = re.search(r'/@:(?P<navigation_str>.+)/?', url)
+     navigation_str = nav_match.group("navigation_str")
+     for char in navigation_str:
+         assert char in cipher or char in [neg, separator], f"char {char} not in cipher, nor separator/neg"
+
+     try:
+         ori_enc, pers_ori_enc, pers_zoom_enc, pos_enc, zoomm_enc = navigation_str.split(f"{separator}{separator}")
+     except Exception as e:
+         raise DecodeNavigationException from e
+
+     try:
+         x_enc, y_enc, z_enc = pos_enc.split(separator)
+         pos = [decode_number(val) for val in [x_enc, y_enc, z_enc]]
+         zoom = decode_number(zoomm_enc)
+
+         # zoom = nm/pixel
+         pt1 = [(coord - (zoom * vp_length / 2)) / 1e6 for coord in pos]
+         pt1 = Point(pt1, space)
+
+         pt2 = [(coord + (zoom * vp_length / 2)) / 1e6 for coord in pos]
+         pt2 = Point(pt2, space)
+
+     except Exception as e:
+         raise DecodeNavigationException from e
+
+     bbx = BoundingBox(pt1, pt2, space)
+     return DecodedUrl(bounding_box=bbx)
+
+ def get_hash(full_string: str):
+     return_val=0
+     with np.errstate(over="ignore"):
+         for char in full_string:
+             # overflowing is expected and in fact the whole reason why convert number to int32
+
+             # in windows, int32((0 - min_int32) << 5), rather than overflow to wraper around, raises OverflowError
+             shifted_5 = int32(
+                 (return_val - min_int32) if return_val > max_int32 else return_val
+                 << 5)
+
+             return_val = int32(shifted_5 - return_val + ord(char))
+             return_val = return_val & return_val
+     hex_val = hex(return_val)
+     return hex_val[3:]
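
Usage sketch for the new module: encode_url builds a shareable siibra-explorer link and decode_url parses the navigation state of such a link back into a bounding box. The atlas, space, parcellation, and region keys below are illustrative:

import siibra
from siibra.explorer import encode_url, decode_url

atlas = siibra.atlases["human"]
space = siibra.spaces["mni152"]
parc = siibra.parcellations["julich 2.9"]
region = parc.get_region("hoc1 left")

# Viewer URL centered on the region's centroid (computed via region.spatial_props)
url = encode_url(atlas, space, parc, region=region)

# Recover the viewer navigation as a BoundingBox in the same space
decoded = decode_url(url)
print(decoded.bounding_box)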
siibra/explorer/util.py
@@ -0,0 +1,65 @@
+ import math
+ import struct
+ from functools import wraps
+ from typing import Callable
+
+ cipher = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_-'
+ separator = '.'
+ neg = '~'
+ def encode_number(n, float_flag=False):
+     if float_flag:
+         b=struct.pack('f', n)
+         new_n=struct.unpack('i',b)
+         return encode_int(new_n[0])
+     else:
+         return encode_int(n)
+
+ def encode_int(n):
+     if not isinstance(n, int):
+         raise ValueError('Cannot encode int')
+
+     residual=None
+     result=''
+     if n < 0:
+         result += neg
+         residual = n * -1
+     else:
+         residual = n
+
+     while True:
+         result = cipher[residual % 64] + result
+         residual = math.floor(residual / 64)
+
+         if residual == 0:
+             break
+     return result
+
+ def decode_int(n):
+     neg_flag = False
+     if n[-1] == neg:
+         neg_flag = True
+         n = n[:-1]
+
+     result = 0
+     for char in n:
+         val = cipher.index(char)
+         result = result * 64 + val
+
+     if neg_flag:
+         result = result * -1
+     return result
+
+ def decode_number(n, float_flag=False):
+     if float_flag:
+         raise NotImplementedError
+     return decode_int(n)
+
+
+ def post_process(post_process: Callable):
+     def outer(fn):
+         @wraps(fn)
+         def inner(*args, **kwargs):
+             val = fn(*args, **kwargs)
+             return post_process(val, *args, **kwargs)
+         return inner
+     return outer
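
The helpers above implement the base-64-style integer encoding used in the navigation segment of explorer URLs; integers round-trip through encode/decode, while float decoding is deliberately unimplemented. A small sketch:

from siibra.explorer.util import encode_number, decode_number

encoded = encode_number(350000)          # integer mapped onto the 64-character cipher
assert decode_number(encoded) == 350000  # decodes back losslessly

# Floats are packed via their IEEE-754 bits before encoding; decode_number with
# float_flag=True raises NotImplementedError, so only encoding is supported for floats.
encoded_float = encode_number(0.5, float_flag=True)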
siibra/features/anchor.py
@@ -57,15 +57,19 @@ class AnatomicalAnchor:
          self._location_cached = location
          self._assignments: Dict[Union[AtlasConcept, Location], List[AnatomicalAssignment]] = {}
          self._last_matched_concept = None
-         self._regions_cached = None
-         self._regionspec = None
-         if isinstance(region, Region):
-             self._regions_cached = {region: AssignmentQualification.EXACT}
-         elif isinstance(region, str):
-             self._regionspec = region
+         if isinstance(region, dict):
+             self._regions_cached = region
+             self._regionspec = ", ".join({r.name for r in region.keys()})
          else:
-             if region is not None:
-                 raise ValueError(f"Invalid region specification: {region}")
+             self._regions_cached = None
+             self._regionspec = None
+             if isinstance(region, Region):
+                 self._regions_cached = {region: AssignmentQualification.EXACT}
+             elif isinstance(region, str):
+                 self._regionspec = region
+             else:
+                 if region is not None:
+                     raise ValueError(f"Invalid region specification: {region}")
          self._aliases_cached = None

      @property

@@ -135,7 +139,10 @@ class AnatomicalAnchor:
          region = "" if self._regionspec is None else str(self._regionspec)
          location = "" if self.location is None else str(self.location)
          separator = " " if min(len(region), len(location)) > 0 else ""
-         return region + separator + location
+         if region and location:
+             return region + " with " + location
+         else:
+             return region + separator + location

      def __repr__(self):
          return self.__str__()

@@ -316,3 +323,23 @@ class AnatomicalAnchor:
              return ""
          else:
              return ' and '.join({str(_) for _ in self.last_match_result})
+
+     def __add__(self, other: 'AnatomicalAnchor') -> 'AnatomicalAnchor':
+         if not isinstance(other, AnatomicalAnchor):
+             raise ValueError(f"Cannot combine an AnatomicalAnchor with {other.__class__}")
+
+         if self.species != other.species:
+             raise ValueError("Cannot combine an AnatomicalAnchor from different species.")
+         else:
+             species = self.species.union(other.species)
+
+         regions = self.regions
+         regions.update(other.regions)
+
+         location = Location.union(self.location, other.location)
+
+         return AnatomicalAnchor(species, location, regions)
+
+     def __radd__(self, other):
+         # required to enable `sum`
+         return self if other == 0 else self.__add__(other)
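
With __add__ and __radd__, anchors of the same species can now be merged, and a list of anchors can be collapsed with sum() (__radd__ handles the implicit starting 0). A hedged sketch; the constructor keywords and region names below are assumptions for illustration, not taken from this diff:

from siibra.features.anchor import AnatomicalAnchor

a1 = AnatomicalAnchor(species="Homo sapiens", region="Area hOc1 (V1, 17, CalcS)")  # illustrative
a2 = AnatomicalAnchor(species="Homo sapiens", region="Area hOc2 (V2, 18)")         # illustrative

combined = a1 + a2      # union of the regions (and of the locations, when both anchors have one)
merged = sum([a1, a2])  # equivalent; sum() starts from 0, which __radd__ passes through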
siibra/features/connectivity/__init__.py
@@ -14,7 +14,10 @@
  # limitations under the License.
  """Multimodal data features concerning connectivity data."""

- from .functional_connectivity import FunctionalConnectivity
+ from .functional_connectivity import (
+     FunctionalConnectivity,
+     AnatomoFunctionalConnectivity
+ )
  from .streamline_counts import StreamlineCounts
  from .streamline_lengths import StreamlineLengths
  from .tracing_connectivity import TracingConnectivity

@@ -25,5 +28,6 @@ def __dir__():
      "FunctionalConnectivity",
      "StreamlineCounts",
      "StreamlineLengths",
-     "TracingConnectivity"
+     "TracingConnectivity",
+     "AnatomoFunctionalConnectivity"
  ]
siibra/features/connectivity/functional_connectivity.py
@@ -14,6 +14,7 @@
  # limitations under the License.

  from . import regional_connectivity
+
  from hashlib import md5


@@ -24,6 +25,9 @@ class FunctionalConnectivity(
  ):
      """Functional connectivity matrix grouped by a parcellation."""

+     _filter_attrs = regional_connectivity.RegionalConnectivity._filter_attrs + ["paradigm"]
+     _compound_attrs = regional_connectivity.RegionalConnectivity._compound_attrs + ["paradigm"]
+
      def __init__(self, paradigm: str, **kwargs):
          regional_connectivity.RegionalConnectivity.__init__(self, **kwargs)
          self.paradigm = paradigm

@@ -34,3 +38,20 @@ class FunctionalConnectivity(
      @property
      def id(self):
          return super().id + "--" + md5(self.paradigm.encode("utf-8")).hexdigest()
+
+     @property
+     def name(self):
+         return f"{super().name}, {self.paradigm} paradigm"
+
+
+ class AnatomoFunctionalConnectivity(
+     regional_connectivity.RegionalConnectivity,
+     configuration_folder="features/connectivity/regional/anatomofunctional",
+     category="connectivity"
+ ):
+     """Functional connectivity matrix grouped by a parcellation."""
+
+     _filter_attrs = ["modality", "cohort", "feature"]
+
+     def __init__(self, **kwargs):
+         regional_connectivity.RegionalConnectivity.__init__(self, **kwargs)