mrzerocore-0.4.3-cp37-abi3-musllinux_1_2_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. MRzeroCore/__init__.py +22 -0
  2. MRzeroCore/_prepass.abi3.so +0 -0
  3. MRzeroCore/phantom/brainweb/.gitignore +1 -0
  4. MRzeroCore/phantom/brainweb/__init__.py +192 -0
  5. MRzeroCore/phantom/brainweb/brainweb_data.json +92 -0
  6. MRzeroCore/phantom/brainweb/brainweb_data_sources.txt +74 -0
  7. MRzeroCore/phantom/brainweb/output/.gitkeep +0 -0
  8. MRzeroCore/phantom/custom_voxel_phantom.py +240 -0
  9. MRzeroCore/phantom/nifti_phantom.py +210 -0
  10. MRzeroCore/phantom/sim_data.py +200 -0
  11. MRzeroCore/phantom/tissue_dict.py +269 -0
  12. MRzeroCore/phantom/voxel_grid_phantom.py +610 -0
  13. MRzeroCore/pulseq/exporter.py +374 -0
  14. MRzeroCore/pulseq/exporter_v2.py +650 -0
  15. MRzeroCore/pulseq/helpers.py +228 -0
  16. MRzeroCore/pulseq/pulseq_exporter.py +553 -0
  17. MRzeroCore/pulseq/pulseq_loader/__init__.py +66 -0
  18. MRzeroCore/pulseq/pulseq_loader/adc.py +48 -0
  19. MRzeroCore/pulseq/pulseq_loader/helpers.py +75 -0
  20. MRzeroCore/pulseq/pulseq_loader/pulse.py +80 -0
  21. MRzeroCore/pulseq/pulseq_loader/pulseq_file/__init__.py +235 -0
  22. MRzeroCore/pulseq/pulseq_loader/pulseq_file/adc.py +68 -0
  23. MRzeroCore/pulseq/pulseq_loader/pulseq_file/block.py +98 -0
  24. MRzeroCore/pulseq/pulseq_loader/pulseq_file/definitons.py +68 -0
  25. MRzeroCore/pulseq/pulseq_loader/pulseq_file/gradient.py +70 -0
  26. MRzeroCore/pulseq/pulseq_loader/pulseq_file/helpers.py +156 -0
  27. MRzeroCore/pulseq/pulseq_loader/pulseq_file/rf.py +91 -0
  28. MRzeroCore/pulseq/pulseq_loader/pulseq_file/trap.py +69 -0
  29. MRzeroCore/pulseq/pulseq_loader/spoiler.py +33 -0
  30. MRzeroCore/reconstruction.py +104 -0
  31. MRzeroCore/sequence.py +747 -0
  32. MRzeroCore/simulation/isochromat_sim.py +254 -0
  33. MRzeroCore/simulation/main_pass.py +286 -0
  34. MRzeroCore/simulation/pre_pass.py +192 -0
  35. MRzeroCore/simulation/sig_to_mrd.py +362 -0
  36. MRzeroCore/util.py +884 -0
  37. MRzeroCore.libs/libgcc_s-39080030.so.1 +0 -0
  38. mrzerocore-0.4.3.dist-info/METADATA +121 -0
  39. mrzerocore-0.4.3.dist-info/RECORD +41 -0
  40. mrzerocore-0.4.3.dist-info/WHEEL +4 -0
  41. mrzerocore-0.4.3.dist-info/licenses/LICENSE +661 -0
MRzeroCore/phantom/nifti_phantom.py
@@ -0,0 +1,210 @@
+ from dataclasses import dataclass
+ from typing import Literal, Any
+ from pathlib import Path
+
+
+ @dataclass
+ class PhantomUnits:
+     gyro: Literal["MHz/T"]
+     B0: Literal["T"]
+     T1: Literal["s"]
+     T2: Literal["s"]
+     T2dash: Literal["s"]
+     ADC: Literal["10^-3 mm^2/s"]
+     dB0: Literal["Hz"]
+     B1_tx: Literal["rel"]
+     B1_rx: Literal["rel"]
+
+     @classmethod
+     def default(cls):
+         return cls(
+             gyro="MHz/T",
+             B0="T",
+             T1="s",
+             T2="s",
+             T2dash="s",
+             ADC="10^-3 mm^2/s",
+             dB0="Hz",
+             B1_tx="rel",
+             B1_rx="rel",
+         )
+
+     @classmethod
+     def from_dict(cls, config: dict[str, str]):
+         # Currently this is only for documentation and no units other than the
+         # default units are supported. This can definitely change in the future,
+         # but the implementation has no priority right now.
+         default = cls.default()
+         assert default.to_dict() == config, "Only default units are supported for now"
+         return default
+
+     def to_dict(self) -> dict[str, str]:
+         return {
+             "gyro": self.gyro,
+             "B0": self.B0,
+             "T1": self.T1,
+             "T2": self.T2,
+             "T2'": self.T2dash,
+             "ADC": self.ADC,
+             "dB0": self.dB0,
+             "B1+": self.B1_tx,
+             "B1-": self.B1_rx,
+         }
+
+
+ @dataclass
+ class PhantomSystem:
+     gyro: float
+     B0: float
+
+     @classmethod
+     def from_dict(cls, config: dict[str, float]):
+         return cls(**config)
+
+     def to_dict(self) -> dict[str, float]:
+         return {"gyro": self.gyro, "B0": self.B0}
+
+
+ @dataclass
+ class NiftiRef:
+     file_name: Path
+     tissue_index: int
+
+     @classmethod
+     def parse(cls, config: str):
+         import re
+
+         regex = re.compile(r"(?P<file>.+?)\[(?P<idx>\d+)\]$")
+         m = regex.match(config)
+         if not m:
+             raise ValueError("Invalid file_ref", m)
+         return cls(file_name=Path(m.group("file")), tissue_index=int(m.group("idx")))
+
+     def to_str(self) -> str:
+         return f"{self.file_name}[{self.tissue_index}]"
+
+
+ @dataclass
+ class NiftiMapping:
+     file: NiftiRef
+     func: str
+
+     @classmethod
+     def parse(cls, config: dict[str, Any]):
+         return cls(file=NiftiRef.parse(config["file"]), func=config["func"])
+
+     def to_dict(self) -> dict[str, Any]:
+         return {
+             "file": self.file.to_str(),
+             "func": self.func
+         }
+
+
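Before the tissue definitions below, note how these two reference types are written in a phantom config: a NiftiRef is a plain string of the form "file.nii.gz[index]" (matching the regex above), and a NiftiMapping is a dict with a "file" reference plus a "func" expression applied to the loaded data (see load_mapping in tissue_dict.py further down). A minimal sketch, using made-up file names:

ref = NiftiRef.parse("gm_density.nii.gz[2]")   # hypothetical 4D NIfTI, volume 2
assert ref.to_str() == "gm_density.nii.gz[2]"

mapping = NiftiMapping.parse({
    "file": "quantitative_T1.nii.gz[0]",       # hypothetical quantitative map
    "func": "x * 0.001",                       # expression over the loaded data
})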
+ @dataclass
+ class NiftiTissue:
+     density: NiftiRef
+     T1: float | NiftiRef | NiftiMapping
+     T2: float | NiftiRef | NiftiMapping
+     T2dash: float | NiftiRef | NiftiMapping
+     ADC: float | NiftiRef | NiftiMapping
+     dB0: float | NiftiRef | NiftiMapping
+     B1_tx: list[float | NiftiRef | NiftiMapping]
+     B1_rx: list[float | NiftiRef | NiftiMapping]
+
+     @classmethod
+     def default(cls, density: NiftiRef):
+         return cls.from_dict({"density": density})
+
+     @classmethod
+     def from_dict(cls, config: dict[str, Any]):
+         def parse_prop(prop):
+             if isinstance(prop, float):
+                 return float(prop)
+             elif isinstance(prop, str):
+                 return NiftiRef.parse(prop)
+             else:
+                 return NiftiMapping.parse(prop)
+
+         return cls(
+             density=NiftiRef.parse(config["density"]),
+             T1=parse_prop(config.get("T1", float("inf"))),
+             T2=parse_prop(config.get("T2", float("inf"))),
+             T2dash=parse_prop(config.get("T2'", float("inf"))),
+             ADC=parse_prop(config.get("ADC", 0.0)),
+             dB0=parse_prop(config.get("dB0", 1.0)),
+             B1_tx=[parse_prop(ch) for ch in config.get("B1+", [1.0])],
+             B1_rx=[parse_prop(ch) for ch in config.get("B1-", [1.0])],
+         )
+
+     def to_dict(self) -> dict:
+         def serialize_prop(prop):
+             if isinstance(prop, float):
+                 return prop
+             elif isinstance(prop, NiftiRef):
+                 return prop.to_str()
+             elif isinstance(prop, NiftiMapping):
+                 return prop.to_dict()
+             else:
+                 raise ValueError("Unsupported property type", type(prop))
+
+         return {
+             "density": self.density.to_str(),
+             "T1": serialize_prop(self.T1),
+             "T2": serialize_prop(self.T2),
+             "T2'": serialize_prop(self.T2dash),
+             "ADC": serialize_prop(self.ADC),
+             "dB0": serialize_prop(self.dB0),
+             "B1+": [serialize_prop(ch) for ch in self.B1_tx],
+             "B1-": [serialize_prop(ch) for ch in self.B1_rx],
+         }
+
+
+ @dataclass
+ class NiftiPhantom:
+     file_type = "nifti_phantom_v1"
+     units: PhantomUnits
+     system: PhantomSystem
+     tissues: dict[str, NiftiTissue]
+
+     @classmethod
+     def default(cls, gyro=42.5764, B0=3.0):
+         return cls(PhantomUnits.default(), PhantomSystem(gyro, B0), {})
+
+     @classmethod
+     def load(cls, path: Path | str):
+         import json
+
+         with open(path, "r") as f:
+             config = json.load(f)
+         return cls.from_dict(config)
+
+     def save(self, path: Path | str):
+         import json
+         import os
+         path = Path(path)
+
+         os.makedirs(path.parent, exist_ok=True)
+         with open(path, "w") as f:
+             json.dump(self.to_dict(), f, indent=2)
+
+     @classmethod
+     def from_dict(cls, config: dict):
+         assert config["file_type"] == "nifti_phantom_v1"
+         units = PhantomUnits.from_dict(config["units"])
+         system = PhantomSystem.from_dict(config["system"])
+         tissues = {
+             name: NiftiTissue.from_dict(tissue)
+             for name, tissue in config["tissues"].items()
+         }
+
+         return cls(units, system, tissues)
+
+     def to_dict(self) -> dict:
+         return {
+             "file_type": self.file_type,
+             "units": self.units.to_dict(),
+             "system": self.system.to_dict(),
+             "tissues": {
+                 name: tissue.to_dict() for name, tissue in self.tissues.items()
+             },
+         }
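Taken together, this file defines the on-disk `phantom.json` format (file_type "nifti_phantom_v1"): global units and system parameters plus one NiftiTissue entry per tissue. A short sketch of assembling such a config in memory and writing it out; the tissue name, file name and relaxation values are illustrative only:

phantom = NiftiPhantom.default(gyro=42.5764, B0=3.0)
phantom.tissues["gray_matter"] = NiftiTissue.from_dict({
    "density": "gm_density.nii.gz[0]",  # hypothetical 4D NIfTI, volume 0
    "T1": 1.55,                         # seconds
    "T2": 0.09,
    "T2'": 0.322,
    "ADC": 0.83,                        # 10^-3 mm^2/s
})
phantom.save("output/phantom.json")     # creates the parent directory if needed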
MRzeroCore/phantom/sim_data.py
@@ -0,0 +1,200 @@
+ from __future__ import annotations
+ from typing import Callable, Any, Optional, Dict
+ import torch
+ from numpy import pi
+
+
+ class SimData:
+     """This class contains the physical data for simulating an MRI sequence.
+
+     This class is not intended to be created directly; rather, use one of
+     the :class:`SimData` builders / loaders. Those are made for specific
+     tasks and can be converted into :class:`SimData`, but they also attach
+     metadata to the output so it can be converted back. The attributes of this
+     class are nothing but the data needed for simulation, so it can describe
+     a single voxel, randomly distributed voxels, a BrainWeb phantom, ...
+
+     Attributes
+     ----------
+     PD : torch.Tensor
+         Per voxel proton density
+     T1 : torch.Tensor
+         Per voxel T1 relaxation time (seconds)
+     T2 : torch.Tensor
+         Per voxel T2 relaxation time (seconds)
+     T2dash : torch.Tensor
+         Per voxel T2' dephasing time (seconds)
+     D : torch.Tensor
+         Isotropic diffusion coefficients [10^-3 mm^2/s]
+     B0 : torch.Tensor
+         Per voxel B0 inhomogeneity (Hertz)
+     B1 : torch.Tensor
+         (coil_count, voxel_count) Per coil and per voxel B1 inhomogeneity
+     coil_sens : torch.Tensor
+         (coil_count, voxel_count) Per coil sensitivity (arbitrary units)
+     size : torch.Tensor
+         Physical size of the phantom. If a sequence with normalized gradients
+         is simulated, size is used to scale them to match the phantom.
+     avg_B1_trig : torch.Tensor
+         (361, 3) values containing the PD-weighted avg of sin/cos/sin²(B1*flip)
+     voxel_pos : torch.Tensor
+         (voxel_count, 3) Voxel positions. These can be anywhere, but for easy
+         sequence programming they should be in the range [-0.5, 0.5[
+     nyquist : torch.Tensor
+         (3, ) tensor: Maximum frequency encoded by the data
+     dephasing_func : torch.Tensor -> torch.Tensor
+         A function describing the intra-voxel dephasing. Maps a k-space
+         trajectory (events, 3) to the measured attenuation (events).
+     recover_func : SimData -> Any
+         A function that can recover the original data that was used to create
+         this instance. Usually a lambda that captures meta data like a mask.
+     """
+
+     def __init__(
+         self,
+         PD: torch.Tensor,
+         T1: torch.Tensor,
+         T2: torch.Tensor,
+         T2dash: torch.Tensor,
+         D: torch.Tensor,
+         B0: torch.Tensor,
+         B1: torch.Tensor,
+         coil_sens: torch.Tensor,
+         size: torch.Tensor,
+         voxel_pos: torch.Tensor,
+         nyquist: torch.Tensor,
+         dephasing_func: Callable[[torch.Tensor, torch.Tensor], torch.Tensor],
+         recover_func: Callable[[SimData], Any] | None = None,
+         phantom_motion=None,
+         voxel_motion=None,
+         tissue_masks: Optional[Dict[str, torch.Tensor]] = None,
+     ) -> None:
+         """Create a :class:`SimData` instance based on the given tensors.
+
+         All parameters must be of shape ``(voxel_count, )``, only B1 and
+         coil_sens have an additional first dimension for multiple coils.
+
+         Parameters
+         ----------
+         normalize : bool
+             If true, applies B0 -= B0.mean(), B1 /= B1.mean(), PD /= PD.sum()
+         """
+         if not (PD.shape == T1.shape == T2.shape == T2dash.shape == B0.shape):
+             raise Exception("Mismatch of voxel-data shapes")
+         if not PD.ndim == 1:
+             raise Exception("Data must be 1D (flattened)")
+         if B1.ndim < 2 or B1.shape[1] != PD.numel():
+             raise Exception("B1 must have shape [coils, voxel_count]")
+         if coil_sens.ndim < 2 or coil_sens.shape[1] != PD.numel():
+             raise Exception("coil_sens must have shape [coils, voxel_count]")
+
+         self.PD = PD.clamp(min=0)
+         self.T1 = T1.clamp(min=1e-6)
+         self.T2 = T2.clamp(min=1e-6)
+         self.T2dash = T2dash.clamp(min=1e-6)
+         self.D = D.clamp(min=1e-6)
+         self.B0 = B0.clone()
+         self.B1 = B1.clone()
+         self.tissue_masks = tissue_masks
+         if self.tissue_masks is None:
+             self.tissue_masks = {}
+         self.coil_sens = coil_sens.clone()
+         self.size = size.clone()
+         self.voxel_pos = voxel_pos.clone()
+         self.avg_B1_trig = calc_avg_B1_trig(B1, PD)
+         self.nyquist = nyquist.clone()
+         self.dephasing_func = dephasing_func
+         self.recover_func = recover_func
+
+         self.phantom_motion = phantom_motion
+         self.voxel_motion = voxel_motion
+
+     def cuda(self) -> SimData:
+         """Move the simulation data to the default CUDA device.
+
+         The returned :class:`SimData` is equivalent to :attr:`self` if the data
+         already was on the GPU.
+         """
+         return SimData(
+             self.PD.cuda(),
+             self.T1.cuda(),
+             self.T2.cuda(),
+             self.T2dash.cuda(),
+             self.D.cuda(),
+             self.B0.cuda(),
+             self.B1.cuda(),
+             self.coil_sens.cuda(),
+             self.size.cuda(),
+             self.voxel_pos.cuda(),
+             self.nyquist.cuda(),
+             self.dephasing_func,
+             self.recover_func,
+             self.phantom_motion,
+             self.voxel_motion,
+             tissue_masks={
+                 k: v.cuda() for k, v in self.tissue_masks.items()
+             },
+         )
+
+     def cpu(self) -> SimData:
+         """Move the simulation data to the CPU.
+
+         The returned :class:`SimData` is equivalent to :attr:`self` if the data
+         already was on the CPU.
+         """
+         return SimData(
+             self.PD.cpu(),
+             self.T1.cpu(),
+             self.T2.cpu(),
+             self.T2dash.cpu(),
+             self.D.cpu(),
+             self.B0.cpu(),
+             self.B1.cpu(),
+             self.coil_sens.cpu(),
+             self.size.cpu(),
+             self.voxel_pos.cpu(),
+             self.nyquist.cpu(),
+             self.dephasing_func,
+             self.recover_func,
+             self.phantom_motion,
+             self.voxel_motion,
+             tissue_masks={
+                 k: v.cpu() for k, v in self.tissue_masks.items()
+             },
+         )
+
+     @property
+     def device(self) -> torch.device:
+         """The device (either CPU or a CUDA device) the data is stored on."""
+         return self.PD.device
+
+     def recover(self) -> Any:
+         """Recover the data that was used to build this instance."""
+         if self.recover_func is None:
+             raise Exception("No recover function was provided")
+         else:
+             return self.recover_func(self)
+
+
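In practice a SimData instance usually comes from a phantom builder (e.g. VoxelGridPhantom.build() or TissueDict.build() below), but the constructor itself only needs flat per-voxel tensors. A minimal single-voxel sketch; the numbers and the trivial dephasing function are illustrative only:

import torch

single_voxel = SimData(
    PD=torch.tensor([1.0]),
    T1=torch.tensor([1.5]),                 # seconds
    T2=torch.tensor([0.1]),
    T2dash=torch.tensor([0.05]),
    D=torch.tensor([1.0]),                  # 10^-3 mm^2/s
    B0=torch.tensor([0.0]),                 # Hz off-resonance
    B1=torch.ones(1, 1),                    # (coil_count, voxel_count)
    coil_sens=torch.ones(1, 1),             # (coil_count, voxel_count)
    size=torch.tensor([0.2, 0.2, 0.008]),   # meters
    voxel_pos=torch.zeros(1, 3),
    nyquist=torch.tensor([32.0, 32.0, 1.0]),
    dephasing_func=lambda traj, *_: torch.ones(traj.shape[0]),  # no intra-voxel dephasing
)
if torch.cuda.is_available():
    single_voxel = single_voxel.cuda()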
+ def calc_avg_B1_trig(B1: torch.Tensor, PD: torch.Tensor) -> torch.Tensor:
+     """Return a (361, 3) tensor for B1 specific sin, cos and sin² values.
+
+     This function calculates values for sin, cos and sin² for (0, 2pi) * B1 and
+     then averages the results, weighted by PD. These 3 functions are the non-
+     linear parts of a rotation matrix; the resulting lookup table can be used
+     to calculate averaged rotations for the whole phantom. This is useful for
+     the pre-pass, to get better magnetization estimates even if the pre-pass is
+     not spatially resolved.
+     """
+     # With pTx, there are now potentially multiple B1 maps with phase.
+     # NOTE: This is a (probably suboptimal) workaround
+     B1 = B1.sum(0).abs()
+
+     B1 = B1.flatten()[:, None]  # voxels, 1
+     PD = (PD.flatten() / PD.sum())[:, None]  # voxels, 1
+     angle = torch.linspace(0, 2*pi, 361, device=PD.device)[None, :]  # 1, angle
+     return torch.stack([
+         (torch.sin(B1 * angle) * PD).sum(0),
+         (torch.cos(B1 * angle) * PD).sum(0),
+         (torch.sin(B1 * angle/2)**2 * PD).sum(0)
+     ], dim=1).type(torch.float32)
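Since the 361 rows sample flip angles from 0 to 2π in one-degree steps, the table can be read by rounding a nominal flip angle to the nearest degree. A small sketch, continuing the single-voxel example above; the 90° angle is arbitrary:

import math

alpha = math.radians(90)                           # nominal flip angle
row = round(alpha / (2 * math.pi) * 360)           # nearest one-degree sample
avg_sin, avg_cos, avg_sin_half_sq = single_voxel.avg_B1_trig[row]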
MRzeroCore/phantom/tissue_dict.py
@@ -0,0 +1,269 @@
+ from .voxel_grid_phantom import VoxelGridPhantom
+ from .sim_data import SimData
+ from .nifti_phantom import NiftiPhantom, NiftiTissue, NiftiRef, NiftiMapping
+ from pathlib import Path
+ import torch
+ import numpy as np
+ from typing import Literal
+ from functools import lru_cache
+
+
+ class TissueDict(dict[str, VoxelGridPhantom]):
+     @classmethod
+     def load(cls, path: Path | str, config: NiftiPhantom | None = None):
+         """Load a NIfTI phantom into a dictionary of tissues.
+
+         This class is a Python dictionary where the keys are the names of the
+         tissues (as written in the `phantom.json`) and the values are
+         VoxelGridPhantoms (typically tissues).
+         The dictionary is extended by additional methods for saving as NIfTI
+         phantom, interpolation, slicing, combining all tissues into a single
+         VoxelGridPhantom with weighted voxels, and finally for converting all
+         tissues into `SimData` (with overlapping tissues / partial-volume).
+
+         Parameters
+         ----------
+         path: Path | str
+             Either the path to the `phantom.json` configuration file _or_ the
+             directory where the NIfTIs are found (if config is provided).
+         config: NiftiPhantom
+             Optional configuration - can be used to load a NIfTI phantom config
+             and modify it in-memory before loading the actual data.
+         """
+         if config:
+             base_dir = Path(path)
+         else:
+             base_dir = Path(path).parent
+             config = NiftiPhantom.load(path)
+
+         # NOTE: Unit conversion is ignored. Currently no units other than the
+         # defaults are supported (conversion factor 1); this might change in the future
+
+         return TissueDict({
+             name: load_tissue(tissue, base_dir)
+             for name, tissue in config.tissues.items()
+         })
+
+     def save(self, path_to_json: str | Path, gyro=42.5764, B0=3.0):
+         from pathlib import Path
+         import os
+         import nibabel as nib
+
+         path_to_json = Path(path_to_json)
+         base_name = path_to_json.stem
+         base_dir = path_to_json.parent
+         os.makedirs(base_dir, exist_ok=True)
+
+         density = []
+         T1 = []
+         T2 = []
+         T2dash = []
+         ADC = []
+         dB0 = []
+         B1_tx = []
+         B1_rx = []
+
+         def save_tissue(tissue: VoxelGridPhantom):
+             config = {}
+
+             def save_map(name, map, nifti):
+                 ext = f"_{name}" if name != "density" else ""
+                 # Multi-channel data when setting the same property multiple times
+                 def set(value):
+                     if name in config:
+                         config[name].append(value)
+                     else:
+                         config[name] = value
+
+                 if map.std() < 1e-5:
+                     set(float(map.mean()))
+                 else:
+                     # Check if map is shared with other tissues
+                     for idx, nifti_map in enumerate(nifti):
+                         if torch.equal(map, nifti_map):
+                             set(f"{base_name}{ext}.nii.gz[{idx}]")
+                             return
+                     # Not shared, write new map
+                     set(f"{base_name}{ext}.nii.gz[{len(nifti)}]")
+                     nifti.append(map)
+
+             save_map("density", tissue.PD, density)
+             save_map("T1", tissue.T1, T1)
+             save_map("T2", tissue.T2, T2)
+             save_map("T2'", tissue.T2dash, T2dash)
+             save_map("ADC", tissue.D, ADC)
+             save_map("dB0", tissue.B0, dB0)
+             config["B1+"] = []
+             for channel in tissue.B1:
+                 save_map("B1+", channel, B1_tx)
+             config["B1-"] = []
+             for channel in tissue.coil_sens:
+                 save_map("B1-", channel, B1_rx)
+
+             return NiftiTissue.from_dict(config)
+
+         # Generate all tissues (and fill the prop maps)
+         tissues = {tissue: save_tissue(self[tissue]) for tissue in self.keys()}
+
+         # Write the NIfTIs
+         size = np.asarray(next(iter(self.values())).size)
+         vs = 1000 * size / np.asarray(density[0].shape)
+         affine = np.array(
+             [
+                 [+vs[0], 0, 0, -size[0] / 2 * 1000],
+                 [0, +vs[1], 0, -size[1] / 2 * 1000],
+                 [0, 0, +vs[2], -size[2] / 2 * 1000],
+                 [0, 0, 0, 0],  # Row ignored
+             ]
+         )
+
+         def save_nifti(prop, name):
+             if len(prop) > 0:
+                 ext = f"-{name}" if name != "density" else ""
+                 file_name = base_dir / f"{base_name}{ext}.nii.gz"
+                 data = np.stack(prop, -1)
+
+                 print(f"Storing '{file_name}' - {data.shape}")
+                 nib.save(nib.nifti1.Nifti1Image(data, affine), file_name)
+
+         save_nifti(density, "density")
+         save_nifti(T1, "T1")
+         save_nifti(T2, "T2")
+         save_nifti(T2dash, "T2'")
+         save_nifti(ADC, "ADC")
+         save_nifti(dB0, "dB0")
+         save_nifti(B1_tx, "B1+")
+         save_nifti(B1_rx, "B1-")
+
+         config = NiftiPhantom.default(gyro, B0)
+         config.tissues = tissues
+         config.save(path_to_json)
+
+     def interpolate(self, x: int, y: int, z: int):
+         return TissueDict({
+             name: phantom.interpolate(x, y, z) for name, phantom in self.items()
+         })
+
+     def slices(self, slices: list[int]):
+         return TissueDict({
+             name: phantom.slices(slices) for name, phantom in self.items()
+         })
+
+     def combine(self) -> VoxelGridPhantom:
+         """Combine individual maps to a mixed-tissue (no partial volume) phantom."""
+         phantoms = list(self.values())
+
+         PD = sum(p.PD for p in phantoms)
+         segmentation = [p.PD / PD for p in phantoms]
+
+         from copy import deepcopy
+         combined = deepcopy(phantoms[0])
+         combined.PD = PD
+         combined.T1 = sum(seg * p.T1 for seg, p in zip(segmentation, phantoms))
+         combined.T2 = sum(seg * p.T2 for seg, p in zip(segmentation, phantoms))
+         combined.T2dash = sum(seg * p.T2dash for seg, p in zip(segmentation, phantoms))
+         combined.D = sum(seg * p.D for seg, p in zip(segmentation, phantoms))
+         combined.B0 = sum(seg * p.B0 for seg, p in zip(segmentation, phantoms))
+         combined.B1 = sum(seg[None, ...] * p.B1 for seg, p in zip(segmentation, phantoms))
+
+         return combined
+
+     def build(self, PD_threshold: float = 1e-6,
+               voxel_shape: Literal["sinc", "box", "point"] = "sinc"
+               ) -> SimData:
+         data_list = [self[tissue].build(PD_threshold, voxel_shape) for tissue in self]
+
+         return SimData(
+             PD=torch.cat([obj.PD for obj in data_list]),
+             T1=torch.cat([obj.T1 for obj in data_list]),
+             T2=torch.cat([obj.T2 for obj in data_list]),
+             T2dash=torch.cat([obj.T2dash for obj in data_list]),
+             D=torch.cat([obj.D for obj in data_list]),
+             B0=torch.cat([obj.B0 for obj in data_list]),
+             B1=torch.cat([obj.B1 for obj in data_list], 1),
+             coil_sens=torch.cat([obj.coil_sens for obj in data_list], 1),
+             voxel_pos=torch.cat([obj.voxel_pos for obj in data_list], 0),
+             tissue_masks=torch.stack([obj.tissue_masks for obj in data_list]),
+             size=data_list[0].size,
+             nyquist=data_list[0].nyquist,
+             dephasing_func=data_list[0].dephasing_func,
+         )
+
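The methods above compose into the typical phantom-preparation workflow: load the per-tissue maps, optionally downsample or pick slices, and then either mix everything into one VoxelGridPhantom or build per-tissue SimData. A sketch of the intended usage, assuming a phantom.json (such as the one written by save()) exists at the hypothetical path below:

tissues = TissueDict.load("output/phantom.json")       # hypothetical path
tissues = tissues.interpolate(64, 64, 32).slices([16])
combined = tissues.combine()   # one VoxelGridPhantom with PD-weighted mixing
sim_data = tissues.build()     # SimData with a separate voxel set per tissue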
+ # ============================
+ # Helpers for importing NIfTIs
+ # ============================
+
+ def load_tissue(config: NiftiTissue, base_dir: Path) -> VoxelGridPhantom:
+     density, affine = load_file_ref(base_dir, config.density)
+     size = np.abs(density.shape @ affine[:3, :3]) / 1000  # affine is in mm
+
+     def lp(cfg):
+         return torch.as_tensor(load_property(cfg, base_dir, density, affine))
+
+     return VoxelGridPhantom(
+         PD=torch.as_tensor(density),
+         size=torch.as_tensor(size),
+         T1=lp(config.T1),
+         T2=lp(config.T2),
+         T2dash=lp(config.T2dash),
+         D=lp(config.ADC),
+         B0=lp(config.dB0),
+         B1=torch.stack([lp(cfg) for cfg in config.B1_tx], 0),
+         coil_sens=torch.stack([lp(cfg) for cfg in config.B1_rx], 0),
+     )
+
+
+ def load_property(config: float | NiftiRef | NiftiMapping,
+                   base_dir: Path, density_mat: np.ndarray, target_affine: np.ndarray
+                   ) -> np.ndarray:
+
+     if isinstance(config, float):
+         return np.full_like(density_mat, config)
+
+     if isinstance(config, NiftiRef):
+         data, affine = load_file_ref(base_dir, config)
+         assert np.all(affine == target_affine)
+         return data
+
+     if isinstance(config, NiftiMapping):
+         data, affine = load_mapping(base_dir, config)
+         assert np.all(affine == target_affine)
+         return data
+
+     raise TypeError("Config must be a float, file_ref or mapping", type(config))
+
+ def load_mapping(base_dir: Path, file_mapping: NiftiMapping) -> tuple[np.ndarray, np.ndarray]:
+     data, affine = load_file_ref(base_dir, file_mapping.file)
+
+     # TODO - SAFETY: Don't use eval but a custom (imported?) expression parser.
+     print(f"Executing mapping function: '{file_mapping.func}'")
+     return eval(
+         file_mapping.func,
+         {"__builtins__": None},
+         {
+             "x": data,
+             "x_min": data.min(),
+             "x_max": data.max(),
+             "x_mean": data.mean(),
+             "x_std": data.std()
+         }
+     ), affine
+
+
+ def load_file_ref(base_dir: Path, file_ref: NiftiRef) -> tuple[np.ndarray, np.ndarray]:
+     print(f"Loading NIfTI (file={file_ref.file_name}, index={file_ref.tissue_index})")
+     file = file_ref.file_name
+     index = file_ref.tissue_index
+     if not file.is_absolute():
+         file = (base_dir / file).resolve()
+
+     data, affine = _load_cached(str(file))
+     return data[:, :, :, index], affine
+
+
+ # Use a small cache to avoid reloading NIfTIs every time
+ @lru_cache(maxsize=20)
+ def _load_cached(file_name):
+     import nibabel
+     img = nibabel.loadsave.load(file_name)
+     return np.asarray(img.dataobj), img.get_sform()
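load_file_ref is the only place where NIfTI data is actually read: relative file names are resolved against the phantom.json directory, each image is loaded once (lru_cache keeps up to 20 files), and the tissue index picks one volume of the 4D array. A short sketch with a hypothetical file:

from pathlib import Path

gm, affine = load_file_ref(Path("output"), NiftiRef.parse("phantom.nii.gz[0]"))
# gm is a 3D numpy array (one volume of the 4D NIfTI), affine its 4x4 sform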