trackscan-0.0.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- trackscan-0.0.1/PKG-INFO +44 -0
- trackscan-0.0.1/README.md +32 -0
- trackscan-0.0.1/pyproject.toml +24 -0
- trackscan-0.0.1/setup.cfg +4 -0
- trackscan-0.0.1/src/cell_track.egg-info/PKG-INFO +44 -0
- trackscan-0.0.1/src/cell_track.egg-info/SOURCES.txt +18 -0
- trackscan-0.0.1/src/cell_track.egg-info/dependency_links.txt +1 -0
- trackscan-0.0.1/src/cell_track.egg-info/entry_points.txt +2 -0
- trackscan-0.0.1/src/cell_track.egg-info/requires.txt +2 -0
- trackscan-0.0.1/src/cell_track.egg-info/top_level.txt +1 -0
- trackscan-0.0.1/src/trackscan/__init__.py +0 -0
- trackscan-0.0.1/src/trackscan/cli.py +13 -0
- trackscan-0.0.1/src/trackscan/modules/analysis_utils.py +51 -0
- trackscan-0.0.1/src/trackscan/modules/curve_fitting.py +127 -0
- trackscan-0.0.1/src/trackscan/modules/io_utils.py +110 -0
- trackscan-0.0.1/src/trackscan/modules/linking_functions.py +153 -0
- trackscan-0.0.1/src/trackscan/modules/make_tracks.py +232 -0
- trackscan-0.0.1/src/trackscan/modules/msd_analysis.py +65 -0
- trackscan-0.0.1/src/trackscan/modules/track_manipulations.py +118 -0
- trackscan-0.0.1/src/trackscan/modules/visualization_utils.py +37 -0
- trackscan-0.0.1/src/trackscan.egg-info/PKG-INFO +44 -0
- trackscan-0.0.1/src/trackscan.egg-info/SOURCES.txt +24 -0
- trackscan-0.0.1/src/trackscan.egg-info/dependency_links.txt +1 -0
- trackscan-0.0.1/src/trackscan.egg-info/entry_points.txt +2 -0
- trackscan-0.0.1/src/trackscan.egg-info/requires.txt +2 -0
- trackscan-0.0.1/src/trackscan.egg-info/top_level.txt +1 -0
trackscan-0.0.1/PKG-INFO
ADDED
@@ -0,0 +1,44 @@
Metadata-Version: 2.4
Name: trackscan
Version: 0.0.1
Summary: Post-processing and analysis of cell track data
Author: Chris Viets
Project-URL: Homepage, https://github.com/cviets/trackscan
Project-URL: Bug Tracker, https://github.com/cviets/trackscan/issues
Requires-Python: >=3.6
Description-Content-Type: text/markdown
Requires-Dist: numpy
Requires-Dist: matplotlib

<h2 align="center">Track Scan</h2>
A Python package for post-processing and analyzing cell track data. Includes functionality for correcting automated tracking artifacts, measuring mean squared displacements, and measuring cell turning angles.

## Cite
```bibtex
@inbook{viets_measuring_2025,
    author = {Viets, Chris and Stevens, Corey A.},
    editor = {Brockhausen, Inka},
    title = {Measuring and Analyzing Bacterial Movement in Mucus},
    bookTitle = {Dynamics of Bacteria-Mucus Interactions},
    year = {2025},
    publisher = {Springer US},
    address = {New York, NY},
    pages = {187--197},
    isbn = {978-1-0716-4627-4},
    doi = {10.1007/978-1-0716-4627-4_16},
    url = {https://doi.org/10.1007/978-1-0716-4627-4_16}
}

```

## Installation
```
pip install trackscan
```

## Usage
The first step to using `trackscan` is to read in a CSV file containing track data (note that the CSV file must contain columns labeled "Position_X", "Position_Y", "Frame", and "Track_ID" -- not case-sensitive).
```
trackscan -i /path/to/track_data.csv
```
This command launches an interactive shell where the track data can be manipulated and measured. Once the interactive shell has appeared, simply type `?` to view the available commands. For example, the interactive shell contains commands to de-drift track data, correct artifacts arising from automated tracking, and measure mean squared displacement, turning angles, or mean cell speed.
trackscan-0.0.1/README.md
ADDED
@@ -0,0 +1,32 @@
<h2 align="center">Track Scan</h2>
A Python package for post-processing and analyzing cell track data. Includes functionality for correcting automated tracking artifacts, measuring mean squared displacements, and measuring cell turning angles.

## Cite
```bibtex
@inbook{viets_measuring_2025,
    author = {Viets, Chris and Stevens, Corey A.},
    editor = {Brockhausen, Inka},
    title = {Measuring and Analyzing Bacterial Movement in Mucus},
    bookTitle = {Dynamics of Bacteria-Mucus Interactions},
    year = {2025},
    publisher = {Springer US},
    address = {New York, NY},
    pages = {187--197},
    isbn = {978-1-0716-4627-4},
    doi = {10.1007/978-1-0716-4627-4_16},
    url = {https://doi.org/10.1007/978-1-0716-4627-4_16}
}

```

## Installation
```
pip install trackscan
```

## Usage
The first step to using `trackscan` is to read in a CSV file containing track data (note that the CSV file must contain columns labeled "Position_X", "Position_Y", "Frame", and "Track_ID" -- not case-sensitive).
```
trackscan -i /path/to/track_data.csv
```
This command launches an interactive shell where the track data can be manipulated and measured. Once the interactive shell has appeared, simply type `?` to view the available commands. For example, the interactive shell contains commands to de-drift track data, correct artifacts arising from automated tracking, and measure mean squared displacement, turning angles, or mean cell speed.
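
Note: as a concrete illustration of the input format the README describes, the sketch below writes a minimal CSV that `load_data_from_csv` (in `src/trackscan/modules/io_utils.py`) accepts. The values and path are hypothetical; column order is arbitrary, and the loader matches the four required headers case-insensitively.

```python
import csv

# Hypothetical two-track input file for trackscan.
rows = [
    ["Track_ID", "Position_X", "Position_Y", "Frame"],
    ["1", "0.0", "0.0", "0"],
    ["1", "1.2", "0.4", "1"],
    ["2", "5.0", "3.1", "0"],
    ["2", "5.5", "2.9", "1"],
]
with open("track_data.csv", "w", newline="") as fh:
    csv.writer(fh).writerows(rows)
```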
trackscan-0.0.1/pyproject.toml
ADDED
@@ -0,0 +1,24 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "trackscan"
version = "0.0.1"
description = "Post-processing and analysis of cell track data"
readme = "README.md"
requires-python = ">=3.6"
authors = [
    { name = "Chris Viets" }
]
dependencies = [
    "numpy",
    "matplotlib"
]

[project.urls]
"Homepage" = "https://github.com/cviets/trackscan"
"Bug Tracker" = "https://github.com/cviets/trackscan/issues"

[project.scripts]
trackscan = "trackscan.cli:main"
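
Note: the `[project.scripts]` table is what makes the `trackscan` command from the README's Usage section exist after `pip install`; the target must point at the shipped `src/trackscan/cli.py` module. Roughly, setuptools generates a small wrapper equivalent to the sketch below (not the literal script it writes). Also worth noting: the modules import `tqdm` and `scipy`, which are not declared in `dependencies`.

```python
# Approximate behavior of the generated `trackscan` console script (a sketch).
import sys
from trackscan.cli import main

if __name__ == "__main__":
    sys.exit(main())
```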
trackscan-0.0.1/src/cell_track.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,44 @@
Metadata-Version: 2.4
Name: cell_track
Version: 0.0.1
Summary: Post-processing and analysis of cell track data
Author: Chris Viets
Project-URL: Homepage, https://github.com/cviets/trackscan
Project-URL: Bug Tracker, https://github.com/cviets/trackscan/issues
Requires-Python: >=3.6
Description-Content-Type: text/markdown
Requires-Dist: numpy
Requires-Dist: matplotlib

<h2 align="center">Track Scan</h2>
A Python package for post-processing and analyzing cell track data. Includes functionality for correcting automated tracking artifacts, measuring mean squared displacements, and measuring cell turning angles.

## Cite
```bibtex
@inbook{viets_measuring_2025,
    author = {Viets, Chris and Stevens, Corey A.},
    editor = {Brockhausen, Inka},
    title = {Measuring and Analyzing Bacterial Movement in Mucus},
    bookTitle = {Dynamics of Bacteria-Mucus Interactions},
    year = {2025},
    publisher = {Springer US},
    address = {New York, NY},
    pages = {187--197},
    isbn = {978-1-0716-4627-4},
    doi = {10.1007/978-1-0716-4627-4_16},
    url = {https://doi.org/10.1007/978-1-0716-4627-4_16}
}

```

## Installation
```
pip install trackscan
```

## Usage
The first step to using `trackscan` is to read in a CSV file containing track data (note that the CSV file must contain columns labeled "Position_X", "Position_Y", "Frame", and "Track_ID" -- not case-sensitive).
```
trackscan -i /path/to/track_data.csv
```
This command launches an interactive shell where the track data can be manipulated and measured. Once the interactive shell has appeared, simply type `?` to view the available commands. For example, the interactive shell contains commands to de-drift track data, correct artifacts arising from automated tracking, and measure mean squared displacement, turning angles, or mean cell speed.
trackscan-0.0.1/src/cell_track.egg-info/SOURCES.txt
ADDED
@@ -0,0 +1,18 @@
README.md
pyproject.toml
src/cell_track.egg-info/PKG-INFO
src/cell_track.egg-info/SOURCES.txt
src/cell_track.egg-info/dependency_links.txt
src/cell_track.egg-info/entry_points.txt
src/cell_track.egg-info/requires.txt
src/cell_track.egg-info/top_level.txt
src/trackscan/__init__.py
src/trackscan/cli.py
src/trackscan/modules/analysis_utils.py
src/trackscan/modules/curve_fitting.py
src/trackscan/modules/io_utils.py
src/trackscan/modules/linking_functions.py
src/trackscan/modules/make_tracks.py
src/trackscan/modules/msd_analysis.py
src/trackscan/modules/track_manipulations.py
src/trackscan/modules/visualization_utils.py
trackscan-0.0.1/src/cell_track.egg-info/dependency_links.txt
ADDED
@@ -0,0 +1 @@

trackscan-0.0.1/src/cell_track.egg-info/top_level.txt
ADDED
@@ -0,0 +1 @@
trackscan
trackscan-0.0.1/src/trackscan/__init__.py
File without changes
trackscan-0.0.1/src/trackscan/cli.py
ADDED
@@ -0,0 +1,13 @@
import argparse
from .modules.make_tracks import Tracks

def main():
    parser = argparse.ArgumentParser(description="Cell track post-processing and analysis")
    parser.add_argument("-i", "--input", type=str, required=True, help="Path to CSV file containing tracking data.")
    args = parser.parse_args()

    tracks = Tracks(args.input)
    tracks.cmdloop()

if __name__ == '__main__':
    main()
trackscan-0.0.1/src/trackscan/modules/analysis_utils.py
ADDED
@@ -0,0 +1,51 @@
from typing import List, Dict, Union
import numpy as np
from numpy.typing import NDArray
from tqdm import tqdm

def turning_angle_analysis(tracks: List[Dict[str, Union[int, float]]], dt: int) -> NDArray[np.float64]:

    out = []
    for cell in tqdm(tracks, desc="Computing turning angles"):

        t = np.array(cell['t'])
        idx_t0, idx_t1 = get_timelag_indices(t, dt)
        vx = np.gradient(cell['x'], t)
        vy = np.gradient(cell['y'], t)

        velocities_final = np.array([vx[idx_t1], vy[idx_t1]]).T
        velocities_initial = np.array([vx[idx_t0], vy[idx_t0]]).T

        for i in range(len(velocities_final)):
            u = velocities_initial[i]
            v = velocities_final[i]

            # signed angle from u to v via the 2D cross and dot products
            angle = np.arctan2(u[0]*v[1] - u[1]*v[0], u[0]*v[0] + u[1]*v[1])
            out.append(angle)
    return np.array(out)*180/np.pi

def get_timelag_indices(t: NDArray[np.int_], tau: int):
    """
    Returns index pairs (indices_t0, indices_t1) such that
    t[indices_t1] - t[indices_t0] == tau. Assumes t is sorted.
    """
    t1_vals = t[t >= tau]
    indices_t1 = np.searchsorted(t, t1_vals)
    indices_t0 = np.searchsorted(t, t1_vals - tau)

    # keep only pairs where the earlier time point actually exists in t
    valid_indices = t[indices_t0] == t1_vals - tau
    indices_t0 = indices_t0[valid_indices]
    indices_t1 = indices_t1[valid_indices]

    return indices_t0, indices_t1

def get_ensemble_mean_speed(tracks: List[Dict[str, Union[int, float]]]) -> float:

    means = np.zeros(shape=len(tracks))
    for i, cell in tqdm(enumerate(tracks), desc="Computing mean speed", total=len(tracks)):
        t = np.array(cell['t'])
        vx = np.gradient(cell['x'], t)
        vy = np.gradient(cell['y'], t)
        speeds = np.sqrt(vx**2 + vy**2)
        means[i] = np.mean(speeds)

    return np.mean(means)
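
Note: a quick worked example of the signed-angle formula used in `turning_angle_analysis` (toy vectors, not package output):

```python
import numpy as np

# Signed turning angle from u (earlier velocity) to v (later velocity):
# atan2(2D cross product, dot product) lands in (-pi, pi], so after the
# degree conversion left turns are positive and right turns negative.
u = np.array([1.0, 0.0])  # heading along +x
v = np.array([0.0, 1.0])  # then along +y
angle = np.arctan2(u[0]*v[1] - u[1]*v[0], u[0]*v[0] + u[1]*v[1])
print(np.degrees(angle))  # 90.0
```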
trackscan-0.0.1/src/trackscan/modules/curve_fitting.py
ADDED
@@ -0,0 +1,127 @@
import numpy as np
from numpy.linalg import inv
from typing import List, Tuple
from numpy.typing import NDArray

def get_lsq_solution(n: int, f: List, t: List) -> Tuple[List, NDArray]:
    """
    Returns least squares n-degree polynomial fit to y = f(t).

    Parameters
    ----------
    n : int
        polynomial degree
    f : list
        y values
    t : list
        x values

    Returns
    -------
    beta : list
        fitted parameters, in ascending order of power
    cov : 2d array
        covariances of parameters
    """
    # normal-equations solve: beta = (A^T A)^-1 A^T f
    A = np.array([[j**i for i in range(n, -1, -1)] for j in t])
    AT = np.transpose(A)
    cov = inv(np.matmul(AT, A))
    beta = np.matmul(np.matmul(cov, AT), f)
    beta = list(reversed(beta))
    return beta, cov

def least_squares(n: int, f: List[int], t: List[int]):
    """
    Fit f, t to an n-degree polynomial.
    Returns the best-fit polynomial as a function of x.
    """
    beta, _ = get_lsq_solution(n, f, t)
    return lambda x: sum(beta[i]*x**i for i in range(len(beta)))

def least_squares_error(n, f, t):
    """
    Returns functions corresponding to +/- 1 sigma in the error of the least-squares
    n-degree polynomial fit to y = f(t). In other words, f(t) should fall between
    inf(t) and sup(t) most of the time. May not be super reliable.

    inf: function corresponding to 1 sigma below the best fit. inf(t) < fit(t) < sup(t)
    sup: function corresponding to 1 sigma above the best fit.
    """
    beta, cov = get_lsq_solution(n, f, t)
    F = lambda x: sum(beta[i]*x**i for i in range(len(beta)))

    S = sum((F(T) - f[i])**2 for i, T in enumerate(t))
    var_beta = list(reversed([cov[i][i]*S/(len(f)-n) for i in range(len(beta))]))
    sup = lambda x: sum((beta[i]+np.sqrt(var_beta[i]))*x**i
                        for i in range(len(beta)))
    inf = lambda x: sum((beta[i]-np.sqrt(var_beta[i]))*x**i
                        for i in range(len(beta)))

    return inf, sup

def fit_recent(f, t):
    """
    Returns a list that fits y = f(t) based on its previous values. The fit at
    index i is determined by fitting a parabola to the previous four values
    (f[i-4] through f[i-1]). I.e., fits f(t) using its most recent values.
    """
    assert len(f) >= 5, "f must have length >= 5 for accurate predictions"
    prediction = f[0:4]
    for i in range(len(f)-4):
        sub_f = f[i:i+4]
        sub_t = t[i:i+4]
        F = least_squares(2, sub_f, sub_t)
        prediction.append(F(t[i+4]))

    return prediction


def find_breakpoints(f, t):
    """
    Finds where f(t) changes abruptly, as determined by when it deviates significantly
    from the fit produced by fit_recent.

    Returns idx: set of indices where f changes abruptly.
    """
    F = fit_recent(f, t)
    residuals = [F[i]-f[i] for i in range(len(f))]
    # a zero residual would zero out the z-score denominators below, so bail out
    for i in range(4, len(residuals)):
        if residuals[i] == 0:
            return set()
    res2 = [r**2 for r in residuals]
    partial_sums = [res2[i] + res2[i+1] + res2[i+2] for i in range(len(res2) - 2)]
    ssr = sum(res2)
    expected_res2 = ssr/len(res2)
    exp_partial_sums = [0, 0, expected_res2, expected_res2*2]
    exp_partial_sums.extend([expected_res2*3 for _ in range(len(partial_sums)-4)])
    # standard deviation of each residual:
    rmsd = np.sqrt(ssr/(len(t) - 2))
    # standard deviations of the squared residuals:
    d_res2 = [2*abs(x)*rmsd for x in residuals]
    d_partial_sums = [np.sqrt(d_res2[i]**2 + d_res2[i+1]**2 + d_res2[i+2]**2) for i in range(len(res2) - 2)]
    z_scores = [(partial_sums[i] - exp_partial_sums[i])/d_partial_sums[i] for i in range(2, len(partial_sums))]
    res2_z_scores = [(res2[i] - expected_res2)/d_res2[i] for i in range(4, len(res2))]
    idx = {i+1 for i in range(2, len(z_scores)) if z_scores[i]>2 and res2_z_scores[i-2] > 2}

    # drop breakpoints that crowd within two indices of an earlier one
    removals = {i for i in idx if i-1 in idx or i-2 in idx}
    idx -= removals

    return idx

def get_squared_distance_between_curves(x, f, g):
    """
    For array-like x and functions f, g of x, returns sum((f(x) - g(x))**2)
    over all x in the specified interval.
    """
    return sum((f(X) - g(X))**2 for X in x)
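
Note: `get_lsq_solution` is the normal-equations solve with the coefficients reversed into ascending order, so it should agree with `numpy.polyfit` up to that ordering. A small sanity check (hypothetical data; the import assumes the package is installed):

```python
import numpy as np
from trackscan.modules.curve_fitting import get_lsq_solution

# Noiseless quadratic data: y = 2 + 3t + 0.5t^2
t = [0, 1, 2, 3, 4]
f = [2 + 3*j + 0.5*j**2 for j in t]

beta, cov = get_lsq_solution(2, f, t)
print(beta)                 # ~[2.0, 3.0, 0.5] (ascending powers)
print(np.polyfit(t, f, 2))  # ~[0.5, 3.0, 2.0] (same fit, descending powers)
```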
trackscan-0.0.1/src/trackscan/modules/io_utils.py
ADDED
@@ -0,0 +1,110 @@
import csv
from typing import Dict, Union, List
from tqdm import tqdm
import os
from .track_manipulations import sort_track_by_time
from numpy.typing import NDArray
import numpy as np

def load_data_from_csv(filename: str) -> List[Dict[str, Union[float, int]]]:

    with open(os.path.expanduser(filename), newline='') as csvfile:

        csv_read = csv.reader(csvfile, delimiter=',')

        prev_cell = None
        cur_dict = {'x': [], 'y': [], 't': []}
        tracks = []

        header = next(csv_read)
        header = [elt.upper() for elt in header]
        assert "POSITION_X" in header, "File header must contain `POSITION_X` column"
        assert "POSITION_Y" in header, "File header must contain `POSITION_Y` column"
        assert "FRAME" in header, "File must contain `FRAME` column"
        assert "TRACK_ID" in header, "File must contain `TRACK_ID` column"

        counter = [0, 0, 0, 0]
        for i, label in enumerate(header):
            if label == "POSITION_X":
                x_column = i
                counter[0] += 1
            elif label == "POSITION_Y":
                y_column = i
                counter[1] += 1
            elif label == "FRAME":
                t_column = i
                counter[2] += 1
            elif label == "TRACK_ID":
                cell_column = i
                counter[3] += 1

        assert counter == [1, 1, 1, 1], "Position_X, Position_Y, Frame, and Track_ID columns must each appear exactly once"

        for i, row in tqdm(enumerate(csv_read)):

            # skip rows whose x-value is empty or not a plain decimal number
            if row[x_column] == '' or not all([elt in '0123456789,. ' for elt in row[x_column]]):
                continue

            cur_cell = row[cell_column]
            x = row[x_column]
            y = row[y_column]
            t = row[t_column]

            # a new TRACK_ID marks the start of the next track
            if cur_cell != prev_cell and prev_cell is not None:
                sort_track_by_time(cur_dict)
                tracks.append(cur_dict)
                cur_dict = {'x': [], 'y': [], 't': []}

            cur_dict['x'].append(float(x))
            cur_dict['y'].append(float(y))
            cur_dict['t'].append(int(float(t)))

            prev_cell = cur_cell

    sort_track_by_time(cur_dict)
    tracks.append(cur_dict)

    return tracks

def save_track_data(track_data, filename: str, label=None) -> None:
    with open(os.path.expanduser(filename), 'w', newline='') as csvfile:
        csv_write = csv.writer(csvfile)
        header = ['TRACK_ID', 'POSITION_X', 'POSITION_Y', 'FRAME']

        csv_write.writerow(header)
        cur_cell = 0
        for cell in tqdm(track_data):
            cur_cell += 1
            for i, x in enumerate(cell['x']):
                newrow = [''] * 4
                if label is None:
                    newrow[0] = str(cur_cell)
                else:
                    newrow[0] = label
                newrow[1] = x
                newrow[2] = cell['y'][i]
                newrow[3] = cell['t'][i]

                csv_write.writerow(newrow)

def save_MSD_data(filename: str, t: List[int], y: List[float], dy: List[float]) -> None:

    assert len(t) == len(y), "Input lists must have same length"
    assert len(t) == len(dy), "Input lists must have same length"

    with open(os.path.expanduser(filename), 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(["Time Lag", "MSD", "Error_MSD"])
        for i, elt in tqdm(enumerate(t)):
            writer.writerow([str(elt), str(y[i]), str(dy[i])])

def save_turning_angles_data(filename: str, data: NDArray[np.float64], dt: int) -> None:
    with open(os.path.expanduser(filename), 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow([f"Turning angles [deg], delta_t={dt} frames"])
        for elt in tqdm(data):
            writer.writerow([str(elt)])
trackscan-0.0.1/src/trackscan/modules/linking_functions.py
ADDED
@@ -0,0 +1,153 @@
import numpy as np
from .curve_fitting import least_squares
from typing import Dict, List
"""
Start by finding candidates (newborn cells) to link to each cell when it dies.
`linking_candidates` is a dict: {cell_id: {set of cell ids}}.
Keys indicate a cell that has died, and the value contains all cells that are candidates to link to the key,
depending on whether they meet the max_time_gap and max_distance requirements.
No values in the dict are empty sets.
"""

def get_linking_candidates(track_dict: List[Dict], max_time_gap: int, max_distance: float):
    """
    Generates candidates of tracks to link to each track using criteria specified by parameters
    """
    linking_candidates = {i: set() for i in range(len(track_dict))}

    t_all = sorted({T for cell in track_dict for T in cell['t']})
    queue = {time_gap: set() for time_gap in t_all
             if time_gap <= max_time_gap}

    remaining_cells = {i for i in range(len(track_dict))}
    for t in t_all:

        # add newly dead cells to queue.
        # Remove checked cells from remaining_cells
        queue[0] = {cell_id for cell_id in remaining_cells
                    if max(track_dict[cell_id]['t']) == t}
        remaining_cells -= queue[0]

        # for each cell in the queue, check for newborns at distance <= max_distance
        newborns = {cell_id for cell_id in remaining_cells
                    if min(track_dict[cell_id]['t']) == t}
        cells_currently_in_queue = {c for cell_set in queue.values()
                                    for c in cell_set}
        for dead_cell_id in cells_currently_in_queue:
            dead_cell = track_dict[dead_cell_id]
            death_point = (dead_cell['x'][-1], dead_cell['y'][-1])
            for newborn_cell_id in newborns:
                newborn_cell = track_dict[newborn_cell_id]
                spawn_point = (newborn_cell['x'][0], newborn_cell['y'][0])
                dist = np.sqrt((spawn_point[0] - death_point[0])**2 +
                               (spawn_point[1] - death_point[1])**2)
                if dist <= max_distance:
                    linking_candidates[dead_cell_id].add(newborn_cell_id)
        # update queue such that queue[n+1] = queue[n], queue[0] = set()
        time_gaps = sorted(queue.keys(), reverse=True)
        for idx, time_gap in enumerate(time_gaps[:-1]):
            queue[time_gap] = queue[time_gaps[idx+1]]
        queue[0] = set()
    linking_candidates = {key: val
                          for key, val in linking_candidates.items()
                          if val != set()}

    return linking_candidates

def choose_linking_partners(linking_candidates, track_dict):
    """
    Chooses the best track to link using parabolic least squares fits to both tracks.
    """
    link_dict = {}
    for dead_cell_id in linking_candidates:
        min_dist = -1
        best_candidate = -1
        cell_1 = track_dict[dead_cell_id]

        # fit the last (up to) four points of the track to a quadratic
        if len(cell_1['t']) >= 3:
            t1_fit = cell_1['t'][-4:]
            x1 = least_squares(2, cell_1['x'][-4:], t1_fit)
            y1 = least_squares(2, cell_1['y'][-4:], t1_fit)
        else:
            x1 = least_squares(1, cell_1['x'], cell_1['t'])
            y1 = least_squares(1, cell_1['y'], cell_1['t'])

        for spawn_cell_id in linking_candidates[dead_cell_id]:

            cell_2 = track_dict[spawn_cell_id]

            if len(cell_2['t']) >= 3:
                t2_fit = cell_2['t'][:4]
                x2 = least_squares(2, cell_2['x'][:4], t2_fit)
                y2 = least_squares(2, cell_2['y'][:4], t2_fit)
            else:
                x2 = least_squares(1, cell_2['x'], cell_2['t'])
                y2 = least_squares(1, cell_2['y'], cell_2['t'])

            # interpolation window runs from the dead cell's last frame (t_min)
            # to the newborn's first frame (t_max)
            t_max = min(cell_2['t'])
            t_min = max(cell_1['t'])

            t_all = {T for cell in track_dict for T in cell['t']
                     if T <= t_max and T >= t_min}

            # mean distance between the two extrapolated curves over the window
            dist = sum(np.sqrt((x1(t) - x2(t))**2 + (y1(t) - y2(t))**2)
                       for t in t_all)/len(t_all)

            if dist < min_dist or min_dist == -1:
                best_candidate = spawn_cell_id
                min_dist = dist
                interpolation_domain = sorted(t_all)
                x2_best = x2
                y2_best = y2

        link_dict[dead_cell_id] = (best_candidate, interpolation_domain,
                                   (x1, y1), (x2_best, y2_best))

    return link_dict

def link_partners(link_dict, track_dict):
    """
    Interpolates the space between tracks, links them, and deletes the old one.
    """

    removals = set()

    while link_dict:
        key_removals = set()
        for key, (val, t_domain, (x1, y1), (x2, y2)) in link_dict.items():
            if val not in link_dict:

                dead_cell = track_dict[key]
                new_cell = track_dict[val]

                t_max = t_domain[-1]
                t_min = t_domain[0]

                # blend the two fitted curves linearly across the gap
                if t_max != t_min:
                    weight = lambda t: (t - t_min)/(t_max - t_min)
                else:
                    weight = lambda t: 0.5
                X = lambda t: (1 - weight(t)) * x1(t) + weight(t) * x2(t)
                Y = lambda t: (1 - weight(t)) * y1(t) + weight(t) * y2(t)

                dead_cell['t'] = dead_cell['t'][:-1]
                dead_cell['x'] = dead_cell['x'][:-1]
                dead_cell['y'] = dead_cell['y'][:-1]
                dead_cell['t'].extend(t_domain)
                dead_cell['x'].extend([X(t) for t in t_domain])
                dead_cell['y'].extend([Y(t) for t in t_domain])
                dead_cell['t'].extend(new_cell['t'][1:])
                dead_cell['x'].extend(new_cell['x'][1:])
                dead_cell['y'].extend(new_cell['y'][1:])

                key_removals.add(key)
                removals.add(val)
        for key in key_removals:
            del link_dict[key]

    for idx in sorted(removals, reverse=True):
        del track_dict[idx]
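
Note: a toy run of the three-stage pipeline (find candidates, choose the best partner, merge) on two hand-made tracks that meet end to end; coordinates are hypothetical:

```python
from trackscan.modules.linking_functions import (
    get_linking_candidates, choose_linking_partners, link_partners)

tracks = [
    {'x': [0.0, 1.0, 2.0], 'y': [0.0, 0.0, 0.0], 't': [0, 1, 2]},  # dies at t=2
    {'x': [2.5, 3.5, 4.5], 'y': [0.1, 0.1, 0.1], 't': [2, 3, 4]},  # born at t=2
]

cands = get_linking_candidates(tracks, max_time_gap=3, max_distance=10.0)
print(cands)  # {0: {1}} -- track 1 is the only candidate for track 0

link_partners(choose_linking_partners(cands, tracks), tracks)
print(len(tracks), tracks[0]['t'])  # 1 [0, 1, 2, 3, 4] -- merged in place
```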
trackscan-0.0.1/src/trackscan/modules/make_tracks.py
ADDED
@@ -0,0 +1,232 @@
from .linking_functions import get_linking_candidates, choose_linking_partners, link_partners
from .io_utils import load_data_from_csv, save_track_data, save_MSD_data, save_turning_angles_data
from .track_manipulations import scale_space, print_first_n, dedrift, split_tracks
from .msd_analysis import msd, fit_msd
from .analysis_utils import turning_angle_analysis, get_ensemble_mean_speed
from .visualization_utils import plot_msd, plot_turning_angles
import argparse
import os
import cmd
import shlex

class Tracks(cmd.Cmd):
    intro = "Welcome to the trackscan command line interface for cell track post-processing and analysis.\n" \
            "Type help or ? to list commands.\n"
    prompt = "(trackscan) "

    def __init__(self, filename: str):
        """
        Tracks object has filename and tracks attributes.

        filename: file containing track data (must be .csv)
        tracks: list of dicts with keys 'x', 'y', 't' and values list of x, y,
            and t data. Each dict represents one cell's track.
        """
        super().__init__()
        self.tracks = load_data_from_csv(filename)
        self.filename = filename

    def do_scale_space(self, arg: str) -> None:
        """Scale x and y data by a constant factor. Type scale_space -h to see available options."""

        parser = argparse.ArgumentParser(prog="scale_space", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-c", "--constant", default=1, type=float, help="Scale x and y data by a constant factor")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        scale_space(self.tracks, args.constant)

    def do_show(self, arg: str) -> None:
        """Print the first n lines of track data. Type show -h to see available options."""
        parser = argparse.ArgumentParser(prog="show", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-n", "--n", default=5, type=int, help="Print the first n lines of track data.")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        print_first_n(self.tracks, args.n)

    def do_save(self, arg: str) -> None:
        """Save cell track data in CSV format. Type save -h to see available options."""

        root_name, ext = os.path.splitext(self.filename)
        default_filename = root_name + "_PROCESSED" + ext

        parser = argparse.ArgumentParser(prog="save", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-o", "--out", default=default_filename, type=str, help="Path to CSV file to save data")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        save_track_data(self.tracks, args.out)

    def do_dedrift(self, arg: str) -> None:
        """Dedrift track data by cell-averaged velocity at each timepoint. Type dedrift -h to see available options."""

        parser = argparse.ArgumentParser(prog="dedrift", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-s", "--save", action="store_true", help="Whether to save the drift velocities")
        parser.add_argument("-o", "--out", type=str, default="", help="Path to CSV file to save drift velocities if --save is given")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        drift_velocities = dedrift(self.tracks)
        if args.save:
            save_track_data([drift_velocities], args.out, label="DRIFT")

    def do_link(self, arg: str) -> None:
        """
        Links tracks together by identifying candidates and choosing the best one.
        """

        parser = argparse.ArgumentParser(prog="link", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-t", "--time", type=int, default=3, help="Max time gap to consider linking two tracks")
        parser.add_argument("-d", "--dist", type=float, default=10, help="Max distance to consider linking two tracks")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        linking_candidates = get_linking_candidates(self.tracks, args.time, args.dist)
        link_dict = choose_linking_partners(linking_candidates, self.tracks)
        link_partners(link_dict, self.tracks)

    def do_split(self, arg: str) -> None:
        """
        Splits tracks where abrupt changes in direction occur.
        """
        parser = argparse.ArgumentParser(prog="split", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        split_tracks(self.tracks)

    def do_correct_artifacts(self, arg: str) -> None:
        """
        Fix tracking artifacts by first splitting tracks at locations of artifacts, then linking tracks correctly
        """

        parser = argparse.ArgumentParser(prog="correct_artifacts", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-t", "--time", type=int, default=3, help="Max time gap to consider linking two tracks")
        parser.add_argument("-d", "--dist", type=float, default=10, help="Max distance to consider linking two tracks")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        print("Splitting tracks...")
        split_tracks(self.tracks)
        print("Linking tracks...")
        linking_candidates = get_linking_candidates(self.tracks, args.time, args.dist)
        link_dict = choose_linking_partners(linking_candidates, self.tracks)
        link_partners(link_dict, self.tracks)

    def do_MSD_analysis(self, arg: str) -> None:
        """Compute mean squared displacements"""

        root_name, ext = os.path.splitext(self.filename)
        default_filename = root_name + "_MSD" + ext

        parser = argparse.ArgumentParser(prog="MSD_analysis", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-o", "--out", type=str, default=default_filename, help="Path to CSV file to save MSD data")
        parser.add_argument("-p", "--plot", action="store_true", help="Whether to output a plot of MSD data")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        t, y, dy = msd(self.tracks)
        save_MSD_data(args.out, t, y, dy)

        if args.plot:
            root_name, ext = os.path.splitext(args.out)
            save_to = root_name + "_MSD_plot" + ".png"

            (a, d), (a_err, D_err), r2 = fit_msd(t, y, (1, len(t)//2))
            plot_msd(save_to, t, y, dy, a, d, a_err, D_err, r2)

    def do_turning_angle(self, arg: str) -> None:
        """
        Compute turning angles across a specified time lag
        """

        root_name, ext = os.path.splitext(self.filename)
        default_filename = root_name + "_turning_angles" + ext

        parser = argparse.ArgumentParser(prog="turning_angle", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-t", "--time", type=int, default=10, help="Time lag to calculate turning angles")
        parser.add_argument("-o", "--out", type=str, default=default_filename, help="Path to CSV file to save turning angle data")
        parser.add_argument("-p", "--plot", action="store_true", help="Whether to output a plot of turning angle data")
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return

        if args.time <= 0:
            print("turning_angle: time must be positive")
            return

        turning_angles = turning_angle_analysis(self.tracks, args.time)
        save_turning_angles_data(args.out, turning_angles, args.time)

        if args.plot:
            root_name, ext = os.path.splitext(args.out)
            save_to = root_name + "_turning_angles_plot" + ".png"
            plot_turning_angles(save_to, turning_angles)

    def do_mean_speed(self, arg: str) -> None:
        """Measure and print ensemble mean cell speed"""

        parser = argparse.ArgumentParser(prog="mean_speed", add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-h", "--help", action="help", help="Show this message")

        try:
            args = parser.parse_args(shlex.split(arg))
        except SystemExit:
            return
        print(get_ensemble_mean_speed(self.tracks))

    def do_exit(self, arg: str) -> bool:
        """Exit the program"""
        return True

    def get_cell_mean_stepsize(self, cell):
        # mean Euclidean step length between consecutive points of one track
        dists = []
        for i in range(1, len(cell['t'])):
            cur_x = cell['x'][i]
            prev_x = cell['x'][i-1]

            cur_y = cell['y'][i]
            prev_y = cell['y'][i-1]
            dists.append(((cur_x - prev_x)**2 + (cur_y - prev_y)**2)**(1/2))

        return sum(dists)/len(dists)
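
Note: `Tracks` subclasses `cmd.Cmd`, so each `do_*` method becomes an interactive command and its docstring becomes the `?` help text. A hypothetical session (file names invented, command output elided):

```
$ trackscan -i track_data.csv
Welcome to the trackscan command line interface for cell track post-processing and analysis.
Type help or ? to list commands.

(trackscan) show -n 3
...
(trackscan) dedrift
(trackscan) correct_artifacts -t 3 -d 10
(trackscan) mean_speed
...
(trackscan) save -o track_data_clean.csv
(trackscan) exit
```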
trackscan-0.0.1/src/trackscan/modules/msd_analysis.py
ADDED
@@ -0,0 +1,65 @@
from scipy.stats import linregress
import numpy as np
from typing import List, Sequence
from tqdm import tqdm
from .analysis_utils import get_timelag_indices

def msd_fit(x, alpha, D):
    # power-law MSD model for 2D diffusion: MSD(tau) = 4*D*tau**alpha
    return 4*D*x**alpha

def msd(track_dict):

    # get all possible tau values
    tau = set()
    for cell in track_dict:
        tau.update({t - cell['t'][0] for t in cell['t']})
    tau = sorted(tau)
    raw_data = {T: [] for T in tau}

    for cell in tqdm(track_dict, desc="Computing MSDs"):

        for tau_ in tau:

            t = np.array(cell['t'])
            indices_t0, indices_t1 = get_timelag_indices(t, tau_)

            x = np.array(cell['x'])
            y = np.array(cell['y'])
            dx = x[indices_t1] - x[indices_t0]
            dy = y[indices_t1] - y[indices_t0]
            squared_disp = dx**2 + dy**2

            raw_data[tau_].extend(squared_disp)

    MSDs = [np.mean(raw_data[tau_]) for tau_ in tau]
    dMSDs = [np.std(raw_data[tau_])/np.sqrt(len(raw_data[tau_])) for tau_ in tau]
    # hack to avoid infinite error at zeroth timepoint
    dMSDs[0] = dMSDs[1]

    return tau, MSDs, dMSDs


def fit_msd(t: List[int], y: List[float], fit_window: Sequence[int]):

    for i in range(1, len(t)):
        assert t[i] > t[i-1], "Time data must be sorted."

    [fit_from, fit_to] = fit_window

    # fit the power law on log-log axes: log(MSD) = alpha*log(t) + log(4*D)
    xfit = np.log(t[fit_from:fit_to])
    yfit = np.log(y[fit_from:fit_to])

    LinReg = linregress(xfit, yfit)
    alpha = LinReg.slope
    diffusivity = np.exp(LinReg.intercept)/4

    alpha_err = LinReg.stderr
    diffusivity_err = diffusivity*LinReg.intercept_stderr

    return (alpha, diffusivity), (alpha_err, diffusivity_err), LinReg.rvalue**2
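
Note: `fit_msd` linearizes the power-law model `MSD(tau) = 4*D*tau**alpha` by taking logs, so the regression slope is alpha and `exp(intercept)/4` recovers D. A synthetic check of that algebra (toy values, not package output):

```python
import numpy as np
from scipy.stats import linregress

alpha_true, D_true = 0.8, 2.0
tau = np.arange(1, 50)
msd_vals = 4 * D_true * tau**alpha_true  # noiseless power-law "data"

reg = linregress(np.log(tau), np.log(msd_vals))
print(reg.slope)                  # ~0.8  -> alpha
print(np.exp(reg.intercept) / 4)  # ~2.0  -> D
```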
trackscan-0.0.1/src/trackscan/modules/track_manipulations.py
ADDED
@@ -0,0 +1,118 @@
from typing import Dict, List, Union, Tuple
import numpy as np
from .curve_fitting import find_breakpoints

def scale_space(track_data: List[Dict[str, Union[int, float]]], factor: float) -> None:
    """
    Scale all x and y values by factor.
    """
    for cell in track_data:
        cell['x'] = [x*factor for x in cell['x']]
        cell['y'] = [y*factor for y in cell['y']]

def print_first_n(track_data: List[Dict[str, Union[int, float]]], n: int=5) -> None:
    count = 0
    cell_num = 0
    idx = 0

    header = ["Track ID", "Position_X", "Position_Y", "Frame"]

    print(f"{header[0]:^12}", "|", f"{header[1]:^12}", "|", f"{header[2]:^12}", "|", f"{header[3]:^12}")
    while count < n:
        cur_cell = track_data[cell_num]
        if idx >= len(cur_cell['x']):
            idx = 0
            cell_num += 1
            cur_cell = track_data[cell_num]

        print(f"{cell_num:<12}", "|", f"{cur_cell['x'][idx]:<12.2f}", "|", f"{cur_cell['y'][idx]:<12.2f}", "|", f"{cur_cell['t'][idx]:<12}")
        idx += 1
        count += 1

def sort_track_by_time(track_dict: Dict[str, Union[int, float]]) -> None:
    """
    Track data is often not ordered chronologically.
    This function ensures that position and time data are chronologically ordered.
    Acts in place.
    """
    i_sort = np.argsort(track_dict['t'])
    track_dict['t'] = list(np.array(track_dict['t'])[i_sort])
    track_dict['x'] = list(np.array(track_dict['x'])[i_sort])
    track_dict['y'] = list(np.array(track_dict['y'])[i_sort])

def dedrift(track_data: List[Dict[str, Union[int, float]]]) -> Dict[str, List]:
    """
    Mutates track_data to dedrift all tracks using mean velocity at every
    time point. Returns mean velocity at every time point as a track dict.
    """
    velocities = {t: [] for cell in track_data for t in cell['t']}
    del velocities[min(velocities.keys())]

    for cell in track_data:
        for i in range(1, len(cell['t'])):

            dx = cell['x'][i] - cell['x'][i-1]
            dy = cell['y'][i] - cell['y'][i-1]

            velocities[cell['t'][i]].append((dx, dy))

    # get mean velocity at each available time point
    vbar = {t: (sum(elt[0] for elt in velocities[t])/len(velocities[t]),
                sum(elt[1] for elt in velocities[t])/len(velocities[t]))
            for t in velocities}

    # subtract summed drift velocities from each available timepoint
    for cell in track_data:
        for i, t in enumerate(cell['t']):
            if i == 0:
                continue
            cell['x'][i] -= sum(vbar[T][0] for T in cell['t'][1:i+1])
            cell['y'][i] -= sum(vbar[T][1] for T in cell['t'][1:i+1])

    out = {'x': [], 'y': [], 't': []}
    for key, val in vbar.items():
        out['t'].append(int(key))
        out['x'].append(val[0])
        out['y'].append(val[1])
    sort_track_by_time(out)
    return out

def split_tracks(track_data: List[Dict[str, Union[int, float]]]) -> None:
    """
    Splits tracks where abrupt changes in direction occur. Acts in place.
    """

    new_tracks = []

    for i, cell in enumerate(track_data):

        X = cell['x']
        if len(X) <= 4:
            continue

        Y = cell['y']
        T = cell['t']

        x_brks = find_breakpoints(X, T)
        y_brks = find_breakpoints(Y, T)

        brks = x_brks.union(y_brks)

        # drop breakpoints that crowd within two indices of an earlier one
        removals = set()
        for idx in brks:
            if idx - 1 in brks or idx - 2 in brks:
                removals.add(idx)
        brks -= removals

        brks = sorted(brks, reverse=True)

        for idx in brks:
            assert len(cell['x'][idx + 2:]) > 1, f"{i=}"
            new_tracks.append({'x': cell['x'][idx + 2:],
                               'y': cell['y'][idx + 2:],
                               't': cell['t'][idx + 2:]})
            cell['t'] = cell['t'][:idx+1]
            cell['x'] = cell['x'][:idx+1]
            cell['y'] = cell['y'][:idx+1]

    track_data.extend(new_tracks)
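
Note: `dedrift` removes motion shared by all cells: it averages the frame-to-frame displacement over cells at each time point, then subtracts the cumulative average from every position, so uniform drift vanishes while relative motion survives. A toy check (hypothetical data):

```python
from trackscan.modules.track_manipulations import dedrift

# Two cells drifting together at +1 in x per frame.
tracks = [
    {'x': [0.0, 1.0, 2.0], 'y': [0.0, 0.0, 0.0], 't': [0, 1, 2]},
    {'x': [5.0, 6.0, 7.0], 'y': [1.0, 1.0, 1.0], 't': [0, 1, 2]},
]

drift = dedrift(tracks)
print(drift['x'])      # [1.0, 1.0] -- mean displacement at t=1 and t=2
print(tracks[0]['x'])  # [0.0, 0.0, 0.0] -- drift removed in place
```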
trackscan-0.0.1/src/trackscan/modules/visualization_utils.py
ADDED
@@ -0,0 +1,37 @@
import matplotlib.pyplot as plt
import os
import numpy as np
from typing import List
from .msd_analysis import msd_fit
from numpy.typing import NDArray

def plot_msd(filename: str, t: List[int], y: List[float], dy: List[float], alpha: float, D: float, a_err: float, D_err: float, r2: float) -> None:

    fig, axs = plt.subplots(1, 1, figsize=(10, 10))

    axs.errorbar(t[1:], y[1:], yerr=dy[1:])
    axs.set_xscale('log')
    axs.set_yscale('log')

    t_all = np.logspace(np.log10(t[1]), np.log10(t[-1]), 100)
    y_fit = [msd_fit(tau, alpha, D) for tau in t_all]

    axs.plot(t_all, y_fit, ls="--")
    axs.axvline(len(t)//2, ls="--", color='black')
    axs.text(0.05, 0.9, f"alpha = {alpha:.2f} +/- {a_err:.2f}\nD = {D:.2f} +/- {D_err:.2f}\nr^2 = {r2:.3f}", transform=axs.transAxes)

    fig.savefig(os.path.expanduser(filename), dpi=400, bbox_inches='tight')

    return None

def plot_turning_angles(filename: str, angles: NDArray[np.float64]):

    fig, axs = plt.subplots(1, 1, figsize=(10, 10))
    counts, bins = np.histogram(angles, bins='auto', density=True)
    bin_centers = (bins[:-1] + bins[1:]) / 2
    axs.scatter(bin_centers, counts)
    axs.set_xlim(-185, 185)

    fig.savefig(os.path.expanduser(filename), dpi=400, bbox_inches='tight')

    return None
trackscan-0.0.1/src/trackscan.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,44 @@
Metadata-Version: 2.4
Name: trackscan
Version: 0.0.1
Summary: Post-processing and analysis of cell track data
Author: Chris Viets
Project-URL: Homepage, https://github.com/cviets/trackscan
Project-URL: Bug Tracker, https://github.com/cviets/trackscan/issues
Requires-Python: >=3.6
Description-Content-Type: text/markdown
Requires-Dist: numpy
Requires-Dist: matplotlib

<h2 align="center">Track Scan</h2>
A Python package for post-processing and analyzing cell track data. Includes functionality for correcting automated tracking artifacts, measuring mean squared displacements, and measuring cell turning angles.

## Cite
```bibtex
@inbook{viets_measuring_2025,
    author = {Viets, Chris and Stevens, Corey A.},
    editor = {Brockhausen, Inka},
    title = {Measuring and Analyzing Bacterial Movement in Mucus},
    bookTitle = {Dynamics of Bacteria-Mucus Interactions},
    year = {2025},
    publisher = {Springer US},
    address = {New York, NY},
    pages = {187--197},
    isbn = {978-1-0716-4627-4},
    doi = {10.1007/978-1-0716-4627-4_16},
    url = {https://doi.org/10.1007/978-1-0716-4627-4_16}
}

```

## Installation
```
pip install trackscan
```

## Usage
The first step to using `trackscan` is to read in a CSV file containing track data (note that the CSV file must contain columns labeled "Position_X", "Position_Y", "Frame", and "Track_ID" -- not case-sensitive).
```
trackscan -i /path/to/track_data.csv
```
This command launches an interactive shell where the track data can be manipulated and measured. Once the interactive shell has appeared, simply type `?` to view the available commands. For example, the interactive shell contains commands to de-drift track data, correct artifacts arising from automated tracking, and measure mean squared displacement, turning angles, or mean cell speed.
trackscan-0.0.1/src/trackscan.egg-info/SOURCES.txt
ADDED
@@ -0,0 +1,24 @@
README.md
pyproject.toml
src/cell_track.egg-info/PKG-INFO
src/cell_track.egg-info/SOURCES.txt
src/cell_track.egg-info/dependency_links.txt
src/cell_track.egg-info/entry_points.txt
src/cell_track.egg-info/requires.txt
src/cell_track.egg-info/top_level.txt
src/trackscan/__init__.py
src/trackscan/cli.py
src/trackscan.egg-info/PKG-INFO
src/trackscan.egg-info/SOURCES.txt
src/trackscan.egg-info/dependency_links.txt
src/trackscan.egg-info/entry_points.txt
src/trackscan.egg-info/requires.txt
src/trackscan.egg-info/top_level.txt
src/trackscan/modules/analysis_utils.py
src/trackscan/modules/curve_fitting.py
src/trackscan/modules/io_utils.py
src/trackscan/modules/linking_functions.py
src/trackscan/modules/make_tracks.py
src/trackscan/modules/msd_analysis.py
src/trackscan/modules/track_manipulations.py
src/trackscan/modules/visualization_utils.py
trackscan-0.0.1/src/trackscan.egg-info/dependency_links.txt
ADDED
@@ -0,0 +1 @@

trackscan-0.0.1/src/trackscan.egg-info/top_level.txt
ADDED
@@ -0,0 +1 @@
trackscan