ETDQualitizer 0.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Diederick Niehorster
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ include requirements.txt
@@ -0,0 +1,56 @@
1
+ Metadata-Version: 2.4
2
+ Name: ETDQualitizer
3
+ Version: 0.6.0
4
+ Summary: Automated eye tracking data quality determination for screen-based eye trackers.
5
+ Home-page: https://github.com/dcnieho/ETDQualitizer
6
+ Author: Diederick Niehorster
7
+ Author-email: diederick_c.niehorster@humlab.lu.se
8
+ License: MIT License
9
+
10
+ Copyright (c) 2024 Diederick Niehorster
11
+
12
+ Permission is hereby granted, free of charge, to any person obtaining a copy
13
+ of this software and associated documentation files (the "Software"), to deal
14
+ in the Software without restriction, including without limitation the rights
15
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16
+ copies of the Software, and to permit persons to whom the Software is
17
+ furnished to do so, subject to the following conditions:
18
+
19
+ The above copyright notice and this permission notice shall be included in all
20
+ copies or substantial portions of the Software.
21
+
22
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
28
+ SOFTWARE.
29
+
30
+ Project-URL: Source Code, https://github.com/dcnieho/ETDQualitizer
31
+ Classifier: Programming Language :: Python :: 3.10
32
+ Classifier: License :: OSI Approved :: MIT License
33
+ Classifier: Operating System :: OS Independent
34
+ Requires-Python: >=3.10
35
+ Description-Content-Type: text/markdown
36
+ License-File: LICENSE
37
+ Requires-Dist: numpy
38
+ Requires-Dist: pandas
39
+ Dynamic: author
40
+ Dynamic: author-email
41
+ Dynamic: classifier
42
+ Dynamic: description
43
+ Dynamic: description-content-type
44
+ Dynamic: home-page
45
+ Dynamic: license
46
+ Dynamic: license-file
47
+ Dynamic: project-url
48
+ Dynamic: requires-dist
49
+ Dynamic: requires-python
50
+ Dynamic: summary
51
+
52
+ [![Downloads](https://static.pepy.tech/badge/ETDQualitizer)](https://pepy.tech/project/ETDQualitizer)
53
+ [![PyPI Latest Release](https://img.shields.io/pypi/v/ETDQualitizer.svg)](https://pypi.org/project/ETDQualitizer/)
54
+ [![image](https://img.shields.io/pypi/pyversions/ETDQualitizer.svg)](https://pypi.org/project/ETDQualitizer/)
55
+
56
+ # ETDQualitizer v0.6.0
@@ -0,0 +1,5 @@
1
+ [![Downloads](https://static.pepy.tech/badge/ETDQualitizer)](https://pepy.tech/project/ETDQualitizer)
2
+ [![PyPI Latest Release](https://img.shields.io/pypi/v/ETDQualitizer.svg)](https://pypi.org/project/ETDQualitizer/)
3
+ [![image](https://img.shields.io/pypi/pyversions/ETDQualitizer.svg)](https://pypi.org/project/ETDQualitizer/)
4
+
5
+ # ETDQualitizer v0.6.0
@@ -0,0 +1,6 @@
1
+ [build-system]
2
+ requires = [
3
+ "setuptools>=42",
4
+ "wheel"
5
+ ]
6
+ build-backend = "setuptools.build_meta"
@@ -0,0 +1,214 @@
1
+ import pandas as pd
2
+ import numpy as np
3
+ import typing
4
+
5
+ N = typing.TypeVar("N", bound=int)
6
+
7
class ScreenConfiguration:
    """Physical and pixel geometry of a screen-based eye-tracking setup.

    Holds the physical screen size (mm), its resolution (pixels) and the
    viewing distance (mm), and converts on-screen positions from pixels to
    millimeters and to degrees (expressed as Fick angles).
    """

    def __init__(self,
                 screen_size_x_mm: float, screen_size_y_mm: float,
                 screen_res_x_pix: int , screen_res_y_pix: int,
                 viewing_distance_mm: float):
        # physical screen size (mm)
        self.screen_size_x_mm = screen_size_x_mm
        self.screen_size_y_mm = screen_size_y_mm
        # screen resolution (pixels)
        self.screen_res_x_pix = screen_res_x_pix
        self.screen_res_y_pix = screen_res_y_pix
        # distance from the eye to the screen (mm)
        self.viewing_distance_mm= viewing_distance_mm

    def pix_to_mm(self, x: float, y: float) -> tuple[float,float]:
        """Convert an on-screen position from pixels to millimeters."""
        horizontal_mm = x/self.screen_res_x_pix*self.screen_size_x_mm
        vertical_mm   = y/self.screen_res_y_pix*self.screen_size_y_mm
        return horizontal_mm, vertical_mm

    def pix_to_deg(self, x: float, y: float) -> tuple[float,float]:
        """Convert an on-screen position from pixels to degrees.

        N.B.: output is in Fick angles.
        """
        return self.mm_to_deg(*self.pix_to_mm(x, y))

    def mm_to_deg(self, x: float, y: float) -> tuple[float,float]:
        """Convert an on-screen position from millimeters to degrees.

        N.B.: output is in Fick angles: azimuth is rotation about the
        vertical axis, elevation about the (rotated) horizontal axis.
        """
        azimuth   = np.arctan2(x,self.viewing_distance_mm)
        elevation = np.arctan2(y,np.hypot(self.viewing_distance_mm,x))
        return np.degrees(azimuth), np.degrees(elevation)
33
+
34
+
35
def _Fick_to_cartesian(azi: np.ndarray[tuple[N], np.dtype[np.float64]], ele: np.ndarray[tuple[N], np.dtype[np.float64]], r: float=1.) -> tuple[np.ndarray[tuple[N], np.dtype[np.float64]], np.ndarray[tuple[N], np.dtype[np.float64]], np.ndarray[tuple[N], np.dtype[np.float64]]]:
    """Convert Fick angles (degrees) at radius ``r`` to Cartesian coordinates.

    Returns the (x, y, z) components, where z points toward the primary
    position (straight ahead) and y upward.
    """
    azi_rad = np.radians(azi)
    ele_rad = np.radians(ele)
    # distance from the vertical rotation axis at this elevation
    horizontal_radius = r*np.cos(ele_rad)

    return (horizontal_radius * np.sin(azi_rad),    # x
            r                 * np.sin(ele_rad),    # y
            horizontal_radius * np.cos(azi_rad))    # z
44
+
45
+
46
+ class DataQuality:
47
+ # N.B: for this module it is assumed that any missing data are not coded with some special value
48
+ # such as (0,0) or (-xres,-yres) but as nan. Missing data should also not be removed, or the RMS
49
+ # calculation would be incorrect.
50
+ #
51
+ # timestamps should be in seconds.
52
+ #
53
+ # all angular positions are expected to be expressed in Fick angles.
54
+ def __init__(self,
55
+ gaze_x : np.ndarray[tuple[N], np.dtype[np.float64]],
56
+ gaze_y : np.ndarray[tuple[N], np.dtype[np.float64]],
57
+ timestamps : np.ndarray[tuple[N], np.dtype[np.float64]],
58
+ unit : str,
59
+ screen : ScreenConfiguration|None = None):
60
+ self.timestamps = np.array(timestamps)
61
+
62
+ gaze_x = np.array(gaze_x)
63
+ gaze_y = np.array(gaze_y)
64
+ if unit=='pixels':
65
+ if screen is None:
66
+ raise ValueError('If unit is "pixels", a screen configuration must be supplied')
67
+ gaze_x, gaze_y = screen.pix_to_deg(gaze_x, gaze_y)
68
+ elif unit!='degrees':
69
+ raise ValueError('unit should be "pixels" or "degrees"')
70
+ self.x = gaze_x
71
+ self.y = gaze_y
72
+
73
+ def accuracy(self, target_x_deg: float, target_y_deg: float, central_tendency_fun=np.nanmean) -> tuple[float,float,float]:
74
+ # get unit vectors for gaze and target
75
+ g_x,g_y,g_z = _Fick_to_cartesian( self.x, self.y)
76
+ t_x,t_y,t_z = _Fick_to_cartesian(target_x_deg, target_y_deg)
77
+ # calculate angular offset for each sample using dot product
78
+ offsets = np.arccos(np.dot(np.vstack((g_x,g_y,g_z)).T, np.array([t_x,t_y,t_z])))
79
+ # calculate on-screen orientation so we can decompose offset into x and y
80
+ direction = np.arctan2(g_y/g_z-t_y/t_z, g_x/g_z-t_x/t_z) # compute direction on tangent screen (divide by z to project to screen at 1m)
81
+ offsets_2D = np.degrees(offsets.reshape((-1,1))*np.array([np.cos(direction), np.sin(direction)]).T)
82
+ # calculate mean horizontal and vertical offset
83
+ offset_x = central_tendency_fun(offsets_2D[:,0])
84
+ offset_y = central_tendency_fun(offsets_2D[:,1])
85
+ # calculate offset of centroid
86
+ return float(np.hypot(offset_x, offset_y)), float(offset_x), float(offset_y)
87
+
88
+ def precision_RMS_S2S(self, central_tendency_fun=np.nanmean) -> tuple[float,float,float]:
89
+ return _RMS_S2S_impl(self.x, self.y, central_tendency_fun)
90
+
91
+ def precision_STD(self) -> tuple[float,float,float]:
92
+ return _STD_impl(self.x, self.y)
93
+
94
+ def precision_BCEA(self, P: float = 0.6827) -> tuple[float,float,float,float,float]:
95
+ return _BCEA_impl(self.x, self.y, P)
96
+
97
+ def data_loss_percentage(self):
98
+ missing = np.logical_or(np.isnan(self.x), np.isnan(self.y))
99
+ return np.sum(missing)/missing.shape[0]*100
100
+
101
+ def effective_frequency(self):
102
+ N_valid = np.sum(np.logical_not(np.logical_or(np.isnan(self.x), np.isnan(self.y))))
103
+ # to get duration right, we need to include duration of last sample
104
+ isi = np.median(np.diff(self.timestamps))
105
+ return N_valid/(self.timestamps[-1]-self.timestamps[0]+isi)
106
+
107
+
108
+ def precision_using_moving_window(self, window_length, metric, aggregation_fun=np.nanmedian, **kwargs) -> float:
109
+ match metric:
110
+ case 'RMS_S2S':
111
+ fun = _RMS_S2S_impl
112
+ case 'STD':
113
+ fun = _STD_impl
114
+ case 'BCEA':
115
+ fun = _BCEA_impl
116
+ case _:
117
+ raise ValueError(f'metric "{metric}" is not understood')
118
+
119
+ # get number of samples in data
120
+ ns = self.x.shape[0]
121
+
122
+ if window_length < ns: # if number of samples in data exceeds window size
123
+ values = np.full((ns-window_length+1,), np.nan) # pre-allocate
124
+ for p in range(0,ns-window_length+1):
125
+ values[p] = fun(self.x[p:p+window_length], self.y[p:p+window_length], **kwargs)[0]
126
+ precision = aggregation_fun(values)
127
+ else:
128
+ # if too few samples in data
129
+ precision = np.nan
130
+ return precision
131
+
132
+
133
def compute_data_quality_from_validation(gaze              : pd.DataFrame,
                                         unit              : str,
                                         screen            : ScreenConfiguration|None = None,
                                         advanced          : bool = False, # if True, report all metrics. If False, only simple subset
                                         include_data_loss : bool = False) -> pd.DataFrame:
    """Compute per-eye, per-target data quality metrics from a validation recording.

    gaze is expected to contain the columns 'target_id', 'tar_x', 'tar_y' and
    'timestamp' (in ms), plus '<eye>_x'/'<eye>_y' gaze position columns for one
    or both of the eyes 'left' and 'right', expressed in `unit` ("pixels" or
    "degrees"; "pixels" requires a ScreenConfiguration). Samples that do not
    belong to any target carry target_id -1.

    Returns a DataFrame indexed by (eye, target_id) with accuracy (offset),
    RMS-S2S, STD and BCEA precision metrics, plus data loss percentage and
    effective frequency when include_data_loss is set. Unless advanced is
    set, only the overall value of each metric is kept, not its components.

    Raises ValueError for an unknown unit or a missing screen configuration.
    """
    # get all targets (samples with target_id -1 belong to no target)
    targets = sorted([t for t in gaze['target_id'].unique() if t!=-1])
    # each target's (tar_x, tar_y) position is taken from the first sample
    # belonging to it (argmax returns the first True in the boolean mask)
    target_locations= np.array([gaze.loc[gaze.index[(gaze['target_id'].values==t).argmax()], ['tar_x','tar_y']] for t in targets])

    # ensure we have target locations in degrees
    if unit=='pixels':
        if screen is None:
            raise ValueError('If unit is "pixels", a screen configuration must be supplied')
        target_locations[:,0], target_locations[:,1] = screen.pix_to_deg(target_locations[:,0], target_locations[:,1])
    elif unit!='degrees':
        raise ValueError('unit should be "pixels" or "degrees"')

    # now, per target, compute data quality metrics
    rows = []
    for e in ('left','right'):
        # skip eyes for which the file contains no data columns
        if f'{e}_x' not in gaze.columns:
            continue
        for i,t_id in enumerate(targets):
            is_target = gaze['target_id'].values==t_id
            dq = DataQuality(gaze[f'{e}_x'][is_target], gaze[f'{e}_y'][is_target], gaze['timestamp'][is_target]/1000, unit, screen) # timestamps are in ms in the file
            row = {'eye': e, 'target_id': t_id}
            # accuracy w.r.t. the target's position, then the three precision metrics
            for k,v in zip(('offset','offset_x','offset_y'),dq.accuracy(*target_locations[i])):
                row[k] = v
            for k,v in zip(('rms_s2s','rms_s2s_x','rms_s2s_y'),dq.precision_RMS_S2S()):
                row[k] = v
            for k,v in zip(('std','std_x','std_y'),dq.precision_STD()):
                row[k] = v
            for k,v in zip(('bcea','bcea_orientation','bcea_ax1','bcea_ax2','bcea_aspect_ratio'),dq.precision_BCEA()):
                row[k] = v
            if include_data_loss:
                row['data_loss'] = dq.data_loss_percentage()
                row['effective_frequency'] = dq.effective_frequency()
            rows.append(row)

    dq_df = pd.DataFrame.from_records(rows).set_index(['eye','target_id'])
    if not advanced:
        # keep only the overall value of each metric, dropping per-axis
        # components and ellipse parameters
        dq_df = dq_df.drop(columns=[c for c in dq_df.columns if c not in ('eye', 'target_id', 'offset', 'rms_s2s', 'std', 'bcea', 'data_loss', 'effective_frequency')])
    return dq_df
176
+
177
+
178
+
179
def _RMS_S2S_impl(x: np.ndarray[tuple[N], np.dtype[np.float64]], y: np.ndarray[tuple[N], np.dtype[np.float64]], central_tendency_fun=np.nanmean) -> tuple[float,float,float]:
    """Sample-to-sample root-mean-square (RMS-S2S) precision.

    Squared successive differences are aggregated with central_tendency_fun
    (mean by default). Returns the combined, horizontal and vertical values.
    """
    sq_step_x = np.diff(x)**2
    sq_step_y = np.diff(y)**2
    # N.B.: the combined value cannot be simplified to np.hypot(rms_x, rms_y),
    # as that is only equivalent when mean() is used as central tendency estimator
    rms_total = np.sqrt(central_tendency_fun(sq_step_x + sq_step_y))
    rms_x     = np.sqrt(central_tendency_fun(sq_step_x))
    rms_y     = np.sqrt(central_tendency_fun(sq_step_y))
    return float(rms_total), float(rms_x), float(rms_y)
187
+
188
def _STD_impl(x: np.ndarray[tuple[N], np.dtype[np.float64]], y: np.ndarray[tuple[N], np.dtype[np.float64]]) -> tuple[float,float,float]:
    """Precision as the (population) standard deviation of the gaze positions.

    Returns the combined (Euclidean), horizontal and vertical standard
    deviations, ignoring nan samples.
    """
    deviations = [np.nanstd(coord, ddof=0) for coord in (x, y)]
    combined = np.hypot(deviations[0], deviations[1])
    return float(combined), float(deviations[0]), float(deviations[1])
194
+
195
def _BCEA_impl(x: np.ndarray[tuple[N], np.dtype[np.float64]], y: np.ndarray[tuple[N], np.dtype[np.float64]], P: float = 0.6827) -> tuple[float,float,float,float,float]:
    """Bivariate contour ellipse area (BCEA) precision.

    Returns the area of the ellipse containing proportion P of the samples,
    its orientation (degrees), its major and minor axis radii, and their
    aspect ratio.
    """
    k = np.log(1./(1-P)) # turn cumulative probability of area under the multivariate normal into scale factor

    # remove samples where either coordinate is missing. This must be done
    # with a joint mask: removing nans from x and y independently (as the
    # previous per-array np.delete did) leaves the arrays with different
    # lengths and misaligned sample pairs whenever a nan occurs in only one
    # coordinate, breaking the correlation and covariance computations below
    valid = np.logical_not(np.logical_or(np.isnan(x), np.isnan(y)))
    x = x[valid]
    y = y[valid]
    std_x = np.std(x, ddof=1)
    std_y = np.std(y, ddof=1)
    rho = np.corrcoef(x, y)[0,1]
    area = 2*k*np.pi*std_x*std_y*np.sqrt(1-rho**2)
    # compute major and minor axis radii, and orientation, of the BCEA ellipse
    d,v = np.linalg.eig(np.cov(x,y))
    i = np.argmax(d)
    orientation = np.degrees(np.arctan2(v[1,i], v[0,i]))
    ax1 = np.sqrt(k*d[i])
    ax2 = np.sqrt(k*d[1-i])
    aspect_ratio = max([ax1, ax2])/min([ax1, ax2])
    # sanity check: this (formula for area of ellipse) should
    # closely match directly computed area from above
    # 2*np.pi*ax1*ax2
    return float(area), float(orientation), float(ax1), float(ax2), float(aspect_ratio)
@@ -0,0 +1,13 @@
1
# Distribution metadata for the ETDQualitizer package. setup.py loads this
# file (with runpy) to fill in the setuptools arguments from a single place.
__version__ = "0.6.0"

__title__       = "ETDQualitizer"
__description__ = "Automated eye tracking data quality determination for screen-based eye trackers."
__url__         = "https://github.com/dcnieho/ETDQualitizer"
__uri__         = __url__
__doc__         = f"{__description__} <{__url__}>"

__author__ = "Diederick Niehorster"
__email__  = "diederick_c.niehorster@humlab.lu.se"

__license__   = "MIT"
__copyright__ = f"Copyright (c) 2025 {__author__}"
@@ -0,0 +1,56 @@
1
+ Metadata-Version: 2.4
2
+ Name: ETDQualitizer
3
+ Version: 0.6.0
4
+ Summary: Automated eye tracking data quality determination for screen-based eye trackers.
5
+ Home-page: https://github.com/dcnieho/ETDQualitizer
6
+ Author: Diederick Niehorster
7
+ Author-email: diederick_c.niehorster@humlab.lu.se
8
+ License: MIT License
9
+
10
+ Copyright (c) 2024 Diederick Niehorster
11
+
12
+ Permission is hereby granted, free of charge, to any person obtaining a copy
13
+ of this software and associated documentation files (the "Software"), to deal
14
+ in the Software without restriction, including without limitation the rights
15
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16
+ copies of the Software, and to permit persons to whom the Software is
17
+ furnished to do so, subject to the following conditions:
18
+
19
+ The above copyright notice and this permission notice shall be included in all
20
+ copies or substantial portions of the Software.
21
+
22
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
28
+ SOFTWARE.
29
+
30
+ Project-URL: Source Code, https://github.com/dcnieho/ETDQualitizer
31
+ Classifier: Programming Language :: Python :: 3.10
32
+ Classifier: License :: OSI Approved :: MIT License
33
+ Classifier: Operating System :: OS Independent
34
+ Requires-Python: >=3.10
35
+ Description-Content-Type: text/markdown
36
+ License-File: LICENSE
37
+ Requires-Dist: numpy
38
+ Requires-Dist: pandas
39
+ Dynamic: author
40
+ Dynamic: author-email
41
+ Dynamic: classifier
42
+ Dynamic: description
43
+ Dynamic: description-content-type
44
+ Dynamic: home-page
45
+ Dynamic: license
46
+ Dynamic: license-file
47
+ Dynamic: project-url
48
+ Dynamic: requires-dist
49
+ Dynamic: requires-python
50
+ Dynamic: summary
51
+
52
+ [![Downloads](https://static.pepy.tech/badge/ETDQualitizer)](https://pepy.tech/project/ETDQualitizer)
53
+ [![PyPI Latest Release](https://img.shields.io/pypi/v/ETDQualitizer.svg)](https://pypi.org/project/ETDQualitizer/)
54
+ [![image](https://img.shields.io/pypi/pyversions/ETDQualitizer.svg)](https://pypi.org/project/ETDQualitizer/)
55
+
56
+ # ETDQualitizer v0.6.0
@@ -0,0 +1,13 @@
1
+ LICENSE
2
+ MANIFEST.in
3
+ README.md
4
+ pyproject.toml
5
+ requirements.txt
6
+ setup.py
7
+ python/ETDQualitizer/__init__.py
8
+ python/ETDQualitizer/version.py
9
+ python/ETDQualitizer.egg-info/PKG-INFO
10
+ python/ETDQualitizer.egg-info/SOURCES.txt
11
+ python/ETDQualitizer.egg-info/dependency_links.txt
12
+ python/ETDQualitizer.egg-info/requires.txt
13
+ python/ETDQualitizer.egg-info/top_level.txt
@@ -0,0 +1,2 @@
1
+ numpy
2
+ pandas
@@ -0,0 +1 @@
1
+ ETDQualitizer
@@ -0,0 +1,2 @@
1
+ numpy
2
+ pandas
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,44 @@
1
"""Build script for the ETDQualitizer distribution.

Package metadata lives in python/ETDQualitizer/version.py and is loaded
here (via runpy, without importing the package) so it is defined once.
"""
import setuptools
import runpy

# pull __title__, __version__, __author__, etc. from the version module
info = runpy.run_path("python/ETDQualitizer/version.py")

with open('README.md', encoding='utf-8') as f:
    readme = f.read()

# renamed from `license` to avoid shadowing the builtin
with open('LICENSE', encoding='utf-8') as f:
    license_text = f.read()

# requirements.txt holds one requirement specifier per line
with open('requirements.txt', encoding='utf-8') as f:
    required = f.read().splitlines()

setuptools.setup(
    name=info['__title__'],
    version=info['__version__'],
    author=info['__author__'],
    author_email=info['__email__'],
    description=info['__description__'],
    long_description=readme,
    long_description_content_type="text/markdown",
    url=info['__url__'],
    project_urls={
        "Source Code": info['__url__'],
    },
    classifiers=[
        "Programming Language :: Python :: 3.10",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # NOTE(review): setuptools expects a short license expression here (e.g.
    # "MIT"), not the full LICENSE text; passed through unchanged so the
    # generated metadata stays identical -- consider license="MIT"
    license=license_text,
    package_dir={"": "python"},
    packages=setuptools.find_packages(where="python"),
    include_package_data=True,
    python_requires=">=3.10",
    install_requires=required
)