heavyedge-dataset 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- heavyedge_dataset/__init__.py (0.1.0)
+++ heavyedge_dataset/__init__.py (0.2.0)
@@ -1,15 +1,272 @@
1
- """PyTorch-compatiable dataset API for edge profiles."""
1
+ """Custom dataset classes for edge profiles.
2
+
3
+ Refer to the `PyTorch tutorial <tutorial>`_ for information about custom PyTorch datasets.
4
+
5
+ .. _tutorial: https://docs.pytorch.org/tutorials/beginner/data_loading_tutorial.html
6
+ """
7
+
8
+ import abc
9
+ import numbers
10
+ from collections.abc import Sequence
11
+
12
+ import numpy as np
13
+ from heavyedge.api import landmarks_type3
14
+ from torch.utils.data import Dataset
2
15
 
3
16
  __all__ = [
4
17
  "ProfileDataset",
5
- "PseudoLmDataset",
6
- "MathLm1dDataset",
7
- "MathLm2dDataset",
18
+ "PseudoLandmarkDataset",
19
+ "MathematicalLandmarkDataset",
8
20
  ]
9
21
 
10
- from .datasets import (
11
- MathLm1dDataset,
12
- MathLm2dDataset,
13
- ProfileDataset,
14
- PseudoLmDataset,
15
- )
22
+
23
+ class ProfileDatasetBase(abc.ABC):
24
+ """Abstract base class for profile dataset."""
25
+
26
+ @property
27
+ @abc.abstractmethod
28
+ def file(self):
29
+ """Profile data file.
30
+
31
+ Returns
32
+ -------
33
+ heavyedge.ProfileData
34
+ """
35
+
36
+ @property
37
+ @abc.abstractmethod
38
+ def transform(self):
39
+ """Optional transformation to be applied on samples.
40
+
41
+ Returns
42
+ -------
43
+ Callable
44
+ """
45
+
46
+ def __len__(self):
47
+ return len(self.file)
48
+
49
+ def __getitem__(self, idx):
50
+ if isinstance(idx, numbers.Integral):
51
+ Y, L, _ = self.file[idx]
52
+ Ys, Ls = [Y], [L]
53
+ else:
54
+ # Support multi-indexing
55
+ idxs = idx
56
+ needs_sort = isinstance(idx, (Sequence, np.ndarray))
57
+ if needs_sort:
58
+ # idxs must be sorted for h5py
59
+ idxs = np.array(idxs)
60
+ sort_idx = np.argsort(idxs)
61
+ idxs = idxs[sort_idx]
62
+ Ys, Ls, _ = self.file[idxs]
63
+ if needs_sort:
64
+ reverse_idx = np.argsort(sort_idx)
65
+ Ys = Ys[reverse_idx]
66
+ Ls = Ls[reverse_idx]
67
+
68
+ ret = self.default_transform(Ys, Ls)
69
+ if self.transform:
70
+ ret = self.transform(ret)
71
+ return ret
72
+
73
+ def __getitems__(self, idxs):
74
+ # Batched fetch API used by PyTorch's DataLoader
75
+ return self.__getitem__(idxs)
76
+
77
+ @abc.abstractmethod
78
+ def default_transform(self, profiles, lengths):
79
+ """Default data transformation.
80
+
81
+ Subclasses must implement this method to transform profile data into target data.
82
+
83
+ Parameters
84
+ ----------
85
+ profiles : (N, M) array
86
+ Profile data.
87
+ lengths : (N,) array
88
+ Length of each profile in *profiles*.
89
+ """
90
+ pass
91
+
92
+
93
+ class ProfileDataset(ProfileDatasetBase, Dataset):
94
+ """Full profile dataset in 1-D or 2-D.
95
+
96
+ Parameters
97
+ ----------
98
+ file : heavyedge.ProfileData
99
+ Open hdf5 file.
100
+ m : {1, 2}
101
+ Profile data dimension.
102
+ 1 means only y coordinates, and 2 means both x and y coordinates.
103
+ transform : callable, optional
104
+ Optional transformation to be applied to samples.
105
+
106
+ Examples
107
+ --------
108
+ >>> from heavyedge import get_sample_path, ProfileData
109
+ >>> from heavyedge_dataset import ProfileDataset
110
+ >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
111
+ ... data = ProfileDataset(file, 2)[:]
112
+ >>> import matplotlib.pyplot as plt # doctest: +SKIP
113
+ ... for coords in data:
114
+ ... plt.plot(*coords, color="gray")
115
+ """
116
+
117
+ def __init__(self, file, m, transform=None):
118
+ self._file = file
119
+ self.m = m
120
+ self._transform = transform
121
+
122
+ self.x = file.x()
123
+
124
+ @property
125
+ def file(self):
126
+ return self._file
127
+
128
+ @property
129
+ def transform(self):
130
+ return self._transform
131
+
132
+ def default_transform(self, profiles, lengths):
133
+ """Crop profiles by their contact points.
134
+
135
+ Parameters
136
+ ----------
137
+ profiles : (N, M) array
138
+ Profile data.
139
+ lengths : (N,) array
140
+ Length of each profile in *profiles*.
141
+ """
142
+ if self.m == 1:
143
+ ret = [Y[:L].reshape(1, -1) for Y, L in zip(profiles, lengths)]
144
+ elif self.m == 2:
145
+ ret = [np.stack([self.x[:L], Y[:L]]) for Y, L in zip(profiles, lengths)]
146
+ else:
147
+ raise ValueError(f"Invalid dimension: {self.m}")
148
+ return ret
149
+
150
+
151
+ class PseudoLandmarkDataset(ProfileDatasetBase, Dataset):
152
+ """Pseudo-landmark dataset in 1-D or 2-D.
153
+
154
+ Pseudo-landmarks are points sampled at equal intervals along the profile.
155
+
156
+ Parameters
157
+ ----------
158
+ file : heavyedge.ProfileData
159
+ Open hdf5 file.
160
+ k : int
161
+ Number of landmarks to sample.
162
+ m : {1, 2}
163
+ Profile data dimension.
164
+ 1 means only y coordinates, and 2 means both x and y coordinates.
165
+ transform : callable, optional
166
+ Optional transformation to be applied to samples.
167
+
168
+ Examples
169
+ --------
170
+ >>> from heavyedge import get_sample_path, ProfileData
171
+ >>> from heavyedge_dataset import PseudoLandmarkDataset
172
+ >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
173
+ ... data = PseudoLandmarkDataset(file, 10, 2)[:]
174
+ >>> import matplotlib.pyplot as plt # doctest: +SKIP
175
+ ... plt.plot(*data.transpose(1, 2, 0), color="gray")
176
+ """
177
+
178
+ def __init__(self, file, k, m, transform=None):
179
+ self._file = file
180
+ self.k = k
181
+ self.m = m
182
+ self._transform = transform
183
+
184
+ self.x = file.x()
185
+
186
+ @property
187
+ def file(self):
188
+ return self._file
189
+
190
+ @property
191
+ def transform(self):
192
+ return self._transform
193
+
194
+ def default_transform(self, profiles, lengths):
195
+ """Sample pseudo-landmarks from profiles.
196
+
197
+ Parameters
198
+ ----------
199
+ profiles : (N, M) array
200
+ Profile data.
201
+ lengths : (N,) array
202
+ Length of each profile in *profiles*.
203
+ """
204
+ ret = []
205
+ if self.m == 1:
206
+ for Y, L in zip(profiles, lengths):
207
+ idxs = np.linspace(0, L - 1, self.k, dtype=int)
208
+ ret.append(Y[idxs].reshape(1, -1))
209
+ elif self.m == 2:
210
+ for Y, L in zip(profiles, lengths):
211
+ idxs = np.linspace(0, L - 1, self.k, dtype=int)
212
+ ret.append(np.stack([self.x[idxs], Y[idxs]]))
213
+ else:
214
+ raise ValueError(f"Invalid dimension: {self.m}")
215
+ return np.array(ret)
216
+
217
+
218
+ class MathematicalLandmarkDataset(ProfileDatasetBase, Dataset):
219
+ """Mathematical landmark dataset in 1-D.
220
+
221
+ Mathematical landmarks are points chosen by their
222
+ mathematical properties, e.g., slope or curvature.
223
+
224
+ Parameters
225
+ ----------
226
+ file : heavyedge.ProfileData
227
+ Open hdf5 file.
228
+ sigma : scalar
229
+ Standard deviation of Gaussian kernel for landmark detection.
230
+ transform : callable, optional
231
+ Optional transformation to be applied to samples.
232
+
233
+ Examples
234
+ --------
235
+ >>> from heavyedge import get_sample_path, ProfileData
236
+ >>> from heavyedge_dataset import MathematicalLandmarkDataset
237
+ >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
238
+ ... data = MathematicalLandmarkDataset(file, 32)[:]
239
+ >>> import matplotlib.pyplot as plt # doctest: +SKIP
240
+ ... plt.plot(*data.transpose(1, 2, 0), color="gray")
241
+ """
242
+
243
+ def __init__(self, file, sigma, transform=None):
244
+ self._file = file
245
+ self.sigma = sigma
246
+ self._transform = transform
247
+
248
+ @property
249
+ def file(self):
250
+ return self._file
251
+
252
+ @property
253
+ def transform(self):
254
+ return self._transform
255
+
256
+ def default_transform(self, profiles, lengths):
257
+ """Detect mathematical landmarks from profiles.
258
+
259
+ Parameters
260
+ ----------
261
+ profiles : (N, M) array
262
+ Profile data.
263
+ lengths : (N,) array
264
+ Length of each profile in *profiles*.
265
+ """
266
+ ret = []
267
+ for Y, L in zip(profiles, lengths):
268
+ Y = Y[:L]
269
+ indices = np.flip(landmarks_type3(Y, self.sigma))
270
+ y = np.concat([[np.mean(Y[: indices[0]])], Y[indices]])
271
+ ret.append(y.reshape(1, -1))
272
+ return np.array(ret)
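
The new `__init__.py` above centers on `ProfileDatasetBase`: a subclass exposes the open `file`, an optional `transform`, and a `default_transform()` that turns raw profiles and lengths into target data, while indexing and batching come from the base class. As a minimal sketch of that contract (the `MaxHeightDataset` class and its max-height feature are hypothetical, not part of the package; importing `ProfileDatasetBase` directly is an assumption, since it is not listed in `__all__`):

```python
import numpy as np
from torch.utils.data import Dataset

# ProfileDatasetBase is defined in heavyedge_dataset/__init__.py but not exported
# via __all__, so this import is an assumption about how it would be accessed.
from heavyedge_dataset import ProfileDatasetBase


class MaxHeightDataset(ProfileDatasetBase, Dataset):
    """Hypothetical dataset reducing each profile to its maximum height."""

    def __init__(self, file, transform=None):
        self._file = file
        self._transform = transform

    @property
    def file(self):
        return self._file

    @property
    def transform(self):
        return self._transform

    def default_transform(self, profiles, lengths):
        # Crop each profile to its contact point and keep only the peak value.
        return np.array([[Y[:L].max()] for Y, L in zip(profiles, lengths)])
```
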
--- /dev/null
+++ heavyedge_dataset-0.2.0.dist-info/METADATA
@@ -0,0 +1,90 @@
1
+ Metadata-Version: 2.4
2
+ Name: heavyedge-dataset
3
+ Version: 0.2.0
4
+ Summary: PyTorch-compatible edge profile dataset API
5
+ Author-email: Jisoo Song <jeesoo9595@snu.ac.kr>
6
+ License-Expression: MIT
7
+ Project-URL: homepage, https://pypi.python.org/pypi/heavyedge-dataset/
8
+ Project-URL: source, https://github.com/heavyedge/heavyedge-dataset
9
+ Project-URL: documentation, https://heavyedge-dataset.readthedocs.io
10
+ Classifier: Development Status :: 5 - Production/Stable
11
+ Classifier: Intended Audience :: Science/Research
12
+ Classifier: Programming Language :: Python
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Programming Language :: Python :: 3 :: Only
18
+ Classifier: Topic :: Scientific/Engineering
19
+ Classifier: Operating System :: OS Independent
20
+ Requires-Python: >=3.10
21
+ Description-Content-Type: text/markdown
22
+ License-File: LICENSE
23
+ Requires-Dist: heavyedge>=1.1.2
24
+ Requires-Dist: torch
25
+ Provides-Extra: test
26
+ Requires-Dist: pytest; extra == "test"
27
+ Provides-Extra: doc
28
+ Requires-Dist: sphinx; extra == "doc"
29
+ Requires-Dist: numpydoc; extra == "doc"
30
+ Requires-Dist: pydata_sphinx_theme; extra == "doc"
31
+ Requires-Dist: matplotlib; extra == "doc"
32
+ Provides-Extra: dev
33
+ Requires-Dist: flake8; extra == "dev"
34
+ Requires-Dist: black; extra == "dev"
35
+ Requires-Dist: isort; extra == "dev"
36
+ Requires-Dist: heavyedge-dataset[doc,test]; extra == "dev"
37
+ Dynamic: license-file
38
+
39
+ # HeavyEdge-Dataset
40
+
41
+ [![Supported Python Versions](https://img.shields.io/pypi/pyversions/heavyedge-dataset.svg)](https://pypi.python.org/pypi/heavyedge-dataset/)
42
+ [![PyPI Version](https://img.shields.io/pypi/v/heavyedge-dataset.svg)](https://pypi.python.org/pypi/heavyedge-dataset/)
43
+ [![License](https://img.shields.io/github/license/heavyedge/heavyedge-dataset)](https://github.com/heavyedge/heavyedge-dataset/blob/master/LICENSE)
44
+ [![CI](https://github.com/heavyedge/heavyedge-dataset/actions/workflows/ci.yml/badge.svg)](https://github.com/heavyedge/heavyedge-dataset/actions/workflows/ci.yml)
45
+ [![CD](https://github.com/heavyedge/heavyedge-dataset/actions/workflows/cd.yml/badge.svg)](https://github.com/heavyedge/heavyedge-dataset/actions/workflows/cd.yml)
46
+ [![Docs](https://readthedocs.org/projects/heavyedge-dataset/badge/?version=latest)](https://heavyedge-dataset.readthedocs.io/en/latest/?badge=latest)
47
+
48
+ A package for loading edge profile data as PyTorch datasets.
49
+
50
+ ## Usage
51
+
52
+ HeavyEdge-Dataset provides custom dataset classes that wrap a profile data file.
53
+
54
+ A simple use case: loading a list of profiles as two-dimensional coordinates:
55
+
56
+ ```python
57
+ from heavyedge import get_sample_path, ProfileData
58
+ from heavyedge_dataset import ProfileDataset
59
+ with ProfileData(get_sample_path("Prep-Type2.h5")) as file: # Profile data file object
60
+ data = ProfileDataset(file, 2)[:]
61
+ ```
62
+
63
+ Refer to the package documentation for more information.
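
Because the dataset classes are regular map-style PyTorch datasets, they also work with `torch.utils.data.DataLoader`. A minimal sketch, not taken from the package documentation (the batch layout noted in the comments is my expectation from the code in this diff; `PseudoLandmarkDataset` is used because its samples have a fixed size that the default collate function can stack):

```python
from heavyedge import ProfileData, get_sample_path
from heavyedge_dataset import PseudoLandmarkDataset
from torch.utils.data import DataLoader

with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
    dataset = PseudoLandmarkDataset(file, 10, 2)  # 10 pseudo-landmarks, x/y coordinates
    loader = DataLoader(dataset, batch_size=4)
    for batch in loader:
        # Expected layout: batch_size samples, 2 coordinate rows, 10 landmarks each.
        # The exact shape can depend on whether DataLoader uses the batched
        # __getitems__ hook or fetches samples one by one.
        print(batch.shape)
        break
```
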
64
+
65
+ ## Documentation
66
+
67
+ The manual can be found online:
68
+
69
+ > https://heavyedge-dataset.readthedocs.io
70
+
71
+ If you want to build the documentation yourself, get the source code and install the package with the `[doc]` extra.
72
+ Then go to the `doc` directory and build the documentation:
73
+
74
+ ```
75
+ $ pip install .[doc]
76
+ $ cd doc
77
+ $ make html
78
+ ```
79
+
80
+ The documentation will be generated in the `build/html` directory. Open `index.html` to view the main page.
81
+
82
+ ## Developing
83
+
84
+ ### Installation
85
+
86
+ For development features, install the package with `pip install -e .[dev]`.
87
+
88
+ ### Testing
89
+
90
+ Run `pytest` to execute the unit tests.
--- /dev/null
+++ heavyedge_dataset-0.2.0.dist-info/RECORD
@@ -0,0 +1,6 @@
1
+ heavyedge_dataset/__init__.py,sha256=iBFvupZMaHpTGUxZOJvVKfD4bAOAdikK60a8bd4oxB0,7746
2
+ heavyedge_dataset-0.2.0.dist-info/licenses/LICENSE,sha256=pBq2E7YJkUcEINdYeERL4RVFOQICd_MwJq6OusuAPGc,1066
3
+ heavyedge_dataset-0.2.0.dist-info/METADATA,sha256=vZF2VwCYUmf7OzXfCO-pg9cHEeE7kCYIfl1JGmfTad0,3496
4
+ heavyedge_dataset-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
5
+ heavyedge_dataset-0.2.0.dist-info/top_level.txt,sha256=wpRFI8TlkYFGetc17appkyybauBvzhGGvyueunsdJTc,18
6
+ heavyedge_dataset-0.2.0.dist-info/RECORD,,
--- heavyedge_dataset/datasets.py
+++ /dev/null
@@ -1,293 +0,0 @@
1
- """
2
- Dataset classes
3
- ---------------
4
-
5
- PyTorch dataset classes for edge profiles.
6
- """
7
-
8
- import abc
9
- import numbers
10
- from collections.abc import Sequence
11
-
12
- import numpy as np
13
- from heavyedge.api import landmarks_type3
14
- from torch.utils.data import Dataset
15
-
16
- from .landmarks import math_landmarks_1d, pseudo_landmarks_1d, pseudo_landmarks_2d
17
-
18
- __all__ = [
19
- "ProfileDataset",
20
- "PseudoLmDataset",
21
- "MathLm1dDataset",
22
- "MathLm2dDataset",
23
- ]
24
-
25
-
26
- class ProfileDatasetBase(abc.ABC):
27
- """Abstract base class for profile dataset."""
28
-
29
- @property
30
- @abc.abstractmethod
31
- def file(self):
32
- """Profile data file.
33
-
34
- Returns
35
- -------
36
- heavyedge.ProfileData
37
- """
38
-
39
- @abc.abstractmethod
40
- def default_transform(self, profiles, lengths):
41
- """Default transform by the dataset class.
42
-
43
- Parameters
44
- ----------
45
- profiles : (N, M) array
46
- Profile data.
47
- lengths : (N,) array
48
- Length of each profile.
49
- """
50
- pass
51
-
52
- @property
53
- @abc.abstractmethod
54
- def transform(self):
55
- """Optional transformation passed to the dataset instance.
56
-
57
- Returns
58
- -------
59
- Callable
60
- """
61
-
62
- def __len__(self):
63
- return len(self.file)
64
-
65
- def __getitem__(self, idx):
66
- if isinstance(idx, numbers.Integral):
67
- Y, L, _ = self.file[idx]
68
- ret = self.default_transform([Y], [L])
69
- if self.transform:
70
- ret = self.transform(ret)
71
- ret = ret[0]
72
- else:
73
- ret = self.__getitems__(idx)
74
- return ret
75
-
76
- def __getitems__(self, idxs):
77
- # PyTorch API
78
- needs_sort = isinstance(idxs, (Sequence, np.ndarray))
79
- if needs_sort:
80
- # idxs must be sorted for h5py
81
- idxs = np.array(idxs)
82
- sort_idx = np.argsort(idxs)
83
- idxs = idxs[sort_idx]
84
- else:
85
- pass
86
- Ys, Ls, _ = self.file[idxs]
87
- if needs_sort:
88
- reverse_idx = np.argsort(sort_idx)
89
- Ys = Ys[reverse_idx]
90
- Ls = Ls[reverse_idx]
91
- ret = self.default_transform(Ys, Ls)
92
- if self.transform:
93
- ret = self.transform(ret)
94
- return ret
95
-
96
-
97
- class ProfileDataset(ProfileDatasetBase, Dataset):
98
- """Full profile dataset.
99
-
100
- Parameters
101
- ----------
102
- file : heavyedge.ProfileData
103
- Open hdf5 file.
104
- m : {1, 2}
105
- Profile data dimension.
106
- 1 means only y coordinates, and 2 means both x and y coordinates.
107
- transform : callable, optional
108
- Optional transform to be applied on a sample.
109
-
110
- Examples
111
- --------
112
- >>> from heavyedge import get_sample_path, ProfileData
113
- >>> from heavyedge_dataset import ProfileDataset
114
- >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
115
- ... data = ProfileDataset(file, 2)[:]
116
- >>> import matplotlib.pyplot as plt # doctest: +SKIP
117
- ... for coords in data:
118
- ... plt.plot(*coords, color="gray")
119
- """
120
-
121
- def __init__(self, file, m, transform=None):
122
- self._file = file
123
- self.m = m
124
- self._transform = transform
125
-
126
- self.x = file.x()
127
-
128
- @property
129
- def file(self):
130
- return self._file
131
-
132
- def default_transform(self, profiles, lengths):
133
- if self.m == 1:
134
- ret = [Y[:L].reshape(1, -1) for Y, L in zip(profiles, lengths)]
135
- elif self.m == 2:
136
- ret = [np.stack([self.x[:L], Y[:L]]) for Y, L in zip(profiles, lengths)]
137
- else:
138
- raise ValueError(f"Invalid dimension: {self.m}")
139
- return ret
140
-
141
- @property
142
- def transform(self):
143
- return self._transform
144
-
145
-
146
- class PseudoLmDataset(ProfileDatasetBase, Dataset):
147
- """Pseudo-landmark dataset.
148
-
149
- Parameters
150
- ----------
151
- file : heavyedge.ProfileData
152
- Open hdf5 file.
153
- k : int
154
- Number of landmarks to sample.
155
- m : {1, 2}
156
- Profile data dimension.
157
- 1 means only y coordinates, and 2 means both x and y coordinates.
158
- transform : callable, optional
159
- Optional transform to be applied on a sample.
160
-
161
- Examples
162
- --------
163
- >>> from heavyedge import get_sample_path, ProfileData
164
- >>> from heavyedge_dataset import PseudoLmDataset
165
- >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
166
- ... data = PseudoLmDataset(file, 10, 2)[:]
167
- >>> import matplotlib.pyplot as plt # doctest: +SKIP
168
- ... plt.plot(*data.transpose(1, 2, 0), color="gray")
169
- """
170
-
171
- def __init__(self, file, k, m, transform=None):
172
- self._file = file
173
- self.k = k
174
- self.m = m
175
- self._transform = transform
176
-
177
- self.x = file.x()
178
-
179
- @property
180
- def file(self):
181
- return self._file
182
-
183
- def default_transform(self, profiles, lengths):
184
- if self.m == 1:
185
- ret = pseudo_landmarks_1d(profiles, lengths, self.k)
186
- elif self.m == 2:
187
- ret = pseudo_landmarks_2d(self.x, profiles, lengths, self.k)
188
- else:
189
- raise ValueError(f"Invalid dimension: {self.m}")
190
- return ret
191
-
192
- @property
193
- def transform(self):
194
- return self._transform
195
-
196
-
197
- class MathLm1dDataset(ProfileDatasetBase, Dataset):
198
- """1-D mathematical landmarks dataset.
199
-
200
- Parameters
201
- ----------
202
- file : heavyedge.ProfileData
203
- Open hdf5 file.
204
- sigma : scalar
205
- Standard deviation of Gaussian kernel for landmark detection.
206
- transform : callable, optional
207
- Optional transform to be applied on a sample.
208
-
209
- Examples
210
- --------
211
- >>> from heavyedge import get_sample_path, ProfileData
212
- >>> from heavyedge_dataset import MathLm1dDataset
213
- >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
214
- ... data = MathLm1dDataset(file, 32)[:]
215
- >>> import matplotlib.pyplot as plt # doctest: +SKIP
216
- ... plt.plot(*data.transpose(1, 2, 0), color="gray")
217
- """
218
-
219
- def __init__(self, file, sigma, transform=None):
220
- self._file = file
221
- self.sigma = sigma
222
- self._transform = transform
223
-
224
- @property
225
- def file(self):
226
- return self._file
227
-
228
- def default_transform(self, profiles, lengths):
229
- return math_landmarks_1d(profiles, lengths, self.sigma)
230
-
231
- @property
232
- def transform(self):
233
- return self._transform
234
-
235
-
236
- class MathLm2dDataset(ProfileDatasetBase, Dataset):
237
- """2-D mathematical landmarks dataset.
238
-
239
- Parameters
240
- ----------
241
- file : heavyedge.ProfileData
242
- Open hdf5 file.
243
- sigma : scalar
244
- Standard deviation of Gaussian kernel for landmark detection.
245
- transform : callable, optional
246
- Optional transform to be applied on a sample.
247
-
248
- Examples
249
- --------
250
- >>> from heavyedge import get_sample_path, ProfileData
251
- >>> from heavyedge_dataset import MathLm2dDataset
252
- >>> with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
253
- ... lm, _ = MathLm2dDataset(file, 32)[:]
254
- >>> import matplotlib.pyplot as plt # doctest: +SKIP
255
- ... plt.plot(*lm.transpose(1, 2, 0), color="gray")
256
- """
257
-
258
- def __init__(self, file, sigma, transform=None):
259
- self._file = file
260
- self.sigma = sigma
261
- self._transform = transform
262
-
263
- self.x = file.x()
264
-
265
- @property
266
- def file(self):
267
- return self._file
268
-
269
- def default_transform(self, profiles, lengths):
270
- # Todo: cythonize this method to avoid python loop.
271
- # This will require cythonizing landmarks_type3().
272
- lm, center_height = [], []
273
- for Y, L in zip(profiles, lengths):
274
- Y = Y[:L]
275
- indices = np.flip(landmarks_type3(Y, self.sigma))
276
- lm.append(np.stack([self.x[indices], Y[indices]]))
277
- center_height.append(np.mean(Y[: indices[0]]))
278
- return np.array(lm), np.array(center_height)
279
-
280
- def __getitem__(self, idx):
281
- if isinstance(idx, numbers.Integral):
282
- Y, L, _ = self.file[idx]
283
- lm, ch = self.default_transform([Y], [L])
284
- if self.transform:
285
- lm, ch = self.transform(lm, ch)
286
- lm, ch = lm[0], ch[0]
287
- else:
288
- lm, ch = self.__getitems__(idx)
289
- return (lm, ch)
290
-
291
- @property
292
- def transform(self):
293
- return self._transform
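
The removal of `datasets.py` above also renames the public classes. For readers upgrading from 0.1.0, a minimal before/after sketch (my reading of this diff, not an official migration guide): `PseudoLmDataset` becomes `PseudoLandmarkDataset` with the same `(file, k, m, transform)` arguments, `MathLm1dDataset` becomes `MathematicalLandmarkDataset` with the same `(file, sigma, transform)` arguments, and `MathLm2dDataset` has no direct replacement in 0.2.0.

```python
from heavyedge import ProfileData, get_sample_path

# 0.1.0 (classes removed in 0.2.0):
# from heavyedge_dataset import MathLm1dDataset, PseudoLmDataset
# pseudo = PseudoLmDataset(file, 10, 2)
# math_lm = MathLm1dDataset(file, 32)

# 0.2.0 equivalents:
from heavyedge_dataset import MathematicalLandmarkDataset, PseudoLandmarkDataset

with ProfileData(get_sample_path("Prep-Type2.h5")) as file:
    pseudo = PseudoLandmarkDataset(file, 10, 2)        # k=10 landmarks, 2-D coordinates
    math_lm = MathematicalLandmarkDataset(file, 32)    # sigma=32 for landmark detection
```
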
--- heavyedge_dataset/landmarks.py
+++ /dev/null
@@ -1,41 +0,0 @@
1
- """
2
- Landmark locators
3
- -----------------
4
-
5
- Functions to locate landmarks from edge profiles.
6
- """
7
-
8
- import numpy as np
9
- from heavyedge.api import landmarks_type3
10
-
11
- __all__ = [
12
- "pseudo_landmarks_1d",
13
- "pseudo_landmarks_2d",
14
- "math_landmarks_1d",
15
- ]
16
-
17
-
18
- def pseudo_landmarks_1d(Ys, Ls, k):
19
- ret = []
20
- for Y, L in zip(Ys, Ls):
21
- idxs = np.linspace(0, L - 1, k, dtype=int)
22
- ret.append(Y[idxs].reshape(1, -1))
23
- return np.array(ret)
24
-
25
-
26
- def pseudo_landmarks_2d(x, Ys, Ls, k):
27
- ret = []
28
- for Y, L in zip(Ys, Ls):
29
- idxs = np.linspace(0, L - 1, k, dtype=int)
30
- ret.append(np.stack([x[idxs], Y[idxs]]))
31
- return np.array(ret)
32
-
33
-
34
- def math_landmarks_1d(Ys, Ls, sigma):
35
- ret = []
36
- for Y, L in zip(Ys, Ls):
37
- Y = Y[:L]
38
- indices = np.flip(landmarks_type3(Y, sigma))
39
- y = np.concat([[np.mean(Y[: indices[0]])], Y[indices]])
40
- ret.append(y.reshape(1, -1))
41
- return np.array(ret)
--- heavyedge_dataset-0.1.0.dist-info/METADATA
+++ /dev/null
@@ -1,39 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: heavyedge-dataset
3
- Version: 0.1.0
4
- Summary: PyTorch-compatible edge profile dataset API
5
- Author-email: Jisoo Song <jeesoo9595@snu.ac.kr>
6
- License-Expression: MIT
7
- Project-URL: homepage, https://pypi.python.org/pypi/heavyedge-dataset/
8
- Project-URL: source, https://github.com/heavyedge/heavyedge-dataset
9
- Project-URL: documentation, https://heavyedge-dataset.readthedocs.io
10
- Classifier: Development Status :: 5 - Production/Stable
11
- Classifier: Intended Audience :: Science/Research
12
- Classifier: Programming Language :: Python
13
- Classifier: Programming Language :: Python :: 3
14
- Classifier: Programming Language :: Python :: 3.10
15
- Classifier: Programming Language :: Python :: 3.11
16
- Classifier: Programming Language :: Python :: 3.12
17
- Classifier: Programming Language :: Python :: 3 :: Only
18
- Classifier: Topic :: Scientific/Engineering
19
- Classifier: Operating System :: OS Independent
20
- Requires-Python: >=3.10
21
- Description-Content-Type: text/markdown
22
- License-File: LICENSE
23
- Requires-Dist: heavyedge>=1.1.2
24
- Requires-Dist: torch
25
- Provides-Extra: test
26
- Requires-Dist: pytest; extra == "test"
27
- Provides-Extra: doc
28
- Requires-Dist: sphinx; extra == "doc"
29
- Requires-Dist: numpydoc; extra == "doc"
30
- Requires-Dist: pydata_sphinx_theme; extra == "doc"
31
- Requires-Dist: matplotlib; extra == "doc"
32
- Provides-Extra: dev
33
- Requires-Dist: flake8; extra == "dev"
34
- Requires-Dist: black; extra == "dev"
35
- Requires-Dist: isort; extra == "dev"
36
- Requires-Dist: heavyedge-dataset[doc,test]; extra == "dev"
37
- Dynamic: license-file
38
-
39
- # HeavyEdge-Dataset
--- heavyedge_dataset-0.1.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
1
- heavyedge_dataset/__init__.py,sha256=X9li4ScLZs9TSlbH4ApoctQa6g7vglH0nkQadcb7Gq4,273
2
- heavyedge_dataset/datasets.py,sha256=j8a3KRXzWd0QpceAvP7ATvNUj5VSjyx445ksMMMdJio,8150
3
- heavyedge_dataset/landmarks.py,sha256=Tn7Pd91cWLs9txYu9mkNxC6OX-Y6VTNMzwTNzzCE4Bo,937
4
- heavyedge_dataset-0.1.0.dist-info/licenses/LICENSE,sha256=pBq2E7YJkUcEINdYeERL4RVFOQICd_MwJq6OusuAPGc,1066
5
- heavyedge_dataset-0.1.0.dist-info/METADATA,sha256=QsFgB89UjLFlONWWcgWvCaxhlSCgK535OxBXeB_oMrg,1515
6
- heavyedge_dataset-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
7
- heavyedge_dataset-0.1.0.dist-info/top_level.txt,sha256=wpRFI8TlkYFGetc17appkyybauBvzhGGvyueunsdJTc,18
8
- heavyedge_dataset-0.1.0.dist-info/RECORD,,