sarkit-convert 0.1.0 (sarkit_convert-0.1.0.tar.gz)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sarkit_convert-0.1.0/LICENSE +22 -0
- sarkit_convert-0.1.0/PKG-INFO +69 -0
- sarkit_convert-0.1.0/README.md +18 -0
- sarkit_convert-0.1.0/pyproject.toml +117 -0
- sarkit_convert-0.1.0/sarkit_convert/__init__.py +17 -0
- sarkit_convert-0.1.0/sarkit_convert/_utils.py +253 -0
- sarkit_convert-0.1.0/sarkit_convert/_version.py +1 -0
- sarkit_convert-0.1.0/sarkit_convert/csk.py +822 -0
- sarkit_convert-0.1.0/sarkit_convert/iceye.py +885 -0
- sarkit_convert-0.1.0/sarkit_convert/sentinel.py +1589 -0
- sarkit_convert-0.1.0/sarkit_convert/tsx.py +906 -0
- sarkit_convert-0.1.0/tests/core/test_utils.py +209 -0
- sarkit_convert-0.1.0/tests/cosmo/test_csk.py +10 -0
- sarkit_convert-0.1.0/tests/iceye/test_iceye.py +24 -0
- sarkit_convert-0.1.0/tests/sentinel/test_sentinel.py +24 -0
- sarkit_convert-0.1.0/tests/tsx/test_tsx.py +22 -0
sarkit_convert-0.1.0/LICENSE
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) 2025 Valkyrie Systems Corporation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
sarkit_convert-0.1.0/PKG-INFO
@@ -0,0 +1,69 @@
+Metadata-Version: 2.1
+Name: sarkit-convert
+Version: 0.1.0
+Summary: Python library for converting SAR data to standard formats.
+Author-Email: Valkyrie Systems Corporation <info@govsco.com>
+License: MIT
+Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Scientific/Engineering
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.11
+Requires-Dist: lxml>=5.1.0
+Requires-Dist: numpy>=1.26.3
+Requires-Dist: sarkit[verification]>=0.6.0
+Provides-Extra: iceye
+Requires-Dist: h5py>=3.12.1; extra == "iceye"
+Requires-Dist: python-dateutil>=2.9.0; extra == "iceye"
+Provides-Extra: cosmo
+Requires-Dist: h5py>=3.12.1; extra == "cosmo"
+Requires-Dist: python-dateutil>=2.9.0; extra == "cosmo"
+Requires-Dist: scipy>=1.15.1; extra == "cosmo"
+Requires-Dist: shapely>=2.0.2; extra == "cosmo"
+Provides-Extra: tsx
+Requires-Dist: lxml>=5.1.0; extra == "tsx"
+Requires-Dist: python-dateutil>=2.9.0; extra == "tsx"
+Requires-Dist: sarkit[verification]>=0.5.0; extra == "tsx"
+Requires-Dist: scipy>=1.15.1; extra == "tsx"
+Provides-Extra: sentinel
+Requires-Dist: python-dateutil>=2.9.0; extra == "sentinel"
+Requires-Dist: scipy>=1.15.1; extra == "sentinel"
+Requires-Dist: tifffile>=2025.5.10; extra == "sentinel"
+Provides-Extra: all
+Requires-Dist: sarkit-convert[iceye]; extra == "all"
+Requires-Dist: sarkit-convert[cosmo]; extra == "all"
+Requires-Dist: sarkit-convert[tsx]; extra == "all"
+Requires-Dist: sarkit-convert[sentinel]; extra == "all"
+Provides-Extra: dev-lint
+Requires-Dist: ruff>=0.3.0; extra == "dev-lint"
+Requires-Dist: mypy>=1.8.0; extra == "dev-lint"
+Requires-Dist: types-python-dateutil>=2.9.0; extra == "dev-lint"
+Provides-Extra: dev-test
+Requires-Dist: pytest>=7.4.4; extra == "dev-test"
+Provides-Extra: dev
+Requires-Dist: sarkit-convert[dev-lint,dev-test]; extra == "dev"
+Description-Content-Type: text/markdown
+
+<div align="center">
+
+<img src="https://raw.githubusercontent.com/ValkyrieSystems/sarkit/main/docs/source/_static/sarkit_logo.png" width=200>
+
+</div>
+
+**sarkit-convert** is a Python library for converting SAR data to standard formats.
+
+## License
+This repository is licensed under the [MIT license](./LICENSE).
+
+A few tips for getting started using [PDM](https://pdm-project.org/en/latest/) are below:
+
+
+```shell
+$ pdm install -G:all # install SARkit-convert with optional & dev dependencies
+$ pdm run nox # run lint and tests
+```
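
Usage note: the extras declared above (`iceye`, `cosmo`, `tsx`, `sentinel`, `all`, and the `dev-*` groups) are ordinary environment-marker requirements, so an installed copy exposes them through the standard library. A minimal sketch, assuming `sarkit-convert` 0.1.0 is installed in the current environment:

```python
# Inspect the installed distribution's metadata (the PKG-INFO shown above)
# to see the release version and the extra-gated requirements.
from importlib import metadata

dist = metadata.distribution("sarkit-convert")
print(dist.version)  # "0.1.0" for this release
for req in dist.requires or []:
    print(req)  # e.g. 'h5py>=3.12.1; extra == "iceye"'
```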
sarkit_convert-0.1.0/README.md
@@ -0,0 +1,18 @@
+<div align="center">
+
+<img src="https://raw.githubusercontent.com/ValkyrieSystems/sarkit/main/docs/source/_static/sarkit_logo.png" width=200>
+
+</div>
+
+**sarkit-convert** is a Python library for converting SAR data to standard formats.
+
+## License
+This repository is licensed under the [MIT license](./LICENSE).
+
+A few tips for getting started using [PDM](https://pdm-project.org/en/latest/) are below:
+
+
+```shell
+$ pdm install -G:all # install SARkit-convert with optional & dev dependencies
+$ pdm run nox # run lint and tests
+```
sarkit_convert-0.1.0/pyproject.toml
@@ -0,0 +1,117 @@
+[project]
+name = "sarkit-convert"
+description = "Python library for converting SAR data to standard formats."
+authors = [
+    { name = "Valkyrie Systems Corporation", email = "info@govsco.com" },
+]
+requires-python = ">=3.11"
+readme = "README.md"
+classifiers = [
+    "Development Status :: 2 - Pre-Alpha",
+    "Intended Audience :: Science/Research",
+    "License :: OSI Approved :: MIT License",
+    "Topic :: Scientific/Engineering",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+]
+dynamic = []
+dependencies = [
+    "lxml>=5.1.0",
+    "numpy>=1.26.3",
+    "sarkit[verification]>=0.6.0",
+]
+version = "0.1.0"
+
+[project.license]
+text = "MIT"
+
+[project.optional-dependencies]
+iceye = [
+    "h5py>=3.12.1",
+    "python-dateutil>=2.9.0",
+]
+cosmo = [
+    "h5py>=3.12.1",
+    "python-dateutil>=2.9.0",
+    "scipy>=1.15.1",
+    "shapely>=2.0.2",
+]
+tsx = [
+    "lxml>=5.1.0",
+    "python-dateutil>=2.9.0",
+    "sarkit[verification]>=0.5.0",
+    "scipy>=1.15.1",
+]
+sentinel = [
+    "python-dateutil>=2.9.0",
+    "scipy>=1.15.1",
+    "tifffile>=2025.5.10",
+]
+all = [
+    "sarkit-convert[iceye]",
+    "sarkit-convert[cosmo]",
+    "sarkit-convert[tsx]",
+    "sarkit-convert[sentinel]",
+]
+dev-lint = [
+    "ruff>=0.3.0",
+    "mypy>=1.8.0",
+    "types-python-dateutil>=2.9.0",
+]
+dev-test = [
+    "pytest>=7.4.4",
+]
+dev = [
+    "sarkit-convert[dev-test,dev-lint]",
+]
+
+[dependency-groups]
+test = [
+    "nox>=2024.3.2",
+]
+doc = [
+    "sphinx>=7.2.6",
+    "numpydoc>=1.7.0",
+    "sphinx-rtd-theme>=2.0.0",
+    "sphinxcontrib-autoprogram>=0.1.9",
+]
+
+[build-system]
+requires = [
+    "pdm-backend",
+]
+build-backend = "pdm.backend"
+
+[tool.pdm]
+distribution = true
+
+[tool.pdm.version]
+source = "scm"
+write_to = "sarkit_convert/_version.py"
+write_template = "__version__ = '{}'"
+
+[tool.ruff.lint]
+select = [
+    "E4",
+    "E7",
+    "E9",
+    "F",
+    "I",
+    "N",
+    "NPY",
+    "ISC",
+    "RUF022",
+]
+preview = true
+
+[[tool.mypy.overrides]]
+module = [
+    "h5py.*",
+    "lxml.*",
+    "scipy.*",
+    "shapely.*",
+]
+ignore_missing_imports = true
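
For completeness, the same optional-dependency groups can be read straight from the `pyproject.toml` above with the standard-library TOML parser available on the Python versions this package supports (>= 3.11). A small sketch, assuming it is run from a source checkout containing that file:

```python
# List each extra and the requirements it pulls in, as declared in
# [project.optional-dependencies].
import tomllib

with open("pyproject.toml", "rb") as f:
    project = tomllib.load(f)["project"]

for extra, deps in project["optional-dependencies"].items():
    print(f"{extra}: {', '.join(deps)}")
```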
sarkit_convert-0.1.0/sarkit_convert/__init__.py
@@ -0,0 +1,17 @@
+"""
+======================================
+SARkit-convert (:mod:`sarkit-convert`)
+======================================
+
+The main namespace is almost empty by design.
+
+.. list-table::
+
+   * - ``__version__``
+     - SARkit-convert version string
+
+"""
+
+from sarkit_convert._version import __version__
+
+__all__ = ["__version__"]
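
As the docstring says, the top-level namespace only re-exports `__version__`; the per-sensor converter modules in the file listing (`csk`, `iceye`, `sentinel`, `tsx`) are presumably imported explicitly and rely on their respective extras being installed. A minimal sketch, assuming the package is installed:

```python
# The top-level namespace is intentionally small: only the version string
# is re-exported here.
import sarkit_convert

print(sarkit_convert.__version__)  # "0.1.0" for this release
```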
sarkit_convert-0.1.0/sarkit_convert/_utils.py
@@ -0,0 +1,253 @@
+"""
+=====================================
+Utility functions for SICD converters
+=====================================
+
+Common utility functions for use in SICD converters
+
+"""
+
+import itertools
+
+import numpy as np
+import numpy.polynomial.polynomial as npp
+import sarkit.wgs84
+
+RNIIRS_FIT_PARAMETERS = np.array([3.4761, 0.4357], dtype="float64")
+
+
+def fit_state_vectors(
+    fit_time_range, times, positions, velocities=None, accelerations=None, order=5
+):
+    times = np.asarray(times)
+    positions = np.asarray(positions)
+    knots_per_state = 1
+    if velocities is not None:
+        velocities = np.asarray(velocities)
+        knots_per_state += 1
+    if accelerations is not None:
+        accelerations = np.asarray(accelerations)
+        knots_per_state += 1
+
+    num_coefs = order + 1
+    states_needed = int(np.ceil(num_coefs / knots_per_state))
+    if states_needed > times.size:
+        raise ValueError("Not enough state vectors")
+    start_state = max(np.sum(times < fit_time_range[0]) - 1, 0)
+    end_state = min(np.sum(times < fit_time_range[1]) + 1, times.size)
+    while end_state - start_state < states_needed:
+        start_state = max(start_state - 1, 0)
+        end_state = min(end_state + 1, times.size)
+
+    rnc = np.arange(num_coefs)
+    used_states = slice(start_state, end_state)
+    used_times = times[used_states][:, np.newaxis]
+    independent_stack = [used_times**rnc]
+    dependent_stack = [positions[used_states, :]]
+    if velocities is not None:
+        independent_stack.append(rnc * used_times ** (rnc - 1).clip(0))
+        dependent_stack.append(velocities[used_states, :])
+    if accelerations is not None:
+        independent_stack.append(rnc * (rnc - 1) * used_times ** (rnc - 2).clip(0))
+        dependent_stack.append(accelerations[used_states, :])
+
+    dependent = np.stack(dependent_stack, axis=-2)
+    independent = np.stack(independent_stack, axis=-2)
+    return np.linalg.lstsq(
+        independent.reshape(-1, independent.shape[-1]),
+        dependent.reshape(-1, dependent.shape[-1]),
+        rcond=-1,
+    )[0]
+
+
+def polyfit2d(x, y, z, order1, order2):
+    """Fits 2d polynomials to data."""
+    if x.ndim != 1 or y.ndim != 1:
+        raise ValueError("Expected x and y to be one dimensional")
+    if not 0 < z.ndim <= 2:
+        raise ValueError("Expected z to be one or two dimensional")
+    if not x.shape[0] == y.shape[0] == z.shape[0]:
+        raise ValueError("Expected x, y, z to have same leading dimension size")
+    vander = npp.polyvander2d(x, y, (order1, order2))
+    scales = np.sqrt(np.square(vander).sum(0))
+    coefs_flat = (np.linalg.lstsq(vander / scales, z, rcond=-1)[0].T / scales).T
+    return coefs_flat.reshape(order1 + 1, order2 + 1)
+
+
+def polyfit2d_tol(x, y, z, max_order_x, max_order_y, tol, strict_tol=False):
+    """Fits 2D polys of minimum order to bring the maximum residual under tol.
+
+    Args
+    ----
+    x: array-like
+        First independent variable values. One dimensional.
+    y: array-like
+        Second independent variable values. One dimensional.
+    z: array-like
+        Dependent variable values. Leading dimension must have same size as `x` and `y`.
+    max_order_x: int
+        The maximum order in `x` to consider
+    max_order_y: int
+        The maximum order in `y` to consider
+    tol: float
+        The maximum residual requested.
+    strict_tol: bool
+        ``True`` if an exception should be raised if `tol` is not met with allowed orders.
+
+        If ``False``, return best fitting polynomial of allowed order.
+
+    Returns
+    -------
+    poly
+        2d polynomials of common orders no greater than `(max_order_x, max_order_y)`.
+
+    Raises
+    ------
+    `ValueError`
+        If `strict_tol` and tolerance is not reached.
+
+    """
+    orders = sorted(
+        list(itertools.product(range(max_order_x + 1), range(max_order_y + 1))),
+        key=lambda x: (x[0] + 1) * (x[1] + 1),
+    )
+    best = None
+    for order_x, order_y in orders:
+        poly = polyfit2d(x, y, z, order_x, order_y)
+        resid = np.abs(z - np.moveaxis(npp.polyval2d(x, y, poly), 0, -1)).max()
+        if resid <= tol:
+            return poly
+        if best is None or resid < best[1]:
+            best = (poly, resid)
+    if strict_tol:
+        raise ValueError("Max order exceeded before tolerance was reached")
+    return best[0]
+
+
+def broadening_from_amp(amp_vals, threshold_db=None):
+    """Compute the broadening factor from amplitudes
+
+    Parameters
+    ----------
+    amp_vals: array-like
+        window amplitudes
+    threshold_db: float, optional
+        threshold to use to compute broadening (Default: 10*log10(0.5))
+
+    Returns
+    -------
+    float
+
+    """
+    if threshold_db is None:
+        threshold = np.sqrt(0.5)
+    else:
+        threshold = 10 ** (threshold_db / 20)
+    amp_vals = np.asarray(amp_vals)
+    fft_size = 2 ** int(np.ceil(np.log2(amp_vals.size * 10000)))
+    impulse_response = np.abs(np.fft.fft(amp_vals, fft_size))
+    impulse_response /= impulse_response.max()
+    width = (impulse_response[: fft_size // 2] < threshold).argmax() + (
+        impulse_response[-1 : fft_size // 2 : -1] > threshold
+    ).argmin()
+
+    return width / fft_size * amp_vals.size
+
+
+def _get_sigma0_noise(xml_helper):
+    """Calculate the absolute noise estimate, in sigma0 power units."""
+
+    if xml_helper.element_tree.find("./{*}Radiometric/{*}SigmaZeroSFPoly") is None:
+        raise ValueError(
+            "Radiometric.SigmaZeroSFPoly is not populated, so no sigma0 noise estimate can be derived."
+        )
+    if (
+        xml_helper.load("./{*}Radiometric/{*}NoiseLevel/{*}NoiseLevelType")
+        != "ABSOLUTE"
+    ):
+        raise ValueError(
+            "Radiometric.NoiseLevel.NoiseLevelType is not `ABSOLUTE` so no noise estimate can be derived."
+        )
+
+    noisepoly = xml_helper.load("./{*}Radiometric/{*}NoiseLevel/{*}NoisePoly")
+    scp_noise_db = noisepoly[0, 0]
+    scp_noise = 10 ** (scp_noise_db / 10)
+
+    # convert to SigmaZero value
+    sigma_zero_sf = xml_helper.load("./{*}Radiometric/{*}SigmaZeroSFPoly")
+    scp_noise *= sigma_zero_sf[0, 0]
+
+    return scp_noise
+
+
+def _get_default_signal_estimate(xml_helper):
+    """Gets default signal for use in the RNIIRS calculation.
+
+    This will be 1.0 for copolar (or unknown) collections, and 0.25 for cross-pole collections."""
+
+    pol = xml_helper.load("./{*}ImageFormation/{*}TxRcvPolarizationProc")
+    if pol is None or ":" not in pol:
+        return 1.0
+
+    pols = pol.split(":")
+
+    return 1.0 if pols[0] == pols[1] else 0.25
+
+
+def _estimate_rniirs(information_density):
+    """Calculate an RNIIRS estimate from the information density or Shannon-Hartley channel capacity.
+
+    This mapping has been empirically determined by fitting Shannon-Hartley channel
+    capacity to RNIIRS for some sample images.
+
+    To maintain positivity of the estimated rniirs, this transitions to a linear
+    model.
+
+    """
+    a = RNIIRS_FIT_PARAMETERS
+    iim_transition = np.exp(1 - np.log(2) * a[0] / a[1])
+    slope = a[1] / (iim_transition * np.log(2))
+
+    if not isinstance(information_density, np.ndarray):
+        information_density = np.array(information_density, dtype="float64")
+    orig_ndim = information_density.ndim
+    if orig_ndim == 0:
+        information_density = np.reshape(information_density, (1,))
+
+    out = np.empty(information_density.shape, dtype="float64")
+    mask = information_density > iim_transition
+    mask_other = ~mask
+    if np.any(mask):
+        out[mask] = a[0] + a[1] * np.log2(information_density[mask])
+    if np.any(mask_other):
+        out[mask_other] = slope * information_density[mask_other]
+
+    if orig_ndim == 0:
+        return float(out[0])
+    return out
+
+
+def get_rniirs_estimate(xml_helper):
+    """This calculates the value(s) for RNIIRS and information density for SICD, according to the RGIQE."""
+    scp_noise = _get_sigma0_noise(xml_helper)
+    signal = _get_default_signal_estimate(xml_helper)
+
+    u_row = xml_helper.load("./{*}Grid/{*}Row/{*}UVectECF")
+    u_col = xml_helper.load("./{*}Grid/{*}Col/{*}UVectECF")
+    ipn = np.cross(u_row, u_col)
+    u_ipn = ipn / np.linalg.norm(ipn)
+
+    scp_llh = xml_helper.load("./{*}GeoData/{*}SCP/{*}LLH")
+    u_gpn = sarkit.wgs84.up(scp_llh)
+
+    bw_sf = np.dot(u_gpn, u_ipn)
+    bw_area = abs(
+        xml_helper.load("./{*}Grid/{*}Row/{*}ImpRespBW")
+        * xml_helper.load("./{*}Grid/{*}Col/{*}ImpRespBW")
+        * bw_sf
+    )
+
+    inf_density = float(bw_area * np.log2(1 + signal / scp_noise))
+    rniirs = float(_estimate_rniirs(inf_density))
+
+    return inf_density, rniirs
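
To make the helpers above concrete, here is a small usage sketch with synthetic inputs; it reaches into the private `_utils` module purely for illustration, and the data are made up:

```python
import numpy as np
import numpy.polynomial.polynomial as npp

from sarkit_convert._utils import broadening_from_amp, polyfit2d_tol

# polyfit2d_tol tries low-order 2D fits first and returns the first one whose
# maximum residual is within tol; z = 1 + 2*x + 3*x*y is reproduced exactly by
# an order-(1, 1) surface.
rng = np.random.default_rng(seed=0)
x = rng.uniform(-1.0, 1.0, 200)
y = rng.uniform(-1.0, 1.0, 200)
z = 1.0 + 2.0 * x + 3.0 * x * y
poly = polyfit2d_tol(x, y, z, max_order_x=3, max_order_y=3, tol=1e-8)
print(poly.shape)  # (2, 2)
print(np.abs(z - npp.polyval2d(x, y, poly)).max())  # far below tol

# broadening_from_amp measures the -3 dB impulse-response width implied by a
# window: roughly 0.89 for uniform weighting, about 1.3 for a Hamming window.
print(broadening_from_amp(np.ones(64)))
print(broadening_from_amp(np.hamming(64)))
```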
sarkit_convert-0.1.0/sarkit_convert/_version.py
@@ -0,0 +1 @@
+__version__ = '0.1.0'
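
Closing note on the RNIIRS mapping in `_utils.py`: above a transition information density the estimate follows the empirical log fit `a0 + a1 * log2(density)`, and below it a line through the origin keeps the estimate positive. A hypothetical spot check against the private helper, assuming the package is importable:

```python
import numpy as np

from sarkit_convert._utils import RNIIRS_FIT_PARAMETERS, _estimate_rniirs

a0, a1 = RNIIRS_FIT_PARAMETERS
# Well above the transition density, the estimate is the log fit itself.
density = 100.0
assert np.isclose(_estimate_rniirs(density), a0 + a1 * np.log2(density))
# Far below it, the linear branch keeps the result small but positive.
print(_estimate_rniirs(1e-6))
```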