shancx 1.9.33.170-py3-none-any.whl → 1.9.33.171-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shancx/NN/__init__.py +26 -0
- {shancx-1.9.33.170.dist-info → shancx-1.9.33.171.dist-info}/METADATA +1 -1
- {shancx-1.9.33.170.dist-info → shancx-1.9.33.171.dist-info}/RECORD +5 -9
- shancx/H9/__init__.py +0 -126
- shancx/H9/ahi_read_hsd.py +0 -877
- shancx/H9/ahisearchtable.py +0 -298
- shancx/H9/geometry.py +0 -2439
- {shancx-1.9.33.170.dist-info → shancx-1.9.33.171.dist-info}/WHEEL +0 -0
- {shancx-1.9.33.170.dist-info → shancx-1.9.33.171.dist-info}/top_level.txt +0 -0
shancx/H9/ahi_read_hsd.py
DELETED
@@ -1,877 +0,0 @@

# -*- coding:utf-8 -*-
'''
@Project : fypy

@File : ahi_read_hsd.py

@Modify Time : 2022/11/10 14:45

@Author : fypy Team

@Version : 1.0

@Description :

'''
import os
import time
import numpy as np
from datetime import timedelta
import datetime
import tempfile
import bz2
import dask.array as da
import xarray as xr
from contextlib import closing
import shutil
from subprocess import Popen, PIPE
from io import BytesIO
from shancx.H9.geometry import AreaDefinition

try:
    from shutil import which
except ImportError:
    # python 2 - won't be used, but needed for mocking in tests
    which = None

AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5",
                     "6", "7", "8", "9", "10",
                     "11", "12", "13", "14", "15", "16")

# logger = logging.getLogger('ahi_hsd')

# Basic information block:
_BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"),
                             ("blocklength", "<u2"),
                             ("total_number_of_hblocks", "<u2"),
                             ("byte_order", "u1"),
                             ("satellite", "S16"),
                             ("proc_center_name", "S16"),
                             ("observation_area", "S4"),
                             ("other_observation_info", "S2"),
                             ("observation_timeline", "<u2"),
                             ("observation_start_time", "f8"),
                             ("observation_end_time", "f8"),
                             ("file_creation_time", "f8"),
                             ("total_header_length", "<u4"),
                             ("total_data_length", "<u4"),
                             ("quality_flag1", "u1"),
                             ("quality_flag2", "u1"),
                             ("quality_flag3", "u1"),
                             ("quality_flag4", "u1"),
                             ("file_format_version", "S32"),
                             ("file_name", "S128"),
                             ("spare", "S40"),
                             ])

# Data information block
_DATA_INFO_TYPE = np.dtype([("hblock_number", "u1"),
                            ("blocklength", "<u2"),
                            ("number_of_bits_per_pixel", "<u2"),
                            ("number_of_columns", "<u2"),
                            ("number_of_lines", "<u2"),
                            ("compression_flag_for_data", "u1"),
                            ("spare", "S40"),
                            ])

# Projection information block
# See footnote 2; LRIT/HRIT Global Specification Section 4.4, CGMS, 1999)
_PROJ_INFO_TYPE = np.dtype([("hblock_number", "u1"),
                            ("blocklength", "<u2"),
                            ("sub_lon", "f8"),
                            ("CFAC", "<u4"),
                            ("LFAC", "<u4"),
                            ("COFF", "f4"),
                            ("LOFF", "f4"),
                            ("distance_from_earth_center", "f8"),
                            ("earth_equatorial_radius", "f8"),
                            ("earth_polar_radius", "f8"),
                            ("req2_rpol2_req2", "f8"),
                            ("rpol2_req2", "f8"),
                            ("req2_rpol2", "f8"),
                            ("coeff_for_sd", "f8"),
                            # Note: processing center use only:
                            ("resampling_types", "<i2"),
                            # Note: processing center use only:
                            ("resampling_size", "<i2"),
                            ("spare", "S40"),
                            ])

# Navigation information block
_NAV_INFO_TYPE = np.dtype([("hblock_number", "u1"),
                           ("blocklength", "<u2"),
                           ("navigation_info_time", "f8"),
                           ("SSP_longitude", "f8"),
                           ("SSP_latitude", "f8"),
                           ("distance_earth_center_to_satellite", "f8"),
                           ("nadir_longitude", "f8"),
                           ("nadir_latitude", "f8"),
                           ("sun_position", "f8", (3,)),
                           ("moon_position", "f8", (3,)),
                           ("spare", "S40"),
                           ])

# Calibration information block
_CAL_INFO_TYPE = np.dtype([("hblock_number", "u1"),
                           ("blocklength", "<u2"),
                           ("band_number", "<u2"),
                           ("central_wave_length", "f8"),
                           ("valid_number_of_bits_per_pixel", "<u2"),
                           ("count_value_error_pixels", "<u2"),
                           ("count_value_outside_scan_pixels", "<u2"),
                           ("gain_count2rad_conversion", "f8"),
                           ("offset_count2rad_conversion", "f8"),
                           ])

# Infrared band (Band No. 7 – 16)
# (Band No. 2 – 5: backup operation (See Table 4 bb))
_IRCAL_INFO_TYPE = np.dtype([("c0_rad2tb_conversion", "f8"),
                             ("c1_rad2tb_conversion", "f8"),
                             ("c2_rad2tb_conversion", "f8"),
                             ("c0_tb2rad_conversion", "f8"),
                             ("c1_tb2rad_conversion", "f8"),
                             ("c2_tb2rad_conversion", "f8"),
                             ("speed_of_light", "f8"),
                             ("planck_constant", "f8"),
                             ("boltzmann_constant", "f8"),
                             ("spare", "S40"),
                             ])

# Visible, near-infrared band (Band No. 1 – 6)
# (Band No. 1: backup operation (See Table 4 bb))
_VISCAL_INFO_TYPE = np.dtype([("coeff_rad2albedo_conversion", "f8"),
                              ("coeff_update_time", "f8"),
                              ("cali_gain_count2rad_conversion", "f8"),
                              ("cali_offset_count2rad_conversion", "f8"),
                              ("spare", "S80"),
                              ])

# 6 Inter-calibration information block
_INTER_CALIBRATION_INFO_TYPE = np.dtype([
    ("hblock_number", "u1"),
    ("blocklength", "<u2"),
    ("gsics_calibration_intercept", "f8"),
    ("gsics_calibration_slope", "f8"),
    ("gsics_calibration_coeff_quadratic_term", "f8"),
    ("gsics_std_scn_radiance_bias", "f8"),
    ("gsics_std_scn_radiance_bias_uncertainty", "f8"),
    ("gsics_std_scn_radiance", "f8"),
    ("gsics_correction_starttime", "f8"),
    ("gsics_correction_endtime", "f8"),
    ("gsics_radiance_validity_upper_lim", "f4"),
    ("gsics_radiance_validity_lower_lim", "f4"),
    ("gsics_filename", "S128"),
    ("spare", "S56"),
])

# 7 Segment information block
_SEGMENT_INFO_TYPE = np.dtype([
    ("hblock_number", "u1"),
    ("blocklength", "<u2"),
    ("total_number_of_segments", "u1"),
    ("segment_sequence_number", "u1"),
    ("first_line_number_of_image_segment", "u2"),
    ("spare", "S40"),
])

# 8 Navigation correction information block
_NAVIGATION_CORRECTION_INFO_TYPE = np.dtype([
    ("hblock_number", "u1"),
    ("blocklength", "<u2"),
    ("center_column_of_rotation", "f4"),
    ("center_line_of_rotation", "f4"),
    ("amount_of_rotational_correction", "f8"),
    ("numof_correction_info_data", "<u2"),
])

# 9 Observation time information block
_OBS_TIME_INFO_TYPE = np.dtype([
    ("hblock_number", "u1"),
    ("blocklength", "<u2"),
    ("number_of_observation_times", "<u2"),
])

# 10 Error information block
_ERROR_INFO_TYPE = np.dtype([
    ("hblock_number", "u1"),
    ("blocklength", "<u4"),
    ("number_of_error_info_data", "<u2"),
])

# 11 Spare block
_SPARE_TYPE = np.dtype([
    ("hblock_number", "u1"),
    ("blocklength", "<u2"),
    ("spare", "S256")
])



class ahi_read_hsd :

    def readhsd(self, hsdname, segnum):
        self.segment_number = segnum
        # self.segtotal = segtotal
        # self.data = None
        self.is_zipped = False

        self._unzipped = self.unzip_file(hsdname)
        if self._unzipped:
            # But if it is, set the filename to point to unzipped temp file
            self.is_zipped = True
            self.filename = self._unzipped
        else:
            self.filename = hsdname

        try:
            with open(self.filename) as fd:
                self.basic_info = np.fromfile(fd,
                                              dtype=_BASIC_INFO_TYPE,
                                              count=1)
                self.data_info = np.fromfile(fd,
                                             dtype=_DATA_INFO_TYPE,
                                             count=1)
                self.proj_info = np.fromfile(fd,
                                             dtype=_PROJ_INFO_TYPE,
                                             count=1)[0]
                self.nav_info = np.fromfile(fd,
                                            dtype=_NAV_INFO_TYPE,
                                            count=1)[0]
                fd.close()
        except BaseException as e :
            print(e)
            # os.remove(self.filename)
            return None

        self.platform_name = np2str(self.basic_info['satellite'])
        self.sensor = 'ahi'
        # self.segment_number = filename_info['segment']
        # self.total_segments = filename_info['total_segments']
        self.observation_area = np2str(self.basic_info['observation_area'])

        user_calibration=None
        calib_mode = 'NOMINAL'
        calib_mode_choices = ('NOMINAL', 'UPDATE')
        if calib_mode.upper() not in calib_mode_choices:
            raise ValueError('Invalid calibration mode: {}. Choose one of {}'.format(
                calib_mode, calib_mode_choices))

        self.calib_mode = calib_mode.upper()
        self.user_calibration = user_calibration

        with open(self.filename, "rb") as fp_:
            self._header = self._read_header(fp_)
            res = self._read_data(fp_, self._header)
            fp_.close()
        data = self.read_band(res)

        return data.values

    def read_band(self, res):

        res = self._mask_invalid(data=res, header=self._header)
        # Calibrate
        bandnum = self._header["block5"]['band_number'][0]
        if bandnum < 7:
            res = self.calibrate(res, 'reflectance', )
        else:
            res = self.calibrate(res, 'brightness_temperature')

        # Update metadata
        new_info = dict(
            # units=info['units'],
            # standard_name=info['standard_name'],
            # wavelength=info['wavelength'],
            resolution='resolution',
            # id=key,
            # name=key['name'],
            # scheduled_time=self.scheduled_time,
            platform_name=self.platform_name,
            sensor=self.sensor,
            satellite_longitude=float(self.nav_info['SSP_longitude']),
            satellite_latitude=float(self.nav_info['SSP_latitude']),
            satellite_altitude=float(self.nav_info['distance_earth_center_to_satellite'] -
                                     self.proj_info['earth_equatorial_radius']) * 1000,
            orbital_parameters={
                'projection_longitude': float(self.proj_info['sub_lon']),
                'projection_latitude': 0.,
                'projection_altitude': float(self.proj_info['distance_from_earth_center'] -
                                             self.proj_info['earth_equatorial_radius']) * 1000,
                # 'satellite_actual_longitude': actual_lon,
                # 'satellite_actual_latitude': actual_lat,
                # 'satellite_actual_altitude': actual_alt,
                'nadir_longitude': float(self.nav_info['nadir_longitude']),
                'nadir_latitude': float(self.nav_info['nadir_latitude'])}
        )
        res = xr.DataArray(res, attrs=new_info, dims=['y', 'x'])

        self.get_area_def()

        self.mask_space = True
        # Mask space pixels
        if self.mask_space:
            res = self._mask_space(res)

        return res

    def calibrate(self, data, calibration):
        """Calibrate the data."""
        tic = datetime.datetime.now()

        if calibration == 'counts':
            return data

        if calibration in ['radiance', 'reflectance', 'brightness_temperature']:
            data = self.convert_to_radiance(data)

        if calibration == 'reflectance':
            data = self._vis_calibrate(data)
        elif calibration == 'brightness_temperature':
            data = self._ir_calibrate(data)

        # print("Calibration time " + str(datetime.now() - tic))
        return data

    def convert_to_radiance(self, data):
        """Calibrate to radiance."""
        bnum = self._header["block5"]['band_number'][0]
        # Check calibration mode and select corresponding coefficients
        if self.calib_mode == "UPDATE" and bnum < 7:
            dn_gain = self._header['calibration']["cali_gain_count2rad_conversion"][0]
            dn_offset = self._header['calibration']["cali_offset_count2rad_conversion"][0]
            if dn_gain == 0 and dn_offset == 0:
                print("No valid updated coefficients, fall back to default values.")
                dn_gain = self._header["block5"]["gain_count2rad_conversion"][0]
                dn_offset = self._header["block5"]["offset_count2rad_conversion"][0]
        else:
            dn_gain = self._header["block5"]["gain_count2rad_conversion"][0]
            dn_offset = self._header["block5"]["offset_count2rad_conversion"][0]

        # Assume no user correction
        correction_type = None
        if isinstance(self.user_calibration, dict):
            # Check if we have DN correction coeffs
            if 'type' in self.user_calibration:
                correction_type = self.user_calibration['type']
            else:
                # If not, assume radiance correction
                correction_type = 'RAD'
            if correction_type == 'DN':
                # Replace file calibration with user calibration
                dn_gain, dn_offset = get_user_calibration_factors(self.band_name,
                                                                  self.user_calibration)
            elif correction_type == 'RAD':
                user_slope, user_offset = get_user_calibration_factors(self.band_name,
                                                                       self.user_calibration)

        data = (data * dn_gain + dn_offset).clip(0)
        # If using radiance correction factors from GSICS or similar, apply here
        if correction_type == 'RAD':
            data = apply_rad_correction(data, user_slope, user_offset)
        return data

    def _vis_calibrate(self, data):
        """Visible channel calibration only."""
        coeff = self._header["calibration"]["coeff_rad2albedo_conversion"]
        return (data * coeff * 100).clip(0)

    def _ir_calibrate(self, data):
        """IR calibration."""
        # No radiance -> no temperature
        data = da.where(data == 0, np.float32(np.nan), data)

        cwl = self._header['block5']["central_wave_length"][0] * 1e-6
        c__ = self._header['calibration']["speed_of_light"][0]
        h__ = self._header['calibration']["planck_constant"][0]
        k__ = self._header['calibration']["boltzmann_constant"][0]
        a__ = (h__ * c__) / (k__ * cwl)

        b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1

        Te_ = a__ / da.log(b__)

        c0_ = self._header['calibration']["c0_rad2tb_conversion"][0]
        c1_ = self._header['calibration']["c1_rad2tb_conversion"][0]
        c2_ = self._header['calibration']["c2_rad2tb_conversion"][0]

        return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0)

    def _read_header(self, fp_):
        """Read header."""
        header = {}

        fpos = 0
        header['block1'] = np.fromfile(
            fp_, dtype=_BASIC_INFO_TYPE, count=1)
        fpos = fpos + int(header['block1']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block1')
        fp_.seek(fpos, 0)
        header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1)
        fpos = fpos + int(header['block2']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block2')
        fp_.seek(fpos, 0)
        header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1)
        fpos = fpos + int(header['block3']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block3')
        fp_.seek(fpos, 0)
        header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1)
        fpos = fpos + int(header['block4']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block4')
        fp_.seek(fpos, 0)
        header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1)
        # print("Band number = " + str(header["block5"]['band_number'][0]))
        # print('Time_interval: %s - %s', str(self.start_time), str(self.end_time))
        band_number = header["block5"]['band_number'][0]
        if band_number < 7:
            cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1)
        else:
            cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1)
        fpos = fpos + int(header['block5']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block5')
        fp_.seek(fpos, 0)

        header['calibration'] = cal

        header["block6"] = np.fromfile(
            fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1)
        fpos = fpos + int(header['block6']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block6')
        fp_.seek(fpos, 0)
        header["block7"] = np.fromfile(
            fp_, dtype=_SEGMENT_INFO_TYPE, count=1)
        fpos = fpos + int(header['block7']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block7')
        fp_.seek(fpos, 0)
        header["block8"] = np.fromfile(
            fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1)
        # 8 The navigation corrections:
        ncorrs = header["block8"]['numof_correction_info_data'][0]
        dtype = np.dtype([
            ("line_number_after_rotation", "<u2"),
            ("shift_amount_for_column_direction", "f4"),
            ("shift_amount_for_line_direction", "f4"),
        ])
        corrections = []
        for _i in range(ncorrs):
            corrections.append(np.fromfile(fp_, dtype=dtype, count=1))
        fpos = fpos + int(header['block8']['blocklength'])
        self._check_fpos(fp_, fpos, 40, 'block8')
        fp_.seek(fpos, 0)
        header['navigation_corrections'] = corrections
        header["block9"] = np.fromfile(fp_,
                                       dtype=_OBS_TIME_INFO_TYPE,
                                       count=1)
        numobstimes = header["block9"]['number_of_observation_times'][0]

        dtype = np.dtype([
            ("line_number", "<u2"),
            ("observation_time", "f8"),
        ])
        lines_and_times = []
        for _i in range(numobstimes):
            lines_and_times.append(np.fromfile(fp_,
                                               dtype=dtype,
                                               count=1))
        header['observation_time_information'] = lines_and_times
        fpos = fpos + int(header['block9']['blocklength'])
        self._check_fpos(fp_, fpos, 40, 'block9')
        fp_.seek(fpos, 0)

        header["block10"] = np.fromfile(fp_,
                                        dtype=_ERROR_INFO_TYPE,
                                        count=1)
        dtype = np.dtype([
            ("line_number", "<u2"),
            ("numof_error_pixels_per_line", "<u2"),
        ])
        num_err_info_data = header["block10"][
            'number_of_error_info_data'][0]
        err_info_data = []
        for _i in range(num_err_info_data):
            err_info_data.append(np.fromfile(fp_, dtype=dtype, count=1))
        header['error_information_data'] = err_info_data
        fpos = fpos + int(header['block10']['blocklength'])
        self._check_fpos(fp_, fpos, 40, 'block10')
        fp_.seek(fpos, 0)

        header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1)
        fpos = fpos + int(header['block11']['blocklength'])
        self._check_fpos(fp_, fpos, 0, 'block11')
        fp_.seek(fpos, 0)

        return header

    def _read_data(self, fp_, header):
        """Read data block."""

        nlines = int(header["block2"]['number_of_lines'][0])
        ncols = int(header["block2"]['number_of_columns'][0])
        return da.from_array(np.memmap(self.filename, offset=fp_.tell(),
                                       dtype='<u2', shape=(nlines, ncols), mode='r'),
                             chunks=4096)

    def unzip_file(self, filename, tmppath=None):
        from subprocess import Popen, PIPE
        from io import BytesIO
        from contextlib import closing
        import shutil
        import bz2

        try:
            from shutil import which
        except ImportError:
            # python 2 - won't be used, but needed for mocking in tests
            which = None

        """Unzip the file if file is bzipped = ending with 'bz2'."""
        try:
            if filename.endswith('bz2'):
                # fdn, tmpfilepath = tempfile.mkstemp()
                if tmppath is None :
                    tmpfilepath = filename.replace('.bz2','')
                else:
                    tmpfilepath = os.path.join(tmppath, os.path.basename(filename).replace('.bz2',''))

                if os.path.isfile(tmpfilepath) :
                    return tmpfilepath
                # print("解压bz2文件【%s】" %(tmpfilepath))
                # try pbzip2
                pbzip = which('pbzip2')
                # Run external pbzip2
                if pbzip is not None:
                    n_thr = os.environ.get('OMP_NUM_THREADS')
                    if n_thr:
                        runner = [pbzip,
                                  '-dc',
                                  '-p'+str(n_thr),
                                  filename]
                    else:
                        runner = [pbzip,
                                  '-dc',
                                  filename]
                    p = Popen(runner, stdout=PIPE, stderr=PIPE)
                    stdout = BytesIO(p.communicate()[0])
                    status = p.returncode
                    if status != 0:
                        raise IOError("pbzip2 error '%s', failed, status=%d"
                                      % (filename, status))
                    with closing(open(tmpfilepath, 'wb')) as ofpt:
                        try:
                            stdout.seek(0)
                            shutil.copyfileobj(stdout, ofpt)
                        except IOError:
                            import traceback
                            traceback.print_exc()
                            print("Failed to read bzipped file %s",
                                  str(filename))
                            os.remove(tmpfilepath)

                    return tmpfilepath

                # Otherwise, fall back to the original method
                bz2file = bz2.BZ2File(filename)
                with closing(open(tmpfilepath, 'wb')) as ofpt:
                    try:
                        ofpt.write(bz2file.read())
                    except IOError:
                        import traceback
                        traceback.print_exc()
                        print("Failed to read bzipped file %s", str(filename))
                        # os.remove(tmpfilepath)
                        return None
                return tmpfilepath
            else:
                return None
        except BaseException :
            return None

    def _check_fpos(self, fp_, fpos, offset, block):
        """Check file position matches blocksize."""
        if (fp_.tell() + offset != fpos):
            print("Actual "+block+" header size does not match expected")
        return

    def _mask_invalid(self, data, header):
        """Mask invalid data."""

        invalid = da.logical_or(data == header['block5']["count_value_outside_scan_pixels"][0],
                                data == header['block5']["count_value_error_pixels"][0])
        return da.where(invalid, np.float32(np.nan), data)

    def _mask_space(self, data):
        """Mask space pixels."""
        return data.where(get_geostationary_mask(self.area))
        # pass

    @property
    def start_time(self):
        """Get the start time."""
        return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_start_time']))

    @property
    def end_time(self):
        """Get the end time."""
        return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_end_time']))

    def get_area_def(self):
        """Get the area definition."""

        pdict = {}
        pdict['cfac'] = np.uint32(self.proj_info['CFAC'])
        pdict['lfac'] = np.uint32(self.proj_info['LFAC'])
        pdict['coff'] = np.float32(self.proj_info['COFF'])
        pdict['loff'] = -np.float32(self.proj_info['LOFF']) + 1
        pdict['a'] = float(self.proj_info['earth_equatorial_radius'] * 1000)
        pdict['h'] = float(self.proj_info['distance_from_earth_center'] * 1000 - pdict['a'])
        pdict['b'] = float(self.proj_info['earth_polar_radius'] * 1000)
        pdict['ssp_lon'] = float(self.proj_info['sub_lon'])
        pdict['nlines'] = int(self.data_info['number_of_lines'])
        pdict['ncols'] = int(self.data_info['number_of_columns'])
        pdict['scandir'] = 'N2S'

        pdict['loff'] = pdict['loff'] + (self.segment_number * pdict['nlines'])

        aex = get_area_extent(pdict)

        pdict['a_name'] = self.observation_area
        pdict['a_desc'] = "AHI {} area".format(self.observation_area)
        pdict['p_id'] = 'geosh8'

        area = get_area_definition(pdict, aex)

        self.area = area
        return area

    def __del__(self):

        # time.sleep(2)
        # if (self.is_zipped and os.path.exists(self.filename)):
        #     os.remove(self.filename)
        pass




def np2str(value):
    """Convert an `numpy.string_` to str.

    Args:
        value (ndarray): scalar or 1-element numpy array to convert

    Raises:
        ValueError: if value is array larger than 1-element or it is not of
            type `numpy.string_` or it is not a numpy array

    """
    if hasattr(value, 'dtype') and \
            issubclass(value.dtype.type, (np.str_, np.string_, np.object_)) \
            and value.size == 1:
        value = value.item()
        if not isinstance(value, str):
            # python 3 - was scalar numpy array of bytes
            # otherwise python 2 - scalar numpy array of 'str'
            value = value.decode()
        return value
    else:
        raise ValueError("Array is not a string type or is larger than 1")

def get_user_calibration_factors(band_name, correction_dict):
    """Retrieve radiance correction factors from user-supplied dict."""
    if band_name in correction_dict:
        try:
            slope = correction_dict[band_name]['slope']
            offset = correction_dict[band_name]['offset']
        except KeyError:
            raise KeyError("Incorrect correction factor dictionary. You must "
                           "supply 'slope' and 'offset' keys.")
    else:
        # If coefficients not present, warn user and use slope=1, offset=0
        print("WARNING: You have selected radiance correction but "
              " have not supplied coefficients for channel " +
              band_name)
        return 1., 0.

    return slope, offset

def apply_rad_correction(data, slope, offset):
    """Apply GSICS-like correction factors to radiance data."""
    data = (data - offset) / slope
    return data

def get_area_extent(pdict):
    """Get the area extent seen by a geostationary satellite.

    Args:
        pdict: A dictionary containing common parameters:
            nlines: Number of lines in image
            ncols: Number of columns in image
            cfac: Column scaling factor
            lfac: Line scaling factor
            coff: Column offset factor
            loff: Line offset factor
            scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N)
    Returns:
        aex: An area extent for the scene

    """
    # count starts at 1
    cols = 1 - 0.5

    if pdict['scandir'] == 'S2N':
        lines = 0.5 - 1
        scanmult = -1
    else:
        lines = 1 - 0.5
        scanmult = 1
    # Lower left x, y scanning angles in degrees
    ll_x, ll_y = get_xy_from_linecol(lines * scanmult,
                                     cols,
                                     (pdict['loff'], pdict['coff']),
                                     (pdict['lfac'], pdict['cfac']))

    cols += pdict['ncols']
    lines += pdict['nlines']
    # Upper right x, y scanning angles in degrees
    ur_x, ur_y = get_xy_from_linecol(lines * scanmult,
                                     cols,
                                     (pdict['loff'], pdict['coff']),
                                     (pdict['lfac'], pdict['cfac']))
    if pdict['scandir'] == 'S2N':
        ll_y *= -1
        ur_y *= -1

    # Convert degrees to radians and create area extent
    aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict['h'])

    return aex


def get_area_definition(pdict, a_ext):
    """Get the area definition for a geo-sat.

    Args:
        pdict: A dictionary containing common parameters:
            nlines: Number of lines in image
            ncols: Number of columns in image
            ssp_lon: Subsatellite point longitude (deg)
            a: Earth equatorial radius (m)
            b: Earth polar radius (m)
            h: Platform height (m)
            a_name: Area name
            a_desc: Area description
            p_id: Projection id
        a_ext: A four element tuple containing the area extent (scan angle)
            for the scene in radians
    Returns:
        a_def: An area definition for the scene

    .. note::

        The AreaDefinition `proj_id` attribute is being deprecated.

    """
    proj_dict = {'a': float(pdict['a']),
                 'b': float(pdict['b']),
                 'lon_0': float(pdict['ssp_lon']),
                 'h': float(pdict['h']),
                 'proj': 'geos',
                 'units': 'm'}

    a_def = AreaDefinition(
        pdict['a_name'],
        pdict['a_desc'],
        pdict['p_id'],
        proj_dict,
        int(pdict['ncols']),
        int(pdict['nlines']),
        a_ext)

    return a_def

def get_xy_from_linecol(line, col, offsets, factors):
    """Get the intermediate coordinates from line & col.

    Intermediate coordinates are actually the instruments scanning angles.
    """
    loff, coff = offsets
    lfac, cfac = factors
    x__ = float(col - coff) / (float(cfac) / 2 ** 16)
    y__ = float(line - loff) / (float(lfac) / 2 ** 16)

    return x__, y__


def make_ext(ll_x, ur_x, ll_y, ur_y, h):
    """Create the area extent from computed ll and ur.

    Args:
        ll_x: The lower left x coordinate (m)
        ur_x: The upper right x coordinate (m)
        ll_y: The lower left y coordinate (m)
        ur_y: The upper right y coordinate (m)
        h: The satellite altitude above the Earth's surface
    Returns:
        aex: An area extent for the scene

    """
    aex = (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h,
           np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h)

    return aex


def get_geostationary_mask(area):
    """Compute a mask of the earth's shape as seen by a geostationary satellite.

    Args:
        area (pyresample.geometry.AreaDefinition) : Corresponding area
            definition

    Returns:
        Boolean mask, True inside the earth's shape, False outside.

    """
    # Compute projection coordinates at the earth's limb
    h = area.proj_dict['h']
    xmax, ymax = get_geostationary_angle_extent(area)
    xmax *= h
    ymax *= h

    # Compute projection coordinates at the centre of each pixel
    x, y = area.get_proj_coords(chunks=4096)

    # Compute mask of the earth's elliptical shape
    return ((x / xmax) ** 2 + (y / ymax) ** 2) <= 1

def get_geostationary_angle_extent(geos_area):
    """Get the max earth (vs space) viewing angles in x and y."""
    # TODO: take into account sweep_axis_angle parameter

    # get some projection parameters
    try:
        crs = geos_area.crs
        a = crs.ellipsoid.semi_major_metre
        b = crs.ellipsoid.semi_minor_metre
        if np.isnan(b):
            # see https://github.com/pyproj4/pyproj/issues/457
            raise AttributeError("'semi_minor_metre' attribute is not valid "
                                 "in older versions of pyproj.")
    except AttributeError:
        # older versions of pyproj don't have CRS objects
        from pyresample.utils import proj4_radius_parameters
        a, b = proj4_radius_parameters(geos_area.proj_dict)

    req = float(a) / 1000
    rp = float(b) / 1000
    h = float(geos_area.proj_dict['h']) / 1000 + req

    # compute some constants
    aeq = 1 - req**2 / (h ** 2)
    ap_ = 1 - rp**2 / (h ** 2)

    # generate points around the north hemisphere in satellite projection
    # make it a bit smaller so that we stay inside the valid area
    xmax = np.arccos(np.sqrt(aeq))
    ymax = np.arccos(np.sqrt(ap_))
    return xmax, ymax