pthelma 0.99.3.dev0__cp312-cp312-win_amd64.whl
Sign up to get free protection for your applications and to get access to all the features.
- enhydris_api_client/__init__.py +252 -0
- enhydris_cache/__init__.py +5 -0
- enhydris_cache/cli.py +150 -0
- enhydris_cache/enhydris_cache.py +69 -0
- evaporation/__init__.py +4 -0
- evaporation/cli.py +729 -0
- evaporation/evaporation.py +437 -0
- haggregate/__init__.py +5 -0
- haggregate/cli.py +91 -0
- haggregate/haggregate.py +155 -0
- haggregate/regularize.cp312-win_amd64.pyd +0 -0
- haggregate/regularize.pyx +193 -0
- hspatial/__init__.py +4 -0
- hspatial/cli.py +310 -0
- hspatial/hspatial.py +425 -0
- hspatial/test.py +27 -0
- htimeseries/__init__.py +2 -0
- htimeseries/htimeseries.py +574 -0
- htimeseries/timezone_utils.py +44 -0
- pthelma/__init__.py +0 -0
- pthelma/_version.py +16 -0
- pthelma-0.99.3.dev0.dist-info/LICENSE.rst +34 -0
- pthelma-0.99.3.dev0.dist-info/METADATA +54 -0
- pthelma-0.99.3.dev0.dist-info/RECORD +27 -0
- pthelma-0.99.3.dev0.dist-info/WHEEL +5 -0
- pthelma-0.99.3.dev0.dist-info/entry_points.txt +5 -0
- pthelma-0.99.3.dev0.dist-info/top_level.txt +7 -0
hspatial/hspatial.py
ADDED
@@ -0,0 +1,425 @@
|
|
1
|
+
import datetime as dt
|
2
|
+
import os
|
3
|
+
import struct
|
4
|
+
from glob import glob
|
5
|
+
from math import isnan
|
6
|
+
|
7
|
+
import iso8601
|
8
|
+
import numpy as np
|
9
|
+
from affine import Affine
|
10
|
+
from django.contrib.gis.gdal import CoordTransform, SpatialReference
|
11
|
+
from django.contrib.gis.gdal.error import GDALException
|
12
|
+
from django.contrib.gis.geos import Point as GeoDjangoPoint
|
13
|
+
from osgeo import gdal, ogr, osr
|
14
|
+
|
15
|
+
from htimeseries import HTimeseries
|
16
|
+
|
17
|
+
gdal.UseExceptions()
|
18
|
+
|
19
|
+
NODATAVALUE = -(2.0**127)
|
20
|
+
|
21
|
+
|
22
|
+
def coordinates2point(x, y, srid=4326):
    """Return an OGR point at (x, y), georeferenced to the given EPSG srid."""
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG(srid)
    gdal_major_version = int(gdal.__version__.split(".")[0])
    if gdal_major_version > 2:
        # GDAL 3 changed the default axis order; request traditional x,y.
        spatial_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    result = ogr.Geometry(ogr.wkbPoint)
    result.AssignSpatialReference(spatial_ref)
    result.AddPoint(x, y)
    return result
|
31
|
+
|
32
|
+
|
33
|
+
def idw(point, data_layer, alpha=1):
    """Interpolate a value at ``point`` by inverse-distance weighting of the
    stations in ``data_layer``; features whose "value" field is NaN are
    ignored.  ``alpha`` is the exponent of the distance."""
    data_layer.ResetReading()
    stations = [s for s in data_layer if not isnan(s.GetField("value"))]
    dists = np.array([point.Distance(s.GetGeometryRef()) for s in stations])
    station_values = np.array([s.GetField("value") for s in stations])
    at_station = abs(dists) < 1e-3
    if at_station.any():
        # The point practically coincides with one or more stations; give
        # all weight to those and none to the rest.
        inverse = np.where(at_station, 1, 0)
    else:
        inverse = dists ** (-alpha)
    weights = inverse / inverse.sum()
    return (weights * station_values).sum()
|
45
|
+
|
46
|
+
|
47
|
+
def integrate(dataset, data_layer, target_band, funct, kwargs=None):
    """Interpolate a value for each unmasked grid cell and write the result.

    For every cell of ``dataset`` whose band-1 value is nonzero,
    ``funct(point, data_layer, **kwargs)`` is evaluated at the cell centre;
    masked-out cells receive NODATAVALUE.  The resulting array is written to
    ``target_band`` (whose nodata value is set accordingly).
    """
    # Fix: the original used a mutable default argument (kwargs={}).  The
    # effective default is unchanged, so callers are unaffected.
    if kwargs is None:
        kwargs = {}
    mask = dataset.GetRasterBand(1).ReadAsArray() != 0

    # Create an array with the x co-ordinate of each grid point, and
    # one with the y co-ordinate of each grid point
    height, width = mask.shape
    x_left, x_step, d1, y_top, d2, y_step = dataset.GetGeoTransform()
    xcoords = np.arange(x_left + x_step / 2.0, x_left + x_step * width, x_step)
    ycoords = np.arange(y_top + y_step / 2.0, y_top + y_step * height, y_step)
    xarray, yarray = np.meshgrid(xcoords, ycoords)

    # Create a ufunc that makes the interpolation given the above arrays
    def interpolate_one_point(x, y, mask):
        if not mask:
            return np.nan
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(x, y)
        return funct(point, data_layer, **kwargs)

    interpolate = np.vectorize(interpolate_one_point, otypes=[np.float32])

    # Make the calculation
    result = interpolate(xarray, yarray, mask)
    result[np.isnan(result)] = NODATAVALUE
    target_band.SetNoDataValue(NODATAVALUE)
    target_band.WriteArray(result)
|
73
|
+
|
74
|
+
|
75
|
+
def create_ogr_layer_from_timeseries(filenames, epsg, data_source):
    """Create a "stations" point layer in ``data_source``.

    One point feature is created per time series file; its location comes
    from the file's header (WGS84), transformed to ``epsg``, and its
    "filename" attribute records the originating file.
    """
    # Prepare the co-ordinate transformation from WGS84 to epsg
    wgs84 = osr.SpatialReference()
    wgs84.ImportFromEPSG(4326)
    target = osr.SpatialReference()
    target.ImportFromEPSG(epsg)
    if int(gdal.__version__.split(".")[0]) > 2:
        wgs84.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform = osr.CoordinateTransformation(wgs84, target)

    layer = data_source.CreateLayer("stations", target)
    layer.CreateField(ogr.FieldDefn("filename", ogr.OFTString))
    for filename in filenames:
        with open(filename, newline="\n") as stream:
            # The default_tzinfo doesn't matter because we don't care about
            # the data, we only use the location.
            ts = HTimeseries(stream, default_tzinfo=dt.timezone.utc)
        station_point = ogr.Geometry(ogr.wkbPoint)
        station_point.AddPoint(ts.location["abscissa"], ts.location["ordinate"])
        station_point.Transform(transform)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetGeometry(station_point)
        feature.SetField("filename", filename)
        layer.CreateFeature(feature)
    return layer
|
101
|
+
|
102
|
+
|
103
|
+
def _needs_calculation(output_filename, date, stations_layer):
    """
    Used by h_integrate to check whether the output file needs to be calculated
    or not. It does not need to be calculated if it already exists and has been
    calculated from all available data.

    The output file is expected to carry an INPUT_FILES metadata item
    (written by h_integrate) listing the files it was computed from,
    newline-separated.
    """
    # Return immediately if output file does not exist
    if not os.path.exists(output_filename):
        return True

    # Get list of files which were used to calculate the output file
    fp = gdal.Open(output_filename)
    try:
        actual_input_files = fp.GetMetadataItem("INPUT_FILES")
        if actual_input_files is None:
            raise IOError(
                "{} does not contain the metadata item INPUT_FILES".format(
                    output_filename
                )
            )
    finally:
        fp = None  # Close file
    actual_input_files = set(actual_input_files.split("\n"))

    # Get list of files available for calculating the output file
    stations_layer.ResetReading()
    available_input_files = set(
        [
            station.GetField("filename")
            for station in stations_layer
            if os.path.exists(station.GetField("filename"))
        ]
    )

    # Which of these files have not been used?
    unused_files = available_input_files - actual_input_files

    # For each one of these files, check whether it has newly available data.
    # Upon finding one that does, the verdict is made: return True
    for filename in unused_files:
        with open(filename, newline="\n") as f:
            t = HTimeseries(f)
        try:
            # t.data is indexed by timestamp; a non-NaN value at `date`
            # means new usable data exists for this output file.
            value = t.data.loc[date, "value"]
            if not isnan(value):
                return True
        except KeyError:
            continue

    # We were unable to find data that had not already been used
    return False
|
154
|
+
|
155
|
+
|
156
|
+
def h_integrate(
    mask, stations_layer, date, output_filename_prefix, date_fmt, funct, kwargs
):
    """Spatially integrate station values for ``date`` into a GeoTIFF.

    The output file is named ``<output_filename_prefix>-<date>.tif``, where
    the date is formatted with ``date_fmt`` (spaces and colons replaced with
    dashes).  Nothing is done if the file already exists and is up to date
    (see _needs_calculation), or if no station has data for ``date``.
    ``funct`` (e.g. idw) does the point interpolation and receives
    ``kwargs``.  ``mask`` supplies the grid, geotransform and projection.
    """
    datestr = date.strftime(date_fmt).replace(" ", "-").replace(":", "-")
    # Fix: the original passed the already-formatted string through
    # date.strftime() a second time — redundant, and wrong if the formatted
    # date ever contained a "%".  Use the string directly.
    output_filename = "{}-{}.tif".format(output_filename_prefix, datestr)
    if not _needs_calculation(output_filename, date, stations_layer):
        return

    # Read the time series values and add the 'value' attribute to
    # stations_layer. Also determine the unit of measurement.
    stations_layer.CreateField(ogr.FieldDefn("value", ogr.OFTReal))
    input_files = []
    unit_of_measurement = None
    stations_layer.ResetReading()
    for station in stations_layer:
        filename = station.GetField("filename")
        with open(filename, newline="\n") as f:
            t = HTimeseries(f)
        if unit_of_measurement is None and hasattr(t, "unit"):
            unit_of_measurement = t.unit
        try:
            value = t.data.loc[date, "value"]
        except KeyError:
            # This station has no record for `date`.
            value = np.nan
        station.SetField("value", value)
        if not isnan(value):
            input_files.append(filename)
        stations_layer.SetFeature(station)
    if not input_files:
        # No station has data for this date; there is nothing to integrate.
        return

    # Create destination data source
    output = gdal.GetDriverByName("GTiff").Create(
        output_filename, mask.RasterXSize, mask.RasterYSize, 1, gdal.GDT_Float32
    )
    output.SetMetadataItem("TIMESTAMP", date.strftime(date_fmt))
    output.SetMetadataItem("INPUT_FILES", "\n".join(input_files))
    output.SetMetadataItem("UNIT", unit_of_measurement)

    try:
        # Set geotransform and projection in the output data source
        output.SetGeoTransform(mask.GetGeoTransform())
        output.SetProjection(mask.GetProjection())

        # Do the integration
        integrate(mask, stations_layer, output.GetRasterBand(1), funct, kwargs)
    finally:
        # Close the dataset
        output = None
|
207
|
+
|
208
|
+
|
209
|
+
class PassepartoutPoint:
    """Uniform interface for GeoDjango Point and OGR Point."""

    def __init__(self, point):
        # The wrapped point: either a GeoDjango Point or an OGR point.
        self.point = point

    def transform_to(self, target_srs_wkt):
        """Return a new PassepartoutPoint transformed to ``target_srs_wkt``.

        The wrapped point is cloned first, so this object is unmodified.
        """
        point = self.clone(self.point)
        if isinstance(self.point, GeoDjangoPoint):
            # A GeoDjango point without an explicit srs is assumed WGS84.
            source_srs = point.srs or SpatialReference(4326)
            try:
                source_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
            except AttributeError:
                # Older GDAL (<3) has no axis mapping strategy.
                pass
            ct = CoordTransform(source_srs, SpatialReference(target_srs_wkt))
            point.transform(ct)
            return PassepartoutPoint(point)
        else:
            point_sr = point.GetSpatialReference()
            raster_sr = osr.SpatialReference()
            raster_sr.ImportFromWkt(target_srs_wkt)
            if int(gdal.__version__.split(".")[0]) > 2:
                point_sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
                raster_sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
            transform = osr.CoordinateTransformation(point_sr, raster_sr)
            point.Transform(transform)
            return PassepartoutPoint(point)

    def clone(self, original_point):
        """Return a copy of ``original_point`` (the raw point, not wrapped)."""
        if isinstance(original_point, GeoDjangoPoint):
            # Bug fix: srid must be passed as a keyword argument.  As a third
            # positional argument it would be interpreted as the z
            # co-ordinate, because GeoDjango's signature is
            # Point(x, y, z=None, srid=None).
            return GeoDjangoPoint(
                original_point.x, original_point.y, srid=original_point.srid
            )
        else:
            point = ogr.Geometry(ogr.wkbPoint)
            point.AddPoint(original_point.GetX(), original_point.GetY())
            point.AssignSpatialReference(original_point.GetSpatialReference())
            return point

    @property
    def x(self):
        """X co-ordinate, whichever API the wrapped point speaks."""
        try:
            return self.point.x
        except AttributeError:
            return self.point.GetX()

    @property
    def y(self):
        """Y co-ordinate, whichever API the wrapped point speaks."""
        try:
            return self.point.y
        except AttributeError:
            return self.point.GetY()
|
261
|
+
|
262
|
+
|
263
|
+
def extract_point_from_raster(point, data_source, band_number=1):
    """Return floating-point value that corresponds to given point.

    ``point`` may be a GeoDjango Point or an OGR point (see
    PassepartoutPoint).  ``data_source`` may be a GDAL dataset or —
    judging from the attributes used in the fallback branches
    (``srs.wkt``, ``geotransform``, ``bands``) — a GeoDjango
    GDALRaster-like object; TODO confirm against callers.  Returns NaN
    when the pixel holds the band's nodata value.  Raises RuntimeError
    when the point cannot be transformed to the raster's CRS.
    """
    pppoint = PassepartoutPoint(point)

    # Convert point co-ordinates so that they are in same projection as raster
    try:
        target_srs_wkt = data_source.GetProjection()
    except AttributeError:
        # Not a GDAL dataset; use the GeoDjango-style attribute instead.
        target_srs_wkt = data_source.srs.wkt
    try:
        pppoint = pppoint.transform_to(target_srs_wkt)
    except GDALException:
        raise RuntimeError("Couldn't convert point to raster's CRS")
    # A transform can also fail silently, yielding infinite co-ordinates;
    # treat that the same as an exception.
    infinities = (float("inf"), float("-inf"))
    if pppoint.x in infinities or pppoint.y in infinities:
        raise RuntimeError("Couldn't convert point to raster's CRS")

    # Convert geographic co-ordinates to pixel co-ordinates
    try:
        forward_transform = Affine.from_gdal(*data_source.GetGeoTransform())
    except AttributeError:
        forward_transform = Affine.from_gdal(*data_source.geotransform)
    reverse_transform = ~forward_transform
    px, py = reverse_transform * (pppoint.x, pppoint.y)
    px, py = int(px), int(py)

    # Extract pixel value
    try:
        band = data_source.GetRasterBand(band_number)
    except AttributeError:
        # GeoDjango-style rasters index bands from zero.
        band = data_source.bands[band_number - 1]
    try:
        # Read a single pixel as a 4-byte float.
        structval = band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Float32)
    except AttributeError:
        structval = band.data(offset=(px, py), size=(1, 1))
    result = struct.unpack("f", structval)[0]
    try:
        nodata_value = band.GetNoDataValue()
    except AttributeError:
        nodata_value = band.nodata_value
    if result == nodata_value:
        result = float("nan")
    return result
|
306
|
+
|
307
|
+
|
308
|
+
class PointTimeseries:
    """A time series extracted from a set of rasters at a given point.

    Exactly one of the keyword arguments ``filenames`` and ``prefix`` must
    be given: either an explicit list of GeoTIFF files, or a prefix such
    that the files are named ``<prefix>-<date>.tif``.  Other keyword
    arguments: ``date_fmt`` (date format in filenames; inferred when
    omitted), ``start_date``/``end_date`` (inclusive filter, prefix mode),
    and ``default_time`` (aware time of day assumed for date-only
    timestamps; defaults to midnight UTC).
    """

    def __init__(self, point, **kwargs):
        self.point = point
        filenames = kwargs.pop("filenames", None)
        self.prefix = kwargs.pop("prefix", None)
        # filenames and prefix are mutually exclusive, and one is required.
        assert filenames is None or self.prefix is None
        assert filenames is not None or self.prefix is not None
        self.date_fmt = kwargs.pop("date_fmt", None)
        self.start_date = kwargs.pop("start_date", None)
        self.end_date = kwargs.pop("end_date", None)
        self.default_time = kwargs.pop(
            "default_time", dt.time(0, 0, tzinfo=dt.timezone.utc)
        )
        if self.default_time.tzinfo is None:
            raise TypeError("default_time must be aware")
        # Naive start/end dates inherit default_time's time zone.
        if self.start_date and self.start_date.tzinfo is None:
            self.start_date = self.start_date.replace(tzinfo=self.default_time.tzinfo)
        if self.end_date and self.end_date.tzinfo is None:
            self.end_date = self.end_date.replace(tzinfo=self.default_time.tzinfo)
        self.filenames = self._get_filenames(filenames)

    def _get_filenames(self, filenames):
        # In prefix mode, discover the files and filter them by
        # start_date/end_date; otherwise use the explicit list as-is.
        if self.prefix is None:
            return filenames
        filenames = glob(self.prefix + "-*.tif")
        self.filename_format = FilenameWithDateFormat(
            self.prefix, date_fmt=self.date_fmt, tzinfo=self.default_time.tzinfo
        )
        result = []
        for filename in filenames:
            date = self.filename_format.get_date(filename)
            is_after_start_date = (self.start_date is None) or (date >= self.start_date)
            is_before_end_date = (self.end_date is None) or (date <= self.end_date)
            if is_after_start_date and is_before_end_date:
                result.append(filename)
        return result

    def get(self):
        """Extract the time series from the rasters and return it.

        Returns an HTimeseries with one record per raster, indexed by the
        raster's TIMESTAMP metadata item, sorted chronologically.
        """
        result = HTimeseries(default_tzinfo=self.default_time.tzinfo)
        for filename in self.filenames:
            f = gdal.Open(filename)
            try:
                timestamp = self._get_timestamp(f)
                self._get_unit_of_measurement(f, result)
                value = extract_point_from_raster(self.point, f)
                result.data.loc[timestamp, "value"] = value
                result.data.loc[timestamp, "flags"] = ""
            finally:
                f = None  # Close the dataset
        result.data = result.data.sort_index()
        return result

    def _get_timestamp(self, f):
        # Parse the raster's TIMESTAMP metadata item (ISO 8601).
        isostring = f.GetMetadata()["TIMESTAMP"]
        timestamp = iso8601.parse_date(
            isostring, default_timezone=self.default_time.tzinfo
        )
        # A string of at most 10 characters (e.g. "2023-01-31") is a date
        # without a time; combine it with the configured default time.
        if len(isostring) <= 10:
            timestamp = dt.datetime.combine(timestamp.date(), self.default_time)
        return timestamp

    def _get_unit_of_measurement(self, f, ahtimeseries):
        # Copy the raster's UNIT metadata item (if any) to the time series;
        # only the first raster that has one wins.
        if hasattr(ahtimeseries, "unit"):
            return
        unit = f.GetMetadataItem("UNIT")
        if unit is not None:
            ahtimeseries.unit = unit

    def get_cached(self, dest, force=False, version=4):
        """Return the time series, using file ``dest`` as a cache.

        If ``dest`` exists and already has a record for every raster (and
        ``force`` is false), it is read and returned; otherwise the series
        is recomputed with get() and written to ``dest`` (file format
        ``version`` is passed to HTimeseries.write).  Requires prefix mode.
        """
        assert self.prefix
        ts = self._get_saved_timeseries_if_updated_else_none(dest, force)
        if ts is None:
            ts = self.get()
            with open(dest, "w", newline="") as f:
                ts.write(f, format=HTimeseries.FILE, version=version)
        return ts

    def _get_saved_timeseries_if_updated_else_none(self, dest, force):
        if force or not os.path.exists(dest):
            return None
        else:
            return self._get_timeseries_if_file_is_up_to_date_else_none(dest)

    def _get_timeseries_if_file_is_up_to_date_else_none(self, dest):
        # The cache is valid only if it contains a record for the date of
        # every raster currently on disk.
        with open(dest, "r", newline="") as f:
            ts = HTimeseries(f, default_tzinfo=self.default_time.tzinfo)
        for filename in self.filenames:
            if not self.filename_format.get_date(filename) in ts.data.index:
                return None
        return ts
|
398
|
+
|
399
|
+
|
400
|
+
class FilenameWithDateFormat:
    """Parse dates out of filenames shaped like ``<prefix>-<datestr>.tif``.

    When ``date_fmt`` is not supplied, it is inferred from the first date
    string parsed ("%Y-%m-%d-%H-%M" or "%Y-%m-%d") and remembered for
    subsequent calls.
    """

    def __init__(self, prefix, *, date_fmt=None, tzinfo):
        self.prefix = prefix
        self.date_fmt = date_fmt
        self.tzinfo = tzinfo

    def get_date(self, filename):
        """Return the aware datetime encoded in ``filename``."""
        datestr = self._extract_datestr(filename)
        self._ensure_we_have_date_fmt(datestr)
        parsed = dt.datetime.strptime(datestr, self.date_fmt)
        return parsed.replace(tzinfo=self.tzinfo)

    def _ensure_we_have_date_fmt(self, datestr):
        # Infer the format from the number of dashes — but only once.
        if self.date_fmt is None:
            ndashes = datestr.count("-")
            if ndashes == 4:
                self.date_fmt = "%Y-%m-%d-%H-%M"
            elif ndashes == 2:
                self.date_fmt = "%Y-%m-%d"
            else:
                raise ValueError("Invalid date " + datestr)

    def _extract_datestr(self, filename):
        # Strip "<prefix>-" from the front and ".tif" from the back.
        assert filename.startswith(self.prefix + "-")
        assert filename.endswith(".tif")
        return filename[len(self.prefix) + 1 : -4]
|
hspatial/test.py
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
import numpy as np
|
2
|
+
from osgeo import gdal, osr
|
3
|
+
|
4
|
+
|
5
|
+
def setup_test_raster(filename, value, timestamp=None, srid=4326, unit=None):
    """Save value, which is a 3x3 np array, to a GeoTIFF file.

    NaN cells are stored as the nodata value.  Optionally records TIMESTAMP
    and UNIT metadata items.  ``srid`` should be 4326 or 2100 — the
    geotransform is hard-coded for those two (any other srid gets no
    geotransform).
    """
    nodata = 1e8
    # Fix: work on a copy — the original wrote the nodata value into the
    # caller's array in place.
    value = np.where(np.isnan(value), nodata, value)
    f = gdal.GetDriverByName("GTiff").Create(filename, 3, 3, 1, gdal.GDT_Float32)
    try:
        if timestamp:
            f.SetMetadataItem("TIMESTAMP", timestamp.isoformat())
        if unit:
            f.SetMetadataItem("UNIT", unit)
        if srid == 4326:
            f.SetGeoTransform((22.0, 0.01, 0, 38.0, 0, -0.01))
        elif srid == 2100:
            f.SetGeoTransform((320000, 1000, 0, 4210000, 0, -1000))
        sr = osr.SpatialReference()
        sr.ImportFromEPSG(srid)
        if int(gdal.__version__.split(".")[0]) > 2:
            sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        f.SetProjection(sr.ExportToWkt())
        f.GetRasterBand(1).SetNoDataValue(nodata)
        f.GetRasterBand(1).WriteArray(value)
    finally:
        f = None  # Close the dataset
|
htimeseries/__init__.py
ADDED