ccfx-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ccfx/__init__.py +5 -0
- ccfx/ccfx.py +694 -0
- ccfx/excel.py +143 -0
- ccfx/mssqlConnection.py +228 -0
- ccfx/sqliteConnection.py +302 -0
- ccfx/word.py +96 -0
- ccfx-0.1.0.dist-info/LICENSE +21 -0
- ccfx-0.1.0.dist-info/METADATA +145 -0
- ccfx-0.1.0.dist-info/RECORD +11 -0
- ccfx-0.1.0.dist-info/WHEEL +5 -0
- ccfx-0.1.0.dist-info/top_level.txt +1 -0
ccfx/__init__.py
ADDED
ccfx/ccfx.py
ADDED
@@ -0,0 +1,694 @@
'''
This module contains functions to speed up general python prototyping and development.

Author      : Celray James CHAWANDA
Email       : celray@chawanda.com
Date        : 2024-09-11
License     : MIT

Repository  : https://github.com/celray/ccfx
'''

# imports
import os, sys
import glob
import warnings
from netCDF4 import Dataset
from osgeo import gdal, osr
import numpy
from genericpath import exists
import shutil
import platform
import pickle
import time
from shapely.geometry import box, Point
import geopandas, pandas


# functions
def listFiles(path: str, ext: str = None) -> list:
    '''
    List all files in a directory with a specific extension
    path: directory
    ext: extension (optional), variations allowed like 'txt', '.txt', '*txt', '*.txt'
    '''

    if ext is None:
        ext = '*'
    else:
        ext = ext.lstrip('*')
        if not ext.startswith('.'):
            ext = '.' + ext

    pattern = os.path.join(path, f'*{ext}')

    if not os.path.isdir(path):
        print(f'! Warning: {path} is not a directory')
        return []

    return glob.glob(pattern)

def getExtension(filePath:str) -> str:
    '''
    Get the extension of a file
    filePath: file path
    '''
    return os.path.splitext(filePath)[1]

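# Editor's note: a minimal usage sketch (not part of the packaged file); the
# './data' directory and file names are hypothetical.
#   >>> from ccfx import ccfx
#   >>> ccfx.listFiles('./data', 'txt')         # 'txt', '.txt' and '*.txt' give the same result
#   >>> ccfx.getExtension('./data/report.txt')  # '.txt'
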
def deleteFile(filePath:str, v:bool = False) -> bool:
    '''
    Delete a file
    filePath: file path
    v: verbose (default is False)

    return: True if the file is deleted, False otherwise
    '''

    deleted = False
    if os.path.exists(filePath):
        try:
            os.remove(filePath)
            deleted = True
            if v:
                print(f'> {filePath} deleted')
        except OSError:
            print(f'! Could not delete {filePath}')
            deleted = False
    else:
        if v:
            print(f'! {filePath} does not exist')
        deleted = False

    return deleted

def systemPlatform() -> str:
    '''
    Get the system platform
    '''
    return platform.system()

def progressBar(count, total, message=""):
    percent = int(count / total * 100)
    filled = int(percent / 2)
    bar = '█' * filled + '░' * (50 - filled)
    print(f'\r{message} |{bar}| {percent}% [{count}/{total}]', end='', flush=True)
    if count == total:
        print()

def fileCount(path:str = "./", extension:str = ".*", v:bool = True) -> int:
    '''
    Get the number of files in a directory with a specific extension
    path: directory
    extension: extension
    v: verbose (default is True)
    '''
    count = len(listFiles(path, extension))
    if v:
        print(f'> there are {count} {extension if not extension == ".*" else ""} files in {path}')
    return count

def watchFileCount(path:str = "./", extension:str = ".*", interval:float = 0.2, duration = 3, v:bool = True) -> None:
    '''
    Watch the number of files in a directory with a specific extension
    path: directory
    extension: extension
    interval: time interval in seconds
    duration: duration in minutes
    v: verbose (default is True)
    '''

    duration *= 60
    count = 0
    end = time.time() + duration
    while time.time() < end:
        count = fileCount(path, extension, False)
        sys.stdout.write(f'\r\t> {count} {extension if not extension == ".*" else ""} files in {path}   ')
        sys.stdout.flush()
        time.sleep(interval)

    return None


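# Editor's note: a minimal usage sketch (not part of the packaged file); the
# './downloads' path is hypothetical.
#   >>> from ccfx import ccfx
#   >>> n = ccfx.fileCount('./downloads', '.csv')
#   >>> for i in range(1, n + 1):
#   ...     ccfx.progressBar(i, n, message='processing')
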
def pythonVariable(filename, option, variable=None):
    '''
    Save, load or open a python variable using pickle
    option: 'save' (or 'dump'), 'load' (or 'open')
    '''
    if ((option == "save") or (option == "dump")) and (variable is None):
        print("\t! please specify a variable")

    if (option == "save") or (option == "dump"):
        createPath(os.path.dirname(filename))
        with open(filename, 'wb') as f:
            pickle.dump(variable, f)

    if (option == "load") or (option == "open"):
        with open(filename, "rb") as f:
            variable = pickle.load(f)

    return variable


def listFolders(path:str) -> list:
    '''
    List all folders in a directory
    path: directory
    (use './' for current directory and always use forward slashes)
    '''
    if not path.endswith('/'):
        path += '/'

    if os.path.exists(path):
        return [f for f in os.listdir(path) if os.path.isdir(os.path.join(path, f))]
    else:
        return []

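# Editor's note: a minimal usage sketch (not part of the packaged file); the
# 'cache/results.pkl' path is hypothetical.
#   >>> from ccfx import ccfx
#   >>> ccfx.pythonVariable('cache/results.pkl', 'save', {'a': 1})
#   >>> restored = ccfx.pythonVariable('cache/results.pkl', 'load')
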
def readFrom(filename, decode_codec = None, v=False):
    '''
    a function to read ascii files
    '''
    try:
        if not decode_codec is None: g = open(filename, 'rb')
        else: g = open(filename, 'r')
    except IOError:
        print("\t! error reading {0}, make sure the file exists".format(filename))
        return

    file_text = g.readlines()
    if not decode_codec is None: file_text = [line.decode(decode_codec) for line in file_text]
    if v: print("\t> read {0}".format(getFileBaseName(filename, extension=True)))
    g.close()
    return file_text

def readFile(filename, decode_codec = None, v=False):
    return readFrom(filename, decode_codec, v)

def writeTo(filename, file_text, encode_codec = None, v=False) -> bool:
    '''
    a function to write ascii files
    '''
    createPath(os.path.dirname(filename))

    try:
        if not encode_codec is None: g = open(filename, 'wb')
        else: g = open(filename, 'w')
    except IOError:
        print("\t! error writing to {0}".format(filename))
        return False

    if not encode_codec is None: file_text = [line.encode(encode_codec) for line in file_text]
    g.writelines(file_text)
    g.close()
    if v: print("\t> wrote {0}".format(getFileBaseName(filename, extension=True)))
    return True

def writeToFile(filename, file_text, encode_codec = None, v=False) -> bool:
    return writeTo(filename, file_text, encode_codec, v)

def writeFile(filename, file_text, encode_codec = None, v=False) -> bool:
    return writeTo(filename, file_text, encode_codec, v)

def createPath(pathName, v = False):
    '''
    this function creates a directory if it does not exist
    pathName: the path to create
    v: verbose (default is False)
    '''
    if pathName == '':
        return './'

    if pathName.endswith('\\'): pathName = pathName[:-1]
    if not pathName.endswith('/'): pathName += '/'

    if not os.path.isdir(pathName):
        os.makedirs(pathName)
        if v: print(f"\t> created path: {pathName}")

    if pathName.endswith("/"): pathName = pathName[:-1]
    return pathName


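# Editor's note: a minimal usage sketch (not part of the packaged file); the
# 'output/notes.txt' path is hypothetical.
#   >>> from ccfx import ccfx
#   >>> ccfx.writeFile('output/notes.txt', ['first line\n', 'second line\n'])
#   >>> lines = ccfx.readFile('output/notes.txt')
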
def moveDirectory(srcDir:str, destDir:str, v:bool = False) -> bool:
    '''
    this function moves all files from srcDir to destDir
    srcDir: the source directory
    destDir: the destination directory
    return: True if the operation is successful, False otherwise
    '''
    # Ensure both directories exist
    if not os.path.isdir(srcDir):
        print("! source directory does not exist")
        return False

    if not os.path.isdir(destDir):
        createPath(f"{destDir}/")

    # Get a list of all files in the source directory
    files = [f for f in os.listdir(srcDir) if os.path.isfile(os.path.join(srcDir, f))]

    # Move each file to the destination directory
    for file in files:
        src_path = os.path.join(srcDir, file)
        dest_path = os.path.join(destDir, file)
        if v:
            print(f"\t> moving {src_path} to {dest_path}")
        shutil.move(src_path, dest_path)

    return True


def moveDirectoryFiles(srcDir: str, destDir: str, v: bool = False) -> bool:
    '''
    This function moves all files from srcDir to destDir one at a time,
    recreating the directory structure under destDir. Emptied folders are
    left behind in srcDir.
    srcDir: the source directory
    destDir: the destination directory
    v: verbose flag for printing actions
    return: True if the operation is successful, False otherwise
    '''
    # Ensure both directories exist
    if not os.path.isdir(srcDir):
        print("! Source directory does not exist")
        return False

    if not os.path.isdir(destDir):
        os.makedirs(destDir, exist_ok=True)

    # Walk through the directory tree
    for root, dirs, files in os.walk(srcDir, topdown=True):
        # Compute the relative path from the source directory
        rel_path = os.path.relpath(root, srcDir)
        # Compute the destination root path
        dest_root = os.path.join(destDir, rel_path) if rel_path != '.' else destDir

        # Create destination directories if they don't exist
        if not os.path.exists(dest_root):
            os.makedirs(dest_root, exist_ok=True)

        # Move files
        for file in files:
            src_file = os.path.join(root, file)
            dest_file = os.path.join(dest_root, file)
            if v:
                print(f"\t> Moving file \n\t  - {src_file}\n\t  - to {dest_file}")
            shutil.move(src_file, dest_file)

    return True


def clipRasterByExtent(inFile: str, outFile: str, bounds: tuple) -> str:
    '''
    Clips a raster using GDAL translate
    inFile: input raster path
    outFile: output path
    bounds: tuple (minx, miny, maxx, maxy)
    return: output path
    '''
    ds = gdal.Open(inFile)
    gdal.Translate(outFile, ds, projWin=[bounds[0], bounds[3], bounds[2], bounds[1]])
    ds = None
    return outFile

def getVectorBounds(grid_gdf: geopandas.GeoDataFrame) -> tuple:
    '''
    This function gets the bounds of a GeoDataFrame
    grid_gdf: GeoDataFrame

    return: minx, miny, maxx, maxy
    '''
    # Initialize min and max values with extreme values
    minx, miny = numpy.inf, numpy.inf
    maxx, maxy = -numpy.inf, -numpy.inf

    # Iterate through each geometry in the GeoDataFrame
    for geom in grid_gdf.geometry:
        # Get bounds for each geometry (minx, miny, maxx, maxy)
        geom_minx, geom_miny, geom_maxx, geom_maxy = geom.bounds

        # Update the global min/max for x and y
        minx = min(minx, geom_minx)
        miny = min(miny, geom_miny)
        maxx = max(maxx, geom_maxx)
        maxy = max(maxy, geom_maxy)

    return minx, miny, maxx, maxy


def ignoreWarnings(ignore:bool = True, v:bool = False) -> None:
    '''
    Ignore warnings
    ignore: True to ignore warnings, False to show warnings
    v: verbose (default is False)

    returns: None
    '''
    if ignore:
        warnings.filterwarnings("ignore")
        if v: print("warnings ignored")
    else:
        warnings.filterwarnings("default")
        if v: print("warnings not ignored")
    return None


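# Editor's note: a minimal usage sketch (not part of the packaged file); the
# raster paths and bounding box values are hypothetical.
#   >>> from ccfx import ccfx
#   >>> ccfx.clipRasterByExtent('dem.tif', 'dem_clip.tif', (32.0, -2.0, 35.5, 1.5))
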
def createGrid(shapefile_path: str, resolution: float, useDegree: bool = True) -> tuple:
    '''
    This function creates a grid of polygons based on a shapefile
    shapefile_path: path to the shapefile
    resolution: resolution of the grid
    useDegree: use degree (default is True)

    return: xx, yy, polygons, within_mask, gdf.crs, minx, miny
    '''
    # Read the shapefile
    gdf = geopandas.read_file(shapefile_path)

    if useDegree:
        gdf = gdf.to_crs(epsg=4326)

    # Get the bounds of the shapefile
    minx, miny, maxx, maxy = gdf.total_bounds

    # Create a grid based on the bounds and resolution
    x = numpy.arange(minx, maxx, resolution)
    y = numpy.arange(miny, maxy, resolution)
    xx, yy = numpy.meshgrid(x, y)

    # Create polygons for each grid cell, arranged in a 2D array
    grid_shape = xx.shape
    polygons = numpy.empty(grid_shape, dtype=object)
    for i in range(grid_shape[0]):
        for j in range(grid_shape[1]):
            x0, y0 = xx[i, j], yy[i, j]
            x1, y1 = x0 + resolution, y0 + resolution
            polygons[i, j] = box(x0, y0, x1, y1)

    # Flatten the polygons for GeoDataFrame creation
    flat_polygons = polygons.ravel()

    # Create a GeoDataFrame from the grid
    grid_gdf = geopandas.GeoDataFrame({'geometry': flat_polygons}, crs=gdf.crs)

    minx, miny, maxx, maxy = grid_gdf.total_bounds
    print("   minx:", minx, "miny:", miny, "maxx:", maxx, "maxy:", maxy)

    minx, miny, maxx, maxy = getVectorBounds(grid_gdf)
    # Add a column to indicate if the cell intersects with the original shapefile
    grid_gdf['within'] = grid_gdf.intersects(gdf.unary_union)

    # Reshape the 'within' mask to grid shape
    within_mask = grid_gdf['within'].values.reshape(grid_shape)

    # Save the grid
    reprojectedGrid = grid_gdf.to_crs(epsg=4326)

    grid_gdf.to_file("generatedGrid4326.gpkg", driver="GPKG")
    reprojectedGrid.to_file("generatedGrid.gpkg", driver="GPKG")

    return xx, yy, polygons, within_mask, gdf.crs, minx, miny

def setHomeDir(path:str) -> str:
    '''
    Set the working directory to the location of the script that imported this function
    '''
    homeDir = os.path.dirname(os.path.realpath(path))
    os.chdir(homeDir)

    return homeDir

def listDirectories(path:str) -> list:
    '''
    List all directories in a directory
    path: directory
    '''
    return listFolders(path)


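# Editor's note: a minimal usage sketch (not part of the packaged file); the
# 'basin.shp' path and the 0.1 degree resolution are hypothetical. Note that
# createGrid also writes 'generatedGrid4326.gpkg' and 'generatedGrid.gpkg'
# to the current working directory.
#   >>> from ccfx import ccfx
#   >>> ccfx.setHomeDir(__file__)
#   >>> xx, yy, polys, mask, crs, minx, miny = ccfx.createGrid('basin.shp', 0.1)
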
def netcdfVariablesList(ncFile:str) -> list:
    '''
    List all variables in a NetCDF file
    ncFile: NetCDF file
    '''
    nc = Dataset(ncFile)
    return list(nc.variables.keys())

def netcdfVariableDimensions(ncFile: str, variable: str) -> dict:
    '''
    Get available bands (e.g., time, level, depth) for a given variable in a NetCDF file.

    ncFile: NetCDF file (str)
    variable: Name of the variable (str)

    Returns:
        A dictionary with dimension names and their sizes (e.g., time steps or levels).
    '''
    # Open the NetCDF file
    nc = Dataset(ncFile)

    # Check if the variable exists in the file
    if variable not in nc.variables:
        raise ValueError(f"Variable '{variable}' not found in {ncFile}")

    # Access the variable
    var = nc.variables[variable]

    # Get dimensions associated with the variable
    dimensions = var.dimensions

    # Create a dictionary with dimension names and their sizes
    bands_info = {}
    for dim in dimensions:
        bands_info[dim] = len(nc.dimensions[dim])

    return bands_info

def netcdfExportTif(ncFile: str, variable: str, outputFile: str = None, band: int = None, v:bool = True) -> gdal.Dataset:
    '''
    Export a variable from a NetCDF file to a GeoTIFF file
    ncFile: NetCDF file
    variable: variable to export
    outputFile: GeoTIFF file (optional)
    band: band number to export (optional, all bands are returned if not specified)
    '''
    input_string = f'NETCDF:"{ncFile}":{variable}'

    if v: print(f'> Exporting {variable} from {ncFile} to {outputFile}')
    if outputFile:
        if not os.path.exists(outputFile):
            dirName = os.path.dirname(outputFile)
            if dirName and not os.path.exists(dirName):
                os.makedirs(dirName)
        if band:
            dataset = gdal.Translate(outputFile, input_string, bandList=[band])
        else:
            dataset = gdal.Translate(outputFile, input_string)
    else:
        if band:
            dataset = gdal.Translate('', input_string, format='MEM', bandList=[band])
        else:
            dataset = gdal.Translate('', input_string, format='MEM')

    return dataset


def getFileBaseName(filePath:str, extension:bool = False) -> str:
    '''
    Get the base name of a file
    filePath: file path
    extension: include extension
    '''
    baseName = os.path.basename(filePath)
    if extension:
        return baseName
    else:
        return os.path.splitext(baseName)[0]

def netcdfAverageMap(ncFiles:list, variable:str, band:int = 1) -> numpy.ndarray:
    sum_array = netcdfSumMaps(ncFiles, variable, band=band)
    return sum_array / len(ncFiles)

def netcdfSumMaps(ncFiles:list, variable:str, band:int = 1) -> numpy.ndarray:
    sum_array = None
    for ncFile in ncFiles:
        dataset = netcdfExportTif(ncFile, variable, band=band, v=False)
        data = dataset.GetRasterBand(1)
        data = data.ReadAsArray()
        if sum_array is None:
            sum_array = numpy.zeros_like(data, dtype=numpy.float32)
        sum_array += data
    return sum_array


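# Editor's note: a minimal usage sketch (not part of the packaged file); the
# 'precip.nc' file and the 'pr' variable are hypothetical.
#   >>> from ccfx import ccfx
#   >>> ccfx.netcdfVariablesList('precip.nc')
#   >>> ccfx.netcdfVariableDimensions('precip.nc', 'pr')
#   >>> ccfx.netcdfExportTif('precip.nc', 'pr', 'precip_band1.tif', band=1)
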
def tiffWriteArray(array: numpy.ndarray, outputFile: str,
                   geoTransform: tuple = (0, 1, 0, 0, 0, -1),
                   projection: str = 'EPSG:4326',
                   noData: float = None,
                   v: bool = False) -> gdal.Dataset:
    '''
    Write a numpy array to a GeoTIFF file
    array        : numpy array to write
    outputFile   : output GeoTIFF file
    geoTransform : GeoTransform tuple (default is (0, 1, 0, 0, 0, -1))
                   example: (originX, pixelWidth, 0, originY, 0, -pixelHeight)
    projection   : projection string (default is 'EPSG:4326')
    '''
    driver = gdal.GetDriverByName('GTiff')
    out_ds = driver.Create(outputFile, array.shape[1], array.shape[0], 1, gdal.GDT_Float32)

    # Set GeoTransform
    out_ds.SetGeoTransform(geoTransform)

    # Set Projection
    srs = osr.SpatialReference()
    srs.SetFromUserInput(projection)
    out_ds.SetProjection(srs.ExportToWkt())

    # Write array to band
    out_band = out_ds.GetRasterBand(1)
    # Set NoData
    if noData is not None:
        out_band.SetNoDataValue(noData)

    out_band.WriteArray(array)
    out_band.FlushCache()

    if v:
        print(f'> Array written to {outputFile}')
    return out_ds

def copyFile(source:str, destination:str, v:bool = True) -> None:
    '''
    Copy a file from source to destination
    source: source file
    destination: destination file
    '''
    with open(source, 'rb') as src:
        with open(destination, 'wb') as dest: dest.write(src.read())

    if v: print(f'> {source} copied to \t - {destination}')


def convertCoordinates(lon, lat, srcEPSG, dstCRS) -> tuple:
    """
    this function converts coordinates from one CRS to another

    lon: longitude
    lat: latitude
    srcEPSG: source CRS (e.g. 'EPSG:4326')
    dstCRS: destination CRS

    return: tuple (new_lon, new_lat)
    """
    gdf = geopandas.GeoDataFrame(geometry=[Point(lon, lat)], crs=f"{srcEPSG.upper()}")
    gdf_converted = gdf.to_crs(dstCRS.upper())
    new_lon, new_lat = gdf_converted.geometry.x[0], gdf_converted.geometry.y[0]
    return (new_lon, new_lat)


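# Editor's note: a minimal usage sketch (not part of the packaged file); the
# array values and the geotransform origin are hypothetical.
#   >>> import numpy
#   >>> from ccfx import ccfx
#   >>> data = numpy.random.rand(100, 100).astype(numpy.float32)
#   >>> ccfx.tiffWriteArray(data, 'random.tif', geoTransform=(30.0, 0.01, 0, 1.0, 0, -0.01))
#   >>> ccfx.convertCoordinates(32.58, 0.32, 'EPSG:4326', 'EPSG:32636')
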
def extractRasterValue(rasterPath: str, lat: float, lon: float, coordProj: str = 'EPSG:4326') -> float:
    """
    Extract the raster value at the given coordinates.

    Args:
        rasterPath (str): Path to the raster file
        lat (float): Latitude of the point
        lon (float): Longitude of the point
        coordProj (str): Projection of the input coordinates (default: 'EPSG:4326')

    Returns:
        float: Raster value at the specified coordinates, or None if the point
        falls outside the raster bounds
    """
    # Open raster dataset
    if not exists(rasterPath): raise ValueError(f"Raster file not found: {rasterPath}")

    ds = gdal.Open(rasterPath)
    if ds is None: raise ValueError(f"Could not open raster file: {rasterPath}")

    # Check if raster has projection information
    raster_proj = ds.GetProjection()
    if not raster_proj:
        raise ValueError("Raster has no projection information")

    # Convert coordinates to the raster projection
    x, y = convertCoordinates(lon, lat, coordProj, raster_proj)

    # Get geotransform parameters and calculate pixel coordinates
    geotransform = ds.GetGeoTransform()
    px = int((x - geotransform[0]) / geotransform[1])
    py = int((y - geotransform[3]) / geotransform[5])

    # Check if within bounds
    if px < 0 or px >= ds.RasterXSize or py < 0 or py >= ds.RasterYSize:
        print(f"! point ({lat}, {lon}) is outside raster bounds")
        ds = None
        return None

    # Get value at pixel
    value = ds.GetRasterBand(1).ReadAsArray(px, py, 1, 1)[0][0]
    ds = None

    return float(value)


def getRasterValue(rasterPath: str, lat: float, lon: float, coordProj: str = 'EPSG:4326') -> float:
    '''
    this function is a wrapper for extractRasterValue
    '''
    return extractRasterValue(rasterPath, lat, lon, coordProj)


def isBetween(number:float, a:float, b:float) -> bool:
    '''
    this function returns True if number is between a and b;
    it also takes care of the case where the user swaps a and b
    '''
    if a > b: a, b = b, a
    return a <= number <= b

def showProgress(count: int, end: int, message: str, barLength: int = 100) -> None:
    '''
    Display a progress bar
    count: current count
    end: end count
    message: message to display
    barLength: length of the progress bar
    '''
    percent = count / end * 100
    percentStr = f'{percent:03.1f}'
    filled = int(barLength * count / end)
    bar = '█' * filled + '░' * (barLength - filled)
    print(f'\r{bar}| {percentStr}% [{count}/{end}] | {message}       ', end='', flush=True)
    if count == end: print()


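# Editor's note: a minimal usage sketch (not part of the packaged file); the
# 'dem.tif' path and the coordinates are hypothetical.
#   >>> from ccfx import ccfx
#   >>> ccfx.getRasterValue('dem.tif', lat=0.35, lon=32.6)
#   >>> ccfx.isBetween(5, 10, 1)
#   True
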
def createPointGeometry(coords: list, proj: str = "EPSG:4326") -> geopandas.GeoDataFrame:
    '''
    Convert list of coordinate tuples to GeoDataFrame
    coords: list of tuples (lat, lon, *labels)
    proj: projection string e.g. "EPSG:4326"
    return: GeoDataFrame
    '''
    data = []
    geoms = []
    max_labels = max(len(coord) - 2 for coord in coords)

    for coord in coords:
        lat, lon = coord[0], coord[1]
        labels = coord[2:] if len(coord) > 2 else []
        geoms.append(Point(lon, lat))  # Note: Point takes (x, y) = (lon, lat)
        data.append(labels)

    df = pandas.DataFrame(data)
    df.columns = [f'label{i+1}' for i in range(len(df.columns))]
    gdf = geopandas.GeoDataFrame(df, geometry=geoms, crs=proj)
    gdf.reset_index(inplace=True)
    return gdf

ignoreWarnings()
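# Editor's note: a minimal usage sketch (not part of the packaged file); the
# coordinates and labels are hypothetical. Importing ccfx.ccfx suppresses
# warnings module-wide via the ignoreWarnings() call above; call
# ccfx.ignoreWarnings(False) to restore the default warning filter.
#   >>> from ccfx import ccfx
#   >>> stations = ccfx.createPointGeometry([(0.35, 32.6, 'kampala'), (-1.95, 30.06, 'kigali')])
#   >>> stations.to_file('stations.gpkg', driver='GPKG')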