gtrack 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gtrack/__init__.py +137 -0
- gtrack/boundaries.py +396 -0
- gtrack/config.py +202 -0
- gtrack/geometry.py +348 -0
- gtrack/hpc_integration.py +851 -0
- gtrack/initial_conditions.py +255 -0
- gtrack/io_formats.py +477 -0
- gtrack/logging.py +193 -0
- gtrack/mesh.py +101 -0
- gtrack/mor_seeds.py +390 -0
- gtrack/point_rotation.py +836 -0
- gtrack/polygon_filter.py +223 -0
- gtrack/spatial.py +397 -0
- gtrack-0.3.0.dist-info/METADATA +66 -0
- gtrack-0.3.0.dist-info/RECORD +17 -0
- gtrack-0.3.0.dist-info/WHEEL +5 -0
- gtrack-0.3.0.dist-info/top_level.txt +1 -0
gtrack/io_formats.py
ADDED
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Input/output utilities for PointCloud data.
|
|
3
|
+
|
|
4
|
+
Supports multiple formats:
|
|
5
|
+
- NumPy (.npy, .npz): Fast binary format for Python workflows
|
|
6
|
+
- GPML (.gpml, .gpmlz): Native pygplates/GPlates format
|
|
7
|
+
- CSV/text: Human-readable format
|
|
8
|
+
|
|
9
|
+
Also provides checkpointing functionality for saving/restoring state.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Dict, Optional, Tuple, Union
|
|
14
|
+
|
|
15
|
+
import numpy as np
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def load_points_numpy(
    filepath: Union[str, Path],
    xyz_columns: Tuple[int, int, int] = (0, 1, 2),
    property_columns: Optional[Dict[str, int]] = None
) -> "PointCloud":
    """
    Load points from numpy file.

    Parameters
    ----------
    filepath : str or Path
        Path to .npy or .npz file.
    xyz_columns : tuple, default=(0, 1, 2)
        Column indices for x, y, z coordinates (for .npy files).
    property_columns : dict, optional
        Mapping from property name to column index (for .npy files).

    Returns
    -------
    PointCloud
        Loaded points.

    Raises
    ------
    ValueError
        If an .npz file does not contain an 'xyz' array.

    Examples
    --------
    >>> # Load from .npz with xyz and properties
    >>> cloud = load_points_numpy('points.npz')
    >>>
    >>> # Load from .npy with specific columns
    >>> cloud = load_points_numpy(
    ...     'data.npy',
    ...     xyz_columns=(0, 1, 2),
    ...     property_columns={'depth': 3, 'temperature': 4}
    ... )
    """
    from .point_rotation import PointCloud

    filepath = Path(filepath)

    if filepath.suffix == '.npz':
        # allow_pickle is needed for object-dtype metadata written by save().
        data = np.load(filepath, allow_pickle=True)
        if 'xyz' not in data:
            raise ValueError("NPZ file must contain 'xyz' array")

        xyz = data['xyz']
        properties = {}
        plate_ids = None

        for key in data.files:
            if key == 'xyz':
                continue
            elif key == 'plate_ids':
                plate_ids = data[key]
            elif key.startswith('prop_'):
                # Properties saved with the 'prop_' prefix (see save_points_numpy)
                properties[key[5:]] = data[key]
            elif key not in ('metadata',):
                # Unprefixed, non-reserved keys are assumed to be properties
                properties[key] = data[key]

        # Pass plate_ids through the constructor for consistency with
        # PointCloudCheckpoint.load (previously assigned as an attribute
        # after construction).
        return PointCloud(xyz=xyz, properties=properties, plate_ids=plate_ids)

    # .npy file - raw 2D array; select coordinate and property columns.
    data = np.load(filepath)
    xyz = data[:, list(xyz_columns)]

    properties = {}
    if property_columns:
        for name, col in property_columns.items():
            properties[name] = data[:, col]

    return PointCloud(xyz=xyz, properties=properties)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def load_points_latlon(
    filepath: Union[str, Path],
    latlon_columns: Tuple[int, int] = (0, 1),
    property_columns: Optional[Dict[str, int]] = None,
    delimiter: str = ',',
    skip_header: int = 0
) -> "PointCloud":
    """
    Load points from a lat/lon text file (CSV, etc.).

    Parameters
    ----------
    filepath : str or Path
        Path to text file.
    latlon_columns : tuple, default=(0, 1)
        Column indices for lat, lon (in degrees).
    property_columns : dict, optional
        Mapping from property name to column index.
    delimiter : str, default=','
        Column delimiter.
    skip_header : int, default=0
        Number of header lines to skip.

    Returns
    -------
    PointCloud
        Loaded points.

    Examples
    --------
    >>> cloud = load_points_latlon(
    ...     'points.csv',
    ...     latlon_columns=(0, 1),
    ...     property_columns={'depth': 2}
    ... )
    """
    from .point_rotation import PointCloud

    table = np.loadtxt(filepath, delimiter=delimiter, skiprows=skip_header)

    coords = table[:, list(latlon_columns)]

    # Pull each requested property out of its column.
    props = (
        {name: table[:, col] for name, col in property_columns.items()}
        if property_columns
        else {}
    )

    return PointCloud.from_latlon(coords, props)
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def save_points_numpy(
    cloud: "PointCloud",
    filepath: Union[str, Path],
    include_properties: bool = True
) -> None:
    """
    Save points to numpy format.

    Writes a plain .npy (coordinates only) or an .npz archive when
    properties or plate IDs are present and requested.

    Parameters
    ----------
    cloud : PointCloud
        Points to save.
    filepath : str or Path
        Output path (.npy or .npz).
    include_properties : bool, default=True
        If True, save properties (requires .npz format).

    Examples
    --------
    >>> save_points_numpy(cloud, 'output.npz')
    """
    filepath = Path(filepath)

    has_extras = bool(cloud.properties) or cloud.plate_ids is not None
    if not (include_properties and has_extras):
        # Coordinates only: a simple .npy array is sufficient.
        np.save(filepath, cloud.xyz)
        return

    # Properties / plate IDs require the multi-array .npz format.
    if filepath.suffix != '.npz':
        filepath = filepath.with_suffix('.npz')

    arrays = {'xyz': cloud.xyz}
    if cloud.plate_ids is not None:
        arrays['plate_ids'] = cloud.plate_ids
    # Prefix property keys so they can be recognised on load.
    arrays.update({f'prop_{key}': values for key, values in cloud.properties.items()})

    np.savez(filepath, **arrays)
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def save_points_latlon(
    cloud: "PointCloud",
    filepath: Union[str, Path],
    delimiter: str = ',',
    header: Optional[str] = None,
    include_properties: bool = True
) -> None:
    """
    Save points to a lat/lon text file.

    Property columns (if included) are appended after lat/lon in sorted
    key order.

    Parameters
    ----------
    cloud : PointCloud
        Points to save.
    filepath : str or Path
        Output path.
    delimiter : str, default=','
        Column delimiter.
    header : str, optional
        Header line to write.
    include_properties : bool, default=True
        If True, include properties as additional columns.

    Examples
    --------
    >>> save_points_latlon(cloud, 'output.csv', header='lat,lon,depth')
    """
    coords = cloud.latlon

    if include_properties and cloud.properties:
        # Append one column per property, sorted by name for stable ordering.
        extras = [
            cloud.properties[key].reshape(-1, 1)
            for key in sorted(cloud.properties.keys())
        ]
        table = np.hstack([coords] + extras)
    else:
        table = coords

    # comments='' prevents numpy from prefixing the header with '# '.
    np.savetxt(filepath, table, delimiter=delimiter,
               header=header if header else '', comments='')
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def save_points_gpml(
    cloud: "PointCloud",
    filepath: Union[str, Path],
    feature_type: str = "UnclassifiedFeature",
    include_plate_ids: bool = True
) -> None:
    """
    Save points to GPML format (pygplates-compatible).

    Parameters
    ----------
    cloud : PointCloud
        Points to save.
    filepath : str or Path
        Output path (.gpml or .gpmlz for compressed).
    feature_type : str, default="UnclassifiedFeature"
        GPlates feature type for points (name in the 'gpml' namespace).
    include_plate_ids : bool, default=True
        If True, include reconstruction plate IDs in features.

    Examples
    --------
    >>> save_points_gpml(cloud, 'continental_points.gpml')
    >>> save_points_gpml(cloud, 'compressed.gpmlz')  # Compressed
    """
    import pygplates
    from .geometry import XYZ2LatLon

    # Convert to lat/lon
    lats, lons = XYZ2LatLon(cloud.xyz)

    # BUG FIX: feature_type was previously accepted but never used, so every
    # feature was written with the default type. Resolve it once here; the
    # default "UnclassifiedFeature" preserves the old output.
    gpml_type = pygplates.FeatureType.create_gpml(feature_type)

    attach_plate_ids = include_plate_ids and cloud.plate_ids is not None

    # Create one point feature per cloud point.
    features = []
    for i in range(cloud.n_points):
        feature = pygplates.Feature(gpml_type)
        feature.set_geometry(pygplates.PointOnSphere(lats[i], lons[i]))

        if attach_plate_ids:
            feature.set_reconstruction_plate_id(int(cloud.plate_ids[i]))

        features.append(feature)

    # Save to file; pygplates infers format (and compression) from the suffix.
    feature_collection = pygplates.FeatureCollection(features)
    feature_collection.write(str(filepath))
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def load_points_gpml(
    filepath: Union[str, Path]
) -> "PointCloud":
    """
    Load points from a GPML file.

    Parameters
    ----------
    filepath : str or Path
        Path to .gpml or .gpmlz file.

    Returns
    -------
    PointCloud
        Loaded points with plate_ids if available.

    Raises
    ------
    ValueError
        If the file contains no point features.

    Examples
    --------
    >>> cloud = load_points_gpml('points.gpml')
    """
    import pygplates
    from .point_rotation import PointCloud
    from .geometry import LatLon2XYZ

    collection = pygplates.FeatureCollection(str(filepath))

    coords = []
    ids = []

    for feat in collection:
        geom = feat.get_geometry()
        # Skip geometry-less features and non-point geometries
        # (only points expose to_lat_lon).
        if geom is None or not hasattr(geom, 'to_lat_lon'):
            continue

        lat, lon = geom.to_lat_lon()
        coords.append([lat, lon])

        pid = feat.get_reconstruction_plate_id()
        ids.append(0 if pid is None else pid)

    if not coords:
        raise ValueError(f"No point features found in {filepath}")

    cloud = PointCloud(xyz=LatLon2XYZ(np.array(coords)))
    cloud.plate_ids = np.array(ids, dtype=int)

    return cloud
|
|
333
|
+
|
|
334
|
+
|
|
335
|
+
class PointCloudCheckpoint:
    """
    Checkpoint manager for PointCloud state.

    Provides save/load functionality with metadata for checkpointing
    during long-running simulations. Checkpoints are stored as .npz
    archives containing the coordinates, optional plate IDs, prefixed
    property arrays, and a pickled metadata dict.

    Examples
    --------
    >>> checkpoint = PointCloudCheckpoint()
    >>>
    >>> # Save with metadata
    >>> checkpoint.save(cloud, 'checkpoint_50Ma.npz', geological_age=50.0)
    >>>
    >>> # Load and get metadata
    >>> cloud, metadata = checkpoint.load('checkpoint_50Ma.npz')
    >>> print(metadata['geological_age'])  # 50.0
    """

    def save(
        self,
        cloud: "PointCloud",
        filepath: Union[str, Path],
        geological_age: Optional[float] = None,
        metadata: Optional[Dict] = None
    ) -> None:
        """
        Save a checkpoint with metadata.

        Parameters
        ----------
        cloud : PointCloud
            Points to save.
        filepath : str or Path
            Output path (.npz format; the suffix is enforced).
        geological_age : float, optional
            Current geological age for reference.
        metadata : dict, optional
            Additional metadata to save.

        Examples
        --------
        >>> checkpoint.save(
        ...     cloud, 'state.npz',
        ...     geological_age=50.0,
        ...     metadata={'simulation_step': 100}
        ... )
        """
        filepath = Path(filepath)
        if filepath.suffix != '.npz':
            filepath = filepath.with_suffix('.npz')

        # Copy so the caller's dict is never mutated.
        info = dict(metadata) if metadata else {}
        if geological_age is not None:
            info['geological_age'] = geological_age

        arrays = {
            'xyz': cloud.xyz,
            # Wrap the dict in a length-1 object array so np.savez accepts it.
            'metadata': np.array([info], dtype=object),
        }

        if cloud.plate_ids is not None:
            arrays['plate_ids'] = cloud.plate_ids

        for key, values in cloud.properties.items():
            arrays[f'prop_{key}'] = values

        np.savez(filepath, **arrays)

    def load(
        self,
        filepath: Union[str, Path]
    ) -> Tuple["PointCloud", Dict]:
        """
        Load a checkpoint.

        Parameters
        ----------
        filepath : str or Path
            Path to checkpoint file.

        Returns
        -------
        cloud : PointCloud
            Loaded point cloud.
        metadata : dict
            Associated metadata (empty dict if none was stored).

        Examples
        --------
        >>> cloud, metadata = checkpoint.load('state.npz')
        >>> print(f"Loaded cloud at {metadata['geological_age']} Ma")
        """
        from .point_rotation import PointCloud

        # allow_pickle is required for the object-dtype metadata array.
        data = np.load(Path(filepath), allow_pickle=True)

        if 'metadata' in data and len(data['metadata']) > 0:
            meta = data['metadata'][0]
        else:
            meta = {}

        plate_ids = data['plate_ids'] if 'plate_ids' in data else None

        # Strip the 'prop_' prefix added by save().
        props = {
            key[5:]: data[key]
            for key in data.files
            if key.startswith('prop_')
        }

        cloud = PointCloud(xyz=data['xyz'], properties=props, plate_ids=plate_ids)
        return cloud, meta

    def list_checkpoints(
        self,
        directory: Union[str, Path],
        pattern: str = "*.npz"
    ) -> list:
        """
        List checkpoint files in a directory.

        Parameters
        ----------
        directory : str or Path
            Directory to search.
        pattern : str, default="*.npz"
            Glob pattern for checkpoint files.

        Returns
        -------
        list
            Sorted list of checkpoint file paths (as strings).
        """
        return [str(path) for path in sorted(Path(directory).glob(pattern))]
|
gtrack/logging.py
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Logging configuration for gtrack.
|
|
3
|
+
|
|
4
|
+
This module provides centralized logging for the gtrack package.
|
|
5
|
+
Log level can be controlled via the GTRACK_LOGLEVEL environment variable.
|
|
6
|
+
|
|
7
|
+
Environment Variables
|
|
8
|
+
---------------------
|
|
9
|
+
GTRACK_LOGLEVEL : str
|
|
10
|
+
Set the logging level. Options: DEBUG, INFO, WARNING, ERROR, CRITICAL.
|
|
11
|
+
Default is WARNING (minimal output).
|
|
12
|
+
|
|
13
|
+
Usage
|
|
14
|
+
-----
|
|
15
|
+
>>> from gtrack.logging import get_logger
|
|
16
|
+
>>> logger = get_logger(__name__)
|
|
17
|
+
>>> logger.info("Processing started")
|
|
18
|
+
>>> logger.debug("Detailed debug information")
|
|
19
|
+
|
|
20
|
+
To enable verbose output, set the environment variable before importing gtrack:
|
|
21
|
+
|
|
22
|
+
export GTRACK_LOGLEVEL=INFO # Show progress messages
|
|
23
|
+
export GTRACK_LOGLEVEL=DEBUG # Show detailed debug info
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
import logging
|
|
27
|
+
import os
|
|
28
|
+
import sys
|
|
29
|
+
from typing import Optional
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Root logger name for the package; module loggers are children of this.
LOGGER_NAME = "gtrack"

# Default log level when GTRACK_LOGLEVEL is unset (quiet by default).
DEFAULT_LOG_LEVEL = "WARNING"

# Detailed format (used at DEBUG) and terse format (INFO and above) —
# selection happens in configure_logging().
LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
LOG_FORMAT_SIMPLE = "%(levelname)s: %(message)s"

# Module-level flag: set to True once configure_logging() has run,
# so get_logger() can auto-configure exactly once.
_logging_configured = False
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _get_log_level_from_env() -> int:
    """Resolve the numeric log level from the GTRACK_LOGLEVEL env variable.

    Unrecognised values fall back to logging.WARNING.
    """
    requested = os.environ.get("GTRACK_LOGLEVEL", DEFAULT_LOG_LEVEL).upper()

    by_name = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "WARN": logging.WARNING,  # common shorthand
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    return by_name.get(requested, logging.WARNING)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def configure_logging(
    level: Optional[int] = None,
    format_string: Optional[str] = None,
    stream: Optional[object] = None,
) -> None:
    """
    Configure gtrack logging.

    Sets up the logging configuration for the entire package. Called
    automatically on first use, but may be invoked again to reconfigure.

    Parameters
    ----------
    level : int, optional
        Logging level (e.g., logging.DEBUG, logging.INFO).
        If None, reads from the GTRACK_LOGLEVEL environment variable.
    format_string : str, optional
        Format string for log messages.
        If None, uses a simple format for INFO and above, detailed for DEBUG.
    stream : file-like, optional
        Stream to write logs to. Default is sys.stderr.
    """
    global _logging_configured

    effective_level = _get_log_level_from_env() if level is None else level

    if format_string is None:
        # DEBUG gets the timestamped format; everything else stays terse.
        fmt = LOG_FORMAT if effective_level < logging.INFO else LOG_FORMAT_SIMPLE
    else:
        fmt = format_string

    target = sys.stderr if stream is None else stream

    logger = logging.getLogger(LOGGER_NAME)
    logger.setLevel(effective_level)

    # Drop any stale handlers so reconfiguring never duplicates output.
    logger.handlers.clear()

    console = logging.StreamHandler(target)
    console.setLevel(effective_level)
    console.setFormatter(logging.Formatter(fmt))
    logger.addHandler(console)

    # Keep gtrack messages out of the root logger.
    logger.propagate = False

    _logging_configured = True
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def get_logger(name: str) -> logging.Logger:
    """
    Get a logger for a gtrack module.

    Parameters
    ----------
    name : str
        Name of the module (typically __name__).

    Returns
    -------
    logging.Logger
        Logger instance for the module.

    Examples
    --------
    >>> logger = get_logger(__name__)
    >>> logger.info("Starting computation")
    """
    global _logging_configured

    # Lazily set up handlers and levels the first time a logger is requested.
    if not _logging_configured:
        configure_logging()

    # Qualify the name so the logger inherits from the package root logger.
    if name.startswith(LOGGER_NAME):
        qualified = name
    else:
        qualified = f"{LOGGER_NAME}.{name}"

    return logging.getLogger(qualified)
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def set_log_level(level: int) -> None:
    """
    Set the log level for all gtrack loggers.

    Parameters
    ----------
    level : int
        Logging level (e.g., logging.DEBUG, logging.INFO).

    Examples
    --------
    >>> import logging
    >>> from gtrack.logging import set_log_level
    >>> set_log_level(logging.DEBUG)  # Enable debug output
    """
    root = logging.getLogger(LOGGER_NAME)
    root.setLevel(level)
    # Handlers filter independently of the logger, so update them too.
    for attached in root.handlers:
        attached.setLevel(level)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def enable_verbose() -> None:
    """Turn on progress messages (INFO level).

    Shorthand for ``set_log_level(logging.INFO)``.
    """
    set_log_level(logging.INFO)
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def enable_debug() -> None:
    """Turn on detailed diagnostics (DEBUG level).

    Shorthand for ``set_log_level(logging.DEBUG)``.
    """
    set_log_level(logging.DEBUG)
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def disable_logging() -> None:
    """Silence all gtrack logging output.

    Shorthand for ``set_log_level(logging.CRITICAL + 1)`` — a level above
    CRITICAL so that nothing passes the filter.
    """
    set_log_level(logging.CRITICAL + 1)
|