tsp 1.7.7__py3-none-any.whl → 1.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tsp/__init__.py +11 -11
- tsp/__meta__.py +1 -1
- tsp/concatenation.py +153 -0
- tsp/core.py +1162 -1035
- tsp/data/2023-01-06_755-test-Dataset_2031-Constant_Over_Interval-Hourly-Ground_Temperature-Thermistor_Automated.timeserie.csv +4 -4
- tsp/data/2023-01-06_755-test.metadata.txt +208 -208
- tsp/data/NTGS_example_csv.csv +6 -6
- tsp/data/NTGS_example_slash_dates.csv +6 -6
- tsp/data/example_geotop.csv +5240 -5240
- tsp/data/example_gtnp.csv +1298 -1298
- tsp/data/example_permos.csv +7 -7
- tsp/data/test_geotop_has_space.txt +5 -5
- tsp/dataloggers/AbstractReader.py +43 -43
- tsp/dataloggers/FG2.py +110 -110
- tsp/dataloggers/GP5W.py +114 -114
- tsp/dataloggers/Geoprecision.py +34 -34
- tsp/dataloggers/HOBO.py +914 -914
- tsp/dataloggers/RBRXL800.py +190 -190
- tsp/dataloggers/RBRXR420.py +308 -308
- tsp/dataloggers/__init__.py +15 -15
- tsp/dataloggers/logr.py +115 -115
- tsp/dataloggers/test_files/004448.DAT +2543 -2543
- tsp/dataloggers/test_files/004531.DAT +17106 -17106
- tsp/dataloggers/test_files/004531.HEX +3587 -3587
- tsp/dataloggers/test_files/004534.HEX +3587 -3587
- tsp/dataloggers/test_files/010252.dat +1731 -1731
- tsp/dataloggers/test_files/010252.hex +1739 -1739
- tsp/dataloggers/test_files/010274.hex +1291 -1291
- tsp/dataloggers/test_files/010278.hex +3544 -3544
- tsp/dataloggers/test_files/012064.dat +1286 -1286
- tsp/dataloggers/test_files/012064.hex +1294 -1294
- tsp/dataloggers/test_files/012081.hex +3532 -3532
- tsp/dataloggers/test_files/07B1592.DAT +1483 -1483
- tsp/dataloggers/test_files/07B1592.HEX +1806 -1806
- tsp/dataloggers/test_files/07B4450.DAT +2234 -2234
- tsp/dataloggers/test_files/07B4450.HEX +2559 -2559
- tsp/dataloggers/test_files/FG2_399.csv +9881 -9881
- tsp/dataloggers/test_files/GP5W.csv +1121 -1121
- tsp/dataloggers/test_files/GP5W_260.csv +1884 -1884
- tsp/dataloggers/test_files/GP5W_270.csv +2210 -2210
- tsp/dataloggers/test_files/H08-030-08_HOBOware.csv +998 -998
- tsp/dataloggers/test_files/RBR_01.dat +1046 -1046
- tsp/dataloggers/test_files/RBR_02.dat +2426 -2426
- tsp/dataloggers/test_files/RSTDT2055.csv +2152 -2152
- tsp/dataloggers/test_files/U23-001_HOBOware.csv +1001 -1001
- tsp/dataloggers/test_files/hobo-negative-2.txt +6396 -6396
- tsp/dataloggers/test_files/hobo-negative-3.txt +5593 -5593
- tsp/dataloggers/test_files/hobo-positive-number-1.txt +1000 -1000
- tsp/dataloggers/test_files/hobo-positive-number-2.csv +1003 -1003
- tsp/dataloggers/test_files/hobo-positive-number-3.csv +1133 -1133
- tsp/dataloggers/test_files/hobo-positive-number-4.csv +1209 -1209
- tsp/dataloggers/test_files/hobo2.csv +8702 -8702
- tsp/dataloggers/test_files/hobo_1_AB.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_Details.txt +133 -133
- tsp/dataloggers/test_files/hobo_1_AB_classic.csv +4373 -4373
- tsp/dataloggers/test_files/hobo_1_AB_defaults.csv +21732 -21732
- tsp/dataloggers/test_files/hobo_1_AB_minimal.txt +1358 -1358
- tsp/dataloggers/test_files/hobo_1_AB_var2.csv +3189 -3189
- tsp/dataloggers/test_files/hobo_1_AB_var3.csv +2458 -2458
- tsp/dataloggers/test_files/logR_ULogC16-32_1.csv +106 -106
- tsp/dataloggers/test_files/logR_ULogC16-32_2.csv +100 -100
- tsp/dataloggers/test_files/mon_3_Ta_2010-08-18_2013-02-08.txt +21724 -21724
- tsp/dataloggers/test_files/rbr_001.dat +1133 -1133
- tsp/dataloggers/test_files/rbr_001.hex +1139 -1139
- tsp/dataloggers/test_files/rbr_001_no_comment.dat +1132 -1132
- tsp/dataloggers/test_files/rbr_001_no_comment.hex +1138 -1138
- tsp/dataloggers/test_files/rbr_002.dat +1179 -1179
- tsp/dataloggers/test_files/rbr_002.hex +1185 -1185
- tsp/dataloggers/test_files/rbr_003.hex +1292 -1292
- tsp/dataloggers/test_files/rbr_003.xls +0 -0
- tsp/dataloggers/test_files/rbr_xl_001.DAT +1105 -1105
- tsp/dataloggers/test_files/rbr_xl_002.DAT +1126 -1126
- tsp/dataloggers/test_files/rbr_xl_003.DAT +4622 -4622
- tsp/dataloggers/test_files/rbr_xl_003.HEX +3587 -3587
- tsp/gtnp.py +148 -148
- tsp/labels.py +3 -3
- tsp/misc.py +90 -90
- tsp/physics.py +101 -101
- tsp/plots/static.py +373 -373
- tsp/readers.py +548 -548
- tsp/time.py +45 -45
- tsp/tspwarnings.py +14 -14
- tsp/utils.py +101 -101
- tsp/version.py +1 -1
- {tsp-1.7.7.dist-info → tsp-1.8.1.dist-info}/METADATA +30 -23
- tsp-1.8.1.dist-info/RECORD +94 -0
- {tsp-1.7.7.dist-info → tsp-1.8.1.dist-info}/WHEEL +5 -5
- {tsp-1.7.7.dist-info → tsp-1.8.1.dist-info/licenses}/LICENSE +674 -674
- tsp/dataloggers/test_files/CSc_CR1000_1.dat +0 -295
- tsp/scratch.py +0 -6
- tsp-1.7.7.dist-info/RECORD +0 -95
- {tsp-1.7.7.dist-info → tsp-1.8.1.dist-info}/top_level.txt +0 -0
tsp/core.py
CHANGED
|
@@ -1,1035 +1,1162 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import pandas as pd
|
|
4
|
-
import re
|
|
5
|
-
import inspect
|
|
6
|
-
import numpy as np
|
|
7
|
-
import functools
|
|
8
|
-
import warnings
|
|
9
|
-
|
|
10
|
-
try:
|
|
11
|
-
import netCDF4 as nc
|
|
12
|
-
|
|
13
|
-
try:
|
|
14
|
-
from pfit.pfnet_standard import make_temperature_base
|
|
15
|
-
except ModuleNotFoundError:
|
|
16
|
-
warnings.warn("Missing pfit library. Some functionality will be limited.", stacklevel=2)
|
|
17
|
-
|
|
18
|
-
except ModuleNotFoundError:
|
|
19
|
-
warnings.warn("Missing netCDF4 library. Some functionality will be limited.", stacklevel=2)
|
|
20
|
-
|
|
21
|
-
from typing import Union, Optional
|
|
22
|
-
from
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
import tsp
|
|
26
|
-
import tsp.
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
from tsp.
|
|
30
|
-
from tsp.
|
|
31
|
-
from tsp.time import
|
|
32
|
-
from tsp.
|
|
33
|
-
|
|
34
|
-
from
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
A
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
self.
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
self
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
"""
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
self.
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
"""
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
"""
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
""
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
|
|
758
|
-
|
|
759
|
-
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
|
|
806
|
-
fig.show()
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
834
|
-
|
|
835
|
-
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
|
|
840
|
-
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
|
|
854
|
-
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
|
|
878
|
-
|
|
879
|
-
|
|
880
|
-
|
|
881
|
-
|
|
882
|
-
|
|
883
|
-
|
|
884
|
-
|
|
885
|
-
|
|
886
|
-
|
|
887
|
-
|
|
888
|
-
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
894
|
-
|
|
895
|
-
|
|
896
|
-
|
|
897
|
-
|
|
898
|
-
|
|
899
|
-
|
|
900
|
-
|
|
901
|
-
|
|
902
|
-
|
|
903
|
-
|
|
904
|
-
|
|
905
|
-
|
|
906
|
-
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
912
|
-
|
|
913
|
-
|
|
914
|
-
|
|
915
|
-
|
|
916
|
-
|
|
917
|
-
|
|
918
|
-
|
|
919
|
-
|
|
920
|
-
|
|
921
|
-
|
|
922
|
-
|
|
923
|
-
|
|
924
|
-
|
|
925
|
-
|
|
926
|
-
|
|
927
|
-
|
|
928
|
-
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
|
|
939
|
-
|
|
940
|
-
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
|
|
953
|
-
|
|
954
|
-
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
|
|
958
|
-
|
|
959
|
-
|
|
960
|
-
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
|
|
972
|
-
|
|
973
|
-
|
|
974
|
-
|
|
975
|
-
|
|
976
|
-
|
|
977
|
-
|
|
978
|
-
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
982
|
-
|
|
983
|
-
|
|
984
|
-
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
|
|
1004
|
-
|
|
1005
|
-
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
|
|
1009
|
-
|
|
1010
|
-
|
|
1011
|
-
|
|
1012
|
-
|
|
1013
|
-
|
|
1014
|
-
|
|
1015
|
-
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
return
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
|
|
1028
|
-
|
|
1029
|
-
|
|
1030
|
-
|
|
1031
|
-
|
|
1032
|
-
|
|
1033
|
-
|
|
1034
|
-
|
|
1035
|
-
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import pandas as pd
|
|
4
|
+
import re
|
|
5
|
+
import inspect
|
|
6
|
+
import numpy as np
|
|
7
|
+
import functools
|
|
8
|
+
import warnings
|
|
9
|
+
|
|
10
|
+
try:
|
|
11
|
+
import netCDF4 as nc
|
|
12
|
+
|
|
13
|
+
try:
|
|
14
|
+
from pfit.pfnet_standard import make_temperature_base, calculate_extent_metadata as calc_ext_meta
|
|
15
|
+
except ModuleNotFoundError:
|
|
16
|
+
warnings.warn("Missing pfit library. Some functionality will be limited.", stacklevel=2)
|
|
17
|
+
|
|
18
|
+
except ModuleNotFoundError:
|
|
19
|
+
warnings.warn("Missing netCDF4 library. Some functionality will be limited.", stacklevel=2)
|
|
20
|
+
|
|
21
|
+
from typing import Union, Optional
|
|
22
|
+
from numpy.typing import NDArray
|
|
23
|
+
from datetime import datetime, tzinfo, timezone, timedelta
|
|
24
|
+
|
|
25
|
+
import tsp
|
|
26
|
+
import tsp.labels as lbl
|
|
27
|
+
import tsp.tspwarnings as tw
|
|
28
|
+
|
|
29
|
+
from tsp.physics import analytical_fourier
|
|
30
|
+
from tsp.plots.static import trumpet_curve, colour_contour, time_series, profile_evolution
|
|
31
|
+
from tsp.time import format_utc_offset
|
|
32
|
+
from tsp.time import get_utc_offset
|
|
33
|
+
from tsp.misc import completeness
|
|
34
|
+
from tsp.concatenation import _tsp_concat
|
|
35
|
+
|
|
36
|
+
from matplotlib.figure import Figure
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class TSP:
|
|
40
|
+
""" A Time Series Profile (a collection of time series data at different depths)
|
|
41
|
+
|
|
42
|
+
A TSP can also be:
|
|
43
|
+
Thermal State of Permafrost
|
|
44
|
+
Temperature du Sol en Profondeur
|
|
45
|
+
Temperatures, Secondes, Profondeurs
|
|
46
|
+
|
|
47
|
+
Parameters
|
|
48
|
+
----------
|
|
49
|
+
times : pandas.DatetimeIndex
|
|
50
|
+
DatetimeIndex with optional UTC offset. List-like array of datetime objects can also be passed,
|
|
51
|
+
but will be converted to a DatetimeIndex with no UTC offset.
|
|
52
|
+
depths : list-like
|
|
53
|
+
d-length array of depths
|
|
54
|
+
values : numpy.ndarray
|
|
55
|
+
array with shape (t,d) containing values at (t)emperatures and (d)epths
|
|
56
|
+
longitude : float, optional
|
|
57
|
+
Longitude at which data were collected
|
|
58
|
+
latitude : float, optional
|
|
59
|
+
Latitude at which data were collected
|
|
60
|
+
site_id : str, optional
|
|
61
|
+
Name of location at which data were collected
|
|
62
|
+
metadata : dict
|
|
63
|
+
Additional metadata
|
|
64
|
+
|
|
65
|
+
Attributes
|
|
66
|
+
----------
|
|
67
|
+
values
|
|
68
|
+
latitude : float
|
|
69
|
+
Latitude at which data were collected
|
|
70
|
+
longitude : float
|
|
71
|
+
Longitude at which data were collected
|
|
72
|
+
metadata : dict
|
|
73
|
+
Additional metadata provided at instantiation or by other methods
|
|
74
|
+
"""
|
|
75
|
+
|
|
76
|
+
def __repr__(self) -> str:
|
|
77
|
+
return repr(self.wide)
|
|
78
|
+
|
|
79
|
+
def __str__(self) -> str:
|
|
80
|
+
return str(self.wide)
|
|
81
|
+
|
|
82
|
+
def __add__(self, other: TSP) -> TSP:
|
|
83
|
+
""" Concatenate two TSP objects along the time axis.
|
|
84
|
+
The two TSP objects must have the same depths and the same UTC offset.
|
|
85
|
+
|
|
86
|
+
Parameters
|
|
87
|
+
----------
|
|
88
|
+
other : TSP
|
|
89
|
+
Another TSP object to concatenate with this one
|
|
90
|
+
|
|
91
|
+
Returns
|
|
92
|
+
-------
|
|
93
|
+
TSP
|
|
94
|
+
A new TSP object with the concatenated data
|
|
95
|
+
"""
|
|
96
|
+
if not isinstance(other, TSP):
|
|
97
|
+
raise TypeError("Can only concatenate TSP objects.")
|
|
98
|
+
|
|
99
|
+
if self.utc_offset != other.utc_offset:
|
|
100
|
+
raise ValueError("UTC offsets must be the same to concatenate.")
|
|
101
|
+
|
|
102
|
+
return tsp_concat([self, other])
|
|
103
|
+
|
|
104
|
+
def __init__(self, times, depths, values,
|
|
105
|
+
latitude: Optional[float]=None,
|
|
106
|
+
longitude: Optional[float]=None,
|
|
107
|
+
site_id: Optional[str]=None,
|
|
108
|
+
metadata: dict={}):
|
|
109
|
+
|
|
110
|
+
self._times = handle_incoming_times(times)
|
|
111
|
+
if self._times.duplicated().any():
|
|
112
|
+
warnings.warn(tw.DuplicateTimesWarning(self._times), stacklevel=2)
|
|
113
|
+
|
|
114
|
+
if self.utc_offset:
|
|
115
|
+
self._output_utc_offset = self.utc_offset
|
|
116
|
+
else:
|
|
117
|
+
self._output_utc_offset = None
|
|
118
|
+
|
|
119
|
+
self._depths = np.atleast_1d(depths)
|
|
120
|
+
self._values = np.atleast_2d(values)
|
|
121
|
+
self.__number_of_observations = np.ones_like(values, dtype=int)
|
|
122
|
+
self.__number_of_observations[np.isnan(values)] = 0
|
|
123
|
+
self.metadata = metadata
|
|
124
|
+
self._latitude = latitude
|
|
125
|
+
self._longitude = longitude
|
|
126
|
+
self.site_id = site_id
|
|
127
|
+
self._freq = None
|
|
128
|
+
self._completeness = None
|
|
129
|
+
|
|
130
|
+
self._export_precision = 3
|
|
131
|
+
|
|
132
|
+
@property
|
|
133
|
+
def latitude(self):
|
|
134
|
+
""" Latitude at which data were collected """
|
|
135
|
+
return self._latitude
|
|
136
|
+
|
|
137
|
+
@latitude.setter
|
|
138
|
+
def latitude(self, value: Optional[float]):
|
|
139
|
+
if value is not None:
|
|
140
|
+
try:
|
|
141
|
+
self._latitude = float(value)
|
|
142
|
+
except ValueError:
|
|
143
|
+
raise ValueError("Latitude must be a float or None.")
|
|
144
|
+
else:
|
|
145
|
+
self._latitude = None
|
|
146
|
+
self.metadata['_latitude'] = self._latitude
|
|
147
|
+
|
|
148
|
+
@property
|
|
149
|
+
def longitude(self):
|
|
150
|
+
""" Longitude at which data were collected """
|
|
151
|
+
return self._longitude
|
|
152
|
+
|
|
153
|
+
@longitude.setter
|
|
154
|
+
def longitude(self, value: Optional[float]):
|
|
155
|
+
if value is not None:
|
|
156
|
+
try:
|
|
157
|
+
self._longitude = float(value)
|
|
158
|
+
except ValueError:
|
|
159
|
+
raise ValueError("Longitude must be a float or None.")
|
|
160
|
+
else:
|
|
161
|
+
self._longitude = None
|
|
162
|
+
self.metadata['_longitude'] = self._longitude
|
|
163
|
+
|
|
164
|
+
@property
|
|
165
|
+
def freq(self) -> Optional[int]:
|
|
166
|
+
""" Measurement frequency [s] """
|
|
167
|
+
return self._freq
|
|
168
|
+
|
|
169
|
+
@freq.setter
|
|
170
|
+
def freq(self, value: int):
|
|
171
|
+
if not isinstance(value, int):
|
|
172
|
+
raise TypeError("Must be string, e.g. '1D', '3600s'")
|
|
173
|
+
self._freq = value
|
|
174
|
+
|
|
175
|
+
@property
|
|
176
|
+
def completeness(self) -> Optional[pd.DataFrame]:
|
|
177
|
+
""" Data completeness """
|
|
178
|
+
return self._completeness
|
|
179
|
+
|
|
180
|
+
@completeness.setter
|
|
181
|
+
def completeness(self, value):
|
|
182
|
+
raise ValueError("You can't assign this variable.")
|
|
183
|
+
|
|
184
|
+
@classmethod
|
|
185
|
+
def from_tidy_format(cls, times, depths, values,
|
|
186
|
+
number_of_observations=None,
|
|
187
|
+
latitude: Optional[float]=None,
|
|
188
|
+
longitude: Optional[float]=None,
|
|
189
|
+
site_id: Optional[str]=None,
|
|
190
|
+
metadata:dict={}):
|
|
191
|
+
""" Create a TSP from data in a 'tidy' or 'long' format
|
|
192
|
+
|
|
193
|
+
Parameters
|
|
194
|
+
----------
|
|
195
|
+
times : list-like
|
|
196
|
+
n-length array of datetime objects
|
|
197
|
+
depths : list-like
|
|
198
|
+
n-length array of depths
|
|
199
|
+
values : numpy.ndarray
|
|
200
|
+
n-length array of (temperaure) values at associated time and depth
|
|
201
|
+
number_of_observations : numpy.ndarray, optional
|
|
202
|
+
n-length array of number of observations at associated time and
|
|
203
|
+
depth for aggregated values (default: 1)
|
|
204
|
+
longitude : float, optional
|
|
205
|
+
Longitude at which data were collected
|
|
206
|
+
latitude : float, optional
|
|
207
|
+
Latitude at which data were collected
|
|
208
|
+
site_id : str, optional
|
|
209
|
+
Name of location at which data were collected
|
|
210
|
+
metadata : dict
|
|
211
|
+
Additional metadata
|
|
212
|
+
"""
|
|
213
|
+
times = np.atleast_1d(times)
|
|
214
|
+
depths = np.atleast_1d(depths)
|
|
215
|
+
values = np.atleast_1d(values)
|
|
216
|
+
|
|
217
|
+
number_of_observations = number_of_observations if number_of_observations else np.ones_like(values)
|
|
218
|
+
df = pd.DataFrame({"times": times, "depths": depths, "temperature_in_ground": values, "number_of_observations": number_of_observations})
|
|
219
|
+
df.set_index(["times", "depths"], inplace=True)
|
|
220
|
+
|
|
221
|
+
try:
|
|
222
|
+
unstacked = df.unstack()
|
|
223
|
+
except ValueError as e:
|
|
224
|
+
if np.any(df.index.duplicated()):
|
|
225
|
+
print(f"Duplicate data found at {df.iloc[np.where(df.index.duplicated())[0], :].index.get_level_values(0).unique()}")
|
|
226
|
+
raise e
|
|
227
|
+
|
|
228
|
+
temps = unstacked.get('temperature_in_ground')
|
|
229
|
+
|
|
230
|
+
this = cls(times=temps.index.values,
|
|
231
|
+
depths=temps.columns.values,
|
|
232
|
+
values=temps.values,
|
|
233
|
+
latitude=latitude,
|
|
234
|
+
longitude=longitude,
|
|
235
|
+
site_id=site_id,
|
|
236
|
+
metadata=metadata)
|
|
237
|
+
|
|
238
|
+
number_of_observations = unstacked.get('number_of_observations').values
|
|
239
|
+
|
|
240
|
+
number_of_observations[np.isnan(number_of_observations)] = 0
|
|
241
|
+
this.__number_of_observations = number_of_observations
|
|
242
|
+
return this
|
|
243
|
+
|
|
244
|
+
@classmethod
|
|
245
|
+
def __from_tsp(cls, t:TSP, **kwargs) -> "TSP":
|
|
246
|
+
""" Use an existing TSP object as a template, """
|
|
247
|
+
kw = {}
|
|
248
|
+
for arg in inspect.getfullargspec(TSP).args[1:]:
|
|
249
|
+
if kwargs.get(arg) is not None:
|
|
250
|
+
kw[arg] = kwargs.get(arg)
|
|
251
|
+
else:
|
|
252
|
+
kw[arg] = getattr(t, arg)
|
|
253
|
+
|
|
254
|
+
t = TSP(**kw)
|
|
255
|
+
|
|
256
|
+
return t
|
|
257
|
+
|
|
258
|
+
@classmethod
|
|
259
|
+
def from_json(cls, json_file) -> "TSP":
|
|
260
|
+
""" Read data from a json file
|
|
261
|
+
|
|
262
|
+
Parameters
|
|
263
|
+
----------
|
|
264
|
+
json_file : str
|
|
265
|
+
Path to a json file from which to read
|
|
266
|
+
"""
|
|
267
|
+
df = pd.read_json(json_file)
|
|
268
|
+
depth_pattern = r"^(-?[0-9\.]+)$"
|
|
269
|
+
|
|
270
|
+
times = pd.to_datetime(df['time']).values
|
|
271
|
+
depths = [re.search(depth_pattern, c).group(1) for c in df.columns if tsp._is_depth_column(c, depth_pattern)]
|
|
272
|
+
values = df.loc[:, depths].to_numpy()
|
|
273
|
+
|
|
274
|
+
t = cls(times=times, depths=depths, values=values)
|
|
275
|
+
|
|
276
|
+
return t
|
|
277
|
+
|
|
278
|
+
@classmethod
|
|
279
|
+
def synthetic(cls, depths: NDArray[np.number],
|
|
280
|
+
start:str ="2000-01-01",
|
|
281
|
+
end:str ="2003-01-01",
|
|
282
|
+
freq: "str"="D",
|
|
283
|
+
Q:float=0.2,
|
|
284
|
+
c:float=1.6e6,
|
|
285
|
+
k:float=2.5,
|
|
286
|
+
A:float=6,
|
|
287
|
+
MAGST:float=-0.5) -> "TSP":
|
|
288
|
+
"""
|
|
289
|
+
Create a 'synthetic' temperature time series using the analytical solution to the heat conduction equation.
|
|
290
|
+
Suitable for testing
|
|
291
|
+
|
|
292
|
+
Parameters
|
|
293
|
+
----------
|
|
294
|
+
depths : np.ndarray
|
|
295
|
+
array of depths in metres
|
|
296
|
+
start : str
|
|
297
|
+
start date for the time series, in the format "YYYY-MM-DD"
|
|
298
|
+
end : str
|
|
299
|
+
end date for the time series, in the format "YYYY-MM-DD"
|
|
300
|
+
freq : str
|
|
301
|
+
pandas frequency string, e.g. "D" for daily, "H" for hourly, etc.
|
|
302
|
+
Q : Optional[float], optional
|
|
303
|
+
Ground heat flux [W m-2], by default 0.2
|
|
304
|
+
c : Optional[float], optional
|
|
305
|
+
heat capacity [J m-3 K-1], by default 1.6e6
|
|
306
|
+
k : Optional[float], optional
|
|
307
|
+
thermal conductivity [W m-1 K-1], by default 2.5
|
|
308
|
+
A : Optional[float], optional
|
|
309
|
+
Amplitude of temperature fluctuation [C], by default 6
|
|
310
|
+
MAGST : Optional[float], optional
|
|
311
|
+
Mean annual ground surface temperature [C], by default -0.5
|
|
312
|
+
|
|
313
|
+
Returns
|
|
314
|
+
-------
|
|
315
|
+
TSP
|
|
316
|
+
A timeseries profile (TSP) object
|
|
317
|
+
"""
|
|
318
|
+
times = pd.date_range(start=start, end=end, freq=freq).to_pydatetime()
|
|
319
|
+
t_sec = np.array([(t-times[0]).total_seconds() for t in times])
|
|
320
|
+
|
|
321
|
+
values = analytical_fourier(depths=depths,
|
|
322
|
+
times=t_sec,
|
|
323
|
+
Q=Q,
|
|
324
|
+
c=c,
|
|
325
|
+
k=k,
|
|
326
|
+
A=A,
|
|
327
|
+
MAGST=MAGST)
|
|
328
|
+
|
|
329
|
+
this = cls(depths=depths, times=times, values=values)
|
|
330
|
+
|
|
331
|
+
return this
|
|
332
|
+
|
|
333
|
+
@property
|
|
334
|
+
@functools.lru_cache()
|
|
335
|
+
def long(self) -> "pd.DataFrame":
|
|
336
|
+
""" Return the data in a 'long' or 'tidy' format (one row per observation, one column per variable)
|
|
337
|
+
|
|
338
|
+
Returns
|
|
339
|
+
-------
|
|
340
|
+
pandas.DataFrame
|
|
341
|
+
Time series profile data with columns:
|
|
342
|
+
- **time**: time
|
|
343
|
+
- **depth**: depth
|
|
344
|
+
- **temperature_in_ground**: temperature
|
|
345
|
+
- **number_of_observations**: If data are aggregated, how many observations are used in the aggregation
|
|
346
|
+
"""
|
|
347
|
+
values = self.wide.melt(id_vars='time',
|
|
348
|
+
var_name="depth",
|
|
349
|
+
value_name="temperature_in_ground")
|
|
350
|
+
|
|
351
|
+
number_of_observations = self.number_of_observations.melt(id_vars='time',
|
|
352
|
+
var_name="depth",
|
|
353
|
+
value_name="number_of_observations")
|
|
354
|
+
|
|
355
|
+
values['number_of_observations'] = number_of_observations['number_of_observations']
|
|
356
|
+
|
|
357
|
+
return values
|
|
358
|
+
|
|
359
|
+
@property
|
|
360
|
+
@functools.lru_cache()
|
|
361
|
+
def wide(self) -> "pd.DataFrame":
|
|
362
|
+
""" Return the data in a 'wide' format (one column per depth)
|
|
363
|
+
|
|
364
|
+
Returns
|
|
365
|
+
-------
|
|
366
|
+
pandas.DataFrame
|
|
367
|
+
Time series profile data
|
|
368
|
+
"""
|
|
369
|
+
tabular = pd.DataFrame(self._values)
|
|
370
|
+
tabular.columns = self._depths
|
|
371
|
+
tabular.index = self.times
|
|
372
|
+
tabular.insert(0, "time", self.times)
|
|
373
|
+
|
|
374
|
+
return tabular
|
|
375
|
+
|
|
376
|
+
    @property
    @functools.lru_cache()
    def number_of_observations(self) -> "pd.DataFrame":
        """ The number of observations for an average at a particular depth or time.

        For pure observational data, the number of observations will always be '1'. When data are aggregated,
        (e.g. using :py:meth:`~tsp.core.TSP.monthly` or :py:meth:`~tsp.core.TSP.daily`) these numbers
        will be greater than 1.

        Returns
        -------
        DataFrame
            Number of observations: one column per depth plus a leading 'time' column.
        """
        # Internal counts may be stored as booleans (see reset_counts);
        # cast to int for presentation.
        tabular = pd.DataFrame(self.__number_of_observations, dtype=int)
        tabular.columns = self._depths
        # Uses the raw (internal) times, unlike `wide` which uses the display times.
        tabular.index = self._times
        tabular.insert(0, "time", self._times)

        return tabular
|
|
396
|
+
|
|
397
|
+
@number_of_observations.setter
|
|
398
|
+
def number_of_observations(self, value):
|
|
399
|
+
raise ValueError(f"You can't assign {value} to this variable (no assignment allowed).")
|
|
400
|
+
|
|
401
|
+
    def reset_counts(self):
        """ Set observation count to 1 if data exists, 0 otherwise """
        # Stored as a boolean mask (True where a value exists); it is cast to
        # integer counts when read back via the number_of_observations property.
        self.__number_of_observations = (~self.wide.isna()).astype('boolean')
|
|
404
|
+
|
|
405
|
+
    def set_utc_offset(self, offset:"Union[int,str]") -> None:
        """ Set the time zone of the data by providing a UTC offset

        Parameters
        ----------
        offset : int, str
            If int, the number of seconds. If str, a string in the format "+HH:MM" or "-HH:MM"

        Raises
        ------
        ValueError
            If a UTC offset has already been set (it can only be set once).
        """
        if self.utc_offset is not None:
            raise ValueError("You can only set the UTC offset once.")

        # Normalize int-or-string input to a number of seconds.
        utc_offset = get_utc_offset(offset)

        tz = timezone(timedelta(seconds = utc_offset))
        self._times = self._times.tz_localize(tz)
        # The display/output offset defaults to the data's own offset.
        self._output_utc_offset = timezone(timedelta(seconds = utc_offset))

        # Cached tabular views embed times, so they must be invalidated.
        TSP.wide.fget.cache_clear()
        TSP.long.fget.cache_clear()
|
|
424
|
+
|
|
425
|
+
@property
|
|
426
|
+
def utc_offset(self) -> "Optional[tzinfo]":
|
|
427
|
+
""" Get the time zone of the data by providing a UTC offset
|
|
428
|
+
|
|
429
|
+
Returns
|
|
430
|
+
-------
|
|
431
|
+
datetime.tzinfo
|
|
432
|
+
A timezone object
|
|
433
|
+
"""
|
|
434
|
+
if self._times.tz is None:
|
|
435
|
+
return None
|
|
436
|
+
else:
|
|
437
|
+
return self._times.tz
|
|
438
|
+
|
|
439
|
+
@utc_offset.setter
|
|
440
|
+
def utc_offset(self, value):
|
|
441
|
+
self.set_utc_offset(value)
|
|
442
|
+
|
|
443
|
+
@property
|
|
444
|
+
def output_utc_offset(self) -> "Optional[tzinfo]":
|
|
445
|
+
""" Get the time zone in which to output or display the data by providing a UTC offset
|
|
446
|
+
|
|
447
|
+
Returns
|
|
448
|
+
-------
|
|
449
|
+
datetime.tzinfo
|
|
450
|
+
A timezone object
|
|
451
|
+
"""
|
|
452
|
+
if self._output_utc_offset is None:
|
|
453
|
+
return None
|
|
454
|
+
else:
|
|
455
|
+
return self._output_utc_offset
|
|
456
|
+
|
|
457
|
+
@output_utc_offset.setter
|
|
458
|
+
def output_utc_offset(self, offset:"Union[int,str]") -> None:
|
|
459
|
+
self.set_output_utc_offset(offset)
|
|
460
|
+
|
|
461
|
+
def set_output_utc_offset(self, offset:"Union[int,str]") -> None:
|
|
462
|
+
""" Set the time zone in which to display the output or data by providing a UTC offset
|
|
463
|
+
Parameters
|
|
464
|
+
----------
|
|
465
|
+
offset : int, str
|
|
466
|
+
If int, the number of seconds. If str, a string in the format "+HH:MM" or "-HH:MM"
|
|
467
|
+
"""
|
|
468
|
+
utc_offset = get_utc_offset(offset)
|
|
469
|
+
tz = timezone(timedelta(seconds = utc_offset))
|
|
470
|
+
self._output_utc_offset = tz
|
|
471
|
+
|
|
472
|
+
TSP.wide.fget.cache_clear()
|
|
473
|
+
TSP.long.fget.cache_clear()
|
|
474
|
+
|
|
475
|
+
def reset_output_utc_offset(self) -> None:
|
|
476
|
+
""" Reset the time zone in which to output or display the data to the default (the one set by set_utc_offset)
|
|
477
|
+
|
|
478
|
+
"""
|
|
479
|
+
if self.utc_offset is None:
|
|
480
|
+
raise ValueError("You can't reset the output time zone if the time zone of the data hasn't yet been set with set_utc_offset.")
|
|
481
|
+
else:
|
|
482
|
+
self._output_utc_offset = self.utc_offset
|
|
483
|
+
|
|
484
|
+
    def __nly(self,
              freq_fmt:str,
              new_freq,
              min_count:Optional[int],
              max_gap:Optional[int],
              min_span:Optional[int]) -> TSP:
        """
        Temporal aggregation by grouping according to a string-ified time

        Parameters
        ----------
        freq_fmt : str
            pandas resampling frequency string (e.g. "D", "M", "Y")
        new_freq
            Frequency label assigned to the resulting TSP (e.g. lbl.DAILY)
        min_count : int, optional
            Minimum number of observations for an aggregate to be valid
        max_gap : int, optional
            Maximum gap (seconds) between observations for an aggregate to be valid
        min_span : int, optional
            Minimum data range (seconds) for an aggregate to be valid

        Returns
        -------
        TSP
            A new TSP of aggregated means; aggregates failing any criterion are NaN.
        """
        R = self.wide.drop("time", axis=1).resample(freq_fmt)
        # Sum of per-observation counts: carries through prior aggregation levels.
        cumulative_obs = self.number_of_observations.drop("time", axis=1).resample(freq_fmt).sum()
        total_obs = R.count()
        values = R.mean()

        # Calculate masks. All three names start bound to the same all-False frame;
        # each is *rebound* (not mutated) below when its criterion applies.
        mc_mask = Mg_mask = ms_mask = pd.DataFrame(index=values.index, columns=values.columns, data=False)

        if min_count is not None:
            mc_mask = (cumulative_obs < min_count)
        if max_gap is not None:
            Mg_mask = max_gap_mask(R, max_gap)
        if min_span is not None:
            ms_mask = min_span_mask(R, min_span)

        # An aggregate is invalidated if it fails ANY criterion.
        mask = (mc_mask | Mg_mask | ms_mask)
        values[mask] = np.nan

        # Construct TSP
        t = TSP.__from_tsp(self, times=values.index,
                           depths=values.columns,
                           values=values.values)
        t.__number_of_observations = cumulative_obs
        t.freq = new_freq

        # Calculate data completeness (only possible when the source frequency is known)
        if self.freq is not None:
            f1 = self.freq
            f2 = new_freq
            t._completeness = completeness(total_obs, f1, f2)

        return t
|
|
535
|
+
|
|
536
|
+
    def monthly(self,
                min_count:Optional[int]=24,
                max_gap:Optional[int]=3600*24*8,
                min_span:Optional[int]=3600*24*21) -> "TSP":
        """ Monthly averages, possibly with some months unavailable (NaN) if there is insufficient data

        Parameters
        ----------
        min_count : int
            Minimum number of observations in a month to be considered a valid average,
            defaults to 24
        max_gap : int
            Maximum gap (in seconds) between data points to be considered a valid average,
            defaults to 8 days (3600*24*8 seconds)
        min_span : int
            Minimum total data range (in seconds) to be considered a valid average,
            defaults to 21 days (3600*24*21 seconds)

        Returns
        -------
        TSP
            A TSP object with data aggregated to monthly averages
        """
        t = self.__nly(freq_fmt="M",
                       new_freq=lbl.MONTHLY,
                       min_count=min_count,
                       max_gap=max_gap,
                       min_span=min_span)

        return t
|
|
566
|
+
|
|
567
|
+
def daily(self,
|
|
568
|
+
min_count:Optional[int]=None,
|
|
569
|
+
max_gap:Optional[int]=None,
|
|
570
|
+
min_span:Optional[int]=None) -> "TSP":
|
|
571
|
+
""" Daily averages, possibly with some days unavailable (NaN) if there is insufficient data
|
|
572
|
+
|
|
573
|
+
Parameters
|
|
574
|
+
----------
|
|
575
|
+
min_count : int
|
|
576
|
+
Minimum number of observations in a day to be considered a valid average,
|
|
577
|
+
defaults to None
|
|
578
|
+
max_gap : int
|
|
579
|
+
Maximum gap (in seconds) between data points to be considered a valid average, defaults to None
|
|
580
|
+
min_span : int
|
|
581
|
+
Minimum total data range (in seconds) to be consiered a valid average, defaults to None
|
|
582
|
+
|
|
583
|
+
Returns
|
|
584
|
+
-------
|
|
585
|
+
TSP
|
|
586
|
+
A TSP object with data aggregated to daily averages
|
|
587
|
+
"""
|
|
588
|
+
# if the data is already daily +/- 1min , just return it
|
|
589
|
+
t = self.__nly(freq_fmt="D",
|
|
590
|
+
new_freq=lbl.DAILY,
|
|
591
|
+
min_count=min_count,
|
|
592
|
+
max_gap=max_gap,
|
|
593
|
+
min_span=min_span)
|
|
594
|
+
|
|
595
|
+
return t
|
|
596
|
+
|
|
597
|
+
def yearly(self,
|
|
598
|
+
min_count:Optional[int]=None,
|
|
599
|
+
max_gap:Optional[int]=None,
|
|
600
|
+
min_span:Optional[int]=None) -> "TSP":
|
|
601
|
+
""" Yearly averages, possibly with some years unavailable (NaN) if there is insufficient data
|
|
602
|
+
|
|
603
|
+
Parameters
|
|
604
|
+
----------
|
|
605
|
+
min_count : int
|
|
606
|
+
Minimum number of observations in a month to be considered a valid average, defaults to None
|
|
607
|
+
max_gap : int
|
|
608
|
+
Maximum gap (in seconds) between data points to be considered a valid average, defaults to None
|
|
609
|
+
min_span : int
|
|
610
|
+
Minimum total data range (in seconds) to be consiered a valid average, defaults to None
|
|
611
|
+
|
|
612
|
+
Returns
|
|
613
|
+
-------
|
|
614
|
+
TSP
|
|
615
|
+
A TSP object with data aggregated to yearly averages
|
|
616
|
+
"""
|
|
617
|
+
t = self.__nly(freq_fmt="Y",
|
|
618
|
+
new_freq=lbl.YEARLY,
|
|
619
|
+
min_count=min_count,
|
|
620
|
+
max_gap=max_gap,
|
|
621
|
+
min_span=min_span)
|
|
622
|
+
|
|
623
|
+
return t
|
|
624
|
+
|
|
625
|
+
@property
|
|
626
|
+
def depths(self) -> NDArray[np.number]:
|
|
627
|
+
""" Return the depth values in the profile
|
|
628
|
+
|
|
629
|
+
Returns
|
|
630
|
+
-------
|
|
631
|
+
numpy.ndarray
|
|
632
|
+
The depths in the profile
|
|
633
|
+
"""
|
|
634
|
+
return self._depths
|
|
635
|
+
|
|
636
|
+
@depths.setter
|
|
637
|
+
def depths(self, value):
|
|
638
|
+
depths = np.atleast_1d(value)
|
|
639
|
+
|
|
640
|
+
if not len(depths) == len(self._depths):
|
|
641
|
+
raise ValueError(f"List of depths must have length of {len(self._depths)}.")
|
|
642
|
+
|
|
643
|
+
self._depths = depths
|
|
644
|
+
|
|
645
|
+
TSP.wide.fget.cache_clear()
|
|
646
|
+
TSP.long.fget.cache_clear()
|
|
647
|
+
|
|
648
|
+
@property
|
|
649
|
+
def times(self):
|
|
650
|
+
""" Return the timestamps in the time series
|
|
651
|
+
|
|
652
|
+
Returns
|
|
653
|
+
-------
|
|
654
|
+
pandas.DatetimeIndex
|
|
655
|
+
The timestamps in the time series
|
|
656
|
+
"""
|
|
657
|
+
if self.utc_offset is None:
|
|
658
|
+
return self._times
|
|
659
|
+
|
|
660
|
+
elif self._output_utc_offset == self.utc_offset:
|
|
661
|
+
return self._times
|
|
662
|
+
|
|
663
|
+
else:
|
|
664
|
+
return self._times.tz_convert(self.output_utc_offset)
|
|
665
|
+
|
|
666
|
+
@property
|
|
667
|
+
def values(self):
|
|
668
|
+
return self._values
|
|
669
|
+
|
|
670
|
+
def to_gtnp(self, filename: str) -> None:
|
|
671
|
+
""" Write the data in GTN-P format
|
|
672
|
+
|
|
673
|
+
Parameters
|
|
674
|
+
----------
|
|
675
|
+
filename : str
|
|
676
|
+
Path to the file to write to
|
|
677
|
+
"""
|
|
678
|
+
df = self.wide.round(self._export_precision).rename(columns={'time': 'Date/Depth'})
|
|
679
|
+
df['Date/Depth'] = df['Date/Depth'].dt.strftime("%Y-%m-%d %H:%M:%S")
|
|
680
|
+
|
|
681
|
+
df.to_csv(filename, index=False, na_rep="-999")
|
|
682
|
+
|
|
683
|
+
    def to_ntgs(self, filename:str, project_name:Optional[str]="", site_id:"Optional[str]" = None, latitude:"Optional[float]"=None, longitude:"Optional[float]"=None) -> None:
        """ Write the data in NTGS template format

        Parameters
        ----------
        filename : str
            Path to the file to write to
        project_name : str, optional
            The project name, by default "". Passing None falls back to the
            'project_name' entry in self.metadata.
        site_id : str, optional
            The name of the site, by default None (falls back to self.site_id)
        latitude : float, optional
            WGS84 latitude at which the observations were recorded, by default None (falls back to self.latitude)
        longitude : float, optional
            WGS84 longitude at which the observations were recorded, by default None (falls back to self.longitude)
        """
        # Fall back to object attributes (or "" when those are also missing).
        if latitude is None:
            latitude = self.latitude if self.latitude is not None else ""

        if longitude is None:
            longitude = self.longitude if self.longitude is not None else ""

        if site_id is None:
            site_id = self.site_id if self.site_id is not None else ""

        if project_name is None:
            project_name = self.metadata.get("project_name", "")

        data = self.values

        # Start from empty, typed columns so the output column order is fixed:
        # project_name, site_id, latitude, longitude, then date/time, then depths.
        df = pd.DataFrame({'project_name': pd.Series(dtype='str'),
                           'site_id': pd.Series(dtype='str'),
                           'latitude': pd.Series(dtype='float'),
                           'longitude': pd.Series(dtype='float')
                           })

        df["date_YYYY-MM-DD"] = pd.Series(self.times).dt.strftime(r"%Y-%m-%d")
        df["time_HH:MM:SS"] = pd.Series(self.times).dt.strftime(r"%H:%M:%S")

        df["project_name"] = project_name
        df["site_id"] = site_id
        df["latitude"] = latitude
        df["longitude"] = longitude

        # One temperature column per depth, labelled like "1.5_m".
        headers = [str(d) + "_m" for d in self.depths]

        for i, h in enumerate(headers):
            df[h] = data[:, i].round(self._export_precision)

        df.to_csv(filename, index=False)
|
|
733
|
+
|
|
734
|
+
    def to_netcdf(self, file: str, only_use_cf_metadata=True, calculate_extent_metadata=True) -> None:
        """ Write the data as a netcdf

        Parameters
        ----------
        file : str
            Path of the netCDF file to create.
        only_use_cf_metadata : bool, optional
            If True, write only the 'CF' sub-dictionary of self.metadata as global
            attributes; otherwise write all of self.metadata. Default True.
        calculate_extent_metadata : bool, optional
            If True, compute extent metadata on the finished file. Default True.
        """
        try:
            ncf = make_temperature_base(file, ndepth=len(self.depths), ntime=len(self.times), strings_as_strings=True)
        except NameError:
            # netCDF support is an optional extra; its names may be undefined.
            warnings.warn("Missing required packages. Try installing with `pip install tsp[nc]`", stacklevel=2)
            return

        with nc.Dataset(ncf, 'a') as ncd:
            pytime = self.times.to_pydatetime()

            ncd['depth_below_ground_surface'][:] = self.depths


            ncd['time'][:] = nc.date2num(pytime, ncd['time'].units, ncd['time'].calendar)
            ncd['ground_temperature'][:] = self.values

            # NOTE(review): truthiness tests skip a latitude/longitude of exactly 0 -- confirm intended.
            if self.latitude:
                ncd['latitude'][:] = self.latitude
            if self.longitude:
                ncd['longitude'][:] = self.longitude
            if self.site_id:
                if ncd['site_name'].dtype == str:
                    ncd['site_name'][0] = self.site_id
                else:
                    # Fixed-width char array: fit the id to the variable's string length.
                    strlen = ncd['site_name'].shape[0]
                    ncd['site_name'][:] = nc.stringtochar(np.array([self.site_id], f"S{strlen}"))

            if "_elevation" in self.metadata:
                ncd['surface_elevation'][:] = self.metadata.get("_elevation")

            if only_use_cf_metadata:
                metadata = self.metadata.get('CF', {})
            else:
                metadata = self.metadata

            for key, value in metadata.items():
                try:
                    if isinstance(value, str):
                        ncd.setncattr_string(key, value)
                    else:
                        ncd.setncattr(key, value)
                except Exception:
                    # Best-effort: skip attributes netCDF cannot serialize.
                    warnings.warn(f"Could not set metadata item: {key} : {value}", stacklevel=2)

            if calculate_extent_metadata:
                calc_ext_meta(ncd)
|
|
781
|
+
|
|
782
|
+
def to_json(self, file: str) -> None:
|
|
783
|
+
""" Write the data to a serialized json file """
|
|
784
|
+
with open(file, 'w') as f:
|
|
785
|
+
f.write(self._to_json())
|
|
786
|
+
|
|
787
|
+
def _to_json(self) -> str:
|
|
788
|
+
return self.wide.round(self._export_precision).to_json()
|
|
789
|
+
|
|
790
|
+
def plot_profiles(self, P:int=100, n:int=10) -> Figure:
|
|
791
|
+
""" Create a plot of the temperature profiles at different times
|
|
792
|
+
|
|
793
|
+
Parameters
|
|
794
|
+
----------
|
|
795
|
+
P : int
|
|
796
|
+
Percentage of time range to plot
|
|
797
|
+
n : int
|
|
798
|
+
Number of evenly-spaced profiles to plot
|
|
799
|
+
|
|
800
|
+
Returns
|
|
801
|
+
-------
|
|
802
|
+
Figure
|
|
803
|
+
matplotlib `Figure` object
|
|
804
|
+
"""
|
|
805
|
+
fig = profile_evolution(depths=self.depths, times=self.times, values=self._values, P=P, n=n)
|
|
806
|
+
fig.show()
|
|
807
|
+
return fig
|
|
808
|
+
|
|
809
|
+
    def plot_trumpet(self,
                     year: Optional[int]=None,
                     begin: Optional[datetime]=None,
                     end: Optional[datetime]=None,
                     min_completeness: Optional[float]=None,
                     **kwargs) -> Figure:
        """ Create a trumpet plot from the data

        Parameters
        ----------
        year : int, optional
            Which year to plot
        begin : datetime, optional
            If 'end' also provided, the earliest measurement to include in the averaging for the plot
            (not yet implemented)
        end : datetime, optional
            If 'begin' also provided, the latest measurement to include in the averaging for the plot
            (not yet implemented)
        min_completeness : float, optional
            If provided, the minimum completeness (fractional, 0 to 1) required to include
            in temperature envelope, otherwise
            the point is plotted as an unconnected, slightly transparent dot, by default None
        **kwargs : dict, optional
            Extra arguments to the plotting function: refer to the documentation for :func:`~tsp.plots.static.trumpet_curve` for a
            list of all possible arguments.

        Returns
        -------
        Figure
            a matplotlib `Figure` object

        Raises
        ------
        NotImplementedError
            If 'begin' or 'end' is given (only 'year' selection is implemented).
        ValueError
            If none of 'year', 'begin', 'end' is given.
        """
        df = self.long.dropna()

        if year is not None:
            df = df[df['time'].dt.year == year]

        elif begin is not None or end is not None:
            raise NotImplementedError

        else:
            raise ValueError("One of 'year', 'begin', 'end' must be provided.")

        # Per-depth min/max/mean define the trumpet envelope.
        grouped = df.groupby('depth')

        max_t = grouped.max().get('temperature_in_ground').values
        min_t = grouped.min().get('temperature_in_ground').values
        mean_t = grouped.mean().get('temperature_in_ground').values
        depth = np.array([d for d in grouped.groups.keys()])

        # Calculate completeness from an unfiltered yearly aggregation.
        c = self.yearly(None, None, None).completeness

        if min_completeness is not None and c is not None:
            # Completeness values for the selected year, aligned to the grouped depths.
            C = c[c.index.year == year]
            C = C[depth].iloc[0,:].values

        else:
            C = None

        fig = trumpet_curve(depth=depth,
                            t_max=max_t,
                            t_min=min_t,
                            t_mean=mean_t,
                            min_completeness=min_completeness,
                            data_completeness=C,
                            **kwargs)
        fig.show()

        return fig
|
|
876
|
+
|
|
877
|
+
def plot_contour(self, **kwargs) -> Figure:
|
|
878
|
+
""" Create a contour plot
|
|
879
|
+
|
|
880
|
+
Parameters
|
|
881
|
+
----------
|
|
882
|
+
**kwargs : dict, optional
|
|
883
|
+
Extra arguments to the plotting function: refer to the documentation for :func:`~tsp.plots.static.colour_contour` for a
|
|
884
|
+
list of all possible arguments.
|
|
885
|
+
|
|
886
|
+
Returns
|
|
887
|
+
-------
|
|
888
|
+
Figure
|
|
889
|
+
matplotlib `Figure` object
|
|
890
|
+
"""
|
|
891
|
+
fig = colour_contour(depths=self.depths, times=self.times, values=self._values, **kwargs)
|
|
892
|
+
|
|
893
|
+
if self.output_utc_offset is not None:
|
|
894
|
+
label = format_utc_offset(self.output_utc_offset)
|
|
895
|
+
if label != "UTC":
|
|
896
|
+
label = f"UTC{label}"
|
|
897
|
+
fig.axes[0].set_xlabel(f"Time [{label}]")
|
|
898
|
+
|
|
899
|
+
fig.show()
|
|
900
|
+
|
|
901
|
+
return fig
|
|
902
|
+
|
|
903
|
+
def plot_timeseries(self, depths: list=[], **kwargs) -> Figure:
|
|
904
|
+
"""Create a time series T(t) plot
|
|
905
|
+
|
|
906
|
+
Parameters
|
|
907
|
+
----------
|
|
908
|
+
depths : list, optional
|
|
909
|
+
If non-empty, restricts the depths to include in the plot, by default []
|
|
910
|
+
**kwargs : dict, optional
|
|
911
|
+
Extra arguments to the plotting function: refer to the documentation for :func:`~tsp.plots.static.time_series` for a
|
|
912
|
+
list of all possible arguments.
|
|
913
|
+
|
|
914
|
+
Returns
|
|
915
|
+
-------
|
|
916
|
+
Figure
|
|
917
|
+
matplotlib `Figure` object
|
|
918
|
+
"""
|
|
919
|
+
if depths == []:
|
|
920
|
+
depths = self.depths
|
|
921
|
+
|
|
922
|
+
d_mask = np.isin(self.depths, depths)
|
|
923
|
+
|
|
924
|
+
fig = time_series(self.depths[d_mask], self.times, self.values[:, d_mask], **kwargs)
|
|
925
|
+
|
|
926
|
+
|
|
927
|
+
if self.output_utc_offset is not None:
|
|
928
|
+
label = format_utc_offset(self.output_utc_offset)
|
|
929
|
+
if label != "UTC":
|
|
930
|
+
label = f"UTC{label}"
|
|
931
|
+
fig.axes[0].set_xlabel(f"Time [{label}]")
|
|
932
|
+
fig.autofmt_xdate()
|
|
933
|
+
fig.show()
|
|
934
|
+
|
|
935
|
+
return fig
|
|
936
|
+
|
|
937
|
+
|
|
938
|
+
class AggregatedTSP(TSP):
    """ A Time Series Profile that uses indices (1,2,3,...) instead of depth values.

    Used in situations when depths are unknown (such as when reading datalogger exports
    that don't have depth measurements.)

    NOTE(review): this docstring is identical to IndexedTSP's and looks
    copy-pasted; the class name suggests it should instead describe a TSP
    produced by temporal aggregation. Confirm the intended description.

    Parameters
    ----------
    times : list-like
        t-length array of datetime objects
    values : numpy.ndarray
        array with shape (t,d) containing values at (t)emperatures and (d)epths
    **kwargs : dict
        Extra arguments to parent class: refer to :py:class:`tsp.core.TSP` documentation for a
        list of all possible arguments.
    """
|
|
954
|
+
|
|
955
|
+
|
|
956
|
+
class IndexedTSP(TSP):
    """ A Time Series Profile that uses indices (1,2,3,...) instead of depth values.

    Used in situations when depths are unknown (such as when reading datalogger exports
    that don't have depth measurements.)

    Parameters
    ----------
    times : list-like
        t-length array of datetime objects
    values : numpy.ndarray
        array with shape (t,d) containing values at (t)emperatures and (d)epths
    **kwargs : dict
        Extra arguments to parent class: refer to :py:class:`~tsp.core.TSP` documentation for a
        list of all possible arguments.
    """

    def __init__(self, times, values, **kwargs):
        # Depth 'indices' are 1-based: 1..d
        depths = np.arange(0, values.shape[1]) + 1
        super().__init__(times=times, depths=depths, values=values, **kwargs)

    @property
    def depths(self) -> np.ndarray:
        """Depth indices

        Returns
        -------
        numpy.ndarray
            An array of depth indices
        """
        # Typo fix in the user-facing warning: "instad" -> "instead".
        warnings.warn("This TSP uses indices (1,2,3,...) instead of depths. Use set_depths() to use measured depths.", stacklevel=2)
        return self._depths

    @depths.setter
    def depths(self, value):
        # Delegate validation to the parent class's property setter.
        TSP.depths.__set__(self, value)

    def set_depths(self, depths: np.ndarray):
        """Assign depth values to depth indices. Change the object to a :py:class:`~tsp.core.TSP`

        Parameters
        ----------
        depths : np.ndarray
            An array or list of depth values equal in length to the depth indices
        """
        self.depths = depths
        # With real depths assigned, behave as a plain TSP (no more index warnings).
        self.__class__ = TSP
|
|
1003
|
+
|
|
1004
|
+
|
|
1005
|
+
|
|
1006
|
+
def span(S: pd.Series) -> float:
    """ Return the range (seconds) between the first and last valid entries of S, or 0 when none exist. """
    start, stop = S.first_valid_index(), S.last_valid_index()
    return 0 if start is None or stop is None else (stop - start).total_seconds()
|
|
1013
|
+
|
|
1014
|
+
def min_span_mask(R: "pd.core.resample.DatetimeIndexResampler",
                  threshold: float) -> "pd.DataFrame":
    """ True where the valid-data span within a resampling bin is below `threshold` seconds. """
    spans = R.apply(span)
    return spans < threshold
|
|
1018
|
+
|
|
1019
|
+
|
|
1020
|
+
def gap(S: pd.Series) -> float:
    """ Return the largest time step (seconds) between consecutive valid entries of S.

    Returns 0 when there are fewer than two gaps (i.e. fewer than three valid points).
    """
    deltas = np.diff(S.dropna().index)
    if len(deltas) <= 1:
        return 0
    return max(deltas).astype('timedelta64[s]').astype(float)
|
|
1030
|
+
|
|
1031
|
+
|
|
1032
|
+
def max_gap_mask(R: "pd.core.resample.DatetimeIndexResampler",
                 threshold: float) -> "pd.DataFrame":
    """ True where a resampling bin has a gap above `threshold` seconds, or too few points (gap() == 0). """
    gaps = R.apply(gap)
    return (gaps > threshold) | (gaps == 0)
|
|
1036
|
+
|
|
1037
|
+
|
|
1038
|
+
|
|
1039
|
+
|
|
1040
|
+
def _temporal_gap_mask(grouped: "pd.core.groupby.DataFrameGroupBy", max_gap: Optional[int], min_span: Optional[int]) -> np.ndarray:
    """ Mask out observational groups in which there is more than a certain size temporal gap

    Controls for gaps in the data within an aggregation group (using max_gap) and missing data at the beginning
    or end of the aggregation group (using min_span).

    Parameters
    ----------
    grouped : pandas.core.groupby.DataFrameGroupBy
        groupby with 'time' and 'depth' columns
    max_gap : int
        maximum gap in seconds to tolerate between observations in a group
    min_span : int
        minimum data range (beginning to end) in seconds.

    Returns
    -------
    numpy.ndarray
        boolean array with ``True`` where measurement spacing or range in group does not satisfy tolerances
    """
    if max_gap is not None:
        # Largest within-group time step; 0 means too few points to measure a gap.
        max_diff = grouped.time.apply(np.diff).apply(lambda x: np.max(x, initial=np.timedelta64(0))).apply(lambda x: x.total_seconds())
        max_diff = max_diff.unstack().to_numpy()
        diff_mask = np.where((max_diff == 0) | (max_diff >= max_gap), True, False)
    else:
        # NOTE(review): np.zeros_like on a GroupBy object is unusual -- verify its
        # shape actually matches the unstacked masks above.
        diff_mask = np.zeros_like(grouped, dtype=bool)

    if min_span is not None:
        total_span = grouped.time.apply(np.ptp).apply(lambda x: x.total_seconds()).unstack().to_numpy()
        span_mask = np.where(total_span < min_span, True, False)
    else:
        span_mask = np.zeros_like(grouped, dtype=bool)

    # NOTE(review): '*' is an elementwise AND, so a group is masked only when it
    # fails BOTH criteria; TSP.__nly combines its masks with OR ('|') and this
    # docstring reads like OR. Confirm whether AND is intended here.
    mask = diff_mask * span_mask

    return mask
|
|
1076
|
+
|
|
1077
|
+
|
|
1078
|
+
def _observation_count_mask(number_of_observations: np.ndarray, min_count:int) -> np.ndarray:
|
|
1079
|
+
""" Create a mask array for an
|
|
1080
|
+
|
|
1081
|
+
Parameters
|
|
1082
|
+
----------
|
|
1083
|
+
number_of_observations : numpy.ndarray
|
|
1084
|
+
Array of how many data points are in aggregation
|
|
1085
|
+
min_count : int
|
|
1086
|
+
Minimum number of data points for aggregation to be 'valid'
|
|
1087
|
+
|
|
1088
|
+
Returns
|
|
1089
|
+
-------
|
|
1090
|
+
np.ndarray
|
|
1091
|
+
a mask, True where data should be masked
|
|
1092
|
+
"""
|
|
1093
|
+
valid = np.less(number_of_observations, min_count) # type: np.ndarray
|
|
1094
|
+
return valid
|
|
1095
|
+
|
|
1096
|
+
|
|
1097
|
+
def handle_incoming_times(times: "Union[np.ndarray, pd.DatetimeIndex, pd.Series, list]") -> "pd.DatetimeIndex":
    """Convert a list of times to a pandas DatetimeIndex object

    Parameters
    ----------
    times : numpy.ndarray, pandas.DatetimeIndex, pandas.Series, or list
        Non-empty collection of time-like values.

    Returns
    -------
    pandas.DatetimeIndex
        Index named 'time' (a pre-existing DatetimeIndex is returned unchanged).

    Raises
    ------
    ValueError
        If `times` is empty, has no length, or is an unsupported type.
    """
    invalid_msg = "Times must be a list, numpy array, pandas DatetimeIndex, or pandas Series"

    # Reject empty input; objects without a length (e.g. scalars) raise TypeError.
    try:
        if not len(times):
            raise ValueError(invalid_msg)
    except TypeError:
        raise ValueError(invalid_msg)

    if isinstance(times, pd.DatetimeIndex):
        return times

    if isinstance(times, pd.Series):
        try:
            times = pd.DatetimeIndex(times)
        except Exception:
            raise ValueError("Series must be convertible to DatetimeIndex")
        times.name = 'time'

        return times

    elif isinstance(times, np.ndarray):
        times = pd.to_datetime(times)
        times.name = 'time'
        return times

    elif isinstance(times, list):
        # Consistency fix: the list branch previously returned an unnamed index,
        # unlike the Series and ndarray branches which name it 'time'.
        times = pd.to_datetime(times)
        times.name = 'time'
        return times

    else:
        raise ValueError(invalid_msg)
|
|
1129
|
+
|
|
1130
|
+
def tsp_concat(tsp_list, on_conflict='error', metadata='first') -> TSP:
    """Combine multiple TSPs into a single TSP.

    Parameters
    ----------
    tsp_list : list[TSP]
        List of TSPs to combine. They must have the same depths
    on_conflict : str, optional
        Method to resolve duplicate times with different values. Chosen from "error", "keep", by default "error"
        - "error": Raise an error if duplicate times with different values are found.
        - "keep": Keep the first occurrence of the duplicate time.
    metadata : str, optional
        Method to select metadata from the TSPs. Chosen from "first", "identical", or "none", by default "first"
        - "first": Use the metadata from the first TSP in the list.
        - "identical": Only keep metadata records that are identical across TSPs.
        - "none": Ignore metadata and set it to None.

    Returns
    -------
    TSP
        Combined TSP.
    """
    # _tsp_concat returns a dict of TSP constructor arguments; pull out the
    # positional ones and pass the remainder through as keywords.
    parts = _tsp_concat(tsp_list=tsp_list, on_conflict=on_conflict, metadata=metadata)
    combined = TSP(parts.pop('times'), parts.pop('depths'), parts.pop('values'), **parts)
    return combined
|