pygtide 0.8.2__cp314-cp314-macosx_15_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,515 @@
+ """
+ PyGTide Update - A Python class to update the files upon which PyGTide depends
+ -------------------------------------------------------------------------------
+ Author: Gabriel C. Rau (gabriel@hydrogeo.science)
+ Website: https://hydrogeo.science
+ """
+
+ import pygtide.etpred as etpred
+
+ from pathlib import Path
+ import numpy as np
+ import time as tt
+ import pandas as pd
+ import datetime as dt
+ import urllib
+ import urllib.request
+ import re
+ import os
+
+
+ def timestampToDecyear(ts):
+     year = ts.year
+     jan1 = pd.Timestamp(year, 1, 1)
+     jan1next = pd.Timestamp(year + 1, 1, 1)
+     yrlen = (jan1next - jan1).total_seconds()
+     return year + (ts - jan1).total_seconds() / yrlen
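+ # Example: timestampToDecyear(pd.Timestamp("2024-07-01")) returns ~2024.497,
+ # since 182 of the 366 days of the 2024 leap year have elapsed at that timestamp.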
+
+
+ class update_etpred_db(object):
+     """
+     The update_etpred_db class initialises internal variables.
+     """
+
+     def __init__(self, msg=True):
+         self.msg = msg
+         # Resolve package data directory directly from filesystem.
+         # Avoid importlib.resources.as_file() which creates temp dirs that get cleaned up.
+         pkg_dir = os.path.dirname(__file__)
+         self.data_dir = os.path.join(pkg_dir, "commdat")
+         if not self.data_dir.endswith(os.sep):
+             self.data_dir += os.sep
+
+         # Fortran expects a fixed-length string (256 chars)
+         etpred.params.comdir = self.data_dir + " " * (256 - len(self.data_dir))
+
+         # OS-dependent null file used to suppress module output
+         etpred.params.nullfile = os.devnull + " " * (10 - len(os.devnull))
+
+         self.etddt_file = str(etpred.params.etddtdat, "UTF-8").strip()
+         self.etpolut1_dat_file = str(etpred.params.etpolutdat, "UTF-8").strip()
+         self.etpolut1_bin_file = str(etpred.params.etpolutbin, "UTF-8").strip()
+
+         # %% remote data files
+         # IERS leap seconds history file
+         self.etddt_tmpl = "etddt_tmpl.dat"
+         self.leapsec_rfile = (
+             "https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second_History.dat"
+         )
+         # IERS pole coordinate observations
+         # self.iauhist_rfile = 'http://hpiers.obspm.fr/iers/eop/eopc04/eopc04_IAU2000.62-now'
+         self.iauhist_rfile = (
+             "ftp://hpiers.obspm.fr/iers/eop/eopc04/eopc04_IAU2000.62-now"
+         )
+         # US Navy pole coordinate predictions
+         self.iaucurr_rfile = "https://datacenter.iers.org/data/9/finals2000A.all"
+         # self.iaucurr_rfile = 'ftp://cddis.gsfc.nasa.gov/pub/products/iers/finals2000A.all'
+
+     # %% update the pole coordinates and UT1 to TAI times
+     def update_etpolut1(self):
+         global etpolut
+         status = True
+         etpolut1_file = Path(self.data_dir + "/" + self.etpolut1_dat_file)
+         leapsec_file = Path(self.data_dir + "/" + "[raw]_Leap_Second_History.dat")
+         iauhist_file = Path(self.data_dir + "/" + "[raw]_eopc04_IAU2000.dat")
+         iaucurr_file = Path(self.data_dir + "/" + "[raw]_finals2000A.dat")
+
+         print("--------------------------------------")
+         print(
+             "-->> Updating the Earth orientation database '{:s}':".format(
+                 etpolut1_file.as_posix()
+             )
+         )
+         start = tt.time()
+         if status:
+             try:
+                 print("Start downloading: {:s} ...".format(self.leapsec_rfile))
+                 urllib.request.urlopen(self.leapsec_rfile)
+             except OSError as error:
+                 print(
+                     "ERROR: Could not connect to remote server: {:s}".format(
+                         self.leapsec_rfile
+                     )
+                 )
+                 print("MESSAGE: {0}.".format(error))
+                 status = False
+                 pass
+             else:
+                 urllib.request.urlretrieve(self.leapsec_rfile, leapsec_file)
+                 end = tt.time()
+                 print("Finished downloading ({:.1f} s).".format((end - start)))
+
+         if status:
+             try:
+                 print("Start downloading: {:s} ...".format(self.iauhist_rfile))
+                 urllib.request.urlopen(self.iauhist_rfile, timeout=1)
+             except OSError as error:
+                 print(
+                     "ERROR: Could not connect to remote server: {:s}".format(
+                         self.iauhist_rfile
+                     )
+                 )
+                 print("MESSAGE: {0}.".format(error))
+                 status = False
+                 pass
+             else:
+                 urllib.request.urlretrieve(self.iauhist_rfile, iauhist_file)
+                 end = tt.time()
+                 print("Finished downloading ({:.1f} s).".format((end - start)))
+
+         if status:
+             try:
+                 print("Start downloading: {:s} ...".format(self.iaucurr_rfile))
+                 urllib.request.urlopen(self.iaucurr_rfile)
+             except OSError as error:
+                 print(
+                     "ERROR: Could not connect to remote server: {:s}".format(
+                         self.iaucurr_rfile
+                     )
+                 )
+                 print("MESSAGE: {0}.".format(error))
+                 status = False
+                 pass
+             else:
+                 urllib.request.urlretrieve(self.iaucurr_rfile, iaucurr_file)
+                 end = tt.time()
+                 print("Finished downloading ({:.1f} s).".format((end - start)))
+
+         # %%
+         if status:
+             try:
+                 open(leapsec_file, "r")
+             except OSError as error:
+                 print(
+                     "ERROR: Could not open file: {:s}".format(leapsec_file.as_posix())
+                 )
+                 print("MESSAGE: {0}.".format(error))
+                 status = False
+                 pass
+
+         if status:
+             try:
+                 open(iauhist_file, "r")
+             except OSError as error:
+                 print(
+                     "ERROR: Could not open file: {:s}".format(iauhist_file.as_posix())
+                 )
+                 print("MESSAGE: {0}.".format(error))
+                 status = False
+                 pass
+
+         if status:
+             try:
+                 open(iaucurr_file, "r")
+             except OSError as error:
+                 print(
+                     "ERROR: Could not open file: {:s}".format(iaucurr_file.as_posix())
+                 )
+                 print("MESSAGE: {0}.".format(error))
+                 status = False
+                 pass
+
+         if status:
+             # %% read leap second history
+             leapsdf = pd.read_csv(
+                 leapsec_file,
+                 comment="#",
+                 header=None,
+                 delimiter=r"\s+",
+                 names=["MJD", "day", "month", "year", "leaps"],
+                 usecols=[0, 1, 2, 3, 4],
+             )
+
+             # Create datetime column from day, month, year
+             leapsdf["date"] = pd.to_datetime(leapsdf[["year", "month", "day"]])
+
+             # Keep only 'date' and 'leaps'
+             leapsdf = leapsdf[["date", "leaps"]]
+
+             # Reindex starting at 1
+             leapsdf.index += 1
+
+             # Insert initial row at the beginning
+             leapsdf.loc[0] = [dt.datetime(1962, 1, 1), 10]
+
+             # Sort index to restore correct order
+             leapsdf.sort_index(inplace=True)
+
+             # %% read historic pole coordinates
+             # convert = {3: lambda x: np.around(np.float64(x), 3)}
+             iauhist = pd.read_csv(
+                 iauhist_file,
+                 skiprows=13,
+                 header=None,
+                 delimiter=r"\s+",
+                 usecols=[0, 1, 2, 3, 4, 5, 6],
+                 names=["year", "month", "day", "MJD", "x", "y", "UT1-UTC"],
+             )
+
+             # Combine year, month, day into a single datetime column
+             iauhist["date"] = pd.to_datetime(iauhist[["year", "month", "day"]])
+
+             # Keep only the desired columns
+             iauhist = iauhist[["date", "MJD", "x", "y", "UT1-UTC"]]
+
+             # %% read current pole coordinates
+             fw = [2, 2, 2, 9, 3, 9, 9, 10, 9, 3, 10, 10]
+             iaucurr = pd.read_fwf(
+                 iaucurr_file, header=None, widths=fw, usecols=[0, 1, 2, 3, 5, 7, 10]
+             )
+
+             iaucurr_dates = (
+                 iaucurr.iloc[:, 0].astype(str).str.zfill(2)
+                 + "-"
+                 + iaucurr.iloc[:, 1].astype(str).str.zfill(2)
+                 + "-"
+                 + iaucurr.iloc[:, 2].astype(str).str.zfill(2)
+             )
+
+             iaucurr["date"] = pd.to_datetime(iaucurr_dates, format="%y-%m-%d")
+
+             # Drop old columns used for date
+             iaucurr = iaucurr.drop(columns=iaucurr.columns[:3])
+             # Rename remaining columns
+             iaucurr.columns = ["MJD", "x", "y", "UT1-UTC", "date"]
+             # Move 'date' to first position if needed
+             iaucurr = iaucurr[["date", "MJD", "x", "y", "UT1-UTC"]]
+
+             mask = iaucurr["date"] > iauhist["date"].iloc[-1]
+             etpolut = pd.concat([iauhist, iaucurr.loc[mask]])
+             etpolut = etpolut[np.isfinite(etpolut["x"])]
+
+             # %%
+             etpolut["Date"] = etpolut["date"].dt.strftime("%Y%m%d")
+             etpolut["Time"] = etpolut["date"].dt.strftime("%H%M%S")
+             etpolut["MJD"] = etpolut["MJD"].map("{:8.3f}".format)
+             etpolut["x"] = etpolut["x"].map("{:9.5f}".format)
+             etpolut["y"] = etpolut["y"].map("{:9.5f}".format)
+             etpolut["TAI-UT1"] = etpolut["UT1-UTC"]
+             etpolut["UT1-UTC"] = etpolut["UT1-UTC"].map("{:9.6f}".format)
+
+             # %%
+             # prepare the last column
+             for idx, val in leapsdf.iterrows():
+                 # print(idx, val[1])
+                 # find mask for leap seconds
+                 if idx + 1 in leapsdf.index:
+                     mask = (etpolut["date"] >= leapsdf["date"].loc[idx]) & (
+                         etpolut["date"] < leapsdf["date"].loc[idx + 1]
+                     )
+                     # print(mask)
+                     # TAI-UT1 = (TAI-UTC) - (UT1-UTC), i.e. leap seconds minus UT1-UTC
+                     etpolut.loc[mask, "TAI-UT1"] = (
+                         leapsdf["leaps"].loc[idx] - etpolut.loc[mask, "TAI-UT1"]
+                     )
+                 else:
+                     mask = etpolut["date"] >= leapsdf["date"].loc[idx]
+                     etpolut.loc[mask, "TAI-UT1"] = (
+                         leapsdf["leaps"].loc[idx] - etpolut.loc[mask, "TAI-UT1"]
+                     )
+
+             etpolut["TAI-UT1"] = etpolut["TAI-UT1"].map("{:9.6f}".format)
+
+             # %%
+             # etpolut[0] = etpolut[0].map('${:,.2f}'.format)
+             header = """File : etpolut1.dat
+ Updated : $1$
+ Contents : Pole coordinates and earth rotation one day sampling interval,
+ given at 0 hours UTC. Historic data is combined with predictions.
+ Data are from IERS and USNO.
+ Period : $2$
+ Historic : $3$
+ Current : $4$
+ Leap sec.: $5$
+
+ Date Time MJD x y UT1-UTC TAI-UT1
+ ["] ["] [sec] [sec]
+ C****************************************************************\n"""
+             header = header.replace("$1$", dt.datetime.now(dt.timezone.utc).strftime("%d/%m/%Y"))
+             header = header.replace(
+                 "$2$",
+                 etpolut["date"].iloc[0].strftime("%d/%m/%Y")
+                 + " to "
+                 + etpolut["date"].iloc[-1].strftime("%d/%m/%Y"),
+             )
+             header = header.replace("$3$", self.iauhist_rfile)
+             header = header.replace("$4$", self.iaucurr_rfile)
+             header = header.replace("$5$", self.leapsec_rfile)
+
+             pd.options.display.max_colwidth = 200
+
+             # IMPORTANT: newline needs to comply with windows platform!
+             # https://pythonconquerstheuniverse.wordpress.com/2011/05/08/newline-conversion-in-python-3/
+             with open(etpolut1_file, "w", newline="") as myfile:
+                 myfile.write(header)
+                 # myfile.write(etpolut['combined'].to_string(index=False, header=False).replace('\n ', '\n'))
+                 # etpolut['combined'].to_string(myfile, index=False, header=False)
+                 # WEIRD PANDAS BUG: to_string() puts white space at beginning of each line
+                 for index, row in etpolut.iterrows():
+                     string = "{:s} {:s} {:s} {:s} {:s} {:s} {:s}".format(
+                         row["Date"],
+                         row["Time"],
+                         row["MJD"],
+                         row["x"],
+                         row["y"],
+                         row["UT1-UTC"],
+                         row["TAI-UT1"],
+                     )
+                     myfile.write(string + "\r\n")
+                 myfile.write("99999999")
+                 myfile.close()
+             end = tt.time()
+             # update also bin file
+             self.etpolut1_dat2bin()
+             print(
+                 "Finished updating {:s} ({:.1f} s).".format(
+                     etpolut1_file.as_posix(), (end - start)
+                 )
+             )
+         else:
+             print("Update failed!")
+             pass
+
+         # %% remove temporary files
+         os.remove(leapsec_file)
+         os.remove(iauhist_file)
+         os.remove(iaucurr_file)
+         return
+
+     # %% update the etpolut1 binary file from the text file
+     def etpolut1_dat2bin(self):
+         etpolut1_dat = Path(self.data_dir + "/" + self.etpolut1_dat_file)
+         etpolut1_bin = Path(self.data_dir + "/" + self.etpolut1_bin_file)
+         header = []
+         # find the end of the header
+         with open(etpolut1_dat, "r") as f:
+             for num, line in enumerate(f, 1):
+                 header.append(line)
+                 if "C*******" in header[-1]:
+                     break
+
+         # read into dataframe
+         cols = ["Date", "Time", "MJD", "x", "y", "UT1-UTC", "TAI-UT1"]
+         etpolut = pd.read_csv(
+             etpolut1_dat, names=cols, skiprows=num, header=None, delimiter=r"\s+"
+         )
+         # drop the last row with EOL ('99999999')
+         etpolut = etpolut[:-1]
+         print(
+             "File '{:s}' has {:d} rows.".format(
+                 etpolut1_dat.as_posix(), etpolut.shape[0]
+             )
+         )
+         # %%
+         # write as binary for use in fortran: each record has 4*8 bytes = 32 bytes
+         # header contains start date in MJD and number of rows + 1
+         head = np.array([np.int32(etpolut.iloc[0, 2]), np.int32(etpolut.shape[0] + 1)])
+         data = np.float64(etpolut.values[:, 3:])
+         # print(data)
+         with open(etpolut1_bin, "wb+") as f:
+             # write header integers
+             f.write(head.tobytes())
+             # advance to next record (32 bytes)
+             f.seek(32)
+             # write the flattened matrix (4 float64 values = 32 bytes per record)
+             f.write(data.flatten().tobytes())
+             f.close()
+         print(
+             "File '{:s}' has been updated (Header: {:.0f}, {:d}).".format(
+                 etpolut1_bin.as_posix(), etpolut.iloc[0, 2], etpolut.shape[0] + 1
+             )
+         )
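+         # Read-back sketch (illustrative only, not used by the package):
+         #     with open(etpolut1_bin, "rb") as f:
+         #         head = np.frombuffer(f.read(8), dtype=np.int32)  # first MJD, rows + 1
+         #         f.seek(32)
+         #         data = np.frombuffer(f.read(), dtype=np.float64).reshape(-1, 4)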
+
+     # %% update the time conversion database (leap seconds)
+     def update_etddt(self):
+         global etddt, leapsdf, tmp
+         leapsec_file = Path(self.data_dir + "/" + "[raw]_Leap_Second_History.dat")
+         old_etddt_file = Path(self.data_dir + "/" + self.etddt_tmpl)
+         etddt_file = Path(self.data_dir + "/" + self.etddt_file)
+
+         # %%
+         print("--------------------------------------")
+         print(
+             "-->> Updating time conversion database '{:s}':".format(
+                 leapsec_file.as_posix()
+             )
+         )
+         # %% download leap second history
+         start = tt.time()
+         try:
+             print("Start downloading: {:s} ...".format(self.leapsec_rfile))
+             urllib.request.urlopen(self.leapsec_rfile)
+         except OSError as error:
+             print("ERROR: Could not connect to remote server!")
+             print("MESSAGE: {0}.".format(error))
+             pass
+         else:
+             urllib.request.urlretrieve(self.leapsec_rfile, leapsec_file)
+             end = tt.time()
+             print("Finished downloading ({:.1f} s).".format((end - start)))
+
+         # %% READ THE EXISTING FILE
+         # print(etddt_file)
+         # find the end of the header
+         with open(old_etddt_file, "r") as f:
+             print("Processing file '{:s}' ...".format(etddt_file.as_posix()))
+             header = []
+             regex = re.compile(r"^\s*updated\s*\:.*$", re.IGNORECASE)
+             for num, line in enumerate(f, 1):
+                 line = regex.sub(
+                     "Updated : %s" % dt.datetime.now(dt.timezone.utc).strftime("%d/%m/%Y"),
+                     line,
+                 )
+                 header.append(line)
+                 if "C*******" in header[-1]:
+                     break
+
+         cols = ["year", "JD", "DDT"]
+         etddt = pd.read_csv(
+             old_etddt_file, names=cols, skiprows=num, header=None, delimiter=r"\s+"
+         )
+
+         # %% read leap second history
+         leapsdf = pd.read_csv(
+             leapsec_file,
+             comment="#",
+             header=None,
+             delimiter=r"\s+",
+             names=[
+                 "MJD",
+                 "day",
+                 "month",
+                 "year",
+                 "leaps",
+             ],  # assign proper column names
+             usecols=[0, 1, 2, 3, 4],
+         )
+         # Combine day, month, year into a single datetime column
+         leapsdf["date"] = pd.to_datetime(leapsdf[["year", "month", "day"]])
+
+         # Keep only the relevant columns
+         leapsdf = leapsdf[["date", "MJD", "leaps"]]
+
+         # Compute DDT = leaps + 32.184 s
+         leapsdf["DDT"] = leapsdf["leaps"] + 32.184
+
+         # %%
+         leapsdf["JD"] = [ts.to_julian_date() for ts in leapsdf["date"]]
+         leapsdf["year"] = [timestampToDecyear(ts) for ts in leapsdf["date"]]
+
+         # %%
+         mask = leapsdf["year"] > etddt["year"].values[-1]
+         indices = leapsdf.index[mask]
+         # print(indices)
+         # tmp = []
+         for i, val in enumerate(indices):
+             # for each record create a new row
+             etddt.loc[len(etddt) + 1] = {
+                 "year": leapsdf.loc[val, "year"],
+                 "JD": leapsdf.loc[val, "JD"],
+                 "DDT": leapsdf.loc[val, "DDT"],
+             }
+
+         # number of new records
+         records = sum(mask)
+         if records > 0:
+             # write header
+             with open(self.data_dir + "/" + self.etddt_file, "w+", newline="\r\n") as f:
+                 f.write("".join(header))
+                 # etddt['combined'].to_string(f, index=False, header=False)
+                 # f.write("\n")
+                 # WEIRD PANDAS BUG: to_string() puts white space at beginning of each line
+                 for index, row in etddt.iterrows():
+                     string = "{:.5f} {:.5f} {:8.3f}".format(
+                         row["year"], row["JD"], row["DDT"]
+                     )
+                     # print(string)
+                     f.write(string + "\n")
+                 f.close()
+             end = tt.time()
+             print(
+                 "{:d} records were added to the template ({:.1f} s).".format(
+                     records, end - start
+                 )
+             )
+             print(
+                 "The leap second File ('{:s}') is now up to date ({:.1f} s).".format(
+                     self.etddt_file, end - start
+                 )
+             )
+
+
+ # %% run the update
+ def update(msg=True):
+     pt = update_etpred_db(msg)
+     pt.update_etddt()
+     print(etddt.iloc[-10:, :])
+     pt.update_etpolut1()
+     print("---------------------")
+
+
+ if __name__ == "__main__":
+     update()
@@ -0,0 +1,124 @@
+ Metadata-Version: 2.4
+ Name: pygtide
+ Version: 0.8.2
+ Summary: A Python module and wrapper for ETERNA PREDICT to compute gravitational tides on Earth
+ Author: Tom Eulenfeld
+ Author-email: "Gabriel C. Rau" <gabriel@hydrogeo.science>
+ License-Expression: MPL-2.0
+ Project-URL: Homepage, https://github.com/hydrogeoscience/pygtide
+ Project-URL: Documentation, https://hydrogeo.science
+ Project-URL: Repository, https://github.com/hydrogeoscience/pygtide.git
+ Project-URL: Issues, https://github.com/hydrogeoscience/pygtide/issues
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
+ Classifier: Topic :: Scientific/Engineering :: Physics
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: numpy>=1.21.0
+ Requires-Dist: pandas
+ Dynamic: license-file
+
+ # PyGTide
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.1346260.svg)](https://doi.org/10.5281/zenodo.1346260)
+
+ ## A Python module and wrapper for ETERNA PREDICT to compute gravitational tides on Earth
+
+ PyGTide is a Python module that wraps ETERNA PREDICT 3.4, which is compiled from Fortran into a Python extension module using [f2py](https://docs.scipy.org/doc/numpy/f2py/). The original ETERNA PREDICT 3.3 was written by the late Prof. H.-G. Wenzel (Wenzel, 1996) in a mix of Fortran 77 and 90 and was updated by Kudryavtsev (2004) to include the latest tidal catalogue. Note that the original Fortran code was comprehensively revised to facilitate integration into Python. The original Fortran code for ETERNA PREDICT can be downloaded from the [International Geodynamics and Earth Tide Service (IGETS)](http://igets.u-strasbg.fr/soft_and_tool.php).
+
+ ## How to use
+
+ There are two options:
+ * Download and install on your system (see the instructions below)
+ * Via our online calculator: <a href="https://groundwater.app/app.php?app=pygtide" target="_blank">groundwater.app</a>
+
+
+ ## How to install and run
+
+ ### Prerequisites
+
+ * Download and install [*Anaconda*](https://www.anaconda.com/products/distribution) or [*Miniconda*](https://docs.conda.io/en/latest/miniconda.html)
+ * Install required packages:
+ ```
+ conda install numpy pandas
+ ```
+
+
+ ### Installation options
+ #### Option 1: Install and compile the source distribution from PyPI (Python 3.8–3.11) or install a pre-compiled distribution (Linux, macOS, Windows; Python>=3.12)
+
+ ```bash
+ pip install pygtide
+ ```
+
+ #### Option 2: Build from source locally (Linux, macOS, Windows; Python>=3.8)
+
+ **Requirements for building:**
+ - A Fortran compiler (e.g., `gfortran` via MinGW on Windows; included in the Linux/macOS gcc toolchains), for example `conda install gfortran`
+ - Meson build system with ninja: automatically installed via `pip`
+
+ **Clone repo from git:**
+ ```bash
+ git clone https://github.com/hydrogeoscience/pygtide.git
+ ```
+
+ **Install from local repository:**
+ ```bash
+ cd /path/to/pygtide
+
+ pip install .
+ ```
+
+ ### After installation
+
+ * Run tests to verify installation:
+ ```
+ python -c "import pygtide; pygtide.test(msg=True)"
+ ```
+
+ * Update internal database files (downloads the latest leap seconds and pole data):
+ ```
+ python -c "import pygtide; pygtide.update()"
+ ```
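+
+ The same update can also be driven from Python. A minimal sketch based on the `pygtide/update_etpred_data.py` module included in this package (`pygtide.update()` essentially wraps these calls):
+ ```python
+ from pygtide.update_etpred_data import update_etpred_db
+
+ updater = update_etpred_db(msg=True)
+ updater.update_etddt()     # refresh the time conversion (leap second) table
+ updater.update_etpolut1()  # refresh pole coordinates / UT1 and rebuild the binary file
+ ```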
+
+ ### Example usage
+
+ See `pygtide/tests.py` for complete examples. Quick start:
+
+ ```python
+ from pygtide import predict_series
+ args = (-20.82071, -70.15288, 830.0, '2020-01-01', 6, 600)
+ series = predict_series(*args, statazimut=90, tidalcompo=8)
+ ```
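+
+ Assuming `series` comes back as a time-indexed pandas Series (see `pygtide/tests.py` for the authoritative usage), it can be inspected or plotted directly:
+ ```python
+ print(series.head())
+
+ # optional quick look at the predicted signal (requires matplotlib)
+ import matplotlib.pyplot as plt
+ series.plot()
+ plt.show()
+ ```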
+
+ ## User guide
+
+ An updated user guide is currently in progress ...
+
+
+ ## How to cite
+ If you use PyGTide, please cite the work as:
+
+ *Rau, Gabriel C. (2018) hydrogeoscience/pygtide: PyGTide. Zenodo. [https://doi.org/10.5281/zenodo.1346260](https://doi.org/10.5281/zenodo.1346260)*
+
+ ## Example
+ <img src="https://raw.githubusercontent.com/hydrogeoscience/pygtide/master/earth_tide_example.png" width="500">
+ This image shows Earth tides calculated for the city of Karlsruhe (Germany) in the year 2018.
+
+ ## References
+ * Hartmann, T., and H.-G. Wenzel (1995), The HW95 tidal potential catalogue, Geophysical Research Letters, 22(24), 3553–3556, https://doi.org/10.1029/95GL03324.
+ * Kudryavtsev, S. M. (2004), Improved harmonic development of the Earth tide-generating potential, Journal of Geodesy, 77(12), 829–838, https://doi.org/10.1007/s00190-003-0361-2.
+ * Wenzel, H.-G. (1996), The nanogal software: Earth tide data processing package ETERNA 3.30, Bulletin d’Informations des Marées Terrestres, 124, 9425–9439.
+ * McMillan, T. C., Rau, G. C., Timms, W. A., and Andersen, M. S. (2019), Utilizing the impact of Earth and atmospheric tides on groundwater systems: A review reveals the future potential, Reviews of Geophysics, https://doi.org/10.1029/2018RG000630.
+
+ ## License
+ PyGTide is released by [Gabriel C. Rau](https://hydrogeo.science) and [Tom Eulenfeld](https://scholar.google.com/citations?user=SJXF3mwAAAAJ&hl=en) under the [Mozilla Public License 2.0](https://www.mozilla.org/en-US/MPL/2.0/).
@@ -0,0 +1,27 @@
+ pygtide/update_etpred_data.py,sha256=Qq1L18dIBmcljY1scYlhq4R-dCvAiRovWa0TvSyLeJY,20099
+ pygtide/__init__.py,sha256=GDTiNIuNxlnnNxt9ZzuOTM6pnAP0xTKOHP2b1cL6jlg,550
+ pygtide/core.py,sha256=cFVr4MIh6zdaSWji4GSQWJR7y0vUPKiUGv3chFyTUZ8,24870
+ pygtide/tests.py,sha256=a_ISOCP0ZQqX7RZEpWs8Cgy3sOpuMz7U_hby-ELkOJg,2836
+ pygtide/etpred.so,sha256=GJAXZ7gp5qdNmoHrUyHA8-bBHaJuAQ3v7jXzfTv-hUw,255808
+ pygtide/commdat/[raw]_eopc04_IAU2000.dat,sha256=KxG3418Ei3PTLpVhUiUb4Xfe3wei2RUkPdrxcf4tEXg,3438828
+ pygtide/commdat/ksm03.dat,sha256=Vm9Ki0syDWFElATnipPYdbKzAOjq0suXLpCZPoDaeOM,3376311
+ pygtide/commdat/[raw]_finals2000A.dat,sha256=IVpyQIn1SF31QSXlmauHEde3EYlZ5Cje8BAtd2VRof8,3472736
+ pygtide/commdat/etddt.dat,sha256=dGlRn_1mr24PfYe21uYBTCMfK4lgLyvB4WASqFpvMi8,8804
+ pygtide/commdat/hw95s.dat,sha256=5jTmVgtBGT4u0y419izudWtRvkTLVBIShDGnIIrJBQ4,1387984
+ pygtide/commdat/buellehw.dat,sha256=rdPoy3_ti_EAQVVh52LopEjdK6vzwmuRvL5zz36FErw,74886
+ pygtide/commdat/etpolut1.dat,sha256=_8m8-C7MRgKzyxCWzg5fwo5PY3c-bTZhOPxcGtFVBo4,1539863
+ pygtide/commdat/tamurahw.dat,sha256=IEA7UCyXPhtawMSFcaLo5lAiXBUjlFFrhzEVi7fJJgI,132651
+ pygtide/commdat/cted73hw.dat,sha256=MgXpux18c8_vzYFyxj4o0LBuhUzyau6_Kn_ocyi7ZHA,60593
+ pygtide/commdat/doodsehw.dat,sha256=WrjGdssZnMnK8Ou77Y7m3T7DlKWqS1KKExdt5zpTBWY,45589
+ pygtide/commdat/ratgp95.dat,sha256=3OJplSsT7Tlhqks2dd9tpt6SQpN3UyMNR-WJOKWm2NU,694252
+ pygtide/commdat/[raw]_Leap_Second_History.dat,sha256=I3OPHsFPfRKzsLDzaU3TSNID419loYq4s10qvuUMX64,1359
+ pygtide/commdat/etddt_tmpl.dat,sha256=oqf7mXP0w2XpeFn34pdec7csDGlkU1fpS5fnVw8a0S8,8191
+ pygtide/commdat/xi1989hw.dat,sha256=gz4ALMVjDRfcQ2wxJjSLPl5HCsp_wQJNfyMfKeMz0Go,316600
+ pygtide/.dylibs/libgfortran.5.dylib,sha256=99789i_-NnS7IQbTwqkda12OBSYyRDNYQ_ysaPqPeB8,2215232
+ pygtide/.dylibs/libquadmath.0.dylib,sha256=AlETbt6vE9tgihFfxma6uF1DpgZ31j4KjXbXpQ8gVGI,362768
+ pygtide/.dylibs/libgcc_s.1.1.dylib,sha256=clc36rqoUnzTS9oS0bDTJl4UJBomeWLlxBOzbueYO-I,220288
+ pygtide-0.8.2.dist-info/RECORD,,
+ pygtide-0.8.2.dist-info/WHEEL,sha256=59HIdHQbECqK4HZdSy7PNtdzqg2EbF0lIn3L0mnWAxA,137
+ pygtide-0.8.2.dist-info/top_level.txt,sha256=MHVHiiI0GXxh3mAyQttFc1S0jXkE2Z7Zkl2iTnkV-NQ,8
+ pygtide-0.8.2.dist-info/METADATA,sha256=ozP6p4DlLEF5nmLE6DaIvOwA1Sl3pD1nA_TXpKqK6HU,5352
+ pygtide-0.8.2.dist-info/licenses/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
@@ -0,0 +1,6 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.10.2)
+ Root-Is-Purelib: false
+ Tag: cp314-cp314-macosx_15_0_arm64
+ Generator: delocate 0.13.0
+