pygnss 0.0.0__cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pygnss might be problematic. Click here for more details.

pygnss/sinex.py ADDED
@@ -0,0 +1,121 @@
1
+ from dataclasses import dataclass
2
+ import datetime
3
+ import typing
4
+
5
# Label of the SINEX block that maps satellite PRNs to Space Vehicle IDs
TAG_SAT_PRN = "SATELLITE/PRN"
6
+
7
+
8
@dataclass
class _SatPrnItem():
    """
    Entry of the SINEX SATELLITE/PRN block: the assignment of a PRN to a
    Space Vehicle ID (SVID) during a validity period.
    """
    svid: str
    valid_from: datetime.datetime
    # None means the assignment is open-ended (still active)
    valid_to: typing.Optional[datetime.datetime]
    prn: str

    def in_period(self, epoch: datetime.datetime) -> bool:
        """
        Check whether the given epoch falls within this validity period.

        An open-ended period (valid_to is None) only checks the lower bound.
        """
        return epoch >= self.valid_from and (self.valid_to is None or epoch <= self.valid_to)
17
+
18
+
19
class _SatPrn():
    """
    Lookup table of SATELLITE/PRN assignments, indexed by PRN.
    """

    def __init__(self, sat_prns: typing.List['_SatPrnItem']):
        # Group the assignment records by PRN so that a lookup only needs
        # to scan the history of a single PRN
        self.items: typing.Dict[str, typing.List['_SatPrnItem']] = {}

        for sat_prn in sat_prns:
            self.items.setdefault(sat_prn.prn, []).append(sat_prn)

    def to_svid(self, prn: str, epoch: datetime.datetime) -> str:
        """
        Get the Space Vehicle ID for a given Satellite PRN assignment at a
        specific epoch

        :raises ValueError: if the PRN is unknown or no assignment covers
            the given epoch (the original leaked a KeyError for unknown PRNs)
        """

        # Unknown PRNs fall through to the ValueError below instead of
        # raising a bare KeyError
        items = self.items.get(prn, [])

        for item in items:
            if item.in_period(epoch):
                return item.svid

        raise ValueError(f'Could not find SVID for [ {prn} ] at [ {epoch} ]')
44
+
45
+
46
def to_sat_prn(sinex_filename: str) -> _SatPrn:
    """
    Build a _SatPrn lookup table from the SATELLITE/PRN block of a SINEX file.
    """

    with open(sinex_filename, 'r') as fh:
        section_lines = _extract_section(fh, TAG_SAT_PRN)

    # Header ('+') and comment ('*') lines carry no assignment data
    records = [_parse_sat_prn_line(line)
               for line in section_lines
               if not line.startswith(('*', '+'))]

    return _SatPrn(records)
58
+
59
+
60
def _parse_epoch(epoch_str: str) -> typing.Optional[datetime.datetime]:
    """
    Parse an epoch expressed in SINEX format %Y:%j:<seconds_of_day>

    Returns None for the conventional "no epoch" value (all zeros), which
    SINEX uses for open-ended validity periods.

    :raises ValueError: if the string does not have at least three
        colon-separated fields

    >>> _parse_epoch("2024:029:43200")
    datetime.datetime(2024, 1, 29, 12, 0)

    >>> _parse_epoch("0000:000:00000")
    """

    NO_EPOCH = "0000:000:00000"
    if epoch_str == NO_EPOCH:
        return None

    fields = epoch_str.split(":")
    if len(fields) < 3:
        raise ValueError(f'Input [ {epoch_str} ] does not seem to conform to SINEX epoch format and cannot be parsed')

    # Year and day-of-year give the date; the third field holds the
    # seconds of day, added as a timedelta
    epoch = datetime.datetime.strptime(f'{fields[0]}:{fields[1]}', "%Y:%j")
    seconds = float(fields[2])

    return epoch + datetime.timedelta(seconds=seconds)
84
+
85
+
86
def _parse_sat_prn_line(line: str) -> _SatPrnItem:
    """
    Parse a SATELLITE/PRN line and extract its fields

    :raises ValueError: if the line does not carry at least 4 fields

    >>> _parse_sat_prn_line('G001 1978:053:00000 1985:199:00000 G04')
    _SatPrnItem(svid='G001', valid_from=datetime.datetime(1978, 2, 22, 0, 0), valid_to=datetime.datetime(1985, 7, 18, 0, 0), prn='G04')
    """

    # split() with no argument collapses runs of whitespace, so lines
    # padded with multiple spaces or tabs are parsed correctly too
    # (split(' ') produced empty fields for consecutive spaces)
    fields = line.split()
    if len(fields) < 4:
        raise ValueError(f'The input line [ {line} ] does not seem to be a SATELLITE/PRN line')

    svid, valid_from, valid_to, prn = fields[0:4]

    return _SatPrnItem(svid, _parse_epoch(valid_from), _parse_epoch(valid_to), prn)
101
+
102
+
103
def _extract_section(fh, tag: str) -> typing.List[str]:
    """
    Extract a SINEX section

    Returns the stripped lines of the block delimited by '+<tag>' and
    '-<tag>'. The opening marker line is included, the closing one is not.
    """

    start_marker = f'+{tag}'
    end_marker = f'-{tag}'

    collected = []
    capturing = False

    for raw_line in fh:

        if start_marker in raw_line:
            capturing = True
        elif end_marker in raw_line:
            break

        if capturing:
            collected.append(raw_line.strip())

    return collected
pygnss/stats.py ADDED
@@ -0,0 +1,75 @@
1
+ import argparse
2
+ import sys
3
+ from typing import Iterable, Tuple
4
+
5
+ import numpy as np
6
+
7
+
8
def cdf_cli():
    """
    Read numeric samples from stdin and print, per histogram bin, the bin
    edge, the probability mass and the cumulative probability.
    """
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)  # verbatim

    parser.add_argument('--n-bins', '-n', metavar='<int>', type=int,
                        help='Number of bins', default=10)

    args = parser.parse_args()

    # Keep only the stdin lines that can be interpreted as numbers
    samples = []
    for raw in sys.stdin:
        try:
            value = float(raw)
        except ValueError:
            continue
        samples.append(value)

    pdf, edges = np.histogram(samples, bins=args.n_bins, density=True)
    binwidth = edges[1] - edges[0]

    # Convert the density to per-bin probability mass, then accumulate
    pdf = np.array(pdf) * binwidth
    cdf = np.cumsum(pdf)

    for i in range(args.n_bins):
        print(f"{edges[i]} {pdf[i]} {cdf[i]}")
35
+
36
+
37
+
38
def compute_robust(data: Iterable) -> Tuple[float, float]:
    """
    Compute the robust statistics for the input data set. These robust
    statistics are:
    - median
    - Median Absolute Deviation (MAD) (https://en.wikipedia.org/wiki/Median_absolute_deviation)

    :param data: input data (any iterable of numbers, including generators)
    :return: the median and mad
    :raises ValueError: if the input is empty

    Example (extracted from http://kldavenport.com/absolute-deviation-around-the-median/)
    >>> data = [2, 6, 6, 12, 17, 25 ,32]
    >>> median, mad = compute_robust(data)
    >>> np.allclose(median, 12)
    True
    >>> np.allclose(mad, 6)
    True
    """

    # Materialize the input so that generic iterables (e.g. generators,
    # which have no len()) are supported, as the Iterable hint advertises
    values = np.asarray(data if hasattr(data, '__len__') else list(data))

    if values.size == 0:
        raise ValueError("Unable to compute the robust statistics for an empty list or array")

    median = np.median(values)

    # MAD: median of the absolute deviations from the median
    mad = np.median(np.abs(values - median))

    return median, mad
65
+
66
+
67
def rms(values: Iterable) -> float:
    """
    Compute the Root Mean Square of an array of values

    >>> array = [1, 2, 3, 4, 5]
    >>> rms(array)
    np.float64(3.3166247903554)
    """
    squared = np.square(values)
    return np.sqrt(squared.mean())
pygnss/tensorial.py ADDED
@@ -0,0 +1,50 @@
1
+ """
2
+ This script is used to compare two files so that they are put side by
3
+ side.
4
+
5
+ Usage:
6
+ cat file1 file2 | tensorial.py -c col1 [col2 [col3]] ...
7
+
8
+
9
+
10
+ Examples:
11
+ (a) Join two files using the label in column 1 as a reference
12
+ cat f1.txt f2.txt | tensorial.py -c 1
13
+ """
14
+ import argparse
15
+ import sys
16
+
17
+
18
def entry_point():
    """
    Join lines read from stdin that share the same index (built from one or
    more columns) and print the paired lines side by side.
    """
    argParser = argparse.ArgumentParser(description=__doc__,
                                        formatter_class=argparse.RawDescriptionHelpFormatter)  # for verbatim

    argParser.add_argument('--column', '-c', metavar='<int>', type=int, nargs='+',
                           help='Column that contains the reference label used to join the lines. '
                           'This option is repeatable and, if not present, defaults to 1. '
                           'Columns are expressed as 1-based.')

    args = argParser.parse_args()

    # Retrieve the columns that will be used to make the index. When the
    # option is absent, argparse leaves args.column as None, so fall back
    # to the first column as documented (the original `len(args.column)`
    # raised a TypeError in that case).
    if not args.column:
        idxes = [0]
    else:
        idxes = [v - 1 for v in args.column]

    lines = {}
    for line in sys.stdin:
        values = line.split()

        # Build the index
        try:
            index = ' '.join([values[i] for i in idxes])
        except IndexError:
            sys.stderr.write("FATAL : Not enough columns to build the index in the current line\n %s\n" % line)
            sys.exit(1)

        # Store the line based on the index, or print it when its pair arrives
        if index in lines:
            sys.stdout.write("{0} {1}\n".format(lines[index], line[:-1]))
        else:
            lines[index] = line[:-1]
pygnss/time.py ADDED
@@ -0,0 +1,350 @@
1
+ from collections import namedtuple
2
+ import datetime
3
+ import math
4
+ import enum
5
+ from typing import List, Tuple
6
+ import numpy as np
7
+ import pandas as pd
8
+
9
# Origin of the GPS time scale
GPS_TIME_START = datetime.datetime(1980, 1, 6, 0, 0, 0)
# Origin of the J2000 time scale (noon of January 1st 2000)
J2000_TIME_START = datetime.datetime(2000, 1, 1, 12, 0, 0)
SECONDS_IN_DAY = 24 * 60 * 60
SECONDS_IN_WEEK = 86400 * 7
# GPS time origin expressed in J2000 seconds (negative: GPS starts before J2000)
GPS_AS_J2000 = -630763200

# GPS week number, seconds within the week and day of the week (0 = Sunday)
WeekTow = namedtuple('WeekTow', 'week tow day_of_week')
16
+
17
+
18
class TimeScale(enum.Enum):
    """Time scales supported by the conversion helpers of this module."""
    GPS = enum.auto()
    UTC = enum.auto()
21
+
22
+
23
def get_gps_leapseconds(utc_date: datetime.datetime) -> datetime.timedelta:
    """
    Return the GPS-UTC leap second offset applicable at the given UTC date.

    :raises ValueError: for dates before 1999-01-01, where this table has
        no information
    """

    # Leap second introduction dates (most recent first) paired with the
    # accumulated GPS-UTC offset valid from that date onwards
    leap_table = (
        (datetime.datetime(2017, 1, 1), 18),
        (datetime.datetime(2015, 7, 1), 17),
        (datetime.datetime(2012, 7, 1), 16),
        (datetime.datetime(2009, 1, 1), 15),
        (datetime.datetime(2006, 1, 1), 14),
        (datetime.datetime(1999, 1, 1), 13),
    )

    for since, seconds in leap_table:
        if utc_date >= since:
            return datetime.timedelta(seconds=seconds)

    raise ValueError('No Leap second information for epochs prior to 1999-01-01')
39
+
40
+
41
class Timespan:
    """
    Time interval delimited by a start and an end epoch.
    """

    def __init__(self, start: datetime.datetime, end: datetime.datetime):
        self.start = start
        self.end = end

    def __str__(self):
        return f"{self.start} - {self.end}"

    def is_overlaping(self, other: 'Timespan') -> bool:
        """Check whether this timespan shares any instant with `other`."""
        return (self.start <= other.end) and (self.end >= other.start)

    def duration(self) -> datetime.timedelta:
        return self.end - self.start

    def duration_seconds(self) -> int:
        """Duration truncated to whole seconds."""
        return int((self.duration()).total_seconds())

    def duration_minutes(self) -> float:
        return self.duration_seconds() / 60

    def duration_hours(self) -> float:
        return self.duration_minutes() / 60

    def duration_days(self) -> float:
        return self.duration_hours() / 24

    def overlap(self, other: 'Timespan') -> 'Timespan':
        """
        Return the intersection of both timespans.

        :raises ValueError: if the timespans do not overlap
        """
        if not self.is_overlaping(other):
            raise ValueError('Timespans are not overlaped')

        start = max(self.start, other.start)
        end = min(self.end, other.end)
        return Timespan(start, end)

    def as_tuple(self) -> tuple:
        return (self.start, self.end)

    def __repr__(self) -> str:
        # Bug fix: the original returned the bound method (self.__str__)
        # instead of calling it, so repr() raised a TypeError
        return self.__str__()
80
+
81
+
82
def to_week_tow(epoch: datetime.datetime, timescale: TimeScale = TimeScale.GPS) -> WeekTow:
    """
    Convert from datetime to GPS week (assumes datetime in GPS Timescale)

    >>> to_week_tow(datetime.datetime(1980, 1, 6))
    WeekTow(week=0, tow=0.0, day_of_week=0)
    >>> to_week_tow(datetime.datetime(2005, 1, 28, 13, 30))
    WeekTow(week=1307, tow=480600.0, day_of_week=5)

    Conversion method based on algorithm provided in this link
    http://www.novatel.com/support/knowledge-and-learning/published-papers-and-documents/unit-conversions/
    """

    elapsed = epoch - GPS_TIME_START

    # When the input epoch is in UTC, the GPS-UTC leap seconds are added to
    # land on the GPS time scale
    if timescale == TimeScale.UTC:
        leap_seconds = get_gps_leapseconds(epoch).total_seconds()
    else:
        leap_seconds = 0.0

    week = int(elapsed.days / 7)
    day_of_week = elapsed.days - 7 * week
    tow = elapsed.microseconds * 1e-6 + elapsed.seconds + day_of_week * SECONDS_IN_DAY + leap_seconds

    return WeekTow(week, tow, day_of_week)
102
+
103
+
104
def from_week_tow(week: int, tow: float, timescale: TimeScale = TimeScale.GPS) -> datetime.datetime:
    """
    Convert from week tow to datetime in GPS scale

    >>> from_week_tow(0, 0.0)
    datetime.datetime(1980, 1, 6, 0, 0)
    >>> from_week_tow(1307, 480600.0)
    datetime.datetime(2005, 1, 28, 13, 30)
    """

    gps_epoch = GPS_TIME_START + datetime.timedelta(weeks=week, seconds=tow)

    # Remove the leap seconds when the caller wants the epoch in UTC
    if timescale == TimeScale.UTC:
        return gps_epoch - get_gps_leapseconds(gps_epoch)

    return gps_epoch
121
+
122
+
123
def weektow_to_datetime(tow: float, week: int) -> datetime.datetime:
    """Deprecated alias of `from_week_tow` (note the swapped argument order)."""
    import warnings
    warnings.warn("This function will be replaced by 'from_week_tow'", DeprecationWarning, stacklevel=2)
    return from_week_tow(week, tow)
127
+
128
+
129
def weektow_to_j2000(tow: float, week: int) -> float:
    """
    Convert from GPS week and time of the week (in seconds) to j2000 seconds

    The week and tow values can be vectors, and thus it will return a vector of
    tuples.

    >>> weektow_to_j2000(0, 0.0)
    -630763200.0
    """

    # Seconds elapsed since the GPS origin, rebased to the J2000 origin
    return week * SECONDS_IN_WEEK + tow + GPS_AS_J2000
147
+
148
+
149
def to_j2000(epoch: datetime.datetime) -> float:
    """
    Convert from datetime to j2000 seconds

    >>> to_j2000(datetime.datetime(2005, 1, 28, 13, 30))
    160191000.0
    """
    wt = to_week_tow(epoch)
    return weektow_to_j2000(wt.tow, wt.week)
158
+
159
+
160
def from_j2000(j2000s: int, fraction_of_seconds: float = 0.0) -> datetime.datetime:
    """
    Convert from J2000 epoch to datetime

    >>> from_j2000(160191000)
    datetime.datetime(2005, 1, 28, 13, 30)

    >>> from_j2000(160191000, fraction_of_seconds = 0.1)
    datetime.datetime(2005, 1, 28, 13, 30, 0, 100000)
    """

    # The sub-second part is carried as whole microseconds
    offset = datetime.timedelta(seconds=j2000s,
                                microseconds=int(fraction_of_seconds * 1.0e6))
    return J2000_TIME_START + offset
174
+
175
+
176
def epoch_range(start_epoch, end_epoch, interval_s):
    """
    Iterate between 2 epochs with a given interval

    >>> import datetime
    >>> st = datetime.datetime(2015, 10, 1, 0, 0, 0)
    >>> en = datetime.datetime(2015, 10, 1, 0, 59, 59)
    >>> interval_s = 15 * 60
    >>> ','.join([str(d) for d in epoch_range(st, en, interval_s)])
    '2015-10-01 00:00:00,2015-10-01 00:15:00,2015-10-01 00:30:00,2015-10-01 00:45:00'
    >>> st = datetime.datetime(2015, 10, 1, 0, 0, 0)
    >>> en = datetime.datetime(2015, 10, 1, 1, 0, 0)
    >>> interval_s = 15 * 60
    >>> ','.join([str(d) for d in epoch_range(st, en, interval_s)])
    '2015-10-01 00:00:00,2015-10-01 00:15:00,2015-10-01 00:30:00,2015-10-01 00:45:00,2015-10-01 01:00:00'
    """

    # Half an interval of slack so an end epoch that falls exactly on a
    # step is included despite floating point noise
    span_seconds = (end_epoch - start_epoch).total_seconds() + interval_s / 2.0
    exact_steps = span_seconds / interval_s

    # Round-half-up to the nearest whole number of steps
    steps = int(exact_steps)
    if math.fabs(steps - exact_steps) >= 0.5:
        steps += 1

    for step_index in range(steps):
        yield start_epoch + datetime.timedelta(seconds=interval_s * step_index)
201
+
202
+
203
def round_to_interval(epoch: datetime.datetime, interval: float) -> datetime.datetime:
    """
    Round an epoch to the nearest multiple of the given interval (in seconds).

    NOTE(review): the conversion goes through POSIX timestamps of naive
    datetimes, so results depend on the local timezone setting; for the
    sub-second and whole-second intervals used here the offsets cancel out —
    confirm if intervals larger than a few seconds are ever needed.

    >>> dt = datetime.datetime(2023, 4, 20, 10, 48, 52, 794000)
    >>> interval = 0.1
    >>> round_to_interval(dt, interval)
    datetime.datetime(2023, 4, 20, 10, 48, 52, 800000)

    >>> interval = 1.0
    >>> round_to_interval(dt, interval)
    datetime.datetime(2023, 4, 20, 10, 48, 53)

    >>> interval = 0.5
    >>> round_to_interval(dt, interval)
    datetime.datetime(2023, 4, 20, 10, 48, 53)

    >>> interval = 2.0
    >>> round_to_interval(dt, interval)
    datetime.datetime(2023, 4, 20, 10, 48, 52)

    >>> interval = 0.05
    >>> round_to_interval(dt, interval)
    datetime.datetime(2023, 4, 20, 10, 48, 52, 800000)

    >>> interval = 0.01
    >>> round_to_interval(dt, interval)
    datetime.datetime(2023, 4, 20, 10, 48, 52, 790000)
    """
    timestamp = epoch.timestamp()
    # Snap the POSIX timestamp to the nearest multiple of the interval
    rounded_timestamp = round(timestamp / interval) * interval
    return datetime.datetime.fromtimestamp(rounded_timestamp)
233
+
234
+
235
def get_interval(epochs: List[datetime.datetime], target_intervals: Tuple[float] = (2.0, 1.0, 0.5, 0.1, 0.05, 0.01)) -> float:
    """
    Find the interval, among a predefined set of candidates, closest to the
    median time difference between consecutive epochs.

    Args:
        epochs: List of datetimes
        target_intervals: candidate intervals (in seconds) to choose from.

    Returns:
        The closest possible interval from the predefined set.

    >>> t0 = datetime.datetime(2023, 6, 1, 12, 0, 0)
    >>> epochs = [t0, datetime.datetime(2023, 6, 1, 12, 0, 2), datetime.datetime(2023, 6, 1, 12, 0, 3)]
    >>> get_interval(epochs)
    2.0

    >>> t2 = datetime.datetime(2023, 6, 1, 12, 0, 1)
    >>> epochs = [t0, t2, datetime.datetime(2023, 6, 1, 12, 0, 2)]
    >>> get_interval(epochs)
    1.0

    >>> t1 = datetime.datetime(2023, 6, 1, 12, 0, 0, 500000)
    >>> epochs = [t0, t1, t2]
    >>> get_interval(epochs)
    0.5

    >>> t1 = datetime.datetime(2023, 6, 1, 12, 0, 0, 100000)
    >>> t2 = datetime.datetime(2023, 6, 1, 12, 0, 0, 200000)
    >>> epochs = [t0, t1, t2]
    >>> get_interval(epochs)
    0.1

    >>> t1 = datetime.datetime(2023, 6, 1, 12, 0, 0, 10000)
    >>> t2 = datetime.datetime(2023, 6, 1, 12, 0, 0, 11000)
    >>> epochs = [t0, t1, t2]
    >>> get_interval(epochs)
    0.01

    """
    median_step = np.median(np.ediff1d(epochs)).total_seconds()

    # Candidate with the smallest absolute distance to the median step;
    # min() keeps the first candidate on ties, like the original index()
    return min(target_intervals, key=lambda candidate: abs(median_step - candidate))
278
+
279
+
280
def to_julian_date(epoch: datetime.datetime) -> float:
    """
    Convert an epoch to Julian Date

    >>> to_julian_date(datetime.datetime(2024, 2, 11))
    2460351.5
    >>> round(to_julian_date(datetime.datetime(2019, 1, 1, 8)), 2)
    2458484.83
    """

    # Offset from the J2000 reference epoch (JD 2451545.0 = 2000-01-01 12:00)
    J2000_JD = 2451545.0
    seconds_from_j2000 = (epoch - datetime.datetime(2000, 1, 1, 12, 0, 0)).total_seconds()

    return J2000_JD + seconds_from_j2000 / 86400.0
295
+
296
+
297
def seconds_of_day(epoch: datetime.datetime) -> float:
    """
    Compute the seconds of the day

    >>> seconds_of_day(datetime.datetime(2024, 4, 1))
    0.0
    >>> seconds_of_day(datetime.datetime(2024, 4, 1, 23, 59, 59))
    86399.0
    """

    # Whole seconds since midnight, plus the fractional microsecond part
    whole_seconds = (epoch.hour * 60 + epoch.minute) * 60 + epoch.second
    return whole_seconds + epoch.microsecond / 1.0e6
308
+
309
+
310
def gmst(epoch: datetime.datetime) -> float:
    """
    Compute the Greenwich Mean Sidereal Time (in hours)

    https://astronomy.stackexchange.com/questions/21002/how-to-find-greenwich-mean-sideral-time


    >>> round(gmst(datetime.datetime(2019, 1, 1, 8)), 6)
    14.712605
    >>> gmst_hour = gmst(datetime.datetime(2024, 2, 11))
    >>> round(gmst_hour * math.tau / 24, 9)
    2.453307616
    """

    jd = to_julian_date(epoch)

    # Julian date of the previous midnight (JD days start at noon, hence +0.5)
    jd_midnight = math.floor(jd) + 0.5
    hours_since_midnight = (jd - jd_midnight) * 24.0

    # Time elapsed since the J2000 reference epoch
    days_since_j2000 = jd - 2451545.0
    centuries_since_j2000 = days_since_j2000 / 36525
    whole_days_since_j2000 = jd_midnight - 2451545.0

    gmst_hours = 6.697374558 + 0.06570982441908 * whole_days_since_j2000 \
        + 1.00273790935 * hours_since_midnight \
        + 0.000026 * centuries_since_j2000**2

    return gmst_hours % 24
338
+
339
+
340
def compute_elapsed_seconds(epochs: pd.Series) -> pd.Series:
    """Seconds elapsed since the first epoch of the series."""
    origin = epochs.iloc[0]
    return (epochs - origin).dt.total_seconds()
342
+
343
+
344
+ def compute_decimal_hours(epochs: pd.Series) -> pd.Series:
345
+ return epochs.apply(lambda x: x.hour + x.minute / 60 + x.second / 3600)
346
+
347
+
348
+ if __name__ == "__main__":
349
+ import doctest
350
+ doctest.testmod()
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Rokubun
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,67 @@
1
+ Metadata-Version: 2.2
2
+ Name: pygnss
3
+ Version: 0.0.0
4
+ Summary: Package with utilities and tools for GNSS data processing
5
+ Author-email: Miquel Garcia-Fernandez <miquel@mgfernan.com>
6
+ License: MIT
7
+ Requires-Python: >=3.10
8
+ Description-Content-Type: text/markdown
9
+ License-File: LICENSE
10
+ Requires-Dist: numpy
11
+ Requires-Dist: pandas>=2.2
12
+ Requires-Dist: pyarrow>=18.0.0
13
+ Provides-Extra: test
14
+ Requires-Dist: pytest>=8.3.4; extra == "test"
15
+ Requires-Dist: pytest-env>=1.1.5; extra == "test"
16
+ Requires-Dist: pytest-mocha>=0.4.0; extra == "test"
17
+ Requires-Dist: flake8>=7.0.0; extra == "test"
18
+ Provides-Extra: release
19
+ Requires-Dist: python-semantic-release>=9.4.0; extra == "release"
20
+
21
+ # pyrok-tools
22
+
23
+ Python tools used in internal Rokubun projects. This repository contains the following modules:
24
+
25
+ - `logger`, a module that extends basic Python logging
26
+ - `geodetic`, to perform basic geodetic transformation (Cartesian to Geodetic,
27
+ Cartesian to Local Tangential Plane, ...)
28
+
29
+ ## Installation
30
+
31
+ To make sure that the extensions are installed along with the package, run
32
+
33
+ `pip install pygnss*.whl`
34
+
35
+ ## Modules
36
+
37
+ ### Logger
38
+
39
+ Example of how to use the logger module:
40
+
41
+ ```python
42
+ >>> from pygnss import logger
43
+ >>> logger.set_level("DEBUG")
44
+ >>> logger.debug("Debug message")
45
+ 2020-05-05 18:23:55,688 - DEBUG - Debug message
46
+ >>> logger.warning("Warning message")
47
+ 2020-05-05 18:24:11,327 - WARNING - Warning message
48
+ >>> logger.info("Info message")
49
+ 2020-05-05 18:24:26,021 - INFO - Info message
50
+ >>> logger.error("Error message")
51
+ 2020-05-05 18:24:36,090 - ERROR - Error message
52
+ >>> logger.critical("Critical message")
53
+ 2020-05-05 18:24:43,562 - CRITICAL - Critical message
54
+ >>> logger.exception("Exception message", ValueError("Exception message"))
55
+ 2020-05-05 18:25:11,360 - CRITICAL - Exception message
56
+ ValueError: Exception message
57
+ Traceback (most recent call last):
58
+ ...
59
+ ValueError: Exception message
60
+ ```
61
+
62
+ ## Deployment to PyPi
63
+
64
+ The project is published automatically using internal Gitlab CI on each commit to `trunk` to PyPi repository [pygnss](https://pypi.org/project/pygnss/)
65
+
66
+ It uses semantic versioning and conventional commits to set the version and [semantic-release](https://python-semantic-release.readthedocs.io/en/latest/index.html) as
67
+ versioning tool.