saviialib 0.9.1__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of saviialib might be problematic. Click here for more details.
- saviialib/__init__.py +73 -3
- saviialib/general_types/api/__init__.py +0 -3
- saviialib/general_types/api/{epii_api_types.py → saviia_api_types.py} +4 -38
- saviialib/general_types/api/saviia_backup_api_types.py +24 -0
- saviialib/general_types/api/saviia_netcamera_api_types.py +11 -0
- saviialib/general_types/api/saviia_shakes_api_types.py +21 -0
- saviialib/general_types/api/saviia_thies_api_types.py +31 -0
- saviialib/general_types/error_types/api/{epii_api_error_types.py → saviia_api_error_types.py} +20 -0
- saviialib/general_types/error_types/api/saviia_netcamera_error_types.py +7 -0
- saviialib/general_types/error_types/common/common_types.py +9 -0
- saviialib/libs/directory_client/__init__.py +4 -0
- saviialib/libs/directory_client/client/os_client.py +55 -0
- saviialib/libs/directory_client/directory_client.py +44 -0
- saviialib/libs/directory_client/directory_client_contract.py +40 -0
- saviialib/libs/directory_client/types/directory_client_types.py +6 -0
- saviialib/libs/ffmpeg_client/__init__.py +8 -0
- saviialib/libs/ffmpeg_client/clients/ffmpeg_asyncio_client.py +101 -0
- saviialib/libs/ffmpeg_client/ffmpeg_client.py +25 -0
- saviialib/libs/ffmpeg_client/ffmpeg_client_contract.py +12 -0
- saviialib/libs/ffmpeg_client/types/ffmpeg_client_types.py +28 -0
- saviialib/libs/files_client/__init__.py +2 -2
- saviialib/libs/files_client/clients/aiofiles_client.py +26 -3
- saviialib/libs/files_client/clients/csv_client.py +42 -0
- saviialib/libs/files_client/files_client.py +5 -7
- saviialib/libs/files_client/types/files_client_types.py +5 -4
- saviialib/libs/ftp_client/clients/aioftp_client.py +13 -6
- saviialib/libs/ftp_client/clients/ftplib_client.py +58 -0
- saviialib/libs/ftp_client/ftp_client.py +8 -5
- saviialib/libs/ftp_client/ftp_client_contract.py +2 -2
- saviialib/libs/log_client/__init__.py +19 -0
- saviialib/libs/log_client/log_client.py +46 -0
- saviialib/libs/log_client/log_client_contract.py +28 -0
- saviialib/libs/log_client/logging_client/logging_client.py +58 -0
- saviialib/libs/log_client/types/log_client_types.py +47 -0
- saviialib/libs/log_client/utils/log_client_utils.py +6 -0
- saviialib/libs/sftp_client/__init__.py +8 -0
- saviialib/libs/sftp_client/clients/asyncssh_sftp_client.py +83 -0
- saviialib/libs/sftp_client/sftp_client.py +26 -0
- saviialib/libs/sftp_client/sftp_client_contract.py +13 -0
- saviialib/libs/sftp_client/types/sftp_client_types.py +24 -0
- saviialib/libs/sharepoint_client/__init__.py +2 -0
- saviialib/libs/sharepoint_client/clients/sharepoint_rest_api.py +31 -6
- saviialib/libs/sharepoint_client/sharepoint_client.py +25 -1
- saviialib/libs/sharepoint_client/sharepoint_client_contract.py +5 -0
- saviialib/libs/sharepoint_client/types/sharepoint_client_types.py +5 -0
- saviialib/libs/zero_dependency/utils/booleans_utils.py +2 -0
- saviialib/libs/zero_dependency/utils/datetime_utils.py +1 -1
- saviialib/libs/zero_dependency/utils/strings_utils.py +5 -0
- saviialib/services/backup/api.py +36 -0
- saviialib/services/backup/controllers/__init__.py +0 -0
- saviialib/services/{epii → backup}/controllers/types/__init__.py +1 -1
- saviialib/services/{epii → backup}/controllers/types/upload_backup_to_sharepoint_types.py +4 -2
- saviialib/services/{epii → backup}/controllers/upload_backup_to_sharepoint.py +9 -8
- saviialib/services/backup/use_cases/constants/upload_backup_to_sharepoint_constants.py +5 -0
- saviialib/services/{epii → backup}/use_cases/types/__init__.py +1 -1
- saviialib/services/{epii → backup}/use_cases/types/upload_backup_to_sharepoint_types.py +4 -2
- saviialib/services/backup/use_cases/upload_backup_to_sharepoint.py +474 -0
- saviialib/services/backup/utils/__init__.py +3 -0
- saviialib/services/backup/utils/upload_backup_to_sharepoint_utils.py +100 -0
- saviialib/services/netcamera/api.py +30 -0
- saviialib/services/netcamera/controllers/get_media_files.py +40 -0
- saviialib/services/netcamera/controllers/types/get_media_files_types.py +16 -0
- saviialib/services/netcamera/use_cases/get_media_files.py +76 -0
- saviialib/services/netcamera/use_cases/types/get_media_files_types.py +18 -0
- saviialib/services/shakes/__init__.py +0 -0
- saviialib/services/shakes/api.py +31 -0
- saviialib/services/shakes/controllers/get_miniseed_files.py +48 -0
- saviialib/services/shakes/controllers/types/get_miniseed_files_types.py +16 -0
- saviialib/services/shakes/use_cases/get_miniseed_files.py +79 -0
- saviialib/services/shakes/use_cases/types/get_miniseed_files_types.py +18 -0
- saviialib/services/shakes/use_cases/utils/get_miniseed_files_utils.py +11 -0
- saviialib/services/thies/__init__.py +0 -0
- saviialib/services/thies/api.py +42 -0
- saviialib/services/thies/constants/update_thies_data_constants.py +67 -0
- saviialib/services/{epii → thies}/controllers/types/update_thies_data_types.py +5 -4
- saviialib/services/{epii → thies}/controllers/update_thies_data.py +18 -6
- saviialib/services/thies/use_cases/components/create_thies_statistics_file.py +115 -0
- saviialib/services/thies/use_cases/components/thies_bp.py +442 -0
- saviialib/services/{epii → thies}/use_cases/types/update_thies_data_types.py +10 -2
- saviialib/services/thies/use_cases/update_thies_data.py +391 -0
- saviialib-1.6.0.dist-info/METADATA +126 -0
- saviialib-1.6.0.dist-info/RECORD +96 -0
- {saviialib-0.9.1.dist-info → saviialib-1.6.0.dist-info}/WHEEL +1 -1
- saviialib/services/epii/api.py +0 -80
- saviialib/services/epii/use_cases/constants/update_thies_data_constants.py +0 -5
- saviialib/services/epii/use_cases/constants/upload_backup_to_sharepoint_constants.py +0 -5
- saviialib/services/epii/use_cases/update_thies_data.py +0 -171
- saviialib/services/epii/use_cases/upload_backup_to_sharepoint.py +0 -241
- saviialib/services/epii/utils/__init__.py +0 -3
- saviialib/services/epii/utils/upload_backup_to_sharepoint_utils.py +0 -102
- saviialib-0.9.1.dist-info/METADATA +0 -120
- saviialib-0.9.1.dist-info/RECORD +0 -49
- /saviialib/{services/epii → libs/log_client/types}/__init__.py +0 -0
- /saviialib/services/{epii/controllers → backup}/__init__.py +0 -0
- /saviialib/services/{epii → thies}/utils/update_thies_data_utils.py +0 -0
- {saviialib-0.9.1.dist-info → saviialib-1.6.0.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
import configparser
|
|
2
|
+
import os
|
|
3
|
+
import struct
|
|
4
|
+
from datetime import datetime, timedelta
|
|
5
|
+
|
|
6
|
+
import numpy as np
|
|
7
|
+
import pandas as pd
|
|
8
|
+
from bitarray import bitarray
|
|
9
|
+
|
|
10
|
+
# "HH:MM" labels for every 10-minute slot of a day, in chronological order.
start_time = datetime.strptime("00:00", "%H:%M")
TIME_LIST = [
    (start_time + timedelta(minutes=offset)).strftime("%H:%M")
    for offset in range(0, 24 * 60, 10)
]

# Number of 10-minute rows in a complete day (144).
ROWS = len(TIME_LIST)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def date_range(start_date: str, end_date: str) -> list:
    """Return every date strictly between *start_date* and *end_date*.

    Both bounds are "YYYY/MM/DD" strings; the result is a list of
    "YYYY/MM/DD" strings excluding both endpoints. Empty when the bounds
    are adjacent, equal, or inverted.
    """
    start = datetime.strptime(start_date, "%Y/%m/%d") + timedelta(days=1)
    end = datetime.strptime(end_date, "%Y/%m/%d") - timedelta(days=1)
    if start > end:
        # No interior dates — previously this invariant condition was
        # re-checked inside the comprehension for every element.
        return []
    return [
        (start + timedelta(days=i)).strftime("%Y/%m/%d")
        for i in range((end - start).days + 1)
    ]
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def add_date_sep(date: str) -> str:
    """Insert "/" separators into a compact date string.

    Input: date as YYYYMMDD (a trailing extension such as ".BIN" is ignored;
    only the first eight characters are used).
    Returns: date as YYYY/MM/DD.
    """
    year, month, day = date[:4], date[4:6], date[6:8]
    return f"{year}/{month}/{day}"
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def verify_datestr(filename: str) -> bool:
    """Return True when *filename* follows the YYYYMMDD.BIN naming scheme.

    The first eight characters must parse as a calendar date and the name
    must end with the ".BIN" extension.
    """
    try:
        datetime.strptime(filename[:8], "%Y%m%d")
    except ValueError:
        # Not a valid YYYYMMDD prefix (wrong characters or impossible date).
        return False
    return filename.endswith(".BIN")
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def read_descfile(path) -> dict:
    """Parse a THIES DESCFILE.INI into a dict keyed by parameter index.

    *path* may be a filesystem path to the .INI file, or a dict that was
    already parsed by a previous call (returned unchanged, so callers can
    cache and re-pass the result).

    Returns:
        dict: {section_number (int): {option: value}} where every value
        except "name" is cast to int.
    """
    if isinstance(path, dict):
        # Already parsed — pass through unchanged.
        return path
    config = configparser.ConfigParser()
    config.read(path)
    data_dict = {}
    for section in config.sections():
        section_dict = dict(config.items(section))
        for key, value in section_dict.items():
            # "name" is the only free-text option; everything else is numeric.
            if key != "name":
                section_dict[key] = int(value)
        data_dict[int(section)] = section_dict
    return data_dict
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class THIESDayData:
    """A single day of THIES data-logger measurements decoded from a .BIN file.

    Exposes three row-aligned DataFrames:
      - dataDF:   measured values (set to None wherever the status is non-zero)
      - statusDF: per-value status codes (0.0 is treated as "valid")
      - datesDF:  only for the "ex" datatype — the "HH:MM" at which each
                  extreme value occurred

    Instances support ``+`` to concatenate days (see ``__add__``).
    """

    # Bytes per parameter within a row: status byte + 4-byte float ("av"),
    # plus an extra 4-byte occurrence timestamp for extremes ("ex").
    BPP = {"av": 5, "ex": 9}
    # Each row is prefixed with a 4-byte row timestamp.
    OFFSET = 4

    def __init__(self, datatype: str) -> None:
        """Create an empty container for *datatype* ("av" or "ex").

        Raises:
            ValueError: datatype is neither "av" nor "ex".
        """
        d = datatype.lower().strip()
        if d not in ["av", "ex"]:
            raise ValueError(
                "Invalid datatype. Expected 'av' (average values) or 'ex' (minmax values)."
            )

        self._bpr = -1  # Bytes per row (known once the descfile is parsed)
        self._bpp = THIESDayData.BPP[d]  # Bytes per parameter
        self._datatype = d
        self._binfile = None  # Raw .BIN content (bytes) after read_binfile()
        self.descfile = {}  # Parsed DESCFILE.INI (see read_descfile)
        self.nparameters = -1
        self._parameters = []  # Cached parameter names (see `parameters`)
        self.nbytes = -1
        self.nrows = -1
        self._date = ""  # Lazily resolved by the `date` property
        self.statusDF = pd.DataFrame()
        self.dataDF = pd.DataFrame()
        self.datesDF = pd.DataFrame()

    @staticmethod
    def _bytes2datetime(b: bytes, only_time: bool = False) -> str:
        """Decode a 4-byte packed timestamp.

        Input: bytes (size 4); byte order is reversed before bit-slicing.
        Output: "HH:MM" when *only_time*, else "YYYY/MM/DD HH:MM:SS".
        """
        bits = bitarray()
        bits.frombytes(b[::-1])  # Reverse the 4 bytes before bit extraction
        hr = int(bits[15:20].to01(), 2)
        # NOTE(review): `min` shadows the builtin within this method.
        min = int(bits[20:26].to01(), 2)
        sec = int(bits[26:].to01(), 2)
        time = f"{str(hr).zfill(2)}:{str(min).zfill(2)}"
        if only_time:
            return time
        yr = int(bits[0:6].to01(), 2)
        mon = int(bits[6:10].to01(), 2)
        day = int(bits[10:15].to01(), 2)
        # NOTE(review): yr is not zero-padded, so years 2000-2009 would render
        # as e.g. "205/01/01" — confirm against real logger data.
        date = f"20{yr}/{str(mon).zfill(2)}/{str(day).zfill(2)}"
        return date + " " + time + f":{str(sec).zfill(2)}"

    def _set_descfile(self, inipath: str) -> None:
        """Load the parameter description file and derive the row size."""
        self.descfile = read_descfile(inipath)
        self.nparameters = len(self.descfile)
        # Row = per-parameter payload sizes plus the 4-byte row timestamp.
        row_size = sum([self.descfile[num]["size"] for num in self.descfile])
        self._bpr = row_size + THIESDayData.OFFSET

    def read_binfile(self, binpath: str, inipath: str) -> None:
        """Read a daily .BIN file from *binpath* and build the DataFrames.

        *inipath* may be a DESCFILE.INI path or an already-parsed dict.
        """
        self._set_descfile(inipath)
        with open(binpath, "rb") as bin_file:
            binfile = bin_file.read()
        self._binfile = binfile
        self.nbytes = len(self._binfile)
        self.nrows = int(self.nbytes / self._bpr)
        self._make_dataframes()

    def make_empty(self, inipath: str, date: str) -> None:
        """Create a full-day frame of None values for a missing *date*.

        NOTE(review): dataDF, statusDF and datesDF are all bound to the SAME
        DataFrame object here — mutating one mutates the others. If they must
        be independent, copies should be assigned; confirm intended behavior.
        """
        self._set_descfile(inipath)
        dataDF = pd.DataFrame(
            None, index=range(ROWS), columns=range(self.nparameters + 2)
        )
        # Column 0/1 are Date/Time; parameter i maps to column i + 1.
        col_names = {0: "Date", 1: "Time"}
        par_names = {key + 1: self.descfile[key]["name"] for key in self.descfile}
        col_names.update(par_names)
        dataDF = dataDF.rename(columns=col_names)
        dataDF["Time"] = TIME_LIST
        dataDF["Date"] = [date] * ROWS

        self.dataDF = dataDF
        self.statusDF = dataDF
        self.datesDF = dataDF

    def _make_dataframes(self) -> None:
        """Decode the raw binary into data DF, status DF and, if
        datatype == "ex", the dates DF.
        """
        # Strip the 4-byte timestamp prefix from each fixed-size row.
        byterows = [
            self._binfile[i * self._bpr + THIESDayData.OFFSET : (i + 1) * self._bpr]
            for i in range(0, self.nrows)
        ]
        data_arr = np.zeros((self.nrows, self.nparameters))
        status_arr = np.zeros((self.nrows, self.nparameters))
        time_idx = np.empty(self.nrows, dtype=object)
        date_idx = np.empty(self.nrows, dtype=object)
        dates_arr = np.empty((self.nrows, self.nparameters), dtype=object)

        for i, row in enumerate(byterows):
            # Row timestamp: first 4 bytes of the full row.
            ts_bytes = self._binfile[i * self._bpr : i * self._bpr + 4]
            ts = THIESDayData._bytes2datetime(ts_bytes)
            # Drop the ":SS" suffix, then split "YYYY/MM/DD HH:MM".
            date_idx[i], time_idx[i] = ts[:-3].split()

            for j in range(self.nparameters):
                # Byte 1 of each parameter: status code.
                status = row[j * self._bpp]
                status_arr[i, j] = status

                # Bytes 2-5: little-endian IEEE-754 float value.
                value = struct.unpack("<f", row[j * self._bpp + 1 : j * self._bpp + 5])[
                    0
                ]
                data_arr[i, j] = round(value, 1)

                if self._datatype == "ex":
                    # Bytes 6-9: time at which the extreme occurred.
                    dt = THIESDayData._bytes2datetime(
                        row[j * self._bpp + 5 : j * self._bpp + 9], only_time=True
                    )
                    dates_arr[i, j] = dt

        self.dataDF = pd.DataFrame(data_arr).rename(
            columns={i: self.descfile[i + 1]["name"] for i in range(self.nparameters)}
        )
        self.statusDF = pd.DataFrame(status_arr).rename(
            columns={i: self.descfile[i + 1]["name"] for i in range(self.nparameters)}
        )
        # Blank out any value whose status is not 0 (valid).
        self.dataDF = self.dataDF.where(self.statusDF == 0.0, other=None)

        if self._datatype == "ex":
            self.datesDF = pd.DataFrame(dates_arr).rename(
                columns={
                    i: self.descfile[i + 1]["name"] for i in range(self.nparameters)
                }
            )
            self.datesDF = self.datesDF.where(self.statusDF == 0.0, other=None)
            self.datesDF.insert(0, "Time", time_idx)
            self.datesDF.insert(0, "Date", date_idx)

        self.dataDF.insert(0, "Time", time_idx)
        self.dataDF.insert(0, "Date", date_idx)
        self.statusDF.insert(0, "Time", time_idx)
        self.statusDF.insert(0, "Date", date_idx)

    def _generate_blank_rows(self) -> pd.DataFrame:
        """Build all-None rows (Date/Time filled) for missing timestamps.

        NOTE(review): returns a plain list when the day is already complete
        and a DataFrame otherwise; complete_empty() re-checks the same
        condition first, so the list path is effectively unreachable there.
        """
        if len(self) == ROWS:
            # Nothing to fill (already full rows)
            return []

        new = []
        none_row = {col: None for col in self.dataDF.columns}
        none_row["Date"] = self.date
        current_times = self.dataDF["Time"]
        for time in TIME_LIST:
            if time not in current_times.values:
                # This slot was not measured — add an all-None placeholder.
                row = none_row.copy()
                row["Time"] = time
                new.append(row)
        return pd.DataFrame(new)

    def complete_empty(self):
        """Complete the DataFrames with rows for every missing timestamp.

        Fills all columns with None except the Date and Time columns, then
        re-sorts each frame chronologically.
        """
        if len(self) == ROWS:
            return
        new_rows = self._generate_blank_rows()
        self.dataDF = pd.concat([self.dataDF, new_rows], ignore_index=True)
        self.dataDF = self.dataDF.sort_values(by="Time").reset_index(drop=True)
        self.statusDF = pd.concat([self.statusDF, new_rows], ignore_index=True)
        self.statusDF = self.statusDF.sort_values(by="Time").reset_index(drop=True)

        if self._datatype == "ex":
            self.datesDF = pd.concat([self.datesDF, new_rows], ignore_index=True)
            self.datesDF = self.datesDF.sort_values(by="Time").reset_index(drop=True)

    def sort_by(self, cols: list):
        """Sort all frames in place by *cols*.

        NOTE(review): the ascending list is hard-coded to two entries, so
        *cols* must contain exactly two columns (e.g. ["Date", "Time"]).
        """
        self.dataDF = self.dataDF.sort_values(
            by=cols, ascending=[True, True]
        ).reset_index(drop=True)
        self.statusDF = self.statusDF.sort_values(
            by=cols, ascending=[True, True]
        ).reset_index(drop=True)
        if len(self.datesDF):
            self.datesDF = self.datesDF.sort_values(
                by=cols, ascending=[True, True]
            ).reset_index(drop=True)

    @property
    def date(self) -> str:
        """Date of measurement ("YYYY/MM/DD"), cached from the first data row."""
        if len(self.dataDF) and self._date == "":
            self._date = self.dataDF["Date"][0]
        return self._date

    @property
    def shape(self):
        """Shape (rows, columns) of the data DataFrame."""
        return self.dataDF.shape

    @property
    def info(self) -> None:
        """Print a human-readable summary of this instance.

        NOTE(review): this property prints rather than returning a value —
        a plain method would be more conventional.
        """
        bf = self._binfile
        if bf:
            bf = bf[:8]
        print(f"""=== THIES Day Data Instance ===\n
        Bytes per row (BPR): {self._bpr}
        Bytes per parameter (BPP): {self._bpp}
        Datatype: {self._datatype}
        Binfile: {bf}...
        Descfile: {self.descfile}
        N parameters: {self.nparameters}
        N Bytes: {self.nbytes}
        Rows: {self.nrows}
        Date: {self.date}
        """)

    @property
    def parameters(self) -> list:
        """Parameter names from the descfile, cached after first access."""
        if self._parameters == []:
            self._parameters = [self.descfile[i]["name"] for i in self.descfile]
        return self._parameters

    def write_csv(self, filename: str) -> None:
        """Write the data DataFrame to "<filename>.csv"."""
        with open(filename + ".csv", "w") as outfile:
            outfile.write(self.dataDF.to_csv())

    def __repr__(self) -> str:
        return str(self.dataDF)

    def _repr_html_(self):
        # Jupyter rich display: delegate to the data DataFrame.
        return self.dataDF._repr_html_()

    def __len__(self):
        return len(self.dataDF)

    def __add__(self, other):
        """Concatenate two days into a new THIESDayData.

        NOTE(review): descfile/nparameters/parameters are taken from *other*,
        so both operands are assumed to share the same descfile.
        """
        if isinstance(other, THIESDayData):
            new = THIESDayData(datatype=self._datatype)
            new.descfile = other.descfile
            new.nparameters = other.nparameters
            new._parameters = other.parameters
            new.nrows = self.nrows + other.nrows
            new.nbytes = self.nbytes + other.nbytes
            new.statusDF = pd.concat([self.statusDF, other.statusDF]).reset_index(
                drop=True
            )
            new.dataDF = pd.concat([self.dataDF, other.dataDF]).reset_index(drop=True)
            if self._datatype == "ex":
                new.datesDF = pd.concat([self.datesDF, other.datesDF]).reset_index(
                    drop=True
                )
            return new
        raise TypeError(
            f"unsupported operand type(s) for +: 'THIESDayData' and '{type(other)}'"
        )
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
class THIESData:
    """A directory of daily THIES .BIN files plus their DESCFILE.INI.

    Scans *dirpath* for YYYYMMDD.BIN files, decodes each into a
    THIESDayData, and aggregates them into a single ``fullData`` instance.
    """

    def __init__(self, datatype: str, dirpath: str) -> None:
        """Validate *datatype*, scan *dirpath*, and parse its DESCFILE.INI.

        Raises:
            ValueError: datatype is neither "av" nor "ex".
            FileNotFoundError: DESCFILE.INI is missing from *dirpath*.
        """
        d = datatype.lower().strip()
        if d not in ["av", "ex"]:
            raise ValueError(
                "Invalid datatype. Expected 'av' (average values) or 'ex' (minmax values)."
            )

        self._path = dirpath
        self._datatype = d
        self.filelist = []  # Sorted YYYYMMDD.BIN filenames found in dirpath

        self._verify_path(dirpath)
        descpath = self._path + "/DESCFILE.INI"
        self.descfile = read_descfile(descpath)

        self.daylist = []  # One THIESDayData per file, filled by load_df()
        self.fullData = pd.DataFrame()  # Replaced by a THIESDayData in load_df()

        self.completed = False  # True once missing days have been back-filled

    def reset(self):
        """Discard loaded data so load_df() can start from scratch."""
        self.daylist = []
        self.fullData = pd.DataFrame()
        self.completed = False

    def _verify_path(self, path: str) -> None:
        """Ensure DESCFILE.INI exists in *path* and collect data filenames."""
        fl = sorted(os.listdir(path))
        if "DESCFILE.INI" not in fl:
            raise FileNotFoundError("No DESCFILE.INI found in this directory.")
        self.filelist = [file for file in fl if verify_datestr(file)]

    def load_df(self, complete_rows=False) -> "THIESDayData":
        """Decode every .BIN file in the directory into ``self.fullData``.

        Args:
            complete_rows: if True, each day is padded with empty rows for
                missing timestamps via THIESDayData.complete_empty().

        Returns:
            The aggregated THIESDayData stored in ``self.fullData``.
        """
        self.reset()
        for f in self.filelist:
            filepath = f"{self._path}/{f}"
            daydata = THIESDayData(datatype=self._datatype)
            # Pass the already-parsed descfile dict to avoid re-reading the INI.
            daydata.read_binfile(binpath=filepath, inipath=self.descfile)
            if complete_rows:
                daydata.complete_empty()
            self.daylist.append(daydata)

        # THIESDayData.__add__ makes sum() concatenate the days in order.
        self.fullData = sum(self.daylist, start=THIESDayData(self._datatype))

        return self.fullData

    def complete_empty_dates(self):
        """Back-fill whole missing days between the first and last file.

        NOTE(review): assumes ``filelist`` is non-empty and load_df() has
        been called — otherwise indexing/``fullData`` access fails.
        """
        if self.completed:
            return
        date_s = add_date_sep(self.filelist[0])
        date_e = add_date_sep(self.filelist[-1])
        d_range = date_range(date_s, date_e)
        for date in d_range:
            if date not in self.fullData.dataDF["Date"].values:
                # Missing day: append a full day of None rows.
                new = THIESDayData(self._datatype)
                new.make_empty(self.descfile, date=date)
                self.fullData += new

        self.fullData.sort_by(["Date", "Time"])
        self.completed = True

    def df2csv(self, outpath: str) -> None:
        """Write the aggregated data to "<outpath>.csv"."""
        # NOTE(review): output format for "ex" files is still undecided
        # (carried over from the original TODO).
        self.fullData.write_csv(outpath)
        print(f"Data written in: {outpath}.csv")

    def read_write(self, outpath: str):
        """Quick version of the read-write process.

        Streams every BIN file's rows straight into a single CSV.
        Does NOT keep the decoded DataFrames on the instance.
        Does NOT complete missing timestamps with empty rows.
        """
        write_header = True
        bcount = 0  # Total bytes decoded (not currently reported)
        with open(outpath + ".csv", "w") as outfile:
            for i, f in enumerate(self.filelist):
                filepath = f"{self._path}/{f}"
                daydata = THIESDayData(datatype=self._datatype)
                daydata.read_binfile(binpath=filepath, inipath=self.descfile)
                outfile.write(daydata.dataDF.to_csv(header=write_header))
                bcount += daydata.nbytes
                if i == 0:
                    # Only the first file contributes the CSV header row.
                    write_header = False
        print(f"Data written in: {outpath}.csv")

    @property
    def dataDF(self):
        """Data DataFrame of the aggregated fullData."""
        return self.fullData.dataDF

    @property
    def shape(self):
        """Shape of the aggregated data DataFrame."""
        return self.fullData.shape

    @property
    def size(self):
        """Number of .BIN files found in the directory."""
        return len(self.filelist)

    @property
    def parameters(self):
        """Parameter names from the aggregated fullData."""
        return self.fullData.parameters

    def __repr__(self) -> str:
        return str(self.fullData)

    def _repr_html_(self):
        # NOTE(review): returns the THIESDayData object itself rather than an
        # HTML string — probably meant self.fullData._repr_html_(); confirm.
        return self.fullData
|
|
@@ -1,12 +1,20 @@
|
|
|
1
1
|
from dataclasses import dataclass, field
|
|
2
|
-
from typing import Dict
|
|
3
|
-
from saviialib.general_types.api.
|
|
2
|
+
from typing import Dict, List
|
|
3
|
+
from saviialib.general_types.api.saviia_api_types import (
|
|
4
|
+
FtpClientConfig,
|
|
5
|
+
SharepointConfig,
|
|
6
|
+
)
|
|
7
|
+
from logging import Logger
|
|
4
8
|
|
|
5
9
|
|
|
6
10
|
@dataclass
class UpdateThiesDataUseCaseInput:
    """Input payload for the update-THIES-data use case.

    Field meanings below are inferred from names and the surrounding API —
    confirm against the use-case implementation.
    """

    # FTP connection settings for the THIES data source.
    ftp_config: FtpClientConfig
    # SharePoint connection settings for the upload target.
    sharepoint_config: SharepointConfig
    # Destination folder paths on SharePoint — presumably a list of str.
    sharepoint_folders_path: List
    # Source folder paths on the FTP server — presumably a list of str.
    ftp_server_folders_path: List
    # Local directory used as backup/staging area.
    local_backup_source_path: str
    # Caller-provided logger for progress/error reporting.
    logger: Logger
|
|
10
18
|
|
|
11
19
|
|
|
12
20
|
@dataclass
|