saviialib 1.1.0.tar.gz → 1.2.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of saviialib has been flagged by the registry.
Files changed (57)
  1. {saviialib-1.1.0 → saviialib-1.2.0}/PKG-INFO +5 -3
  2. {saviialib-1.1.0 → saviialib-1.2.0}/pyproject.toml +6 -2
  3. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/directory_client/client/os_client.py +4 -0
  4. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/directory_client/directory_client.py +3 -0
  5. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/directory_client/directory_client_contract.py +4 -0
  6. saviialib-1.2.0/src/saviialib/libs/files_client/clients/csv_client.py +42 -0
  7. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/files_client/files_client.py +5 -7
  8. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/files_client/types/files_client_types.py +2 -2
  9. saviialib-1.2.0/src/saviialib/services/epii/use_cases/components/create_thies_statistics_file.py +160 -0
  10. saviialib-1.2.0/src/saviialib/services/epii/use_cases/components/thies_bp.py +442 -0
  11. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/use_cases/update_thies_data.py +95 -0
  12. {saviialib-1.1.0 → saviialib-1.2.0}/LICENSE +0 -0
  13. {saviialib-1.1.0 → saviialib-1.2.0}/README.md +0 -0
  14. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/__init__.py +0 -0
  15. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/__init__.py +0 -0
  16. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/api/__init__.py +0 -0
  17. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/api/epii_api_types.py +0 -0
  18. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/error_types/__init__.py +0 -0
  19. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/error_types/api/__init__.py +0 -0
  20. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/error_types/api/epii_api_error_types.py +0 -0
  21. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/error_types/common/__init__.py +0 -0
  22. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/general_types/error_types/common/common_types.py +0 -0
  23. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/directory_client/__init__.py +0 -0
  24. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/directory_client/types/directory_client_types.py +0 -0
  25. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/files_client/__init__.py +0 -0
  26. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/files_client/clients/aiofiles_client.py +0 -0
  27. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/files_client/files_client_contract.py +0 -0
  28. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/__init__.py +0 -0
  29. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/clients/__init__.py +0 -0
  30. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/clients/aioftp_client.py +0 -0
  31. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/clients/ftplib_client.py +0 -0
  32. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/ftp_client.py +0 -0
  33. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/ftp_client_contract.py +0 -0
  34. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/types/__init__.py +0 -0
  35. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/ftp_client/types/ftp_client_types.py +0 -0
  36. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/sharepoint_client/__init__.py +0 -0
  37. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/sharepoint_client/clients/sharepoint_rest_api.py +0 -0
  38. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/sharepoint_client/sharepoint_client.py +0 -0
  39. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/sharepoint_client/sharepoint_client_contract.py +0 -0
  40. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/sharepoint_client/types/sharepoint_client_types.py +0 -0
  41. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/libs/zero_dependency/utils/datetime_utils.py +0 -0
  42. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/__init__.py +0 -0
  43. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/api.py +0 -0
  44. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/controllers/__init__.py +0 -0
  45. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/controllers/types/__init__.py +0 -0
  46. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/controllers/types/update_thies_data_types.py +0 -0
  47. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/controllers/types/upload_backup_to_sharepoint_types.py +0 -0
  48. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/controllers/update_thies_data.py +0 -0
  49. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/controllers/upload_backup_to_sharepoint.py +0 -0
  50. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/use_cases/constants/upload_backup_to_sharepoint_constants.py +0 -0
  51. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/use_cases/types/__init__.py +0 -0
  52. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/use_cases/types/update_thies_data_types.py +0 -0
  53. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/use_cases/types/upload_backup_to_sharepoint_types.py +0 -0
  54. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/use_cases/upload_backup_to_sharepoint.py +0 -0
  55. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/utils/__init__.py +0 -0
  56. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/utils/update_thies_data_utils.py +0 -0
  57. {saviialib-1.1.0 → saviialib-1.2.0}/src/saviialib/services/epii/utils/upload_backup_to_sharepoint_utils.py +0 -0
PKG-INFO
@@ -1,21 +1,23 @@
  Metadata-Version: 2.3
  Name: saviialib
- Version: 1.1.0
+ Version: 1.2.0
  Summary: A client library for IoT projects in the RCER initiative
  License: MIT
  Author: pedropablozavalat
- Requires-Python: >=3.10,<4.0
+ Requires-Python: >=3.11,<4.0
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Requires-Dist: aiofiles
  Requires-Dist: aioftp
  Requires-Dist: aiohttp
+ Requires-Dist: bitarray (>=1.9.2,<2.0.0)
  Requires-Dist: build
  Requires-Dist: dotenv (>=0.9.9,<0.10.0)
+ Requires-Dist: numpy (>=2.3.0,<3.0.0)
+ Requires-Dist: pandas (>=2.3.0,<3.0.0)
  Requires-Dist: pytest-cov (>=6.1.1,<7.0.0)
  Description-Content-Type: text/markdown

pyproject.toml
@@ -1,19 +1,23 @@
  [tool.poetry]
  name = "saviialib"
- version = "1.1.0"
+ version = "1.2.0"
  description = "A client library for IoT projects in the RCER initiative"
  authors = ["pedropablozavalat"]
  license = "MIT"
  readme = "README.md"

  [tool.poetry.dependencies]
- python = "^3.10"
+ python = "^3.11"
  aioftp = "*"
  aiohttp = "*"
  aiofiles = "*"
  dotenv = "^0.9.9"
  pytest-cov="^6.1.1"
  build="*"
+ numpy="^2.3.0"
+ pandas="^2.3.0"
+ bitarray="^1.9.2"
+
  [tool.poetry.group.dev.dependencies]
  pytest = "8.3.5"
  pytest-asyncio = "0.26.0"

src/saviialib/libs/directory_client/client/os_client.py
@@ -21,3 +21,7 @@ class OsClient(DirectoryClientContract):
      @staticmethod
      async def isdir(path: str) -> list:
          return await asyncio.to_thread(os.path.isdir, path)
+
+     @staticmethod
+     async def makedirs(path: str) -> None:
+         return await asyncio.to_thread(os.makedirs, path, exist_ok=True)

src/saviialib/libs/directory_client/directory_client.py
@@ -26,3 +26,6 @@ class DirectoryClient(DirectoryClientContract):

      async def isdir(self, path: str) -> bool:
          return await self.client_obj.isdir(path)
+
+     async def makedirs(self, path: str) -> None:
+         return await self.client_obj.makedirs(path)

src/saviialib/libs/directory_client/directory_client_contract.py
@@ -17,3 +17,7 @@ class DirectoryClientContract(ABC):
      @abstractmethod
      async def isdir(self, path) -> bool:
          pass
+
+     @abstractmethod
+     async def makedirs(self, path: str) -> None:
+         pass

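Taken together, these three hunks extend the directory abstraction with an async makedirs that pushes the blocking os.makedirs(..., exist_ok=True) call onto a worker thread, mirroring the existing isdir wrapper. A minimal usage sketch — the import path and client_name="os_client" value are the ones the new csv_client.py below uses; the script itself is illustrative:

    import asyncio

    from saviialib.libs.directory_client.directory_client import (
        DirectoryClient,
        DirectoryClientArgs,
    )


    async def main() -> None:
        client = DirectoryClient(DirectoryClientArgs(client_name="os_client"))
        # os.makedirs runs with exist_ok=True on a worker thread,
        # so repeated calls for the same path are safe.
        await client.makedirs("thies-daily-files/ARCH_AV1")
        print(await client.isdir("thies-daily-files/ARCH_AV1"))  # True


    asyncio.run(main())
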
src/saviialib/libs/files_client/clients/csv_client.py (new file)
@@ -0,0 +1,42 @@
+ import csv
+ from asyncio import to_thread
+
+ from saviialib.libs.directory_client.directory_client import (
+     DirectoryClient,
+     DirectoryClientArgs,
+ )
+ from saviialib.libs.files_client.files_client_contract import FilesClientContract
+ from saviialib.libs.files_client.types.files_client_types import (
+     FilesClientInitArgs,
+     ReadArgs,
+     WriteArgs,
+ )
+
+
+ class CsvClient(FilesClientContract):
+     def __init__(self, args: FilesClientInitArgs):
+         self.dir_client = DirectoryClient(DirectoryClientArgs(client_name="os_client"))
+
+     async def read(self, args: ReadArgs) -> str | bytes:
+         raise OSError("This method is not implemented yet.")
+
+     async def write(self, args: WriteArgs) -> None:
+         file_type = args.file_name.split(".")[-1]
+         file_content = args.file_content
+         header = file_content[0].keys()
+         if file_type == "tsv":
+             delimiter = "\t"
+         else:  # Default CSV.
+             delimiter = ","
+
+         if args.destination_path == "":
+             dest_path = self.dir_client.join_paths(args.file_name)
+         else:
+             dest_path = self.dir_client.join_paths(
+                 args.destination_path, args.file_name
+             )
+
+         with open(dest_path, "w", newline="") as file:
+             writer = csv.DictWriter(file, fieldnames=header, delimiter=delimiter)  # type: ignore
+             await to_thread(writer.writeheader)
+             await to_thread(writer.writerows, file_content)  # type: ignore

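The new client writes a list of row dicts out as CSV or TSV, picking the delimiter from the file extension and taking the header from the first row's keys, so every row should share the same keys. A usage sketch through the FilesClient dispatch added in the next hunk; the row values are made up:

    import asyncio

    from saviialib.libs.files_client import FilesClient, FilesClientInitArgs, WriteArgs


    async def main() -> None:
        client = FilesClient(FilesClientInitArgs(client_name="csv_client"))
        rows = [
            {"statistic_id": "sensor.saviia_epii_humidity", "mean": 81.2},
            {"statistic_id": "sensor.saviia_epii_humidity", "mean": 79.5},
        ]
        # The .tsv extension selects the tab delimiter; anything else falls back to ",".
        await client.write(WriteArgs(file_name="stats.tsv", file_content=rows, mode="w"))


    asyncio.run(main())

Note that open() and the DictWriter construction still run on the event loop; only writeheader and writerows are dispatched through to_thread, and the mode field is ignored in favor of a hard-coded "w".
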
src/saviialib/libs/files_client/files_client.py
@@ -1,10 +1,11 @@
  from .clients.aiofiles_client import AioFilesClient
+ from .clients.csv_client import CsvClient
  from .files_client_contract import FilesClientContract
  from .types.files_client_types import FilesClientInitArgs, ReadArgs, WriteArgs


  class FilesClient(FilesClientContract):
-     CLIENTS = {"aiofiles_client"}
+     CLIENTS = {"aiofiles_client", "csv_client"}

      def __init__(self, args: FilesClientInitArgs) -> None:
          if args.client_name not in FilesClient.CLIENTS:
@@ -13,15 +14,12 @@ class FilesClient(FilesClientContract):

          if args.client_name == "aiofiles_client":
              self.client_obj = AioFilesClient(args)
+         elif args.client_name == "csv_client":
+             self.client_obj = CsvClient(args)
+
          self.client_name = args.client_name

      async def read(self, args: ReadArgs):
-         """Reads data from a specified source using the provided arguments.
-
-         :param args (ReadArgs): An object containing the parameters required for the read operation.
-
-         :return file: The result of the read operation, as returned by the client object.
-         """
          return await self.client_obj.read(args)

      async def write(self, args: WriteArgs):

src/saviialib/libs/files_client/types/files_client_types.py
@@ -1,5 +1,5 @@
  from dataclasses import dataclass
- from typing import Literal
+ from typing import Literal, Union, List, Dict


  @dataclass
@@ -27,6 +27,6 @@ class ReadArgs:
  @dataclass
  class WriteArgs:
      file_name: str
-     file_content: str | bytes
+     file_content: Union[str, bytes, List[Dict]]
      mode: Literal["w", "wb", "a"]
      destination_path: str = ""

src/saviialib/services/epii/use_cases/components/create_thies_statistics_file.py (new file)
@@ -0,0 +1,160 @@
+ from .thies_bp import THIESDayData
+ from typing import List
+ from logging import Logger
+ from asyncio import to_thread
+ from saviialib.libs.directory_client import DirectoryClient
+ from saviialib.libs.zero_dependency.utils.datetime_utils import datetime_to_str, today
+ from saviialib.libs.files_client import FilesClient, FilesClientInitArgs, WriteArgs
+
+
+ AVG_COLUMNS = {
+     "Date": "date",
+     "Time": "time",
+     "AirTemperature": "air_temperature",
+     "Radiation": "radiation",
+     "CO2": "carbon_dioxide",
+     "Precipitation": "precipitation",
+     "WS": "wind_velocity",
+     "WD": "wind_direction",
+     "Humidity": "humidity",
+ }
+
+ EXT_COLUMNS = {
+     "Date": "date",
+     "Time": "time",
+     "AirTemperature MIN": "air_temperature",
+     "AirTemperature MAX": "air_temperature",
+     "Radiation MIN": "radiation",
+     "Radiation MAX": "radiation",
+     "CO2 MIN": "carbon_dioxide",
+     "CO2 MAX": "carbon_dioxide",
+     "WS MIN": "wind_velocity",
+     "WS MAX gust": "wind_velocity",
+     "WD MIN": "wind_direction",
+     "WD MAX gust": "wind_direction",
+     "Humidity MIN": "humidity",
+     "Humidity MAX": "humidity",
+ }
+
+ AGG_DICT = {
+     "AirTemperature": "mean",
+     "AirTemperature MIN": "mean",
+     "AirTemperature MAX": "mean",
+     "Precipitation": "sum",
+     "Humidity": "mean",
+     "Humidity MIN": "mean",
+     "Humidity MAX": "mean",
+     "Radiation": "sum",
+     "Radiation MIN": "sum",
+     "Radiation MAX": "sum",
+     "CO2": "sum",
+     "CO2 MIN": "sum",
+     "CO2 MAX": "sum",
+     "WS": "mean",
+     "WS MIN": "mean",
+     "WS MAX gust": "mean",
+     "WD": "mean",
+     "WD MIN": "mean",
+     "WD MAX gust": "mean",
+ }
+
+ UNITS = {
+     "AirTemperature": "°C",
+     "Precipitation": "mm",
+     "Humidity": "%",
+     "Radiation": "W/m2",
+     "CO2": "ppm",
+     "WS": "m/s",
+     "WD": "°",
+ }
+
+
+ async def create_thies_daily_statistics_file(
+     os_client: DirectoryClient, logger: Logger, daily_files: List[str]
+ ) -> None:
+     logger.debug("[thies_synchronization_lib] Creating Daily Statistics ...")
+     csv_client = FilesClient(FilesClientInitArgs(client_name="csv_client"))
+     filename = datetime_to_str(today(), date_format="%Y%m%d") + ".BIN"
+     path_bin_av = os_client.join_paths("thies-daily-files", "ARCH_AV1", filename)
+     path_ini_av = os_client.join_paths("thies-daily-files", "ARCH_AV1", "DESCFILE.INI")
+     path_bin_ex = os_client.join_paths("thies-daily-files", "ARCH_EX1", filename)
+     path_ini_ex = os_client.join_paths("thies-daily-files", "ARCH_EX1", "DESCFILE.INI")
+
+     ext_df = THIESDayData("ex")
+     await to_thread(ext_df.read_binfile, path_bin_ex, path_ini_ex)
+
+     avg_df = THIESDayData("av")
+     await to_thread(avg_df.read_binfile, path_bin_av, path_ini_av)
+
+     ext_df = ext_df.dataDF[EXT_COLUMNS.keys()]
+     avg_df = avg_df.dataDF[AVG_COLUMNS.keys()]
+
+     # Merge both dataframes
+     df = avg_df.merge(ext_df, on=["Date", "Time"], how="outer")
+     # Set the date as dd.mm.yyyy format.
+     df["Date"] = df["Date"].str.replace(
+         r"(\d{4})/(\d{2})/(\d{2})", r"\3.\2.\1", regex=True
+     )
+     df["Hour"] = df["Time"].str[:2]
+
+     # Group by hour.
+     hourly_agg = df.groupby(["Date", "Hour"]).agg(AGG_DICT).reset_index()
+
+     rows = []
+     # For each attribute in AVG_COLUMNS (except Date, Time)
+     for col, col_id in AVG_COLUMNS.items():
+         if col in ["Date", "Time"]:
+             continue
+         # Determine the corresponding min/max columns if they exist
+         min_col = f"{col} MIN"
+         max_col = f"{col} MAX"
+         mean_col = col
+         if col in ["WS", "WD"]:
+             max_col += " gust"
+
+         unit = UNITS.get(col, "")
+
+         for idx, row in hourly_agg.iterrows():
+             statistic_id = f"sensor.saviia_epii_{col_id}"
+             start = f"{row['Date']} {row['Hour']}:00"
+             mean = row[mean_col] if mean_col in row else 0
+             min_val = row[min_col] if min_col in row else 0
+             max_val = row[max_col] if max_col in row else 0
+
+             # Default min/max to 0 when the column is absent
+             if min_col not in row:
+                 min_val = 0
+             if max_col not in row:
+                 max_val = 0
+
+             if col in ["WD"]:  # Wind direction: reuse the mean for min/max
+                 rows.append(
+                     {
+                         "statistic_id": statistic_id,
+                         "unit": unit,
+                         "start": start,
+                         "min": mean,
+                         "max": mean,
+                         "mean": mean,
+                     }
+                 )
+             else:
+                 rows.append(
+                     {
+                         "statistic_id": statistic_id,
+                         "unit": unit,
+                         "start": start,
+                         "min": min_val,
+                         "max": max_val,
+                         "mean": mean,
+                     }
+                 )
+
+     logger.debug("[thies_synchronization_lib] Saving file in /config folder ...")
+     logger.debug(rows[0].keys())
+     await csv_client.write(
+         WriteArgs(file_name="thies_daily_statistics.tsv", file_content=rows, mode="w")
+     )
+     logger.debug(
+         "[thies_synchronization_lib] thies_daily_statistics.tsv created successfully!"
+     )

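The hourly roll-up above hinges on two pandas steps: the regex rewrite of Date from YYYY/MM/DD to DD.MM.YYYY, and a groupby over (Date, Hour) with the per-column aggregations from AGG_DICT. A self-contained sketch of the same pattern, with invented readings:

    import pandas as pd

    df = pd.DataFrame(
        {
            "Date": ["2024/07/01"] * 3,
            "Time": ["00:00", "00:10", "01:00"],
            "AirTemperature": [10.0, 12.0, 9.0],
            "Precipitation": [0.1, 0.2, 0.0],
        }
    )
    # Rewrite YYYY/MM/DD as DD.MM.YYYY, exactly as the statistics builder does.
    df["Date"] = df["Date"].str.replace(
        r"(\d{4})/(\d{2})/(\d{2})", r"\3.\2.\1", regex=True
    )
    df["Hour"] = df["Time"].str[:2]
    # Mean temperature and summed precipitation per (Date, Hour) bucket.
    hourly = (
        df.groupby(["Date", "Hour"])
        .agg({"AirTemperature": "mean", "Precipitation": "sum"})
        .reset_index()
    )
    print(hourly)  # two buckets: hours "00" and "01" on 01.07.2024
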
src/saviialib/services/epii/use_cases/components/thies_bp.py (new file)
@@ -0,0 +1,442 @@
+ import configparser
+ import os
+ import struct
+ from datetime import datetime, timedelta
+
+ import numpy as np
+ import pandas as pd
+ from bitarray import bitarray
+
+ TIME_LIST = []
+
+ start_time = datetime.strptime("00:00", "%H:%M")
+ for i in range(0, 24 * 60, 10):
+     TIME_LIST.append((start_time + timedelta(minutes=i)).strftime("%H:%M"))
+
+ ROWS = len(TIME_LIST)
+
+
+ def date_range(start_date: str, end_date: str) -> list:
+     start = datetime.strptime(start_date, "%Y/%m/%d") + timedelta(days=1)
+     end = datetime.strptime(end_date, "%Y/%m/%d") - timedelta(days=1)
+     return [
+         (start + timedelta(days=i)).strftime("%Y/%m/%d")
+         for i in range((end - start).days + 1)
+         if start <= end
+     ]
+
+
+ def add_date_sep(date: str) -> str:
+     """
+     Input: date as YYYYMMDD.BIN
+     Returns: date as YYYY/MM/DD
+     """
+     return date[:4] + "/" + date[4:6] + "/" + date[6:8]
+
+
+ def verify_datestr(filename: str) -> bool:
+     """
+     Returns True if filename has the YYYYMMDD.BIN format
+     """
+     try:
+         datetime.strptime(filename[:8], "%Y%m%d")
+         return filename.endswith(".BIN")
+     except ValueError:
+         return False
+
+
+ def read_descfile(path) -> dict:
+     """
+     Input: path DESCFILE.INI
+     Returns: dict
+         key is index [i]
+         value is dict with parameters from .ini
+     """
+     if type(path) == dict:  # noqa: E721
+         return path
+     config = configparser.ConfigParser()
+     config.read(path)
+     data_dict = {}
+     for section in config.sections():
+         section_dict = dict(config.items(section))
+         for v in section_dict:
+             if v == "name":
+                 continue
+             section_dict[v] = int(section_dict[v])
+         data_dict[int(section)] = section_dict
+     return data_dict
+
+
+ class THIESDayData:
+     # Bytes per parameter
+     BPP = {"av": 5, "ex": 9}
+     # Timestamp Offset
+     OFFSET = 4
+
+     def __init__(self, datatype: str) -> None:
+         d = datatype.lower().strip()
+         if d not in ["av", "ex"]:
+             raise ValueError(
+                 "Invalid datatype. Expected 'av' (average values) or 'ex' (minmax values)."
+             )
+
+         self._bpr = -1  # Bytes per row
+         self._bpp = THIESDayData.BPP[d]  # Bytes per parameter
+         self._datatype = d
+         self._binfile = None
+         self.descfile = {}
+         self.nparameters = -1
+         self._parameters = []
+         self.nbytes = -1
+         self.nrows = -1
+         self._date = ""
+         self.statusDF = pd.DataFrame()
+         self.dataDF = pd.DataFrame()
+         self.datesDF = pd.DataFrame()
+
+     @staticmethod
+     def _bytes2datetime(b: bytes, only_time: bool = False) -> str:
+         """
+         Input: bytes (size 4)
+         Output: str (YYYY/MM/DD hh:mm:ss)
+         """
+         bits = bitarray()
+         bits.frombytes(b[::-1])  # Invert 4 bytes
+         hr = int(bits[15:20].to01(), 2)
+         min = int(bits[20:26].to01(), 2)
+         sec = int(bits[26:].to01(), 2)
+         time = f"{str(hr).zfill(2)}:{str(min).zfill(2)}"
+         if only_time:
+             return time
+         yr = int(bits[0:6].to01(), 2)
+         mon = int(bits[6:10].to01(), 2)
+         day = int(bits[10:15].to01(), 2)
+         date = f"20{yr}/{str(mon).zfill(2)}/{str(day).zfill(2)}"
+         return date + " " + time + f":{str(sec).zfill(2)}"
+
+     def _set_descfile(self, inipath: str) -> None:
+         self.descfile = read_descfile(inipath)
+         self.nparameters = len(self.descfile)
+         row_size = sum([self.descfile[num]["size"] for num in self.descfile])
+         self._bpr = row_size + THIESDayData.OFFSET
+
+     def read_binfile(self, binpath: str, inipath: str) -> None:
+         self._set_descfile(inipath)
+         with open(binpath, "rb") as bin_file:
+             binfile = bin_file.read()
+         self._binfile = binfile
+         self.nbytes = len(self._binfile)
+         self.nrows = int(self.nbytes / self._bpr)
+         self._make_dataframes()
+
+     def make_empty(self, inipath: str, date: str) -> None:
+         self._set_descfile(inipath)
+         dataDF = pd.DataFrame(
+             None, index=range(ROWS), columns=range(self.nparameters + 2)
+         )
+         col_names = {0: "Date", 1: "Time"}
+         par_names = {key + 1: self.descfile[key]["name"] for key in self.descfile}
+         col_names.update(par_names)
+         dataDF = dataDF.rename(columns=col_names)
+         dataDF["Time"] = TIME_LIST
+         dataDF["Date"] = [date] * ROWS
+
+         self.dataDF = dataDF
+         self.statusDF = dataDF
+         self.datesDF = dataDF
+
+     def _make_dataframes(self) -> None:
+         """
+         Builds data DF, status DF and, if datatype=ex, dates DF.
+         """
+         byterows = [
+             self._binfile[i * self._bpr + THIESDayData.OFFSET : (i + 1) * self._bpr]
+             for i in range(0, self.nrows)
+         ]
+         data_arr = np.zeros((self.nrows, self.nparameters))
+         status_arr = np.zeros((self.nrows, self.nparameters))
+         time_idx = np.empty(self.nrows, dtype=object)
+         date_idx = np.empty(self.nrows, dtype=object)
+         dates_arr = np.empty((self.nrows, self.nparameters), dtype=object)
+
+         for i, row in enumerate(byterows):
+             # Timestamp
+             ts_bytes = self._binfile[i * self._bpr : i * self._bpr + 4]
+             ts = THIESDayData._bytes2datetime(ts_bytes)
+             date_idx[i], time_idx[i] = ts[:-3].split()
+
+             for j in range(self.nparameters):
+                 # Status = byte 1
+                 status = row[j * self._bpp]
+                 status_arr[i, j] = status
+
+                 # Value = bytes 2-5, float
+                 value = struct.unpack("<f", row[j * self._bpp + 1 : j * self._bpp + 5])[
+                     0
+                 ]
+                 data_arr[i, j] = round(value, 1)
+
+                 if self._datatype == "ex":
+                     # Datetime = bytes 6-9
+                     dt = THIESDayData._bytes2datetime(
+                         row[j * self._bpp + 5 : j * self._bpp + 9], only_time=True
+                     )
+                     dates_arr[i, j] = dt
+
+         self.dataDF = pd.DataFrame(data_arr).rename(
+             columns={i: self.descfile[i + 1]["name"] for i in range(self.nparameters)}
+         )
+         self.statusDF = pd.DataFrame(status_arr).rename(
+             columns={i: self.descfile[i + 1]["name"] for i in range(self.nparameters)}
+         )
+         self.dataDF = self.dataDF.where(self.statusDF == 0.0, other=None)
+
+         if self._datatype == "ex":
+             self.datesDF = pd.DataFrame(dates_arr).rename(
+                 columns={
+                     i: self.descfile[i + 1]["name"] for i in range(self.nparameters)
+                 }
+             )
+             self.datesDF = self.datesDF.where(self.statusDF == 0.0, other=None)
+             self.datesDF.insert(0, "Time", time_idx)
+             self.datesDF.insert(0, "Date", date_idx)
+
+         self.dataDF.insert(0, "Time", time_idx)
+         self.dataDF.insert(0, "Date", date_idx)
+         self.statusDF.insert(0, "Time", time_idx)
+         self.statusDF.insert(0, "Date", date_idx)
+
+     def _generate_blank_rows(self) -> pd.DataFrame:
+         if len(self) == ROWS:
+             # Nothing to fill (already full rows)
+             return []
+
+         new = []
+         none_row = {col: None for col in self.dataDF.columns}
+         none_row["Date"] = self.date
+         current_times = self.dataDF["Time"]
+         for time in TIME_LIST:
+             if time not in current_times.values:
+                 row = none_row.copy()
+                 # 'time' was not measured in the original data
+                 # fill it with None row
+                 row["Time"] = time
+                 new.append(row)
+         return pd.DataFrame(new)
+
+     def complete_empty(self):
+         """
+         Completes DataFrames with all the timestamps of missing data
+         Fills all columns with 'None' except Date and Time cols
+         """
+         if len(self) == ROWS:
+             return
+         new_rows = self._generate_blank_rows()
+         # self.dataDF = self.dataDF.append(new_rows, ignore_index=True)
+         self.dataDF = pd.concat([self.dataDF, new_rows], ignore_index=True)
+         self.dataDF = self.dataDF.sort_values(by="Time").reset_index(drop=True)
+         # self.statusDF = self.statusDF.append(new_rows, ignore_index=True)
+         self.statusDF = pd.concat([self.statusDF, new_rows], ignore_index=True)
+         self.statusDF = self.statusDF.sort_values(by="Time").reset_index(drop=True)
+
+         if self._datatype == "ex":
+             # self.datesDF = self.datesDF.append(new_rows, ignore_index=True)
+             self.datesDF = pd.concat([self.datesDF, new_rows], ignore_index=True)
+             self.datesDF = self.datesDF.sort_values(by="Time").reset_index(drop=True)
+
+     def sort_by(self, cols: list):
+         self.dataDF = self.dataDF.sort_values(
+             by=cols, ascending=[True, True]
+         ).reset_index(drop=True)
+         self.statusDF = self.statusDF.sort_values(
+             by=cols, ascending=[True, True]
+         ).reset_index(drop=True)
+         if len(self.datesDF):
+             self.datesDF = self.datesDF.sort_values(
+                 by=cols, ascending=[True, True]
+             ).reset_index(drop=True)
+
+     @property
+     def date(self) -> str:
+         """
+         Returns str of date of measurement
+         """
+         if len(self.dataDF) and self._date == "":
+             self._date = self.dataDF["Date"][0]
+         return self._date
+
+     @property
+     def shape(self):
+         return self.dataDF.shape
+
+     @property
+     def info(self) -> None:
+         bf = self._binfile
+         if bf:
+             bf = bf[:8]
+         print(f"""=== THIES Day Data Instance ===\n
+         Bytes per row (BPR): {self._bpr}
+         Bytes per parameter (BPP): {self._bpp}
+         Datatype: {self._datatype}
+         Binfile: {bf}...
+         Descfile: {self.descfile}
+         N parameters: {self.nparameters}
+         N Bytes: {self.nbytes}
+         Rows: {self.nrows}
+         Date: {self.date}
+         """)
+
+     @property
+     def parameters(self) -> list:
+         if self._parameters == []:
+             self._parameters = [self.descfile[i]["name"] for i in self.descfile]
+         return self._parameters
+
+     def write_csv(self, filename: str) -> None:
+         with open(filename + ".csv", "w") as outfile:
+             outfile.write(self.dataDF.to_csv())
+
+     def __repr__(self) -> str:
+         return str(self.dataDF)
+
+     def _repr_html_(self):
+         return self.dataDF._repr_html_()
+
+     def __len__(self):
+         return len(self.dataDF)
+
+     def __add__(self, other):
+         if isinstance(other, THIESDayData):
+             new = THIESDayData(datatype=self._datatype)
+             new.descfile = other.descfile
+             new.nparameters = other.nparameters
+             new._parameters = other.parameters
+             new.nrows = self.nrows + other.nrows
+             new.nbytes = self.nbytes + other.nbytes
+             new.statusDF = pd.concat([self.statusDF, other.statusDF]).reset_index(
+                 drop=True
+             )
+             new.dataDF = pd.concat([self.dataDF, other.dataDF]).reset_index(drop=True)
+             if self._datatype == "ex":
+                 new.datesDF = pd.concat([self.datesDF, other.datesDF]).reset_index(
+                     drop=True
+                 )
+             return new
+         raise TypeError(
+             f"unsupported operand type(s) for +: 'THIESDayData' and '{type(other)}'"
+         )
+
+
+ class THIESData:
+     def __init__(self, datatype: str, dirpath: str) -> None:
+         d = datatype.lower().strip()
+         if d not in ["av", "ex"]:
+             raise ValueError(
+                 "Invalid datatype. Expected 'av' (average values) or 'ex' (minmax values)."
+             )
+
+         self._path = dirpath
+         self._datatype = d
+         self.filelist = []
+
+         self._verify_path(dirpath)
+         descpath = self._path + "/DESCFILE.INI"
+         self.descfile = read_descfile(descpath)
+
+         self.daylist = []
+         self.fullData = pd.DataFrame()
+
+         self.completed = False
+
+     def reset(self):
+         self.daylist = []
+         self.fullData = pd.DataFrame()
+         self.completed = False
+
+     def _verify_path(self, path: str) -> None:
+         fl = sorted(os.listdir(path))
+         if "DESCFILE.INI" not in fl:
+             raise FileNotFoundError("No DESCFILE.INI found in this directory.")
+         self.filelist = [file for file in fl if verify_datestr(file)]
+
+     def load_df(self, complete_rows=False) -> pd.DataFrame:
+         """Reads folder given in DIRPATH and
+         transforms data into DF. Saves it in self.fullData
+         - complete_rows (bool): if True, completes DFs with Empty Rows by calling
+           THIESDayData.complete_empty()
+         """
+         self.reset()
+         for f in self.filelist:
+             filepath = f"{self._path}/{f}"
+             daydata = THIESDayData(datatype=self._datatype)
+             daydata.read_binfile(binpath=filepath, inipath=self.descfile)
+             if complete_rows:
+                 daydata.complete_empty()
+             self.daylist.append(daydata)
+
+         self.fullData = sum(self.daylist, start=THIESDayData(self._datatype))
+
+         return self.fullData
+
+     def complete_empty_dates(self):
+         if self.completed:
+             return
+         date_s = add_date_sep(self.filelist[0])
+         date_e = add_date_sep(self.filelist[-1])
+         d_range = date_range(date_s, date_e)
+         for date in d_range:
+             if date not in self.fullData.dataDF["Date"].values:
+                 # Missing day
+                 new = THIESDayData(self._datatype)
+                 new.make_empty(self.descfile, date=date)
+                 self.fullData += new
+
+         self.fullData.sort_by(["Date", "Time"])
+         self.completed = True
+
+     def df2csv(self, outpath: str) -> None:
+         # if self._datatype == 'av':
+         #     FORMAT FOR EX FILES ???
+         self.fullData.write_csv(outpath)
+         print(f"Data written in: {outpath}.csv")
+
+     def read_write(self, outpath: str):
+         """Quick version of the read-write process.
+         Reads the path given and writes all BIN file data in same CSV
+         Does NOT save as DF the data.
+         Does NOT complete missing timestamps with empty rows.
+         """
+         write_header = True
+         bcount = 0
+         with open(outpath + ".csv", "w") as outfile:
+             for i, f in enumerate(self.filelist):
+                 filepath = f"{self._path}/{f}"
+                 daydata = THIESDayData(datatype=self._datatype)
+                 daydata.read_binfile(binpath=filepath, inipath=self.descfile)
+                 outfile.write(daydata.dataDF.to_csv(header=write_header))
+                 bcount += daydata.nbytes
+                 if i == 0:
+                     write_header = False
+         print(f"Data written in: {outpath}.csv")
+
+     @property
+     def dataDF(self):
+         return self.fullData.dataDF
+
+     @property
+     def shape(self):
+         return self.fullData.shape
+
+     @property
+     def size(self):
+         return len(self.filelist)
+
+     @property
+     def parameters(self):
+         return self.fullData.parameters
+
+     def __repr__(self) -> str:
+         return str(self.fullData)
+
+     def _repr_html_(self):
+         return self.fullData

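The trickiest part of thies_bp.py is the packed timestamp: each row begins with a 4-byte little-endian word storing year-since-2000, month, day, hour, minute, and second in 6/4/5/5/6/6 bits, which _bytes2datetime recovers by reversing the bytes and slicing the bit string. A round-trip sketch with an invented timestamp:

    import struct

    from bitarray import bitarray

    # Pack 2025/06/15 12:34:56 into the 6/4/5/5/6/6-bit layout, MSB first.
    packed = (25 << 26) | (6 << 22) | (15 << 17) | (12 << 12) | (34 << 6) | 56
    b = struct.pack("<I", packed)  # the BIN files store the word little-endian

    # Decode the way THIESDayData._bytes2datetime does.
    bits = bitarray()
    bits.frombytes(b[::-1])  # reverse back to big-endian before slicing
    yr = int(bits[0:6].to01(), 2)
    mon = int(bits[6:10].to01(), 2)
    day = int(bits[10:15].to01(), 2)
    hr = int(bits[15:20].to01(), 2)
    mn = int(bits[20:26].to01(), 2)
    sec = int(bits[26:].to01(), 2)
    print(f"20{yr}/{mon:02d}/{day:02d} {hr:02d}:{mn:02d}:{sec:02d}")
    # -> 2025/06/15 12:34:56
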
src/saviialib/services/epii/use_cases/update_thies_data.py
@@ -23,6 +23,9 @@ from saviialib.libs.sharepoint_client import (
      SpListFoldersArgs,
      SpUploadFileArgs,
  )
+ from saviialib.libs.directory_client import DirectoryClient, DirectoryClientArgs
+
+ from saviialib.libs.files_client import FilesClient, FilesClientInitArgs, WriteArgs
  from saviialib.services.epii.use_cases.types import (
      FtpClientConfig,
      SharepointConfig,
@@ -32,6 +35,7 @@ from saviialib.services.epii.utils import (
      parse_execute_response,
  )
  from saviialib.libs.zero_dependency.utils.datetime_utils import today, datetime_to_str
+ from .components.create_thies_statistics_file import create_thies_daily_statistics_file


  class UpdateThiesDataUseCase:
@@ -45,6 +49,8 @@ class UpdateThiesDataUseCase:
          self.ftp_server_folders_path = input.ftp_server_folders_path
          self.sharepoint_base_url = f"/sites/{self.sharepoint_client.site_name}"
          self.uploading = set()
+         self.os_client = self._initialize_os_client()
+         self.files_client = self._initialize_files_client()

      def _initialize_sharepoint_client(
          self, config: SharepointConfig
@@ -64,6 +70,12 @@ class UpdateThiesDataUseCase:
          except RuntimeError as error:
              raise FtpClientError(error)

+     def _initialize_os_client(self) -> DirectoryClient:
+         return DirectoryClient(DirectoryClientArgs(client_name="os_client"))
+
+     def _initialize_files_client(self) -> FilesClient:
+         return FilesClient(FilesClientInitArgs(client_name="aiofiles_client"))
+
      async def _validate_sharepoint_current_folders(self):
          async with self.sharepoint_client:
              folder_base_path = "/".join(
@@ -219,6 +231,84 @@

          return uploading

+     async def _extract_thies_daily_statistics(self) -> None:
+         # Create the thies-daily-files folder if it doesn't exist
+         self.logger.info("[thies_synchronization_lib] Creating Daily files directory")
+         base_folder = "thies-daily-files"
+         if not await self.os_client.isdir(base_folder):
+             for dest_folder in {"ARCH_AV1", "ARCH_EX1"}:
+                 await self.os_client.makedirs(
+                     self.os_client.join_paths(base_folder, dest_folder)
+                 )
+         else:
+             self.logger.info(
+                 "[thies_synchronization_lib] Thies daily files already exists"
+             )
+
+         # Read the daily files and save each one in the folder
+         daily_files = [
+             prefix + datetime_to_str(today(), date_format="%Y%m%d") + ".BIN"
+             for prefix in ["AVG_", "EXT_"]
+         ]
+         # Receive from the FTP server and write the file in thies-daily-files
+         for file in daily_files:
+             prefix, filename = file.split("_", 1)
+             # The first path is for AVG files; the second is for EXT files
+             folder_path = next(
+                 (
+                     path
+                     for path in self.ftp_server_folders_path
+                     if prefix == ("AVG" if "AV" in path else "EXT")
+                 ),
+                 self.ftp_server_folders_path[0],  # Default to the first path
+             )
+             # Retrieve the AVG or EXT file
+             file_path = f"{folder_path}/{filename}"
+             try:
+                 content = await self.thies_ftp_client.read_file(
+                     FtpReadFileArgs(file_path)
+                 )
+             except FileNotFoundError as error:
+                 raise ThiesFetchingError(reason=str(error))
+             # Destination local folder
+             self.logger.debug(file_path)
+
+             dest_folder = "ARCH_AV1" if prefix == "AVG" else "ARCH_EX1"
+             await self.files_client.write(
+                 WriteArgs(
+                     file_name=filename,
+                     file_content=content,
+                     mode="wb",
+                     destination_path=f"{base_folder}/{dest_folder}",
+                 )
+             )
+             # Retrieve the DESCFILE and save it if it is not in the base folder
+             descfile_name = "DESCFILE.INI"
+             if not await self.os_client.path_exists(
+                 self.os_client.join_paths(base_folder, dest_folder, descfile_name)
+             ):
+                 descfile_path = f"{folder_path}/{descfile_name}"
+                 descfile_content = await self.thies_ftp_client.read_file(
+                     FtpReadFileArgs(descfile_path)
+                 )
+                 await self.files_client.write(
+                     WriteArgs(
+                         file_name=descfile_name,
+                         file_content=descfile_content,
+                         mode="wb",
+                         destination_path=f"{base_folder}/{dest_folder}",
+                     )
+                 )
+             else:
+                 self.logger.debug(
+                     "[thies_synchronization_lib] DESCFILE.INI already exists in %s",
+                     dest_folder,
+                 )
+         # Read the files with the THIESDayData class
+         await create_thies_daily_statistics_file(
+             self.os_client, self.logger, daily_files
+         )
+
      async def execute(self):
          """Synchronize data from the THIES Center to the cloud."""
          self.logger.debug("[thies_synchronization_lib] Starting ...")
@@ -239,6 +329,9 @@
              str(len(cloud_files)),
          )
          self.uploading = await self._sync_pending_files(thies_files, cloud_files)
+         # Extract thies statistics for SAVIIA Sensors
+         await self._extract_thies_daily_statistics()
+
          if not self.uploading:
              raise EmptyDataError(reason="No files to upload.")
          # Fetch the content of the files to be uploaded from THIES FTP Server
@@ -248,7 +341,9 @@
          upload_statistics = await self.upload_thies_files_to_sharepoint(
              thies_fetched_files
          )
+         self.logger.info(upload_statistics)
          self.logger.debug(
              "[thies_synchronization_lib] All the files were uploaded successfully 🎉"
          )
+
          return parse_execute_response(thies_fetched_files, upload_statistics)  # type: ignore

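One detail of _extract_thies_daily_statistics worth spelling out: the FTP file names are built as AVG_YYYYMMDD.BIN / EXT_YYYYMMDD.BIN, then split("_", 1) separates the routing prefix from the local file name, which lands in ARCH_AV1 or ARCH_EX1. A standalone sketch of that mapping (standard-library only; the library's own today()/datetime_to_str helpers are replaced with datetime.date for illustration):

    from datetime import date

    today_str = date.today().strftime("%Y%m%d")
    daily_files = [prefix + today_str + ".BIN" for prefix in ["AVG_", "EXT_"]]
    for file in daily_files:
        prefix, filename = file.split("_", 1)  # "AVG_20250615.BIN" -> ("AVG", "20250615.BIN")
        dest_folder = "ARCH_AV1" if prefix == "AVG" else "ARCH_EX1"
        print(filename, "->", dest_folder)
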