pipeline-eds 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. pipeline/__init__.py +4 -0
  2. pipeline/__main__.py +1 -0
  3. pipeline/api/__init__.py +0 -0
  4. pipeline/api/eds.py +980 -0
  5. pipeline/api/rjn.py +157 -0
  6. pipeline/api/status_api.py +9 -0
  7. pipeline/calls.py +108 -0
  8. pipeline/cli.py +282 -0
  9. pipeline/configrationmanager.py +22 -0
  10. pipeline/decorators.py +13 -0
  11. pipeline/env.py +61 -0
  12. pipeline/environment.py +59 -0
  13. pipeline/gui_fastapi_plotly_live.py +78 -0
  14. pipeline/gui_mpl_live.py +113 -0
  15. pipeline/helpers.py +125 -0
  16. pipeline/logging_setup.py +45 -0
  17. pipeline/pastehelpers.py +10 -0
  18. pipeline/philosophy.py +62 -0
  19. pipeline/plotbuffer.py +21 -0
  20. pipeline/points_loader.py +19 -0
  21. pipeline/queriesmanager.py +122 -0
  22. pipeline/time_manager.py +211 -0
  23. pipeline/workspace_manager.py +253 -0
  24. pipeline_eds-0.2.4.dist-info/LICENSE +14 -0
  25. pipeline_eds-0.2.4.dist-info/METADATA +238 -0
  26. pipeline_eds-0.2.4.dist-info/RECORD +62 -0
  27. pipeline_eds-0.2.4.dist-info/WHEEL +4 -0
  28. pipeline_eds-0.2.4.dist-info/entry_points.txt +6 -0
  29. workspaces/default-workspace.toml +3 -0
  30. workspaces/eds_to_rjn/__init__.py +0 -0
  31. workspaces/eds_to_rjn/code/__init__.py +0 -0
  32. workspaces/eds_to_rjn/code/aggregator.py +84 -0
  33. workspaces/eds_to_rjn/code/collector.py +60 -0
  34. workspaces/eds_to_rjn/code/sanitizer.py +40 -0
  35. workspaces/eds_to_rjn/code/storage.py +16 -0
  36. workspaces/eds_to_rjn/configurations/config_time.toml +11 -0
  37. workspaces/eds_to_rjn/configurations/configuration.toml +2 -0
  38. workspaces/eds_to_rjn/exports/README.md +7 -0
  39. workspaces/eds_to_rjn/exports/aggregate/README.md +7 -0
  40. workspaces/eds_to_rjn/exports/aggregate/live_data - Copy.csv +355 -0
  41. workspaces/eds_to_rjn/exports/aggregate/live_data_EFF.csv +17521 -0
  42. workspaces/eds_to_rjn/exports/aggregate/live_data_INF.csv +17521 -0
  43. workspaces/eds_to_rjn/exports/export_eds_points_neo.txt +11015 -0
  44. workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.csv +8759 -0
  45. workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.xlsx +0 -0
  46. workspaces/eds_to_rjn/exports/manual_effluent.csv +8759 -0
  47. workspaces/eds_to_rjn/exports/manual_influent.csv +8759 -0
  48. workspaces/eds_to_rjn/exports/manual_wetwell.csv +8761 -0
  49. workspaces/eds_to_rjn/history/time_sample.txt +0 -0
  50. workspaces/eds_to_rjn/imports/zdMaxson_idcsD321E_sid11003.toml +14 -0
  51. workspaces/eds_to_rjn/imports/zdMaxson_idcsFI8001_sid8528.toml +14 -0
  52. workspaces/eds_to_rjn/imports/zdMaxson_idcsM100FI_sid2308.toml +14 -0
  53. workspaces/eds_to_rjn/imports/zdMaxson_idcsM310LI_sid2382.toml +14 -0
  54. workspaces/eds_to_rjn/queries/default-queries.toml +4 -0
  55. workspaces/eds_to_rjn/queries/points-maxson.csv +4 -0
  56. workspaces/eds_to_rjn/queries/points-stiles.csv +4 -0
  57. workspaces/eds_to_rjn/queries/timestamps_success.json +20 -0
  58. workspaces/eds_to_rjn/scripts/__init__.py +0 -0
  59. workspaces/eds_to_rjn/scripts/daemon_runner.py +212 -0
  60. workspaces/eds_to_rjn/secrets/README.md +24 -0
  61. workspaces/eds_to_rjn/secrets/secrets-example.yaml +15 -0
  62. workspaces/eds_to_termux/..txt +0 -0
File without changes
@@ -0,0 +1,14 @@
+ [eds_characteristics]
+ ip_address="172.19.4.127"
+ idcs="D-321E"
+ iess="D-321E.UNIT0@NET0"
+ sid=11003
+ zd="Maxson"
+
+ [manual_characteristics]
+ shortdesc="DOSE"
+
+ [rjn_characteristics]
+ rjn_siteid="64c5c5ac-04ca-4a08-bdce-5327e4b21bc5"
+ rjn_entityid=null
+ rjn_name=null
@@ -0,0 +1,14 @@
+ [eds_characteristics]
+ ip_address="172.19.4.127"
+ idcs="FI8001"
+ iess="FI8001.UNIT0@NET0"
+ sid=8528
+ zd="Maxson"
+
+ [manual_characteristics]
+ shortdesc="EFF"
+
+ [rjn_characteristics]
+ rjn_siteid="64c5c5ac-04ca-4a08-bdce-5327e4b21bc5"
+ rjn_entityid="s198"
+ rjn_name="Effluent"
@@ -0,0 +1,14 @@
+ [eds_characteristics]
+ ip_address="172.19.4.127"
+ idcs="M100FI"
+ iess = "M100FI.UNIT0@NET0"
+ sid=2308
+ zd="Maxson"
+
+ [manual_characteristics]
+ shortdesc="INFLU"
+
+ [rjn_characteristics]
+ rjn_siteid="64c5c5ac-04ca-4a08-bdce-5327e4b21bc5"
+ rjn_entityid="s199"
+ rjn_name="Influent"
@@ -0,0 +1,14 @@
+ [eds_characteristics]
+ ip_address="172.19.4.127"
+ idcs="M310LI"
+ iess="M310LI.UNIT0@NET0"
+ sid=2382
+ zd="Maxson"
+
+ [manual_characteristics]
+ shortdesc="WETWELL"
+
+ [rjn_characteristics]
+ rjn_siteid="64c5c5ac-04ca-4a08-bdce-5327e4b21bc5"
+ rjn_entityid=null
+ rjn_name=null
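The four import TOMLs above each bind one EDS point (zd, idcs, iess, sid) to an RJN site and entity. Below is a minimal sketch of how one of these files could be read with the standard library; the path and the downstream use of the keys are illustrative assumptions, not code from the package. Note that the D-321E and M310LI files use a bare `null`, which is not valid TOML, so those two entries would need the fields quoted or omitted before a loader like this would accept them.

```python
# Minimal sketch (Python 3.11+ stdlib tomllib); path and key usage are assumptions
# for illustration, not code shipped in pipeline-eds.
import tomllib
from pathlib import Path

path = Path("workspaces/eds_to_rjn/imports/zdMaxson_idcsFI8001_sid8528.toml")
with path.open("rb") as f:
    point = tomllib.load(f)

iess = point["eds_characteristics"]["iess"]            # "FI8001.UNIT0@NET0"
sid = point["eds_characteristics"]["sid"]               # 8528
entity = point["rjn_characteristics"]["rjn_entityid"]   # "s198"
print(iess, sid, entity)
```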
@@ -0,0 +1,4 @@
+ [default-query]
+ # example: files=["points-maxson.csv"]
+ files = ["points-maxson.csv", "points-stiles.csv"]
+ use-most-recently-edited-query-file = false # while true, this will ignore the files variable list and instead use a single list of the most recent files
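A hedged sketch of how the `use-most-recently-edited-query-file` switch above could be resolved; the function name and directory handling are illustrative assumptions, not the package's QueriesManager.

```python
# Hypothetical resolution of the default-queries.toml switch; names and paths
# are assumptions for illustration.
import tomllib
from pathlib import Path

def resolve_query_files(queries_dir: Path) -> list[Path]:
    cfg = tomllib.loads((queries_dir / "default-queries.toml").read_text())["default-query"]
    if cfg.get("use-most-recently-edited-query-file", False):
        # ignore the configured list and take only the most recently edited CSV
        by_mtime = sorted(queries_dir.glob("*.csv"), key=lambda p: p.stat().st_mtime, reverse=True)
        return by_mtime[:1]
    return [queries_dir / name for name in cfg.get("files", [])]
```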
@@ -0,0 +1,4 @@
+ zd,idcs,iess,sid,shortdesc,rjn_projectid,rjn_siteid,rjn_entityid,rjn_name
+ Maxson,M100FI,M100FI.UNIT0@NET0,2308,INFLU,47EE48FD-904F-4EDA-9ED9-C622D1944194,64c5c5ac-04ca-4a08-bdce-5327e4b21bc5,199,Influent
+ Maxson,FI8001,FI8001.UNIT0@NET0,8528,EFF,47EE48FD-904F-4EDA-9ED9-C622D1944194,64c5c5ac-04ca-4a08-bdce-5327e4b21bc5,198,Effluent
+ Maxson,M310LI,M310LI.UNIT0@NET0,2382,WELL,47EE48FD-904F-4EDA-9ED9-C622D1944194,64c5c5ac-04ca-4a08-bdce-5327e4b21bc5,228,wet well level
@@ -0,0 +1,4 @@
+ zd,idcs,iess,sid,shortdesc,rjn_projectid,rjn_siteid,rjn_entityid,rjn_name
+ WWTF,I-5005A,I-5005A.UNIT1@NET1,5392,INFLU,47EE48FD-904F-4EDA-9ED9-C622D1944194,eefe228a-39a2-4742-a9e3-c07314544ada,200,Influent
+ WWTF,FI-405/415,FI-405/415.UNIT1@NET1,3550,EFF,47EE48FD-904F-4EDA-9ED9-C622D1944194,eefe228a-39a2-4742-a9e3-c07314544ada,197,Effluent
+ WWTF,I-0300A,I-0300A.UNIT1@NET1,0000,WELL,47EE48FD-904F-4EDA-9ED9-C622D1944194,eefe228a-39a2-4742-a9e3-c07314544ada,229,Wet Well
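These two CSVs map EDS points to RJN project, site, and entity IDs, and the daemon further down loads them with `load_query_rows_from_csv_files` and groups the rows by the `zd` column with `group_queries_by_col`. A rough stdlib approximation of that loading step follows; the real helpers live in `pipeline/queriesmanager.py` and may differ.

```python
# Rough approximation of loading the points CSVs and grouping rows by 'zd';
# not the package's implementation.
import csv
from collections import defaultdict
from pathlib import Path

def load_rows(paths: list[Path]) -> list[dict]:
    rows: list[dict] = []
    for path in paths:
        with path.open(newline="") as f:
            rows.extend(csv.DictReader(f))
    return rows

def group_by(rows: list[dict], col: str) -> dict[str, list[dict]]:
    grouped: dict[str, list[dict]] = defaultdict(list)
    for row in rows:
        grouped[row[col]].append(row)  # keys here would be "Maxson" and "WWTF"
    return dict(grouped)
```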
@@ -0,0 +1,20 @@
+ {
+     "Maxson": {
+         "timestamps": {
+             "last_success": "2025-07-31 22:31:07",
+             "last_attempt": "2025-07-31 22:31:07"
+         }
+     },
+     "RJN": {
+         "timestamps": {
+             "last_success": "2025-07-22 10:08:40",
+             "last_attempt": "2025-08-23 03:43:15"
+         }
+     },
+     "WWTF": {
+         "timestamps": {
+             "last_success": "2025-07-20 15:18:24",
+             "last_attempt": "2025-07-20 15:26:03"
+         }
+     }
+ }
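This file records the last successful and last attempted transfer per API key; the daemon below reads the `RJN` entry to choose the start of the next pull window and writes it back after a successful post. An illustrative sketch of reading and updating such a tracker (the actual QueriesManager methods may differ):

```python
# Illustrative only: reading and updating a tracker shaped like
# timestamps_success.json; QueriesManager's real methods may differ.
import json
from datetime import datetime
from pathlib import Path

TS_FMT = "%Y-%m-%d %H:%M:%S"

def get_last_success(path: Path, api_id: str) -> datetime:
    data = json.loads(path.read_text())
    return datetime.strptime(data[api_id]["timestamps"]["last_success"], TS_FMT)

def update_success(path: Path, api_id: str, when: datetime) -> None:
    data = json.loads(path.read_text())
    stamps = data.setdefault(api_id, {}).setdefault("timestamps", {})
    stamps["last_success"] = stamps["last_attempt"] = when.strftime(TS_FMT)
    path.write_text(json.dumps(data, indent=2))
```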
File without changes
@@ -0,0 +1,212 @@
+ # workspaces/eds_to_rjn/scripts/daemon_runner.py
+ import schedule, time
+ import logging
+ import csv
+ from datetime import datetime
+
+ from src.pipeline.api.eds import EdsClient, identify_relevant_tables
+ from src.pipeline.api.rjn import RjnClient
+ from src.pipeline import helpers
+ from src.pipeline.env import SecretConfig
+ from src.pipeline.workspace_manager import WorkspaceManager
+ from src.pipeline.queriesmanager import QueriesManager
+ from src.pipeline.queriesmanager import load_query_rows_from_csv_files, group_queries_by_col
+ from src.pipeline.time_manager import TimeManager
+
+ logger = logging.getLogger(__name__)
+ logger.setLevel(logging.INFO)
+
+ #def save_tabular_trend_data_to_log_file(project_id, entity_id, endtime: int, workspace_manager, timestamps: list[int], values: list[float]):
+ def save_tabular_trend_data_to_log_file(project_id, entity_id, endtime, workspace_manager, timestamps, values):
+     ### save file for log
+     timestamps_str = [TimeManager(ts).as_formatted_date_time() for ts in timestamps]
+     endtime_iso = TimeManager(endtime).as_safe_isoformat_for_filename()
+     filename = f"rjn_data_{project_id}_{entity_id}_{endtime_iso}.csv"
+     log_dir = workspace_manager.get_logs_dir()
+     filepath = log_dir / filename
+     logger.info(f"filepath = {filepath}")
+
+     with open(filepath, "w", newline="") as csvfile:
+         writer = csv.writer(csvfile)
+         writer.writerow(["timestamp", "value"])  # Header
+         #for ts, val in zip(timestamps, values):
+         for ts, val in zip(timestamps_str, values):
+             writer.writerow([ts, val])
+
+ def run_hourly_tabular_trend_eds_to_rjn(test = False):
+
+
+     #test_connection_to_internet()
+
+     workspace_name = 'eds_to_rjn' # workspace_name = WorkspaceManager.identify_default_workspace_name()
+     workspace_manager = WorkspaceManager(workspace_name)
+     queries_manager = QueriesManager(workspace_manager)
+     queries_file_path_list = workspace_manager.get_default_query_file_paths_list() # use default identified by the default-queries.toml file
+     logger.debug(f"queries_file_path_list = {queries_file_path_list}")
+
+     queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)
+     queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered, 'zd')
+     secrets_dict = SecretConfig.load_config(secrets_file_path = workspace_manager.get_secrets_file_path())
+     sessions_eds = {}
+
+     # --- Prepare Maxson session_eds
+     base_url_maxson = secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("url").rstrip("/")
+     session_maxson = EdsClient.login_to_session(api_url = base_url_maxson,
+                                                 username = secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("username"),
+                                                 password = secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("password"))
+     session_maxson.base_url = base_url_maxson
+     session_maxson.zd = secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("zd")
+     sessions_eds.update({"Maxson": session_maxson})
+
+
+     # --- Prepare Stiles session_eds
+     try:
+         # REST API access fails due to firewall blocking the port
+         # So, alternatively, if this fails, encourage direct MariaDB access, with files at E:\SQLData\stiles\
+         base_url_stiles = secrets_dict.get("eds_apis", {}).get("WWTP", {}).get("url").rstrip("/")
+         session_stiles = EdsClient.login_to_session(api_url = base_url_stiles,
+                                                     username = secrets_dict.get("eds_apis", {}).get("WWTP", {}).get("username"),
+                                                     password = secrets_dict.get("eds_apis", {}).get("WWTP", {}).get("password"))
+         session_stiles.base_url = base_url_stiles
+         session_stiles.zd = secrets_dict.get("eds_apis", {}).get("WWTP", {}).get("zd")
+         sessions_eds.update({"WWTP": session_stiles})
+     except:
+         session_stiles = None # possibly redundant for login_to_session() output
+         sessions_eds.update({"WWTF": session_stiles})
+
+     base_url_rjn = secrets_dict.get("contractor_apis", {}).get("RJN", {}).get("url").rstrip("/")
+     session_rjn = RjnClient.login_to_session(api_url = base_url_rjn,
+                                              client_id = secrets_dict.get("contractor_apis", {}).get("RJN", {}).get("client_id"),
+                                              password = secrets_dict.get("contractor_apis", {}).get("RJN", {}).get("password"))
+     if session_rjn is None:
+         logger.warning("RJN session not established. Skipping RJN-related data transmission.\n")
+         if test is False:
+             return
+     else:
+         logger.info("RJN session established successfully.")
+         session_rjn.base_url = base_url_rjn
+
+     # Discern the time range to use
+     starttime = queries_manager.get_most_recent_successful_timestamp(api_id="RJN")
+     logger.info(f"queries_manager.get_most_recent_successful_timestamp(), key = {'RJN'}")
+     endtime = helpers.get_now_time_rounded(workspace_manager)
+     starttime_ts = TimeManager(starttime).as_unix()
+     endtime_ts = TimeManager(endtime).as_unix()
+     logger.info(f"starttime = {starttime}")
+     logger.info(f"endtime = {endtime}")
+
+     #key = "Maxson"
+     #session = sessions_eds[key]
+
+     ## To do: start using pandas, for the sake of clarity of manipulation 15 Aug 2025
+     for key_eds, session_eds in sessions_eds.items():
+         point_list = [row['iess'] for row in queries_defaultdictlist_grouped_by_session_key.get(key_eds, [])]
+         point_list_sid = [row['sid'] for row in queries_defaultdictlist_grouped_by_session_key.get(key_eds, [])]
+
+         rjn_projectid_list = [row['rjn_projectid'] for row in queries_defaultdictlist_grouped_by_session_key.get(key_eds, [])]
+         rjn_entityid_list = [row['rjn_entityid'] for row in queries_defaultdictlist_grouped_by_session_key.get(key_eds, [])]
+
+         if session_eds is None and not EdsClient.this_computer_is_an_enterprise_database_server(secrets_dict, key_eds):
+             logger.warning(f"Skipping EDS session for {key_eds} — session_eds is None and this computer is not an enterprise database server.")
+             continue
+         if session_eds is None and EdsClient.this_computer_is_an_enterprise_database_server(secrets_dict, key_eds):
+             relevant_tables = identify_relevant_tables(key_eds, starttime_ts, endtime_ts, secrets_dict)
+             results = EdsClient.access_database_files_locally(key_eds, starttime_ts, endtime_ts, point=point_list_sid, tables=relevant_tables)
+         else:
+             api_url = session_eds.base_url
+             request_id = EdsClient.create_tabular_request(session_eds, api_url, starttime_ts, endtime_ts, points=point_list)
+             logger.info(f"request_id = {request_id}")
+             EdsClient.wait_for_request_execution_session(session_eds, api_url, request_id)
+             results = EdsClient.get_tabular_trend(session_eds, request_id, point_list)
+             #results = EdsClient.get_tabular_mod(session_eds, request_id, point_list)
+             session_eds.post(f'{api_url}/logout', verify=False)
+         #print(f"len(results) = {len(results)}")
+
+         for idx, iess in enumerate(point_list):
+             #print(f"rows = {rows}")
+             timestamps = []
+             values = []
+             entity_id = rjn_entityid_list[idx]
+             project_id = rjn_projectid_list[idx]
+             print(f"\nentity_id = {entity_id}")
+             print(f"iess = {iess}")
+             print(f"project_id = {project_id}")
+
+             for row in results[idx]:
+                 #print(f"row = {row}")
+                 #EdsClient.print_point_info_row(row)
+
+                 dt = datetime.fromtimestamp(row["ts"])
+                 timestamp_str = helpers.round_datetime_to_nearest_past_five_minutes(dt).isoformat(timespec='seconds')
+                 #if row['quality'] == 'G':
+                 timestamps.append(timestamp_str)
+                 value = round(row["value"], 5)
+                 # QUICK AND DIRTY CONVERSION FOR WWTF WETWELL LEVEL TO FEET
+                 if iess == "M310LI.UNIT0@NET0":
+                     value = (value/12)+181.25 # convert inches of wet well level to feet above mean sea level
+                 values.append(value) # unrounded values fail to post
+
+             logger.info(f"len(timestamps) = {len(timestamps)}")
+             if len(timestamps)>0:
+                 logger.info(f"timestamps[0] = {timestamps[0]}")
+                 logger.info(f"timestamps[-1] = {timestamps[-1]}")
+             else:
+                 logger.info(f"No timestamps retrieved. Transmission to RJN skipped for {iess}.")
+             if timestamps and values:
+
+                 # Send data to RJN
+                 if not test:
+                     rjn_data_transmission_succeeded = RjnClient.send_data_to_rjn(
+                         session_rjn,
+                         base_url = session_rjn.base_url,
+                         entity_id = entity_id,
+                         project_id = project_id,
+                         timestamps=timestamps,
+                         values=values
+                     )
+
+                     if rjn_data_transmission_succeeded:
+                         queries_manager.update_success(api_id="RJN", success_time=endtime)
+                         logger.info(f"RJN data transmission succeeded for entity_id {entity_id}, project_id {project_id}.")
+                         save_tabular_trend_data_to_log_file(project_id, entity_id, endtime, workspace_manager, timestamps, values)
+                 else:
+                     print("[TEST] RjnClient.send_data_to_rjn() skipped")
+
+
+ def setup_schedules():
+     testing = False
+     if not testing:
+         schedule.every().hour.do(run_hourly_tabular_trend_eds_to_rjn)
+     else:
+         schedule.every().second.do(run_hourly_tabular_trend_eds_to_rjn)
+
+ def main():
+     #logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+     task_scheduler_is_ready_to_handle_hourly_task_scheduling = True
+     if task_scheduler_is_ready_to_handle_hourly_task_scheduling:
+         run_hourly_tabular_trend_eds_to_rjn()
+     else:
+         setup_schedules()
+
+ def start_daemon():
+     logging.info(f"Daemon started at {datetime.now()} and running...")
+     setup_schedules()
+     while True:
+         schedule.run_pending()
+         time.sleep(1)
+
+ if __name__ == "__main__":
+     import sys
+     cmd = sys.argv[1] if len(sys.argv) > 1 else "default"
+
+     if cmd == "main":
+         main()
+     elif cmd == "once":
+         run_hourly_tabular_trend_eds_to_rjn()
+     elif cmd == "test":
+         run_hourly_tabular_trend_eds_to_rjn(test=True)
+     else:
+         print("Usage options: \n"
+               "poetry run python -m workspaces.eds_to_rjn.scripts.daemon_runner main \n"
+               "poetry run python -m workspaces.eds_to_rjn.scripts.daemon_runner once \n"
+               "poetry run python -m workspaces.eds_to_rjn.scripts.daemon_runner test ")
@@ -0,0 +1,24 @@
+ # secrets.yaml
+ Access will not work without a secrets.yaml file in /pipeline/workspaces/your-workspace-name/config/secrets.yaml
+
+ secrets.yaml is registered with the .gitignore for security.
+
+ // Example secrets.yaml:
+ ```
+ eds_apis:
+   MyServer1:
+     url: "http://127.0.0.1:43084/api/v1/"
+     username: "admin"
+     password: ""
+   MyServer2:
+     url: "http://some-ip-address:port/api/v1/"
+     username: "admin"
+     password: ""
+
+ contractor_apis:
+   MySpecialContractor:
+     url: "https://contractor-api.com/v1/special/"
+     client_id: "special-user"
+     password: "2685steam"
+ ```
+ ```
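The YAML the README describes is what `SecretConfig.load_config` consumes in the daemon. A hedged sketch of loading it with PyYAML follows; the real loader is in `pipeline/env.py` and may differ, and the path shown is only an assumption (the bundled example sits at `workspaces/eds_to_rjn/secrets/secrets-example.yaml`, so adjust to wherever the workspace actually expects the real file).

```python
# Hedged sketch of loading the secrets file described above with PyYAML (assumed
# dependency); not the package's SecretConfig.load_config.
from pathlib import Path
import yaml

def load_secrets(secrets_file_path: Path) -> dict:
    with secrets_file_path.open() as f:
        return yaml.safe_load(f) or {}

secrets = load_secrets(Path("workspaces/eds_to_rjn/secrets/secrets.yaml"))
print(secrets.get("eds_apis", {}).get("MyServer1", {}).get("url"))
```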
@@ -0,0 +1,15 @@
+ eds_apis:
+   MyServer1:
+     url: "http://127.0.0.1:43084/api/v1/"
+     username: "admin"
+     password: ""
+   MyServer2:
+     url: "http://some-ip-address:port/api/v1/"
+     username: "admin"
+     password: ""
+
+ contractor_apis:
+   MySpecialContractor:
+     url: "https://contractor-api.com/v1/special/"
+     client_id: "special-user"
+     password: "2685steam"
File without changes