pipeline-eds 0.2.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pipeline/__init__.py +4 -0
- pipeline/__main__.py +1 -0
- pipeline/api/__init__.py +0 -0
- pipeline/api/eds.py +980 -0
- pipeline/api/rjn.py +157 -0
- pipeline/api/status_api.py +9 -0
- pipeline/calls.py +108 -0
- pipeline/cli.py +282 -0
- pipeline/configrationmanager.py +22 -0
- pipeline/decorators.py +13 -0
- pipeline/env.py +61 -0
- pipeline/environment.py +59 -0
- pipeline/gui_fastapi_plotly_live.py +78 -0
- pipeline/gui_mpl_live.py +113 -0
- pipeline/helpers.py +125 -0
- pipeline/logging_setup.py +45 -0
- pipeline/pastehelpers.py +10 -0
- pipeline/philosophy.py +62 -0
- pipeline/plotbuffer.py +21 -0
- pipeline/points_loader.py +19 -0
- pipeline/queriesmanager.py +122 -0
- pipeline/time_manager.py +211 -0
- pipeline/workspace_manager.py +253 -0
- pipeline_eds-0.2.4.dist-info/LICENSE +14 -0
- pipeline_eds-0.2.4.dist-info/METADATA +238 -0
- pipeline_eds-0.2.4.dist-info/RECORD +62 -0
- pipeline_eds-0.2.4.dist-info/WHEEL +4 -0
- pipeline_eds-0.2.4.dist-info/entry_points.txt +6 -0
- workspaces/default-workspace.toml +3 -0
- workspaces/eds_to_rjn/__init__.py +0 -0
- workspaces/eds_to_rjn/code/__init__.py +0 -0
- workspaces/eds_to_rjn/code/aggregator.py +84 -0
- workspaces/eds_to_rjn/code/collector.py +60 -0
- workspaces/eds_to_rjn/code/sanitizer.py +40 -0
- workspaces/eds_to_rjn/code/storage.py +16 -0
- workspaces/eds_to_rjn/configurations/config_time.toml +11 -0
- workspaces/eds_to_rjn/configurations/configuration.toml +2 -0
- workspaces/eds_to_rjn/exports/README.md +7 -0
- workspaces/eds_to_rjn/exports/aggregate/README.md +7 -0
- workspaces/eds_to_rjn/exports/aggregate/live_data - Copy.csv +355 -0
- workspaces/eds_to_rjn/exports/aggregate/live_data_EFF.csv +17521 -0
- workspaces/eds_to_rjn/exports/aggregate/live_data_INF.csv +17521 -0
- workspaces/eds_to_rjn/exports/export_eds_points_neo.txt +11015 -0
- workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.csv +8759 -0
- workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.xlsx +0 -0
- workspaces/eds_to_rjn/exports/manual_effluent.csv +8759 -0
- workspaces/eds_to_rjn/exports/manual_influent.csv +8759 -0
- workspaces/eds_to_rjn/exports/manual_wetwell.csv +8761 -0
- workspaces/eds_to_rjn/history/time_sample.txt +0 -0
- workspaces/eds_to_rjn/imports/zdMaxson_idcsD321E_sid11003.toml +14 -0
- workspaces/eds_to_rjn/imports/zdMaxson_idcsFI8001_sid8528.toml +14 -0
- workspaces/eds_to_rjn/imports/zdMaxson_idcsM100FI_sid2308.toml +14 -0
- workspaces/eds_to_rjn/imports/zdMaxson_idcsM310LI_sid2382.toml +14 -0
- workspaces/eds_to_rjn/queries/default-queries.toml +4 -0
- workspaces/eds_to_rjn/queries/points-maxson.csv +4 -0
- workspaces/eds_to_rjn/queries/points-stiles.csv +4 -0
- workspaces/eds_to_rjn/queries/timestamps_success.json +20 -0
- workspaces/eds_to_rjn/scripts/__init__.py +0 -0
- workspaces/eds_to_rjn/scripts/daemon_runner.py +212 -0
- workspaces/eds_to_rjn/secrets/README.md +24 -0
- workspaces/eds_to_rjn/secrets/secrets-example.yaml +15 -0
- workspaces/eds_to_termux/..txt +0 -0
pipeline/api/eds.py
ADDED
@@ -0,0 +1,980 @@
from datetime import datetime
import logging
import requests
import time
from pprint import pprint
from pathlib import Path
import os
import inspect
import subprocess
import platform
import mysql.connector
from functools import lru_cache

from src.pipeline.env import SecretConfig
from src.pipeline.workspace_manager import WorkspaceManager
from src.pipeline import helpers
from src.pipeline.decorators import log_function_call
from src.pipeline.time_manager import TimeManager

logger = logging.getLogger(__name__)
#logger.setLevel(logging.INFO)

class EdsClient:

    @staticmethod
    def get_license(session, api_url: str):
        response = session.get(f'{api_url}/license', json={}, verify=False).json()
        return response

    @staticmethod
    def print_point_info_row(row):
        # Desired keys to print, with optional formatting
        keys_to_print = {
            "iess": lambda v: f"iess:{v}",
            "ts": lambda v: f"dt:{datetime.fromtimestamp(v)}",
            "un": lambda v: f"un:{v}",
            "value": lambda v: f"av:{round(v, 2)}",
            "shortdesc": lambda v: str(v),
        }

        parts = []
        for key, formatter in keys_to_print.items():
            try:
                parts.append(formatter(row[key]))
            except (KeyError, TypeError, ValueError):
                continue  # Skip missing or malformed values

        print(", ".join(parts))

    @staticmethod
    #def get_points_live_mod(session, iess: str):
    def get_points_live_mod(session, iess):
        # please make this session based rather than header based
        "Access the live value of a point from the EDS, based on the zd/api_id value (e.g. Maxson, WWTF, Server)."
        #api_url = str(session.custom_dict["url"])
        api_url = str(session.base_url)

        query = {
            'filters': [{
                'iess': [iess],
                'tg': [0, 1],
            }],
            'order': ['iess']
        }
        response = session.post(f"{api_url}/points/query", json=query, verify=False).json()
        #print(f"response = {response}")

        if not response or "points" not in response:
            return None

        points = response["points"]
        if len(points) != 1:
            raise ValueError(f"Expected 1 point for iess='{iess}', got {len(points)}")

        return points[0]

        '''
        points_datas = response.get("points", [])
        if not points_datas:
            raise ValueError(f"No data returned for iess='{iess}': len(points) == 0")
        elif len(points_datas) != 1:
            raise ValueError(f"Expected exactly one point, got {len(points_datas)}")
        else:
            point_data = points_datas[0]  # You expect exactly one point usually
            #print(f"point_data = {point_data}")
            return point_data
        '''
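
    # Illustrative usage sketch (assumes a session prepared as in
    # _demo_eds_start_session_CoM_WWTPs below, with .base_url attached to the
    # requests.Session; the iess value is one this package already queries):
    #   point = EdsClient.get_points_live_mod(session, "M310LI.UNIT0@NET0")
    #   EdsClient.print_point_info_row(point)
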
    @staticmethod
    def get_tabular_mod(session, req_id, point_list):
        results = [[] for _ in range(len(point_list))]
        while True:
            #api_url = session.custom_dict["url"]
            api_url = str(session.base_url)
            response = session.get(f'{api_url}/trend/tabular?id={req_id}', verify=False).json()
            for chunk in response:
                if chunk['status'] == 'TIMEOUT':
                    raise RuntimeError('timeout')

                for idx, samples in enumerate(chunk['items']):
                    results[idx] += samples

                if chunk['status'] == 'LAST':
                    return results

    @staticmethod
    def get_tabular_trend(session, req_id, point_list):
        #print(f"point_list = {point_list}")
        results = [[] for _ in range(len(point_list))]
        while True:
            #api_url = session.custom_dict["url"]
            api_url = str(session.base_url)
            response = session.get(f'{api_url}/trend/tabular?id={req_id}', verify=False).json()

            for chunk in response:
                if chunk['status'] == 'TIMEOUT':
                    raise RuntimeError('timeout')

                for idx, samples in enumerate(chunk['items']):
                    for sample in samples:
                        #print(f"sample = {sample}")
                        structured = {
                            "ts": sample[0],       # Timestamp
                            "value": sample[1],    # Measurement value
                            "quality": sample[2],  # Quality flag
                        }
                        results[idx].append(structured)

                if chunk['status'] == 'LAST':
                    return results
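
    # Shape note: get_tabular_mod accumulates the raw [ts, value, quality] triples as the
    # API returns them, while get_tabular_trend restructures each sample into
    # {"ts": ..., "value": ..., "quality": ...} dicts, the form the demos below consume.
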
    @staticmethod
    #def get_points_export(session, iess_filter: str = ''):
    def get_points_export(session, iess_filter=''):
        #api_url = session.custom_dict["url"]
        #zd = session.custom_dict["zd"]
        api_url = str(session.base_url)
        zd = str(session.zd)
        order = 'iess'
        query = '?zd={}&iess={}&order={}'.format(zd, iess_filter, order)
        request_url = f"{api_url}/points/export" + query
        response = session.get(request_url, json={}, verify=False)
        #print(f"Status Code: {response.status_code}, Content-Type: {response.headers.get('Content-Type')}, Body: {response.text[:500]}")
        decoded_str = response.text
        return decoded_str

    @staticmethod
    def save_points_export(decoded_str, export_file_path):
        lines = decoded_str.strip().splitlines()

        with open(export_file_path, "w", encoding="utf-8") as f:
            for line in lines:
                f.write(line + "\n")  # Save each line in the text file

    @staticmethod
    def login_to_session(api_url, username, password):
        session = requests.Session()

        data = {'username': username, 'password': password, 'type': 'script'}
        response = session.post(f"{api_url}/login", json=data, verify=False).json()
        #print(f"response = {response}")
        session.headers['Authorization'] = f"Bearer {response['sessionId']}"
        return session
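
    # Minimal login sketch (the host is a placeholder; port 43084 is the EDS REST port
    # mentioned for Stiles below, and the credentials normally come from secrets.yaml):
    #   session = EdsClient.login_to_session("http://eds-host:43084", "user", "pass")
    #   session.base_url = "http://eds-host:43084"
    #   session.zd = "Maxson"
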
    @staticmethod
    #def create_tabular_request(session: object, api_url: str, starttime: int, endtime: int, points: list):
    def create_tabular_request(session, api_url, starttime, endtime, points, step_seconds=300):

        data = {
            'period': {
                'from': starttime,
                'till': endtime,  # must be of type int, like: int(datetime(YYYY, MM, DD, HH).timestamp()),
            },

            'step': step_seconds,  # five minutes by default
            'items': [{
                'pointId': {'iess': p},
                'shadePriority': 'DEFAULT',
                'function': 'AVG'
            } for p in points],
        }
        try:
            response = session.post(f"{api_url}/trend/tabular", json=data, verify=False).json()
            return response['id']
            #print(f"response = {response}")
        except (ValueError, KeyError):
            # Response body was not JSON, or carried no 'id'; re-issue the request and
            # fall through with no request id.
            #raise ValueError(f"JSON not returned with {inspect.currentframe().f_code.co_name} response")
            response = session.post(f"{api_url}/trend/tabular", json=data, verify=False)
            #print(f"response = {response}")
            return None

    @staticmethod
    def wait_for_request_execution_session(session, api_url, req_id):
        st = time.time()
        while True:
            time.sleep(1)
            res = session.get(f'{api_url}/requests?id={req_id}', verify=False).json()
            status = res[str(req_id)]
            if status['status'] == 'FAILURE':
                raise RuntimeError('request [{}] failed: {}'.format(req_id, status['message']))
            elif status['status'] == 'SUCCESS':
                break
            elif status['status'] == 'EXECUTING':
                print('request [{}] progress: {:.2f}\n'.format(req_id, time.time() - st))

        print('request [{}] executed in: {:.3f} s\n'.format(req_id, time.time() - st))
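
    # Typical request lifecycle, as exercised by the demos at the bottom of this module:
    #   req_id = EdsClient.create_tabular_request(session, api_url, start, end, points=point_list)
    #   EdsClient.wait_for_request_execution_session(session, api_url, req_id)
    #   results = EdsClient.get_tabular_trend(session, req_id, point_list)
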
    @staticmethod
    #def this_computer_is_an_enterprise_database_server(secrets_dict: dict, session_key: str) -> bool:
    def this_computer_is_an_enterprise_database_server(secrets_dict, session_key):
        """
        Check if the current computer is an enterprise database server.
        This is determined by checking if the IP address matches the configured EDS database key.
        """
        import socket
        from urllib.parse import urlparse
        from src.pipeline.helpers import get_lan_ip_address_of_current_machine
        # Check if the session_key exists in the secrets_dict
        url = secrets_dict["eds_apis"][session_key]["url"]
        parsed = urlparse(url)
        hostname = parsed.hostname  # extracts just "172.19.4.128"
        ip = socket.gethostbyname(hostname)
        bool_ip = (ip == get_lan_ip_address_of_current_machine())
        logger.info(f"Checking if this computer is enterprise database server: {bool_ip}")
        return bool_ip
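
    # Expected secrets layout (mirroring the bundled secrets-example.yaml; key names are
    # inferred from the lookups in this module):
    #   eds_apis:
    #     Maxson: {url: ..., username: ..., password: ..., zd: ...}
    #   eds_dbs:
    #     WWTF: {...mysql.connector.connect() kwargs...}
    #     WWTF-config: {storage_path: "E:/SQLData/wwtf/"}
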
    @staticmethod
    def get_graphics_list(session, api_url):
        """Return list of graphics from EDS session."""
        resp = session.get(f"{api_url}/graphics")  # api_url passed in
        resp.raise_for_status()
        return resp.json()

    @staticmethod
    def get_graphic_export(session, api_url, graphic_file):
        """Fetch a graphic as PNG bytes."""
        resp = session.get(f"{api_url}/graphics/{graphic_file}/export", params={"format": "png"})
        resp.raise_for_status()
        return resp.content

    @staticmethod
    def save_graphic_export(graphic_bytes, output_file_path):
        os.makedirs(os.path.dirname(output_file_path), exist_ok=True)
        with open(output_file_path, "wb") as f:
            f.write(graphic_bytes)

    @staticmethod
    #def access_database_files_locally(
    #    session_key: str,
    #    starttime: int,
    #    endtime: int,
    #    point: list[int],
    #    tables: list[str] | None = None
    #) -> list[list[dict]]:
    def access_database_files_locally(
        session_key,
        starttime,
        endtime,
        point,
        tables
    ):
        """
        Access MariaDB data directly by querying all MyISAM tables with .MYD files
        modified in the given time window, filtering by sensor ids in 'point'.

        If 'tables' is provided, only query those tables; otherwise fall back to the most recent table.

        Returns a list (per sensor id) of dicts with keys 'ts', 'value', 'quality'.
        """

        logger.info("Accessing MariaDB directly — local SQL mode enabled.")
        workspace_name = 'eds_to_rjn'
        workspace_manager = WorkspaceManager(workspace_name)
        secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())
        #full_config = secrets_dict["eds_dbs"][session_key]
        #conn_config = {k: v for k, v in full_config.items() if k != "storage_path"}

        conn_config = secrets_dict["eds_dbs"][session_key]
        results = []

        try:
            logger.info("Attempting: mysql.connector.connect(**conn_config)")
            conn = mysql.connector.connect(**conn_config)
            cursor = conn.cursor(dictionary=True)

            # Determine which tables to query
            if tables is None:
                most_recent_table = get_most_recent_table(cursor, session_key.lower())
                if not most_recent_table:
                    logger.warning("No recent tables found.")
                    return [[] for _ in point]
                tables_to_query = [most_recent_table]
            else:
                tables_to_query = tables

            for table_name in tables_to_query:
                if not table_has_ts_column(conn, table_name, db_type="mysql"):
                    logger.warning(f"Skipping table '{table_name}': no 'ts' column.")
                    continue

                for point_id in point:
                    #logger.info(f"Querying table {table_name} for sensor id {point_id}")
                    query = f"""
                        SELECT ts, ids, tss, stat, val FROM `{table_name}`
                        WHERE ts BETWEEN %s AND %s AND ids = %s
                        ORDER BY ts ASC
                    """
                    cursor.execute(query, (starttime, endtime, point_id))
                    full_rows = []
                    for row in cursor:
                        quality_flags = decode_stat(row["stat"])
                        quality_code = quality_flags[0][2] if quality_flags else "N"
                        full_rows.append({
                            "ts": row["ts"],
                            "value": row["val"],
                            "quality": quality_code,
                        })
                    full_rows.sort(key=lambda x: x["ts"])
                    results.append(full_rows)

        except mysql.connector.errors.DatabaseError as db_err:
            if "Can't connect to MySQL server" in str(db_err):
                logger.error("Local database access failed: Please run this code on the proper EDS server where the local MariaDB is accessible.")
                # Optionally:
                print("ERROR: This code must be run on the proper EDS server for local database access to work.")
                return [[] for _ in point]  # return list of empty lists, one per point
            else:
                raise  # re-raise other DB errors
        except Exception as e:
            logger.error(f"Unexpected error accessing local database: {e}")
            # hitting this in termux
            raise
        finally:
            # cleanup cursor/connection if they exist
            try:
                cursor.close()
                conn.close()
            except Exception:
                pass

        logger.info(f"Successfully retrieved data for {len(point)} point(s)")
        return results

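# Each inner list returned by access_database_files_locally holds dicts shaped like
# {"ts": 1700000000, "value": 12.34, "quality": "G"} (the literal values here are
# illustrative), matching the per-point shape produced by EdsClient.get_tabular_trend.
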
def table_has_ts_column(conn, table_name, db_type="mysql"):
    if db_type == "sqlite":
        with conn.cursor() as cur:
            # your sqlite logic here
            cur.execute(f"PRAGMA table_info({table_name});")
            return any(row[1] == "ts" for row in cur.fetchall())
    elif db_type == "mysql":
        with conn.cursor() as cur:
            cur.execute(f"SHOW COLUMNS FROM `{table_name}` LIKE 'ts'")
            result = cur.fetchall()
            return len(result) > 0
    else:
        raise ValueError(f"Unsupported database type: {db_type}")

#def identify_relevant_MyISM_tables(session_key: str, starttime: int, endtime: int, secrets_dict: dict) -> list:
# 3.8-safe, no hints
def identify_relevant_MyISM_tables(session_key, starttime, endtime, secrets_dict):
    #
    # Use the secrets file to control where your database can be found
    try:
        storage_dir = secrets_dict["eds_dbs"][str(session_key + "-config")]["storage_path"]
    except KeyError:
        logging.warning("Use the secrets.yaml file to set the local database folder. Something like, storage_path: 'E:/SQLData/wwtf/'")
        return []
    # Collect matching table names based on file mtime
    matching_tables = []

    if False:
        for fname in os.listdir(storage_dir):
            fpath = os.path.join(storage_dir, fname)
            if not os.path.isfile(fpath):
                continue
            mtime = os.path.getmtime(fpath)
            if starttime <= mtime <= endtime:
                table_name, _ = os.path.splitext(fname)
                if 'pla' in table_name:
                    matching_tables.append(table_name)

    '''
    # Instead of os.path.join + isfile + getmtime every time...
    # Use `os.scandir`, which gives all of that in one go and is much faster:
    with os.scandir(storage_dir) as it:
        for entry in it:
            if entry.is_file():
                mtime = entry.stat().st_mtime
                if starttime <= mtime <= endtime and 'pla' in entry.name:
                    table_name, _ = os.path.splitext(entry.name)
                    matching_tables.append(table_name)
    '''
    # Efficient, sorted, filtered scan
    sorted_entries = sorted(
        (entry for entry in os.scandir(storage_dir) if entry.is_file()),
        key=lambda e: e.stat().st_mtime,
        reverse=True
    )

    for entry in sorted_entries:
        mtime = entry.stat().st_mtime
        if starttime <= mtime <= endtime and 'pla' in entry.name:
            table_name, _ = os.path.splitext(entry.name)
            matching_tables.append(table_name)

    #print("Matching tables:", matching_tables)
    return matching_tables

def identify_relevant_tables(session_key, starttime, endtime, secrets_dict):
    try:
        conn_config = secrets_dict["eds_dbs"][session_key]
        conn = mysql.connector.connect(**conn_config)
        cursor = conn.cursor(dictionary=True)
        # Use INFORMATION_SCHEMA instead of filesystem
        #return get_ten_most_recent_tables(cursor, conn_config["database"])
        return get_n_most_recent_tables(cursor, conn_config["database"], n=80)
    except mysql.connector.Error:
        logger.warning("Falling back to filesystem scan — DB not accessible.")
        return identify_relevant_MyISM_tables(session_key, starttime, endtime, secrets_dict)

def get_most_recent_table(cursor, db_name, prefix='pla_'):
    query = """
        SELECT TABLE_NAME
        FROM INFORMATION_SCHEMA.TABLES
        WHERE TABLE_SCHEMA = %s AND TABLE_NAME LIKE %s
        ORDER BY TABLE_NAME DESC
        LIMIT 1;
    """
    cursor.execute(query, (db_name, f'{prefix}%'))
    result = cursor.fetchone()
    return result['TABLE_NAME'] if result else None

#def get_ten_most_recent_tables(cursor, db_name, prefix='pla_') -> list[str]:
def get_ten_most_recent_tables(cursor, db_name, prefix='pla_'):
    """
    Get the 10 most recent tables with the given prefix.
    Returns a LIST OF STRINGS, not a single string.
    """
    query = """
        SELECT TABLE_NAME
        FROM INFORMATION_SCHEMA.TABLES
        WHERE TABLE_SCHEMA = %s AND TABLE_NAME LIKE %s
        ORDER BY TABLE_NAME DESC
        LIMIT 10;
    """
    cursor.execute(query, (db_name, f'{prefix}%'))
    results = cursor.fetchall()

    # Extract table names as individual strings
    table_names = [result['TABLE_NAME'] for result in results]

    logger.info(f"Found {len(table_names)} recent tables with prefix '{prefix}': {table_names}")
    return table_names  # This is a LIST of strings: ['pla_68a98310', 'pla_68a97500', ...]

def get_n_most_recent_tables(cursor, db_name, n, prefix='pla_'):
    """
    Get the n most recent tables with the given prefix.
    Returns a LIST OF STRINGS, not a single string.
    """
    query = f"""
        SELECT TABLE_NAME
        FROM INFORMATION_SCHEMA.TABLES
        WHERE TABLE_SCHEMA = %s AND TABLE_NAME LIKE %s
        ORDER BY TABLE_NAME DESC
        LIMIT {n};
    """
    cursor.execute(query, (db_name, f'{prefix}%'))
    results = cursor.fetchall()

    # Extract table names as individual strings
    table_names = [result['TABLE_NAME'] for result in results]

    logger.info(f"Found {len(table_names)} recent tables with prefix '{prefix}': {table_names}")
    return table_names  # This is a LIST of strings: ['pla_68a98310', 'pla_68a97500', ...]

@lru_cache()
def get_stat_alarm_definitions():
    """
    Returns a dictionary where each key is the bitmask integer value from the EDS alarm types,
    and each value is a tuple: (description, quality_code).

    | Quality Flag | Meaning      | Common Interpretation                            |
    | ------------ | ------------ | ------------------------------------------------ |
    | `G`          | Good         | Value is reliable/valid                          |
    | `B`          | Bad          | Value is invalid/unreliable                      |
    | `U`          | Uncertain    | Value may be usable, but not guaranteed accurate |
    | `S`          | Substituted  | Manually entered or filled in                    |
    | `N`          | No Data      | No value available                               |
    | `Q`          | Questionable | Fails some validation                            |

    Source: eDocs/eDocs%203.8.0%20FP3/Index/en/OPH070.pdf
    """
    return {
        1: ("ALMTYPE_RETURN", "G"),
        2: ("ALMTYPE_SENSOR", "B"),
        4: ("ALMTYPE_HIGH", "G"),
        8: ("ALMTYPE_HI_WRS", "G"),
        16: ("ALMTYPE_HI_BET", "G"),
        32: ("ALMTYPE_HI_UDA", "G"),
        64: ("ALMTYPE_HI_WRS_UDA", "G"),
        128: ("ALMTYPE_HI_BET_UDA", "G"),
        256: ("ALMTYPE_LOW", "G"),
        512: ("ALMTYPE_LOW_WRS", "G"),
        1024: ("ALMTYPE_LOW_BET", "G"),
        2048: ("ALMTYPE_LOW_UDA", "G"),
        4096: ("ALMTYPE_LOW_WRS_UDA", "G"),
        8192: ("ALMTYPE_LOW_BET_UDA", "G"),
        16384: ("ALMTYPE_SP_ALM", "B"),
        32768: ("ALMTYPE_TIME_OUT", "U"),
        65536: ("ALMTYPE_SID_ALM", "U"),
        131072: ("ALMTYPE_ALARM", "B"),
        262144: ("ALMTYPE_ST_CHG", "G"),
        524288: ("ALMTYPE_INCR_ALARM", "G"),
        1048576: ("ALMTYPE_HIGH_HIGH", "G"),
        2097152: ("ALMTYPE_LOW_LOW", "G"),
        4194304: ("ALMTYPE_DEVICE", "U"),
    }

def decode_stat(stat_value):
    '''
    Example:
    >>> decode_stat(8192)
    [(8192, 'ALMTYPE_LOW_BET_UDA', 'G')]

    >>> decode_stat(8192 + 2)
    [(2, 'ALMTYPE_SENSOR', 'B'), (8192, 'ALMTYPE_LOW_BET_UDA', 'G')]
    '''
    alarm_dict = get_stat_alarm_definitions()
    active_flags = []
    for bitmask, (description, quality) in alarm_dict.items():
        if stat_value & bitmask:
            active_flags.append((bitmask, description, quality))
    return active_flags
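
# decode_stat feeds the 'quality' column assembled in access_database_files_locally, which
# keeps only the first active flag's quality code: decode_stat(2)[0][2] == 'B' (a sensor
# alarm maps to bad quality), and an empty flag list falls back to 'N' (no data).
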
def fetch_eds_data_row(session, iess):
    point_data = EdsClient.get_points_live_mod(session, iess)
    return point_data

@log_function_call(level=logging.DEBUG)
def _demo_eds_start_session_CoM_WWTPs():

    workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)

    secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())
    sessions = {}

    base_url_maxson = secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("url").rstrip("/")
    session_maxson = EdsClient.login_to_session(api_url=base_url_maxson,
                                                username=secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("username"),
                                                password=secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("password"))
    session_maxson.base_url = base_url_maxson
    session_maxson.zd = secrets_dict.get("eds_apis", {}).get("Maxson", {}).get("zd")

    sessions.update({"Maxson": session_maxson})

    # Show example of what it would be like to start a second session (though the Stiles API port 43084 is not accessible at this writing)
    if False:
        base_url_stiles = secrets_dict.get("eds_apis", {}).get("WWTF", {}).get("url").rstrip("/")
        session_stiles = EdsClient.login_to_session(api_url=base_url_stiles, username=secrets_dict.get("eds_apis", {}).get("WWTF", {}).get("username"), password=secrets_dict.get("eds_apis", {}).get("WWTF", {}).get("password"))
        session_stiles.base_url = base_url_stiles
        session_stiles.zd = secrets_dict.get("eds_apis", {}).get("WWTF", {}).get("zd")
        sessions.update({"WWTF": session_stiles})

    return workspace_manager, sessions

@log_function_call(level=logging.DEBUG)
def demo_eds_print_point_live_alt():
    from src.pipeline.queriesmanager import load_query_rows_from_csv_files, group_queries_by_col

    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    queries_file_path_list = workspace_manager.get_default_query_file_paths_list()  # use default identified by the default-queries.toml file
    queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)  # A scripter can edit their queries file names here - they do not need to use the default.
    queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered, 'zd')

    # for key, session in sessions.items():  # Given multiple sessions, cycle through each.
    key = "Maxson"
    session = sessions[key]
    # Discern which queries to use, filtered by current session key.
    queries_dictlist_filtered_by_session_key = queries_defaultdictlist_grouped_by_session_key.get(key, [])

    logging.debug(f"queries_dictlist_unfiltered = {queries_dictlist_unfiltered}\n")
    logging.debug(f"queries_dictlist_filtered_by_session_key = {queries_dictlist_filtered_by_session_key}\n")
    logging.debug(f"queries_defaultdictlist_grouped_by_session_key = {queries_defaultdictlist_grouped_by_session_key}\n")

    for row in queries_dictlist_filtered_by_session_key:
        iess = str(row["iess"]) if row["iess"] not in (None, '', '\t') else None
        point_data = EdsClient.get_points_live_mod(session, iess)
        if point_data is None:
            raise ValueError(f"No live point returned for iess {iess}")
        else:
            row.update(point_data)
            EdsClient.print_point_info_row(row)

@log_function_call(level=logging.DEBUG)
def demo_eds_print_point_live():
    from src.pipeline.queriesmanager import load_query_rows_from_csv_files, group_queries_by_col
    from workspaces.eds_to_rjn.code import collector
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    queries_file_path_list = workspace_manager.get_default_query_file_paths_list()  # use default identified by the default-queries.toml file
    queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)  # A scripter can edit their queries file names here - they do not need to use the default.
    queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered)

    # for key, session in sessions.items():  # Given multiple sessions, cycle through each.
    key = "Maxson"
    session = sessions[key]
    # Discern which queries to use, filtered by current session key.
    queries_dictlist_filtered_by_session_key = queries_defaultdictlist_grouped_by_session_key.get(key, [])
    queries_plus_responses_filtered_by_session_key = collector.collect_live_values(session, queries_dictlist_filtered_by_session_key)

    logging.debug(f"queries_dictlist_unfiltered = {queries_dictlist_unfiltered}\n")
    logging.debug(f"queries_defaultdictlist_grouped_by_session_key = {queries_defaultdictlist_grouped_by_session_key}\n")
    logging.debug(f"queries_dictlist_filtered_by_session_key = {queries_dictlist_filtered_by_session_key}\n")
    logging.debug(f"queries_plus_responses_filtered_by_session_key = {queries_plus_responses_filtered_by_session_key}\n")

    for row in queries_plus_responses_filtered_by_session_key:
        EdsClient.print_point_info_row(row)

@log_function_call(level=logging.DEBUG)
def demo_eds_plot_point_live():
    from threading import Thread

    from src.pipeline.queriesmanager import load_query_rows_from_csv_files, group_queries_by_col
    from workspaces.eds_to_rjn.code import collector, sanitizer
    from src.pipeline.plotbuffer import PlotBuffer
    from src.pipeline import gui_mpl_live

    # Initialize the workspace based on configs and defaults, in the demo initialization script
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()

    data_buffer = PlotBuffer()

    # Load queries
    queries_file_path_list = workspace_manager.get_default_query_file_paths_list()  # use default identified by the default-queries.toml file
    queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)  # A scripter can edit their queries file names here - they do not need to use the default.
    queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered)

    key = "Maxson"
    session = sessions[key]
    queries_maxson = queries_defaultdictlist_grouped_by_session_key.get(key, [])

    def collect_loop():
        while True:
            responses = collector.collect_live_values(session, queries_maxson)
            for row in responses:
                label = f"{row.get('shortdesc')} ({row.get('un')})"
                ts = row.get("ts")
                ts = helpers.iso(row.get("ts"))  # dpg is out, mpl is in. plotly is way, way in.
                av = row.get("value")
                un = row.get("un")
                if ts is not None and av is not None:
                    data_buffer.append(label, ts, av)
                    #logger.info(f"Live: {label} → {av} @ {ts}")
                    logger.info(f"Live: {label} {round(av, 2)} {un}")
            time.sleep(1)

    collector_thread = Thread(target=collect_loop, daemon=True)
    collector_thread.start()

    # Now run the GUI in the main thread
    #gui_dpg_live.run_gui(data_buffer)
    gui_mpl_live.run_gui(data_buffer)

@log_function_call(level=logging.DEBUG)
def demo_eds_webplot_point_live():
    from threading import Thread

    from src.pipeline.queriesmanager import QueriesManager, load_query_rows_from_csv_files, group_queries_by_col
    from workspaces.eds_to_rjn.code import collector, sanitizer
    from src.pipeline.plotbuffer import PlotBuffer
    #from src.pipeline import gui_flaskplotly_live
    from src.pipeline import gui_fastapi_plotly_live

    # Initialize the workspace based on configs and defaults, in the demo initialization script
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()

    queries_manager = QueriesManager(workspace_manager)

    data_buffer = PlotBuffer()

    # Load queries
    queries_file_path_list = workspace_manager.get_default_query_file_paths_list()  # use default identified by the default-queries.toml file
    queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)  # A scripter can edit their queries file names here - they do not need to use the default.
    queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered)

    key = "Maxson"
    session = sessions[key]
    queries_maxson = queries_defaultdictlist_grouped_by_session_key.get(key, [])

    def collect_loop():
        while True:
            responses = collector.collect_live_values(session, queries_maxson)
            for row in responses:

                #ts = TimeManager(row.get("ts")).as_formatted_time()
                ts = TimeManager(row.get("ts")).as_iso()
                #ts = helpers.iso(row.get("ts"))
                av = row.get("value")
                un = row.get("un")
                # QUICK AND DIRTY CONVERSION FOR WWTF WETWELL LEVEL TO FEET
                if row.get('iess') == "M310LI.UNIT0@NET0":
                    av = (av / 12) + 181.25  # convert inches of wetwell to feet above mean sea level
                    un = "FT"
                label = f"{row.get('shortdesc')} ({un})"
                if ts is not None and av is not None:
                    data_buffer.append(label, ts, av)
                    #logger.info(f"Live: {label} → {av} @ {ts}")
                    logger.info(f"Live: {label} {round(av, 2)} {un}")
            time.sleep(1)

    if False:
        load_historic_data()
    collector_thread = Thread(target=collect_loop, daemon=True)
    collector_thread.start()

    # Now run the GUI in the main thread
    #gui_flaskplotly_live.run_gui(data_buffer)
    gui_fastapi_plotly_live.run_gui(data_buffer)

@log_function_call(level=logging.DEBUG)
def load_historic_data(queries_manager, workspace_manager, session, iess_list, starttime=None, endtime=None):
    if starttime is None:
        # back_to_last_success = True
        starttime = queries_manager.get_most_recent_successful_timestamp(api_id=session.zd)  # api_id is the zd attached to the session

    if endtime is None:
        endtime = helpers.get_now_time_rounded(workspace_manager)

    starttime = TimeManager(starttime).as_unix()
    endtime = TimeManager(endtime).as_unix()
    logger.info(f"starttime = {starttime}")
    logger.info(f"endtime = {endtime}")

    step_seconds = helpers.nice_step(endtime - starttime)

    point_list = iess_list
    api_url = str(session.base_url)
    request_id = EdsClient.create_tabular_request(session, api_url, starttime, endtime, points=point_list, step_seconds=step_seconds)
    EdsClient.wait_for_request_execution_session(session, api_url, request_id)
    results = EdsClient.get_tabular_trend(session, request_id, point_list)
    logger.debug(f"len(results) = {len(results)}")
    return results
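
# Illustrative call (mirrors demo_eds_print_tabular_trend below; the iess value is one of
# the points this workspace already queries):
#   results = load_historic_data(queries_manager, workspace_manager, session,
#                                iess_list=["M310LI.UNIT0@NET0"])
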
@log_function_call(level=logging.DEBUG)
def demo_eds_plot_trend():
    pass

@log_function_call(level=logging.DEBUG)
def demo_eds_print_point_export():
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    session_maxson = sessions["Maxson"]

    point_export_decoded_str = EdsClient.get_points_export(session_maxson)
    pprint(point_export_decoded_str)
    return point_export_decoded_str

@log_function_call(level=logging.DEBUG)
def demo_eds_save_point_export():
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    session_maxson = sessions["Maxson"]

    point_export_decoded_str = EdsClient.get_points_export(session_maxson)
    export_file_path = workspace_manager.get_exports_file_path(filename='export_eds_points_neo.txt')
    EdsClient.save_points_export(point_export_decoded_str, export_file_path=export_file_path)
    print(f"Export file saved to: \n{export_file_path}")

@log_function_call(level=logging.DEBUG)
def demo_eds_save_graphics_export():
    # Start sessions for your WWTPs
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    session_maxson = sessions["Maxson"]

    # Get list of graphics from the EDS session
    graphics_list = EdsClient.get_graphics_list(session_maxson, session_maxson.base_url)
    print(f"Found {len(graphics_list)} graphics to export.")

    # Loop through each graphic and save it
    for graphic in graphics_list:
        graphic_name = graphic.get("name", os.path.splitext(graphic["file"])[0])
        safe_name = "".join(c if c.isalnum() or c in "_-" else "_" for c in graphic_name)
        output_file_path = workspace_manager.get_exports_file_path(filename=f"{safe_name}.png")

        # Fetch and save the graphic
        graphic_bytes = EdsClient.get_graphic_export(session_maxson, session_maxson.base_url, graphic["file"])
        EdsClient.save_graphic_export(graphic_bytes, output_file_path)

        print(f"Saved graphic: {graphic_name} → {output_file_path}")

    print("All graphics exported successfully.")

@log_function_call(level=logging.DEBUG)
def demo_eds_print_tabular_trend():

    from src.pipeline.queriesmanager import QueriesManager
    from src.pipeline.queriesmanager import load_query_rows_from_csv_files, group_queries_by_col

    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()

    queries_manager = QueriesManager(workspace_manager)
    queries_file_path_list = workspace_manager.get_default_query_file_paths_list()  # use default identified by the default-queries.toml file
    logger.debug(f"queries_file_path_list = {queries_file_path_list}")
    queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)  # you can edit your queries files here

    queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered, 'zd')

    for key, session in sessions.items():
        # Discern which queries to use
        point_list = [row['iess'] for row in queries_defaultdictlist_grouped_by_session_key.get(key, [])]

        # Discern the time range to use
        starttime = queries_manager.get_most_recent_successful_timestamp(api_id="Maxson")
        endtime = helpers.get_now_time_rounded(workspace_manager)

        #api_url = session.custom_dict["url"]
        api_url = str(session.base_url)
        request_id = EdsClient.create_tabular_request(session, api_url, starttime, endtime, points=point_list)
        EdsClient.wait_for_request_execution_session(session, api_url, request_id)
        results = EdsClient.get_tabular_trend(session, request_id, point_list)
        session.post(f"{api_url}/logout", verify=False)
        #
        for idx, iess in enumerate(point_list):
            print('\n{} samples:'.format(iess))
            for s in results[idx]:
                #print('{} {} {}'.format(datetime.fromtimestamp(s['ts']), round(s['value'], 2), s['quality']))
                print('{} {} {}'.format(datetime.fromtimestamp(s['ts']), s['value'], s['quality']))
        queries_manager.update_success(api_id=key)  # not appropriate here in demo without successful transmission to 3rd party API

@log_function_call(level=logging.DEBUG)
def demo_eds_local_database_access():
    from src.pipeline.queriesmanager import QueriesManager
    from src.pipeline.queriesmanager import load_query_rows_from_csv_files, group_queries_by_col
    workspace_name = 'eds_to_rjn'  # workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)
    queries_manager = QueriesManager(workspace_manager)
    queries_file_path_list = workspace_manager.get_default_query_file_paths_list()  # use default identified by the default-queries.toml file
    logger.debug(f"queries_file_path_list = {queries_file_path_list}")

    queries_dictlist_unfiltered = load_query_rows_from_csv_files(queries_file_path_list)
    queries_defaultdictlist_grouped_by_session_key = group_queries_by_col(queries_dictlist_unfiltered, 'zd')
    secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())
    sessions_eds = {}

    # --- Prepare Stiles session_eds

    session_stiles = None  # assume the EDS API session cannot be established
    sessions_eds.update({"WWTF": session_stiles})

    key_eds = "WWTF"
    session_key = key_eds
    session_eds = session_stiles
    point_list = [row['iess'] for row in queries_defaultdictlist_grouped_by_session_key.get(key_eds, [])]
    point_list_sid = [row['sid'] for row in queries_defaultdictlist_grouped_by_session_key.get(key_eds, [])]

    logger.info(f"point_list = {point_list}")
    # Discern the time range to use
    starttime = queries_manager.get_most_recent_successful_timestamp(api_id="WWTF")
    logger.info(f"queries_manager.get_most_recent_successful_timestamp(), key = {'WWTF'}")
    endtime = helpers.get_now_time_rounded(workspace_manager)
    starttime = TimeManager(starttime).as_unix()
    endtime = TimeManager(endtime).as_unix()
    logger.info(f"starttime = {starttime}")
    logger.info(f"endtime = {endtime}")

    if EdsClient.this_computer_is_an_enterprise_database_server(secrets_dict, key_eds):
        tables = identify_relevant_tables(session_key, starttime, endtime, secrets_dict)
        results = EdsClient.access_database_files_locally(key_eds, starttime, endtime, point=point_list_sid, tables=tables)
    else:
        logger.warning("This computer is not an enterprise database server. Local database access will not work.")
        results = [[] for _ in point_list]
    print(f"len(results) = {len(results)}")
    print(f"len(results[0]) = {len(results[0])}")
    print(f"len(results[1]) = {len(results[1])}")

    for idx, iess in enumerate(point_list):
        if results[idx]:
            #print(f"rows = {rows}")
            timestamps = []
            values = []

            for row in results[idx]:
                #print(f"row = {row}")
                #EdsClient.print_point_info_row(row)

                dt = datetime.fromtimestamp(row["ts"])
                timestamp_str = helpers.round_datetime_to_nearest_past_five_minutes(dt).isoformat(timespec='seconds')
                if row['quality'] == 'G':
                    timestamps.append(timestamp_str)
                    values.append(round(row["value"], 5))  # unrounded values fail to post
            print(f"final row = {row}")
        else:
            print("No data rows for this point")

@log_function_call(level=logging.DEBUG)
def demo_eds_print_license():
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    session_maxson = sessions["Maxson"]

    response = EdsClient.get_license(session_maxson, api_url=session_maxson.base_url)
    pprint(response)
    return response

@log_function_call(level=logging.DEBUG)
def demo_eds_ping():
    from src.pipeline.calls import call_ping
    workspace_manager, sessions = _demo_eds_start_session_CoM_WWTPs()
    session_maxson = sessions["Maxson"]

    #api_url = session_maxson.custom_dict["url"]
    response = call_ping(session_maxson.base_url)

if __name__ == "__main__":

    '''
    - auto id current function name. solution: decorator, @log_function_call
    - print only which vars succeed
    '''
    import sys
    from src.pipeline.logging_setup import setup_logging
    cmd = sys.argv[1] if len(sys.argv) > 1 else "default"

    setup_logging()
    logger = logging.getLogger(__name__)
    logger.info("CLI started")

    if cmd == "demo-live":
        demo_eds_print_point_live()
    elif cmd == "demo-live-alt":
        demo_eds_print_point_live_alt()
    elif cmd == "demo-plot-live":
        demo_eds_plot_point_live()
    elif cmd == "demo-webplot-live":
        demo_eds_webplot_point_live()
    elif cmd == "demo-plot-trend":
        demo_eds_plot_trend()
    elif cmd == "demo-point-export":
        #demo_eds_print_point_export()
        demo_eds_save_point_export()
    elif cmd == "demo-db":
        demo_eds_local_database_access()
    elif cmd == "demo-trend":
        demo_eds_print_tabular_trend()
    elif cmd == "ping":
        demo_eds_ping()
    elif cmd == "export-graphics":
        demo_eds_save_graphics_export()
    elif cmd == "license":
        demo_eds_print_license()
    elif cmd == "access-workspace":
        if platform.system().lower() == "windows":
            # run the Open-FileBrowser command, registered with: git clone https://github.com/city-of-memphis-wastewater/powershell-tools.git ## run `notepad $profile` #noobs
            #command = ["Open-FileBrowser", WorkspaceManager.get_cwd()]
            command = ["explorer", str(WorkspaceManager.get_cwd())]
            subprocess.call(command)
    else:
        print("Usage options: \n"
              "poetry run python -m pipeline.api.eds demo-point-export \n"
              "poetry run python -m pipeline.api.eds demo-live \n"
              "poetry run python -m pipeline.api.eds demo-live-alt \n"
              "poetry run python -m pipeline.api.eds demo-trend \n"
              "poetry run python -m pipeline.api.eds demo-plot-live \n"
              "poetry run python -m pipeline.api.eds demo-webplot-live \n"
              "poetry run python -m pipeline.api.eds demo-plot-trend \n"
              "poetry run python -m pipeline.api.eds demo-db \n"
              "poetry run python -m pipeline.api.eds ping \n"
              "poetry run python -m pipeline.api.eds license \n"
              "poetry run python -m pipeline.api.eds export-graphics \n"
              "poetry run python -m pipeline.api.eds access-workspace")
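
Taken together, the typical client flow is: log in, attach base_url and zd to the session, then query. A minimal end-to-end sketch, assuming the wheel's `pipeline` package layout, a reachable EDS REST endpoint, and valid credentials (the URL, username, and password below are placeholders):

    from pipeline.api.eds import EdsClient

    api_url = "http://eds-host:43084"   # placeholder EDS REST endpoint
    session = EdsClient.login_to_session(api_url, "username", "password")
    session.base_url = api_url          # the EdsClient methods read these attributes
    session.zd = "Maxson"

    point = EdsClient.get_points_live_mod(session, "M310LI.UNIT0@NET0")
    if point is not None:
        EdsClient.print_point_info_row(point)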