pipeline-eds 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. pipeline/__init__.py +4 -0
  2. pipeline/__main__.py +1 -0
  3. pipeline/api/__init__.py +0 -0
  4. pipeline/api/eds.py +980 -0
  5. pipeline/api/rjn.py +157 -0
  6. pipeline/api/status_api.py +9 -0
  7. pipeline/calls.py +108 -0
  8. pipeline/cli.py +282 -0
  9. pipeline/configrationmanager.py +22 -0
  10. pipeline/decorators.py +13 -0
  11. pipeline/env.py +61 -0
  12. pipeline/environment.py +59 -0
  13. pipeline/gui_fastapi_plotly_live.py +78 -0
  14. pipeline/gui_mpl_live.py +113 -0
  15. pipeline/helpers.py +125 -0
  16. pipeline/logging_setup.py +45 -0
  17. pipeline/pastehelpers.py +10 -0
  18. pipeline/philosophy.py +62 -0
  19. pipeline/plotbuffer.py +21 -0
  20. pipeline/points_loader.py +19 -0
  21. pipeline/queriesmanager.py +122 -0
  22. pipeline/time_manager.py +211 -0
  23. pipeline/workspace_manager.py +253 -0
  24. pipeline_eds-0.2.4.dist-info/LICENSE +14 -0
  25. pipeline_eds-0.2.4.dist-info/METADATA +238 -0
  26. pipeline_eds-0.2.4.dist-info/RECORD +62 -0
  27. pipeline_eds-0.2.4.dist-info/WHEEL +4 -0
  28. pipeline_eds-0.2.4.dist-info/entry_points.txt +6 -0
  29. workspaces/default-workspace.toml +3 -0
  30. workspaces/eds_to_rjn/__init__.py +0 -0
  31. workspaces/eds_to_rjn/code/__init__.py +0 -0
  32. workspaces/eds_to_rjn/code/aggregator.py +84 -0
  33. workspaces/eds_to_rjn/code/collector.py +60 -0
  34. workspaces/eds_to_rjn/code/sanitizer.py +40 -0
  35. workspaces/eds_to_rjn/code/storage.py +16 -0
  36. workspaces/eds_to_rjn/configurations/config_time.toml +11 -0
  37. workspaces/eds_to_rjn/configurations/configuration.toml +2 -0
  38. workspaces/eds_to_rjn/exports/README.md +7 -0
  39. workspaces/eds_to_rjn/exports/aggregate/README.md +7 -0
  40. workspaces/eds_to_rjn/exports/aggregate/live_data - Copy.csv +355 -0
  41. workspaces/eds_to_rjn/exports/aggregate/live_data_EFF.csv +17521 -0
  42. workspaces/eds_to_rjn/exports/aggregate/live_data_INF.csv +17521 -0
  43. workspaces/eds_to_rjn/exports/export_eds_points_neo.txt +11015 -0
  44. workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.csv +8759 -0
  45. workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.xlsx +0 -0
  46. workspaces/eds_to_rjn/exports/manual_effluent.csv +8759 -0
  47. workspaces/eds_to_rjn/exports/manual_influent.csv +8759 -0
  48. workspaces/eds_to_rjn/exports/manual_wetwell.csv +8761 -0
  49. workspaces/eds_to_rjn/history/time_sample.txt +0 -0
  50. workspaces/eds_to_rjn/imports/zdMaxson_idcsD321E_sid11003.toml +14 -0
  51. workspaces/eds_to_rjn/imports/zdMaxson_idcsFI8001_sid8528.toml +14 -0
  52. workspaces/eds_to_rjn/imports/zdMaxson_idcsM100FI_sid2308.toml +14 -0
  53. workspaces/eds_to_rjn/imports/zdMaxson_idcsM310LI_sid2382.toml +14 -0
  54. workspaces/eds_to_rjn/queries/default-queries.toml +4 -0
  55. workspaces/eds_to_rjn/queries/points-maxson.csv +4 -0
  56. workspaces/eds_to_rjn/queries/points-stiles.csv +4 -0
  57. workspaces/eds_to_rjn/queries/timestamps_success.json +20 -0
  58. workspaces/eds_to_rjn/scripts/__init__.py +0 -0
  59. workspaces/eds_to_rjn/scripts/daemon_runner.py +212 -0
  60. workspaces/eds_to_rjn/secrets/README.md +24 -0
  61. workspaces/eds_to_rjn/secrets/secrets-example.yaml +15 -0
  62. workspaces/eds_to_termux/..txt +0 -0
pipeline/api/rjn.py ADDED
@@ -0,0 +1,157 @@
1
+ import requests
2
+ import logging
3
+ from typing import Union
4
+
5
+ from src.pipeline.calls import make_request, call_ping
6
+ from src.pipeline.env import find_urls
7
+ from src.pipeline.decorators import log_function_call
8
+ from src.pipeline import helpers
9
+ from src.pipeline.time_manager import TimeManager
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
class RjnClient:
    """Client for the RJN contractor API: authentication, data upload, and ping."""

    def __init__(self, config):
        # Raw configuration mapping; schema comes from the workspace secrets file.
        self.config = config

    def send_point(self, payload: dict):
        """POST a single point payload to the RJN data endpoint.

        NOTE(review): this reads self.api_url and self.headers, but __init__
        never sets either — callers must assign them first or this raises
        AttributeError. Confirm the intended config schema.
        """
        request_url = f"{self.api_url}/data/point"  # Adjust this if needed
        response = make_request(url=request_url, headers=self.headers, data=payload, method="POST")
        if response.status_code == 200:
            print(f"Successfully posted point {payload.get('rjn_name')}")
        else:
            print(f"Failed to post point {payload.get('rjn_name')}: {response.status_code}")

    @staticmethod
    def login_to_session(api_url, client_id, password):
        """Authenticate against {api_url}/auth and return a requests.Session
        carrying a Bearer token, or None on any failure (caller must check)."""
        logger.info("RjnClient.login_to_session()")
        session = requests.Session()

        data = {'client_id': client_id, 'password': password, 'type': 'script'}

        try:
            response = session.post(f'{api_url}/auth', json=data, verify=True)
            response.raise_for_status()  # catch 4xx/5xx html status
            token = response.json().get('token')
            session.headers['Authorization'] = f'Bearer {token}'
            print("Status code:", response.status_code)
            print("Response text:", response.text)
            return session
        except requests.exceptions.SSLError as ssl_err:
            logging.warning("SSL verification failed. Will retry on next scheduled cycle.")
            logging.debug(f"SSL error details: {ssl_err}")
            return None
        except requests.exceptions.ConnectionError as conn_err:
            logging.warning("Connection error during authentication. Will retry next hour.")
            logging.debug(f"Connection error details: {conn_err}")
            return None
        except Exception:
            # exc_info=True already records the traceback; no need to bind the exception.
            logging.error("Unexpected error during login.", exc_info=True)
            return None

    @staticmethod
    def send_data_to_rjn(session, base_url, project_id, entity_id, timestamps, values):
        """Upload paired timestamps/values to an RJN entity.

        Raises ValueError for None, non-list, or mismatched-length inputs.
        Returns True on HTTP success, False on any request failure.
        """
        if timestamps is None:
            raise ValueError("timestamps cannot be None")
        if values is None:
            raise ValueError("values cannot be None")
        if not isinstance(timestamps, list):
            raise ValueError("timestamps must be a list. If you have a single timestamp, use: [timestamp] ")
        if not isinstance(values, list):
            raise ValueError("values must be a list. If you have a single value, use: [value] ")
        # Check for matching lengths of timestamps and values
        if len(timestamps) != len(values):
            raise ValueError(f"timestamps and values must have the same length: {len(timestamps)} vs {len(values)}")

        timestamps_str = [TimeManager(ts).as_formatted_date_time() for ts in timestamps]

        url = f"{base_url}/projects/{project_id}/entities/{entity_id}/data"
        params = {
            "interval": 300,
            "import_mode": "OverwriteExistingData",
            "incoming_time": "DST"#, # DST seemed to fail and offset by an hour into the future. UTC with central time seemed to fail and offset the data 5 hours into the past.
            #"local_timezone": "CST_CentralStandardTime"
        }
        body = {
            "comments": "Imported from EDS.",
            "data": dict(zip(timestamps_str, values))  # Works for single or multiple entries
        }

        response = None
        try:
            response = session.post(url=url, json=body, params=params)
            print("Status code:", response.status_code)
            print("Response text:", response.text)
            # session.post either raises or returns a Response, so the old
            # `if response is None` branch was unreachable and has been removed.
            response.raise_for_status()
            print(f"Sent timestamps and values to entity {entity_id} (HTTP {response.status_code})")
            return True
        except requests.exceptions.ConnectionError as e:
            print("Skipping RjnClient.send_data_to_rjn() due to connection error")
            print(e)
            return False
        except requests.exceptions.RequestException as e:
            print(f"Error sending data to RJN: {e}")
            if response is not None:  # response stays None if post() itself failed
                logging.debug(f"Response content: {response.text}")  # Print error response
            return False

    @staticmethod
    def ping():
        """Ping every RJN-looking URL found in the default workspace's secrets."""
        from src.pipeline.env import SecretConfig
        from src.pipeline.workspace_manager import WorkspaceManager
        workspace_name = WorkspaceManager.identify_default_workspace_name()
        workspace_manager = WorkspaceManager(workspace_name)
        # Load once — the original loaded the same secrets file twice and kept
        # an unused `sessions` dict.
        secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())

        url_set = find_urls(secrets_dict)
        for url in url_set:
            if "rjn" in url.lower():
                print(f"ping url: {url}")
                call_ping(url)
123
+
124
@log_function_call(level=logging.DEBUG)
def demo_rjn_ping():
    """Log in to the RJN API using the default workspace's secrets, then ping its base URL."""
    from src.pipeline.calls import call_ping
    from src.pipeline.env import SecretConfig
    from src.pipeline.workspace_manager import WorkspaceManager
    # The original imported SecretConfig and WorkspaceManager twice; duplicates removed.

    workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)

    secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())

    rjn_secrets = secrets_dict.get("contractor_apis", {}).get("RJN", {})
    # NOTE(review): .get("url") may return None, which would raise on rstrip —
    # assumes the secrets file always defines contractor_apis.RJN.url; confirm.
    base_url = rjn_secrets.get("url").rstrip("/")
    session = RjnClient.login_to_session(api_url=base_url,
                                         client_id=rjn_secrets.get("client_id"),
                                         password=rjn_secrets.get("password"))
    if session is None:
        logger.warning("RJN session not established. Skipping RJN-related data transmission.\n")
        return
    else:
        logger.info("RJN session established successfully.")
    session.base_url = base_url
    call_ping(session.base_url)  # result intentionally unused (ping side effect only)
148
+
149
if __name__ == "__main__":
    # Minimal manual CLI: `poetry run python -m pipeline.api.rjn ping`
    import sys
    cmd = sys.argv[1] if len(sys.argv) > 1 else "default"

    if cmd == "ping":
        demo_rjn_ping()
    else:
        # Any other (or missing) argument just prints usage.
        print("Usage options: \n"
              "poetry run python -m pipeline.api.rjn ping")
@@ -0,0 +1,9 @@
1
# src/api/status_api.py
# Minimal FastAPI app exposing the daemon's latest status snapshot over HTTP.
from fastapi import FastAPI
from src.pipeline.daemon.status import get_latest_status

app = FastAPI()

@app.get("/status")
def read_status():
    # Returns whatever the daemon last recorded; the response shape is
    # defined by get_latest_status() (not visible in this module).
    return get_latest_status()
pipeline/calls.py ADDED
@@ -0,0 +1,108 @@
1
+ import requests
2
+ import certifi
3
+ import platform
4
+ import subprocess
5
+ import sys
6
+ import time
7
+ import logging
8
+ from urllib.parse import urlparse
9
+ from urllib3.exceptions import NewConnectionError
10
+
11
+ logging.basicConfig(level=logging.WARNING, format="%(levelname)s: %(message)s")
12
+
13
def test_connection_to_internet():
    """Probe internet connectivity; exit the process if the probe fails.

    Side effects: prints a status line and calls sys.exit() when offline.
    """
    try:
        # call Cloudflare's CDN test site, because it is lite.
        requests.get("http://1.1.1.1", timeout=5)
        print("You are connected to the internet.")
    except requests.exceptions.RequestException:
        # Narrowed from a bare `except:` so KeyboardInterrupt / SystemExit
        # are no longer swallowed; also dropped the unused response binding.
        print("It appears you are not connected to the internet.")
        sys.exit()
21
+
22
def make_request(url, data=None, params = None, method="POST", headers=None, retries=3, delay=2, timeout=10, verify_ssl=True):
    """Now defunct, converted to a requests.Session() paradigm.

    One-shot HTTP request helper with JSON defaults, optional SSL
    verification via certifi, and limited retry on HTTP 503.
    Returns the Response on success, or None on any handled failure.
    """
    # Default to JSON in/out; caller headers override these keys.
    default_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
    }

    merged_headers = {**default_headers, **(headers or {})}
    #print(f"merged_headers = {merged_headers}")

    # verify=False disables certificate checking entirely.
    verify = certifi.where() if verify_ssl else False

    # Dispatch table instead of an if/elif chain over the verb.
    request_func = {
        "POST": requests.post,
        "GET": requests.get,
        "PUT": requests.put,
        "DELETE": requests.delete,
        "PATCH": requests.patch,
    }.get(method.upper())

    if not request_func:
        logging.error(f"Unsupported HTTP method: {method}")
        return None
        #raise ValueError(f"Unsupported HTTP method: {method}")
    try:
        response = request_func(
            url,
            json=data,
            params=params,
            headers=merged_headers,
            timeout=timeout,
            verify=verify
        )
        response.raise_for_status()
        return response
    except requests.exceptions.SSLError as e:
        #raise ConnectionError(f"SSL Error: {e}")
        logging.error(f"SSL Error: {e}")
        return None
    except requests.exceptions.HTTPError as e:
        # raise_for_status() only fires after `response` is bound, so it is
        # safe to inspect response here.
        if response.status_code == 500:
            logging.error(f"HTTP 500 Error - Response content: {response.text}")
        elif response.status_code == 503 and retries > 0:
            # Exponential backoff: delay doubles on each recursive retry.
            logging.warning(f"Service unavailable (503). Retrying in {delay} seconds...")
            time.sleep(delay)
            #return make_request(url, data, retries - 1, delay * 2) # Exponential backoff
            return make_request(url, data, params, method, headers, retries - 1, delay * 2, timeout, verify_ssl)
        elif response.status_code == 403:
            #raise PermissionError("Access denied (403). The server rejected your credentials or IP.")
            logging.error("Access denied (403). The server rejected your credentials or IP.")
        else:
            #raise RuntimeError(f"HTTP error: {response.status_code} {response.text}")
            logging.error(f"HTTP error: {response.status_code} {response.text}")
        return None
    except requests.exceptions.RequestException as e:
        logging.warning(f"Request failed: {e}")
        return None # Ensures calling functions properly handle failure
    except NewConnectionError as e:
        # NOTE(review): requests normally wraps urllib3's NewConnectionError
        # in requests.exceptions.ConnectionError (caught above), so this arm
        # looks unreachable; it also falls through returning None implicitly.
        logging.warning("Request failed due to connection issues.")
        logging.debug(f"Detailed error: {e}", exc_info=False) # Only logs full traceback if DEBUG level is set
83
def call_ping(url):
    """Ping the host of *url* once; return True iff the ping succeeds.

    Uses the platform ping binary (-n on Windows, -c elsewhere).
    """
    parsed = urlparse(url)
    # urlparse() yields hostname=None for scheme-less strings like
    # "example.com"; fall back to the raw value so those still work
    # instead of executing `ping ... None`.
    host = parsed.hostname or url
    param = "-n" if platform.system().lower() == "windows" else "-c"
    command = ["ping", param, "1", host]
    return subprocess.call(command) == 0  # True if ping succeeds
88
+
89
def find_urls(config_dict):
    """Collect every string value starting with "http" anywhere in a nested
    config of dicts and lists; returns them as a set.

    Fix: the original recursed into lists but ignored string items inside
    them, so a plain list of URL strings was silently missed.
    """
    url_set = set()

    def _walk(node):
        # One uniform type check per node so strings are caught at any depth.
        if isinstance(node, str):
            if node.startswith("http"):
                url_set.add(node)
        elif isinstance(node, dict):
            for value in node.values():  # keys are never URLs here
                _walk(value)
        elif isinstance(node, list):
            for item in node:
                _walk(item)

    _walk(config_dict)
    return url_set
105
+
106
if __name__ == "__main__":
    # Smoke-run: list this module's functions via the shared helper.
    from src.pipeline.helpers import function_view
    function_view()
pipeline/cli.py ADDED
@@ -0,0 +1,282 @@
1
+ '''
2
+ import typer
3
+ from pathlib import Path
4
+
5
+ app = typer.Typer()
6
+
7
+ @app.command()
8
+ def list_workspaces(workspaces_dir: Path = Path("workspaces")):
9
+ """List valid mulch workspaces in the given directory."""
10
+ if not workspaces_dir.exists():
11
+ typer.echo(f"Directory not found: {workspaces_dir}")
12
+ raise typer.Exit(code=1)
13
+ for path in workspaces_dir.iterdir():
14
+ if path.is_dir() and (path / ".mulch").is_dir():
15
+ typer.echo(f"🪴 {path.name}")
16
+
17
+ @app.command()
18
+ def list_mulch_folders(start: Path = Path(".")):
19
+ """Recursively find folders containing a .mulch/ directory."""
20
+ for path in start.rglob(".mulch"):
21
+ typer.echo(f"📁 {path.parent}")
22
+
23
+ @app.command()
24
+ def inspect(workspace: Path):
25
+ """Show scaffold or metadata info from a workspace."""
26
+ metadata = workspace / ".mulch" / "mulch-scaffold.json"
27
+ if metadata.exists():
28
+ typer.echo(f"🔍 {workspace.name}: {metadata}")
29
+ typer.echo(metadata.read_text())
30
+ else:
31
+ typer.echo(f"No scaffold found in {workspace}")
32
+ '''
33
+ # src/pipeline/cli.py
34
+
35
+ import typer
36
+ import importlib
37
+ from pathlib import Path
38
+
39
+ from src.pipeline.env import SecretConfig
40
+ #from src.pipeline.helpers import setup_logging
41
+ from src.pipeline.workspace_manager import WorkspaceManager
42
+
43
+ app = typer.Typer(help="CLI for running pipeline workspaces.")
44
+
45
+
46
@app.callback(invoke_without_command=True)
def main(ctx: typer.Context):
    """
    Pipeline CLI – run workspaces built on the pipeline framework.
    """
    # A subcommand was given: let Typer dispatch to it normally.
    if ctx.invoked_subcommand is not None:
        return
    # Bare invocation: show usage instead of silently doing nothing.
    typer.echo(ctx.get_help())
    raise typer.Exit()
55
@app.command()
def run(
    workspace: str = typer.Option(None, help="Workspace to run"),
):
    """
    Import and run a workspace's main() function.

    Fix: `raise typer.Exit(1)` for a missing main() used to be raised inside
    the same try block whose `except Exception` caught it (typer's Exit is an
    Exception subclass), printing a spurious error message. The Exit paths
    are now outside that handler / explicitly re-raised.
    """
    # Determine workspace name
    if workspace is None:
        workspace = WorkspaceManager.identify_default_workspace_name()
    wm = WorkspaceManager(workspace)

    workspace_dir = wm.get_workspace_dir()
    module_path = f"workspaces.{workspace}.main"

    typer.echo(f"🚀 Running {module_path} from {workspace_dir}")

    try:
        mod = importlib.import_module(module_path)
    except Exception as e:
        typer.echo(f"💥 Error while running {workspace}: {e}")
        raise typer.Exit(1)

    if not hasattr(mod, "main"):
        typer.echo("❌ This workspace does not have a 'main()' function.")
        raise typer.Exit(1)

    try:
        mod.main()
    except typer.Exit:
        # Let deliberate exits from the workspace propagate untouched.
        raise
    except Exception as e:
        typer.echo(f"💥 Error while running {workspace}: {e}")
        raise typer.Exit(1)
82
@app.command()
def typical(zd: str):
    """
    Print the typical idcs list for an EDS zd.
    """
    # TODO: unimplemented stub — currently a no-op for any zd value.
    pass
89
@app.command()
def trend(
    # NOTE(review): the --start/--end help texts mention "mulch order" /
    # "workspace organization" — they look copy-pasted from another tool;
    # confirm and correct the user-facing wording.
    idcs: list[str] = typer.Argument(..., help="Provide known idcs values that match the given zd."), # , "--idcs", "-i"
    starttime: str = typer.Option(None, "--start", "-s", help="Index from 'mulch order' to choose scaffold source."),
    endtime: str = typer.Option(None, "--end", "-end", help="Reference a known template for workspace organization."),
    zd: str = typer.Option('Maxson', "--zd", "-z", help = "Define the EDS ZD from your secrets file. This must correlate with your idcs point selection(s)."),
    workspace: str = typer.Option(WorkspaceManager.identify_default_workspace_name(),"--workspace","-w", help = "Provide the name of the workspace you want to use, for the secrets.yaml credentials and for the timezone config. If a start time is not provided, the workspace queries can checked for the most recent successful timestamp. ")
    ):
    """
    Show a curve for a sensor over time.
    """
    #from dateutil import parser
    # Local imports keep heavy GUI/API deps out of CLI startup.
    # NOTE(review): imports mix `pipeline.` and `src.pipeline.` roots —
    # verify both resolve to the same package in this layout.
    import pendulum
    from pipeline.api.eds import EdsClient, load_historic_data
    from pipeline import helpers
    from pipeline.queriesmanager import QueriesManager
    from src.pipeline.plotbuffer import PlotBuffer
    from src.pipeline import gui_fastapi_plotly_live
    from src.pipeline import environment

    # "stiles" is the colloquial name for the WWTF ZD.
    if zd.lower() == "stiles":
        zd = "WWTF"

    # Map ZD to the iess suffix appended to each idcs point name.
    if zd == "Maxson":
        idcs_to_iess_suffix = ".UNIT0@NET0"
    elif zd == "WWTF":
        idcs_to_iess_suffix = ".UNIT1@NET1"
    else:
        # assumption
        idcs_to_iess_suffix = ".UNIT0@NET0"
    iess_list = [x+idcs_to_iess_suffix for x in idcs]

    wm = WorkspaceManager(workspace)
    secrets_dict = SecretConfig.load_config(secrets_file_path = wm.get_secrets_file_path())

    # NOTE(review): .get("url") may be None → AttributeError on rstrip;
    # assumes eds_apis.<zd>.url always exists in the secrets file.
    base_url = secrets_dict.get("eds_apis", {}).get(zd, {}).get("url").rstrip("/")
    session = EdsClient.login_to_session(api_url = base_url,
                                         username = secrets_dict.get("eds_apis", {}).get(zd, {}).get("username"),
                                         password = secrets_dict.get("eds_apis", {}).get(zd, {}).get("password"))
    # Stash connection metadata on the session for downstream calls.
    session.base_url = base_url
    session.zd = secrets_dict.get("eds_apis", {}).get(zd, {}).get("zd")
    queries_manager = QueriesManager(wm)

    # Missing bounds fall back to last-success (start) / now rounded (end).
    if starttime is None:
        # back_to_last_success = True
        dt_start = queries_manager.get_most_recent_successful_timestamp(api_id=zd)
    else:
        dt_start = pendulum.parse(starttime, strict=False)
    if endtime is None:
        dt_finish = helpers.get_now_time_rounded(wm)
    else:
        dt_finish = pendulum.parse(endtime, strict=False)

    # Should automatically choose time step granularity based on time length; map

    results = load_historic_data(queries_manager, wm, session, iess_list, dt_start, dt_finish)

    # Flatten the per-point row lists into the live plot buffer.
    data_buffer = PlotBuffer()
    for idx, rows in enumerate(results):
        for row in rows:
            label = f"{row.get('rjn_entityid')} ({row.get('units')})"
            ts = helpers.iso(row.get("ts"))
            av = row.get("value")
            data_buffer.append(label, ts, av)

    # Prefer matplotlib when available; otherwise serve a Plotly page.
    if not environment.matplotlib_enabled():
        gui_fastapi_plotly_live.run_gui(data_buffer)
    else:
        from pipeline import gui_mpl_live
        gui_mpl_live.run_gui(data_buffer)
163
@app.command()
def list_workspaces():
    """
    List all available workspaces detected in the workspaces folder.
    """
    # Resolve the default workspace first; a manager built from it knows
    # where the workspaces folder lives.
    default_name = WorkspaceManager.identify_default_workspace_name()
    manager = WorkspaceManager(default_name)
    typer.echo("📦 Available workspaces:")
    for workspace_name in manager.get_all_workspaces_names():
        typer.echo(f" - {workspace_name}")
177
@app.command()
def demo_rjn_ping():
    """
    Demo function to ping RJN service.

    Logs in with the default workspace's RJN credentials, then pings the
    base URL. Cleanup: dropped the unused `helpers` import and the unused
    `response` binding.
    """
    from src.pipeline.api.rjn import RjnClient
    from src.pipeline.calls import call_ping
    from src.pipeline.env import SecretConfig
    from src.pipeline.workspace_manager import WorkspaceManager
    import logging

    logger = logging.getLogger(__name__)
    workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)

    secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())
    rjn_secrets = secrets_dict.get("contractor_apis", {}).get("RJN", {})
    # NOTE(review): .get("url") may be None → AttributeError on rstrip;
    # assumes the secrets file always defines contractor_apis.RJN.url.
    base_url = rjn_secrets.get("url").rstrip("/")
    session = RjnClient.login_to_session(api_url=base_url,
                                         client_id=rjn_secrets.get("client_id"),
                                         password=rjn_secrets.get("password"))
    if session is None:
        logger.warning("RJN session not established. Skipping RJN-related data transmission.\n")
        return
    else:
        logger.info("RJN session established successfully.")
    session.base_url = base_url
    call_ping(session.base_url)  # ping for its side effect; result unused
206
@app.command()
def ping_rjn_services():
    """
    Ping all RJN services found in the secrets configuration.

    Scans the default workspace's secrets for URLs and pings any whose
    text contains "rjn". Cleanup: removed the unused `sessions` dict.
    """
    from src.pipeline.calls import find_urls, call_ping
    from src.pipeline.env import SecretConfig
    from src.pipeline.workspace_manager import WorkspaceManager
    import logging

    logger = logging.getLogger(__name__)
    workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)

    secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())

    url_set = find_urls(secrets_dict)
    for url in url_set:
        if "rjn" in url.lower():
            print(f"ping url: {url}")
            call_ping(url)
230
@app.command()
def ping_eds_services():
    """
    Ping all EDS services found in the secrets configuration.

    Scans the default workspace's secrets for URLs and pings those on the
    EDS subnet. Cleanup: removed the unused `sessions` dict.
    """
    from src.pipeline.calls import find_urls, call_ping
    from src.pipeline.env import SecretConfig
    from src.pipeline.workspace_manager import WorkspaceManager
    import logging

    logger = logging.getLogger(__name__)
    workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)

    secrets_dict = SecretConfig.load_config(secrets_file_path=workspace_manager.get_secrets_file_path())

    url_set = find_urls(secrets_dict)
    typer.echo(f"Found {len(url_set)} URLs in secrets configuration.")
    logger.info(f"url_set: {url_set}")
    for url in url_set:
        # NOTE(review): EDS hosts are matched by a hard-coded subnet prefix;
        # confirm this covers all deployed EDS endpoints.
        if "172.19.4" in url.lower():
            print(f"ping url: {url}")
            call_ping(url)
256
@app.command()
def daemon_runner_main():
    """
    Run the daemon_runner script from the eds_to_rjn workspace.
    """
    # Import lazily so the workspace module only loads when invoked.
    from workspaces.eds_to_rjn.scripts import daemon_runner

    daemon_runner.main()
265
@app.command()
def daemon_runner_once():
    """
    Run the daemon_runner script from the eds_to_rjn workspace.
    """
    # Import lazily so the workspace module only loads when invoked;
    # runs a single hourly-trend pass rather than the full daemon loop.
    from workspaces.eds_to_rjn.scripts import daemon_runner

    daemon_runner.run_hourly_tabular_trend_eds_to_rjn()
274
@app.command()
def help(ctx: typer.Context):
    """
    Show help information.
    """
    # Fix: typer.Typer has no get_help() method, so app.get_help() raised
    # AttributeError at runtime. Use the Click context Typer injects; the
    # parent context (the app group) carries the top-level help text.
    typer.echo(ctx.parent.get_help() if ctx.parent is not None else ctx.get_help())
281
if __name__ == "__main__":
    # Allow invoking the CLI directly, e.g. `python -m src.pipeline.cli`.
    app()
@@ -0,0 +1,22 @@
1
+ '''
2
+ Title: configurationmanager.py
3
+ Author: George Clayton Bennett
4
+ Created : 05 June 2025
5
+
6
+ Purpose: Modularized file-based configuration via a Singleton class. In this case, configuration is just credentials.
7
+
8
+ Attributes:
9
+ - Load default config values from a TOML file
10
+ - No fallbacks for secret.yaml files.
11
+ - Tracking history of changes to allow undo functionality if values are changed from the default (unexpected in this case).
12
+ '''
13
+
14
+
15
+ import toml
16
+ # import os
17
+ # from collections import defaultdict
18
+
19
class ConfigurationManager:
    # NOTE(review): stub — the _instance attribute suggests a planned
    # singleton (per the module docstring), but no loading, history, or
    # undo logic exists yet.

    def __init__(self):
        # Placeholder for a future singleton instance handle.
        self._instance = None
pipeline/decorators.py ADDED
@@ -0,0 +1,13 @@
1
+ import logging
2
+ import inspect
3
+ from functools import wraps
4
+
5
def log_function_call(level=logging.DEBUG):
    """Decorator factory: log every call to the wrapped function at *level*,
    including its positional and keyword arguments, then delegate."""
    def decorator(func):
        qualified_name = func.__qualname__

        @wraps(func)
        def wrapper(*args, **kwargs):
            logging.log(level, f"Start: {qualified_name}() args={args} kwargs={kwargs}")
            return func(*args, **kwargs)

        return wrapper
    return decorator
pipeline/env.py ADDED
@@ -0,0 +1,61 @@
1
+ #env.__main__.py
2
+
3
+ import yaml
4
+ from src.pipeline.workspace_manager import WorkspaceManager
5
+
6
+ '''
7
+ migrate this to ConfigurationManager
8
+ '''
9
+
10
class SecretConfig:
    """Thin wrapper around a secrets mapping parsed from a YAML file."""

    def __init__(self, config):
        # Nested dict of sections -> key/value pairs, as parsed from YAML.
        self.config = config

    @staticmethod
    def load_config(secrets_file_path):
        """Read *secrets_file_path* and return the parsed YAML as a dict."""
        with open(secrets_file_path, 'r') as secrets_file:
            return yaml.safe_load(secrets_file)

    def print_config(self):
        """Dump every section header and its key/value pairs to stdout."""
        for section, values in self.config.items():
            print(f"[{section}]")
            for key, val in values.items():
                print(f"{key} = {val}")
25
+
26
+
27
def find_urls(config, url_set=None):
    '''determine all values with the key "url" in a config file.'''
    # One shared accumulator threaded through the recursion; a fresh set
    # is created only on the outermost call.
    found = set() if url_set is None else url_set

    if isinstance(config, dict):
        for key, value in config.items():
            if key == "url":
                found.add(value)
            else:
                find_urls(value, found)
    elif isinstance(config, list):
        for element in config:
            find_urls(element, found)

    # Scalars (str, int, ...) fall through untouched.
    return found
43
+
44
def demo_secrets():
    """
    Load and wrap the secrets for the default workspace.

    The default SecretConfig.load_config() call should load from the
    default workspace as defined by the configuration file in the
    workspaces directory, called default-workspace.toml.
    - Clayton Bennett 26 April 2025.
    However this call can also be made if another project is made the active project.
    """
    workspace_name = WorkspaceManager.identify_default_workspace_name()
    workspace_manager = WorkspaceManager(workspace_name)
    config = SecretConfig.load_config(secrets_file_path = workspace_manager.get_secrets_file_path())
    secrets = SecretConfig(config)
    return secrets
57
+
58
if __name__ == "__main__":
    # call from the root directory using: poetry run python -m pipeline.env
    # Loads the default workspace's secrets and prints them for inspection.
    secrets = demo_secrets()
    secrets.print_config()
+ secrets.print_config()