zscams 2.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. zscams/__init__.py +3 -0
  2. zscams/__main__.py +43 -0
  3. zscams/agent/__init__.py +93 -0
  4. zscams/agent/certificates/.gitkeep +0 -0
  5. zscams/agent/config.yaml +103 -0
  6. zscams/agent/configuration/config.j2 +103 -0
  7. zscams/agent/configuration/service.j2 +12 -0
  8. zscams/agent/keys/autoport.key +27 -0
  9. zscams/agent/src/__init__.py +0 -0
  10. zscams/agent/src/core/__init__.py +1 -0
  11. zscams/agent/src/core/backend/bootstrap.py +76 -0
  12. zscams/agent/src/core/backend/client.py +281 -0
  13. zscams/agent/src/core/backend/exceptions.py +10 -0
  14. zscams/agent/src/core/backend/update_machine_info.py +16 -0
  15. zscams/agent/src/core/prerequisites.py +36 -0
  16. zscams/agent/src/core/service_health_check.py +49 -0
  17. zscams/agent/src/core/services.py +86 -0
  18. zscams/agent/src/core/tunnel/__init__.py +144 -0
  19. zscams/agent/src/core/tunnel/tls.py +56 -0
  20. zscams/agent/src/core/tunnels.py +55 -0
  21. zscams/agent/src/services/__init__.py +0 -0
  22. zscams/agent/src/services/reverse_ssh.py +73 -0
  23. zscams/agent/src/services/ssh_forwarder.py +75 -0
  24. zscams/agent/src/services/system_monitor.py +264 -0
  25. zscams/agent/src/support/__init__.py +0 -0
  26. zscams/agent/src/support/cli.py +50 -0
  27. zscams/agent/src/support/configuration.py +86 -0
  28. zscams/agent/src/support/filesystem.py +63 -0
  29. zscams/agent/src/support/logger.py +88 -0
  30. zscams/agent/src/support/mac.py +18 -0
  31. zscams/agent/src/support/network.py +49 -0
  32. zscams/agent/src/support/openssl.py +114 -0
  33. zscams/agent/src/support/os.py +138 -0
  34. zscams/agent/src/support/ssh.py +24 -0
  35. zscams/agent/src/support/yaml.py +37 -0
  36. zscams/deps.py +86 -0
  37. zscams/lib/.gitkeep +0 -0
  38. zscams-2.0.4.dist-info/METADATA +114 -0
  39. zscams-2.0.4.dist-info/RECORD +41 -0
  40. zscams-2.0.4.dist-info/WHEEL +4 -0
  41. zscams-2.0.4.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,55 @@
1
+ """
2
+ Utilities to start TLS tunnels asynchronously
3
+
4
+ - Supports multiple forwards
5
+ - Signals readiness via asyncio.Event
6
+ - Auto-reconnect and logging
7
+ """
8
+
9
+ import asyncio
10
+ from zscams.agent.src.core.tunnel import start_tunnel
11
+ from zscams.agent.src.support.logger import get_logger
12
+
13
+ logger = get_logger("tunnel_launcher")
14
+
15
+
16
async def start_all_tunnels(
    forwards_cfg_list, remote_host, remote_port, ssl_context, reconnect_max_delay=60
):
    """
    Start all TLS tunnels concurrently and wait until each signals readiness.

    Args:
        forwards_cfg_list (list): List of forward dictionaries from config.
            Each dict must contain:
                - local_port
                - sni_hostname
        remote_host (str): Remote host to connect to
        remote_port (int): Remote port to connect to
        ssl_context (ssl.SSLContext): SSL context for the TLS tunnel
        reconnect_max_delay (int): Maximum reconnect backoff (seconds) passed
            to each tunnel. Previously hard-coded to 60; kept as the default
            for backward compatibility.

    Returns:
        list of asyncio.Tasks: tunnel tasks running indefinitely
    """
    tasks = []
    ready_events = []

    for forward_cfg in forwards_cfg_list:
        # One readiness event per tunnel; start_tunnel sets it once connected.
        ready_event = asyncio.Event()
        ready_events.append(ready_event)
        tasks.append(
            asyncio.create_task(
                start_tunnel(
                    local_port=forward_cfg["local_port"],
                    remote_host=remote_host,
                    remote_port=remote_port,
                    sni_hostname=forward_cfg["sni_hostname"],
                    ssl_context=ssl_context,
                    reconnect_max_delay=reconnect_max_delay,
                    ready_event=ready_event,
                )
            )
        )

    # Block until every tunnel has signaled readiness.
    await asyncio.gather(*(event.wait() for event in ready_events))
    logger.info("[*] %d tunnel(s) ready", len(tasks))
    return tasks
File without changes
@@ -0,0 +1,73 @@
1
import asyncio
import json
import os
import sys

from zscams.agent.src.support.logger import get_logger

logger = get_logger("autossh_service")

# Service parameters are injected by the service manager through the
# SERVICE_PARAMS environment variable as a JSON document.
params_env = os.environ.get("SERVICE_PARAMS")
if not params_env:
    logger.error("SERVICE_PARAMS environment variable not set")
    sys.exit(1)

params = json.loads(params_env)

# Tunnel settings, with defaults matching the packaged configuration.
LOCAL_PORT = params.get("local_port", 4422)            # local SSH port to dial
SERVER_SSH_USER = params.get("server_ssh_user", "ssh_user")
REVERSE_PORT = params.get("reverse_port", 2222)        # remote listen port for -R
PRIVATE_KEY = params.get("private_key")                # optional path to RSA key
SSH_OPTIONS = params.get("ssh_options", [])            # extra ssh CLI arguments
CHECK_INTERVAL = 120                                   # seconds (currently unused)
23
+
24
+
25
async def run():
    """
    Maintain a reverse SSH tunnel (ssh -R) forever, restarting it with
    exponential backoff whenever it exits or fails to start.

    Connection settings come from the module-level constants parsed out
    of SERVICE_PARAMS.
    """
    backoff = 1
    max_backoff = 60
    # How long the tunnel must stay up before we consider the connection
    # healthy and restart the backoff sequence for the next outage.
    stable_after = 30.0

    ssh_cmd = [
        "ssh",
        "-p",
        f"{LOCAL_PORT}",
        "-R",
        f"*:{REVERSE_PORT}:localhost:22",
        f"{SERVER_SSH_USER}@localhost",
        "-N",  # no remote command: tunnel only
    ]

    # Use key file if provided
    if PRIVATE_KEY:
        ssh_cmd += ["-i", PRIVATE_KEY]

    # Add additional SSH options
    ssh_cmd += SSH_OPTIONS

    logger.info(f"Starting reverse SSH tunnel: {' '.join(ssh_cmd)}")

    loop = asyncio.get_running_loop()
    while True:
        try:
            started_at = loop.time()
            process = await asyncio.create_subprocess_exec(
                *ssh_cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
            logger.info(f"SSH tunnel started (PID={process.pid})")
            stdout, stderr = await process.communicate()
            if stdout:
                logger.info(stdout.decode())
            if stderr:
                logger.warning(stderr.decode())
            returncode = process.returncode
            logger.warning(f"SSH tunnel exited with code {returncode}")
            # BUGFIX: the backoff previously only ever grew.  If the
            # tunnel stayed up long enough, treat the next drop as a
            # fresh outage and reconnect quickly again.
            if loop.time() - started_at >= stable_after:
                backoff = 1
        except Exception as e:
            logger.error(f"SSH tunnel failed: {e}")

        logger.info(f"Reconnecting in {backoff} seconds...")
        await asyncio.sleep(backoff)
        backoff = min(backoff * 2, max_backoff)
66
+
67
+
68
+ if __name__ == "__main__":
69
+ try:
70
+ asyncio.run(run())
71
+ except KeyboardInterrupt:
72
+ logger.info("Exiting autossh_service.py")
73
+ sys.exit(0)
@@ -0,0 +1,75 @@
1
import asyncio
import json
import os
import sys

from zscams.agent.src.support.logger import get_logger

logger = get_logger("autossh_service")

# Service parameters are injected by the service manager through the
# SERVICE_PARAMS environment variable as a JSON document.
params_env = os.environ.get("SERVICE_PARAMS")
if not params_env:
    logger.error("SERVICE_PARAMS environment variable not set")
    sys.exit(1)

params = json.loads(params_env)

# Port-forward settings, with defaults matching the packaged configuration.
FORWARDER_PORT = params.get("forwarder_port", 4422)    # local listen port for -L
LOCAL_PORT = params.get("local_port", 4422)            # local SSH port to dial
REMOTE_PORT = params.get("remote_port", 4422)          # destination port
REMOTE_HOST = params.get("remote_host", "localhost")   # destination host
SERVER_SSH_USER = params.get("server_ssh_user", "ssh_user")
PRIVATE_KEY = params.get("private_key")                # optional path to RSA key
SSH_OPTIONS = params.get("ssh_options", [])            # extra ssh CLI arguments
CHECK_INTERVAL = 120                                   # seconds (currently unused)
25
+
26
+
27
async def run():
    """
    Maintain a local SSH port forward (ssh -L) forever, restarting it
    with exponential backoff whenever it exits or fails to start.

    Connection settings come from the module-level constants parsed out
    of SERVICE_PARAMS.
    """
    backoff = 1
    max_backoff = 60
    # How long the forward must stay up before we consider the
    # connection healthy and restart the backoff sequence.
    stable_after = 30.0

    ssh_cmd = [
        "ssh",
        "-p",
        f"{LOCAL_PORT}",
        "-L",
        f"{FORWARDER_PORT}:{REMOTE_HOST}:{REMOTE_PORT}",
        f"{SERVER_SSH_USER}@localhost",
        "-N",  # No command execution
    ]

    # Use key file if provided
    if PRIVATE_KEY:
        ssh_cmd += ["-i", PRIVATE_KEY]

    # Add additional SSH options
    ssh_cmd += SSH_OPTIONS

    # BUGFIX: the old message said "reverse SSH tunnel" (copy-paste from
    # reverse_ssh.py) although this process sets up a -L local forward.
    logger.info(f"Starting local SSH port forward: {' '.join(ssh_cmd)}")

    loop = asyncio.get_running_loop()
    while True:
        try:
            started_at = loop.time()
            process = await asyncio.create_subprocess_exec(
                *ssh_cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
            logger.info(f"SSH tunnel started (PID={process.pid})")
            stdout, stderr = await process.communicate()
            if stdout:
                logger.info(stdout.decode())
            if stderr:
                logger.warning(stderr.decode())
            returncode = process.returncode
            logger.warning(f"SSH tunnel exited with code {returncode}")
            # BUGFIX: reset backoff after a sustained healthy connection
            # so a drop hours later reconnects quickly again.
            if loop.time() - started_at >= stable_after:
                backoff = 1
        except Exception as e:
            logger.error(f"SSH tunnel failed: {e}")

        logger.info(f"Reconnecting in {backoff} seconds...")
        await asyncio.sleep(backoff)
        backoff = min(backoff * 2, max_backoff)
68
+
69
+
70
+ if __name__ == "__main__":
71
+ try:
72
+ asyncio.run(run())
73
+ except KeyboardInterrupt:
74
+ logger.info("Exiting autossh_service.py")
75
+ sys.exit(0)
@@ -0,0 +1,264 @@
1
import asyncio
import os
import sys
import datetime
import psutil
import logging
import logging.handlers
import json
import socket
import platform

from zscams.agent.src.support.logger import get_logger
from http.client import BadStatusLine, HTTPConnection, HTTPException

logger = get_logger("system_monitor")

# Service parameters are injected as a JSON document via SERVICE_PARAMS.
params_env = os.environ.get("SERVICE_PARAMS")
if not params_env:
    logger.error("SERVICE_PARAMS environment variable not set")
    sys.exit(1)

params = json.loads(params_env)

# -----------------------------
# Configuration
# -----------------------------
# NOTE(review): despite the names, send_json_log() below POSTs these
# over HTTP, not the syslog protocol — confirm the collector type.
SYSLOG_HOST = params.get("remote_host", "localhost")   # remote collector host
SYSLOG_PORT = params.get("remote_port", 514)           # collector TCP port
EQUIPMENT_NAME = params.get("equipment_name", "connector")
EQUIPMENT_TYPE = params.get("equipment_type", "zpa")
SERVICE_NAME = params.get("service_name", "Zscaler-AppConnector")
HOSTNAME = platform.node()
31
+
32
+
33
+
34
+
35
def network():
    """
    Collect per-interface network I/O counters.

    Returns:
        dict: interface name -> {"in": ..., "out": ...} with byte,
        packet, error, and drop counters.  The loopback interface is
        excluded from the result.
    """
    counters = psutil.net_io_counters(pernic=True)
    ifaces = {
        name: {
            "in": {
                "bytes": c.bytes_recv,
                "packets": c.packets_recv,
                "errors": getattr(c, "errin", 0),
                "dropped": getattr(c, "dropin", 0),
            },
            "out": {
                "bytes": c.bytes_sent,
                "packets": c.packets_sent,
                "errors": getattr(c, "errout", 0),
                "dropped": getattr(c, "dropout", 0),
            },
        }
        for name, c in counters.items()
    }
    # Drop the loopback interface; it is not useful for monitoring.
    ifaces.pop("lo", None)
    return ifaces
59
+
60
+
61
def memory():
    """
    Report virtual-memory and swap usage.

    Returns:
        dict: totals, free, and used figures (bytes and fraction), plus
        a nested "swap" section, in the metricbeat-style layout the
        collector expects.
    """
    vm = psutil.virtual_memory()
    sw = psutil.swap_memory()
    used_bytes = vm.total - vm.available
    return {
        "actual": {
            "free": vm.available,
            "used": {
                "pct": round(used_bytes / vm.total, 4),
                "bytes": used_bytes,
            },
        },
        "total": vm.total,
        "used": {
            # psutil's own percentage, as a 0..1 fraction.
            "pct": round(vm.percent / 100, 4),
            "bytes": used_bytes,
        },
        "free": vm.available,
        "swap": {
            "total": sw.total,
            "used": {
                "pct": sw.percent / 100,
                "bytes": sw.used,
            },
            "free": sw.free,
        },
    }
93
+
94
+
95
def cpu():
    """
    Report CPU time fractions per category plus the core count.

    NOTE(review): psutil.cpu_times() returns *cumulative* times since
    boot, so these fractions are lifetime averages, not an
    instantaneous load sample — confirm that is the intended metric.
    """
    times = psutil.cpu_times()
    total = sum(times)

    def frac(value):
        # Share of total CPU time, rounded to 4 decimal places.
        return round(value / total, 4)

    return {
        "total": {"pct": frac(total - times.idle)},
        "system": {"pct": frac(times.system)},
        "user": {"pct": frac(times.user)},
        "idle": {"pct": frac(times.idle)},
        "cores": psutil.cpu_count(),
        # These fields are platform-dependent; default to 0 when absent.
        "iowait": {"pct": frac(getattr(times, "iowait", 0))},
        "irq": {"pct": frac(getattr(times, "irq", 0))},
        "softirq": {"pct": frac(getattr(times, "softirq", 0))},
        "nice": {"pct": frac(getattr(times, "nice", 0))},
        "steal": {"pct": frac(getattr(times, "steal", 0))},
    }
116
+
117
+
118
def load():
    """
    Report 1/5/15-minute load averages, absolute and per-core.

    Returns:
        dict: raw load values, the core count, and load normalized by
        the number of cores under "norm".
    """
    one, five, fifteen = psutil.getloadavg()
    # Guard against cpu_count() returning None (or 0) on odd platforms.
    cores = float(psutil.cpu_count() or 1.0)
    return {
        "1": one,
        "5": five,
        "15": fifteen,
        "cores": cores,
        "norm": {
            "1": one / cores,
            "5": five / cores,
            "15": fifteen / cores,
        },
    }
137
+
138
+
139
def uptime():
    """
    Return the system uptime in seconds.

    BUGFIX: the previous implementation returned psutil.boot_time()
    itself — an absolute epoch timestamp — under "duration.seconds",
    which is not a duration.  Report elapsed seconds since boot instead.
    """
    import time  # local import: keeps this fix self-contained

    return {"duration": {"seconds": int(time.time() - psutil.boot_time())}}
144
+
145
+
146
def disk():
    """
    Report root-filesystem usage.

    Returns:
        dict: total, used, and free bytes for "/".
    """
    # Renamed local: the original shadowed the function name itself.
    usage = psutil.disk_usage("/")
    return {"total": usage.total, "used": usage.used, "free": usage.free}
153
+
154
+
155
def process(process_names=None):
    """
    Collect metrics for running processes whose name matches.

    Args:
        process_names (list, optional): process names to match.
            Defaults to ["zpa-connector", "zpa-service-edge"].

    Returns:
        list[dict]: per-process memory, CPU, command line, and status.
    """
    if process_names is None:
        process_names = ["zpa-connector", "zpa-service-edge"]
    wanted = set(process_names)

    attrs = [
        "name",
        "pid",
        "ppid",
        "username",
        "memory_info",
        "cpu_times",
        "cmdline",
        "status",
        "create_time",
    ]
    results = []
    for proc in psutil.process_iter(attrs):
        try:
            info = proc.info
            if info["name"] not in wanted:
                continue
            mem = info["memory_info"]
            # NOTE(review): cpu_percent(interval=0.1) blocks ~0.1 s per
            # matched process while sampling — fine for a few matches.
            cpu_pct = proc.cpu_percent(interval=0.1)
            results.append(
                {
                    "name": info["name"],
                    "pid": info["pid"],
                    "ppid": info["ppid"],
                    "username": info["username"],
                    "memory": {
                        "rss": mem.rss,
                        "vms": mem.vms,
                        "percent": round(proc.memory_percent(), 2),
                    },
                    "cpu": {
                        "percent": cpu_pct,
                        "start_time": datetime.datetime.fromtimestamp(
                            info["create_time"]
                        ).isoformat(),
                    },
                    "cmdline": info["cmdline"],
                    "status": info["status"],
                }
            )
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # Process vanished or is inaccessible; skip it.
            continue
    return results
206
+
207
+
208
def collect_all():
    """
    Gather every metric group into a single dictionary.
    """
    collectors = {
        "network": network,
        "memory": memory,
        "cpu": cpu,
        "load": load,
        "uptime": uptime,
        "disk": disk,
        "processes": process,
    }
    return {key: collect() for key, collect in collectors.items()}
221
+
222
+
223
def log():
    """
    Build the full log payload: collected metrics plus beat/host metadata.
    """
    payload = {"system": collect_all()}
    payload["beat"] = {
        "name": EQUIPMENT_NAME,
        "type": EQUIPMENT_TYPE,
        "hostname": HOSTNAME,
    }
    payload["host"] = HOSTNAME
    return payload
233
+
234
+
235
+
236
+ # -----------------------------
237
+ # Helper function to log JSON
238
+ # -----------------------------
239
async def send_json_log(payload: dict):
    """
    POST *payload* as JSON to the remote collector.

    NOTE(review): despite the old "syslog via TCP" comment, this speaks
    plain HTTP (POST /) to SYSLOG_HOST:SYSLOG_PORT, and the request is
    blocking I/O inside a coroutine — confirm the collector is really an
    HTTP endpoint and consider moving this off the event loop.

    Raises:
        Propagates any http.client/socket error to the caller.
    """
    conn = HTTPConnection(SYSLOG_HOST, SYSLOG_PORT, timeout=10)
    try:
        conn.request(
            "POST", "/", json.dumps(payload), headers={"Content-type": "application/json"}
        )
        # BUGFIX: read the response so the request is fully completed
        # (and server errors surface) before tearing down the socket.
        conn.getresponse().read()
    finally:
        # BUGFIX: always release the connection, even when the POST raises.
        conn.close()
    logger.info("Metrics collected and sent successfully.")
249
+
250
+
251
async def schedule_task(interval):
    """
    Periodically collect metrics and ship them, forever.

    Args:
        interval (int | float): seconds to sleep between collections.
    """
    while True:
        try:
            await send_json_log(log())
        except Exception:
            # BUGFIX: errors used to go to stdout via print(); route them
            # through the logger with a full traceback instead.
            logger.exception("Failed to collect/send metrics")
        await asyncio.sleep(interval)
258
+
259
+ if __name__ == "__main__":
260
+ try:
261
+ asyncio.run(schedule_task(30))
262
+ except KeyboardInterrupt:
263
+ logger.info("Exiting system_monitor.py")
264
+ sys.exit(0)
File without changes
@@ -0,0 +1,50 @@
1
+ from typing import Optional, cast
2
+ from zscams.agent.src.support.logger import get_logger
3
+
4
+ logger = get_logger("bootstrap")
5
+
6
+
7
class RequiredFieldException(Exception):
    """Raised when a required prompt value is left empty."""
9
+
10
+
11
class InvalidFieldException(Exception):
    """Raised when a prompt value fails validation (e.g. wrong prefix)."""
13
+
14
+
15
def prompt(
    name: str,
    message: str,
    required=False,
    startswith: Optional[str] = None,
    fail_on_error=False,
    retries_count=3,
):
    """
    Prompt the user for a value on stdin and validate it.

    Args:
        name: Field name used in error messages.
        message: Prompt text; "(Optional)" is appended when not required.
        required: When True, an empty answer is rejected.
        startswith: When given, the answer must start with this prefix.
        fail_on_error: When True, raise immediately instead of re-asking.
        retries_count: Number of re-asks allowed before giving up.

    Returns:
        str: the validated value (possibly empty when not required).

    Raises:
        RequiredFieldException: required value still missing after retries.
        InvalidFieldException: prefix check still failing after retries.
    """
    _message = message
    if not required:
        _message += " (Optional)"

    _message += ": "

    val = input(_message)

    def _retry(error: Exception, log_message: str):
        # BUGFIX: the original recursion never passed retries_count down,
        # so the counter reset to its default on every attempt and
        # fail_on_error could never become True — the prompt looped
        # forever on persistent bad input.  Propagate the decremented
        # count and escalate to an exception once it is exhausted.
        if fail_on_error:
            raise error
        logger.error(log_message)
        remaining = retries_count - 1
        return prompt(
            name,
            message,
            required,
            startswith,
            fail_on_error=remaining <= 0,
            retries_count=remaining,
        )

    if required and not val:
        return _retry(
            RequiredFieldException(f"Missing {name}"), f"{name} is required.."
        )

    if startswith is not None and not val.startswith(startswith):
        error_message = f"The value has to start with {startswith}"
        return _retry(InvalidFieldException(error_message), error_message)

    return val
@@ -0,0 +1,86 @@
1
+ """
2
+ Configuration loader module
3
+ """
4
+
5
+ import os
6
+ from pathlib import Path
7
+ from typing import TypedDict
8
+ import json
9
+ import yaml
10
+ from zscams.agent.src.support.yaml import YamlIndentedListsDumper, resolve_placeholders
11
+
12
+
13
+ config = {}
14
+
15
+ ROOT_PATH = Path(__file__).parent.parent.parent
16
+
17
+ CONFIG_PATH = os.path.join(ROOT_PATH.absolute(), "config.yaml")
18
+
19
+
20
class RemoteConfig(TypedDict):
    """Typed schema for the "remote" section of the configuration."""

    host: str
    port: str  # NOTE(review): kept as str here — confirm consumers expect that
    verify_cert: bool
    client_key: str
    ca_cert: str
    ca_chain: str
29
+
30
def reinitialize(**kwargs):
    """
    Rebuild the configuration from the packaged template.

    Loads agent/configuration/config.j2 as YAML, substitutes its
    placeholders with *kwargs*, and persists the result through
    override_config().
    """
    import zscams

    package_dir = Path(zscams.__file__).resolve().parent
    template_path = package_dir / "agent" / "configuration" / "config.j2"
    with open(template_path) as template_file:
        template = yaml.safe_load(template_file)
    resolve_placeholders(template, kwargs)
    override_config(template)
38
+
39
def save_config(data):
    """Persist *data* to CONFIG_PATH as YAML."""
    with open(CONFIG_PATH, "w", encoding="utf-8") as config_file:
        yaml.safe_dump(data, config_file, default_flow_style=False)
43
+
44
+
45
def load_config():
    """
    Read and parse the YAML configuration file at CONFIG_PATH.

    Returns:
        dict: Configuration dictionary containing remote settings and forwards.
    """
    with open(CONFIG_PATH, "r", encoding="utf-8") as config_file:
        return yaml.safe_load(config_file)
55
+
56
+
57
def get_config():
    """
    Return the configuration, loading it from disk on first use.

    BUGFIX: the previous version returned load_config() without storing
    the result, so the module-level `config` cache was never populated
    and every call re-read the file from disk.

    Returns:
        dict: Configuration dictionary containing remote settings and forwards.
    """
    global config
    if not config:
        config = load_config()
    return config
69
+
70
+
71
def override_config(new_config: dict):
    """
    Replace the in-memory configuration and persist it to disk.

    Args:
        new_config (dict): New configuration dictionary to override the existing one.
    """
    # BUGFIX: without `global`, the assignment only bound a local name
    # and the module-level cache (read by get_config) was never updated.
    global config
    config = new_config

    with open(CONFIG_PATH, "w", encoding="utf-8") as file:
        yaml.dump(
            config,
            file,
            Dumper=YamlIndentedListsDumper,
            default_flow_style=False,
            explicit_start=True,
        )
@@ -0,0 +1,63 @@
1
+ """
2
+ Path utilities for TLS Tunnel Client
3
+ """
4
+
5
+ import os
6
+ from pathlib import Path
7
+ from typing import Optional
8
+ from zscams.agent.src.support.logger import get_logger
9
+
10
+ logger = get_logger("FileSystem")
11
+
12
def resolve_path(path: Optional[str], base_dir: Optional[str] = None) -> Optional[str]:
    """
    Resolve *path* against *base_dir* when it is relative.

    Args:
        path: Absolute or relative path, or None.
        base_dir: Base directory for relative paths; when falsy,
            relative paths are returned unchanged.

    Returns:
        The resolved path, or None when *path* is None.
    """
    if path is None:
        return None
    needs_join = base_dir and not os.path.isabs(path)
    return os.path.join(base_dir, path) if needs_join else path
29
+
30
+
31
def ensure_dir(path: str):
    """Create *path*, including any missing parents; no-op if it exists."""
    os.makedirs(path, exist_ok=True)
35
+
36
+
37
def is_file_exists(path, logger):
    """
    Return True when *path* exists, logging the outcome.

    The *logger* parameter intentionally shadows the module logger so
    callers can direct the messages to their own logger.
    """
    exists = os.path.exists(path)
    if exists:
        logger.debug(f"File exists: {path}")
    else:
        logger.error(f"File not found: {path}")
    return exists
45
+
46
+
47
+ def append_to_file(path: str | Path, content: str):
48
+ try:
49
+ if isinstance(path, str):
50
+ path = Path(path)
51
+
52
+ if not path.parent.exists():
53
+ os.mkdir(path.parent,0o700)
54
+
55
+ with open(path, "a", encoding="utf-8") as file:
56
+ file.write(content)
57
+ except Exception as exception:
58
+ logger.error(exception)
59
+ raise exception
60
+
61
+ def write_to_file(path: str | Path, content: str):
62
+ with open(path, "w", encoding="utf-8") as f:
63
+ f.write(content)