osbot-utils 1.16.0__py3-none-any.whl → 1.20.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- osbot_utils/base_classes/Kwargs_To_Self.py +3 -54
- osbot_utils/base_classes/Type_Safe.py +6 -0
- osbot_utils/context_managers/disable_root_loggers.py +30 -0
- osbot_utils/helpers/CFormat.py +147 -0
- osbot_utils/helpers/CPrint.py +5 -50
- osbot_utils/helpers/Print_Table.py +1 -1
- osbot_utils/helpers/cache_requests/Cache__Requests__Actions.py +23 -0
- osbot_utils/helpers/cache_requests/Cache__Requests__Config.py +32 -0
- osbot_utils/helpers/cache_requests/Cache__Requests__Data.py +105 -0
- osbot_utils/helpers/cache_requests/Cache__Requests__Invoke.py +55 -0
- osbot_utils/helpers/cache_requests/Cache__Requests__Row.py +64 -0
- osbot_utils/helpers/cache_requests/Cache__Requests__Table.py +16 -0
- osbot_utils/helpers/cache_requests/__init__.py +0 -0
- osbot_utils/helpers/cache_requests/flows/flow__Cache__Requests.py +11 -0
- osbot_utils/helpers/flows/Flow.py +145 -0
- osbot_utils/helpers/flows/Task.py +18 -0
- osbot_utils/helpers/flows/__init__.py +0 -0
- osbot_utils/helpers/sqlite/{domains/schemas → cache}/Schema__Table__Requests.py +6 -4
- osbot_utils/helpers/sqlite/cache/Sqlite__Cache__Requests.py +104 -0
- osbot_utils/helpers/sqlite/{domains → cache}/Sqlite__Cache__Requests__Patch.py +10 -8
- osbot_utils/helpers/sqlite/cache/Sqlite__Cache__Requests__Sqlite.py +18 -0
- osbot_utils/helpers/sqlite/cache/Sqlite__Cache__Requests__Table.py +48 -0
- osbot_utils/helpers/sqlite/{domains → cache}/Sqlite__DB__Requests.py +8 -7
- osbot_utils/helpers/sqlite/cache/TestCase__Sqlite__Cache__Requests.py +35 -0
- osbot_utils/helpers/sqlite/cache/__init__.py +0 -0
- osbot_utils/helpers/sqlite/domains/Sqlite__DB__Local.py +6 -2
- osbot_utils/helpers/{SCP.py → ssh/SCP.py} +23 -20
- osbot_utils/helpers/ssh/SSH.py +30 -0
- osbot_utils/helpers/ssh/SSH__Cache__Requests.py +66 -0
- osbot_utils/helpers/ssh/SSH__Execute.py +158 -0
- osbot_utils/helpers/ssh/SSH__Health_Check.py +49 -0
- osbot_utils/helpers/ssh/SSH__Linux.py +106 -0
- osbot_utils/helpers/ssh/SSH__Python.py +48 -0
- osbot_utils/helpers/ssh/TestCase__SSH.py +50 -0
- osbot_utils/helpers/ssh/__init__.py +0 -0
- osbot_utils/helpers/trace/Trace_Call__Print_Lines.py +1 -1
- osbot_utils/testing/Logging.py +15 -5
- osbot_utils/testing/Pytest.py +18 -0
- osbot_utils/utils/Env.py +29 -9
- osbot_utils/utils/Json.py +2 -9
- osbot_utils/utils/Misc.py +17 -16
- osbot_utils/utils/Objects.py +17 -7
- osbot_utils/utils/Python_Logger.py +54 -38
- osbot_utils/utils/Str.py +20 -3
- osbot_utils/utils/Toml.py +33 -0
- osbot_utils/version +1 -1
- {osbot_utils-1.16.0.dist-info → osbot_utils-1.20.0.dist-info}/METADATA +2 -2
- {osbot_utils-1.16.0.dist-info → osbot_utils-1.20.0.dist-info}/RECORD +50 -23
- osbot_utils/helpers/SSH.py +0 -151
- osbot_utils/helpers/sqlite/domains/Sqlite__Cache__Requests.py +0 -214
- {osbot_utils-1.16.0.dist-info → osbot_utils-1.20.0.dist-info}/LICENSE +0 -0
- {osbot_utils-1.16.0.dist-info → osbot_utils-1.20.0.dist-info}/WHEEL +0 -0
osbot_utils/helpers/SSH.py
DELETED
@@ -1,151 +0,0 @@
-from decimal import Decimal
-
-from osbot_utils.base_classes.Kwargs_To_Self import Kwargs_To_Self
-from osbot_utils.context_managers.capture_duration import capture_duration
-from osbot_utils.decorators.lists.group_by import group_by
-from osbot_utils.decorators.lists.index_by import index_by
-from osbot_utils.utils.Dev import pprint
-from osbot_utils.utils.Misc import timestamp_utc_now
-from osbot_utils.utils.Process import start_process
-from osbot_utils.utils.Status import status_error
-
-class SSH(Kwargs_To_Self):
-    ssh_host          : str
-    ssh_key_file      : str
-    ssh_key_user      : str
-    strict_host_check : bool = False
-
-    def exec(self, command):
-        return self.execute_command__return_stdout(command)
-
-    def execute_command(self, command):
-        if self.ssh_host and self.ssh_key_file and self.ssh_key_user and command:    # todo: add check to see if ssh executable exists (this check can be cached)
-            ssh_args = self.execute_command_args(command)
-            with capture_duration() as duration:
-                result = start_process("ssh", ssh_args)                              # execute command using subprocess.run(...)
-            result['duration'] = duration.data()
-            return result
-        return status_error(error='in execute_command not all required vars were setup')
-
-    def execute_ssh_args(self, command=None):
-        ssh_args = []
-        if self.strict_host_check is False:
-            ssh_args += ['-o',
-                         'StrictHostKeyChecking=no']                                 # todo: add support for updating the local hosts file so that we dont need to do this that often
-        if self.ssh_key_file:
-            ssh_args += ['-i', self.ssh_key_file]
-        return ssh_args
-
-    def execute_command_args(self, command=None):
-        ssh_args = self.execute_ssh_args()
-        if self.ssh_host:
-            ssh_args += [self.execute_command_target_host()]
-        if command:
-            ssh_args += [command]
-        return ssh_args
-
-    def execute_command_target_host(self):
-        if self.ssh_key_user:
-            return f'{self.ssh_key_user}@{self.ssh_host}'
-        else:
-            return f'{self.ssh_host}'
-
-    def execute_command__return_stdout(self, command):
-        return self.execute_command(command).get('stdout').strip()
-
-    def execute_command__return_stderr(self, command):
-        return self.execute_command(command).get('stderr').strip()
-
-    @index_by
-    @group_by
-    def execute_command__return_dict(self, command):
-        stdout = self.execute_command(command).get('stdout').strip()
-        return self.parse_stdout_to_dict(stdout)
-
-    # helpers for common linux methods
-
-    def cat(self, path=''):
-        command = f'cat {path}'
-        return self.execute_command__return_stdout(command)
-
-    @index_by
-    def disk_space(self):
-        command           = "df -h"
-        stdout            = self.execute_command__return_stdout(command)
-        stdout_disk_space = stdout.replace('Mounted on', 'Mounted_on')               # todo, find a better way to do this
-        disk_space        = self.parse_stdout_to_dict(stdout_disk_space)
-        return disk_space
-
-    def find(self, path=''):
-        command = f'find {path}'
-        return self.execute_command__return_stdout(command)
-
-    def ls(self, path=''):
-        command = f'ls {path}'
-        ls_raw  = self.execute_command__return_stdout(command)
-        return ls_raw.splitlines()
-
-    def mkdir(self, folder):
-        command = f'mkdir -p {folder}'
-        return self.execute_command__return_stdout(command)
-
-    def memory_usage(self):
-        command          = "free -h"
-        memory_usage_raw = self.execute_command__return_stdout(command)              # todo: add fix for data parsing issue
-        return memory_usage_raw.splitlines()
-
-    def rm(self, path=''):
-        command = f'rm {path}'
-        return self.execute_command__return_stderr(command)
-
-    def running_processes(self, **kwargs):
-        command = "ps aux"
-        return self.execute_command__return_dict(command, **kwargs)
-
-    def system_uptime(self):
-        command    = "uptime"
-        uptime_raw = self.execute_command__return_stdout(command)
-        return uptime_raw.strip()
-
-    def uname(self):
-        return self.execute_command__return_stdout('uname')
-
-    def parse_stdout_to_dict(self, stdout):
-        lines   = stdout.splitlines()
-        headers = lines[0].split()
-        result  = []
-
-        for line in lines[1:]:                                                       # Split each line into parts based on whitespace
-            parts = line.split()                                                     # Combine the parts with headers to create a dictionary
-            entry = {headers[i]: parts[i] for i in range(len(headers))}
-            result.append(entry)
-
-        return result
-
-    def which(self, target):
-        command = f'which {target}'                                                  # todo: security-vuln: add protection against code injection
-        return self.execute_command__return_stdout(command)
-
-    def whoami(self):
-        command = f'whoami'                                                          # todo: security-vuln: add protection against code injection
-        return self.execute_command__return_stdout(command)
-
-    # print helpers
-    def print_ls(self, path=''):
-        pprint(self.ls(path))
-        return self
-
-    def print_exec(self, command=''):
-        pprint(self.exec(command))
-        return self
-    # def ifconfig(self):
-    #     command = "export PATH=$PATH:/sbin && ifconfig"                            # todo add example with PATH modification
-    #     return self.execute_command__return_stdout(command)
-
-    # def ifconfig(self):                                                            # todo add command to execute in separate bash (see when it is needed)
-    #     command = "bash -l -c 'ifconfig'"
-    #     return self.execute_command__return_stdout(command)
-    # if port_forward:                                                               # todo: add support for port forward (this will need async execution)
-    #     local_port  = port_forward.get('local_port' )
-    #     remote_ip   = port_forward.get('remote_ip'  )
-    #     remote_port = port_forward.get('remote_port')
osbot_utils/helpers/sqlite/domains/Sqlite__Cache__Requests.py
DELETED
@@ -1,214 +0,0 @@
-import types
-from osbot_utils.base_classes.Kwargs_To_Self import Kwargs_To_Self
-from osbot_utils.helpers.sqlite.domains.Sqlite__DB__Requests import Sqlite__DB__Requests
-from osbot_utils.utils.Json import json_dumps, json_loads
-from osbot_utils.utils.Misc import str_sha256, timestamp_utc_now
-from osbot_utils.utils.Objects import pickle_save_to_bytes, pickle_load_from_bytes
-
-
-class Sqlite__Cache__Requests(Kwargs_To_Self):
-    add_timestamp      : bool = True
-    enabled            : bool = True
-    update_mode        : bool = False
-    cache_only_mode    : bool = False
-    sqlite_requests    : Sqlite__DB__Requests = None
-    pickle_response    : bool = False
-    capture_exceptions : bool = False                                               # once this is working, it might be more useful to have this set to true
-    exception_classes  : list
-    on_invoke_target   : types.FunctionType
-
-    def __init__(self, db_path=None, db_name=None, table_name=None):
-        self.sqlite_requests = Sqlite__DB__Requests(db_path=db_path, db_name=db_name, table_name=table_name)
-        super().__init__()
-
-    def cache_add(self, request_data, response_data):
-        new_row_obj = self.create_new_cache_obj(request_data, response_data)
-        return self.cache_table().row_add_and_commit(new_row_obj)
-
-    def cache_delete(self, request_data):
-        request_data        = json_dumps(request_data)
-        request_data_sha256 = str_sha256(request_data)
-        return self.cache_table().rows_delete_where(request_hash=request_data_sha256)
-
-    def cache_entries(self):
-        return self.cache_table().rows()
-
-    def cache_entry(self, request_data):
-        request_data        = json_dumps(request_data)
-        request_data_sha256 = str_sha256(request_data)
-        data                = self.cache_table().select_rows_where(request_hash=request_data_sha256)
-        if len(data) > 0:                                                           # todo: add logic to handle (or log), where there are multiple entries with the same hash
-            return data[0]
-        return {}
-
-    def cache_entry_comments(self, model_id, body):
-        cache_entry = self.cache_entry_for_request_params(model_id=model_id, body=body)
-        return cache_entry.get('comments')
-
-    def cache_entry_comments_update(self, model_id, body, new_comments):
-        cache_entry      = self.cache_entry_for_request_params(model_id=model_id, body=body)
-        request_hash     = cache_entry.get('request_hash')
-        update_fields    = dict(comments=new_comments)
-        query_conditions = dict(request_hash=request_hash)
-        result           = self.cache_table().row_update(update_fields, query_conditions)
-        return result
-
-    def cache_entry_for_request_params(self, *args, **target_kwargs):
-        request_data = self.cache_request_data(*args, **target_kwargs)
-        return self.cache_entry(request_data)
-
-    def create_new_cache_data(self, request_data, response_data):
-        request_data_json = json_dumps(request_data)
-        request_data_hash = str_sha256(request_data_json)
-        if self.add_timestamp:
-            timestamp = timestamp_utc_now()
-        else:
-            timestamp = 0
-        cache_cata = dict(request_data   = request_data_json,
-                          request_hash   = request_data_hash,
-                          response_bytes = b''              ,
-                          response_data  = ''               ,
-                          response_hash  = ''               ,
-                          timestamp      = timestamp        )
-        if self.pickle_response:
-            cache_cata['response_bytes'] = response_data
-        else:
-            response_data_json          = json_dumps(response_data)
-            response_data_hash          = str_sha256(response_data_json)
-            cache_cata['response_data'] = response_data_json
-            cache_cata['response_hash'] = response_data_hash
-        return cache_cata
-
-    def create_new_cache_obj(self, request_data, response_data):
-        new_row_data = self.create_new_cache_data(request_data, response_data)
-        new_row_obj  = self.cache_table().new_row_obj(new_row_data)
-        return new_row_obj
-
-    def cache_table(self):
-        return self.sqlite_requests.table_requests()
-
-    def cache_table__clear(self):
-        return self.cache_table().clear()
-
-    def cache_request_data(self, *args, **target_kwargs):
-        return {'args': list(args), 'kwargs': target_kwargs}                        # convert the args tuple to a list since that is what it will be once it is serialised
-
-
-    def delete_where_request_data(self, request_data):                              # todo: check if it is ok to use the request_data as a query target, or if we should use the request_hash variable
-        if type(request_data) is dict:                                              # if we get an request_data obj
-            request_data = json_dumps(request_data)                                 # convert it to the json dump
-        if type(request_data) is str:                                               # make sure we have a string
-            if len(self.rows_where__request_data(request_data)) > 0:                # make sure there is at least one entry to delete
-                self.cache_table().rows_delete_where(request_data=request_data)     # delete it
-                return len(self.rows_where__request_data(request_data)) == 0        # confirm it was deleted
-        return False                                                                # if anything was not right, return False
-
-    def disable(self):
-        self.enabled = False
-        return self
-
-    def enable(self):
-        self.enabled = True
-        return self
-
-    def invoke(self, target, target_args, target_kwargs):
-        return self.invoke_with_cache(target, target_args, target_kwargs)
-
-    def invoke_target(self, target, target_args, target_kwargs):
-        if self.on_invoke_target:
-            raw_response = self.on_invoke_target(target, target_args, target_kwargs)
-        else:
-            raw_response = target(*target_args, **target_kwargs)
-        return self.transform_raw_response(raw_response)
-
-    def invoke_with_cache(self, target, target_args, target_kwargs, request_data=None):
-        if self.enabled is False:
-            if self.cache_only_mode:
-                return None
-            return self.invoke_target(target, target_args, target_kwargs)
-        if request_data is None:
-            request_data = self.cache_request_data(*target_args, **target_kwargs)
-        cache_entry = self.cache_entry(request_data)
-        if cache_entry:
-            if self.update_mode is True:
-                self.cache_delete(request_data)
-            else:
-                return self.response_data_deserialize(cache_entry)
-        if self.cache_only_mode is False:
-            return self.invoke_target__and_add_to_cache(request_data, target, target_args, target_kwargs)
-
-
-    def invoke_target__and_add_to_cache(self, request_data, target, target_args, target_kwargs):
-        try:
-            response_data_obj = self.invoke_target(target, target_args, target_kwargs)
-            response_data     = self.response_data_serialize(response_data_obj)
-            self.cache_add(request_data=request_data, response_data=response_data)
-            return response_data_obj
-        except Exception as exception:
-            if self.capture_exceptions:
-                response_data = self.response_data_serialize(exception)
-                self.cache_add(request_data=request_data, response_data=response_data)
-            raise exception
-
-    def only_from_cache(self, value=True):
-        self.cache_only_mode = value
-        return self
-
-    def response_data_deserialize(self, cache_entry):
-        if self.pickle_response:
-            response_bytes    = cache_entry.get('response_bytes')
-            response_data_obj = pickle_load_from_bytes(response_bytes)
-        else:
-            response_data     = cache_entry.get('response_data')
-            response_data_obj = json_loads(response_data)
-        if self.capture_exceptions:
-            if (type(response_data_obj) is Exception or                             # raise if it is an exception
-                type(response_data_obj) in self.exception_classes):                 # or if one of the types that have been set as being exception classes
-                raise response_data_obj
-        # else:
-        #     pprint(type(response_data_obj))
-        return response_data_obj
-
-    def response_data_serialize(self, response_data):
-        if self.pickle_response:
-            return pickle_save_to_bytes(response_data)
-        return response_data
-
-    def response_data_for__request_hash(self, request_hash):
-        rows = self.rows_where__request_hash(request_hash)
-        if len(rows) > 0:
-            cache_entry       = rows[0]
-            response_data_obj = self.response_data_deserialize(cache_entry)
-            return response_data_obj
-        return {}
-
-    def requests_data__all(self):
-        requests_data = []
-        for row in self.cache_table().rows():
-            req_id           = row.get('id')
-            request_data     = row.get('request_data')
-            request_hash     = row.get('request_hash')
-            request_comments = row.get('comments')
-            request_data_obj = json_loads(request_data)
-            request_data_obj['_id'      ] = req_id
-            request_data_obj['_hash'    ] = request_hash
-            request_data_obj['_comments'] = request_comments
-
-            requests_data.append(request_data_obj)
-        return requests_data
-
-    def rows_where(self, **kwargs):
-        return self.cache_table().select_rows_where(**kwargs)
-
-    def rows_where__request_data(self, request_data):
-        return self.rows_where(request_data=request_data)
-
-    def rows_where__request_hash(self, request_hash):
-        return self.rows_where(request_hash=request_hash)
-
-    def transform_raw_response(self, raw_response):
-        return raw_response
-
-    def update(self, value=True):
-        self.update_mode = value
-        return self
{osbot_utils-1.16.0.dist-info → osbot_utils-1.20.0.dist-info}/LICENSE
File without changes
{osbot_utils-1.16.0.dist-info → osbot_utils-1.20.0.dist-info}/WHEEL
File without changes