atomicshop 3.3.28__py3-none-any.whl → 3.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of atomicshop might be problematic.
- atomicshop/__init__.py +1 -1
- atomicshop/a_mains/get_local_tcp_ports.py +85 -0
- atomicshop/a_mains/install_ca_certificate.py +172 -0
- atomicshop/a_mains/process_from_port.py +119 -0
- atomicshop/a_mains/set_default_dns_gateway.py +90 -0
- atomicshop/basics/strings.py +1 -1
- atomicshop/certificates.py +2 -2
- atomicshop/dns.py +26 -28
- atomicshop/etws/traces/trace_tcp.py +1 -2
- atomicshop/mitm/centered_settings.py +133 -0
- atomicshop/mitm/config_static.py +18 -43
- atomicshop/mitm/connection_thread_worker.py +376 -162
- atomicshop/mitm/engines/__parent/recorder___parent.py +1 -1
- atomicshop/mitm/engines/__parent/requester___parent.py +1 -1
- atomicshop/mitm/engines/__parent/responder___parent.py +15 -2
- atomicshop/mitm/engines/create_module_template.py +1 -2
- atomicshop/mitm/import_config.py +79 -88
- atomicshop/mitm/initialize_engines.py +1 -2
- atomicshop/mitm/message.py +5 -4
- atomicshop/mitm/mitm_main.py +222 -121
- atomicshop/mitm/recs_files.py +61 -5
- atomicshop/mitm/ssh_tester.py +82 -0
- atomicshop/networks.py +108 -93
- atomicshop/package_mains_processor.py +84 -0
- atomicshop/permissions/ubuntu_permissions.py +47 -0
- atomicshop/print_api.py +3 -5
- atomicshop/python_functions.py +23 -108
- atomicshop/speech_recognize.py +8 -0
- atomicshop/ssh_remote.py +115 -51
- atomicshop/web.py +20 -7
- atomicshop/web_apis/google_llm.py +22 -14
- atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
- atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +2 -1
- atomicshop/wrappers/dockerw/dockerw.py +2 -2
- atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +5 -5
- atomicshop/wrappers/githubw.py +175 -63
- atomicshop/wrappers/loggingw/handlers.py +1 -1
- atomicshop/wrappers/loggingw/loggingw.py +17 -1
- atomicshop/wrappers/netshw.py +124 -3
- atomicshop/wrappers/playwrightw/scenarios.py +1 -1
- atomicshop/wrappers/powershell_networking.py +80 -0
- atomicshop/wrappers/psutilw/psutil_networks.py +9 -0
- atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +3 -105
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +3 -57
- atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +12 -27
- atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +15 -9
- atomicshop/wrappers/socketw/certificator.py +19 -9
- atomicshop/wrappers/socketw/creator.py +30 -7
- atomicshop/wrappers/socketw/dns_server.py +6 -6
- atomicshop/wrappers/socketw/exception_wrapper.py +3 -3
- atomicshop/wrappers/socketw/process_getter.py +86 -0
- atomicshop/wrappers/socketw/receiver.py +29 -9
- atomicshop/wrappers/socketw/sender.py +10 -9
- atomicshop/wrappers/socketw/sni.py +23 -6
- atomicshop/wrappers/socketw/{base.py → socket_base.py} +33 -1
- atomicshop/wrappers/socketw/socket_client.py +6 -8
- atomicshop/wrappers/socketw/socket_wrapper.py +82 -21
- atomicshop/wrappers/socketw/ssl_base.py +6 -2
- atomicshop/wrappers/win_auditw.py +189 -0
- {atomicshop-3.3.28.dist-info → atomicshop-3.10.0.dist-info}/METADATA +25 -30
- {atomicshop-3.3.28.dist-info → atomicshop-3.10.0.dist-info}/RECORD +74 -88
- atomicshop/_basics_temp.py +0 -101
- atomicshop/a_installs/ubuntu/docker_rootless.py +0 -11
- atomicshop/a_installs/ubuntu/docker_sudo.py +0 -11
- atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
- atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
- atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
- atomicshop/addons/package_setup/Setup.cmd +0 -7
- atomicshop/archiver/__init__.py +0 -0
- atomicshop/archiver/_search_in_zip.py +0 -189
- atomicshop/archiver/search_in_archive.py +0 -284
- atomicshop/archiver/sevenz_app_w.py +0 -86
- atomicshop/archiver/sevenzs.py +0 -73
- atomicshop/archiver/shutils.py +0 -34
- atomicshop/archiver/zips.py +0 -353
- atomicshop/file_types.py +0 -24
- atomicshop/pbtkmultifile_argparse.py +0 -88
- atomicshop/script_as_string_processor.py +0 -42
- atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
- atomicshop/ssh_scripts/process_from_port.py +0 -27
- atomicshop/wrappers/_process_wrapper_curl.py +0 -27
- atomicshop/wrappers/_process_wrapper_tar.py +0 -21
- atomicshop/wrappers/dockerw/install_docker.py +0 -449
- atomicshop/wrappers/ffmpegw.py +0 -125
- atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
- atomicshop/wrappers/socketw/get_process.py +0 -123
- /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compile.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.dll +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.exp +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.lib +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +0 -0
- {atomicshop-3.3.28.dist-info → atomicshop-3.10.0.dist-info}/WHEEL +0 -0
- {atomicshop-3.3.28.dist-info → atomicshop-3.10.0.dist-info}/licenses/LICENSE.txt +0 -0
- {atomicshop-3.3.28.dist-info → atomicshop-3.10.0.dist-info}/top_level.txt +0 -0

atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py
CHANGED

@@ -1,13 +1,13 @@
-import sys
import subprocess
from pathlib import Path

+from dkarchiver.arch_wrappers import zips
+from dkinst.installers.helpers import docker_installer
+
from .... import filesystem
from ....permissions import ubuntu_permissions
-from ....archiver import zips
from ....print_api import print_api
from ... import githubw, pipw, ubuntu_terminal
-from ...dockerw import install_docker
from .. import config_install


@@ -87,7 +87,7 @@ def install_before_restart(

    # Install docker. FACT installs the docker, but there can be a problem with permissions, so we need to add
    # the user permissions to the docker group before restart.
-    if not
+    if not docker_installer.add_current_user_to_docker_group():
        print_api("Docker is installed, but the current user was not added to the docker group.", color='red')
        return 1
    else:
@@ -108,7 +108,7 @@ def install_before_restart(
    # use_docker_installer=True, rootless=True, add_current_user_to_docker_group_bool=False)

    # Install docker in regular mode.
-    result: int =
+    result: int = docker_installer.install_docker_ubuntu(
        use_docker_installer=True, rootless=False, add_current_user_to_docker_group_bool=True)
    if result != 0:
        print_api("Docker installation failed. Please install Docker manually.", color='red')
atomicshop/wrappers/githubw.py
CHANGED

@@ -163,11 +163,103 @@ class GitHubWrapper:

        self.build_links_from_user_and_repo()

-    def check_github_domain(
+    def check_github_domain(
+            self,
+            domain: str
+    ):
        if self.domain not in domain:
            print_api(
                f'This is not [{self.domain}] domain.', color="red", error_type=True)

+    def download_file(
+            self,
+            file_name: str,
+            target_dir: str
+    ) -> str:
+        """
+        Download a single repo file to a local directory.
+
+        :param file_name: string, Full repo-relative path to the file. Example:
+            "eng.traineddata"
+            "script\\English.script"
+        :param target_dir: string, Local directory to save into.
+
+        :return: The local path to the downloaded file.
+        """
+
+        # Normalize to GitHub path format
+        file_path = file_name.replace("\\", "/").strip("/")
+
+        headers = self._get_headers()
+        url = f"{self.contents_url}/{file_path}"
+        params = {"ref": self.branch}
+
+        resp = requests.get(url, headers=headers, params=params)
+        resp.raise_for_status()
+        item = resp.json()
+
+        # Expect a single file object
+        if isinstance(item, list) or item.get("type") != "file":
+            raise ValueError(f"'{file_name}' is not a file in branch '{self.branch}'.")
+
+        download_url = item.get("download_url")
+        if not download_url:
+            raise ValueError(f"Unable to obtain download URL for '{file_name}'.")
+
+        os.makedirs(target_dir, exist_ok=True)
+        local_name = item.get("name") or os.path.basename(file_path)
+
+        from .. import web  # ensure available in your module structure
+        web.download(
+            file_url=download_url,
+            target_directory=target_dir,
+            file_name=local_name,
+            headers=headers,
+        )
+        return os.path.join(target_dir, local_name)
+
+    def download_directory(
+            self,
+            folder_name: str,
+            target_dir: str
+    ) -> None:
+        """
+        Recursively download a repo directory to a local directory.
+
+        :param folder_name: string, Repo-relative directory path to download (e.g., "tests/langs").
+        :param target_dir: string, Local directory to save the folder tree into.
+        """
+        headers = self._get_headers()
+        root_path = folder_name.replace("\\", "/").strip("/")
+
+        def _walk_dir(rel_path: str, local_dir: str) -> None:
+            contents_url = f"{self.contents_url}/{rel_path}" if rel_path else self.contents_url
+            params = {"ref": self.branch}
+
+            response = requests.get(contents_url, headers=headers, params=params)
+            response.raise_for_status()
+            items = response.json()
+
+            # If a file path was passed accidentally, delegate to download_file
+            if isinstance(items, dict) and items.get("type") == "file":
+                self.download_file(rel_path, local_dir)
+                return
+
+            if not isinstance(items, list):
+                raise ValueError(f"Unexpected response shape when listing '{rel_path or '/'}'.")
+
+            os.makedirs(local_dir, exist_ok=True)
+
+            for item in items:
+                name = item["name"]
+                if item["type"] == "file":
+                    self.download_file(f"{rel_path}/{name}" if rel_path else name, local_dir)
+                elif item["type"] == "dir":
+                    _walk_dir(f"{rel_path}/{name}" if rel_path else name, os.path.join(local_dir, name))
+                # ignore symlinks/submodules if present
+
+        _walk_dir(root_path, target_dir)
+
    def download_and_extract_branch(
            self,
            target_directory: str,
@@ -184,7 +276,7 @@ class GitHubWrapper:
        :param archive_remove_first_directory: boolean, available only if 'path' was not specified during the initialization
            Sets if archive extract function will extract the archive
            without first directory in the archive. Check reference in the
-            '
+            'dkarchiver.arch_wrappers.zips.extract_archive_with_zipfile' function.
        :param download_each_file: bool, available only if 'path' was specified during the initialization of the class.
            Sets if each file will be downloaded separately.

@@ -195,48 +287,6 @@ class GitHubWrapper:
        :return:
        """

-        def download_file(file_url: str, target_dir: str, file_name: str, current_headers: dict) -> None:
-            os.makedirs(target_dir, exist_ok=True)
-
-            web.download(
-                file_url=file_url,
-                target_directory=target_dir,
-                file_name=file_name,
-                headers=current_headers
-            )
-
-        def download_directory(folder_path: str, target_dir: str, current_headers: dict) -> None:
-            # Construct the API URL for the current folder.
-            contents_url = f"{self.contents_url}/{folder_path}"
-            params = {'ref': self.branch}
-
-            response = requests.get(contents_url, headers=current_headers, params=params)
-            response.raise_for_status()
-
-            # Get the list of items (files and subdirectories) in the folder.
-            items = response.json()
-
-            # Ensure the local target directory exists.
-            os.makedirs(target_dir, exist_ok=True)
-
-            # Process each item.
-            for item in items:
-                local_item_path = os.path.join(target_dir, item['name'])
-                if item['type'] == 'file':
-                    download_file(
-                        file_url=item['download_url'],
-                        target_dir=target_dir,
-                        file_name=item['name'],
-                        current_headers=current_headers
-                    )
-                elif item['type'] == 'dir':
-                    # Recursively download subdirectories.
-                    download_directory(
-                        folder_path=f"{folder_path}/{item['name']}",
-                        target_dir=local_item_path,
-                        current_headers=current_headers
-                    )
-
        headers: dict = self._get_headers()

        if not download_each_file:
@@ -275,7 +325,7 @@ class GitHubWrapper:
        else:
            current_target_directory = os.path.join(target_directory, self.path)

-            download_directory(self.path, current_target_directory
+            self.download_directory(self.path, current_target_directory)

    def get_releases_json(
            self,
@@ -436,7 +486,8 @@ class GitHubWrapper:
            target_directory: str,
            asset_pattern: str,
            exclude_string: str = None,
-            **kwargs
+            **kwargs
+    ) -> str:
        """
        This function will download the latest release from the GitHub repository.
        :param target_directory: str, the target directory to download the file.
@@ -444,7 +495,7 @@ class GitHubWrapper:
        :param exclude_string: str, the string to exclude from the search. No wildcards can be used.
            The 'excluded_string' will be filtered before the 'asset_pattern' entries.
        :param kwargs: dict, the print arguments for the 'print_api' function.
-        :return:
+        :return: str, the downloaded file path.
        """

        headers: dict = self._get_headers()
@@ -471,7 +522,7 @@ class GitHubWrapper:
        :param exclude_string: str, the string to exclude from the search. No wildcards can be used.
        :param archive_remove_first_directory: bool, sets if archive extract function will extract the archive
            without first directory in the archive. Check reference in the
-            '
+            'dkarchiver.arch_wrappers.zips.extract_archive_with_zipfile' function.
        :param kwargs: dict, the print arguments for the 'print_api' function.
        :return:
        """
@@ -530,8 +581,77 @@ class GitHubWrapper:
        commit_message = latest_commit.get("commit", {}).get("message", "")
        return commit_message

+    def list_files(
+            self,
+            pattern: str = "*",
+            recursive: bool = True,
+            path: str | None = None,
+    ) -> list[str]:
+        """
+        List files in the repository (or in a specific subfolder).
+
+        :param pattern: Glob-style pattern (e.g., "*.ex*", "*test*.py"). Matching is done
+            against the file's base name (not the full path).
+        :param recursive: If True, include files in all subfolders (returns full repo-relative
+            paths). If False, list only the immediate files in the chosen folder.
+        :param path: Optional subfolder to list from (e.g., "tests/langs"). If omitted,
+            uses self.path if set, otherwise the repo root.
+
+        :return: A list of repo-relative file paths that match the pattern.
+        """
+        headers = self._get_headers()
+        base_path = (path or self.path or "").strip("/")
+
+        if recursive:
+            # Use the Git Trees API to fetch all files in one call, then filter.
+            tree_url = f"{self.api_url}/git/trees/{self.branch}"
+            params = {"recursive": "1"}
+            resp = requests.get(tree_url, headers=headers, params=params)
+            resp.raise_for_status()
+            data = resp.json()
+
+            files = []
+            for entry in data.get("tree", []):
+                if entry.get("type") != "blob":
+                    continue  # only files
+                entry_path = entry.get("path", "")
+                # If a base_path was provided, keep only files under it
+                if base_path and not entry_path.startswith(base_path + "/") and entry_path != base_path:
+                    continue
+                # Match pattern against the *file name* (basename)
+                if fnmatch.fnmatch(os.path.basename(entry_path), pattern):
+                    files.append(entry_path)
+            return files
+
+        else:
+            # Non-recursive: use the Contents API to list a single directory.
+            # If base_path is empty, list the repo root.
+            if base_path:
+                contents_url = f"{self.contents_url}/{base_path}"
+            else:
+                contents_url = self.contents_url
+
+            params = {"ref": self.branch}
+            resp = requests.get(contents_url, headers=headers, params=params)
+            resp.raise_for_status()
+            items = resp.json()

-
+            # The Contents API returns a dict when the path points to a single file;
+            # normalize to a list to simplify handling.
+            if isinstance(items, dict):
+                items = [items]
+
+            files = []
+            for item in items:
+                if item.get("type") == "file":
+                    name = item.get("name", "")
+                    if fnmatch.fnmatch(name, pattern):
+                        # item["path"] is the full repo-relative path we want to return
+                        files.append(item.get("path", name))
+            return files
+
+
+def _make_parser():
    import argparse

    parser = argparse.ArgumentParser(description='GitHub Wrapper')
@@ -562,7 +682,7 @@ def parse_github_args():
        '-db', '--download_branch', action='store_true', default=False,
        help='Sets if the branch will be downloaded. In conjunction with path, only the path will be downloaded.')

-    return parser
+    return parser


def github_wrapper_main(
@@ -571,8 +691,8 @@ def github_wrapper_main(
        path: str = None,
        target_directory: str = None,
        pat: str = None,
-        get_latest_commit_json: bool = False,
        get_latest_commit_message: bool = False,
+        get_latest_commit_json: bool = False,
        download_branch: bool = False
):
    """
@@ -610,15 +730,7 @@ def github_wrapper_main(


def github_wrapper_main_with_args():
-
-
-
-
-        branch=args.branch,
-        path=args.path,
-        target_directory=args.target_directory,
-        pat=args.pat,
-        get_latest_commit_json=args.get_latest_commit_json,
-        get_latest_commit_message=args.get_latest_commit_message,
-        download_branch=args.download_branch
-    )
+    main_parser = _make_parser()
+    args = main_parser.parse_args()
+
+    return github_wrapper_main(**vars(args))
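
A hedged usage sketch of the new GitHubWrapper helpers shown above (download_file, download_directory, list_files). The method signatures come from this diff; the constructor keyword arguments and the example repository are assumptions, since the class initialization is not part of this diff.

from atomicshop.wrappers.githubw import GitHubWrapper

# Assumed constructor arguments; the real __init__ signature is not shown in this diff.
gh = GitHubWrapper(user_name="example-user", repo_name="example-repo", branch="main")

# List every *.py file in the repository (the glob is matched against the basename).
python_files = gh.list_files(pattern="*.py", recursive=True)

# Download one repo-relative file into a local directory; returns the local path.
local_path = gh.download_file("eng.traineddata", target_dir="./downloads")

# Recursively mirror a repository folder into a local directory.
gh.download_directory("tests/langs", target_dir="./downloads/langs")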

atomicshop/wrappers/loggingw/loggingw.py
CHANGED

@@ -1037,6 +1037,8 @@ class ExceptionCsvLogger(CsvLogger):
            self,
            message: Union[str, Exception] = None,
            custom_csv_string: str = None,
+            custom_exception_attribute: str = None,
+            custom_exception_attribute_placement: Literal['before', 'after'] = 'before',
            stdout: bool = True
    ):
        """
@@ -1050,11 +1052,25 @@ class ExceptionCsvLogger(CsvLogger):
            Meaning, that you need to provide the 'custom_header' during the initialization of the object.
            Off course, you can use as many commas as you need in the 'custom_csv_string': "custom1,custom2,custom3".
            This need to be mirrored in the 'custom_header' as well: "custom1,custom2,custom3".
+        :param custom_exception_attribute: If the 'message' is an Exception, you can provide a custom attribute
+            name to extract from the Exception object and add it to the exception message.
+            For example, if the Exception has an attribute 'engine_name', you can provide it here
+            and the exception message will be appended with the value of that attribute.
+        :param custom_exception_attribute_placement: 'before' or 'after', where to place
+            the custom exception attribute value in the exception message.
        :param stdout: If set to True, the exception will be printed to the console.
        """

        if message is None or isinstance(message, Exception):
-
+            custom_attribute: str | None = getattr(message, custom_exception_attribute, None)
+            traceback_string: str = tracebacks.get_as_string()
+            if custom_attribute:
+                if custom_exception_attribute_placement == 'before':
+                    message = f"{custom_exception_attribute}: [{custom_attribute}] | {traceback_string}"
+                else:
+                    message = f"{traceback_string} | {custom_exception_attribute}: [{custom_attribute}]"
+            else:
+                message = traceback_string

        if custom_csv_string:
            row_of_cols: list = [datetime.datetime.now(), custom_csv_string, message]
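
A heavily hedged sketch of the new custom_exception_attribute parameters shown above. Neither the logger's constructor arguments nor the name of its writing method are visible in this diff, so the no-argument construction and the method name write below are placeholders only.

from atomicshop.wrappers.loggingw.loggingw import ExceptionCsvLogger


class EngineError(Exception):
    def __init__(self, message: str, engine_name: str):
        super().__init__(message)
        self.engine_name = engine_name  # custom attribute to surface in the CSV row


exception_logger = ExceptionCsvLogger()  # placeholder construction; real __init__ not shown in this diff

try:
    raise EngineError("request failed", engine_name="example_engine")
except EngineError as error:
    # 'write' is a placeholder name for the method whose signature changed in this diff.
    exception_logger.write(
        message=error,
        custom_exception_attribute='engine_name',
        custom_exception_attribute_placement='before',
    )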
atomicshop/wrappers/netshw.py
CHANGED

@@ -1,6 +1,6 @@
import subprocess
import re
-from typing import List, Dict, Any
+from typing import List, Dict, Any, Optional

# ── regex helpers ─────────────────────────────────────────────────────────
IP_PATTERN = r'(?:\d{1,3}\.){3}\d{1,3}'
@@ -10,7 +10,7 @@ RE_SUBNET = re.compile(rf'(?P<prefix>{IP_PATTERN}/\d+)\s+\(mask\s+(?P<ma
RE_IP = re.compile(IP_PATTERN)


-def _get_netsh_show_config() -> str:
+def get_netsh_show_config() -> str:
    """Run `netsh interface ipv4 show config` and return the raw text."""
    return subprocess.check_output(
        ["netsh", "interface", "ipv4", "show", "config"],
@@ -18,6 +18,7 @@ def _get_netsh_show_config() -> str:
    )


+# noinspection PyUnresolvedReferences
def get_netsh_ipv4() -> List[Dict[str, Any]]:
    """
    Parse *all* data from `netsh interface ipv4 show config`.
@@ -27,7 +28,7 @@ def get_netsh_ipv4() -> List[Dict[str, Any]]:
        default_gateways, gateway_metric, interface_metric,
        dns_mode, dns_servers, wins_mode, wins_servers
    """
-    config_text =
+    config_text = get_netsh_show_config()

    adapters: List[Dict[str, Any]] = []
    adapter: Dict[str, Any] | None = None
@@ -148,3 +149,123 @@ def get_netsh_ipv4() -> List[Dict[str, Any]]:
    # ad['wins_mode'] = 'mixed'

    return adapters
+
+
+def run_netsh(*args: str) -> subprocess.CompletedProcess:
+    """
+    Run a netsh command and return stdout as text.
+
+    Example:
+        run_netsh("interface", "ipv4", "show", "interfaces")
+    """
+    cmd = ["netsh"] + list(args)
+    result = subprocess.run(
+        cmd,
+        capture_output=True,
+        text=True,
+        check=False
+    )
+    return result
+
+
+def enable_dhcp_static_coexistence(interface_name: str) -> subprocess.CompletedProcess:
+    """
+    Enable DHCP + static IP coexistence on an interface.
+
+    Equivalent to:
+        netsh interface ipv4 set interface "Ethernet0" dhcpstaticipcoexistence=enabled
+    """
+    return run_netsh(
+        "interface", "ipv4", "set", "interface",
+        interface_name,
+        "dhcpstaticipcoexistence=enabled"
+    )
+
+
+def disable_dhcp_static_coexistence(interface_name: str) -> subprocess.CompletedProcess:
+    """
+    Disable DHCP + static IP coexistence on an interface (optional).
+
+    Equivalent to:
+        netsh interface ipv4 set interface "Ethernet0" dhcpstaticipcoexistence=disabled
+    """
+    return run_netsh(
+        "interface", "ipv4", "set", "interface",
+        interface_name,
+        "dhcpstaticipcoexistence=disabled"
+    )
+
+
+def add_virtual_ip(
+        interface_name: str,
+        ip: str,
+        mask: str,
+        skip_as_source: bool = True
+) -> subprocess.CompletedProcess:
+    """
+    Add a static 'virtual' IP to a DHCP interface, keeping DHCP intact.
+
+    Equivalent to:
+        netsh interface ipv4 add address "Ethernet0" 192.168.1.201 255.255.255.0 skipassource=true
+
+    Args:
+        interface_name: Interface name, e.g. "Ethernet0"
+        ip: IP to add, e.g. "192.168.1.201"
+        mask: Subnet mask, e.g. "255.255.255.0"
+        skip_as_source: If True, adds 'skipassource=true' so Windows does
+            not prefer this IP as the outbound source address.
+    """
+    args = [
+        "interface", "ipv4", "add", "address",
+        interface_name,
+        ip,
+        mask,
+    ]
+    if skip_as_source:
+        args.append("skipassource=true")
+
+    return run_netsh(*args)
+
+
+def remove_virtual_ip(
+        interface_name: str,
+        ip: str
+) -> subprocess.CompletedProcess:
+    """
+    Remove a previously added virtual IP from the interface.
+
+    Equivalent to:
+        netsh interface ipv4 delete address "Ethernet0" addr=192.168.1.201
+    """
+    return run_netsh(
+        "interface", "ipv4", "delete", "address",
+        interface_name,
+        f"addr={ip}"
+    )
+
+
+def show_interface_config(
+        interface_name: Optional[str] = None
+) -> subprocess.CompletedProcess:
+    """
+    Show IPv4 configuration for all interfaces or a specific one.
+
+    Equivalent to:
+        netsh interface ipv4 show config
+    or:
+        netsh interface ipv4 show config "Ethernet0"
+    """
+    if interface_name:
+        return run_netsh("interface", "ipv4", "show", "config", interface_name)
+    else:
+        return run_netsh("interface", "ipv4", "show", "config")
+
+
+def list_ipv4_interfaces() -> subprocess.CompletedProcess:
+    """
+    List IPv4 interfaces.
+
+    Equivalent to:
+        netsh interface ipv4 show interfaces
+    """
+    return run_netsh("interface", "ipv4", "show", "interfaces")
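
A minimal usage sketch for the new netsh helpers above, assuming a Windows host and an elevated shell; the adapter name and addresses are examples only. Each helper returns the subprocess.CompletedProcess from run_netsh, so the caller decides how to handle failures.

from atomicshop.wrappers import netshw

# Let a static address coexist with the DHCP lease on the adapter.
netshw.enable_dhcp_static_coexistence("Ethernet0")

# Add a secondary ("virtual") IPv4 address without disturbing DHCP.
result = netshw.add_virtual_ip("Ethernet0", ip="192.168.1.201", mask="255.255.255.0", skip_as_source=True)
if result.returncode != 0:
    print(result.stderr or result.stdout)

# Inspect the interface, then clean up.
print(netshw.show_interface_config("Ethernet0").stdout)
netshw.remove_virtual_ip("Ethernet0", "192.168.1.201")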

atomicshop/wrappers/playwrightw/scenarios.py
CHANGED

@@ -219,7 +219,7 @@ def _fetch_content(
    with sync_playwright() as p:
        browser = p.chromium.launch(headless=headless)  # Set headless=True if you don't want to see the browser

-        user_agent: str = web.USER_AGENTS['
+        user_agent: str = web.USER_AGENTS['Chrome 142.0.0 Windows 10/11 x64']

        if text_fetch_method == "playwright_copypaste":
            context = browser.new_context(permissions=["clipboard-read", "clipboard-write"], user_agent=user_agent)

atomicshop/wrappers/powershell_networking.py

@@ -0,0 +1,80 @@
+import json
+import subprocess
+from typing import List, Literal
+
+
+def get_interface_ips(
+        interface_name: str,
+        ip_type: Literal["virtual", "dynamic", "all"] = "virtual"
+) -> List[str]:
+    """
+    Return IPv4 addresses on an interface, filtered by 'mode'.
+
+    ip_type:
+      - "virtual": only static/virtual IPs (PrefixOrigin != 'Dhcp')
+      - "dynamic": only DHCP IPs (PrefixOrigin == 'Dhcp')
+      - "all": all IPv4 IPs on the interface
+
+    If the interface does not exist or has no IPv4 addresses, returns [].
+    """
+
+    ps_script = f"""
+    try {{
+        Get-NetIPAddress -InterfaceAlias "{interface_name}" -AddressFamily IPv4 |
+            Select-Object IPAddress,
+                @{{
+                    Name = 'PrefixOrigin';
+                    Expression = {{ [string]$_.PrefixOrigin }}
+                }} |
+            ConvertTo-Json -Depth 3
+    }} catch {{
+        # Return empty JSON array if nothing found / interface missing
+        '[]'
+    }}
+    """
+
+    try:
+        result = subprocess.run(
+            ["powershell", "-NoProfile", "-Command", ps_script],
+            capture_output=True,
+            text=True,
+            check=True
+        )
+    except subprocess.CalledProcessError as e:
+        # If anything unexpected happens, raise a clearer error
+        msg = (e.stderr or e.stdout or "").strip()
+        raise RuntimeError(f"PowerShell Get-NetIPAddress failed: {msg}") from e
+
+    stdout = result.stdout.strip()
+    if not stdout:
+        return []
+
+    # At this point stdout should be valid JSON (list or single object)
+    data = json.loads(stdout)
+
+    if isinstance(data, dict):
+        data = [data]
+
+    ips: List[str] = []
+    ip_type = ip_type.lower()
+
+    for entry in data:
+        ip = entry.get("IPAddress")
+        origin_raw = entry.get("PrefixOrigin", "")
+        origin = str(origin_raw).lower()
+
+        if not ip:
+            continue
+
+        if ip_type == "virtual":
+            if origin != "dhcp":
+                ips.append(ip)
+        elif ip_type == "dynamic":
+            if origin == "dhcp":
+                ips.append(ip)
+        elif ip_type == "all":
+            ips.append(ip)
+        else:
+            raise ValueError(f"Unsupported mode: {ip_type!r}")
+
+    return ips
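
A short sketch of the new PowerShell-backed helper above, assuming a Windows host with PowerShell on PATH; "Ethernet0" is a placeholder adapter alias.

from atomicshop.wrappers import powershell_networking

# Static/virtual addresses only (PrefixOrigin other than 'Dhcp').
virtual_ips = powershell_networking.get_interface_ips("Ethernet0", ip_type="virtual")

# DHCP-assigned addresses, or everything on the adapter.
dynamic_ips = powershell_networking.get_interface_ips("Ethernet0", ip_type="dynamic")
all_ips = powershell_networking.get_interface_ips("Ethernet0", ip_type="all")

print(virtual_ips, dynamic_ips, all_ips)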

atomicshop/wrappers/psutilw/psutil_networks.py
CHANGED

@@ -74,3 +74,12 @@ def get_default_connection_name() -> Union[dict, None]:
            return {interface: details}

    return None
+
+
+def list_network_interfaces() -> list:
+    """
+    Function to list all network interfaces.
+    :return: list of interface names.
+    """
+    iface_names = list(psutil.net_if_addrs().keys())
+    return iface_names
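
A minimal sketch of the new psutil-based helper above; it only enumerates interface names, so it works anywhere psutil does.

from atomicshop.wrappers.psutilw import psutil_networks

for name in psutil_networks.list_network_interfaces():
    print(name)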