atomicshop 2.11.47__py3-none-any.whl → 3.10.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atomicshop/__init__.py +1 -1
- atomicshop/{addons/mains → a_mains}/FACT/update_extract.py +3 -2
- atomicshop/a_mains/addons/process_list/compile.cmd +7 -0
- atomicshop/a_mains/addons/process_list/compiled/Win10x64/process_list.dll +0 -0
- atomicshop/a_mains/addons/process_list/compiled/Win10x64/process_list.exp +0 -0
- atomicshop/a_mains/addons/process_list/compiled/Win10x64/process_list.lib +0 -0
- atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +8 -1
- atomicshop/a_mains/dns_gateway_setting.py +11 -0
- atomicshop/a_mains/get_local_tcp_ports.py +85 -0
- atomicshop/a_mains/github_wrapper.py +11 -0
- atomicshop/a_mains/install_ca_certificate.py +172 -0
- atomicshop/{addons/mains → a_mains}/msi_unpacker.py +3 -1
- atomicshop/a_mains/process_from_port.py +119 -0
- atomicshop/a_mains/set_default_dns_gateway.py +90 -0
- atomicshop/a_mains/update_config_toml.py +38 -0
- atomicshop/appointment_management.py +5 -3
- atomicshop/basics/ansi_escape_codes.py +3 -1
- atomicshop/basics/argparse_template.py +2 -0
- atomicshop/basics/booleans.py +27 -30
- atomicshop/basics/bytes_arrays.py +43 -0
- atomicshop/basics/classes.py +149 -1
- atomicshop/basics/dicts.py +12 -0
- atomicshop/basics/enums.py +2 -2
- atomicshop/basics/exceptions.py +5 -1
- atomicshop/basics/list_of_classes.py +29 -0
- atomicshop/basics/list_of_dicts.py +69 -5
- atomicshop/basics/lists.py +14 -0
- atomicshop/basics/multiprocesses.py +374 -50
- atomicshop/basics/package_module.py +10 -0
- atomicshop/basics/strings.py +160 -7
- atomicshop/basics/threads.py +14 -0
- atomicshop/basics/tracebacks.py +13 -4
- atomicshop/certificates.py +153 -52
- atomicshop/config_init.py +12 -7
- atomicshop/console_user_response.py +7 -14
- atomicshop/consoles.py +9 -0
- atomicshop/datetimes.py +98 -0
- atomicshop/diff_check.py +340 -40
- atomicshop/dns.py +128 -12
- atomicshop/etws/_pywintrace_fix.py +17 -0
- atomicshop/etws/const.py +38 -0
- atomicshop/etws/providers.py +21 -0
- atomicshop/etws/sessions.py +43 -0
- atomicshop/etws/trace.py +168 -0
- atomicshop/etws/traces/trace_dns.py +162 -0
- atomicshop/etws/traces/trace_sysmon_process_creation.py +126 -0
- atomicshop/etws/traces/trace_tcp.py +130 -0
- atomicshop/file_io/csvs.py +222 -24
- atomicshop/file_io/docxs.py +35 -18
- atomicshop/file_io/file_io.py +35 -19
- atomicshop/file_io/jsons.py +49 -0
- atomicshop/file_io/tomls.py +139 -0
- atomicshop/filesystem.py +864 -293
- atomicshop/get_process_list.py +133 -0
- atomicshop/{process_name_cmd.py → get_process_name_cmd_dll.py} +52 -19
- atomicshop/http_parse.py +149 -93
- atomicshop/ip_addresses.py +6 -1
- atomicshop/mitm/centered_settings.py +132 -0
- atomicshop/mitm/config_static.py +207 -0
- atomicshop/mitm/config_toml_editor.py +55 -0
- atomicshop/mitm/connection_thread_worker.py +875 -357
- atomicshop/mitm/engines/__parent/parser___parent.py +4 -17
- atomicshop/mitm/engines/__parent/recorder___parent.py +108 -51
- atomicshop/mitm/engines/__parent/requester___parent.py +116 -0
- atomicshop/mitm/engines/__parent/responder___parent.py +75 -114
- atomicshop/mitm/engines/__reference_general/parser___reference_general.py +10 -7
- atomicshop/mitm/engines/__reference_general/recorder___reference_general.py +5 -5
- atomicshop/mitm/engines/__reference_general/requester___reference_general.py +47 -0
- atomicshop/mitm/engines/__reference_general/responder___reference_general.py +95 -13
- atomicshop/mitm/engines/create_module_template.py +58 -14
- atomicshop/mitm/import_config.py +359 -139
- atomicshop/mitm/initialize_engines.py +160 -74
- atomicshop/mitm/message.py +64 -23
- atomicshop/mitm/mitm_main.py +892 -0
- atomicshop/mitm/recs_files.py +183 -0
- atomicshop/mitm/shared_functions.py +4 -10
- atomicshop/mitm/ssh_tester.py +82 -0
- atomicshop/mitm/statistic_analyzer.py +257 -166
- atomicshop/mitm/statistic_analyzer_helper/analyzer_helper.py +136 -0
- atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +525 -0
- atomicshop/monitor/change_monitor.py +96 -120
- atomicshop/monitor/checks/dns.py +139 -70
- atomicshop/monitor/checks/file.py +77 -0
- atomicshop/monitor/checks/network.py +81 -77
- atomicshop/monitor/checks/process_running.py +33 -34
- atomicshop/monitor/checks/url.py +94 -0
- atomicshop/networks.py +671 -0
- atomicshop/on_exit.py +205 -0
- atomicshop/package_mains_processor.py +84 -0
- atomicshop/permissions/permissions.py +22 -0
- atomicshop/permissions/ubuntu_permissions.py +239 -0
- atomicshop/permissions/win_permissions.py +33 -0
- atomicshop/print_api.py +24 -41
- atomicshop/process.py +63 -17
- atomicshop/process_poller/__init__.py +0 -0
- atomicshop/process_poller/pollers/__init__.py +0 -0
- atomicshop/process_poller/pollers/psutil_pywin32wmi_dll.py +95 -0
- atomicshop/process_poller/process_pool.py +207 -0
- atomicshop/process_poller/simple_process_pool.py +311 -0
- atomicshop/process_poller/tracer_base.py +45 -0
- atomicshop/process_poller/tracers/__init__.py +0 -0
- atomicshop/process_poller/tracers/event_log.py +46 -0
- atomicshop/process_poller/tracers/sysmon_etw.py +68 -0
- atomicshop/python_file_patcher.py +1 -1
- atomicshop/python_functions.py +27 -75
- atomicshop/question_answer_engine.py +2 -2
- atomicshop/scheduling.py +24 -5
- atomicshop/sound.py +4 -2
- atomicshop/speech_recognize.py +8 -0
- atomicshop/ssh_remote.py +158 -172
- atomicshop/startup/__init__.py +0 -0
- atomicshop/startup/win/__init__.py +0 -0
- atomicshop/startup/win/startup_folder.py +53 -0
- atomicshop/startup/win/task_scheduler.py +119 -0
- atomicshop/system_resource_monitor.py +61 -46
- atomicshop/system_resources.py +8 -8
- atomicshop/tempfiles.py +1 -2
- atomicshop/timer.py +30 -11
- atomicshop/urls.py +41 -0
- atomicshop/venvs.py +28 -0
- atomicshop/versioning.py +27 -0
- atomicshop/web.py +110 -25
- atomicshop/web_apis/__init__.py +0 -0
- atomicshop/web_apis/google_custom_search.py +44 -0
- atomicshop/web_apis/google_llm.py +188 -0
- atomicshop/websocket_parse.py +450 -0
- atomicshop/wrappers/certauthw/certauth.py +1 -0
- atomicshop/wrappers/cryptographyw.py +29 -8
- atomicshop/wrappers/ctyping/etw_winapi/__init__.py +0 -0
- atomicshop/wrappers/ctyping/etw_winapi/const.py +335 -0
- atomicshop/wrappers/ctyping/etw_winapi/etw_functions.py +393 -0
- atomicshop/wrappers/ctyping/file_details_winapi.py +67 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
- atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +13 -9
- atomicshop/wrappers/ctyping/msi_windows_installer/tables.py +35 -0
- atomicshop/wrappers/ctyping/setup_device.py +466 -0
- atomicshop/wrappers/ctyping/win_console.py +39 -0
- atomicshop/wrappers/dockerw/dockerw.py +113 -2
- atomicshop/wrappers/elasticsearchw/config_basic.py +0 -12
- atomicshop/wrappers/elasticsearchw/elastic_infra.py +75 -0
- atomicshop/wrappers/elasticsearchw/elasticsearchw.py +2 -20
- atomicshop/wrappers/factw/get_file_data.py +12 -5
- atomicshop/wrappers/factw/install/install_after_restart.py +89 -5
- atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +20 -14
- atomicshop/wrappers/factw/postgresql/firmware.py +4 -6
- atomicshop/wrappers/githubw.py +583 -51
- atomicshop/wrappers/loggingw/consts.py +49 -0
- atomicshop/wrappers/loggingw/filters.py +102 -0
- atomicshop/wrappers/loggingw/formatters.py +58 -71
- atomicshop/wrappers/loggingw/handlers.py +459 -40
- atomicshop/wrappers/loggingw/loggers.py +19 -0
- atomicshop/wrappers/loggingw/loggingw.py +1010 -178
- atomicshop/wrappers/loggingw/reading.py +344 -19
- atomicshop/wrappers/mongodbw/__init__.py +0 -0
- atomicshop/wrappers/mongodbw/mongo_infra.py +31 -0
- atomicshop/wrappers/mongodbw/mongodbw.py +1432 -0
- atomicshop/wrappers/netshw.py +271 -0
- atomicshop/wrappers/playwrightw/engine.py +34 -19
- atomicshop/wrappers/playwrightw/infra.py +5 -0
- atomicshop/wrappers/playwrightw/javascript.py +7 -3
- atomicshop/wrappers/playwrightw/keyboard.py +14 -0
- atomicshop/wrappers/playwrightw/scenarios.py +172 -5
- atomicshop/wrappers/playwrightw/waits.py +9 -7
- atomicshop/wrappers/powershell_networking.py +80 -0
- atomicshop/wrappers/psutilw/processes.py +81 -0
- atomicshop/wrappers/psutilw/psutil_networks.py +85 -0
- atomicshop/wrappers/psutilw/psutilw.py +9 -0
- atomicshop/wrappers/pyopensslw.py +9 -2
- atomicshop/wrappers/pywin32w/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/cert_store.py +116 -0
- atomicshop/wrappers/pywin32w/console.py +34 -0
- atomicshop/wrappers/pywin32w/win_event_log/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribe.py +212 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +57 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +49 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/schannel_logging.py +97 -0
- atomicshop/wrappers/pywin32w/winshell.py +19 -0
- atomicshop/wrappers/pywin32w/wmis/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/wmis/msft_netipaddress.py +113 -0
- atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +259 -0
- atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +112 -0
- atomicshop/wrappers/pywin32w/wmis/wmi_helpers.py +236 -0
- atomicshop/wrappers/socketw/accepter.py +21 -7
- atomicshop/wrappers/socketw/certificator.py +216 -150
- atomicshop/wrappers/socketw/creator.py +190 -50
- atomicshop/wrappers/socketw/dns_server.py +500 -173
- atomicshop/wrappers/socketw/exception_wrapper.py +45 -52
- atomicshop/wrappers/socketw/process_getter.py +86 -0
- atomicshop/wrappers/socketw/receiver.py +144 -102
- atomicshop/wrappers/socketw/sender.py +65 -35
- atomicshop/wrappers/socketw/sni.py +334 -165
- atomicshop/wrappers/socketw/socket_base.py +134 -0
- atomicshop/wrappers/socketw/socket_client.py +137 -95
- atomicshop/wrappers/socketw/socket_server_tester.py +14 -9
- atomicshop/wrappers/socketw/socket_wrapper.py +717 -116
- atomicshop/wrappers/socketw/ssl_base.py +15 -14
- atomicshop/wrappers/socketw/statistics_csv.py +148 -17
- atomicshop/wrappers/sysmonw.py +157 -0
- atomicshop/wrappers/ubuntu_terminal.py +65 -26
- atomicshop/wrappers/win_auditw.py +189 -0
- atomicshop/wrappers/winregw/__init__.py +0 -0
- atomicshop/wrappers/winregw/winreg_installed_software.py +58 -0
- atomicshop/wrappers/winregw/winreg_network.py +232 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info}/METADATA +31 -49
- atomicshop-3.10.5.dist-info/RECORD +306 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info}/WHEEL +1 -1
- atomicshop/_basics_temp.py +0 -101
- atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
- atomicshop/addons/a_setup_scripts/install_pywintrace_0.3.cmd +0 -2
- atomicshop/addons/mains/install_docker_rootless_ubuntu.py +0 -11
- atomicshop/addons/mains/install_docker_ubuntu_main_sudo.py +0 -11
- atomicshop/addons/mains/install_elastic_search_and_kibana_ubuntu.py +0 -10
- atomicshop/addons/mains/install_wsl_ubuntu_lts_admin.py +0 -9
- atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
- atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
- atomicshop/addons/package_setup/Setup.cmd +0 -7
- atomicshop/addons/process_list/compile.cmd +0 -2
- atomicshop/addons/process_list/compiled/Win10x64/process_list.dll +0 -0
- atomicshop/addons/process_list/compiled/Win10x64/process_list.exp +0 -0
- atomicshop/addons/process_list/compiled/Win10x64/process_list.lib +0 -0
- atomicshop/archiver/_search_in_zip.py +0 -189
- atomicshop/archiver/archiver.py +0 -34
- atomicshop/archiver/search_in_archive.py +0 -250
- atomicshop/archiver/sevenz_app_w.py +0 -86
- atomicshop/archiver/sevenzs.py +0 -44
- atomicshop/archiver/zips.py +0 -293
- atomicshop/etw/dns_trace.py +0 -118
- atomicshop/etw/etw.py +0 -61
- atomicshop/file_types.py +0 -24
- atomicshop/mitm/engines/create_module_template_example.py +0 -13
- atomicshop/mitm/initialize_mitm_server.py +0 -240
- atomicshop/monitor/checks/hash.py +0 -44
- atomicshop/monitor/checks/hash_checks/file.py +0 -55
- atomicshop/monitor/checks/hash_checks/url.py +0 -62
- atomicshop/pbtkmultifile_argparse.py +0 -88
- atomicshop/permissions.py +0 -110
- atomicshop/process_poller.py +0 -237
- atomicshop/script_as_string_processor.py +0 -38
- atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
- atomicshop/ssh_scripts/process_from_port.py +0 -27
- atomicshop/wrappers/_process_wrapper_curl.py +0 -27
- atomicshop/wrappers/_process_wrapper_tar.py +0 -21
- atomicshop/wrappers/dockerw/install_docker.py +0 -209
- atomicshop/wrappers/elasticsearchw/infrastructure.py +0 -265
- atomicshop/wrappers/elasticsearchw/install_elastic.py +0 -232
- atomicshop/wrappers/ffmpegw.py +0 -125
- atomicshop/wrappers/loggingw/checks.py +0 -20
- atomicshop/wrappers/nodejsw/install_nodejs.py +0 -139
- atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
- atomicshop/wrappers/socketw/base.py +0 -59
- atomicshop/wrappers/socketw/get_process.py +0 -107
- atomicshop/wrappers/wslw.py +0 -191
- atomicshop-2.11.47.dist-info/RECORD +0 -251
- /atomicshop/{addons/mains → a_mains}/FACT/factw_fact_extractor_docker_image_main_sudo.py +0 -0
- /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
- /atomicshop/{addons/mains → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
- /atomicshop/{addons/mains → a_mains}/search_for_hyperlinks_in_docx.py +0 -0
- /atomicshop/{archiver → etws}/__init__.py +0 -0
- /atomicshop/{etw → etws/traces}/__init__.py +0 -0
- /atomicshop/{monitor/checks/hash_checks → mitm/statistic_analyzer_helper}/__init__.py +0 -0
- /atomicshop/{wrappers/nodejsw → permissions}/__init__.py +0 -0
- /atomicshop/wrappers/pywin32w/{wmi_win32process.py → wmis/win32process.py} +0 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info/licenses}/LICENSE.txt +0 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info}/top_level.txt +0 -0
|
@@ -1,13 +1,84 @@
|
|
|
1
1
|
import multiprocessing
|
|
2
2
|
import multiprocessing.managers
|
|
3
|
+
import os
|
|
3
4
|
import queue
|
|
5
|
+
import threading
|
|
4
6
|
import concurrent.futures
|
|
5
7
|
from concurrent.futures import ProcessPoolExecutor, as_completed
|
|
8
|
+
from collections import deque
|
|
9
|
+
from typing import Callable
|
|
10
|
+
import time
|
|
6
11
|
|
|
7
12
|
from ..import system_resources
|
|
8
13
|
|
|
9
14
|
|
|
10
|
-
def
|
|
15
|
+
def kill_processes(
        processes: list
):
    """
    Terminate all child processes: first politely (SIGTERM), then forcefully
    (SIGKILL) for any survivors, and finally join every process.

    :param processes: list of multiprocessing.Process objects.
    """
    # Ask the OS to terminate every process that is still running.
    terminated_any = False
    for process in processes:
        if process.is_alive():
            process.terminate()
            terminated_any = True

    # Give terminated processes a chance to exit cleanly before escalating.
    # (Previously this slept unconditionally, costing 1 second even when
    # every process was already dead.)
    if terminated_any:
        time.sleep(1)

    # Force kill anything that ignored the terminate request.
    for process in processes:
        if process.is_alive():
            process.kill()

    # Wait for everything to disappear.
    for process in processes:
        process.join()
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def is_process_crashed(
|
|
33
|
+
processes: list[multiprocessing.Process]
|
|
34
|
+
) -> tuple[int, str] | tuple[None, None]:
|
|
35
|
+
"""
|
|
36
|
+
Check if any of the processes in the list is not alive.
|
|
37
|
+
:param processes: list, list of multiprocessing.Process objects.
|
|
38
|
+
:return: tuple(int, string) or None.
|
|
39
|
+
tuple(0 if any finished cleanly, process name).
|
|
40
|
+
tuple(1 (or exit code integer) if any process crashed, process_name).
|
|
41
|
+
None if all processes are still alive.
|
|
42
|
+
|
|
43
|
+
==============================================
|
|
44
|
+
|
|
45
|
+
Usage example:
|
|
46
|
+
processes = [multiprocessing.Process(target=some_function) for _ in range(5)]
|
|
47
|
+
|
|
48
|
+
for p in processes:
|
|
49
|
+
p.start()
|
|
50
|
+
|
|
51
|
+
# Check if any process has crashed
|
|
52
|
+
try:
|
|
53
|
+
while True:
|
|
54
|
+
# Poll every second; you can use a shorter sleep if you prefer.
|
|
55
|
+
result, process_name = is_process_crashed(processes)
|
|
56
|
+
# If result is None, all processes are still alive.
|
|
57
|
+
if result is not None:
|
|
58
|
+
# If result is 0 or 1, we can exit the loop.
|
|
59
|
+
print(f"Process [{process_name}] finished with exit code {result}.")
|
|
60
|
+
break
|
|
61
|
+
time.sleep(1)
|
|
62
|
+
except KeyboardInterrupt:
|
|
63
|
+
print("Ctrl-C caught – terminating children…")
|
|
64
|
+
kill_all(processes)
|
|
65
|
+
sys.exit(0)
|
|
66
|
+
"""
|
|
67
|
+
|
|
68
|
+
for p in processes:
|
|
69
|
+
if p.exitcode is not None: # the process is *dead*
|
|
70
|
+
kill_processes(processes) # stop the rest
|
|
71
|
+
if p.exitcode == 0:
|
|
72
|
+
# print(f"{p.name} exited cleanly; shutting down.")
|
|
73
|
+
return 0, p.name
|
|
74
|
+
else:
|
|
75
|
+
# print(f"{p.name} crashed (exitcode {p.exitcode}). Shutting everything down.")
|
|
76
|
+
return p.exitcode, p.name
|
|
77
|
+
|
|
78
|
+
return None, None # all processes are still alive
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def process_wrap_queue(function_reference: Callable, *args, **kwargs):
|
|
11
82
|
"""
|
|
12
83
|
The function receives function reference and arguments, and executes the function in a thread.
|
|
13
84
|
"_queue" means that a queue.put() is used to store the result of the function and queue.get() to output it.
|
|
@@ -38,7 +109,7 @@ def process_wrap_queue(function_reference, *args, **kwargs):
|
|
|
38
109
|
class MultiProcessorRecursive:
|
|
39
110
|
def __init__(
|
|
40
111
|
self,
|
|
41
|
-
process_function,
|
|
112
|
+
process_function: Callable,
|
|
42
113
|
input_list: list,
|
|
43
114
|
max_workers: int = None,
|
|
44
115
|
cpu_percent_max: int = 80,
|
|
@@ -52,7 +123,7 @@ class MultiProcessorRecursive:
|
|
|
52
123
|
:param process_function: function, function to execute on the input list.
|
|
53
124
|
:param input_list: list, list of inputs to process.
|
|
54
125
|
:param max_workers: integer, number of workers to execute functions in parallel. Default is None, which
|
|
55
|
-
is the number of CPUs.
|
|
126
|
+
is the number of CPUs that will be counted automatically by the multiprocessing module.
|
|
56
127
|
:param cpu_percent_max: integer, maximum CPU percentage. Above that usage, we will wait before starting new
|
|
57
128
|
execution.
|
|
58
129
|
:param memory_percent_max: integer, maximum memory percentage. Above that usage, we will wait, before starting
|
|
@@ -65,7 +136,7 @@ class MultiProcessorRecursive:
|
|
|
65
136
|
If this is used, the system resources will be checked before starting each new execution from this
|
|
66
137
|
shared dict instead of performing new checks.
|
|
67
138
|
|
|
68
|
-
Usage:
|
|
139
|
+
Usage Examples:
|
|
69
140
|
def unpack_file(file_path):
|
|
70
141
|
# Process the file at file_path and unpack it.
|
|
71
142
|
# Return a list of new file paths that were extracted from the provided path.
|
|
@@ -74,64 +145,317 @@ class MultiProcessorRecursive:
|
|
|
74
145
|
# List of file paths to process
|
|
75
146
|
file_paths = ["path1", "path2", "path3"]
|
|
76
147
|
|
|
77
|
-
#
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
148
|
+
# Note: unpack_file Callable is passed to init without parentheses.
|
|
149
|
+
|
|
150
|
+
1. Providing the list directly to process at once:
|
|
151
|
+
# Initialize the processor.
|
|
152
|
+
processor = MultiProcessor(
|
|
153
|
+
process_function=unpack_file,
|
|
154
|
+
input_list=file_paths,
|
|
155
|
+
max_workers=4, # Number of parallel workers
|
|
156
|
+
cpu_percent_max=80, # Max CPU usage percentage
|
|
157
|
+
memory_percent_max=80, # Max memory usage percentage
|
|
158
|
+
wait_time=5 # Time to wait if resources are overused
|
|
159
|
+
)
|
|
160
|
+
|
|
161
|
+
# Process the list of files at once.
|
|
162
|
+
processor.run_process()
|
|
163
|
+
# Shutdown the pool processes after processing.
|
|
164
|
+
processor.shutdown_pool()
|
|
165
|
+
|
|
166
|
+
2. Processing each file in the list differently then adding to the list of the multiprocessing instance then executing.
|
|
167
|
+
# Initialize the processor once, before the loop, with empty input_list.
|
|
168
|
+
processor = MultiProcessor(
|
|
169
|
+
process_function=unpack_file,
|
|
170
|
+
input_list=[],
|
|
171
|
+
max_workers=4, # Number of parallel workers
|
|
172
|
+
cpu_percent_max=80, # Max CPU usage percentage
|
|
173
|
+
memory_percent_max=80, # Max memory usage percentage
|
|
174
|
+
wait_time=5 # Time to wait if resources are overused
|
|
175
|
+
)
|
|
176
|
+
|
|
177
|
+
for file_path in file_paths:
|
|
178
|
+
# <Process each file>.
|
|
179
|
+
# Add the result to the input_list of the processor.
|
|
180
|
+
processor.input_list.append(file_path)
|
|
181
|
+
|
|
182
|
+
# Process the list of files at once.
|
|
183
|
+
processor.run_process()
|
|
184
|
+
# Shutdown the pool processes after processing.
|
|
185
|
+
processor.shutdown_pool()
|
|
186
|
+
|
|
187
|
+
3. Processing each file in the list separately, since we're using an unpacking function that
|
|
188
|
+
will create more files, but the context for this operation is different for extraction
|
|
189
|
+
of each main file inside the list:
|
|
190
|
+
|
|
191
|
+
# Initialize the processor once, before the loop, with empty input_list.
|
|
192
|
+
processor = MultiProcessor(
|
|
193
|
+
process_function=unpack_file,
|
|
194
|
+
input_list=[],
|
|
195
|
+
max_workers=4, # Number of parallel workers
|
|
196
|
+
cpu_percent_max=80, # Max CPU usage percentage
|
|
197
|
+
memory_percent_max=80, # Max memory usage percentage
|
|
198
|
+
wait_time=5 # Time to wait if resources are overused
|
|
199
|
+
)
|
|
200
|
+
|
|
201
|
+
for file_path in file_paths:
|
|
202
|
+
# <Process each file>.
|
|
203
|
+
# Add the result to the input_list of the processor.
|
|
204
|
+
processor.input_list.append(file_path)
|
|
205
|
+
# Process the added file path separately.
|
|
206
|
+
processor.run_process()
|
|
207
|
+
|
|
208
|
+
# Shutdown the pool processes after processing.
|
|
209
|
+
processor.shutdown_pool()
|
|
210
|
+
"""
|
|
211
|
+
|
|
212
|
+
self.process_function: Callable = process_function
|
|
213
|
+
self.input_list: list = input_list
|
|
214
|
+
self.max_workers: int = max_workers
|
|
215
|
+
self.cpu_percent_max: int = cpu_percent_max
|
|
216
|
+
self.memory_percent_max: int = memory_percent_max
|
|
217
|
+
self.wait_time: float = wait_time
|
|
218
|
+
self.system_monitor_manager_dict: multiprocessing.managers.DictProxy = system_monitor_manager_dict
|
|
219
|
+
|
|
220
|
+
# Create the pool once and reuse it
|
|
221
|
+
self.pool: multiprocessing.Pool = multiprocessing.Pool(processes=self.max_workers)
|
|
222
|
+
|
|
223
|
+
# Keep track of outstanding async results across calls
|
|
224
|
+
self.async_results: list = []
|
|
225
|
+
|
|
226
|
+
def run_process(self):
    """
    Start with the items currently in self.input_list, but whenever a task
    finishes, schedule the children it returns *right away*.
    The call returns when there are no more outstanding tasks.

    Fixes over the previous revision:
    * An empty input list no longer hangs forever (the event is set up front).
    * A task that finished before the initial seeding loop ended can no longer
      release the waiter prematurely ('seeding' flag guarded by a lock).
    """
    # ---------- internal helpers ----------
    outstanding = 0            # tasks that have been submitted but not yet finished
    seeding = True             # True while the initial items are still being submitted
    lock = threading.Lock()    # guards 'outstanding' and 'seeding'
    done_event = threading.Event()  # lets the main thread wait until work is over

    def _submit(item):
        nonlocal outstanding
        # Wait for resources *before* submitting a new job.
        system_resources.wait_for_resource_availability(
            cpu_percent_max=self.cpu_percent_max,
            memory_percent_max=self.memory_percent_max,
            wait_time=self.wait_time,
            system_monitor_manager_dict=self.system_monitor_manager_dict
        )
        with lock:
            outstanding += 1
        self.pool.apply_async(
            self.process_function,
            (item,),
            callback=_on_finish,      # called in the main process when the result is ready
            error_callback=_on_error
        )

    def _on_finish(result):
        """Pool calls this in the parent process thread when a job completes."""
        nonlocal outstanding
        # The worker returned a list of new items - submit them *before*
        # decrementing, so 'outstanding' cannot dip to zero while children
        # are still pending submission.
        if result:
            for child in result:
                _submit(child)
        with lock:
            outstanding -= 1
            # If no work is left and initial seeding finished, release the waiter.
            if outstanding == 0 and not seeding:
                done_event.set()

    def _on_error(exc):
        """Propagate the first exception and stop everything cleanly."""
        done_event.set()
        raise exc  # let your code deal with it - you can customise this

    # ---------- kick-off ----------
    # Schedule the items we already have.
    for item in self.input_list:
        _submit(item)

    # Clear the input list; after this point everything is driven by callbacks.
    self.input_list.clear()

    with lock:
        seeding = False
        # Nothing was submitted (empty input) or everything already finished.
        if outstanding == 0:
            done_event.set()

    # Wait until all recursively spawned work is finished.
    done_event.wait()
|
|
282
|
+
|
|
283
|
+
def shutdown(self):
    """Close the pool and block until every worker process has exited."""
    if not self.pool:
        return
    self.pool.close()  # refuse any new tasks
    self.pool.join()   # drain the workers
    self.pool = None
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
class _MultiProcessorRecursiveWithProcessPoolExecutor:
|
|
292
|
+
def __init__(
|
|
293
|
+
self,
|
|
294
|
+
process_function: Callable,
|
|
295
|
+
input_list: list,
|
|
296
|
+
max_workers: int = None,
|
|
297
|
+
cpu_percent_max: int = 80,
|
|
298
|
+
memory_percent_max: int = 80,
|
|
299
|
+
wait_time: float = 5,
|
|
300
|
+
system_monitor_manager_dict: multiprocessing.managers.DictProxy = None
|
|
301
|
+
):
|
|
90
302
|
"""
|
|
303
|
+
THIS CLASS USES THE concurrent.futures.ProcessPoolExecutor to achieve parallelism.
|
|
304
|
+
For some reason I got freezes on exceptions without the exception output after the run_process() method finished
|
|
305
|
+
and the pool remained open. So, using the MultiProcessorRecursive instead.
|
|
91
306
|
|
|
92
|
-
|
|
307
|
+
MultiProcessor class. Used to execute functions in parallel. The result of each execution is fed back
|
|
308
|
+
to the provided function. Making it sort of recursive execution.
|
|
309
|
+
:param process_function: function, function to execute on the input list.
|
|
310
|
+
:param input_list: list, list of inputs to process.
|
|
311
|
+
:param max_workers: integer, number of workers to execute functions in parallel. Default is None, which
|
|
312
|
+
is the number of CPUs that will be counted automatically by the multiprocessing module.
|
|
313
|
+
:param cpu_percent_max: integer, maximum CPU percentage. Above that usage, we will wait before starting new
|
|
314
|
+
execution.
|
|
315
|
+
:param memory_percent_max: integer, maximum memory percentage. Above that usage, we will wait, before starting
|
|
316
|
+
new execution.
|
|
317
|
+
:param wait_time: float, time to wait if the CPU or memory usage is above the maximum percentage.
|
|
318
|
+
:param system_monitor_manager_dict: multiprocessing.managers.DictProxy, shared manager dict for
|
|
319
|
+
system monitoring. The object is the output of atomicshop.system_resource_monitor.
|
|
320
|
+
If you are already running this monitor, you can pass the manager_dict to both the system monitor and this
|
|
321
|
+
class to share the system resources data.
|
|
322
|
+
If this is used, the system resources will be checked before starting each new execution from this
|
|
323
|
+
shared dict instead of performing new checks.
|
|
324
|
+
|
|
325
|
+
Usage Examples:
|
|
326
|
+
def unpack_file(file_path):
|
|
327
|
+
# Process the file at file_path and unpack it.
|
|
328
|
+
# Return a list of new file paths that were extracted from the provided path.
|
|
329
|
+
return [new_file_path1, new_file_path2] # Example return value
|
|
330
|
+
|
|
331
|
+
# List of file paths to process
|
|
332
|
+
file_paths = ["path1", "path2", "path3"]
|
|
333
|
+
|
|
334
|
+
# Note: unpack_file Callable is passed to init without parentheses.
|
|
335
|
+
|
|
336
|
+
1. Providing the list directly to process at once:
|
|
337
|
+
# Initialize the processor.
|
|
338
|
+
processor = MultiProcessor(
|
|
339
|
+
process_function=unpack_file,
|
|
340
|
+
input_list=file_paths,
|
|
341
|
+
max_workers=4, # Number of parallel workers
|
|
342
|
+
cpu_percent_max=80, # Max CPU usage percentage
|
|
343
|
+
memory_percent_max=80, # Max memory usage percentage
|
|
344
|
+
wait_time=5 # Time to wait if resources are overused
|
|
345
|
+
)
|
|
346
|
+
|
|
347
|
+
# Process the list of files at once.
|
|
348
|
+
processor.run_process()
|
|
349
|
+
# Shutdown the pool processes after processing.
|
|
350
|
+
processor.shutdown_pool()
|
|
351
|
+
|
|
352
|
+
2. Processing each file in the list differently then adding to the list of the multiprocessing instance then executing.
|
|
353
|
+
# Initialize the processor once, before the loop, with empty input_list.
|
|
354
|
+
processor = MultiProcessor(
|
|
355
|
+
process_function=unpack_file,
|
|
356
|
+
input_list=[],
|
|
357
|
+
max_workers=4, # Number of parallel workers
|
|
358
|
+
cpu_percent_max=80, # Max CPU usage percentage
|
|
359
|
+
memory_percent_max=80, # Max memory usage percentage
|
|
360
|
+
wait_time=5 # Time to wait if resources are overused
|
|
361
|
+
)
|
|
362
|
+
|
|
363
|
+
for file_path in file_paths:
|
|
364
|
+
# <Process each file>.
|
|
365
|
+
# Add the result to the input_list of the processor.
|
|
366
|
+
processor.input_list.append(file_path)
|
|
367
|
+
|
|
368
|
+
# Process the list of files at once.
|
|
369
|
+
processor.run_process()
|
|
370
|
+
# Shutdown the pool processes after processing.
|
|
371
|
+
processor.shutdown_pool()
|
|
372
|
+
|
|
373
|
+
3. Processing each file in the list separately, since we're using an unpacking function that
|
|
374
|
+
will create more files, but the context for this operation is different for extraction
|
|
375
|
+
of each main file inside the list:
|
|
376
|
+
|
|
377
|
+
# Initialize the processor once, before the loop, with empty input_list.
|
|
378
|
+
processor = MultiProcessor(
|
|
379
|
+
process_function=unpack_file,
|
|
380
|
+
input_list=[],
|
|
381
|
+
max_workers=4, # Number of parallel workers
|
|
382
|
+
cpu_percent_max=80, # Max CPU usage percentage
|
|
383
|
+
memory_percent_max=80, # Max memory usage percentage
|
|
384
|
+
wait_time=5 # Time to wait if resources are overused
|
|
385
|
+
)
|
|
386
|
+
|
|
387
|
+
for file_path in file_paths:
|
|
388
|
+
# <Process each file>.
|
|
389
|
+
# Add the result to the input_list of the processor.
|
|
390
|
+
processor.input_list.append(file_path)
|
|
391
|
+
# Process the added file path separately.
|
|
392
|
+
processor.run_process()
|
|
393
|
+
|
|
394
|
+
# Shutdown the pool processes after processing.
|
|
395
|
+
processor.shutdown_pool()
|
|
396
|
+
"""
|
|
397
|
+
|
|
398
|
+
self.process_function: Callable = process_function
|
|
93
399
|
self.input_list: list = input_list
|
|
94
|
-
self.max_workers: int = max_workers
|
|
95
400
|
self.cpu_percent_max: int = cpu_percent_max
|
|
96
401
|
self.memory_percent_max: int = memory_percent_max
|
|
97
402
|
self.wait_time: float = wait_time
|
|
98
403
|
self.system_monitor_manager_dict: multiprocessing.managers.DictProxy = system_monitor_manager_dict
|
|
99
404
|
|
|
405
|
+
if max_workers is None:
|
|
406
|
+
max_workers = os.cpu_count()
|
|
407
|
+
self.max_workers: int = max_workers
|
|
408
|
+
|
|
409
|
+
# Create the executor once and reuse it.
|
|
410
|
+
# noinspection PyTypeChecker
|
|
411
|
+
self.executor: ProcessPoolExecutor = None
|
|
412
|
+
|
|
413
|
+
def _ensure_executor(self):
|
|
414
|
+
"""Create a new pool if we do not have one or if the old one was shut."""
|
|
415
|
+
if self.executor is None or getattr(self.executor, '_shutdown', False):
|
|
416
|
+
self.executor = ProcessPoolExecutor(max_workers=self.max_workers)
|
|
417
|
+
|
|
100
418
|
def run_process(self):
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
# Collect results as they complete
|
|
123
|
-
for async_result in async_results:
|
|
124
|
-
try:
|
|
125
|
-
result = async_result.get()
|
|
126
|
-
# Assuming process_function returns a list, extend new_input_list
|
|
127
|
-
new_input_list.extend(result)
|
|
128
|
-
except Exception as e:
|
|
129
|
-
print(f"An error occurred: {e}")
|
|
419
|
+
# Make sure we have a live executor
|
|
420
|
+
self._ensure_executor()
|
|
421
|
+
|
|
422
|
+
work_q = deque(self.input_list) # breadth‑first queue
|
|
423
|
+
self.input_list.clear()
|
|
424
|
+
futures = set()
|
|
425
|
+
|
|
426
|
+
# helper to submit jobs up to the concurrency limit
|
|
427
|
+
def _fill():
|
|
428
|
+
while work_q and len(futures) < self.max_workers:
|
|
429
|
+
item = work_q.popleft()
|
|
430
|
+
system_resources.wait_for_resource_availability(
|
|
431
|
+
cpu_percent_max=self.cpu_percent_max,
|
|
432
|
+
memory_percent_max=self.memory_percent_max,
|
|
433
|
+
wait_time=self.wait_time,
|
|
434
|
+
system_monitor_manager_dict=self.system_monitor_manager_dict
|
|
435
|
+
)
|
|
436
|
+
futures.add(self.executor.submit(self.process_function, item))
|
|
437
|
+
|
|
438
|
+
_fill() # start the first wave
|
|
130
439
|
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
#
|
|
134
|
-
|
|
440
|
+
while futures:
|
|
441
|
+
for fut in as_completed(futures):
|
|
442
|
+
futures.remove(fut) # a slot just freed up
|
|
443
|
+
|
|
444
|
+
# propagate worker exceptions immediately
|
|
445
|
+
children = fut.result()
|
|
446
|
+
|
|
447
|
+
# schedule the newly discovered items
|
|
448
|
+
if children:
|
|
449
|
+
work_q.extend(children)
|
|
450
|
+
|
|
451
|
+
_fill() # keep the pool saturated
|
|
452
|
+
break # leave the for‑loop so as_completed resets
|
|
453
|
+
|
|
454
|
+
def shutdown(self):
|
|
455
|
+
"""Shuts down the executor gracefully."""
|
|
456
|
+
if self.executor:
|
|
457
|
+
self.executor.shutdown(wait=True) # blocks until all tasks complete
|
|
458
|
+
self.executor = None
|
|
135
459
|
|
|
136
460
|
|
|
137
461
|
class ConcurrentProcessorRecursive:
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"""
|
|
2
|
+
# Add static files to your package / module pyproject.toml:
|
|
3
|
+
[tool.setuptools.package-data]
|
|
4
|
+
"atomicshop.addons" = ["**"]
|
|
5
|
+
|
|
6
|
+
# Read relative path of your module inside your package / module script:
|
|
7
|
+
from importlib.resources import files
|
|
8
|
+
PACKAGE_DLL_PATH = 'addons/process_list/compiled/Win10x64/process_list.dll'
|
|
9
|
+
FULL_DLL_PATH = str(files(__package__).joinpath(PACKAGE_DLL_PATH))
|
|
10
|
+
"""
|