atomicshop 2.15.11__py3-none-any.whl → 3.10.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (221)
  1. atomicshop/__init__.py +1 -1
  2. atomicshop/{addons/mains → a_mains}/FACT/update_extract.py +3 -2
  3. atomicshop/a_mains/dns_gateway_setting.py +11 -0
  4. atomicshop/a_mains/get_local_tcp_ports.py +85 -0
  5. atomicshop/a_mains/github_wrapper.py +11 -0
  6. atomicshop/a_mains/install_ca_certificate.py +172 -0
  7. atomicshop/a_mains/process_from_port.py +119 -0
  8. atomicshop/a_mains/set_default_dns_gateway.py +90 -0
  9. atomicshop/a_mains/update_config_toml.py +38 -0
  10. atomicshop/basics/ansi_escape_codes.py +3 -1
  11. atomicshop/basics/argparse_template.py +2 -0
  12. atomicshop/basics/booleans.py +27 -30
  13. atomicshop/basics/bytes_arrays.py +43 -0
  14. atomicshop/basics/classes.py +149 -1
  15. atomicshop/basics/enums.py +2 -2
  16. atomicshop/basics/exceptions.py +5 -1
  17. atomicshop/basics/list_of_classes.py +29 -0
  18. atomicshop/basics/multiprocesses.py +374 -50
  19. atomicshop/basics/strings.py +72 -3
  20. atomicshop/basics/threads.py +14 -0
  21. atomicshop/basics/tracebacks.py +13 -3
  22. atomicshop/certificates.py +153 -52
  23. atomicshop/config_init.py +11 -6
  24. atomicshop/console_user_response.py +7 -14
  25. atomicshop/consoles.py +9 -0
  26. atomicshop/datetimes.py +1 -1
  27. atomicshop/diff_check.py +3 -3
  28. atomicshop/dns.py +128 -3
  29. atomicshop/etws/_pywintrace_fix.py +17 -0
  30. atomicshop/etws/trace.py +40 -42
  31. atomicshop/etws/traces/trace_dns.py +56 -44
  32. atomicshop/etws/traces/trace_tcp.py +130 -0
  33. atomicshop/file_io/csvs.py +27 -5
  34. atomicshop/file_io/docxs.py +34 -17
  35. atomicshop/file_io/file_io.py +31 -17
  36. atomicshop/file_io/jsons.py +49 -0
  37. atomicshop/file_io/tomls.py +139 -0
  38. atomicshop/filesystem.py +616 -291
  39. atomicshop/get_process_list.py +3 -3
  40. atomicshop/http_parse.py +149 -93
  41. atomicshop/ip_addresses.py +6 -1
  42. atomicshop/mitm/centered_settings.py +132 -0
  43. atomicshop/mitm/config_static.py +207 -0
  44. atomicshop/mitm/config_toml_editor.py +55 -0
  45. atomicshop/mitm/connection_thread_worker.py +875 -357
  46. atomicshop/mitm/engines/__parent/parser___parent.py +4 -17
  47. atomicshop/mitm/engines/__parent/recorder___parent.py +108 -51
  48. atomicshop/mitm/engines/__parent/requester___parent.py +116 -0
  49. atomicshop/mitm/engines/__parent/responder___parent.py +75 -114
  50. atomicshop/mitm/engines/__reference_general/parser___reference_general.py +10 -7
  51. atomicshop/mitm/engines/__reference_general/recorder___reference_general.py +5 -5
  52. atomicshop/mitm/engines/__reference_general/requester___reference_general.py +47 -0
  53. atomicshop/mitm/engines/__reference_general/responder___reference_general.py +95 -13
  54. atomicshop/mitm/engines/create_module_template.py +58 -14
  55. atomicshop/mitm/import_config.py +359 -139
  56. atomicshop/mitm/initialize_engines.py +160 -80
  57. atomicshop/mitm/message.py +64 -23
  58. atomicshop/mitm/mitm_main.py +892 -0
  59. atomicshop/mitm/recs_files.py +183 -0
  60. atomicshop/mitm/shared_functions.py +4 -10
  61. atomicshop/mitm/ssh_tester.py +82 -0
  62. atomicshop/mitm/statistic_analyzer.py +136 -40
  63. atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +265 -83
  64. atomicshop/monitor/checks/dns.py +1 -1
  65. atomicshop/networks.py +671 -0
  66. atomicshop/on_exit.py +39 -9
  67. atomicshop/package_mains_processor.py +84 -0
  68. atomicshop/permissions/permissions.py +22 -0
  69. atomicshop/permissions/ubuntu_permissions.py +239 -0
  70. atomicshop/permissions/win_permissions.py +33 -0
  71. atomicshop/print_api.py +24 -42
  72. atomicshop/process.py +24 -6
  73. atomicshop/process_poller/process_pool.py +0 -1
  74. atomicshop/process_poller/simple_process_pool.py +204 -5
  75. atomicshop/python_file_patcher.py +1 -1
  76. atomicshop/python_functions.py +27 -75
  77. atomicshop/speech_recognize.py +8 -0
  78. atomicshop/ssh_remote.py +158 -172
  79. atomicshop/system_resource_monitor.py +61 -47
  80. atomicshop/system_resources.py +8 -8
  81. atomicshop/tempfiles.py +1 -2
  82. atomicshop/urls.py +6 -0
  83. atomicshop/venvs.py +28 -0
  84. atomicshop/versioning.py +27 -0
  85. atomicshop/web.py +98 -27
  86. atomicshop/web_apis/google_custom_search.py +44 -0
  87. atomicshop/web_apis/google_llm.py +188 -0
  88. atomicshop/websocket_parse.py +450 -0
  89. atomicshop/wrappers/certauthw/certauth.py +1 -0
  90. atomicshop/wrappers/cryptographyw.py +29 -8
  91. atomicshop/wrappers/ctyping/etw_winapi/const.py +97 -47
  92. atomicshop/wrappers/ctyping/etw_winapi/etw_functions.py +178 -49
  93. atomicshop/wrappers/ctyping/file_details_winapi.py +67 -0
  94. atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
  95. atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +2 -2
  96. atomicshop/wrappers/ctyping/setup_device.py +466 -0
  97. atomicshop/wrappers/ctyping/win_console.py +39 -0
  98. atomicshop/wrappers/dockerw/dockerw.py +113 -2
  99. atomicshop/wrappers/elasticsearchw/config_basic.py +0 -12
  100. atomicshop/wrappers/elasticsearchw/elastic_infra.py +75 -0
  101. atomicshop/wrappers/elasticsearchw/elasticsearchw.py +2 -20
  102. atomicshop/wrappers/factw/get_file_data.py +12 -5
  103. atomicshop/wrappers/factw/install/install_after_restart.py +89 -5
  104. atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +20 -14
  105. atomicshop/wrappers/githubw.py +537 -54
  106. atomicshop/wrappers/loggingw/consts.py +1 -1
  107. atomicshop/wrappers/loggingw/filters.py +23 -0
  108. atomicshop/wrappers/loggingw/formatters.py +12 -0
  109. atomicshop/wrappers/loggingw/handlers.py +214 -107
  110. atomicshop/wrappers/loggingw/loggers.py +19 -0
  111. atomicshop/wrappers/loggingw/loggingw.py +860 -22
  112. atomicshop/wrappers/loggingw/reading.py +134 -112
  113. atomicshop/wrappers/mongodbw/mongo_infra.py +31 -0
  114. atomicshop/wrappers/mongodbw/mongodbw.py +1324 -36
  115. atomicshop/wrappers/netshw.py +271 -0
  116. atomicshop/wrappers/playwrightw/engine.py +34 -19
  117. atomicshop/wrappers/playwrightw/infra.py +5 -0
  118. atomicshop/wrappers/playwrightw/javascript.py +7 -3
  119. atomicshop/wrappers/playwrightw/keyboard.py +14 -0
  120. atomicshop/wrappers/playwrightw/scenarios.py +172 -5
  121. atomicshop/wrappers/playwrightw/waits.py +9 -7
  122. atomicshop/wrappers/powershell_networking.py +80 -0
  123. atomicshop/wrappers/psutilw/processes.py +37 -1
  124. atomicshop/wrappers/psutilw/psutil_networks.py +85 -0
  125. atomicshop/wrappers/pyopensslw.py +9 -2
  126. atomicshop/wrappers/pywin32w/cert_store.py +116 -0
  127. atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
  128. atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +3 -105
  129. atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +3 -57
  130. atomicshop/wrappers/pywin32w/wmis/msft_netipaddress.py +113 -0
  131. atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +259 -0
  132. atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +112 -0
  133. atomicshop/wrappers/pywin32w/wmis/wmi_helpers.py +236 -0
  134. atomicshop/wrappers/socketw/accepter.py +21 -7
  135. atomicshop/wrappers/socketw/certificator.py +216 -150
  136. atomicshop/wrappers/socketw/creator.py +190 -50
  137. atomicshop/wrappers/socketw/dns_server.py +491 -182
  138. atomicshop/wrappers/socketw/exception_wrapper.py +45 -52
  139. atomicshop/wrappers/socketw/process_getter.py +86 -0
  140. atomicshop/wrappers/socketw/receiver.py +144 -102
  141. atomicshop/wrappers/socketw/sender.py +65 -35
  142. atomicshop/wrappers/socketw/sni.py +334 -165
  143. atomicshop/wrappers/socketw/socket_base.py +134 -0
  144. atomicshop/wrappers/socketw/socket_client.py +137 -95
  145. atomicshop/wrappers/socketw/socket_server_tester.py +11 -7
  146. atomicshop/wrappers/socketw/socket_wrapper.py +717 -116
  147. atomicshop/wrappers/socketw/ssl_base.py +15 -14
  148. atomicshop/wrappers/socketw/statistics_csv.py +148 -17
  149. atomicshop/wrappers/sysmonw.py +1 -1
  150. atomicshop/wrappers/ubuntu_terminal.py +65 -26
  151. atomicshop/wrappers/win_auditw.py +189 -0
  152. atomicshop/wrappers/winregw/__init__.py +0 -0
  153. atomicshop/wrappers/winregw/winreg_installed_software.py +58 -0
  154. atomicshop/wrappers/winregw/winreg_network.py +232 -0
  155. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/METADATA +31 -51
  156. atomicshop-3.10.5.dist-info/RECORD +306 -0
  157. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/WHEEL +1 -1
  158. atomicshop/_basics_temp.py +0 -101
  159. atomicshop/a_installs/win/fibratus.py +0 -9
  160. atomicshop/a_installs/win/mongodb.py +0 -9
  161. atomicshop/a_installs/win/pycharm.py +0 -9
  162. atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
  163. atomicshop/addons/a_setup_scripts/install_pywintrace_0.3.cmd +0 -2
  164. atomicshop/addons/mains/__pycache__/install_fibratus_windows.cpython-312.pyc +0 -0
  165. atomicshop/addons/mains/__pycache__/msi_unpacker.cpython-312.pyc +0 -0
  166. atomicshop/addons/mains/install_docker_rootless_ubuntu.py +0 -11
  167. atomicshop/addons/mains/install_docker_ubuntu_main_sudo.py +0 -11
  168. atomicshop/addons/mains/install_elastic_search_and_kibana_ubuntu.py +0 -10
  169. atomicshop/addons/mains/install_wsl_ubuntu_lts_admin.py +0 -9
  170. atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
  171. atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
  172. atomicshop/addons/package_setup/Setup.cmd +0 -7
  173. atomicshop/archiver/_search_in_zip.py +0 -189
  174. atomicshop/archiver/archiver.py +0 -34
  175. atomicshop/archiver/search_in_archive.py +0 -250
  176. atomicshop/archiver/sevenz_app_w.py +0 -86
  177. atomicshop/archiver/sevenzs.py +0 -44
  178. atomicshop/archiver/zips.py +0 -293
  179. atomicshop/file_types.py +0 -24
  180. atomicshop/mitm/config_editor.py +0 -37
  181. atomicshop/mitm/engines/create_module_template_example.py +0 -13
  182. atomicshop/mitm/initialize_mitm_server.py +0 -268
  183. atomicshop/pbtkmultifile_argparse.py +0 -88
  184. atomicshop/permissions.py +0 -151
  185. atomicshop/script_as_string_processor.py +0 -38
  186. atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
  187. atomicshop/ssh_scripts/process_from_port.py +0 -27
  188. atomicshop/wrappers/_process_wrapper_curl.py +0 -27
  189. atomicshop/wrappers/_process_wrapper_tar.py +0 -21
  190. atomicshop/wrappers/dockerw/install_docker.py +0 -209
  191. atomicshop/wrappers/elasticsearchw/infrastructure.py +0 -265
  192. atomicshop/wrappers/elasticsearchw/install_elastic.py +0 -232
  193. atomicshop/wrappers/ffmpegw.py +0 -125
  194. atomicshop/wrappers/fibratusw/install.py +0 -81
  195. atomicshop/wrappers/mongodbw/infrastructure.py +0 -53
  196. atomicshop/wrappers/mongodbw/install_mongodb.py +0 -190
  197. atomicshop/wrappers/msiw.py +0 -149
  198. atomicshop/wrappers/nodejsw/install_nodejs.py +0 -139
  199. atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
  200. atomicshop/wrappers/psutilw/networks.py +0 -45
  201. atomicshop/wrappers/pycharmw.py +0 -81
  202. atomicshop/wrappers/socketw/base.py +0 -59
  203. atomicshop/wrappers/socketw/get_process.py +0 -107
  204. atomicshop/wrappers/wslw.py +0 -191
  205. atomicshop-2.15.11.dist-info/RECORD +0 -302
  206. /atomicshop/{addons/mains → a_mains}/FACT/factw_fact_extractor_docker_image_main_sudo.py +0 -0
  207. /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
  208. /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
  209. /atomicshop/{addons → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
  210. /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
  211. /atomicshop/{addons → a_mains/addons}/process_list/compile.cmd +0 -0
  212. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.dll +0 -0
  213. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.exp +0 -0
  214. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.lib +0 -0
  215. /atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +0 -0
  216. /atomicshop/{archiver → permissions}/__init__.py +0 -0
  217. /atomicshop/{wrappers/fibratusw → web_apis}/__init__.py +0 -0
  218. /atomicshop/wrappers/{nodejsw → pywin32w/wmis}/__init__.py +0 -0
  219. /atomicshop/wrappers/pywin32w/{wmi_win32process.py → wmis/win32process.py} +0 -0
  220. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info/licenses}/LICENSE.txt +0 -0
  221. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/top_level.txt +0 -0
atomicshop/basics/multiprocesses.py
@@ -1,13 +1,84 @@
 import multiprocessing
 import multiprocessing.managers
+import os
 import queue
+import threading
 import concurrent.futures
 from concurrent.futures import ProcessPoolExecutor, as_completed
+from collections import deque
+from typing import Callable
+import time
 
 from .. import system_resources
 
 
-def process_wrap_queue(function_reference, *args, **kwargs):
+def kill_processes(
+        processes: list
+):
+    """Terminate all children with SIGTERM (or SIGKILL if you like)."""
+    # Ask the OS to terminate all processes in the list.
+    for p in processes:
+        if p.is_alive():
+            p.terminate()
+    time.sleep(1)  # give processes a chance to exit cleanly
+    # Force kill any process that is still alive.
+    for p in processes:
+        if p.is_alive():
+            p.kill()
+    for p in processes:  # wait for everything to disappear
+        p.join()
+
+
+def is_process_crashed(
+        processes: list[multiprocessing.Process]
+) -> tuple[int, str] | tuple[None, None]:
+    """
+    Check if any of the processes in the list is not alive.
+    :param processes: list, list of multiprocessing.Process objects.
+    :return: tuple(int, string) or tuple(None, None).
+        tuple(0, process_name) if any process finished cleanly.
+        tuple(exit code integer, process_name) if any process crashed.
+        tuple(None, None) if all processes are still alive.
+
+    ==============================================
+
+    Usage example:
+        processes = [multiprocessing.Process(target=some_function) for _ in range(5)]
+
+        for p in processes:
+            p.start()
+
+        # Check if any process has crashed.
+        try:
+            while True:
+                # Poll every second; you can use a shorter sleep if you prefer.
+                result, process_name = is_process_crashed(processes)
+                # If result is None, all processes are still alive.
+                if result is not None:
+                    # If result is 0 or an exit code, we can exit the loop.
+                    print(f"Process [{process_name}] finished with exit code {result}.")
+                    break
+                time.sleep(1)
+        except KeyboardInterrupt:
+            print("Ctrl-C caught – terminating children…")
+            kill_processes(processes)
+            sys.exit(0)
+    """
+
+    for p in processes:
+        if p.exitcode is not None:  # the process is *dead*
+            kill_processes(processes)  # stop the rest
+            if p.exitcode == 0:
+                # print(f"{p.name} exited cleanly; shutting down.")
+                return 0, p.name
+            else:
+                # print(f"{p.name} crashed (exitcode {p.exitcode}). Shutting everything down.")
+                return p.exitcode, p.name
+
+    return None, None  # all processes are still alive
+
+
+def process_wrap_queue(function_reference: Callable, *args, **kwargs):
     """
     The function receives function reference and arguments, and executes the function in a thread.
     "_queue" means that a queue.put() is used to store the result of the function and queue.get() to output it.
@@ -38,7 +109,7 @@ def process_wrap_queue(function_reference, *args, **kwargs):
 class MultiProcessorRecursive:
     def __init__(
             self,
-            process_function,
+            process_function: Callable,
             input_list: list,
             max_workers: int = None,
             cpu_percent_max: int = 80,
@@ -52,7 +123,7 @@ class MultiProcessorRecursive:
         :param process_function: function, function to execute on the input list.
         :param input_list: list, list of inputs to process.
         :param max_workers: integer, number of workers to execute functions in parallel. Default is None, which
-            is the number of CPUs.
+            is the number of CPUs that will be counted automatically by the multiprocessing module.
         :param cpu_percent_max: integer, maximum CPU percentage. Above that usage, we will wait before starting new
             execution.
         :param memory_percent_max: integer, maximum memory percentage. Above that usage, we will wait, before starting
@@ -65,7 +136,7 @@ class MultiProcessorRecursive:
             If this is used, the system resources will be checked before starting each new execution from this
             shared dict instead of performing new checks.
 
-        Usage:
+        Usage Examples:
             def unpack_file(file_path):
                 # Process the file at file_path and unpack it.
                 # Return a list of new file paths that were extracted from the provided path.
@@ -74,64 +145,317 @@ class MultiProcessorRecursive:
             # List of file paths to process
             file_paths = ["path1", "path2", "path3"]
 
-            # Create an instance of MultiProcessor
-            # Note: unpacking.unpack_file is passed without parentheses
-            processor = MultiProcessor(
-                process_function=unpack_file,
-                input_list=file_paths,
-                max_workers=4,  # Number of parallel workers
-                cpu_percent_max=80,  # Max CPU usage percentage
-                memory_percent_max=80,  # Max memory usage percentage
-                wait_time=5  # Time to wait if resources are overused
+            # Note: the unpack_file Callable is passed to init without parentheses.
+
+            1. Providing the list directly to process at once:
+                # Initialize the processor.
+                processor = MultiProcessorRecursive(
+                    process_function=unpack_file,
+                    input_list=file_paths,
+                    max_workers=4,  # Number of parallel workers
+                    cpu_percent_max=80,  # Max CPU usage percentage
+                    memory_percent_max=80,  # Max memory usage percentage
+                    wait_time=5  # Time to wait if resources are overused
+                )
+
+                # Process the list of files at once.
+                processor.run_process()
+                # Shut down the pool processes after processing.
+                processor.shutdown()
+
+            2. Processing each file in the list first, then adding the results to the processor's input_list, then executing:
+                # Initialize the processor once, before the loop, with an empty input_list.
+                processor = MultiProcessorRecursive(
+                    process_function=unpack_file,
+                    input_list=[],
+                    max_workers=4,  # Number of parallel workers
+                    cpu_percent_max=80,  # Max CPU usage percentage
+                    memory_percent_max=80,  # Max memory usage percentage
+                    wait_time=5  # Time to wait if resources are overused
+                )
+
+                for file_path in file_paths:
+                    # <Process each file>.
+                    # Add the result to the input_list of the processor.
+                    processor.input_list.append(file_path)
+
+                # Process the list of files at once.
+                processor.run_process()
+                # Shut down the pool processes after processing.
+                processor.shutdown()
+
+            3. Processing each file in the list separately, since we're using an unpacking function that
+               will create more files, but the context for this operation is different for the extraction
+               of each main file inside the list:
+
+                # Initialize the processor once, before the loop, with an empty input_list.
+                processor = MultiProcessorRecursive(
+                    process_function=unpack_file,
+                    input_list=[],
+                    max_workers=4,  # Number of parallel workers
+                    cpu_percent_max=80,  # Max CPU usage percentage
+                    memory_percent_max=80,  # Max memory usage percentage
+                    wait_time=5  # Time to wait if resources are overused
+                )
+
+                for file_path in file_paths:
+                    # <Process each file>.
+                    # Add the result to the input_list of the processor.
+                    processor.input_list.append(file_path)
+                    # Process the added file path separately.
+                    processor.run_process()
+
+                # Shut down the pool processes after processing.
+                processor.shutdown()
+        """
+
+        self.process_function: Callable = process_function
+        self.input_list: list = input_list
+        self.max_workers: int = max_workers
+        self.cpu_percent_max: int = cpu_percent_max
+        self.memory_percent_max: int = memory_percent_max
+        self.wait_time: float = wait_time
+        self.system_monitor_manager_dict: multiprocessing.managers.DictProxy = system_monitor_manager_dict
+
+        # Create the pool once and reuse it.
+        self.pool: multiprocessing.Pool = multiprocessing.Pool(processes=self.max_workers)
+
+        # Keep track of outstanding async results across calls.
+        self.async_results: list = []
+
+    def run_process(self):
+        """
+        Start with the items currently in self.input_list, but whenever a task
+        finishes, schedule the children it returns *right away*.
+        The loop ends when there are no more outstanding tasks.
+        """
+        # ---------- internal helpers ----------
+        outstanding = 0  # tasks that have been submitted but not yet finished
+        done_event = threading.Event()  # lets the main thread wait until work is over
+
+        def _submit(item):
+            nonlocal outstanding
+            # Wait for resources *before* submitting a new job.
+            system_resources.wait_for_resource_availability(
+                cpu_percent_max=self.cpu_percent_max,
+                memory_percent_max=self.memory_percent_max,
+                wait_time=self.wait_time,
+                system_monitor_manager_dict=self.system_monitor_manager_dict
+            )
+            outstanding += 1
+            self.pool.apply_async(
+                self.process_function,
+                (item,),
+                callback=_on_finish,  # called in the main process when the result is ready
+                error_callback=_on_error
             )
 
-            # Run the processing
-            processor.run_process()
+        def _on_finish(result):
+            """The pool calls this in the parent process thread when a job completes."""
+            nonlocal outstanding
+            outstanding -= 1
+
+            # The worker returned a list of new items – submit them immediately.
+            if result:
+                for child in result:
+                    _submit(child)
+
+            # If no work is left, release the waiter.
+            if outstanding == 0:
+                done_event.set()
+
+        def _on_error(exc):
+            """Propagate the first exception and stop everything cleanly."""
+            done_event.set()
+            raise exc  # let your code deal with it – you can customise this
+
+        # ---------- kick-off ----------
+        # Schedule the items we already have.
+        for item in self.input_list:
+            _submit(item)
+
+        # Clear the input list; after this point everything is driven by callbacks.
+        self.input_list.clear()
+
+        # Wait until all recursively spawned work is finished.
+        done_event.wait()
+
+    def shutdown(self):
+        """Shuts down the pool gracefully."""
+        if self.pool:
+            self.pool.close()  # Stop accepting new tasks
+            self.pool.join()  # Wait for all tasks to complete
+            self.pool = None
+
+
+class _MultiProcessorRecursiveWithProcessPoolExecutor:
+    def __init__(
+            self,
+            process_function: Callable,
+            input_list: list,
+            max_workers: int = None,
+            cpu_percent_max: int = 80,
+            memory_percent_max: int = 80,
+            wait_time: float = 5,
+            system_monitor_manager_dict: multiprocessing.managers.DictProxy = None
+    ):
         """
+        THIS CLASS USES THE concurrent.futures.ProcessPoolExecutor to achieve parallelism.
+        For some reason I got freezes on exceptions, without the exception output, after the run_process() method
+        finished and the pool remained open. So, use MultiProcessorRecursive instead.
 
-        self.process_function = process_function
+        MultiProcessor class. Used to execute functions in parallel. The result of each execution is fed back
+        to the provided function, making it a sort of recursive execution.
+        :param process_function: function, function to execute on the input list.
+        :param input_list: list, list of inputs to process.
+        :param max_workers: integer, number of workers to execute functions in parallel. Default is None, which
+            is the number of CPUs that will be counted automatically by the multiprocessing module.
+        :param cpu_percent_max: integer, maximum CPU percentage. Above that usage, we will wait before starting new
+            execution.
+        :param memory_percent_max: integer, maximum memory percentage. Above that usage, we will wait, before starting
+            new execution.
+        :param wait_time: float, time to wait if the CPU or memory usage is above the maximum percentage.
+        :param system_monitor_manager_dict: multiprocessing.managers.DictProxy, shared manager dict for
+            system monitoring. The object is the output of atomicshop.system_resource_monitor.
+            If you are already running this monitor, you can pass the manager_dict to both the system monitor and this
+            class to share the system resources data.
+            If this is used, the system resources will be checked before starting each new execution from this
+            shared dict instead of performing new checks.
+
+        Usage Examples:
+            def unpack_file(file_path):
+                # Process the file at file_path and unpack it.
+                # Return a list of new file paths that were extracted from the provided path.
+                return [new_file_path1, new_file_path2]  # Example return value
+
+            # List of file paths to process
+            file_paths = ["path1", "path2", "path3"]
+
+            # Note: the unpack_file Callable is passed to init without parentheses.
+
+            1. Providing the list directly to process at once:
+                # Initialize the processor.
+                processor = _MultiProcessorRecursiveWithProcessPoolExecutor(
+                    process_function=unpack_file,
+                    input_list=file_paths,
+                    max_workers=4,  # Number of parallel workers
+                    cpu_percent_max=80,  # Max CPU usage percentage
+                    memory_percent_max=80,  # Max memory usage percentage
+                    wait_time=5  # Time to wait if resources are overused
+                )
+
+                # Process the list of files at once.
+                processor.run_process()
+                # Shut down the pool processes after processing.
+                processor.shutdown()
+
+            2. Processing each file in the list first, then adding the results to the processor's input_list, then executing:
+                # Initialize the processor once, before the loop, with an empty input_list.
+                processor = _MultiProcessorRecursiveWithProcessPoolExecutor(
+                    process_function=unpack_file,
+                    input_list=[],
+                    max_workers=4,  # Number of parallel workers
+                    cpu_percent_max=80,  # Max CPU usage percentage
+                    memory_percent_max=80,  # Max memory usage percentage
+                    wait_time=5  # Time to wait if resources are overused
+                )
+
+                for file_path in file_paths:
+                    # <Process each file>.
+                    # Add the result to the input_list of the processor.
+                    processor.input_list.append(file_path)
+
+                # Process the list of files at once.
+                processor.run_process()
+                # Shut down the pool processes after processing.
+                processor.shutdown()
+
+            3. Processing each file in the list separately, since we're using an unpacking function that
+               will create more files, but the context for this operation is different for the extraction
+               of each main file inside the list:
+
+                # Initialize the processor once, before the loop, with an empty input_list.
+                processor = _MultiProcessorRecursiveWithProcessPoolExecutor(
+                    process_function=unpack_file,
+                    input_list=[],
+                    max_workers=4,  # Number of parallel workers
+                    cpu_percent_max=80,  # Max CPU usage percentage
+                    memory_percent_max=80,  # Max memory usage percentage
+                    wait_time=5  # Time to wait if resources are overused
+                )
+
+                for file_path in file_paths:
+                    # <Process each file>.
+                    # Add the result to the input_list of the processor.
+                    processor.input_list.append(file_path)
+                    # Process the added file path separately.
+                    processor.run_process()
+
+                # Shut down the pool processes after processing.
+                processor.shutdown()
+        """
+
+        self.process_function: Callable = process_function
         self.input_list: list = input_list
-        self.max_workers: int = max_workers
         self.cpu_percent_max: int = cpu_percent_max
         self.memory_percent_max: int = memory_percent_max
         self.wait_time: float = wait_time
         self.system_monitor_manager_dict: multiprocessing.managers.DictProxy = system_monitor_manager_dict
 
+        if max_workers is None:
+            max_workers = os.cpu_count()
+        self.max_workers: int = max_workers
+
+        # Create the executor once and reuse it.
+        # noinspection PyTypeChecker
+        self.executor: ProcessPoolExecutor = None
+
+    def _ensure_executor(self):
+        """Create a new pool if we do not have one or if the old one was shut down."""
+        if self.executor is None or getattr(self.executor, '_shutdown', False):
+            self.executor = ProcessPoolExecutor(max_workers=self.max_workers)
+
     def run_process(self):
-        with multiprocessing.Pool(processes=self.max_workers) as pool:
-            # Keep track of the async results
-            async_results = []
-
-            while self.input_list:
-                new_input_list = []
-                for item in self.input_list:
-                    # Check system resources before processing each item
-                    system_resources.wait_for_resource_availability(
-                        cpu_percent_max=self.cpu_percent_max,
-                        memory_percent_max=self.memory_percent_max,
-                        wait_time=self.wait_time,
-                        system_monitor_manager_dict=self.system_monitor_manager_dict)
-
-                    # Process the item
-                    async_result = pool.apply_async(self.process_function, (item,))
-                    async_results.append(async_result)
-
-                # Reset input_list for next round of processing
-                self.input_list = []
-
-                # Collect results as they complete
-                for async_result in async_results:
-                    try:
-                        result = async_result.get()
-                        # Assuming process_function returns a list, extend new_input_list
-                        new_input_list.extend(result)
-                    except Exception as e:
-                        print(f"An error occurred: {e}")
+        # Make sure we have a live executor.
+        self._ensure_executor()
+
+        work_q = deque(self.input_list)  # breadth-first queue
+        self.input_list.clear()
+        futures = set()
+
+        # Helper to submit jobs up to the concurrency limit.
+        def _fill():
+            while work_q and len(futures) < self.max_workers:
+                item = work_q.popleft()
+                system_resources.wait_for_resource_availability(
+                    cpu_percent_max=self.cpu_percent_max,
+                    memory_percent_max=self.memory_percent_max,
+                    wait_time=self.wait_time,
+                    system_monitor_manager_dict=self.system_monitor_manager_dict
+                )
+                futures.add(self.executor.submit(self.process_function, item))
+
+        _fill()  # start the first wave
 
-            # Update the input_list for the next iteration
-            self.input_list = new_input_list
-            # Clear the async_results for the next iteration
-            async_results.clear()
+        while futures:
+            for fut in as_completed(futures):
+                futures.remove(fut)  # a slot just freed up
+
+                # Propagate worker exceptions immediately.
+                children = fut.result()
+
+                # Schedule the newly discovered items.
+                if children:
+                    work_q.extend(children)
+
+                _fill()  # keep the pool saturated
+                break  # leave the for-loop so as_completed resets
+
+    def shutdown(self):
+        """Shuts down the executor gracefully."""
+        if self.executor:
+            self.executor.shutdown(wait=True)  # blocks until all tasks complete
+            self.executor = None
 
 
 class ConcurrentProcessorRecursive:
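The key change in MultiProcessorRecursive is that run_process() is now callback-driven: _on_finish resubmits returned children immediately via apply_async, so the pool stays saturated instead of waiting for a whole generation to drain, and run_process() blocks on done_event until nothing is outstanding. A minimal driver sketch under those semantics (the unpack_file worker and its input paths are hypothetical):

    from atomicshop.basics.multiprocesses import MultiProcessorRecursive


    def unpack_file(file_path: str) -> list:
        # Hypothetical worker: extract file_path and return paths of nested archives.
        # An empty list ends the recursion for this branch.
        return []


    if __name__ == '__main__':
        processor = MultiProcessorRecursive(
            process_function=unpack_file,
            input_list=["bundle1.zip", "bundle2.zip"],  # hypothetical inputs
            max_workers=4,
            cpu_percent_max=80,
            memory_percent_max=80,
            wait_time=5,
        )
        processor.run_process()  # blocks until all recursively spawned work is done
        processor.shutdown()     # close() + join() on the underlying pool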
atomicshop/basics/strings.py
@@ -4,7 +4,7 @@ from pathlib import Path
 import argparse
 
 from . import lists
-from ..print_api import print_api
+from .. import print_api
 
 
 def get_nth_character_from_start(input_string: str, nth: int):
@@ -423,7 +423,7 @@ def replace_words_with_values_from_dict(
 
 
 def replace_strings_with_values_from_dict(string_to_replace: str, dictionary: dict) -> str:
-    """
+    r"""
     Function replaces strings, which are keys with values from dictionary.
 
     :param string_to_replace: string, to replace words in.
@@ -537,7 +537,7 @@ def replace_string_in_file(
         file.writelines(lines)
 
     # Output the relevant line numbers
-    print_api(f"Target string found on the following lines: {changed_lines}", **(print_kwargs or {}))
+    print_api.print_api(f"Target string found on the following lines: {changed_lines}", **(print_kwargs or {}))
     return changed_lines
 
 
@@ -571,3 +571,72 @@ def replace_string_in_file_main_argparse():
         new_string=args.new_string,
         find_only=args.find_only
     )
+
+
+def _replace_string_in_variable():
+    """
+    Replace string in a string variable, but do it by a meta variable inside the string.
+    This is just an example; using the 'Template' class from the 'string' module is a better way to do it.
+    """
+
+    from string import Template
+    import os
+    import tempfile
+    import subprocess
+
+    get_docker_url: str = "https://get.docker.com"
+    docker_proxy_image_name: str = "rpardini/docker-registry-proxy:0.6.5"
+    preparation_output_dir: str = str(Path(__file__).parent / "offline-bundle")
+
+    class BashTemplate(Template):
+        # Anything that is not '$', which is the default delimiter in the Template class, but is also used in bash scripts.
+        # The below symbol can be printed from the keyboard by holding 'Alt' and typing '0167' on the numeric keypad.
+        delimiter = '§'
+
+    bash_tmpl = BashTemplate(r"""#!/usr/bin/env bash
+#
+set -Eeuo pipefail
+
+die() { echo "ERROR: $*" >&2; exit 1; }
+need_root() { [[ $EUID -eq 0 ]] || die "Run as root (use sudo)"; }
+need_cmd() {
+    local cmd=$1
+    local pkg=${2:-$1}   # default package == command
+    if ! command -v "$cmd" &>/dev/null; then
+        echo "[*] $cmd not found – installing $pkg ..."
+        apt-get update -qq
+        DEBIAN_FRONTEND=noninteractive \
+            apt-get install -y --no-install-recommends "$pkg" || \
+            die "Unable to install required package: $pkg"
+    fi
+}
+
+need_root
+need_cmd curl   # binary and pkg are both “curl”
+need_cmd gpg    # → apt-get install gpg
+
+DRY_LOG=$(curl -fsSL "§url" | bash -s -- --dry-run)
+IMAGE="§proxyimage"
+ARCHIVE="$OUTDIR/registry-proxy-image.tar.gz"
+zip -r "§output_zip" "$OUTDIR"
+""")
+
+    # Substitute the variables in the bash script template.
+    bash_script = bash_tmpl.substitute(
+        url=get_docker_url, proxyimage=docker_proxy_image_name, output_zip=preparation_output_dir)
+
+    # Write it to a secure temporary file.
+    with tempfile.NamedTemporaryFile('w', delete=False, suffix='.sh') as f:
+        f.write(bash_script)
+        temp_path = f.name
+    os.chmod(temp_path, 0o755)  # make it executable
+
+    # Decide where the bundle should land (optional argument to the script).
+    cmd = ["sudo", temp_path, preparation_output_dir]  # use sudo because the script demands root
+
+    # Run it and stream output live.
+    try:
+        subprocess.run(cmd, check=True)
+    finally:
+        # Clean up the temp file unless you want to inspect it.
+        os.remove(temp_path)
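The new _replace_string_in_variable example rests on one trick: subclassing string.Template and swapping the default '$' delimiter for '§', so bash's own $VAR, ${VAR} and $(cmd) expansions survive substitution untouched. Stripped of the Docker context, the technique is just this (identifiers are illustrative):

    from string import Template


    class BashTemplate(Template):
        # '§' never occurs in normal bash syntax, so only '§name' placeholders
        # are substituted; '$HOME' and friends pass through unchanged.
        delimiter = '§'


    script = BashTemplate('echo "fetching §url into $HOME"').substitute(url="https://example.com")
    print(script)  # echo "fetching https://example.com into $HOME"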
atomicshop/basics/threads.py
@@ -18,6 +18,20 @@ def current_thread_id():
     return thread_id
 
 
+def get_current_thread_name():
+    return threading.current_thread().name
+
+
+def set_current_thread_name(name: str):
+    threading.current_thread().name = name
+
+
+def set_current_thread_name_by_process_name():
+    import multiprocessing
+    current_process_name = multiprocessing.current_process().name
+    threading.current_thread().name = current_process_name
+
+
 def get_number_of_active_threads():
     return threading.active_count()
 
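The new thread-naming helpers are mostly a logging convenience: once a worker process renames its main thread after the process itself, a %(threadName)s formatter field identifies which process emitted each record. A sketch of that use (the logging setup here is assumed, not part of the package):

    import logging
    import multiprocessing
    import threading


    def worker():
        # Rename this process's main thread after the process itself,
        # so %(threadName)s in log lines identifies the emitting process.
        threading.current_thread().name = multiprocessing.current_process().name
        logging.basicConfig(format="%(threadName)s: %(message)s", level=logging.INFO)
        logging.info("started")


    if __name__ == '__main__':
        p = multiprocessing.Process(target=worker, name="Unpacker-1")
        p.start()
        p.join()  # the child logs "Unpacker-1: started"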
atomicshop/basics/tracebacks.py
@@ -1,16 +1,26 @@
 import traceback
 
 
-def get_as_string(one_line: bool = False, replace_end: str = str()) -> str:
+def get_as_string(
+        exc: BaseException = None,
+        one_line: bool = False,
+        replace_end: str = str()
+) -> str:
     """
     Returns traceback as string.
 
+    :param exc: Exception to get traceback from. If 'None', the current exception will be used.
     :param one_line: If 'True', traceback will be returned as one line.
     :param replace_end: If 'one_line' is 'True', this string will be used to replace '\n' in traceback.
     :return: Traceback as string.
     """
 
+    if exc is None:
+        stringed_exception: str = traceback.format_exc()
+    else:
+        stringed_exception: str = ''.join(traceback.TracebackException.from_exception(exc).format())
+
     if not one_line:
-        return traceback.format_exc()
+        return stringed_exception
     else:
-        return traceback.format_exc().replace('\n', replace_end)
+        return stringed_exception.replace('\n', replace_end)
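The added exc parameter lets a traceback be rendered outside the except block, after the implicit sys.exc_info() context is gone; traceback.TracebackException.from_exception works from the exception object alone. A small illustration of the difference (stdlib calls only, the failing function is made up):

    import traceback


    def fail():
        raise ValueError("boom")


    saved = None
    try:
        fail()
    except ValueError as e:
        saved = e

    # Outside the handler there is no "current" exception any more:
    print(traceback.format_exc())  # prints "NoneType: None"
    # ...but a saved exception object can still be rendered in full:
    print(''.join(traceback.TracebackException.from_exception(saved).format()))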