atomicshop-2.19.0-py3-none-any.whl → atomicshop-2.19.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
 """Atomic Basic functions and classes to make developer life easier"""
 
 __author__ = "Den Kras"
-__version__ = '2.19.0'
+__version__ = '2.19.2'
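
For reference, the new version can be confirmed on an installed copy; a minimal check, assuming atomicshop 2.19.2 is installed (not part of the diff):

    import importlib.metadata

    import atomicshop

    # Both should report 2.19.2: the attribute set in __init__.py above and the
    # Version field recorded in the wheel's METADATA (diffed further below).
    print(atomicshop.__version__)
    print(importlib.metadata.version("atomicshop"))
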

atomicshop/basics/multiprocesses.py CHANGED
@@ -3,6 +3,7 @@ import multiprocessing.managers
 import queue
 import concurrent.futures
 from concurrent.futures import ProcessPoolExecutor, as_completed
+from typing import Callable
 
 from .. import system_resources
 
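
The new typing.Callable import supports the process_function annotation added in the next hunk. As an aside, a narrower annotation could spell out the contract described in the docstring (one work item in, a list of new items out); this is only an illustration, not what the package declares:

    from typing import Callable, List

    # Hypothetical alias for the documented contract: take one item (a file
    # path) and return a list of newly produced items.
    ProcessFunction = Callable[[str], List[str]]

    def unpack_file(file_path: str) -> List[str]:
        return []  # no new files produced

    fn: ProcessFunction = unpack_file  # matches the declared signature
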
@@ -38,7 +39,7 @@ def process_wrap_queue(function_reference, *args, **kwargs):
 class MultiProcessorRecursive:
     def __init__(
             self,
-            process_function,
+            process_function: Callable,
             input_list: list,
             max_workers: int = None,
             cpu_percent_max: int = 80,
@@ -52,7 +53,7 @@ class MultiProcessorRecursive:
         :param process_function: function, function to execute on the input list.
         :param input_list: list, list of inputs to process.
         :param max_workers: integer, number of workers to execute functions in parallel. Default is None, which
-            is the number of CPUs.
+            is the number of CPUs, counted automatically by the multiprocessing module.
         :param cpu_percent_max: integer, maximum CPU percentage. Above that usage, we will wait before starting new
             execution.
         :param memory_percent_max: integer, maximum memory percentage. Above that usage, we will wait before starting
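
The clarified wording above matches the standard library behavior: multiprocessing.Pool(processes=None) uses the value returned by os.cpu_count(). A quick way to see the number that would be picked when max_workers is left at None:

    import multiprocessing
    import os

    # The worker count multiprocessing falls back to when processes/max_workers is None.
    print(os.cpu_count())
    print(multiprocessing.cpu_count())
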
@@ -65,7 +66,7 @@ class MultiProcessorRecursive:
             If this is used, the system resources will be checked before starting each new execution from this
             shared dict instead of performing new checks.
 
-        Usage:
+        Usage Examples:
            def unpack_file(file_path):
                # Process the file at file_path and unpack it.
                # Return a list of new file paths that were extracted from the provided path.
@@ -74,22 +75,71 @@ class MultiProcessorRecursive:
            # List of file paths to process
            file_paths = ["path1", "path2", "path3"]
 
-           # Create an instance of MultiProcessorRecursive
-           # Note: unpack_file is passed without parentheses
-           processor = MultiProcessorRecursive(
-               process_function=unpack_file,
-               input_list=file_paths,
-               max_workers=4,  # Number of parallel workers
-               cpu_percent_max=80,  # Max CPU usage percentage
-               memory_percent_max=80,  # Max memory usage percentage
-               wait_time=5  # Time to wait if resources are overused
-           )
-
-           # Run the processing
-           processor.run_process()
+           # Note: the unpack_file callable is passed to __init__ without parentheses.
+
+        1. Providing the whole list up front and processing it at once:
+           # Initialize the processor.
+           processor = MultiProcessorRecursive(
+               process_function=unpack_file,
+               input_list=file_paths,
+               max_workers=4,  # Number of parallel workers
+               cpu_percent_max=80,  # Max CPU usage percentage
+               memory_percent_max=80,  # Max memory usage percentage
+               wait_time=5  # Time to wait if resources are overused
+           )
+
+           # Process the list of files at once.
+           processor.run_process()
+           # Shut down the pool processes after processing.
+           processor.shutdown_pool()
+
+        2. Processing each file in the list differently, adding it to the processor's input_list, then executing once:
+           # Initialize the processor once, before the loop, with an empty input_list.
+           processor = MultiProcessorRecursive(
+               process_function=unpack_file,
+               input_list=[],
+               max_workers=4,  # Number of parallel workers
+               cpu_percent_max=80,  # Max CPU usage percentage
+               memory_percent_max=80,  # Max memory usage percentage
+               wait_time=5  # Time to wait if resources are overused
+           )
+
+           for file_path in file_paths:
+               # <Process each file>.
+               # Add the file path to the processor's input_list.
+               processor.input_list.append(file_path)
+
+           # Process the list of files at once.
+           processor.run_process()
+           # Shut down the pool processes after processing.
+           processor.shutdown_pool()
+
+        3. Processing each file in the list separately, since the unpacking function creates
+           more files, but the context for this operation is different for the extraction
+           of each main file in the list:
+
+           # Initialize the processor once, before the loop, with an empty input_list.
+           processor = MultiProcessorRecursive(
+               process_function=unpack_file,
+               input_list=[],
+               max_workers=4,  # Number of parallel workers
+               cpu_percent_max=80,  # Max CPU usage percentage
+               memory_percent_max=80,  # Max memory usage percentage
+               wait_time=5  # Time to wait if resources are overused
+           )
+
+           for file_path in file_paths:
+               # <Process each file>.
+               # Add the file path to the processor's input_list.
+               processor.input_list.append(file_path)
+               # Process the added file path separately.
+               processor.run_process()
+
+           # Shut down the pool processes after processing.
+           processor.shutdown_pool()
         """
 
-        self.process_function = process_function
+        self.process_function: Callable = process_function
         self.input_list: list = input_list
         self.max_workers: int = max_workers
         self.cpu_percent_max: int = cpu_percent_max
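
For context, usage example 1 from the docstring above as a self-contained script. This is a sketch assuming atomicshop >= 2.19.2 is installed and importable from atomicshop.basics.multiprocesses (per the RECORD entries further below); fake_unpack is a hypothetical stand-in for a real unpacking function:

    from atomicshop.basics.multiprocesses import MultiProcessorRecursive

    def fake_unpack(file_path: str) -> list:
        # Stand-in for a real unpack function: it must be picklable (defined at
        # module level) and return a list of newly created paths; returning an
        # empty list stops the recursive processing.
        print(f"unpacking {file_path}")
        return []

    if __name__ == "__main__":
        # The guard matters: __init__ now creates a multiprocessing.Pool (see the
        # next hunk), so the instance should only be built in the main process.
        processor = MultiProcessorRecursive(
            process_function=fake_unpack,
            input_list=["path1", "path2", "path3"],
            max_workers=4,
            cpu_percent_max=80,
            memory_percent_max=80,
            wait_time=5,
        )
        processor.run_process()
        processor.shutdown_pool()
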
@@ -97,41 +147,50 @@ class MultiProcessorRecursive:
         self.wait_time: float = wait_time
         self.system_monitor_manager_dict: multiprocessing.managers.DictProxy = system_monitor_manager_dict
 
+        # Create the pool once and reuse it
+        self.pool: multiprocessing.Pool = multiprocessing.Pool(processes=self.max_workers)
+
+        # Keep track of outstanding async results across calls
+        self.async_results: list = []
+
     def run_process(self):
-        with multiprocessing.Pool(processes=self.max_workers) as pool:
-            # Keep track of the async results
-            async_results = []
-
-            while self.input_list:
-                new_input_list = []
-                for item in self.input_list:
-                    # Check system resources before processing each item
-                    system_resources.wait_for_resource_availability(
-                        cpu_percent_max=self.cpu_percent_max,
-                        memory_percent_max=self.memory_percent_max,
-                        wait_time=self.wait_time,
-                        system_monitor_manager_dict=self.system_monitor_manager_dict)
-
-                    # Process the item
-                    async_result = pool.apply_async(self.process_function, (item,))
-                    async_results.append(async_result)
-
-                # Reset input_list for next round of processing
-                self.input_list = []
-
-                # Collect results as they complete
-                for async_result in async_results:
-                    try:
-                        result = async_result.get()
-                        # Assuming process_function returns a list, extend new_input_list
-                        new_input_list.extend(result)
-                    except Exception:
-                        raise
-
-                # Update the input_list for the next iteration
-                self.input_list = new_input_list
-                # Clear the async_results for the next iteration
-                async_results.clear()
+        while self.input_list:
+            new_input_list = []
+            for item in self.input_list:
+                # Check system resources before processing each item
+                system_resources.wait_for_resource_availability(
+                    cpu_percent_max=self.cpu_percent_max,
+                    memory_percent_max=self.memory_percent_max,
+                    wait_time=self.wait_time,
+                    system_monitor_manager_dict=self.system_monitor_manager_dict)
+
+                # Process the item
+                async_result = self.pool.apply_async(self.process_function, (item,))
+                self.async_results.append(async_result)
+
+            # Reset input_list for next round of processing
+            self.input_list = []
+
+            # Collect results as they complete
+            for async_result in self.async_results:
+                try:
+                    result = async_result.get()
+                    # Assuming process_function returns a list, extend new_input_list
+                    new_input_list.extend(result)
+                except Exception:
+                    raise
+
+            # Update the input_list for the next iteration
+            self.input_list = new_input_list
+            # Clear the async_results for the next iteration
+            self.async_results.clear()
+
+    def shutdown_pool(self):
+        """Shuts down the pool gracefully."""
+        if self.pool:
+            self.pool.close()  # Stop accepting new tasks
+            self.pool.join()  # Wait for all tasks to complete
+            self.pool = None
 
 
 class ConcurrentProcessorRecursive:
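
The refactor above creates the pool once in __init__ and reuses it across run_process() calls, so callers are now responsible for calling shutdown_pool() when finished. A stdlib-only sketch of the same create-once / reuse / close-explicitly pattern, independent of atomicshop (ReusablePool and square are illustrative names, not from the package):

    import multiprocessing

    def square(x: int) -> int:
        # Stand-in task; must be a top-level function so the pool can pickle it.
        return x * x

    class ReusablePool:
        def __init__(self, processes: int = None):
            # Created once, like MultiProcessorRecursive now does in __init__.
            self.pool = multiprocessing.Pool(processes=processes)

        def run(self, items: list) -> list:
            # Submit asynchronously, then collect; the pool survives the call.
            async_results = [self.pool.apply_async(square, (item,)) for item in items]
            return [r.get() for r in async_results]

        def shutdown(self):
            if self.pool:
                self.pool.close()  # stop accepting new tasks
                self.pool.join()   # wait for outstanding tasks to finish
                self.pool = None

    if __name__ == "__main__":
        pool = ReusablePool(processes=2)
        print(pool.run([1, 2, 3]))  # first batch
        print(pool.run([4, 5, 6]))  # pool is reused, not recreated
        pool.shutdown()
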

atomicshop-2.19.0.dist-info/METADATA → atomicshop-2.19.2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: atomicshop
-Version: 2.19.0
+Version: 2.19.2
 Summary: Atomic functions and classes to make developer life easier
 Author: Denis Kras
 License: MIT License

atomicshop-2.19.0.dist-info/RECORD → atomicshop-2.19.2.dist-info/RECORD
@@ -1,4 +1,4 @@
-atomicshop/__init__.py,sha256=OIXYlXkKerHNZWXlxoV3_8FloSrzjYDaCOGsfFfHcjg,123
+atomicshop/__init__.py,sha256=SBk9Y1eYsyMmy9xWJ483qY8NFoOX5qMgy1HMpVCjXO0,123
 atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
 atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
 atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -101,7 +101,7 @@ atomicshop/basics/isinstancing.py,sha256=fQ35xfqbguQz2BUn-3a4KVGskhTcIn8JjRtxV2r
 atomicshop/basics/list_of_classes.py,sha256=PJoE1VJdhhQ4gSFr88zW7IApXd4Ez7xLz-7vAM-7gug,978
 atomicshop/basics/list_of_dicts.py,sha256=tj0LNPf1ljNI_qpoO-PiOT4Ulmk1M-UpTGyn9twVcw8,8039
 atomicshop/basics/lists.py,sha256=I0C62vrDrNwCTNl0EjUZNa1Jsd8l0rTkp28GEx9QoEI,4258
-atomicshop/basics/multiprocesses.py,sha256=oU6LjcLLGBtPIGJzZBpDWoLU3HRmMoanITEOE2luAYw,18799
+atomicshop/basics/multiprocesses.py,sha256=vzL1lGiXpfPWbuLsQF9e7c9vbo59rITL2dBLi0CCpZ0,21741
 atomicshop/basics/numbers.py,sha256=ESX0z_7o_ok3sOmCKAUBoZinATklgMy2v-4RndqXlVM,1837
 atomicshop/basics/package_module.py,sha256=fBd0uVgFce25ZCVtLq83iyowRlbwdWYFj_t4Ml7LU14,391
 atomicshop/basics/randoms.py,sha256=DmYLtnIhDK29tAQrGP1Nt-A-v8WC7WIEB8Edi-nk3N4,282
@@ -322,8 +322,8 @@ atomicshop/wrappers/socketw/statistics_csv.py,sha256=fgMzDXI0cybwUEqAxprRmY3lqbh
 atomicshop/wrappers/winregw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atomicshop/wrappers/winregw/winreg_installed_software.py,sha256=Qzmyktvob1qp6Tjk2DjLfAqr_yXV0sgWzdMW_9kwNjY,2345
 atomicshop/wrappers/winregw/winreg_network.py,sha256=AENV88H1qDidrcpyM9OwEZxX5svfi-Jb4N6FkS1xtqA,8851
-atomicshop-2.19.0.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
-atomicshop-2.19.0.dist-info/METADATA,sha256=BxY2IEfmMAFhO4teHrr8TYRCzgnf-SsrWaIh_l29YOo,10630
-atomicshop-2.19.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-atomicshop-2.19.0.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
-atomicshop-2.19.0.dist-info/RECORD,,
+atomicshop-2.19.2.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+atomicshop-2.19.2.dist-info/METADATA,sha256=hyYsPo_Wfx0n6Q8cdQNEWsP4RqH5_oo3og7-2M5hFBs,10630
+atomicshop-2.19.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+atomicshop-2.19.2.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+atomicshop-2.19.2.dist-info/RECORD,,
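
The hash column in the RECORD entries above is the SHA-256 digest of each file, urlsafe-base64-encoded with the trailing '=' padding stripped (the wheel RECORD format). A minimal sketch for recomputing one entry locally; the path is illustrative and should point at the installed file:

    import base64
    import hashlib
    from pathlib import Path

    # Illustrative path; adjust to the installed location in site-packages.
    path = Path("atomicshop/__init__.py")

    digest = hashlib.sha256(path.read_bytes()).digest()
    record_hash = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    # Should reproduce the corresponding RECORD line, e.g.
    # atomicshop/__init__.py,sha256=SBk9Y1eYsyMmy9xWJ483qY8NFoOX5qMgy1HMpVCjXO0,123
    print(f"{path},sha256={record_hash},{path.stat().st_size}")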