atomicshop 2.19.0__py3-none-any.whl → 2.19.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of atomicshop might be problematic.
atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
 """Atomic Basic functions and classes to make developer life easier"""
 
 __author__ = "Den Kras"
-__version__ = '2.19.0'
+__version__ = '2.19.1'
atomicshop/basics/multiprocesses.py CHANGED
@@ -52,7 +52,7 @@ class MultiProcessorRecursive:
         :param process_function: function, function to execute on the input list.
         :param input_list: list, list of inputs to process.
         :param max_workers: integer, number of workers to execute functions in parallel. Default is None, which
-            is the number of CPUs.
+            is the number of CPUs that will be counted automatically by the multiprocessing module.
         :param cpu_percent_max: integer, maximum CPU percentage. Above that usage, we will wait before starting new
             execution.
         :param memory_percent_max: integer, maximum memory percentage. Above that usage, we will wait, before starting
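The three throttling parameters (cpu_percent_max, memory_percent_max, wait_time) gate each task submission on live system load. The helper that enforces them, system_resources.wait_for_resource_availability, is not shown in this diff; the sketch below illustrates the kind of check it presumably performs, built on psutil, with the shared-dict fast path approximated by made-up 'cpu_percent'/'memory_percent' keys. It is an illustration of the throttling idea, not atomicshop's actual code.

    import time
    import psutil  # assumption: a psutil-style sampler; the real helper may differ

    def wait_for_resource_availability_sketch(
            cpu_percent_max: int,
            memory_percent_max: int,
            wait_time: float,
            system_monitor_manager_dict=None):
        # Block until CPU and memory usage both drop below their ceilings.
        while True:
            if system_monitor_manager_dict is not None:
                # Fast path: reuse readings published by a monitor process.
                # The key names here are hypothetical.
                cpu = system_monitor_manager_dict.get('cpu_percent', 0.0)
                mem = system_monitor_manager_dict.get('memory_percent', 0.0)
            else:
                cpu = psutil.cpu_percent(interval=0.1)   # sample CPU usage
                mem = psutil.virtual_memory().percent    # current RAM usage
            if cpu < cpu_percent_max and mem < memory_percent_max:
                return
            time.sleep(wait_time)  # back off before re-checking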
@@ -65,7 +65,7 @@ class MultiProcessorRecursive:
             If this is used, the system resources will be checked before starting each new execution from this
             shared dict instead of performing new checks.
 
-        Usage:
+        Usage Examples:
             def unpack_file(file_path):
                 # Process the file at file_path and unpack it.
                 # Return a list of new file paths that were extracted from the provided path.
@@ -74,19 +74,68 @@ class MultiProcessorRecursive:
             # List of file paths to process
             file_paths = ["path1", "path2", "path3"]
 
-            # Create an instance of MultiProcessor
-            # Note: unpacking.unpack_file is passed without parentheses
-            processor = MultiProcessor(
-                process_function=unpack_file,
-                input_list=file_paths,
-                max_workers=4,            # Number of parallel workers
-                cpu_percent_max=80,       # Max CPU usage percentage
-                memory_percent_max=80,    # Max memory usage percentage
-                wait_time=5               # Time to wait if resources are overused
-            )
-
-            # Run the processing
-            processor.run_process()
+            # Note: the unpack_file Callable is passed to init without parentheses.
+
+            1. Providing the list directly, to process it at once:
+                # Initialize the processor.
+                processor = MultiProcessor(
+                    process_function=unpack_file,
+                    input_list=file_paths,
+                    max_workers=4,            # Number of parallel workers
+                    cpu_percent_max=80,       # Max CPU usage percentage
+                    memory_percent_max=80,    # Max memory usage percentage
+                    wait_time=5               # Time to wait if resources are overused
+                )
+
+                # Process the list of files at once.
+                processor.run_process()
+                # Shut down the pool processes after processing.
+                processor.shutdown_pool()
+
+            2. Processing each file in the list differently, then adding the results to the
+               multiprocessing instance's list, then executing:
+                # Initialize the processor once, before the loop, with an empty input_list.
+                processor = MultiProcessor(
+                    process_function=unpack_file,
+                    input_list=[],
+                    max_workers=4,            # Number of parallel workers
+                    cpu_percent_max=80,       # Max CPU usage percentage
+                    memory_percent_max=80,    # Max memory usage percentage
+                    wait_time=5               # Time to wait if resources are overused
+                )
+
+                for file_path in file_paths:
+                    # <Process each file>.
+                    # Add the result to the input_list of the processor.
+                    processor.input_list.append(file_path)
+
+                # Process the list of files at once.
+                processor.run_process()
+                # Shut down the pool processes after processing.
+                processor.shutdown_pool()
+
+            3. Processing each file in the list separately, since we're using an unpacking function that
+               will create more files, but the context of this operation differs for the extraction
+               of each main file inside the list:
+                # Initialize the processor once, before the loop, with an empty input_list.
+                processor = MultiProcessor(
+                    process_function=unpack_file,
+                    input_list=[],
+                    max_workers=4,            # Number of parallel workers
+                    cpu_percent_max=80,       # Max CPU usage percentage
+                    memory_percent_max=80,    # Max memory usage percentage
+                    wait_time=5               # Time to wait if resources are overused
+                )
+
+                for file_path in file_paths:
+                    # <Process each file>.
+                    # Add the result to the input_list of the processor.
+                    processor.input_list.append(file_path)
+                    # Process the added file path separately.
+                    processor.run_process()
+
+                # Shut down the pool processes after processing.
+                processor.shutdown_pool()
         """
 
         self.process_function = process_function
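Two caveats for anyone copying the docstring examples: they construct MultiProcessor, while the class this diff modifies is named MultiProcessorRecursive, and multiprocessing.Pool pickles process_function into worker processes, so the function must live at module level and the driving code must sit behind a __main__ guard on spawn-based platforms such as Windows. A minimal self-contained sketch of example 1 under those constraints, assuming system_monitor_manager_dict is optional (as the docstring examples suggest) and using a stand-in unpack_file that yields no nested files:

    from atomicshop.basics.multiprocesses import MultiProcessorRecursive

    def unpack_file(file_path):
        # Stand-in worker: a real one would extract file_path and return
        # the list of newly created file paths.
        print(f"unpacking {file_path}")
        return []  # nothing further to process

    if __name__ == '__main__':  # workers re-import this module on spawn
        processor = MultiProcessorRecursive(
            process_function=unpack_file,   # passed without parentheses
            input_list=["path1", "path2", "path3"],
            max_workers=4,
            cpu_percent_max=80,
            memory_percent_max=80,
            wait_time=5,
        )
        processor.run_process()
        processor.shutdown_pool()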
@@ -97,41 +146,57 @@ class MultiProcessorRecursive:
         self.wait_time: float = wait_time
         self.system_monitor_manager_dict: multiprocessing.managers.DictProxy = system_monitor_manager_dict
 
+        # Create the pool once and reuse it.
+        self.pool: multiprocessing.Pool = multiprocessing.Pool(processes=self.max_workers)
+
+        # Keep track of outstanding async results across calls.
+        self.async_results: list = []
+
     def run_process(self):
-        with multiprocessing.Pool(processes=self.max_workers) as pool:
-            # Keep track of the async results
-            async_results = []
-
-            while self.input_list:
-                new_input_list = []
-                for item in self.input_list:
-                    # Check system resources before processing each item
-                    system_resources.wait_for_resource_availability(
-                        cpu_percent_max=self.cpu_percent_max,
-                        memory_percent_max=self.memory_percent_max,
-                        wait_time=self.wait_time,
-                        system_monitor_manager_dict=self.system_monitor_manager_dict)
-
-                    # Process the item
-                    async_result = pool.apply_async(self.process_function, (item,))
-                    async_results.append(async_result)
-
-                # Reset input_list for next round of processing
-                self.input_list = []
-
-                # Collect results as they complete
-                for async_result in async_results:
-                    try:
-                        result = async_result.get()
-                        # Assuming process_function returns a list, extend new_input_list
-                        new_input_list.extend(result)
-                    except Exception:
-                        raise
-
-                # Update the input_list for the next iteration
-                self.input_list = new_input_list
-                # Clear the async_results for the next iteration
-                async_results.clear()
+        # This method can be called multiple times to add new tasks without creating a new pool.
+        if not self.input_list:
+            return  # Nothing to process
+
+        new_input_list = []
+
+        for item in self.input_list:
+            # Check system resources before scheduling each item.
+            system_resources.wait_for_resource_availability(
+                cpu_percent_max=self.cpu_percent_max,
+                memory_percent_max=self.memory_percent_max,
+                wait_time=self.wait_time,
+                system_monitor_manager_dict=self.system_monitor_manager_dict
+            )
+
+            # Schedule the task on the existing pool.
+            async_result = self.pool.apply_async(self.process_function, (item,))
+            self.async_results.append(async_result)
+
+        # Clear the input_list now that the tasks are scheduled.
+        self.input_list = []
+
+        # Collect results for the tasks that were scheduled.
+        for async_result in self.async_results:
+            try:
+                result = async_result.get()  # Blocking wait for the result
+                # Assuming process_function returns a list, extend new_input_list.
+                new_input_list.extend(result)
+            except Exception as e:
+                # Handle exceptions as needed.
+                raise e
+
+        # Clear collected async results since they've been processed.
+        self.async_results.clear()
+
+        # Update input_list with new files (if any) for further processing.
+        self.input_list = new_input_list
+
+    def shutdown_pool(self):
+        """Shuts down the pool gracefully."""
+        if self.pool:
+            self.pool.close()  # Stop accepting new tasks
+            self.pool.join()   # Wait for all tasks to complete
+            self.pool = None
 
 
 class ConcurrentProcessorRecursive:
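Worth noting about this hunk: the old run_process drained self.input_list in an internal while loop, recursing until nothing new was produced, whereas the new version schedules and collects exactly one level per call and parks any follow-up inputs back in self.input_list. Callers who want the old fully recursive behavior presumably need to drive it externally; a minimal sketch, reusing the processor object from the sketch above:

    # Drain the processor level by level; each run_process() call schedules one
    # round, blocks on its results, and refills input_list with follow-up inputs.
    processor.input_list = ["path1", "path2", "path3"]
    while processor.input_list:
        processor.run_process()
    processor.shutdown_pool()  # close the reused pool once fully drained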
atomicshop-2.19.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: atomicshop
-Version: 2.19.0
+Version: 2.19.1
 Summary: Atomic functions and classes to make developer life easier
 Author: Denis Kras
 License: MIT License
atomicshop-2.19.1.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-atomicshop/__init__.py,sha256=OIXYlXkKerHNZWXlxoV3_8FloSrzjYDaCOGsfFfHcjg,123
+atomicshop/__init__.py,sha256=YLnFX8FxOuGbWDCdCSt4RjxgiYy5Cc-U3A7YXjRxSd8,123
 atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
 atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
 atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -101,7 +101,7 @@ atomicshop/basics/isinstancing.py,sha256=fQ35xfqbguQz2BUn-3a4KVGskhTcIn8JjRtxV2r
 atomicshop/basics/list_of_classes.py,sha256=PJoE1VJdhhQ4gSFr88zW7IApXd4Ez7xLz-7vAM-7gug,978
 atomicshop/basics/list_of_dicts.py,sha256=tj0LNPf1ljNI_qpoO-PiOT4Ulmk1M-UpTGyn9twVcw8,8039
 atomicshop/basics/lists.py,sha256=I0C62vrDrNwCTNl0EjUZNa1Jsd8l0rTkp28GEx9QoEI,4258
-atomicshop/basics/multiprocesses.py,sha256=oU6LjcLLGBtPIGJzZBpDWoLU3HRmMoanITEOE2luAYw,18799
+atomicshop/basics/multiprocesses.py,sha256=m8Vg6YDZX_Xz3r5uXvaTkFWQ-x6OvGOnTAVgWMFmvbU,21909
 atomicshop/basics/numbers.py,sha256=ESX0z_7o_ok3sOmCKAUBoZinATklgMy2v-4RndqXlVM,1837
 atomicshop/basics/package_module.py,sha256=fBd0uVgFce25ZCVtLq83iyowRlbwdWYFj_t4Ml7LU14,391
 atomicshop/basics/randoms.py,sha256=DmYLtnIhDK29tAQrGP1Nt-A-v8WC7WIEB8Edi-nk3N4,282
@@ -322,8 +322,8 @@ atomicshop/wrappers/socketw/statistics_csv.py,sha256=fgMzDXI0cybwUEqAxprRmY3lqbh
 atomicshop/wrappers/winregw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atomicshop/wrappers/winregw/winreg_installed_software.py,sha256=Qzmyktvob1qp6Tjk2DjLfAqr_yXV0sgWzdMW_9kwNjY,2345
 atomicshop/wrappers/winregw/winreg_network.py,sha256=AENV88H1qDidrcpyM9OwEZxX5svfi-Jb4N6FkS1xtqA,8851
-atomicshop-2.19.0.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
-atomicshop-2.19.0.dist-info/METADATA,sha256=BxY2IEfmMAFhO4teHrr8TYRCzgnf-SsrWaIh_l29YOo,10630
-atomicshop-2.19.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-atomicshop-2.19.0.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
-atomicshop-2.19.0.dist-info/RECORD,,
+atomicshop-2.19.1.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+atomicshop-2.19.1.dist-info/METADATA,sha256=R3uYBPX5Re5tpvd0nokDfmh_F1z1rWPThXWTPLvoFE4,10630
+atomicshop-2.19.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+atomicshop-2.19.1.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+atomicshop-2.19.1.dist-info/RECORD,,