abstract-webtools 0.1.6.89__tar.gz → 0.1.6.90__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/PKG-INFO +1 -1
  2. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/setup.py +1 -1
  3. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/videoDownloader.py +111 -0
  4. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools.egg-info/PKG-INFO +1 -1
  5. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/README.md +0 -0
  6. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/pyproject.toml +0 -0
  7. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/setup.cfg +0 -0
  8. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/__init__.py +0 -0
  9. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/abstract_usurpit.py +0 -0
  10. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/abstract_webtools.py +0 -0
  11. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/big_user_agent_list.py +0 -0
  12. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/main.py +0 -0
  13. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/__init__.py +0 -0
  14. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/allss//.py" +0 -0
  15. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/cipherManager.py +0 -0
  16. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/crawlManager.py +0 -0
  17. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/crawlmgr2.py +0 -0
  18. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/curlMgr.py +0 -0
  19. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/domainManager.py +0 -0
  20. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/dynamicRateLimiter.py +0 -0
  21. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/get_test.py +0 -0
  22. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/linkManager/__init__.py +0 -0
  23. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/linkManager/linkManager.py +0 -0
  24. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/mySocketClient.py +0 -0
  25. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/networkManager.py +0 -0
  26. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/requestManager/__init__.py +0 -0
  27. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/requestManager/requestManager.py +0 -0
  28. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/seleniumManager.py +0 -0
  29. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/soupManager/__init__.py +0 -0
  30. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/soupManager/asoueces.py +0 -0
  31. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/soupManager/soupManager.py +0 -0
  32. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/sslManager.py +0 -0
  33. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/tlsAdapter.py +0 -0
  34. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/urlManager/__init__.py +0 -0
  35. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/urlManager/urlManager.py +0 -0
  36. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/userAgentManager.py +0 -0
  37. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/managers/videoDownloader2.py +0 -0
  38. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/soup_gui.py +0 -0
  39. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/url_grabber.py +0 -0
  40. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools/url_grabber_new.py +0 -0
  41. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools.egg-info/SOURCES.txt +0 -0
  42. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools.egg-info/dependency_links.txt +0 -0
  43. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools.egg-info/requires.txt +0 -0
  44. {abstract_webtools-0.1.6.89 → abstract_webtools-0.1.6.90}/src/abstract_webtools.egg-info/top_level.txt +0 -0
--- abstract_webtools-0.1.6.89/PKG-INFO
+++ abstract_webtools-0.1.6.90/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_webtools
-Version: 0.1.6.89
+Version: 0.1.6.90
 Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
 Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
 Author: putkoff
--- abstract_webtools-0.1.6.89/setup.py
+++ abstract_webtools-0.1.6.90/setup.py
@@ -4,7 +4,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
     long_description = fh.read()
 setuptools.setup(
     name='abstract_webtools',
-    version='0.1.6.89',
+    version='0.1.6.90',
     author='putkoff',
     author_email='partners@abstractendeavors.com',
     description='Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.',
--- abstract_webtools-0.1.6.89/src/abstract_webtools/managers/videoDownloader.py
+++ abstract_webtools-0.1.6.90/src/abstract_webtools/managers/videoDownloader.py
@@ -136,6 +136,117 @@ class VideoDownloader:
     def stop(self):
         self.monitoring = False
         self.pause_event.set()
+    def download_image(url, save_path=None):
+        """
+        Downloads an image from a URL and saves it to the specified path.
+
+        Args:
+            url (str): The URL of the image to download
+            save_path (str, optional): Path to save the image. If None, uses the filename from URL
+
+        Returns:
+            str: Path where the image was saved, or None if download failed
+        """
+        try:
+            # Send GET request to the URL
+            response = requests.get(url, stream=True)
+
+            # Check if the request was successful
+            if response.status_code == 200:
+                # Set decode_content=True to automatically handle Content-Encoding
+                response.raw.decode_content = True
+
+                # If no save_path provided, extract filename from URL
+                if save_path is None:
+                    # Get filename from URL
+                    filename = url.split('/')[-1]
+                    save_path = filename
+
+                # Ensure the directory exists
+                os.makedirs(os.path.dirname(save_path), exist_ok=True)
+
+                # Write the image content to file
+                with open(save_path, 'wb') as f:
+                    f.write(response.content)
+
+                print(f"Image successfully downloaded to {save_path}")
+                return save_path
+            else:
+                print(f"Failed to download image. Status code: {response.status_code}")
+                return None
+
+        except requests.exceptions.RequestException as e:
+            print(f"Error downloading image: {str(e)}")
+            return None
+        except Exception as e:
+            print(f"An unexpected error occurred: {str(e)}")
+            return None
+    def get_thumbnails(directory,info):
+        thumbnails_dir = os.path.join(directory,'thumbnails')
+        os.makedirs(thumbnails_dir, exist_ok=True)
+        thumbnails = info.get('thumbnails',[])
+        for i,thumbnail_info in enumerate(thumbnails):
+            thumbnail_url = thumbnail_info.get('url')
+            thumbnail_base_url = thumbnail_url.split('?')[0]
+            baseName = os.path.basename(thumbnail_base_url)
+            fileName,ext = os.path.splitext(baseName)
+            baseName = f"{fileName}{ext}"
+            resolution = info['thumbnails'][i].get('resolution')
+            if resolution:
+                baseName = f"{resolution}_{baseName}"
+            img_id = info['thumbnails'][i].get('id')
+            if img_id:
+                baseName = f"{img_id}_{baseName}"
+            thumbnail_path = os.path.join(thumbnails_dir,baseName)
+            info['thumbnails'][i]['path']=thumbnail_path
+            download_image(thumbnail_url, save_path=thumbnail_path)
+        return info
+    def optimize_video_for_safari(input_file, reencode=False):
+        """
+        Optimizes an MP4 file for Safari by moving the 'moov' atom to the beginning.
+        Optionally, re-encodes the video for maximum compatibility.
+
+        Args:
+            input_file (str): Path to the original MP4 file.
+            reencode (bool): If True, re-encode the video for Safari compatibility.
+
+        Returns:
+            str: Path to the optimized MP4 file.
+        """
+        tmp_dir = tempfile.mkdtemp()
+        try:
+            local_input = os.path.join(tmp_dir, os.path.basename(input_file))
+            shutil.copy2(input_file, local_input)
+
+            base, ext = os.path.splitext(local_input)
+            local_output = f"{base}_optimized{ext}"
+
+            if reencode:
+                # Re-encoding command for maximum Safari compatibility
+                command = [
+                    "ffmpeg", "-i", local_input,
+                    "-c:v", "libx264", "-profile:v", "baseline", "-level", "3.0", "-pix_fmt", "yuv420p",
+                    "-c:a", "aac", "-b:a", "128k",
+                    "-movflags", "faststart",
+                    local_output
+                ]
+            else:
+                # Simple faststart with stream copy
+                command = [
+                    "ffmpeg", "-i", local_input,
+                    "-c", "copy", "-movflags", "faststart",
+                    local_output
+                ]
+
+            try:
+                subprocess.run(command, check=True)
+                shutil.copy2(local_output, input_file)
+                print(f"Optimized video saved as {input_file}")
+            except subprocess.CalledProcessError as e:
+                print(f"Error during optimization: {e}")
+            return input_file
+        finally:
+            shutil.rmtree(tmp_dir)
     def bool_or_default(obj,default=True):
         if obj == None:
             obj = default
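For orientation, a minimal usage sketch of the three helpers this hunk adds. It assumes they can be called as plain functions from abstract_webtools.managers.videoDownloader (the diff defines them without a self parameter); the import path, example URLs, file paths, and the yt-dlp-style info dict below are illustrative assumptions, not confirmed by this diff.

# Hypothetical usage sketch; import path and inputs are assumptions, not confirmed by this diff.
from abstract_webtools.managers.videoDownloader import (
    download_image,
    get_thumbnails,
    optimize_video_for_safari,
)

# Save a single image; with save_path=None the URL's basename would be used instead.
download_image("https://example.com/poster.jpg", save_path="media/poster.jpg")

# Save every thumbnail listed in a yt-dlp-style info dict under media/thumbnails/,
# recording each local path back into the dict under info['thumbnails'][i]['path'].
info = {"thumbnails": [{"url": "https://example.com/thumb.jpg?x=1", "id": "0", "resolution": "320x180"}]}
info = get_thumbnails("media", info)

# Relocate the MP4 'moov' atom to the front (ffmpeg -movflags faststart) so Safari
# can begin playback before the whole file downloads; requires ffmpeg on PATH.
optimize_video_for_safari("media/clip.mp4", reencode=False)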
--- abstract_webtools-0.1.6.89/src/abstract_webtools.egg-info/PKG-INFO
+++ abstract_webtools-0.1.6.90/src/abstract_webtools.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_webtools
-Version: 0.1.6.89
+Version: 0.1.6.90
 Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
 Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
 Author: putkoff