abstract-webtools 0.1.6.70__tar.gz → 0.1.6.72__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/PKG-INFO +1 -1
  2. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/setup.py +1 -1
  3. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/videoDownloader.py +42 -21
  4. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools.egg-info/PKG-INFO +1 -1
  5. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/README.md +0 -0
  6. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/pyproject.toml +0 -0
  7. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/setup.cfg +0 -0
  8. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/__init__.py +0 -0
  9. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/abstract_usurpit.py +0 -0
  10. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/abstract_webtools.py +0 -0
  11. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/big_user_agent_list.py +0 -0
  12. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/main.py +0 -0
  13. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/__init__.py +0 -0
  14. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/cipherManager.py +0 -0
  15. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/crawlManager.py +0 -0
  16. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/crawlmgr2.py +0 -0
  17. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/curlMgr.py +0 -0
  18. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/domainManager.py +0 -0
  19. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/dynamicRateLimiter.py +0 -0
  20. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/get_test.py +0 -0
  21. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/linkManager/__init__.py +0 -0
  22. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/linkManager/linkManager.py +0 -0
  23. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/mySocketClient.py +0 -0
  24. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/networkManager.py +0 -0
  25. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/requestManager/__init__.py +0 -0
  26. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/requestManager/requestManager.py +0 -0
  27. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/seleniumManager.py +0 -0
  28. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/soupManager/__init__.py +0 -0
  29. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/soupManager/asoueces.py +0 -0
  30. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/soupManager/soupManager.py +0 -0
  31. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/sslManager.py +0 -0
  32. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/tlsAdapter.py +0 -0
  33. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/urlManager/__init__.py +0 -0
  34. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/urlManager/urlManager.py +0 -0
  35. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/userAgentManager.py +0 -0
  36. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/managers/videoDownloader2.py +0 -0
  37. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/soup_gui.py +0 -0
  38. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/url_grabber.py +0 -0
  39. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools/url_grabber_new.py +0 -0
  40. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools.egg-info/SOURCES.txt +0 -0
  41. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools.egg-info/dependency_links.txt +0 -0
  42. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools.egg-info/requires.txt +0 -0
  43. {abstract_webtools-0.1.6.70 → abstract_webtools-0.1.6.72}/src/abstract_webtools.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: abstract_webtools
3
- Version: 0.1.6.70
3
+ Version: 0.1.6.72
4
4
  Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
5
5
  Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
6
6
  Author: putkoff
@@ -4,7 +4,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
4
4
  long_description = fh.read()
5
5
  setuptools.setup(
6
6
  name='abstract_webtools',
7
- version='0.1.6.70',
7
+ version='0.1.6.72',
8
8
  author='putkoff',
9
9
  author_email='partners@abstractendeavors.com',
10
10
  description='Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.',
@@ -2,7 +2,7 @@ from .requestManager.requestManager import requestManager
2
2
  from .urlManager.urlManager import urlManager
3
3
  from .soupManager.soupManager import soupManager
4
4
  from .linkManager.linkManager import linkManager
5
- import threading,os,re,yt_dlp,urllib.request,m3u8_To_MP4,subprocess,requests
5
+ import threading,os,re,yt_dlp,urllib.request,m3u8_To_MP4,subprocess,requests,tempfile,shutil
6
6
  from abstract_utilities import get_logFile,safe_dump_to_file,get_time_stamp
7
7
  from m3u8 import M3U8 # Install: pip install m3u8
8
8
  from urllib.parse import urljoin
@@ -201,34 +201,54 @@ def get_thumbnails(directory,info):
201
201
  info['thumbnails'][i]['path']=thumbnail_path
202
202
  download_image(thumbnail_url, save_path=thumbnail_path)
203
203
  return info
204
- def optimize_video_for_safari(input_file):
204
+ def optimize_video_for_safari(input_file, reencode=False):
205
205
  """
206
206
  Optimizes an MP4 file for Safari by moving the 'moov' atom to the beginning.
207
- The optimized file will be saved as <original>_optimized.mp4.
207
+ Optionally, re-encodes the video for maximum compatibility.
208
208
 
209
209
  Args:
210
210
  input_file (str): Path to the original MP4 file.
211
+ reencode (bool): If True, re-encode the video for Safari compatibility.
211
212
 
212
213
  Returns:
213
214
  str: Path to the optimized MP4 file.
214
215
  """
215
- # Build the output file name
216
- base, ext = os.path.splitext(input_file)
217
- output_file = f"{base}_optimized{ext}"
218
-
219
- # ffmpeg command to copy streams and reposition the moov atom
220
- command = ["ffmpeg", "-i", input_file, "-c", "copy", "-movflags", "faststart", output_file]
216
+ tmp_dir = tempfile.mkdtemp()
221
217
  try:
222
- subprocess.run(command, check=True)
223
- # Optionally, you could remove the original file or keep both.
224
- os.remove(input_file)
225
- os.rename(output_file, input_file)
226
- print(f"Optimized video saved as {input_file}")
227
- except subprocess.CalledProcessError as e:
228
- print(f"Error during optimization: {e}")
229
- return input_file
218
+ local_input = os.path.join(tmp_dir, os.path.basename(input_file))
219
+ shutil.copy2(input_file, local_input)
220
+
221
+ base, ext = os.path.splitext(local_input)
222
+ local_output = f"{base}_optimized{ext}"
223
+
224
+ if reencode:
225
+ # Re-encoding command for maximum Safari compatibility
226
+ command = [
227
+ "ffmpeg", "-i", local_input,
228
+ "-c:v", "libx264", "-profile:v", "baseline", "-level", "3.0", "-pix_fmt", "yuv420p",
229
+ "-c:a", "aac", "-b:a", "128k",
230
+ "-movflags", "faststart",
231
+ local_output
232
+ ]
233
+ else:
234
+ # Simple faststart with stream copy
235
+ command = [
236
+ "ffmpeg", "-i", local_input,
237
+ "-c", "copy", "-movflags", "faststart",
238
+ local_output
239
+ ]
240
+
241
+ try:
242
+ subprocess.run(command, check=True)
243
+ shutil.copy2(local_output, input_file)
244
+ print(f"Optimized video saved as {input_file}")
245
+ except subprocess.CalledProcessError as e:
246
+ print(f"Error during optimization: {e}")
247
+ return input_file
248
+ finally:
249
+ shutil.rmtree(tmp_dir)
230
250
 
231
- def downloadvideo(url, directory=False, rename_display=True, thumbnails=True, audio=False):
251
+ def downloadvideo(url, directory=False, rename_display=True, thumbnails=True, audio=False,safai_optimize=False):
232
252
  directory = directory or os.getcwd()
233
253
  temp_id = re.sub(r'[^\w\d.-]', '_', url)[-20:]
234
254
  temp_filename = f"temp_{temp_id}.mp4"
@@ -256,9 +276,10 @@ def downloadvideo(url, directory=False, rename_display=True, thumbnails=True, au
256
276
  info['file_path'] = new_path
257
277
 
258
278
  # *** Here we call the optimization function ***
259
- if new_path.lower().endswith('.mp4'):
260
- info['file_path'] = optimize_video_for_safari(new_path)
261
-
279
+ video_path = info.get('file_path')
280
+ if video_path and video_path.lower().endswith('.mp4') and safai_optimize:
281
+ info['file_path'] = optimize_video_for_safari(video_path,reencode=safai_optimize)
282
+
262
283
  info_path = os.path.join(directory, 'info.json')
263
284
  if thumbnails:
264
285
  info = get_thumbnails(directory, info)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: abstract_webtools
3
- Version: 0.1.6.70
3
+ Version: 0.1.6.72
4
4
  Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
5
5
  Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
6
6
  Author: putkoff