abstract-webtools 0.1.6.68__tar.gz → 0.1.6.70__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/PKG-INFO +1 -1
  2. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/setup.py +1 -1
  3. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/videoDownloader.py +41 -8
  4. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools.egg-info/PKG-INFO +1 -1
  5. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/README.md +0 -0
  6. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/pyproject.toml +0 -0
  7. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/setup.cfg +0 -0
  8. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/__init__.py +0 -0
  9. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/abstract_usurpit.py +0 -0
  10. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/abstract_webtools.py +0 -0
  11. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/big_user_agent_list.py +0 -0
  12. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/main.py +0 -0
  13. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/__init__.py +0 -0
  14. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/cipherManager.py +0 -0
  15. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/crawlManager.py +0 -0
  16. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/crawlmgr2.py +0 -0
  17. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/curlMgr.py +0 -0
  18. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/domainManager.py +0 -0
  19. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/dynamicRateLimiter.py +0 -0
  20. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/get_test.py +0 -0
  21. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/linkManager/__init__.py +0 -0
  22. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/linkManager/linkManager.py +0 -0
  23. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/mySocketClient.py +0 -0
  24. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/networkManager.py +0 -0
  25. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/requestManager/__init__.py +0 -0
  26. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/requestManager/requestManager.py +0 -0
  27. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/seleniumManager.py +0 -0
  28. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/soupManager/__init__.py +0 -0
  29. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/soupManager/asoueces.py +0 -0
  30. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/soupManager/soupManager.py +0 -0
  31. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/sslManager.py +0 -0
  32. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/tlsAdapter.py +0 -0
  33. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/urlManager/__init__.py +0 -0
  34. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/urlManager/urlManager.py +0 -0
  35. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/userAgentManager.py +0 -0
  36. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/videoDownloader2.py +0 -0
  37. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/soup_gui.py +0 -0
  38. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/url_grabber.py +0 -0
  39. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/url_grabber_new.py +0 -0
  40. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools.egg-info/SOURCES.txt +0 -0
  41. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools.egg-info/dependency_links.txt +0 -0
  42. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools.egg-info/requires.txt +0 -0
  43. {abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools.egg-info/top_level.txt +0 -0
{abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_webtools
-Version: 0.1.6.68
+Version: 0.1.6.70
 Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
 Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
 Author: putkoff
{abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/setup.py
@@ -4,7 +4,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
     long_description = fh.read()
 setuptools.setup(
     name='abstract_webtools',
-    version='0.1.6.68',
+    version='0.1.6.70',
     author='putkoff',
     author_email='partners@abstractendeavors.com',
     description='Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.',
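The changes to PKG-INFO and setup.py are limited to the version bump. As a quick sanity check after upgrading, the installed version can be read at runtime; this is a minimal sketch (not part of the package) using the standard-library `importlib.metadata`:

```python
# Minimal sketch: confirm which abstract_webtools version is installed.
# Assumes the package was installed via pip, e.g. `pip install abstract_webtools==0.1.6.70`.
from importlib.metadata import version, PackageNotFoundError

try:
    print(version("abstract_webtools"))  # expected to print 0.1.6.70 after upgrading
except PackageNotFoundError:
    print("abstract_webtools is not installed")
```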
{abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools/managers/videoDownloader.py
@@ -201,7 +201,34 @@ def get_thumbnails(directory,info):
         info['thumbnails'][i]['path']=thumbnail_path
         download_image(thumbnail_url, save_path=thumbnail_path)
     return info
-def downloadvideo(url,directory=False,rename_display=True,thumbnails=True,audio=False):
+def optimize_video_for_safari(input_file):
+    """
+    Optimizes an MP4 file for Safari by moving the 'moov' atom to the beginning.
+    The optimized file will be saved as <original>_optimized.mp4.
+
+    Args:
+        input_file (str): Path to the original MP4 file.
+
+    Returns:
+        str: Path to the optimized MP4 file.
+    """
+    # Build the output file name
+    base, ext = os.path.splitext(input_file)
+    output_file = f"{base}_optimized{ext}"
+
+    # ffmpeg command to copy streams and reposition the moov atom
+    command = ["ffmpeg", "-i", input_file, "-c", "copy", "-movflags", "faststart", output_file]
+    try:
+        subprocess.run(command, check=True)
+        # Optionally, you could remove the original file or keep both.
+        os.remove(input_file)
+        os.rename(output_file, input_file)
+        print(f"Optimized video saved as {input_file}")
+    except subprocess.CalledProcessError as e:
+        print(f"Error during optimization: {e}")
+    return input_file
+
+def downloadvideo(url, directory=False, rename_display=True, thumbnails=True, audio=False):
     directory = directory or os.getcwd()
     temp_id = re.sub(r'[^\w\d.-]', '_', url)[-20:]
     temp_filename = f"temp_{temp_id}.mp4"
@@ -213,9 +240,10 @@ def downloadvideo(url,directory=False,rename_display=True,thumbnails=True,audio=False):
         output_filename=temp_filename
     )
     info = video_mgr.info
-    display_id= info.get('display_id') or info.get('id')
-    directory =os.path.join(directory,display_id)
+    display_id = info.get('display_id') or info.get('id')
+    directory = os.path.join(directory, display_id)
     os.makedirs(directory, exist_ok=True)
+
     if rename_display and info and 'file_path' in info:
         # Rename using metadata
         video_id = info.get('id', temp_id)
@@ -226,15 +254,20 @@ def downloadvideo(url,directory=False,rename_display=True,thumbnails=True,audio=False):
         if os.path.exists(info['file_path']):
             os.rename(info['file_path'], new_path)
             info['file_path'] = new_path
-    info_path = os.path.join(directory,'info.json')
+
+    # *** Here we call the optimization function ***
+    if new_path.lower().endswith('.mp4'):
+        info['file_path'] = optimize_video_for_safari(new_path)
+
+    info_path = os.path.join(directory, 'info.json')
     if thumbnails:
-        info = get_thumbnails(directory,info)
+        info = get_thumbnails(directory, info)
     if audio:
         try:
             info = download_audio(directory, info)
         except:
-            info['audio_path'] = audio_path
-            info['json_path']=info_path
-            safe_dump_to_file(info,info_path)
+            info['audio_path'] = None
+    info['json_path'] = info_path
+    safe_dump_to_file(info, info_path)
     return info
 
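The main functional change is the new `optimize_video_for_safari` helper, which remuxes a downloaded MP4 with ffmpeg's `-movflags faststart` so the 'moov' atom sits at the start of the file and Safari can begin playback before the whole file has downloaded. The sketch below shows the same remux step in isolation; it is illustrative only, assumes `ffmpeg` is on PATH, and the file name and the added `-y` overwrite flag are not taken from the package.

```python
# Sketch of the faststart remux that optimize_video_for_safari performs in the diff above.
# Assumption: ffmpeg is installed and on PATH; "input.mp4" is a hypothetical file name.
import os
import subprocess

def remux_faststart(input_file: str) -> str:
    base, ext = os.path.splitext(input_file)
    output_file = f"{base}_optimized{ext}"
    # Copy the streams without re-encoding and move the moov atom to the front of the file.
    subprocess.run(
        ["ffmpeg", "-y", "-i", input_file, "-c", "copy", "-movflags", "faststart", output_file],
        check=True,
    )
    return output_file

if __name__ == "__main__":
    print(remux_faststart("input.mp4"))
```

Because `-c copy` avoids re-encoding, the operation is fast and lossless; only the container layout changes.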
{abstract_webtools-0.1.6.68 → abstract_webtools-0.1.6.70}/src/abstract_webtools.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_webtools
-Version: 0.1.6.68
+Version: 0.1.6.70
 Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
 Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
 Author: putkoff